hive git commit: HIVE-19882: Fix QTestUtil session lifecycle (Zoltan Haindrich reviewed by Jason Dere)

2018-06-20 Thread kgyrtkirk
Repository: hive
Updated Branches:
  refs/heads/master aea590834 -> e6577a0d9


HIVE-19882: Fix QTestUtil session lifecycle (Zoltan Haindrich reviewed by Jason Dere)

Signed-off-by: Zoltan Haindrich 


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/e6577a0d
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/e6577a0d
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/e6577a0d

Branch: refs/heads/master
Commit: e6577a0d910ce7fc982f497848229e0195859dc1
Parents: aea5908
Author: Zoltan Haindrich 
Authored: Thu Jun 21 06:15:49 2018 +0200
Committer: Zoltan Haindrich 
Committed: Thu Jun 21 06:15:49 2018 +0200

--
 data/scripts/q_test_init.sql|  52 
 data/scripts/q_test_init_parse.sql  |  10 +
 .../hadoop/hive/ql/TestLocationQueries.java |   3 +-
 .../apache/hadoop/hive/ql/TestMTQueries.java|   1 +
 .../test/resources/testconfiguration.properties |   1 -
 .../hadoop/hive/accumulo/AccumuloQTestUtil.java |   1 +
 .../control/AbstractCoreBlobstoreCliDriver.java |   6 +-
 .../hadoop/hive/cli/control/CliConfigs.java |   2 +-
 .../hive/cli/control/CoreAccumuloCliDriver.java |  32 ++-
 .../hadoop/hive/cli/control/CoreCliDriver.java  |   8 +-
 .../hive/cli/control/CoreCompareCliDriver.java  |   9 +-
 .../hive/cli/control/CoreHBaseCliDriver.java|  33 +--
 .../cli/control/CoreHBaseNegativeCliDriver.java |  36 ++-
 .../hive/cli/control/CoreNegativeCliDriver.java |   6 +-
 .../hive/cli/control/CorePerfCliDriver.java |   5 +-
 .../org/apache/hadoop/hive/ql/QTestUtil.java| 262 ++-
 .../hadoop/hive/ql/parse/CoreParseNegative.java |   3 +-
 .../clientpositive/druidkafkamini_basic.q   |   2 +-
 .../queries/positive/input_testsequencefile.q   |  11 +
 19 files changed, 192 insertions(+), 291 deletions(-)
--
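A note on what "session lifecycle" means in this patch: q-file tests were leaking session state across runs. Below is a minimal sketch of the start/close discipline involved, written against the public SessionState API only -- illustrative, not code from the commit.

    import org.apache.hadoop.hive.conf.HiveConf;
    import org.apache.hadoop.hive.ql.session.SessionState;

    class SessionLifecycleSketch {
      static void runOneTest() throws Exception {
        HiveConf conf = new HiveConf();
        SessionState ss = SessionState.start(conf); // fresh session per test
        try {
          // execute the q-file statements under test here
        } finally {
          ss.close(); // deterministic teardown so no state leaks into the next test
        }
      }
    }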


http://git-wip-us.apache.org/repos/asf/hive/blob/e6577a0d/data/scripts/q_test_init.sql
--
diff --git a/data/scripts/q_test_init.sql b/data/scripts/q_test_init.sql
index a269c55..df05828 100644
--- a/data/scripts/q_test_init.sql
+++ b/data/scripts/q_test_init.sql
@@ -6,57 +6,5 @@ set hive.stats.dbclass=fs;
 DROP FUNCTION IF EXISTS qtest_get_java_boolean;
 CREATE FUNCTION qtest_get_java_boolean AS 'org.apache.hadoop.hive.ql.udf.generic.GenericUDFTestGetJavaBoolean';
 
---
--- Table dest1
---
-DROP TABLE IF EXISTS dest1;
-
-CREATE TABLE dest1 (key STRING COMMENT 'default', value STRING COMMENT 'default')
-STORED AS
-INPUTFORMAT 'org.apache.hadoop.mapred.TextInputFormat'
-OUTPUTFORMAT 'org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat';
-
---
--- Table dest2
---
-DROP TABLE IF EXISTS dest2;
-
-CREATE TABLE dest2 (key STRING COMMENT 'default', value STRING COMMENT 'default')
-STORED AS
-INPUTFORMAT 'org.apache.hadoop.mapred.TextInputFormat'
-OUTPUTFORMAT 'org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat';
-
---
--- Table dest3
---
-DROP TABLE IF EXISTS dest3;
-
-CREATE TABLE dest3 (key STRING COMMENT 'default', value STRING COMMENT 'default')
-PARTITIONED BY (ds STRING, hr STRING)
-STORED AS
-INPUTFORMAT 'org.apache.hadoop.mapred.TextInputFormat'
-OUTPUTFORMAT 'org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat';
-ALTER TABLE dest3 ADD PARTITION (ds='2008-04-08',hr='12');
-
---
--- Table dest4
---
-DROP TABLE IF EXISTS dest4;
-
-CREATE TABLE dest4 (key STRING COMMENT 'default', value STRING COMMENT 'default')
-STORED AS
-INPUTFORMAT 'org.apache.hadoop.mapred.TextInputFormat'
-OUTPUTFORMAT 'org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat';
-
---
--- Table dest4_sequencefile
---
-DROP TABLE IF EXISTS dest4_sequencefile;
-
-CREATE TABLE dest4_sequencefile (key STRING COMMENT 'default', value STRING COMMENT 'default')
-STORED AS
-INPUTFORMAT 'org.apache.hadoop.mapred.SequenceFileInputFormat'
-OUTPUTFORMAT 'org.apache.hadoop.mapred.SequenceFileOutputFormat';
-
 reset;
 set hive.stats.dbclass=fs;

http://git-wip-us.apache.org/repos/asf/hive/blob/e6577a0d/data/scripts/q_test_init_parse.sql
--
diff --git a/data/scripts/q_test_init_parse.sql b/data/scripts/q_test_init_parse.sql
new file mode 100644
index 0000000..f84c847
--- /dev/null
+++ b/data/scripts/q_test_init_parse.sql
@@ -0,0 +1,10 @@
+--
+-- Table dest1
+--
+DROP TABLE IF EXISTS dest1;
+
+CREATE TABLE dest1 (key STRING COMMENT 'default', value STRING COMMENT 'default')
+STORED AS
+INPUTFORMAT 'org.apache.hadoop.mapred.TextInputFormat'
+OUTPUTFORMAT 'org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat';
+
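The new q_test_init_parse.sql above carries just the dest1 table that the parse tests need, split out of the shared q_test_init.sql trimmed earlier in this commit. A hedged sketch of the kind of wiring the CliConfigs.java change in the diffstat implies; this stub stands in for AbstractCliConfig, whose real setters it mimics, and the cleanup script name is an assumption:

    class ParseNegativeConfigSketch {
      private String initScript;
      private String cleanupScript;

      ParseNegativeConfigSketch() {
        setInitScript("q_test_init_parse.sql");   // slimmed-down init, new file above
        setCleanupScript("q_test_cleanup.sql");   // assumed companion script name
      }
      void setInitScript(String s) { this.initScript = s; }
      void setCleanupScript(String s) { this.cleanupScript = s; }
    }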

http://git-wip-us.apache.org/repos/asf/hive/blob/e6577a0d/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/TestLocationQueries.java

hive git commit: HIVE-19880: Repl Load to return recoverable vs non-recoverable error codes (Mahesh Kumar Behera, reviewed by Sankar Hariappan)

2018-06-20 Thread sankarh
Repository: hive
Updated Branches:
  refs/heads/branch-3 5773bca1f -> 2b57dd27a


HIVE-19880: Repl Load to return recoverable vs non-recoverable error codes (Mahesh Kumar Behera, reviewed by Sankar Hariappan)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/2b57dd27
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/2b57dd27
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/2b57dd27

Branch: refs/heads/branch-3
Commit: 2b57dd27ad61e552f93817ac69313066af6562d9
Parents: 5773bca
Author: Sankar Hariappan 
Authored: Wed Jun 20 13:54:25 2018 -0700
Committer: Sankar Hariappan 
Committed: Wed Jun 20 13:54:25 2018 -0700

--
 .../hive/ql/parse/TestReplicationScenarios.java | 31 ++--
 .../org/apache/hive/jdbc/TestJdbcDriver2.java   | 22 ++
 .../org/apache/hadoop/hive/ql/ErrorMsg.java | 11 +++
 .../hadoop/hive/ql/exec/ReplCopyTask.java   |  3 +-
 .../hadoop/hive/ql/exec/repl/ReplDumpTask.java  |  3 +-
 .../ql/exec/repl/bootstrap/ReplLoadTask.java|  3 +-
 .../filesystem/DatabaseEventsIterator.java  |  4 +--
 .../ql/parse/ReplicationSemanticAnalyzer.java   | 20 ++---
 .../hadoop/hive/ql/parse/repl/CopyUtils.java| 16 +-
 .../hive/ql/parse/repl/dump/TableExport.java|  2 +-
 .../ql/parse/repl/dump/io/FileOperations.java   |  5 ++--
 .../hive/metastore/HiveMetaStoreClient.java |  5 +++-
 .../hive/metastore/messaging/EventUtils.java|  4 +--
 13 files changed, 98 insertions(+), 31 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/2b57dd27/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestReplicationScenarios.java
--
diff --git a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestReplicationScenarios.java b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestReplicationScenarios.java
index 862140f..d161841 100644
--- a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestReplicationScenarios.java
+++ b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestReplicationScenarios.java
@@ -73,9 +73,9 @@ import org.junit.rules.TestName;
 import org.junit.rules.TestRule;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
+import org.apache.hadoop.hive.ql.ErrorMsg;
 
 import javax.annotation.Nullable;
-
 import java.io.File;
 import java.io.FileWriter;
 import java.io.IOException;
@@ -853,7 +853,8 @@ public class TestReplicationScenarios {
 
InjectableBehaviourObjectStore.setGetNextNotificationBehaviour(eventIdSkipper);
 
 advanceDumpDir();
-verifyFail("REPL DUMP " + dbName + " FROM " + replDumpId, driver);
+CommandProcessorResponse ret = driver.run("REPL DUMP " + dbName + " FROM " + replDumpId);
+assertTrue(ret.getResponseCode() == ErrorMsg.REPL_EVENTS_MISSING_IN_METASTORE.getErrorCode());
 eventIdSkipper.assertInjectionsPerformed(true,false);
 InjectableBehaviourObjectStore.resetGetNextNotificationBehaviour(); // reset the behaviour
   }
@@ -3158,6 +3159,32 @@ public class TestReplicationScenarios {
   }
 
   @Test
+  public void testLoadCmPathMissing() throws IOException {
+String dbName = createDB(testName.getMethodName(), driver);
+run("CREATE TABLE " + dbName + ".normal(a int)", driver);
+run("INSERT INTO " + dbName + ".normal values (1)", driver);
+
+advanceDumpDir();
+run("repl dump " + dbName, true, driver);
+String dumpLocation = getResult(0, 0, driver);
+
+run("DROP TABLE " + dbName + ".normal", driver);
+
+String cmDir = hconf.getVar(HiveConf.ConfVars.REPLCMDIR);
+Path path = new Path(cmDir);
+FileSystem fs = path.getFileSystem(hconf);
+ContentSummary cs = fs.getContentSummary(path);
+long fileCount = cs.getFileCount();
+assertTrue(fileCount != 0);
+fs.delete(path);
+
+CommandProcessorResponse ret = driverMirror.run("REPL LOAD " + dbName + " FROM '" + dumpLocation + "'");
+assertTrue(ret.getResponseCode() == ErrorMsg.REPL_FILE_MISSING_FROM_SRC_AND_CM_PATH.getErrorCode());
+run("drop database " + dbName, true, driver);
+fs.create(path, false);
+  }
+
+  @Test
   public void testDumpNonReplDatabase() throws IOException {
 String dbName = createDBNonRepl(testName.getMethodName(), driver);
 verifyFail("REPL DUMP " + dbName, driver);
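The recurring pattern in this change, condensed: instead of a blanket verifyFail(), tests now pin the exact error code, which is what makes recoverable and non-recoverable failures distinguishable. A self-contained restatement of the assertion from the hunks above; the REPL command string is a placeholder, not from the diff:

    import static org.junit.Assert.assertTrue;
    import org.apache.hadoop.hive.ql.ErrorMsg;
    import org.apache.hadoop.hive.ql.IDriver;
    import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse;

    class ReplErrorCodeAssertSketch {
      static void assertDumpFailsWithMissingEvents(IDriver driver) {
        CommandProcessorResponse ret = driver.run("REPL DUMP somedb FROM 100"); // placeholder
        assertTrue(ret.getResponseCode()
            == ErrorMsg.REPL_EVENTS_MISSING_IN_METASTORE.getErrorCode());
      }
    }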

http://git-wip-us.apache.org/repos/asf/hive/blob/2b57dd27/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcDriver2.java
--
diff --git a/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcDriver2.java b/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcDriver2.java
index b217259..f597bef 100644
--- 

hive git commit: HIVE-19725: Add ability to dump non-native tables in replication metadata dump (Mahesh Kumar Behera, reviewed by Sankar Hariappan)

2018-06-20 Thread sankarh
Repository: hive
Updated Branches:
  refs/heads/branch-3 b6f7df252 -> 5773bca1f


HIVE-19725: Add ability to dump non-native tables in replication metadata dump (Mahesh Kumar Behera, reviewed by Sankar Hariappan)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/5773bca1
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/5773bca1
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/5773bca1

Branch: refs/heads/branch-3
Commit: 5773bca1ffc27613b665e232cf620b60955da26e
Parents: b6f7df2
Author: Sankar Hariappan 
Authored: Wed Jun 20 13:50:19 2018 -0700
Committer: Sankar Hariappan 
Committed: Wed Jun 20 13:50:19 2018 -0700

--
 .../hadoop/hive/ql/parse/TestExportImport.java  | 44 +++-
 ...TestReplicationScenariosAcrossInstances.java | 29 -
 .../hadoop/hive/ql/parse/repl/dump/Utils.java   |  3 +-
 3 files changed, 73 insertions(+), 3 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/5773bca1/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestExportImport.java
--
diff --git a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestExportImport.java b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestExportImport.java
index 67b74c2..53d13d8 100644
--- a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestExportImport.java
+++ b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestExportImport.java
@@ -30,9 +30,12 @@ import org.junit.Test;
 import org.junit.rules.TestName;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
-
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
 import java.io.IOException;
 import java.util.HashMap;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
 
 public class TestExportImport {
 
@@ -122,4 +125,43 @@ public class TestExportImport {
 .verifyResults(new String[] { "1", "2" });
 
   }
+
+  @Test
+  public void testExportNonNativeTable() throws Throwable {
+String path = "hdfs:///tmp/" + dbName + "/";
+String exportPath = path + "1/";
+String exportMetaPath = exportPath + "/Meta";
+String tableName =  testName.getMethodName();
+String createTableQuery =
+"CREATE TABLE " + tableName + " ( serde_id bigint COMMENT 'from 
deserializer', name string "
++ "COMMENT 'from deserializer', slib string COMMENT 'from 
deserializer') "
++ "ROW FORMAT SERDE 
'org.apache.hive.storage.jdbc.JdbcSerDe' "
++ "STORED BY 
'org.apache.hive.storage.jdbc.JdbcStorageHandler' "
++ "WITH SERDEPROPERTIES ('serialization.format'='1') "
++ "TBLPROPERTIES ( "
++ "'hive.sql.database.type'='METASTORE', "
++ "'hive.sql.query'='SELECT \"SERDE_ID\", \"NAME\", 
\"SLIB\" FROM \"SERDES\"')";
+
+srcHiveWarehouse.run("use " + dbName)
+.run(createTableQuery)
+.runFailure("export table " + tableName + " to '" + exportPath + 
"'")
+.run("export table " + tableName + " to '" + exportMetaPath + "'" 
+ " for metadata replication('1')");
+
+destHiveWarehouse.run("use " + replDbName)
+.runFailure("import table " +  tableName + " from '" + exportPath 
+ "'")
+.run("show tables")
+.verifyFailure(new String[] {tableName})
+.run("import table " + tableName + " from '" + exportMetaPath + 
"'")
+.run("show tables")
+.verifyResult(tableName);
+
+// check physical path
+Path checkPath = new Path(exportPath);
+checkPath = new Path(checkPath, EximUtil.DATA_PATH_NAME);
+FileSystem fs = checkPath.getFileSystem(srcHiveWarehouse.hiveConf);
+assertFalse(fs.exists(checkPath));
+checkPath = new Path(exportMetaPath);
+checkPath = new Path(checkPath, EximUtil.METADATA_NAME);
+assertTrue(fs.exists(checkPath));
+  }
 }
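The rule the test above exercises, spelled out: a non-native (storage-handler-backed) table has no warehouse data files, so exporting its data is rejected while a metadata-only replication dump succeeds. A one-function sketch of that decision; the helper name is invented for illustration, not Hive's API:

    class NonNativeDumpRuleSketch {
      // Illustrative only -- not an actual Hive helper.
      static boolean exportAllowed(boolean isNonNativeTable, boolean metadataOnly) {
        // Native tables dump data and metadata; non-native ones metadata only.
        return !isNonNativeTable || metadataOnly;
      }
    }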

http://git-wip-us.apache.org/repos/asf/hive/blob/5773bca1/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestReplicationScenariosAcrossInstances.java
--
diff --git a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestReplicationScenariosAcrossInstances.java b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestReplicationScenariosAcrossInstances.java
index 26e308c..0f67174 100644
--- a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestReplicationScenariosAcrossInstances.java
+++ b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestReplicationScenariosAcrossInstances.java
@@ -797,7 +797,7 @@ public 

hive git commit: HIVE-15190: Field names are not preserved in ORC files written with ACID (Anthony Hsu reviewed by Prasanth Jayachandran)

2018-06-20 Thread prasanthj
Repository: hive
Updated Branches:
  refs/heads/master 6c4d7a9b9 -> aea590834


HIVE-15190: Field names are not preserved in ORC files written with ACID (Anthony Hsu reviewed by Prasanth Jayachandran)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/aea59083
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/aea59083
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/aea59083

Branch: refs/heads/master
Commit: aea5908341c7b7084551216efc6c89d64b1cb06a
Parents: 6c4d7a9
Author: Prasanth Jayachandran 
Authored: Wed Jun 20 10:24:36 2018 -0700
Committer: Prasanth Jayachandran 
Committed: Wed Jun 20 10:24:41 2018 -0700

--
 .../hive/ql/txn/compactor/TestCompactor.java|  2 +-
 .../apache/hadoop/hive/ql/io/orc/OrcFile.java   |  6 +-
 .../hive/ql/io/orc/OrcRawRecordMerger.java  | 12 +++-
 .../hadoop/hive/ql/io/orc/OrcRecordUpdater.java | 52 -
 .../io/orc/VectorizedOrcAcidRowBatchReader.java |  2 +-
 .../apache/hadoop/hive/ql/TestTxnCommands2.java | 60 
 .../results/clientpositive/acid_nullscan.q.out  |  8 +--
 .../clientpositive/acid_table_stats.q.out   | 14 ++---
 .../clientpositive/autoColumnStats_4.q.out  |  4 +-
 .../llap/acid_bucket_pruning.q.out  |  6 +-
 .../llap/acid_vectorization_original.q.out  |  4 +-
 .../llap/default_constraint.q.out   | 14 ++---
 .../llap/dynpart_sort_optimization_acid.q.out   | 14 ++---
 .../insert_values_orig_table_use_metadata.q.out | 24 
 .../materialized_view_create_rewrite_3.q.out| 20 +++
 .../materialized_view_create_rewrite_4.q.out| 22 +++
 .../materialized_view_create_rewrite_5.q.out| 34 +--
 ...ized_view_create_rewrite_rebuild_dummy.q.out | 20 +++
 .../test/results/clientpositive/row__id.q.out   | 18 +++---
 .../tez/acid_vectorization_original_tez.q.out   |  4 +-
 .../clientpositive/tez/explainanalyze_5.q.out   |  2 +-
 21 files changed, 228 insertions(+), 114 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/aea59083/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/txn/compactor/TestCompactor.java
--
diff --git a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/txn/compactor/TestCompactor.java b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/txn/compactor/TestCompactor.java
index 4a0e834..cffa21a 100644
--- a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/txn/compactor/TestCompactor.java
+++ b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/txn/compactor/TestCompactor.java
@@ -1596,7 +1596,7 @@ public class TestCompactor {
   "'transactional'='true'," +
   "'compactor.mapreduce.map.memory.mb'='2048'," + // 2048 MB memory for 
compaction map job
   "'compactorthreshold.hive.compactor.delta.num.threshold'='4'," +  // 
minor compaction if more than 4 delta dirs
-  "'compactorthreshold.hive.compactor.delta.pct.threshold'='0.49'" + // 
major compaction if more than 49%
+  "'compactorthreshold.hive.compactor.delta.pct.threshold'='0.47'" + // 
major compaction if more than 47%
   ")", driver);
 
 // Insert 5 rows to both tables

http://git-wip-us.apache.org/repos/asf/hive/blob/aea59083/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcFile.java
--
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcFile.java b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcFile.java
index 68e88cf..56d590c 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcFile.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcFile.java
@@ -125,8 +125,10 @@ public final class OrcFile extends org.apache.orc.OrcFile {
  * @return this
  */
 public WriterOptions setSchema(TypeDescription schema) {
-  this.explicitSchema = true;
-  super.setSchema(schema);
+  if (schema != null) {
+this.explicitSchema = true;
+super.setSchema(schema);
+  }
   return this;
 }
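Why the null guard above matters for HIVE-15190: only an explicitly supplied TypeDescription carries the declared column names into the ORC footer, and a null schema should no longer flip explicitSchema. A minimal usage sketch against the public org.apache.orc API (illustrative, not taken from the patch):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.orc.OrcFile;
    import org.apache.orc.TypeDescription;

    class OrcSchemaSketch {
      static OrcFile.WriterOptions optionsWithNamedFields() {
        Configuration conf = new Configuration();
        // "key"/"value" survive into the file footer because the schema is
        // set explicitly; passing null now simply leaves the default schema.
        TypeDescription schema =
            TypeDescription.fromString("struct<key:string,value:string>");
        return OrcFile.writerOptions(conf).setSchema(schema);
      }
    }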
 

http://git-wip-us.apache.org/repos/asf/hive/blob/aea59083/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcRawRecordMerger.java
--
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcRawRecordMerger.java b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcRawRecordMerger.java
index 6571a24..929ea9b 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcRawRecordMerger.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcRawRecordMerger.java
@@ -33,6 +33,7 @@ import org.apache.hadoop.hive.shims.HadoopShims;
 import org.apache.orc.OrcUtils;
 import org.apache.orc.StripeInformation;
 import org.apache.orc.TypeDescription;
+import 

hive git commit: HIVE-15190: Field names are not preserved in ORC files written with ACID (Anthony Hsu reviewed by Prasanth Jayachandran)

2018-06-20 Thread prasanthj
Repository: hive
Updated Branches:
  refs/heads/branch-3 40fe6c397 -> b6f7df252


HIVE-15190: Field names are not preserved in ORC files written with ACID (Anthony Hsu reviewed by Prasanth Jayachandran)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/b6f7df25
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/b6f7df25
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/b6f7df25

Branch: refs/heads/branch-3
Commit: b6f7df252f9fcd6643f1d44eb6949f19a8972dde
Parents: 40fe6c3
Author: Prasanth Jayachandran 
Authored: Wed Jun 20 10:24:36 2018 -0700
Committer: Prasanth Jayachandran 
Committed: Wed Jun 20 10:26:27 2018 -0700

--
 .../hive/ql/txn/compactor/TestCompactor.java|  2 +-
 .../apache/hadoop/hive/ql/io/orc/OrcFile.java   |  6 +-
 .../hive/ql/io/orc/OrcRawRecordMerger.java  | 12 +++-
 .../hadoop/hive/ql/io/orc/OrcRecordUpdater.java | 52 -
 .../io/orc/VectorizedOrcAcidRowBatchReader.java |  2 +-
 .../apache/hadoop/hive/ql/TestTxnCommands2.java | 60 
 .../results/clientpositive/acid_nullscan.q.out  |  8 +--
 .../clientpositive/acid_table_stats.q.out   | 14 ++---
 .../clientpositive/autoColumnStats_4.q.out  |  4 +-
 .../llap/acid_bucket_pruning.q.out  |  6 +-
 .../llap/acid_vectorization_original.q.out  |  4 +-
 .../llap/default_constraint.q.out   | 14 ++---
 .../llap/dynpart_sort_optimization_acid.q.out   | 14 ++---
 .../insert_values_orig_table_use_metadata.q.out | 24 
 .../materialized_view_create_rewrite_3.q.out| 20 +++
 .../materialized_view_create_rewrite_4.q.out| 22 +++
 .../materialized_view_create_rewrite_5.q.out| 34 +--
 ...ized_view_create_rewrite_rebuild_dummy.q.out | 20 +++
 .../test/results/clientpositive/row__id.q.out   | 18 +++---
 .../tez/acid_vectorization_original_tez.q.out   |  4 +-
 .../clientpositive/tez/explainanalyze_5.q.out   |  2 +-
 21 files changed, 228 insertions(+), 114 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/b6f7df25/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/txn/compactor/TestCompactor.java
--
diff --git a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/txn/compactor/TestCompactor.java b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/txn/compactor/TestCompactor.java
index 46c99d6..c4658f9 100644
--- a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/txn/compactor/TestCompactor.java
+++ b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/txn/compactor/TestCompactor.java
@@ -1596,7 +1596,7 @@ public class TestCompactor {
   "'transactional'='true'," +
   "'compactor.mapreduce.map.memory.mb'='2048'," + // 2048 MB memory for 
compaction map job
   "'compactorthreshold.hive.compactor.delta.num.threshold'='4'," +  // 
minor compaction if more than 4 delta dirs
-  "'compactorthreshold.hive.compactor.delta.pct.threshold'='0.49'" + // 
major compaction if more than 49%
+  "'compactorthreshold.hive.compactor.delta.pct.threshold'='0.47'" + // 
major compaction if more than 47%
   ")", driver);
 
 // Insert 5 rows to both tables

http://git-wip-us.apache.org/repos/asf/hive/blob/b6f7df25/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcFile.java
--
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcFile.java b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcFile.java
index 68e88cf..56d590c 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcFile.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcFile.java
@@ -125,8 +125,10 @@ public final class OrcFile extends org.apache.orc.OrcFile {
  * @return this
  */
 public WriterOptions setSchema(TypeDescription schema) {
-  this.explicitSchema = true;
-  super.setSchema(schema);
+  if (schema != null) {
+this.explicitSchema = true;
+super.setSchema(schema);
+  }
   return this;
 }
 

http://git-wip-us.apache.org/repos/asf/hive/blob/b6f7df25/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcRawRecordMerger.java
--
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcRawRecordMerger.java b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcRawRecordMerger.java
index 6571a24..929ea9b 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcRawRecordMerger.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcRawRecordMerger.java
@@ -33,6 +33,7 @@ import org.apache.hadoop.hive.shims.HadoopShims;
 import org.apache.orc.OrcUtils;
 import org.apache.orc.StripeInformation;
 import org.apache.orc.TypeDescription;
+import 

hive git commit: HIVE-19908 Block Insert Overwrite with Union All on full CRUD ACID tables using HIVE_UNION_SUBDIR_ (Eugene Koifman, reviewed by Prasanth Jayachandran)

2018-06-20 Thread ekoifman
Repository: hive
Updated Branches:
  refs/heads/branch-3 665198c4c -> 40fe6c397


HIVE-19908 Block Insert Overwrite with Union All on full CRUD ACID tables using HIVE_UNION_SUBDIR_ (Eugene Koifman, reviewed by Prasanth Jayachandran)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/40fe6c39
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/40fe6c39
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/40fe6c39

Branch: refs/heads/branch-3
Commit: 40fe6c397ecaee0793db70187bd7ce84b871d4fe
Parents: 665198c
Author: Eugene Koifman 
Authored: Wed Jun 20 09:07:45 2018 -0700
Committer: Eugene Koifman 
Committed: Wed Jun 20 09:07:45 2018 -0700

--
 ql/src/java/org/apache/hadoop/hive/ql/Driver.java | 15 +--
 .../apache/hadoop/hive/ql/plan/FileSinkDesc.java  |  9 +
 .../apache/hadoop/hive/ql/TestTxnNoBuckets.java   | 18 ++
 3 files changed, 40 insertions(+), 2 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/40fe6c39/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
--
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/Driver.java b/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
index e43d1af..4a7131a 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
@@ -71,6 +71,7 @@ import org.apache.hadoop.hive.metastore.utils.MetaStoreUtils;
 import org.apache.hadoop.hive.ql.cache.results.CacheUsage;
 import org.apache.hadoop.hive.ql.cache.results.QueryResultsCache;
 import org.apache.hadoop.hive.ql.cache.results.QueryResultsCache.CacheEntry;
+import org.apache.hadoop.hive.ql.exec.AbstractFileMergeOperator;
 import org.apache.hadoop.hive.ql.exec.ConditionalTask;
 import org.apache.hadoop.hive.ql.exec.DagUtils;
 import org.apache.hadoop.hive.ql.exec.ExplainTask;
@@ -1578,9 +1579,19 @@ public class Driver implements IDriver {
   Utilities.getTableName(tableInfo.getTableName()));
   desc.setTableWriteId(writeId);
 
-  //it's possible to have > 1 FileSink writing to the same table/partition
-  //e.g. Merge stmt, multi-insert stmt when mixing DP and SP writes
+  /**
+   * it's possible to have > 1 FileSink writing to the same table/partition
+   * e.g. Merge stmt, multi-insert stmt when mixing DP and SP writes
+   * Insert ... Select ... Union All Select ... using
+   * {@link org.apache.hadoop.hive.ql.exec.AbstractFileMergeOperator#UNION_SUDBIR_PREFIX}
+   */
   desc.setStatementId(queryTxnMgr.getStmtIdAndIncrement());
+  String unionAllSubdir = "/" + AbstractFileMergeOperator.UNION_SUDBIR_PREFIX;
+  if(desc.getInsertOverwrite() && desc.getDirName().toString().contains(unionAllSubdir) &&
+  desc.isFullAcidTable()) {
+throw new UnsupportedOperationException("QueryId=" + plan.getQueryId() +
+" is not supported due to OVERWRITE and UNION ALL.  Please use truncate + insert");
+  }
 }
   }
 

http://git-wip-us.apache.org/repos/asf/hive/blob/40fe6c39/ql/src/java/org/apache/hadoop/hive/ql/plan/FileSinkDesc.java
--
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/FileSinkDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/FileSinkDesc.java
index 1d05468..42b8f40 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/plan/FileSinkDesc.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/FileSinkDesc.java
@@ -300,6 +300,15 @@ public class FileSinkDesc extends AbstractOperatorDesc implements IStatsGatherDe
   return AcidUtils.isInsertOnlyTable(getTableInfo().getProperties());
 }
   }
+  public boolean isFullAcidTable() {
+if(getTable() != null) {
+  return AcidUtils.isFullAcidTable(table);
+}
+else {
+  return AcidUtils.isTablePropertyTransactional(getTableInfo().getProperties()) &&
+  !AcidUtils.isInsertOnlyTable(getTableInfo().getProperties());
+}
+  }
 
   public boolean isMaterialization() {
 return materialization;
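The property logic behind the new isFullAcidTable() above, restated: a table is full ACID when it is transactional but not insert-only (MM). A self-contained sketch over raw table properties; the 'transactional_properties'='insert_only' spelling is standard Hive convention assumed here, not shown in this diff:

    import java.util.Map;

    class AcidPropsSketch {
      // Full ACID = transactional and not insert-only (MM).
      static boolean isFullAcid(Map<String, String> props) {
        boolean transactional = "true".equalsIgnoreCase(props.get("transactional"));
        boolean insertOnly =
            "insert_only".equalsIgnoreCase(props.get("transactional_properties"));
        return transactional && !insertOnly;
      }
    }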

http://git-wip-us.apache.org/repos/asf/hive/blob/40fe6c39/ql/src/test/org/apache/hadoop/hive/ql/TestTxnNoBuckets.java
--
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/TestTxnNoBuckets.java b/ql/src/test/org/apache/hadoop/hive/ql/TestTxnNoBuckets.java
index f071531..7ab76b3 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/TestTxnNoBuckets.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/TestTxnNoBuckets.java
@@ -275,6 +275,24 @@ ekoifman:apache-hive-3.0.0-SNAPSHOT-bin ekoifman$ tree /Users/ekoifman/dev/hiver
 };
 checkExpected(rs, expected, 

hive git commit: HIVE-19908 Block Insert Overwrite with Union All on full CRUD ACID tables using HIVE_UNION_SUBDIR_ (Eugene Koifman, reviewed by Prasanth Jayachandran)

2018-06-20 Thread ekoifman
Repository: hive
Updated Branches:
  refs/heads/master 52f190e15 -> 6c4d7a9b9


HIVE-19908 Block Insert Overwrite with Union All on full CRUD ACID tables using HIVE_UNION_SUBDIR_ (Eugene Koifman, reviewed by Prasanth Jayachandran)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/6c4d7a9b
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/6c4d7a9b
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/6c4d7a9b

Branch: refs/heads/master
Commit: 6c4d7a9b945f1dad27ef1a6193a689040fee7b66
Parents: 52f190e
Author: Eugene Koifman 
Authored: Wed Jun 20 09:01:52 2018 -0700
Committer: Eugene Koifman 
Committed: Wed Jun 20 09:01:52 2018 -0700

--
 ql/src/java/org/apache/hadoop/hive/ql/Driver.java | 15 +--
 .../apache/hadoop/hive/ql/plan/FileSinkDesc.java  |  9 +
 .../apache/hadoop/hive/ql/TestTxnNoBuckets.java   | 18 ++
 3 files changed, 40 insertions(+), 2 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/6c4d7a9b/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
--
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/Driver.java b/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
index 43a78ca..762e57c 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
@@ -71,6 +71,7 @@ import org.apache.hadoop.hive.metastore.utils.MetaStoreUtils;
 import org.apache.hadoop.hive.ql.cache.results.CacheUsage;
 import org.apache.hadoop.hive.ql.cache.results.QueryResultsCache;
 import org.apache.hadoop.hive.ql.cache.results.QueryResultsCache.CacheEntry;
+import org.apache.hadoop.hive.ql.exec.AbstractFileMergeOperator;
 import org.apache.hadoop.hive.ql.exec.ConditionalTask;
 import org.apache.hadoop.hive.ql.exec.DagUtils;
 import org.apache.hadoop.hive.ql.exec.ExplainTask;
@@ -1581,9 +1582,19 @@ public class Driver implements IDriver {
   Utilities.getTableName(tableInfo.getTableName()));
   desc.setTableWriteId(writeId);
 
-  //it's possible to have > 1 FileSink writing to the same table/partition
-  //e.g. Merge stmt, multi-insert stmt when mixing DP and SP writes
+  /**
+   * it's possible to have > 1 FileSink writing to the same table/partition
+   * e.g. Merge stmt, multi-insert stmt when mixing DP and SP writes
+   * Insert ... Select ... Union All Select ... using
+   * {@link org.apache.hadoop.hive.ql.exec.AbstractFileMergeOperator#UNION_SUDBIR_PREFIX}
+   */
   desc.setStatementId(queryTxnMgr.getStmtIdAndIncrement());
+  String unionAllSubdir = "/" + AbstractFileMergeOperator.UNION_SUDBIR_PREFIX;
+  if(desc.getInsertOverwrite() && desc.getDirName().toString().contains(unionAllSubdir) &&
+  desc.isFullAcidTable()) {
+throw new UnsupportedOperationException("QueryId=" + plan.getQueryId() +
+" is not supported due to OVERWRITE and UNION ALL.  Please use truncate + insert");
+  }
 }
   }
 

http://git-wip-us.apache.org/repos/asf/hive/blob/6c4d7a9b/ql/src/java/org/apache/hadoop/hive/ql/plan/FileSinkDesc.java
--
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/FileSinkDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/FileSinkDesc.java
index 1d05468..42b8f40 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/plan/FileSinkDesc.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/FileSinkDesc.java
@@ -300,6 +300,15 @@ public class FileSinkDesc extends AbstractOperatorDesc implements IStatsGatherDe
   return AcidUtils.isInsertOnlyTable(getTableInfo().getProperties());
 }
   }
+  public boolean isFullAcidTable() {
+if(getTable() != null) {
+  return AcidUtils.isFullAcidTable(table);
+}
+else {
+  return AcidUtils.isTablePropertyTransactional(getTableInfo().getProperties()) &&
+  !AcidUtils.isInsertOnlyTable(getTableInfo().getProperties());
+}
+  }
 
   public boolean isMaterialization() {
 return materialization;

http://git-wip-us.apache.org/repos/asf/hive/blob/6c4d7a9b/ql/src/test/org/apache/hadoop/hive/ql/TestTxnNoBuckets.java
--
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/TestTxnNoBuckets.java b/ql/src/test/org/apache/hadoop/hive/ql/TestTxnNoBuckets.java
index f071531..7ab76b3 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/TestTxnNoBuckets.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/TestTxnNoBuckets.java
@@ -275,6 +275,24 @@ ekoifman:apache-hive-3.0.0-SNAPSHOT-bin ekoifman$ tree /Users/ekoifman/dev/hiver
 };
 checkExpected(rs, expected, "Unexpected