hive git commit: HIVE-19467: Make storage format configurable for temp tables created using LLAP external client (Jason Dere, reviewed by Deepak Jaiswal)

2018-05-09 Thread jdere
Repository: hive
Updated Branches:
  refs/heads/master 8ac625744 -> 1cd5274c5


HIVE-19467: Make storage format configurable for temp tables created using LLAP external client (Jason Dere, reviewed by Deepak Jaiswal)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/1cd5274c
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/1cd5274c
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/1cd5274c

Branch: refs/heads/master
Commit: 1cd5274c58eb53cae8f38d172bbbc1517f199251
Parents: 8ac6257
Author: Jason Dere 
Authored: Wed May 9 18:05:50 2018 -0700
Committer: Jason Dere 
Committed: Wed May 9 18:05:50 2018 -0700

--
 .../java/org/apache/hadoop/hive/conf/HiveConf.java   |  3 +++
 .../org/apache/hive/jdbc/TestJdbcWithMiniLlap.java   | 13 +
 .../hive/ql/udf/generic/GenericUDTFGetSplits.java| 15 ++-
 3 files changed, 30 insertions(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/1cd5274c/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
--
diff --git a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java b/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
index fa3f788..cc490af 100644
--- a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
+++ b/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
@@ -4145,6 +4145,9 @@ public class HiveConf extends Configuration {
 
    LLAP_DAEMON_OUTPUT_SERVICE_MAX_PENDING_WRITES("hive.llap.daemon.output.service.max.pending.writes",
        8, "Maximum number of queued writes allowed per connection when sending data\n" +
        " via the LLAP output service to external clients."),
+    LLAP_EXTERNAL_SPLITS_TEMP_TABLE_STORAGE_FORMAT("hive.llap.external.splits.temp.table.storage.format",
+        "orc", new StringSet("default", "text", "orc"),
+        "Storage format for temp tables created using LLAP external client"),
     LLAP_ENABLE_GRACE_JOIN_IN_LLAP("hive.llap.enable.grace.join.in.llap", false,
        "Override if grace join should be allowed to run in llap."),
 

http://git-wip-us.apache.org/repos/asf/hive/blob/1cd5274c/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcWithMiniLlap.java
--
diff --git a/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcWithMiniLlap.java b/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcWithMiniLlap.java
index 68a8e21..7e35fef 100644
--- a/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcWithMiniLlap.java
+++ b/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcWithMiniLlap.java
@@ -439,6 +439,19 @@ public class TestJdbcWithMiniLlap {
     assertArrayEquals("X'01FF'".getBytes("UTF-8"), (byte[]) rowValues[22]);
   }
 
+
+  @Test(timeout = 60000)
+  public void testComplexQuery() throws Exception {
+    createTestTable("testtab1");
+
+    RowCollector rowCollector = new RowCollector();
+    String query = "select value, count(*) from testtab1 where under_col=0 group by value";
+    int rowCount = processQuery(query, 1, rowCollector);
+    assertEquals(1, rowCount);
+
+    assertArrayEquals(new String[] {"val_0", "3"}, rowCollector.rows.get(0));
+  }
+
   private interface RowProcessor {
     void process(Row row);
   }

http://git-wip-us.apache.org/repos/asf/hive/blob/1cd5274c/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDTFGetSplits.java
--
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDTFGetSplits.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDTFGetSplits.java
index cae02a9..e74a188 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDTFGetSplits.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDTFGetSplits.java
@@ -268,7 +268,8 @@ public class GenericUDTFGetSplits extends GenericUDTF {
 
     String tableName = "table_"+UUID.randomUUID().toString().replaceAll("[^A-Za-z0-9 ]", "");
 
-    String ctas = "create temporary table " + tableName + " as " + query;
+    String storageFormatString = getTempTableStorageFormatString(conf);
+    String ctas = "create temporary table " + tableName + " " + storageFormatString + " as " + query;
     LOG.info("Materializing the query for LLAPIF; CTAS: " + ctas);
     driver.releaseResources();
     HiveConf.setVar(conf, ConfVars.HIVE_EXECUTION_MODE, originalMode);
@@ -641,6 +642,18 @@ public class GenericUDTFGetSplits extends GenericUDTF {
 return Schema;
   }
 
+  private String getTempTableStorageFormatString(HiveConf conf) {
+    String formatString = "";
+    String storageFormatOption =
+        conf.getVar(HiveCon
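[The archive truncates the message mid-helper. As a hedged sketch only, the method plausibly completes along these lines, mapping the "default"/"text"/"orc" values registered in HiveConf above onto a STORED AS clause; the committed body may differ:]

    private String getTempTableStorageFormatString(HiveConf conf) {
      String formatString = "";  // "default": leave the CTAS clause empty
      String storageFormatOption =
          conf.getVar(HiveConf.ConfVars.LLAP_EXTERNAL_SPLITS_TEMP_TABLE_STORAGE_FORMAT).toLowerCase();
      if (storageFormatOption.equals("text")) {
        formatString = "stored as textfile";
      } else if (storageFormatOption.equals("orc")) {
        formatString = "stored as orc";
      }
      return formatString;  // spliced into the CTAS string built above
    }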

hive git commit: HIVE-19135 Need tool to allow admins to create catalogs and move existing dbs to catalog during upgrade (Alan Gates, reviewed by Thejas Nair)

2018-05-09 Thread vgarg
Repository: hive
Updated Branches:
  refs/heads/branch-3 a8fc0e671 -> 5bb3df0ee


HIVE-19135 Need tool to allow admins to create catalogs and move existing dbs to catalog during upgrade (Alan Gates, reviewed by Thejas Nair)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/5bb3df0e
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/5bb3df0e
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/5bb3df0e

Branch: refs/heads/branch-3
Commit: 5bb3df0eec9fd80f921c7bfb7a2ffbc40312bd58
Parents: a8fc0e6
Author: Alan Gates 
Authored: Tue May 1 12:29:55 2018 -0700
Committer: Vineet Garg 
Committed: Wed May 9 13:32:07 2018 -0700

--
 .../org/apache/hive/beeline/HiveSchemaTool.java | 275 +-
 .../hive/beeline/TestSchemaToolCatalogOps.java  | 375 +++
 .../hadoop/hive/metastore/ObjectStore.java  |   1 +
 3 files changed, 649 insertions(+), 2 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/5bb3df0e/beeline/src/java/org/apache/hive/beeline/HiveSchemaTool.java
--
diff --git a/beeline/src/java/org/apache/hive/beeline/HiveSchemaTool.java b/beeline/src/java/org/apache/hive/beeline/HiveSchemaTool.java
index a90127b..a469cd4 100644
--- a/beeline/src/java/org/apache/hive/beeline/HiveSchemaTool.java
+++ b/beeline/src/java/org/apache/hive/beeline/HiveSchemaTool.java
@@ -17,6 +17,7 @@
  */
 package org.apache.hive.beeline;
 
+import com.google.common.annotations.VisibleForTesting;
 import org.apache.commons.cli.CommandLine;
 import org.apache.commons.cli.CommandLineParser;
 import org.apache.commons.cli.GnuParser;
@@ -33,6 +34,7 @@ import org.apache.commons.lang3.tuple.Pair;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
+import org.apache.hadoop.hive.metastore.DatabaseProduct;
 import org.apache.hadoop.hive.metastore.HiveMetaException;
 import org.apache.hadoop.hive.metastore.IMetaStoreSchemaInfo;
 import org.apache.hadoop.hive.metastore.MetaStoreSchemaInfoFactory;
@@ -42,6 +44,7 @@ import org.apache.hadoop.hive.metastore.conf.MetastoreConf;
 import org.apache.hadoop.hive.metastore.tools.HiveSchemaHelper;
 import org.apache.hadoop.hive.metastore.tools.HiveSchemaHelper.MetaStoreConnectionInfo;
 import org.apache.hadoop.hive.metastore.tools.HiveSchemaHelper.NestedScriptParser;
+import org.apache.hadoop.hive.metastore.tools.SQLGenerator;
 import org.apache.hadoop.hive.shims.ShimLoader;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -71,6 +74,8 @@ import java.util.Map;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
 
+import static org.apache.hadoop.hive.metastore.utils.StringUtils.normalizeIdentifier;
+
 public class HiveSchemaTool {
   private String userName = null;
   private String passWord = null;
@@ -85,6 +90,7 @@ public class HiveSchemaTool {
   private final String metaDbType;
   private final IMetaStoreSchemaInfo metaStoreSchemaInfo;
   private boolean needsQuotedIdentifier;
+  private String quoteCharacter;
 
  static final private Logger LOG = LoggerFactory.getLogger(HiveSchemaTool.class.getName());
 
@@ -100,7 +106,9 @@ public class HiveSchemaTool {
 this.hiveConf = hiveConf;
 this.dbType = dbType;
 this.metaDbType = metaDbType;
-    this.needsQuotedIdentifier = getDbCommandParser(dbType, metaDbType).needsQuotedIdentifier();
+    NestedScriptParser parser = getDbCommandParser(dbType, metaDbType);
+    this.needsQuotedIdentifier = parser.needsQuotedIdentifier();
+    this.quoteCharacter = parser.getQuoteCharacter();
     this.metaStoreSchemaInfo = MetaStoreSchemaInfoFactory.get(hiveConf, hiveHome, dbType);
   }
 
@@ -878,6 +886,204 @@ public class HiveSchemaTool {
 }
   }
 
+  @VisibleForTesting
+  void createCatalog(String catName, String location, String description, boolean ifNotExists)
+      throws HiveMetaException {
+    catName = normalizeIdentifier(catName);
+    System.out.println("Create catalog " + catName + " at location " + location);
+
+    Connection conn = getConnectionToMetastore(true);
+    boolean success = false;
+    try {
+      conn.setAutoCommit(false);
+      try (Statement stmt = conn.createStatement()) {
+        // If they set ifNotExists check for existence first, and bail if it exists.  This is
+        // more reliable than attempting to parse the error message from the SQLException.
+        if (ifNotExists) {
+          String query = "select " + quoteIf("NAME") + " from " + quoteIf("CTLGS") +
+              " where " + quoteIf("NAME") + " = '" + catName + "'";
+          LOG.debug("Going to run " + query);
+          ResultSet rs = stmt.executeQuery(query);
+          if (rs.next()) {
+            System.out.println(
hive git commit: HIVE-19389: Schematool: For Hive's Information Schema, use embedded HS2 as default (Vaibhav Gumashta reviewed by Daniel Dai)

2018-05-09 Thread vgumashta
Repository: hive
Updated Branches:
  refs/heads/master 72eff127a -> 8ac625744


HIVE-19389: Schematool: For Hive's Information Schema, use embedded HS2 as default (Vaibhav Gumashta reviewed by Daniel Dai)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/8ac62574
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/8ac62574
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/8ac62574

Branch: refs/heads/master
Commit: 8ac625744109fde23e105fde3e02f5da894da8d4
Parents: 72eff12
Author: Vaibhav Gumashta 
Authored: Wed May 9 12:23:10 2018 -0700
Committer: Vaibhav Gumashta 
Committed: Wed May 9 12:23:10 2018 -0700

--
 beeline/src/java/org/apache/hive/beeline/HiveSchemaTool.java  | 7 +++
 .../apache/hadoop/hive/metastore/tools/HiveSchemaHelper.java  | 3 +++
 2 files changed, 10 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/8ac62574/beeline/src/java/org/apache/hive/beeline/HiveSchemaTool.java
--
diff --git a/beeline/src/java/org/apache/hive/beeline/HiveSchemaTool.java b/beeline/src/java/org/apache/hive/beeline/HiveSchemaTool.java
index a469cd4..7aad265 100644
--- a/beeline/src/java/org/apache/hive/beeline/HiveSchemaTool.java
+++ b/beeline/src/java/org/apache/hive/beeline/HiveSchemaTool.java
@@ -110,6 +110,13 @@ public class HiveSchemaTool {
     this.needsQuotedIdentifier = parser.needsQuotedIdentifier();
     this.quoteCharacter = parser.getQuoteCharacter();
     this.metaStoreSchemaInfo = MetaStoreSchemaInfoFactory.get(hiveConf, hiveHome, dbType);
+    // If the dbType is "hive", this is setting up the information schema in Hive.
+    // We will set the default jdbc url and driver.
+    // It is overridden by command line options if passed (-url and -driver).
+    if (dbType.equalsIgnoreCase(HiveSchemaHelper.DB_HIVE)) {
+      url = HiveSchemaHelper.EMBEDDED_HS2_URL;
+      driver = HiveSchemaHelper.HIVE_JDBC_DRIVER;
+    }
   }
 
   public HiveConf getHiveConf() {

http://git-wip-us.apache.org/repos/asf/hive/blob/8ac62574/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/tools/HiveSchemaHelper.java
--
diff --git a/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/tools/HiveSchemaHelper.java b/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/tools/HiveSchemaHelper.java
index 785978b..70746e8 100644
--- a/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/tools/HiveSchemaHelper.java
+++ b/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/tools/HiveSchemaHelper.java
@@ -44,6 +44,9 @@ public class HiveSchemaHelper {
   public static final String DB_MYSQL = "mysql";
   public static final String DB_POSTGRACE = "postgres";
   public static final String DB_ORACLE = "oracle";
+  public static final String EMBEDDED_HS2_URL = "jdbc:hive2://";
+  public static final String HIVE_JDBC_DRIVER = "org.apache.hive.jdbc.HiveDriver";
+
 
   /***
* Get JDBC connection to metastore db
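[A quick sketch of what the two new constants enable: schematool talking to an in-process HiveServer2 over JDBC instead of a remote one. The connection code below is illustrative, and the empty credentials are an assumption:]

    // Illustrative only: open an embedded HS2 connection with the new defaults.
    Class.forName(HiveSchemaHelper.HIVE_JDBC_DRIVER);
    java.sql.Connection conn =
        java.sql.DriverManager.getConnection(HiveSchemaHelper.EMBEDDED_HS2_URL, "", "");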



hive git commit: HIVE-18193: Migrate existing ACID tables to use write id per table rather than global transaction id (Sankar Hariappan, reviewed by Mahesh Kumar Behera, Thejas M Nair)

2018-05-09 Thread sankarh
Repository: hive
Updated Branches:
  refs/heads/master f2172cdbc -> 72eff127a


HIVE-18193: Migrate existing ACID tables to use write id per table rather than global transaction id (Sankar Hariappan, reviewed by Mahesh Kumar Behera, Thejas M Nair)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/72eff127
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/72eff127
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/72eff127

Branch: refs/heads/master
Commit: 72eff127ad0b2b94c814409cd1eb5ae9822724c0
Parents: f2172cd
Author: Sankar Hariappan 
Authored: Thu May 10 00:14:21 2018 +0530
Committer: Sankar Hariappan 
Committed: Thu May 10 00:14:21 2018 +0530

--
 .../upgrade/derby/057-HIVE-18193.derby.sql  | 24 
 .../hadoop/hive/metastore/txn/TxnDbUtil.java|  4 +-
 .../main/sql/derby/hive-schema-3.0.0.derby.sql  |  4 +-
 .../sql/derby/upgrade-2.3.0-to-3.0.0.derby.sql  | 25 +
 .../main/sql/mssql/hive-schema-3.0.0.mssql.sql  |  4 +-
 .../sql/mssql/upgrade-2.3.0-to-3.0.0.mssql.sql  | 25 +
 .../main/sql/mysql/hive-schema-3.0.0.mysql.sql  |  2 +-
 .../sql/mysql/upgrade-2.3.0-to-3.0.0.mysql.sql  | 27 ++
 .../sql/oracle/hive-schema-3.0.0.oracle.sql | 22 +--
 .../oracle/upgrade-2.3.0-to-3.0.0.oracle.sql| 39 
 .../sql/postgres/hive-schema-3.0.0.postgres.sql |  4 +-
 .../upgrade-2.3.0-to-3.0.0.postgres.sql | 25 +
 12 files changed, 178 insertions(+), 27 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/72eff127/metastore/scripts/upgrade/derby/057-HIVE-18193.derby.sql
--
diff --git a/metastore/scripts/upgrade/derby/057-HIVE-18193.derby.sql b/metastore/scripts/upgrade/derby/057-HIVE-18193.derby.sql
new file mode 100644
index 000..499d06e
--- /dev/null
+++ b/metastore/scripts/upgrade/derby/057-HIVE-18193.derby.sql
@@ -0,0 +1,24 @@
+
+-- Populate NEXT_WRITE_ID for each Transactional table and set next write ID same as next txn ID
+INSERT INTO NEXT_WRITE_ID (NWI_DATABASE, NWI_TABLE, NWI_NEXT)
+SELECT * FROM
+(SELECT DB.NAME, TBL_INFO.TBL_NAME FROM DBS DB,
+(SELECT TBL.DB_ID, TBL.TBL_NAME FROM TBLS TBL,
+(SELECT TBL_ID FROM TABLE_PARAMS WHERE PARAM_KEY='transactional' AND CAST(PARAM_VALUE AS VARCHAR(128))='true') TBL_PARAM
+WHERE TBL.TBL_ID=TBL_PARAM.TBL_ID) TBL_INFO
+where DB.DB_ID=TBL_INFO.DB_ID) DB_TBL_NAME,
+(SELECT NTXN_NEXT FROM NEXT_TXN_ID) NEXT_TXN_ID;
+
+-- Populate TXN_TO_WRITE_ID for each aborted/open txns and set write ID equal to txn ID
+INSERT INTO TXN_TO_WRITE_ID (T2W_DATABASE, T2W_TABLE, T2W_TXNID, T2W_WRITEID)
+SELECT * FROM
+(SELECT DB.NAME, TBL_INFO.TBL_NAME FROM DBS DB,
+(SELECT TBL.DB_ID, TBL.TBL_NAME FROM TBLS TBL,
+(SELECT TBL_ID FROM TABLE_PARAMS WHERE PARAM_KEY='transactional' AND CAST(PARAM_VALUE AS VARCHAR(128))='true') TBL_PARAM
+WHERE TBL.TBL_ID=TBL_PARAM.TBL_ID) TBL_INFO
+where DB.DB_ID=TBL_INFO.DB_ID) DB_TBL_NAME,
+(SELECT TXN_ID, TXN_ID as WRITE_ID FROM TXNS) TXN_INFO;
+
+-- Update TXN_COMPONENTS and COMPLETED_TXN_COMPONENTS for write ID which is same as txn ID
+UPDATE TXN_COMPONENTS SET TC_WRITEID = TC_TXNID;
+UPDATE COMPLETED_TXN_COMPONENTS SET CTC_WRITEID = CTC_TXNID;

http://git-wip-us.apache.org/repos/asf/hive/blob/72eff127/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/txn/TxnDbUtil.java
--
diff --git a/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/txn/TxnDbUtil.java b/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/txn/TxnDbUtil.java
index cf89ab2..4597166 100644
--- a/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/txn/TxnDbUtil.java
+++ b/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/txn/TxnDbUtil.java
@@ -82,14 +82,14 @@ public final class TxnDbUtil {
           "  TXN_HOST varchar(128) NOT NULL)");
 
       stmt.execute("CREATE TABLE TXN_COMPONENTS (" +
-          "  TC_TXNID bigint REFERENCES TXNS (TXN_ID)," +
+          "  TC_TXNID bigint NOT NULL REFERENCES TXNS (TXN_ID)," +
           "  TC_DATABASE varchar(128) NOT NULL," +
           "  TC_TABLE varchar(128)," +
           "  TC_PARTITION varchar(767)," +
           "  TC_OPERATION_TYPE char(1) NOT NULL," +
           "  TC_WRITEID bigint)");
       stmt.execute("CREATE TABLE COMPLETED_TXN_COMPONENTS (" +
-          "  CTC_TXNID bigint," +
+          "  CTC_TXNID bigint NOT NULL," +
           "  CTC_DATABASE varchar(128) NOT NULL," +
           "  CTC_TABLE varchar(128)," +
           "  CTC_PARTITION varcha

hive git commit: HIVE-19248: REPL LOAD couldn't copy file from source CM path and also doesn't throw error if file copy fails (Sankar Hariappan, reviewed by Mahesh Kumar Behera, Thejas M Nair)

2018-05-09 Thread sankarh
Repository: hive
Updated Branches:
  refs/heads/branch-3 31027371f -> a8fc0e671


HIVE-19248: REPL LOAD couldn't copy file from source CM path and also doesn't throw error if file copy fails (Sankar Hariappan, reviewed by Mahesh Kumar Behera, Thejas M Nair)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/a8fc0e67
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/a8fc0e67
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/a8fc0e67

Branch: refs/heads/branch-3
Commit: a8fc0e67183ed063bb2b3aee69a290ac734b3f51
Parents: 3102737
Author: Sankar Hariappan 
Authored: Thu May 10 00:04:52 2018 +0530
Committer: Sankar Hariappan 
Committed: Thu May 10 00:04:52 2018 +0530

--
 .../listener/DbNotificationListener.java|  19 +--
 .../hive/metastore/TestReplChangeManager.java   |  54 +++---
 .../hadoop/hive/ql/exec/ReplCopyTask.java   |  12 +-
 .../hadoop/hive/ql/parse/repl/CopyUtils.java| 168 ---
 .../hive/ql/parse/repl/load/DumpMetaData.java   |   1 -
 .../load/message/CreateFunctionHandler.java |   4 +-
 .../apache/hadoop/hive/shims/Hadoop23Shims.java |   3 +-
 .../hadoop/hive/shims/TestHadoop23Shims.java|  24 +--
 .../hive/metastore/ReplChangeManager.java   |  94 ++-
 .../hadoop/hive/metastore/utils/FileUtils.java  |  64 ++-
 .../hadoop/hive/metastore/utils/HdfsUtils.java  |   3 +-
 .../hive/metastore/utils/StringUtils.java   |  24 ++-
 12 files changed, 302 insertions(+), 168 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/a8fc0e67/hcatalog/server-extensions/src/main/java/org/apache/hive/hcatalog/listener/DbNotificationListener.java
--
diff --git a/hcatalog/server-extensions/src/main/java/org/apache/hive/hcatalog/listener/DbNotificationListener.java b/hcatalog/server-extensions/src/main/java/org/apache/hive/hcatalog/listener/DbNotificationListener.java
index 7835691..6321f9b 100644
--- a/hcatalog/server-extensions/src/main/java/org/apache/hive/hcatalog/listener/DbNotificationListener.java
+++ b/hcatalog/server-extensions/src/main/java/org/apache/hive/hcatalog/listener/DbNotificationListener.java
@@ -426,7 +426,14 @@ public class DbNotificationListener extends TransactionalMetaStoreEventListener
 
     @Override
     public String next() {
-      String result = encodeFileUri(files.get(i), chksums != null? chksums.get(i) : null);
+      String result;
+      try {
+        result = ReplChangeManager.encodeFileUri(files.get(i), chksums != null ? chksums.get(i) : null, null);
+      } catch (IOException e) {
+        // File operations failed
+        LOG.error("Encoding file URI failed with error " + e.getMessage());
+        throw new RuntimeException(e.getMessage());
+      }
       i++;
       return result;
     }
@@ -788,14 +795,4 @@ public class DbNotificationListener extends TransactionalMetaStoreEventListener
     }
 
   }
-
-  // TODO: this needs to be enhanced once change management based filesystem is implemented
-  // Currently using fileuri#checksum as the format
-  private String encodeFileUri(String fileUriStr, String fileChecksum) {
-    if (fileChecksum != null) {
-      return fileUriStr + "#" + fileChecksum;
-    } else {
-      return fileUriStr;
-    }
-  }
 }
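[The removed private helper is superseded by ReplChangeManager.encodeFileUri, which takes the CM root as a third argument (see the call site above). Its body is not shown in this message; a hedged sketch of the shape the call sites imply, not the committed code:]

    // Assumption, inferred from call sites only: extend the old
    // "fileuri#checksum" format so the encoded string also carries the
    // CM root, letting REPL LOAD fall back to the CM copy of the file.
    public static String encodeFileUri(String fileUriStr, String fileChecksum, String cmRoot)
        throws IOException {
      String encoded = fileUriStr;
      encoded += "#" + (fileChecksum != null ? fileChecksum : "");
      encoded += "#" + (cmRoot != null ? cmRoot : "");
      return encoded;
    }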

http://git-wip-us.apache.org/repos/asf/hive/blob/a8fc0e67/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestReplChangeManager.java
--
diff --git a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestReplChangeManager.java b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestReplChangeManager.java
index 6ade76d..e63250c 100644
--- a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestReplChangeManager.java
+++ b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestReplChangeManager.java
@@ -175,19 +175,19 @@ public class TestReplChangeManager {
     // verify cm.recycle(db, table, part) api moves file to cmroot dir
     int ret = cm.recycle(part1Path, RecycleType.MOVE, false);
     Assert.assertEquals(ret, 1);
-    Path cmPart1Path = ReplChangeManager.getCMPath(hiveConf, part1Path.getName(), path1Chksum);
+    Path cmPart1Path = ReplChangeManager.getCMPath(hiveConf, part1Path.getName(), path1Chksum, cmroot.toString());
     assertTrue(cmPart1Path.getFileSystem(hiveConf).exists(cmPart1Path));
 
     // Verify dropPartition recycle part files
     client.dropPartition(dbName, tblName, Arrays.asList("20160102"));
     assertFalse(part2Path.getFileSystem(hiveConf).exists(part2Path));
-    Path cmPart2Path = ReplChangeManager.getCM

hive git commit: HIVE-19228: Remove commons-httpclient 3.x usage (Janaki Lahorani reviewed by Aihua Xu)

2018-05-09 Thread aihuaxu
Repository: hive
Updated Branches:
  refs/heads/master e1e497574 -> f2172cdbc


HIVE-19228: Remove commons-httpclient 3.x usage (Janaki Lahorani reviewed by Aihua Xu)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/f2172cdb
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/f2172cdb
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/f2172cdb

Branch: refs/heads/master
Commit: f2172cdbc3af92e18db68c3daac44a73e92eaf48
Parents: e1e4975
Author: Aihua Xu 
Authored: Wed May 9 10:56:32 2018 -0700
Committer: Aihua Xu 
Committed: Wed May 9 11:27:45 2018 -0700

--
 .../apache/hive/jdbc/TestActivePassiveHA.java   | 99 +---
 pom.xml |  6 --
 ql/pom.xml  | 15 ---
 .../hive/ql/parse/LoadSemanticAnalyzer.java | 16 +++-
 .../apache/hive/service/server/HiveServer2.java | 52 ++
 5 files changed, 111 insertions(+), 77 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/f2172cdb/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestActivePassiveHA.java
--
diff --git a/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestActivePassiveHA.java b/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestActivePassiveHA.java
index c55271f..4055f13 100644
--- a/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestActivePassiveHA.java
+++ b/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestActivePassiveHA.java
@@ -36,11 +36,6 @@ import java.util.Map;
 import java.util.UUID;
 import java.util.concurrent.TimeUnit;
 
-import org.apache.commons.httpclient.HttpClient;
-import org.apache.commons.httpclient.HttpMethodBase;
-import org.apache.commons.httpclient.methods.DeleteMethod;
-import org.apache.commons.httpclient.methods.GetMethod;
-import org.apache.commons.httpclient.methods.OptionsMethod;
 import org.apache.curator.test.TestingServer;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
@@ -54,9 +49,22 @@ import org.apache.hive.service.server.HS2ActivePassiveHARegistryClient;
 import org.apache.hive.service.server.HiveServer2Instance;
 import org.apache.hive.service.server.TestHS2HttpServerPam;
 import org.apache.hive.service.servlet.HS2Peers;
+import org.apache.http.Header;
 import org.apache.http.HttpException;
 import org.apache.http.HttpHeaders;
+import org.apache.http.client.methods.CloseableHttpResponse;
+import org.apache.http.client.methods.HttpDelete;
+import org.apache.http.client.methods.HttpGet;
+import org.apache.http.client.methods.HttpOptions;
+import org.apache.http.client.methods.HttpRequestBase;
+import org.apache.http.impl.client.CloseableHttpClient;
+import org.apache.http.impl.client.HttpClients;
+import org.apache.http.StatusLine;
+import org.apache.http.util.EntityUtils;
 import org.codehaus.jackson.map.ObjectMapper;
+import org.eclipse.jetty.http.HttpHeader;
+import org.eclipse.jetty.util.B64Code;
+import org.eclipse.jetty.util.StringUtil;
 import org.junit.After;
 import org.junit.AfterClass;
 import org.junit.Before;
@@ -406,7 +414,7 @@ public class TestActivePassiveHA {
       assertEquals("true", sendGet(url1, true));
 
       // trigger failover on miniHS2_1 without authorization header
-      assertEquals("Unauthorized", sendDelete(url1, false));
+      assertTrue(sendDelete(url1, false).contains("Unauthorized"));
       assertTrue(sendDelete(url1, true).contains("Failover successful!"));
       assertEquals(true, miniHS2_1.getNotLeaderTestFuture().get());
       assertEquals(false, miniHS2_1.isLeader());
@@ -541,56 +549,79 @@ public class TestActivePassiveHA {
   }
 
   private String sendGet(String url, boolean enableAuth) throws Exception {
-    return sendAuthMethod(new GetMethod(url), enableAuth, false);
+    return sendAuthMethod(new HttpGet(url), enableAuth, false);
   }
 
   private String sendGet(String url, boolean enableAuth, boolean enableCORS) throws Exception {
-    return sendAuthMethod(new GetMethod(url), enableAuth, enableCORS);
+    return sendAuthMethod(new HttpGet(url), enableAuth, enableCORS);
   }
 
   private String sendDelete(String url, boolean enableAuth) throws Exception {
-    return sendAuthMethod(new DeleteMethod(url), enableAuth, false);
+    return sendAuthMethod(new HttpDelete(url), enableAuth, false);
   }
 
   private String sendDelete(String url, boolean enableAuth, boolean enableCORS) throws Exception {
-    return sendAuthMethod(new DeleteMethod(url), enableAuth, enableCORS);
+    return sendAuthMethod(new HttpDelete(url), enableAuth, enableCORS);
   }
 
-  private String sendAuthMethod(HttpMethodBase method, boolean enableAuth, boolean enableCORS) throws Exception {
-    HttpClient client = new HttpClient();
-    try {
-
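[The archive truncates the message here, cutting off the rewritten sendAuthMethod. The imports added at the top of the file point at the standard HttpClient 4.x pattern; a minimal sketch of that pattern, with the auth/CORS header handling elided since it is not visible above (not the committed body):]

    // Illustrative pattern only: execute a request and drain the body.
    private String sendAuthMethod(HttpRequestBase method, boolean enableAuth, boolean enableCORS) throws Exception {
      try (CloseableHttpClient client = HttpClients.createDefault()) {
        // auth/CORS headers would be set on 'method' here, as before
        try (CloseableHttpResponse response = client.execute(method)) {
          return EntityUtils.toString(response.getEntity());
        }
      }
    }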

hive git commit: HIVE-19453 : Extend Load Data statement to take Input file format and Serde as parameters (Deepak Jaiswal, reviewed by Jason Dere)

2018-05-09 Thread djaiswal
Repository: hive
Updated Branches:
  refs/heads/master f2cb7f2b0 -> e1e497574


HIVE-19453 : Extend Load Data statement to take Input file format and Serde as parameters (Deepak Jaiswal, reviewed by Jason Dere)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/e1e49757
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/e1e49757
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/e1e49757

Branch: refs/heads/master
Commit: e1e4975744a3104df1e63263fb05dd726518f211
Parents: f2cb7f2
Author: Deepak Jaiswal 
Authored: Wed May 9 11:06:34 2018 -0700
Committer: Deepak Jaiswal 
Committed: Wed May 9 11:06:34 2018 -0700

--
 .../apache/hadoop/hive/ql/parse/HiveParser.g| 12 +--
 .../hive/ql/parse/LoadSemanticAnalyzer.java | 33 ++--
 .../clientpositive/load_data_using_job.q|  8 +++--
 .../llap/load_data_using_job.q.out  |  8 +
 4 files changed, 54 insertions(+), 7 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/e1e49757/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g
--
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g b/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g
index a837d67..3712a53 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g
@@ -422,6 +422,7 @@ TOK_ADD_TRIGGER;
 TOK_REPLACE;
 TOK_LIKERP;
 TOK_UNMANAGED;
+TOK_INPUTFORMAT;
 }
 
 
@@ -835,8 +836,8 @@ execStatement
 loadStatement
 @init { pushMsg("load statement", state); }
 @after { popMsg(state); }
-    : KW_LOAD KW_DATA (islocal=KW_LOCAL)? KW_INPATH (path=StringLiteral) (isoverwrite=KW_OVERWRITE)? KW_INTO KW_TABLE (tab=tableOrPartition)
-    -> ^(TOK_LOAD $path $tab $islocal? $isoverwrite?)
+    : KW_LOAD KW_DATA (islocal=KW_LOCAL)? KW_INPATH (path=StringLiteral) (isoverwrite=KW_OVERWRITE)? KW_INTO KW_TABLE (tab=tableOrPartition) inputFileFormat?
+    -> ^(TOK_LOAD $path $tab $islocal? $isoverwrite? inputFileFormat?)
     ;
 
 replicationClause
@@ -1489,6 +1490,13 @@ fileFormat
 | genericSpec=identifier -> ^(TOK_FILEFORMAT_GENERIC $genericSpec)
 ;
 
+inputFileFormat
+@init { pushMsg("Load Data input file format specification", state); }
+@after { popMsg(state); }
+: KW_INPUTFORMAT inFmt=StringLiteral KW_SERDE serdeCls=StringLiteral
+  -> ^(TOK_INPUTFORMAT $inFmt $serdeCls)
+;
+
 tabTypeExpr
 @init { pushMsg("specifying table types", state); }
 @after { popMsg(state); }

http://git-wip-us.apache.org/repos/asf/hive/blob/e1e49757/ql/src/java/org/apache/hadoop/hive/ql/parse/LoadSemanticAnalyzer.java
--
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/LoadSemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/LoadSemanticAnalyzer.java
index 2b88ea6..866f43d 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/LoadSemanticAnalyzer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/LoadSemanticAnalyzer.java
@@ -79,6 +79,8 @@ public class LoadSemanticAnalyzer extends SemanticAnalyzer {
   // AST specific data
   private Tree fromTree, tableTree;
   private boolean isLocal = false, isOverWrite = false;
+  private String inputFormatClassName = null;
+  private String serDeClassName = null;
 
   public LoadSemanticAnalyzer(QueryState queryState) throws SemanticException {
 super(queryState);
@@ -257,12 +259,30 @@ public class LoadSemanticAnalyzer extends SemanticAnalyzer {
     fromTree = ast.getChild(0);
     tableTree = ast.getChild(1);
 
-    if (ast.getChildCount() == 4) {
+    boolean inputInfo = false;
+    // Check the last node
+    ASTNode child = (ASTNode)ast.getChild(ast.getChildCount() - 1);
+    if (child.getToken().getType() == HiveParser.TOK_INPUTFORMAT) {
+      if (child.getChildCount() != 2) {
+        throw new SemanticException("FileFormat should contain both input format and Serde");
+      }
+      try {
+        inputFormatClassName = stripQuotes(child.getChild(0).getText());
+        serDeClassName = stripQuotes(child.getChild(1).getText());
+        inputInfo = true;
+      } catch (Exception e) {
+        throw new SemanticException("FileFormat inputFormatClassName or serDeClassName is incorrect");
+      }
+    }
+
+    if ((!inputInfo && ast.getChildCount() == 4) ||
+        (inputInfo && ast.getChildCount() == 5)) {
       isLocal = true;
       isOverWrite = true;
     }
 
-    if (ast.getChildCount() == 3) {
+    if ((!inputInfo && ast.getChildCount() == 3) ||
+        (inputInfo && ast.getChildCount() == 4)) {
       if (ast.getChild(2).getText().toLowerCase().equals("local")) {
         isLocal = true;
       } else {
@@ -450,7 +470,14 @@ publ
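[Truncated by the archive. For a feel of the syntax the grammar change enables, a hedged end-to-end example over JDBC; the table name, file path, and Statement variable are invented for illustration, while the statement form follows the new inputFileFormat rule above:]

    // Illustrative only: LOAD DATA with the new INPUTFORMAT/SERDE clause;
    // 'stmt' is an assumed java.sql.Statement on an open Hive connection.
    stmt.execute("load data local inpath '/tmp/kv1.txt' into table t1 "
        + "inputformat 'org.apache.hadoop.mapred.TextInputFormat' "
        + "serde 'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'");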

hive git commit: HIVE-19451: Druid Query Execution fails with ClassNotFoundException org.antlr.v4.runtime.CharStream (Nishant Bangarwa reviewed by Jesus Camacho Rodriguez)

2018-05-09 Thread jcamacho
Repository: hive
Updated Branches:
  refs/heads/branch-3 78e6bfac0 -> 31027371f


HIVE-19451: Druid Query Execution fails with ClassNotFoundException org.antlr.v4.runtime.CharStream (Nishant Bangarwa reviewed by Jesus Camacho Rodriguez)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/31027371
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/31027371
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/31027371

Branch: refs/heads/branch-3
Commit: 31027371f9040cb51c3e680c19c8100ef3415b4f
Parents: 78e6bfa
Author: Nishant Bangarwa 
Authored: Wed May 9 09:35:25 2018 -0700
Committer: Jesus Camacho Rodriguez 
Committed: Wed May 9 09:36:06 2018 -0700

--
 druid-handler/pom.xml | 1 +
 1 file changed, 1 insertion(+)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/31027371/druid-handler/pom.xml
--
diff --git a/druid-handler/pom.xml b/druid-handler/pom.xml
index d25706c..9c2bebf 100644
--- a/druid-handler/pom.xml
+++ b/druid-handler/pom.xml
@@ -362,6 +362,7 @@
                   <include>org.roaringbitmap:*</include>
                   <include>org.apache.derby:*</include>
                   <include>org.asynchttpclient:*</include>
+                  <include>org.antlr:*</include>
                 </includes>
               </artifactSet>



hive git commit: HIVE-19451: Druid Query Execution fails with ClassNotFoundException org.antlr.v4.runtime.CharStream (Nishant Bangarwa reviewed by Jesus Camacho Rodriguez)

2018-05-09 Thread jcamacho
Repository: hive
Updated Branches:
  refs/heads/master dec0a7ed8 -> f2cb7f2b0


HIVE-19451: Druid Query Execution fails with ClassNotFoundException org.antlr.v4.runtime.CharStream (Nishant Bangarwa reviewed by Jesus Camacho Rodriguez)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/f2cb7f2b
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/f2cb7f2b
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/f2cb7f2b

Branch: refs/heads/master
Commit: f2cb7f2b0787ba54a13aa762b531278c88c00670
Parents: dec0a7e
Author: Nishant Bangarwa 
Authored: Wed May 9 09:35:25 2018 -0700
Committer: Jesus Camacho Rodriguez 
Committed: Wed May 9 09:35:25 2018 -0700

--
 druid-handler/pom.xml | 1 +
 1 file changed, 1 insertion(+)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/f2cb7f2b/druid-handler/pom.xml
--
diff --git a/druid-handler/pom.xml b/druid-handler/pom.xml
index 24dcb1d..33bc928 100644
--- a/druid-handler/pom.xml
+++ b/druid-handler/pom.xml
@@ -362,6 +362,7 @@
                   <include>org.roaringbitmap:*</include>
                   <include>org.apache.derby:*</include>
                   <include>org.asynchttpclient:*</include>
+                  <include>org.antlr:*</include>
                 </includes>
               </artifactSet>



hive git commit: HIVE-19448: Vectorization: sysdb test doesn't work after enabling vectorization by default (Matt McCline, reviewed by Deepak Jaiswal)

2018-05-09 Thread mmccline
Repository: hive
Updated Branches:
  refs/heads/branch-3 92bc9cf04 -> 78e6bfac0


HIVE-19448: Vectorization: sysdb test doesn't work after enabling vectorization by default (Matt McCline, reviewed by Deepak Jaiswal)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/78e6bfac
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/78e6bfac
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/78e6bfac

Branch: refs/heads/branch-3
Commit: 78e6bfac030ab2137483dc2b97a3f58fc03c41bd
Parents: 92bc9cf
Author: Matt McCline 
Authored: Wed May 9 11:06:56 2018 -0500
Committer: Matt McCline 
Committed: Wed May 9 11:07:39 2018 -0500

--
 .../org/apache/hadoop/hive/conf/HiveConf.java   |   2 +-
 .../hive/ql/exec/vector/VectorMapOperator.java  |  11 +-
 ql/src/test/queries/clientpositive/sysdb.q  |   5 +
 .../clientpositive/llap/jdbc_handler.q.out  |   2 +-
 .../results/clientpositive/llap/sysdb.q.out | 526 ---
 5 files changed, 359 insertions(+), 187 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/78e6bfac/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
--
diff --git a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java b/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
index 7b0fabe..3bb1e80 100644
--- a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
+++ b/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
@@ -3530,7 +3530,7 @@ public class HiveConf extends Configuration {
         "The default value is false."),
     HIVE_VECTORIZATION_ROW_DESERIALIZE_INPUTFORMAT_EXCLUDES(
         "hive.vectorized.row.serde.inputformat.excludes",
-        "org.apache.parquet.hadoop.ParquetInputFormat,org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat",
+        "org.apache.parquet.hadoop.ParquetInputFormat,org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat,org.apache.hive.storage.jdbc.JdbcInputFormat",
         "The input formats not supported by row deserialize vectorization."),
     HIVE_VECTOR_ADAPTOR_USAGE_MODE("hive.vectorized.adaptor.usage.mode", "all", new StringSet("none", "chosen", "all"),
         "Specifies the extent to which the VectorUDFAdaptor will be used for UDFs that do not have a corresponding vectorized class.\n" +

http://git-wip-us.apache.org/repos/asf/hive/blob/78e6bfac/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorMapOperator.java
--
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorMapOperator.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorMapOperator.java
index 6f1346d..2542e03 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorMapOperator.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorMapOperator.java
@@ -945,8 +945,15 @@ public class VectorMapOperator extends AbstractMapOperator {
 
           // Convert input row to standard objects.
           List<Object> standardObjects = new ArrayList<Object>();
-          ObjectInspectorUtils.copyToStandardObject(standardObjects, deserialized,
-              currentPartRawRowObjectInspector, ObjectInspectorCopyOption.WRITABLE);
+          try {
+            ObjectInspectorUtils.copyToStandardObject(
+                standardObjects,
+                deserialized,
+                currentPartRawRowObjectInspector,
+                ObjectInspectorCopyOption.WRITABLE);
+          } catch (Exception e) {
+            throw new HiveException("copyToStandardObject failed: " + e);
+          }
           if (standardObjects.size() < currentDataColumnCount) {
             throw new HiveException("Input File Format returned row with too few columns");
           }

http://git-wip-us.apache.org/repos/asf/hive/blob/78e6bfac/ql/src/test/queries/clientpositive/sysdb.q
--
diff --git a/ql/src/test/queries/clientpositive/sysdb.q b/ql/src/test/queries/clientpositive/sysdb.q
index 399c3ce..7f88fe1 100644
--- a/ql/src/test/queries/clientpositive/sysdb.q
+++ b/ql/src/test/queries/clientpositive/sysdb.q
@@ -7,6 +7,8 @@ set hive.txn.manager=org.apache.hadoop.hive.ql.lockmgr.DbTxnManager;
 
 set hive.cbo.enable=false;
 
+-- SORT_QUERY_RESULTS
+
 create table src_buck (key int, value string) clustered by(value) into 2 buckets;
 
 create table src_skew (key int) skewed by (key) on (1,2,3);
@@ -59,6 +61,9 @@ select role_name from roles order by role_name limit 5;
 
 select principal_name, grantor from role_map order by principal_name, grantor limit 5;
 
+explain vectorization detail
+select count(*) from sds;
+
 select count(*) from sds;
 

hive git commit: HIVE-19448: Vectorization: sysdb test doesn't work after enabling vectorization by default (Matt McCline, reviewed by Deepak Jaiswal)

2018-05-09 Thread mmccline
Repository: hive
Updated Branches:
  refs/heads/master 99a2b8bd6 -> dec0a7ed8


HIVE-19448: Vectorization: sysdb test doesn't work after enabling vectorization by default (Matt McCline, reviewed by Deepak Jaiswal)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/dec0a7ed
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/dec0a7ed
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/dec0a7ed

Branch: refs/heads/master
Commit: dec0a7ed8da89f002ee5e7d881fec3c7a7e698a7
Parents: 99a2b8b
Author: Matt McCline 
Authored: Wed May 9 11:06:56 2018 -0500
Committer: Matt McCline 
Committed: Wed May 9 11:06:56 2018 -0500

--
 .../org/apache/hadoop/hive/conf/HiveConf.java   |   2 +-
 .../hive/ql/exec/vector/VectorMapOperator.java  |  11 +-
 ql/src/test/queries/clientpositive/sysdb.q  |   5 +
 .../clientpositive/llap/jdbc_handler.q.out  |   2 +-
 .../results/clientpositive/llap/sysdb.q.out | 526 ---
 5 files changed, 359 insertions(+), 187 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/dec0a7ed/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
--
diff --git a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java b/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
index 88a7cfc..fa3f788 100644
--- a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
+++ b/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
@@ -3536,7 +3536,7 @@ public class HiveConf extends Configuration {
         "The default value is false."),
     HIVE_VECTORIZATION_ROW_DESERIALIZE_INPUTFORMAT_EXCLUDES(
         "hive.vectorized.row.serde.inputformat.excludes",
-        "org.apache.parquet.hadoop.ParquetInputFormat,org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat",
+        "org.apache.parquet.hadoop.ParquetInputFormat,org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat,org.apache.hive.storage.jdbc.JdbcInputFormat",
         "The input formats not supported by row deserialize vectorization."),
     HIVE_VECTOR_ADAPTOR_USAGE_MODE("hive.vectorized.adaptor.usage.mode", "all", new StringSet("none", "chosen", "all"),
         "Specifies the extent to which the VectorUDFAdaptor will be used for UDFs that do not have a corresponding vectorized class.\n" +

http://git-wip-us.apache.org/repos/asf/hive/blob/dec0a7ed/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorMapOperator.java
--
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorMapOperator.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorMapOperator.java
index 6f1346d..2542e03 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorMapOperator.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorMapOperator.java
@@ -945,8 +945,15 @@ public class VectorMapOperator extends AbstractMapOperator {
 
           // Convert input row to standard objects.
           List<Object> standardObjects = new ArrayList<Object>();
-          ObjectInspectorUtils.copyToStandardObject(standardObjects, deserialized,
-              currentPartRawRowObjectInspector, ObjectInspectorCopyOption.WRITABLE);
+          try {
+            ObjectInspectorUtils.copyToStandardObject(
+                standardObjects,
+                deserialized,
+                currentPartRawRowObjectInspector,
+                ObjectInspectorCopyOption.WRITABLE);
+          } catch (Exception e) {
+            throw new HiveException("copyToStandardObject failed: " + e);
+          }
           if (standardObjects.size() < currentDataColumnCount) {
             throw new HiveException("Input File Format returned row with too few columns");
           }

http://git-wip-us.apache.org/repos/asf/hive/blob/dec0a7ed/ql/src/test/queries/clientpositive/sysdb.q
--
diff --git a/ql/src/test/queries/clientpositive/sysdb.q b/ql/src/test/queries/clientpositive/sysdb.q
index 1dfcbce..f291354 100644
--- a/ql/src/test/queries/clientpositive/sysdb.q
+++ b/ql/src/test/queries/clientpositive/sysdb.q
@@ -8,6 +8,8 @@ set hive.txn.manager=org.apache.hadoop.hive.ql.lockmgr.DbTxnManager;
 
 set hive.cbo.enable=false;
 
+-- SORT_QUERY_RESULTS
+
 create table src_buck (key int, value string) clustered by(value) into 2 buckets;
 
 create table src_skew (key int) skewed by (key) on (1,2,3);
@@ -60,6 +62,9 @@ select role_name from roles order by role_name limit 5;
 
 select principal_name, grantor from role_map order by principal_name, grantor limit 5;
 
+explain vectorization detail
+select count(*) from sds;
+
 select count(*) from sds;
 
 se