hive git commit: HIVE-17456: Set current database for external LLAP interface (Jason Dere, reviewed by Sergey Shelukhin)

2017-09-07 Thread jdere
Repository: hive
Updated Branches:
  refs/heads/master 5663b9717 -> 1e3e74e54


HIVE-17456: Set current database for external LLAP interface (Jason Dere, reviewed by Sergey Shelukhin)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/1e3e74e5
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/1e3e74e5
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/1e3e74e5

Branch: refs/heads/master
Commit: 1e3e74e54d25921f96d1a0e0eae04d4aeb5a40bd
Parents: 5663b97
Author: Jason Dere 
Authored: Thu Sep 7 11:18:19 2017 -0700
Committer: Jason Dere 
Committed: Thu Sep 7 11:18:19 2017 -0700

--
 .../apache/hive/jdbc/TestJdbcWithMiniLlap.java  | 22 +---
 .../hadoop/hive/llap/LlapBaseInputFormat.java   |  5 +
 2 files changed, 24 insertions(+), 3 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/1e3e74e5/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcWithMiniLlap.java
--
diff --git a/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcWithMiniLlap.java b/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcWithMiniLlap.java
index 68d2ddc..28fa7a5 100644
--- a/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcWithMiniLlap.java
+++ b/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcWithMiniLlap.java
@@ -156,8 +156,17 @@ public class TestJdbcWithMiniLlap {
   }
 
   private void createTestTable(String tableName) throws Exception {
+createTestTable(null, tableName);
+  }
+
+  private void createTestTable(String database, String tableName) throws Exception {
 Statement stmt = hs2Conn.createStatement();
 
+if (database != null) {
+  stmt.execute("CREATE DATABASE IF NOT EXISTS " + database);
+  stmt.execute("USE " + database);
+}
+
 // create table
 stmt.execute("DROP TABLE IF EXISTS " + tableName);
 stmt.execute("CREATE TABLE " + tableName
@@ -225,12 +234,12 @@ public class TestJdbcWithMiniLlap {
 
   @Test(timeout = 6)
   public void testNonAsciiStrings() throws Exception {
-createTestTable("testtab1");
+createTestTable("nonascii", "testtab_nonascii");
 
 RowCollector rowCollector = new RowCollector();
 String nonAscii = "À côté du garçon";
-String query = "select value, '" + nonAscii + "' from testtab1 where under_col=0";
-int rowCount = processQuery(query, 1, rowCollector);
+String query = "select value, '" + nonAscii + "' from testtab_nonascii where under_col=0";
+int rowCount = processQuery("nonascii", query, 1, rowCollector);
 assertEquals(3, rowCount);
 
assertArrayEquals(new String[] {"val_0", nonAscii}, rowCollector.rows.get(0));
@@ -474,6 +483,10 @@ public class TestJdbcWithMiniLlap {
   }
 
  private int processQuery(String query, int numSplits, RowProcessor rowProcessor) throws Exception {
+return processQuery(null, query, numSplits, rowProcessor);
+  }
+
+  private int processQuery(String currentDatabase, String query, int numSplits, RowProcessor rowProcessor) throws Exception {
 String url = miniHS2.getJdbcURL();
 String user = System.getProperty("user.name");
 String pwd = user;
@@ -488,6 +501,9 @@ public class TestJdbcWithMiniLlap {
 job.set(LlapBaseInputFormat.PWD_KEY, pwd);
 job.set(LlapBaseInputFormat.QUERY_KEY, query);
 job.set(LlapBaseInputFormat.HANDLE_ID, handleId);
+if (currentDatabase != null) {
+  job.set(LlapBaseInputFormat.DB_KEY, currentDatabase);
+}
 
 InputSplit[] splits = inputFormat.getSplits(job, numSplits);
 assertTrue(splits.length > 0);
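The new DB_KEY ("llap.if.database", defined in LlapBaseInputFormat below) lets an external caller name the database against which unqualified table names in QUERY_KEY are resolved. A minimal client-side sketch of requesting splits with the new key, mirroring the processQuery configuration above; the no-arg constructor and the need to also set the HS2 connection URL are assumptions, as neither appears in this hunk:

import org.apache.hadoop.hive.llap.LlapBaseInputFormat;
import org.apache.hadoop.mapred.InputSplit;
import org.apache.hadoop.mapred.JobConf;

public class LlapDbKeySketch {
  // Hypothetical helper: fetch splits for a query, resolving table names against 'db'.
  public static InputSplit[] splitsFor(String db, String query, int numSplits) throws Exception {
    JobConf job = new JobConf();
    job.set(LlapBaseInputFormat.USER_KEY, System.getProperty("user.name"));
    job.set(LlapBaseInputFormat.PWD_KEY, System.getProperty("user.name"));
    job.set(LlapBaseInputFormat.QUERY_KEY, query);      // e.g. "select * from testtab_nonascii"
    job.set(LlapBaseInputFormat.HANDLE_ID, "example-handle");
    if (db != null) {
      job.set(LlapBaseInputFormat.DB_KEY, db);          // new in HIVE-17456
    }
    // The HS2 connection URL must also be set via the format's URL key (not shown in this excerpt).
    LlapBaseInputFormat inputFormat = new LlapBaseInputFormat();
    return inputFormat.getSplits(job, numSplits);
  }
}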

http://git-wip-us.apache.org/repos/asf/hive/blob/1e3e74e5/llap-ext-client/src/java/org/apache/hadoop/hive/llap/LlapBaseInputFormat.java
--
diff --git 
a/llap-ext-client/src/java/org/apache/hadoop/hive/llap/LlapBaseInputFormat.java 
b/llap-ext-client/src/java/org/apache/hadoop/hive/llap/LlapBaseInputFormat.java
index 215f5b1..1708df1 100644
--- 
a/llap-ext-client/src/java/org/apache/hadoop/hive/llap/LlapBaseInputFormat.java
+++ 
b/llap-ext-client/src/java/org/apache/hadoop/hive/llap/LlapBaseInputFormat.java
@@ -109,6 +109,7 @@ public class LlapBaseInputFormat>
   public static final String USER_KEY = "llap.if.user";
   public static final String PWD_KEY = "llap.if.pwd";
   public static final String HANDLE_ID = "llap.if.handleid";
+  public static final String DB_KEY = "llap.if.database";
 
   public final String SPLIT_QUERY = "select get_splits(\"%s\",%d)";
  public static final LlapServiceInstance[] serviceInstanceArray = new LlapServiceInstance[0];
@@ -210,6 +211,7 @@ public class LlapBaseInputFormat>
 if (query == null) query = 

hive git commit: HIVE-17421: Clear incorrect stats after replication (Daniel Dai, reviewed by Anishek Agarwal, Thejas Nair)

2017-09-07 Thread daijy
Repository: hive
Updated Branches:
  refs/heads/master 1e3e74e54 -> c52aba1a6


HIVE-17421: Clear incorrect stats after replication (Daniel Dai, reviewed by Anishek Agarwal, Thejas Nair)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/c52aba1a
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/c52aba1a
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/c52aba1a

Branch: refs/heads/master
Commit: c52aba1a6bc7c983a8c5776723ce138a76b52064
Parents: 1e3e74e
Author: Daniel Dai 
Authored: Thu Sep 7 12:17:57 2017 -0700
Committer: Daniel Dai 
Committed: Thu Sep 7 12:17:57 2017 -0700

--
 .../hive/ql/parse/TestReplicationScenarios.java | 70 
 .../hive/ql/parse/ImportSemanticAnalyzer.java   |  5 ++
 2 files changed, 75 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/c52aba1a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestReplicationScenarios.java
--
diff --git a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestReplicationScenarios.java b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestReplicationScenarios.java
index 9667449..6a2e400 100644
--- a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestReplicationScenarios.java
+++ b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestReplicationScenarios.java
@@ -141,6 +141,7 @@ public class TestReplicationScenarios {
 hconf.set(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false");
hconf.set(HiveConf.ConfVars.METASTORE_RAW_STORE_IMPL.varname,
  "org.apache.hadoop.hive.metastore.InjectableBehaviourObjectStore");
+hconf.setBoolVar(HiveConf.ConfVars.HIVEOPTIMIZEMETADATAQUERIES, true);
 System.setProperty(HiveConf.ConfVars.PREEXECHOOKS.varname, " ");
 System.setProperty(HiveConf.ConfVars.POSTEXECHOOKS.varname, " ");
 
@@ -2852,6 +2853,75 @@ public class TestReplicationScenarios {
 }
   }
 
+  @Test
+  public void testRemoveStats() throws IOException {
+String name = testName.getMethodName();
+String dbName = createDB(name, driver);
+
+String[] unptn_data = new String[]{ "1" , "2" };
+String[] ptn_data_1 = new String[]{ "5", "7", "8"};
+String[] ptn_data_2 = new String[]{ "3", "2", "9"};
+
+String unptn_locn = new Path(TEST_PATH, name + "_unptn").toUri().getPath();
+String ptn_locn_1 = new Path(TEST_PATH, name + "_ptn1").toUri().getPath();
+String ptn_locn_2 = new Path(TEST_PATH, name + "_ptn2").toUri().getPath();
+
+createTestDataFile(unptn_locn, unptn_data);
+createTestDataFile(ptn_locn_1, ptn_data_1);
+createTestDataFile(ptn_locn_2, ptn_data_2);
+
+run("CREATE TABLE " + dbName + ".unptned(a int) STORED AS TEXTFILE", driver);
+run("LOAD DATA LOCAL INPATH '" + unptn_locn + "' OVERWRITE INTO TABLE " + dbName + ".unptned", driver);
+run("CREATE TABLE " + dbName + ".ptned(a int) partitioned by (b int) STORED AS TEXTFILE", driver);
+run("LOAD DATA LOCAL INPATH '" + ptn_locn_1 + "' OVERWRITE INTO TABLE " + dbName + ".ptned PARTITION(b=1)", driver);
+run("ANALYZE TABLE " + dbName + ".unptned COMPUTE STATISTICS FOR COLUMNS", driver);
+run("ANALYZE TABLE " + dbName + ".unptned COMPUTE STATISTICS", driver);
+run("ANALYZE TABLE " + dbName + ".ptned partition(b) COMPUTE STATISTICS FOR COLUMNS", driver);
+run("ANALYZE TABLE " + dbName + ".ptned partition(b) COMPUTE STATISTICS", driver);
+
+verifySetup("SELECT * from " + dbName + ".unptned", unptn_data, driver);
+verifySetup("SELECT a from " + dbName + ".ptned WHERE b=1", ptn_data_1, driver);
+verifySetup("SELECT count(*) from " + dbName + ".unptned", new String[]{"2"}, driver);
+verifySetup("SELECT count(*) from " + dbName + ".ptned", new String[]{"3"}, driver);
+verifySetup("SELECT max(a) from " + dbName + ".unptned", new String[]{"2"}, driver);
+verifySetup("SELECT max(a) from " + dbName + ".ptned where b=1", new String[]{"8"}, driver);
+
+advanceDumpDir();
+run("REPL DUMP " + dbName, driver);
+String replDumpLocn = getResult(0,0,driver);
+String replDumpId = getResult(0,1,true,driver);
+LOG.info("Dumped to {} with id {}",replDumpLocn,replDumpId);
+run("REPL LOAD " + dbName + "_dupe FROM '" + replDumpLocn + "'", driverMirror);
+
+verifyRun("SELECT count(*) from " + dbName + "_dupe.unptned", new String[]{"2"}, driverMirror);
+verifyRun("SELECT count(*) from " + dbName + "_dupe.ptned", new String[]{"3"}, driverMirror);
+verifyRun("SELECT max(a) from " + dbName + "_dupe.unptned", new String[]{"2"}, driverMirror);
+verifyRun("SELECT max(a) from " + dbName + "_dupe.ptned where b=1", new 
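The test deliberately enables HIVEOPTIMIZEMETADATAQUERIES: with that optimization on, aggregates such as count(*) and max(a) can be answered straight from stored table and partition statistics, so statistics that no longer match the replica's data would surface as wrong query results. A standalone sketch of that stats-sensitive check, using only the driver APIs that appear in these tests; the database and table names are illustrative:

import org.apache.hadoop.hive.cli.CliSessionState;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.Driver;
import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse;
import org.apache.hadoop.hive.ql.session.SessionState;

public class ReplicatedStatsCheck {
  public static void main(String[] args) throws Exception {
    HiveConf conf = new HiveConf();
    // Let eligible aggregates be answered from stored stats rather than a data scan.
    conf.setBoolVar(HiveConf.ConfVars.HIVEOPTIMIZEMETADATAQUERIES, true);
    SessionState.start(new CliSessionState(conf));
    Driver driver = new Driver(conf);
    // If the import path had left inaccurate stats behind, this count could be
    // answered from those stats rather than from the data that was actually loaded.
    CommandProcessorResponse ret = driver.run("SELECT count(*) FROM repldb_dupe.unptned");
    if (ret.getException() != null) {
      throw new RuntimeException(ret.getException());
    }
  }
}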

hive git commit: HIVE-17152 : Improve security of random generator for HS2 cookies (Tao Li via Thejas Nair)

2017-09-07 Thread thejas
Repository: hive
Updated Branches:
  refs/heads/master 849fa02c9 -> bb4035b68


HIVE-17152 : Improve security of random generator for HS2 cookies (Tao Li via Thejas Nair)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/bb4035b6
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/bb4035b6
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/bb4035b6

Branch: refs/heads/master
Commit: bb4035b68f3e98e158466848ec0fd501f89137b6
Parents: 849fa02
Author: Tao LI 
Authored: Thu Sep 7 10:53:58 2017 -0700
Committer: Thejas M Nair 
Committed: Thu Sep 7 10:53:58 2017 -0700

--
 service/src/java/org/apache/hive/service/auth/HttpAuthUtils.java | 4 ++--
 .../org/apache/hive/service/cli/thrift/ThriftHttpServlet.java| 4 ++--
 2 files changed, 4 insertions(+), 4 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/bb4035b6/service/src/java/org/apache/hive/service/auth/HttpAuthUtils.java
--
diff --git a/service/src/java/org/apache/hive/service/auth/HttpAuthUtils.java b/service/src/java/org/apache/hive/service/auth/HttpAuthUtils.java
index f11c0e4a..8b5661a 100644
--- a/service/src/java/org/apache/hive/service/auth/HttpAuthUtils.java
+++ b/service/src/java/org/apache/hive/service/auth/HttpAuthUtils.java
@@ -21,11 +21,11 @@ package org.apache.hive.service.auth;
 import java.security.AccessControlContext;
 import java.security.AccessController;
 import java.security.PrivilegedExceptionAction;
+import java.security.SecureRandom;
 import java.util.Arrays;
 import java.util.HashMap;
 import java.util.HashSet;
 import java.util.Map;
-import java.util.Random;
 import java.util.Set;
 import java.util.StringTokenizer;
 
@@ -96,7 +96,7 @@ public final class HttpAuthUtils {
 
sb.append(COOKIE_CLIENT_USER_NAME).append(COOKIE_KEY_VALUE_SEPARATOR).append(clientUserName).
 append(COOKIE_ATTR_SEPARATOR);
 sb.append(COOKIE_CLIENT_RAND_NUMBER).append(COOKIE_KEY_VALUE_SEPARATOR).
-append((new Random(System.currentTimeMillis())).nextLong());
+append((new SecureRandom()).nextLong());
 return sb.toString();
   }
 

http://git-wip-us.apache.org/repos/asf/hive/blob/bb4035b6/service/src/java/org/apache/hive/service/cli/thrift/ThriftHttpServlet.java
--
diff --git a/service/src/java/org/apache/hive/service/cli/thrift/ThriftHttpServlet.java b/service/src/java/org/apache/hive/service/cli/thrift/ThriftHttpServlet.java
index cda736c..f3bbf8a 100644
--- a/service/src/java/org/apache/hive/service/cli/thrift/ThriftHttpServlet.java
+++ b/service/src/java/org/apache/hive/service/cli/thrift/ThriftHttpServlet.java
@@ -22,11 +22,11 @@ import java.io.IOException;
 import java.io.UnsupportedEncodingException;
 import java.net.InetAddress;
 import java.security.PrivilegedExceptionAction;
+import java.security.SecureRandom;
 import java.util.Arrays;
 import java.util.Collections;
 import java.util.List;
 import java.util.Map;
-import java.util.Random;
 import java.util.Set;
 import java.util.concurrent.TimeUnit;
 
@@ -84,7 +84,7 @@ public class ThriftHttpServlet extends TServlet {
   // Class members for cookie based authentication.
   private CookieSigner signer;
   public static final String AUTH_COOKIE = "hive.server2.auth";
-  private static final Random RAN = new Random();
+  private static final SecureRandom RAN = new SecureRandom();
   private boolean isCookieAuthEnabled;
   private String cookieDomain;
   private String cookiePath;
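Both hunks make the same swap, so a standalone comparison (not Hive code) of what changes for the random component embedded in the auth cookie:

import java.security.SecureRandom;
import java.util.Random;

public class CookieRandomComparison {
  public static void main(String[] args) {
    // Before: seeded from the wall clock, so the sequence is reproducible by
    // anyone who can narrow down when the value was generated.
    long guessable = new Random(System.currentTimeMillis()).nextLong();

    // After: drawn from the platform's cryptographically strong RNG, which is
    // what a value used in an authentication cookie needs.
    long unguessable = new SecureRandom().nextLong();

    System.out.println(guessable + " / " + unguessable);
  }
}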



hive git commit: HIVE-17183 : Disable rename operations during bootstrap dump (Sankar Hariappan, reviewed by Anishek Agarwal, Thejas Nair)

2017-09-07 Thread thejas
Repository: hive
Updated Branches:
  refs/heads/master bb4035b68 -> 5663b9717


HIVE-17183 : Disable rename operations during bootstrap dump (Sankar Hariappan, reviewed by Anishek Agarwal, Thejas Nair)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/5663b971
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/5663b971
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/5663b971

Branch: refs/heads/master
Commit: 5663b971776bd3e6a6e17426875f44313f6eff9f
Parents: bb4035b
Author: Sankar Hariappan 
Authored: Thu Sep 7 11:07:24 2017 -0700
Committer: Thejas M Nair 
Committed: Thu Sep 7 11:07:24 2017 -0700

--
 .../hive/ql/parse/TestReplicationScenarios.java | 75 
 .../org/apache/hadoop/hive/ql/exec/DDLTask.java | 15 
 .../hadoop/hive/ql/exec/repl/ReplDumpTask.java  | 18 +++--
 .../apache/hadoop/hive/ql/parse/EximUtil.java   | 14 
 .../hadoop/hive/ql/parse/repl/dump/Utils.java   | 67 +
 5 files changed, 182 insertions(+), 7 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/5663b971/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestReplicationScenarios.java
--
diff --git a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestReplicationScenarios.java b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestReplicationScenarios.java
index b19c1aa..9667449 100644
--- a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestReplicationScenarios.java
+++ b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestReplicationScenarios.java
@@ -551,6 +551,81 @@ public class TestReplicationScenarios {
   }
 
   @Test
+  public void testBootstrapWithConcurrentRename() throws IOException {
+String name = testName.getMethodName();
+String dbName = createDB(name, driver);
+String replDbName = dbName + "_dupe";
+run("CREATE TABLE " + dbName + ".ptned(a string) partitioned by (b int) STORED AS TEXTFILE", driver);
+
+String[] ptn_data = new String[]{ "eleven" , "twelve" };
+String[] empty = new String[]{};
+String ptn_locn = new Path(TEST_PATH, name + "_ptn").toUri().getPath();
+
+createTestDataFile(ptn_locn, ptn_data);
+run("LOAD DATA LOCAL INPATH '" + ptn_locn + "' OVERWRITE INTO TABLE " + dbName + ".ptned PARTITION(b=1)", driver);
+
+BehaviourInjection ptnedTableRenamer = new BehaviourInjection(){
+  boolean success = false;
+
+  @Nullable
+  @Override
+  public Table apply(@Nullable Table table) {
+if (injectionPathCalled) {
+  nonInjectedPathCalled = true;
+} else {
+  // getTable is invoked after fetching the table names
+  injectionPathCalled = true;
+  Thread t = new Thread(new Runnable() {
+public void run() {
+  try {
+LOG.info("Entered new thread");
+Driver driver2 = new Driver(hconf);
+SessionState.start(new CliSessionState(hconf));
+CommandProcessorResponse ret = driver2.run("ALTER TABLE " + dbName + ".ptned PARTITION (b=1) RENAME TO PARTITION (b=10)");
+success = (ret.getException() == null);
+assertFalse(success);
+ret = driver2.run("ALTER TABLE " + dbName + ".ptned RENAME TO " + dbName + ".ptned_renamed");
+success = (ret.getException() == null);
+assertFalse(success);
+LOG.info("Exit new thread success - {}", success);
+  } catch (CommandNeedRetryException e) {
+LOG.info("Hit Exception {} from new thread", e.getMessage());
+throw new RuntimeException(e);
+  }
+}
+  });
+  t.start();
+  LOG.info("Created new thread {}", t.getName());
+  try {
+t.join();
+  } catch (InterruptedException e) {
+throw new RuntimeException(e);
+  }
+}
+return table;
+  }
+};
+InjectableBehaviourObjectStore.setGetTableBehaviour(ptnedTableRenamer);
+
+// The intermediate rename would've failed as bootstrap dump in progress
+bootstrapLoadAndVerify(dbName, replDbName);
+
+ptnedTableRenamer.assertInjectionsPerformed(true,true);
+InjectableBehaviourObjectStore.resetGetTableBehaviour(); // reset the behaviour
+
+// The ptned table should be there in both source and target as rename was not successful
+verifyRun("SELECT a from " + dbName + ".ptned WHERE (b=1) ORDER BY a", ptn_data, driver);
+verifyRun("SELECT a from " + dbName + "_dupe.ptned WHERE (b=1) ORDER BY 
a", ptn_data, 
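The core of the test is the metastore behaviour-injection hook: a callback is installed on getTable, fires a concurrent rename exactly once while the bootstrap dump is walking the tables, and is then verified and reset. A condensed sketch of that pattern; the generic parameters (<Table, Table>) and BehaviourInjection being a nested helper of InjectableBehaviourObjectStore are assumptions, since the archive stripped the type arguments from the diff above:

import javax.annotation.Nullable;
import org.apache.hadoop.hive.metastore.InjectableBehaviourObjectStore;
import org.apache.hadoop.hive.metastore.InjectableBehaviourObjectStore.BehaviourInjection;
import org.apache.hadoop.hive.metastore.api.Table;

public class BootstrapRenameInjectionSketch {
  static void runWithInjection(Runnable bootstrapDumpAndLoad, Runnable concurrentRename) {
    BehaviourInjection<Table, Table> callback = new BehaviourInjection<Table, Table>() {
      @Nullable
      @Override
      public Table apply(@Nullable Table table) {
        if (!injectionPathCalled) {
          injectionPathCalled = true;   // fire the concurrent DDL exactly once
          concurrentRename.run();       // expected to fail while the dump is in progress
        } else {
          nonInjectedPathCalled = true;
        }
        return table;                   // pass the table through unchanged
      }
    };
    InjectableBehaviourObjectStore.setGetTableBehaviour(callback);
    try {
      bootstrapDumpAndLoad.run();       // metastore getTable() calls now route through apply()
    } finally {
      callback.assertInjectionsPerformed(true, true);
      InjectableBehaviourObjectStore.resetGetTableBehaviour();
    }
  }
}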

hive git commit: HIVE-17429: Hive JDBC doesn't return rows when querying Impala (Zach Amsden, reviewed by Aihua Xu)

2017-09-07 Thread aihuaxu
Repository: hive
Updated Branches:
  refs/heads/master c52aba1a6 -> 8482c5fbe


HIVE-17429: Hive JDBC doesn't return rows when querying Impala (Zach Amsden, reviewed by Aihua Xu)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/8482c5fb
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/8482c5fb
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/8482c5fb

Branch: refs/heads/master
Commit: 8482c5fbe9d2b9a62132e0e94d5578a5eaa22fbd
Parents: c52aba1
Author: Aihua Xu 
Authored: Thu Sep 7 17:17:14 2017 -0700
Committer: Aihua Xu 
Committed: Thu Sep 7 17:17:14 2017 -0700

--
 jdbc/src/java/org/apache/hive/jdbc/HiveStatement.java | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/8482c5fb/jdbc/src/java/org/apache/hive/jdbc/HiveStatement.java
--
diff --git a/jdbc/src/java/org/apache/hive/jdbc/HiveStatement.java b/jdbc/src/java/org/apache/hive/jdbc/HiveStatement.java
index b743b46..c6bd41f 100644
--- a/jdbc/src/java/org/apache/hive/jdbc/HiveStatement.java
+++ b/jdbc/src/java/org/apache/hive/jdbc/HiveStatement.java
@@ -263,7 +263,7 @@ public class HiveStatement implements java.sql.Statement {
 TGetOperationStatusResp status = waitForOperationToComplete();
 
 // The query should be completed by now
-if (!status.isHasResultSet()) {
+if (!status.isHasResultSet() && !stmtHandle.isHasResultSet()) {
   return false;
 }
resultSet =  new HiveQueryResultSet.Builder(this).setClient(client).setSessionHandle(sessHandle)
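The one-line change matters for servers that speak the HiveServer2 protocol but do not populate hasResultSet in TGetOperationStatusResp (Impala being the reported case): execute() now also consults the flag on the original operation handle before deciding there are no rows. A client-side sketch of the call pattern that previously returned no rows against such servers; the connection URL is illustrative:

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.Statement;

public class HiveJdbcExecuteSketch {
  public static void main(String[] args) throws Exception {
    // Illustrative endpoint; any HiveServer2-protocol server applies.
    try (Connection conn = DriverManager.getConnection("jdbc:hive2://example-host:10000/default");
         Statement stmt = conn.createStatement()) {
      // Before this fix, execute() could return false here even though the query
      // produced rows; with it, the result set is surfaced to the caller.
      if (stmt.execute("SELECT 1")) {
        try (ResultSet rs = stmt.getResultSet()) {
          while (rs.next()) {
            System.out.println(rs.getInt(1));
          }
        }
      }
    }
  }
}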