hive git commit: HIVE-20555: HiveServer2: Preauthenticated subject for http transport is not retained for entire duration of http communication in some cases (Vaibhav Gumashta reviewed by Daniel Dai)

2018-09-21 Thread vgumashta
Repository: hive
Updated Branches:
  refs/heads/branch-3 ca5e241c2 -> d8c97cf28


HIVE-20555: HiveServer2: Preauthenticated subject for http transport is not 
retained for entire duration of http communication in some cases (Vaibhav 
Gumashta reviewed by Daniel Dai)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/d8c97cf2
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/d8c97cf2
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/d8c97cf2

Branch: refs/heads/branch-3
Commit: d8c97cf2804e4f48c0ae8ae3df64d1c9e10cb28d
Parents: ca5e241
Author: Vaibhav Gumashta 
Authored: Fri Sep 21 16:27:15 2018 -0700
Committer: Vaibhav Gumashta 
Committed: Fri Sep 21 16:33:06 2018 -0700

--
 .../org/apache/hive/jdbc/HiveConnection.java| 19 +---
 .../jdbc/HttpKerberosRequestInterceptor.java| 23 +--
 .../apache/hive/service/auth/HttpAuthUtils.java | 24 +---
 3 files changed, 33 insertions(+), 33 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/d8c97cf2/jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java
--
diff --git a/jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java b/jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java
index 14939cb..a4920bf 100644
--- a/jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java
+++ b/jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java
@@ -70,6 +70,7 @@ import org.slf4j.LoggerFactory;
 import javax.net.ssl.KeyManagerFactory;
 import javax.net.ssl.SSLContext;
 import javax.net.ssl.TrustManagerFactory;
+import javax.security.auth.Subject;
 import javax.security.sasl.Sasl;
 import javax.security.sasl.SaslException;
 import java.io.BufferedReader;
@@ -81,6 +82,8 @@ import java.lang.reflect.InvocationHandler;
 import java.lang.reflect.InvocationTargetException;
 import java.lang.reflect.Method;
 import java.lang.reflect.Proxy;
+import java.security.AccessControlContext;
+import java.security.AccessController;
 import java.security.KeyStore;
 import java.security.SecureRandom;
 import java.sql.Array;
@@ -140,6 +143,7 @@ public class HiveConnection implements java.sql.Connection {
   private String initFile = null;
   private String wmPool = null, wmApp = null;
   private Properties clientInfo;
+  private Subject loggedInSubject;
 
   /**
* Get all direct HiveServer2 URLs from a ZooKeeper based HiveServer2 URL
@@ -405,15 +409,24 @@ public class HiveConnection implements java.sql.Connection {
 }
 // Configure http client for kerberos/password based authentication
 if (isKerberosAuthMode()) {
+  if (assumeSubject) {
+// With this option, we're assuming that the external application,
+// using the JDBC driver has done a JAAS kerberos login already
+AccessControlContext context = AccessController.getContext();
+loggedInSubject = Subject.getSubject(context);
+if (loggedInSubject == null) {
+  throw new SQLException("The Subject is not set");
+}
+  }
   /**
* Add an interceptor which sets the appropriate header in the request.
* It does the kerberos authentication and get the final service ticket,
* for sending to the server before every request.
* In https mode, the entire information is encrypted
*/
-  requestInterceptor = new HttpKerberosRequestInterceptor(
-  sessConfMap.get(JdbcConnectionParams.AUTH_PRINCIPAL), host, getServerHttpUrl(useSsl),
-  assumeSubject, cookieStore, cookieName, useSsl, additionalHttpHeaders, customCookies);
+  requestInterceptor = new HttpKerberosRequestInterceptor(sessConfMap.get(JdbcConnectionParams.AUTH_PRINCIPAL),
+  host, getServerHttpUrl(useSsl), loggedInSubject, cookieStore, cookieName, useSsl, additionalHttpHeaders,
+  customCookies);
 } else {
   // Check for delegation token, if present add it in the header
   String tokenStr = getClientDelegationToken(sessConfMap);

http://git-wip-us.apache.org/repos/asf/hive/blob/d8c97cf2/jdbc/src/java/org/apache/hive/jdbc/HttpKerberosRequestInterceptor.java
--
diff --git a/jdbc/src/java/org/apache/hive/jdbc/HttpKerberosRequestInterceptor.java b/jdbc/src/java/org/apache/hive/jdbc/HttpKerberosRequestInterceptor.java
index 28d42d7..516825f 100644
--- a/jdbc/src/java/org/apache/hive/jdbc/HttpKerberosRequestInterceptor.java
+++ b/jdbc/src/java/org/apache/hive/jdbc/HttpKerberosRequestInterceptor.java
@@ -21,6 +21,8 @@ package org.apache.hive.jdbc;
 import java.util.Map;
 import java.util.concurrent.locks.ReentrantLock;
 
+import javax.security.auth.Subject;
+
 import org.apache.hive.service.auth.HttpAuthUtils;
 import org.apache.htt
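
For context, the assumeSubject path added above expects the embedding application to have completed a JAAS Kerberos login before opening the connection, and to open the connection inside Subject.doAs so that Subject.getSubject(AccessController.getContext()) in HiveConnection can pick up the pre-authenticated Subject. A minimal client-side sketch of that usage (the connection URL, host, principal and JAAS entry name are illustrative placeholders, not part of this commit):

import java.security.PrivilegedExceptionAction;
import java.sql.Connection;
import java.sql.DriverManager;
import javax.security.auth.Subject;
import javax.security.auth.login.LoginContext;

public class FromSubjectExample {
  public static void main(String[] args) throws Exception {
    // JAAS Kerberos login done by the external application; "HiveClient"
    // must exist in the JAAS config given via -Djava.security.auth.login.config.
    LoginContext lc = new LoginContext("HiveClient");
    lc.login();
    Subject subject = lc.getSubject();

    // Hypothetical HTTP-mode URL; kerberosAuthType=fromSubject asks the driver
    // to reuse the caller's Subject instead of doing its own login.
    String url = "jdbc:hive2://hs2.example.com:10001/default;transportMode=http;"
        + "httpPath=cliservice;principal=hive/_HOST@EXAMPLE.COM;kerberosAuthType=fromSubject";

    // The connection must be opened inside Subject.doAs so that
    // Subject.getSubject(AccessController.getContext()) in HiveConnection
    // sees the pre-authenticated Subject captured above.
    try (Connection conn = Subject.doAs(subject,
        (PrivilegedExceptionAction<Connection>) () -> DriverManager.getConnection(url))) {
      conn.createStatement().execute("SELECT 1");
    }
  }
}

Without the surrounding doAs, the driver hits the new "The Subject is not set" SQLException introduced in the hunk above.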

hive git commit: HIVE-20555: HiveServer2: Preauthenticated subject for http transport is not retained for entire duration of http communication in some cases (Vaibhav Gumashta reviewed by Daniel Dai)

2018-09-21 Thread vgumashta
Repository: hive
Updated Branches:
  refs/heads/branch-3.1 bcc7df958 -> 3560db30c


HIVE-20555: HiveServer2: Preauthenticated subject for http transport is not 
retained for entire duration of http communication in some cases (Vaibhav 
Gumashta reviewed by Daniel Dai)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/3560db30
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/3560db30
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/3560db30

Branch: refs/heads/branch-3.1
Commit: 3560db30c461215ecedf19fd7f6e38fcbc85cec3
Parents: bcc7df9
Author: Vaibhav Gumashta 
Authored: Fri Sep 21 16:27:15 2018 -0700
Committer: Vaibhav Gumashta 
Committed: Fri Sep 21 16:33:52 2018 -0700

--
 .../org/apache/hive/jdbc/HiveConnection.java| 19 +---
 .../jdbc/HttpKerberosRequestInterceptor.java| 23 +--
 .../apache/hive/service/auth/HttpAuthUtils.java | 24 +---
 3 files changed, 33 insertions(+), 33 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/3560db30/jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java
--
diff --git a/jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java b/jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java
index 458158e..a654b05 100644
--- a/jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java
+++ b/jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java
@@ -70,6 +70,7 @@ import org.slf4j.LoggerFactory;
 import javax.net.ssl.KeyManagerFactory;
 import javax.net.ssl.SSLContext;
 import javax.net.ssl.TrustManagerFactory;
+import javax.security.auth.Subject;
 import javax.security.sasl.Sasl;
 import javax.security.sasl.SaslException;
 import java.io.BufferedReader;
@@ -81,6 +82,8 @@ import java.lang.reflect.InvocationHandler;
 import java.lang.reflect.InvocationTargetException;
 import java.lang.reflect.Method;
 import java.lang.reflect.Proxy;
+import java.security.AccessControlContext;
+import java.security.AccessController;
 import java.security.KeyStore;
 import java.security.SecureRandom;
 import java.sql.Array;
@@ -140,6 +143,7 @@ public class HiveConnection implements java.sql.Connection {
   private String initFile = null;
   private String wmPool = null, wmApp = null;
   private Properties clientInfo;
+  private Subject loggedInSubject;
 
   /**
* Get all direct HiveServer2 URLs from a ZooKeeper based HiveServer2 URL
@@ -397,15 +401,24 @@ public class HiveConnection implements java.sql.Connection {
 }
 // Configure http client for kerberos/password based authentication
 if (isKerberosAuthMode()) {
+  if (assumeSubject) {
+// With this option, we're assuming that the external application,
+// using the JDBC driver has done a JAAS kerberos login already
+AccessControlContext context = AccessController.getContext();
+loggedInSubject = Subject.getSubject(context);
+if (loggedInSubject == null) {
+  throw new SQLException("The Subject is not set");
+}
+  }
   /**
* Add an interceptor which sets the appropriate header in the request.
* It does the kerberos authentication and get the final service ticket,
* for sending to the server before every request.
* In https mode, the entire information is encrypted
*/
-  requestInterceptor = new HttpKerberosRequestInterceptor(
-  sessConfMap.get(JdbcConnectionParams.AUTH_PRINCIPAL), host, getServerHttpUrl(useSsl),
-  assumeSubject, cookieStore, cookieName, useSsl, additionalHttpHeaders, customCookies);
+  requestInterceptor = new HttpKerberosRequestInterceptor(sessConfMap.get(JdbcConnectionParams.AUTH_PRINCIPAL),
+  host, getServerHttpUrl(useSsl), loggedInSubject, cookieStore, cookieName, useSsl, additionalHttpHeaders,
+  customCookies);
 } else {
   // Check for delegation token, if present add it in the header
   String tokenStr = getClientDelegationToken(sessConfMap);

http://git-wip-us.apache.org/repos/asf/hive/blob/3560db30/jdbc/src/java/org/apache/hive/jdbc/HttpKerberosRequestInterceptor.java
--
diff --git a/jdbc/src/java/org/apache/hive/jdbc/HttpKerberosRequestInterceptor.java b/jdbc/src/java/org/apache/hive/jdbc/HttpKerberosRequestInterceptor.java
index 28d42d7..516825f 100644
--- a/jdbc/src/java/org/apache/hive/jdbc/HttpKerberosRequestInterceptor.java
+++ b/jdbc/src/java/org/apache/hive/jdbc/HttpKerberosRequestInterceptor.java
@@ -21,6 +21,8 @@ package org.apache.hive.jdbc;
 import java.util.Map;
 import java.util.concurrent.locks.ReentrantLock;
 
+import javax.security.auth.Subject;
+
 import org.apache.hive.service.auth.HttpAuthUtils;
 import org.apache
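
The HttpKerberosRequestInterceptor and HttpAuthUtils portions of this patch are truncated above; the point of threading loggedInSubject through the interceptor is that the per-request Kerberos/SPNEGO token can then be generated under that same Subject for every HTTP request, not only the first one. A rough sketch of that pattern (illustrative only; the class and method below are not the actual HttpAuthUtils API, and the "HTTP@host" principal form is an assumption):

import java.security.PrivilegedExceptionAction;
import java.util.Base64;
import javax.security.auth.Subject;
import org.ietf.jgss.GSSContext;
import org.ietf.jgss.GSSManager;
import org.ietf.jgss.GSSName;
import org.ietf.jgss.Oid;

public final class SpnegoTokenSketch {
  // Builds the value of an HTTP "Authorization: Negotiate ..." header for the
  // given service principal ("HTTP@host" form), using whatever Kerberos
  // credentials the pre-authenticated Subject already holds.
  public static String negotiateHeader(Subject loggedInSubject, String serverHttpPrincipal)
      throws Exception {
    return Subject.doAs(loggedInSubject, (PrivilegedExceptionAction<String>) () -> {
      GSSManager manager = GSSManager.getInstance();
      Oid spnegoOid = new Oid("1.3.6.1.5.5.2"); // SPNEGO mechanism OID
      GSSName serverName = manager.createName(serverHttpPrincipal, GSSName.NT_HOSTBASED_SERVICE);
      GSSContext context =
          manager.createContext(serverName, spnegoOid, null, GSSContext.DEFAULT_LIFETIME);
      try {
        byte[] token = context.initSecContext(new byte[0], 0, 0);
        return "Negotiate " + Base64.getEncoder().encodeToString(token);
      } finally {
        context.dispose();
      }
    });
  }
}

Running initSecContext inside Subject.doAs over the retained Subject is what keeps the pre-authenticated credentials in effect for the whole duration of the HTTP communication.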

hive git commit: HIVE-20555: HiveServer2: Preauthenticated subject for http transport is not retained for entire duration of http communication in some cases (Vaibhav Gumashta reviewed by Daniel Dai)

2018-09-21 Thread vgumashta
Repository: hive
Updated Branches:
  refs/heads/master cfdb433bc -> cdba00c96


HIVE-20555: HiveServer2: Preauthenticated subject for http transport is not 
retained for entire duration of http communication in some cases (Vaibhav 
Gumashta reviewed by Daniel Dai)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/cdba00c9
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/cdba00c9
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/cdba00c9

Branch: refs/heads/master
Commit: cdba00c96fd86c4f9c28dbaa411727f1666d26cb
Parents: cfdb433
Author: Vaibhav Gumashta 
Authored: Fri Sep 21 16:27:15 2018 -0700
Committer: Vaibhav Gumashta 
Committed: Fri Sep 21 16:32:35 2018 -0700

--
 .../org/apache/hive/jdbc/HiveConnection.java| 19 +---
 .../jdbc/HttpKerberosRequestInterceptor.java| 23 +--
 .../apache/hive/service/auth/HttpAuthUtils.java | 24 +---
 3 files changed, 33 insertions(+), 33 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/cdba00c9/jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java
--
diff --git a/jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java b/jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java
index 335995c..8d5aa70 100644
--- a/jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java
+++ b/jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java
@@ -70,6 +70,7 @@ import org.slf4j.LoggerFactory;
 import javax.net.ssl.KeyManagerFactory;
 import javax.net.ssl.SSLContext;
 import javax.net.ssl.TrustManagerFactory;
+import javax.security.auth.Subject;
 import javax.security.sasl.Sasl;
 import javax.security.sasl.SaslException;
 import java.io.BufferedReader;
@@ -81,6 +82,8 @@ import java.lang.reflect.InvocationHandler;
 import java.lang.reflect.InvocationTargetException;
 import java.lang.reflect.Method;
 import java.lang.reflect.Proxy;
+import java.security.AccessControlContext;
+import java.security.AccessController;
 import java.security.KeyStore;
 import java.security.SecureRandom;
 import java.sql.Array;
@@ -140,6 +143,7 @@ public class HiveConnection implements java.sql.Connection {
   private String initFile = null;
   private String wmPool = null, wmApp = null;
   private Properties clientInfo;
+  private Subject loggedInSubject;
 
   /**
* Get all direct HiveServer2 URLs from a ZooKeeper based HiveServer2 URL
@@ -478,15 +482,24 @@ public class HiveConnection implements java.sql.Connection {
 }
 // Configure http client for kerberos/password based authentication
 if (isKerberosAuthMode()) {
+  if (assumeSubject) {
+// With this option, we're assuming that the external application,
+// using the JDBC driver has done a JAAS kerberos login already
+AccessControlContext context = AccessController.getContext();
+loggedInSubject = Subject.getSubject(context);
+if (loggedInSubject == null) {
+  throw new SQLException("The Subject is not set");
+}
+  }
   /**
* Add an interceptor which sets the appropriate header in the request.
* It does the kerberos authentication and get the final service ticket,
* for sending to the server before every request.
* In https mode, the entire information is encrypted
*/
-  requestInterceptor = new HttpKerberosRequestInterceptor(
-  sessConfMap.get(JdbcConnectionParams.AUTH_PRINCIPAL), host, getServerHttpUrl(useSsl),
-  assumeSubject, cookieStore, cookieName, useSsl, additionalHttpHeaders, customCookies);
+  requestInterceptor = new HttpKerberosRequestInterceptor(sessConfMap.get(JdbcConnectionParams.AUTH_PRINCIPAL),
+  host, getServerHttpUrl(useSsl), loggedInSubject, cookieStore, cookieName, useSsl, additionalHttpHeaders,
+  customCookies);
 } else {
   // Check for delegation token, if present add it in the header
   String tokenStr = getClientDelegationToken(sessConfMap);

http://git-wip-us.apache.org/repos/asf/hive/blob/cdba00c9/jdbc/src/java/org/apache/hive/jdbc/HttpKerberosRequestInterceptor.java
--
diff --git a/jdbc/src/java/org/apache/hive/jdbc/HttpKerberosRequestInterceptor.java b/jdbc/src/java/org/apache/hive/jdbc/HttpKerberosRequestInterceptor.java
index 28d42d7..516825f 100644
--- a/jdbc/src/java/org/apache/hive/jdbc/HttpKerberosRequestInterceptor.java
+++ b/jdbc/src/java/org/apache/hive/jdbc/HttpKerberosRequestInterceptor.java
@@ -21,6 +21,8 @@ package org.apache.hive.jdbc;
 import java.util.Map;
 import java.util.concurrent.locks.ReentrantLock;
 
+import javax.security.auth.Subject;
+
 import org.apache.hive.service.auth.HttpAuthUtils;
 import org.apache.http.Ht

hive git commit: HIVE-20444: Parameter is not properly quoted in DbNotificationListener.addWriteNotificationLog (Daniel Dai, reviewed by mahesh kumar behera, Sankar Hariappan)

2018-09-21 Thread daijy
Repository: hive
Updated Branches:
  refs/heads/branch-3 6a3d42b3b -> ca5e241c2


HIVE-20444: Parameter is not properly quoted in 
DbNotificationListener.addWriteNotificationLog (Daniel Dai, reviewed by mahesh 
kumar behera, Sankar Hariappan)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/ca5e241c
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/ca5e241c
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/ca5e241c

Branch: refs/heads/branch-3
Commit: ca5e241c22aa77755d00624248e1f0bd9f12ef1e
Parents: 6a3d42b
Author: Daniel Dai 
Authored: Fri Sep 21 14:44:19 2018 -0700
Committer: Daniel Dai 
Committed: Fri Sep 21 14:44:19 2018 -0700

--
 .../listener/DbNotificationListener.java| 57 +---
 1 file changed, 39 insertions(+), 18 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/ca5e241c/hcatalog/server-extensions/src/main/java/org/apache/hive/hcatalog/listener/DbNotificationListener.java
--
diff --git a/hcatalog/server-extensions/src/main/java/org/apache/hive/hcatalog/listener/DbNotificationListener.java b/hcatalog/server-extensions/src/main/java/org/apache/hive/hcatalog/listener/DbNotificationListener.java
index 2ab59d7..909ed56 100644
--- a/hcatalog/server-extensions/src/main/java/org/apache/hive/hcatalog/listener/DbNotificationListener.java
+++ b/hcatalog/server-extensions/src/main/java/org/apache/hive/hcatalog/listener/DbNotificationListener.java
@@ -19,6 +19,7 @@ package org.apache.hive.hcatalog.listener;
 
 import java.io.IOException;
 import java.sql.Connection;
+import java.sql.PreparedStatement;
 import java.sql.ResultSet;
 import java.sql.SQLException;
 import java.sql.Statement;
@@ -728,7 +729,8 @@ public class DbNotificationListener extends TransactionalMetaStoreEventListener
 LOG.debug("DbNotificationListener: adding write notification log for : {}", event.getMessage());
 assert ((dbConn != null) && (sqlGenerator != null));
 
-Statement stmt =null;
+Statement stmt = null;
+PreparedStatement pst = null;
 ResultSet rs = null;
 String dbName = acidWriteEvent.getDatabase();
 String tblName = acidWriteEvent.getTable();
@@ -754,16 +756,26 @@ public class DbNotificationListener extends TransactionalMetaStoreEventListener
 // if rs is empty then no lock is taken and thus it can not cause deadlock.
 long nextNLId = getNextNLId(stmt, sqlGenerator,
 "org.apache.hadoop.hive.metastore.model.MTxnWriteNotificationLog");
-s = "insert into \"TXN_WRITE_NOTIFICATION_LOG\" (\"WNL_ID\", 
\"WNL_TXNID\", \"WNL_WRITEID\"," +
-" \"WNL_DATABASE\", \"WNL_TABLE\"," +
-" \"WNL_PARTITION\", \"WNL_TABLE_OBJ\", \"WNL_PARTITION_OBJ\", 
\"WNL_FILES\", \"WNL_EVENT_TIME\")" +
-" values (" + nextNLId
-+ "," + acidWriteEvent.getTxnId() +  "," + 
acidWriteEvent.getWriteId()+  "," +
-quoteString(dbName)+  "," +  quoteString(tblName)+  "," + 
quoteString(partition)+  "," +
-quoteString(tableObj)+  "," + quoteString(partitionObj) +  "," 
+  quoteString(files)+
-"," +  now() + ")";
-LOG.info("Going to execute insert <" + s + ">");
-stmt.execute(sqlGenerator.addEscapeCharacters(s));
+s = "insert into \"TXN_WRITE_NOTIFICATION_LOG\" " +
+"(\"WNL_ID\", \"WNL_TXNID\", \"WNL_WRITEID\", 
\"WNL_DATABASE\", \"WNL_TABLE\", " +
+"\"WNL_PARTITION\", \"WNL_TABLE_OBJ\", \"WNL_PARTITION_OBJ\", 
" +
+"\"WNL_FILES\", \"WNL_EVENT_TIME\") VALUES 
(?,?,?,?,?,?,?,?,?,?)";
+int currentTime = now();
+pst = dbConn.prepareStatement(sqlGenerator.addEscapeCharacters(s));
+pst.setLong(1, nextNLId);
+pst.setLong(2, acidWriteEvent.getTxnId());
+pst.setLong(3, acidWriteEvent.getWriteId());
+pst.setString(4, dbName);
+pst.setString(5, tblName);
+pst.setString(6, partition);
+pst.setString(7, tableObj);
+pst.setString(8, partitionObj);
+pst.setString(9, files);
+pst.setInt(10, currentTime);
+LOG.info("Going to execute insert <" + s.replaceAll("\\?", "{}") + 
">", nextNLId
+, acidWriteEvent.getTxnId(), acidWriteEvent.getWriteId(), 
quoteString(dbName), quoteString(tblName),
+quoteString(partition), quoteString(tableObj), 
quoteString(partitionObj), quoteString(files), currentTime);
+pst.execute();
   } else {
 String existingFiles = rs.getString(1);
 if (existingFiles.contains(sqlGenerator.addEscapeCharacters(files))) {
@@ -773,20 +785,29 @@ public class DbNotificationListener extends TransactionalMetaStoreEventListener
   re
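
The change above replaces the quoteString-based string concatenation with a PreparedStatement, so parameter values are bound rather than spliced into the SQL text and quoting is handled by the JDBC driver. A self-contained sketch of the same pattern (uses an in-memory H2 database and shortened table/column names purely for illustration; assumes an H2 driver on the classpath):

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;

public class ParameterizedInsertSketch {
  public static void main(String[] args) throws Exception {
    // In-memory H2 database used only to make the sketch self-contained.
    try (Connection conn = DriverManager.getConnection("jdbc:h2:mem:demo")) {
      conn.createStatement().execute(
          "CREATE TABLE TXN_WRITE_LOG (WNL_ID BIGINT, WNL_PARTITION VARCHAR(256))");

      // Value with an embedded quote: concatenating it into the SQL string
      // (the old quoteString approach) would need manual escaping; a bind
      // parameter is passed as data and never parsed as SQL.
      String partition = "ds=2018-09-21/city=O'Fallon";
      try (PreparedStatement pst = conn.prepareStatement(
          "INSERT INTO TXN_WRITE_LOG (WNL_ID, WNL_PARTITION) VALUES (?, ?)")) {
        pst.setLong(1, 1L);
        pst.setString(2, partition);
        pst.execute();
      }
    }
  }
}

A partition value such as city=O'Fallon would have broken the concatenated INSERT unless escaped by hand; as a bind parameter it goes through unchanged.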

hive git commit: HIVE-20444: Parameter is not properly quoted in DbNotificationListener.addWriteNotificationLog (Daniel Dai, reviewed by mahesh kumar behera, Sankar Hariappan)

2018-09-21 Thread daijy
Repository: hive
Updated Branches:
  refs/heads/master f404b0ddf -> cfdb433bc


HIVE-20444: Parameter is not properly quoted in 
DbNotificationListener.addWriteNotificationLog (Daniel Dai, reviewed by mahesh 
kumar behera, Sankar Hariappan)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/cfdb433b
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/cfdb433b
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/cfdb433b

Branch: refs/heads/master
Commit: cfdb433bc1e306161996be10636da8854d682d72
Parents: f404b0d
Author: Daniel Dai 
Authored: Fri Sep 21 14:43:47 2018 -0700
Committer: Daniel Dai 
Committed: Fri Sep 21 14:43:47 2018 -0700

--
 .../listener/DbNotificationListener.java| 57 +---
 1 file changed, 39 insertions(+), 18 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/cfdb433b/hcatalog/server-extensions/src/main/java/org/apache/hive/hcatalog/listener/DbNotificationListener.java
--
diff --git a/hcatalog/server-extensions/src/main/java/org/apache/hive/hcatalog/listener/DbNotificationListener.java b/hcatalog/server-extensions/src/main/java/org/apache/hive/hcatalog/listener/DbNotificationListener.java
index 4f8f1ab..369d9a4 100644
--- a/hcatalog/server-extensions/src/main/java/org/apache/hive/hcatalog/listener/DbNotificationListener.java
+++ b/hcatalog/server-extensions/src/main/java/org/apache/hive/hcatalog/listener/DbNotificationListener.java
@@ -19,6 +19,7 @@ package org.apache.hive.hcatalog.listener;
 
 import java.io.IOException;
 import java.sql.Connection;
+import java.sql.PreparedStatement;
 import java.sql.ResultSet;
 import java.sql.SQLException;
 import java.sql.Statement;
@@ -729,7 +730,8 @@ public class DbNotificationListener extends TransactionalMetaStoreEventListener
 LOG.debug("DbNotificationListener: adding write notification log for : {}", event.getMessage());
 assert ((dbConn != null) && (sqlGenerator != null));
 
-Statement stmt =null;
+Statement stmt = null;
+PreparedStatement pst = null;
 ResultSet rs = null;
 String dbName = acidWriteEvent.getDatabase();
 String tblName = acidWriteEvent.getTable();
@@ -755,16 +757,26 @@ public class DbNotificationListener extends TransactionalMetaStoreEventListener
 // if rs is empty then no lock is taken and thus it can not cause deadlock.
 long nextNLId = getNextNLId(stmt, sqlGenerator,
 "org.apache.hadoop.hive.metastore.model.MTxnWriteNotificationLog");
-s = "insert into \"TXN_WRITE_NOTIFICATION_LOG\" (\"WNL_ID\", 
\"WNL_TXNID\", \"WNL_WRITEID\"," +
-" \"WNL_DATABASE\", \"WNL_TABLE\"," +
-" \"WNL_PARTITION\", \"WNL_TABLE_OBJ\", \"WNL_PARTITION_OBJ\", 
\"WNL_FILES\", \"WNL_EVENT_TIME\")" +
-" values (" + nextNLId
-+ "," + acidWriteEvent.getTxnId() +  "," + 
acidWriteEvent.getWriteId()+  "," +
-quoteString(dbName)+  "," +  quoteString(tblName)+  "," + 
quoteString(partition)+  "," +
-quoteString(tableObj)+  "," + quoteString(partitionObj) +  "," 
+  quoteString(files)+
-"," +  now() + ")";
-LOG.info("Going to execute insert <" + s + ">");
-stmt.execute(sqlGenerator.addEscapeCharacters(s));
+s = "insert into \"TXN_WRITE_NOTIFICATION_LOG\" " +
+"(\"WNL_ID\", \"WNL_TXNID\", \"WNL_WRITEID\", 
\"WNL_DATABASE\", \"WNL_TABLE\", " +
+"\"WNL_PARTITION\", \"WNL_TABLE_OBJ\", \"WNL_PARTITION_OBJ\", 
" +
+"\"WNL_FILES\", \"WNL_EVENT_TIME\") VALUES 
(?,?,?,?,?,?,?,?,?,?)";
+int currentTime = now();
+pst = dbConn.prepareStatement(sqlGenerator.addEscapeCharacters(s));
+pst.setLong(1, nextNLId);
+pst.setLong(2, acidWriteEvent.getTxnId());
+pst.setLong(3, acidWriteEvent.getWriteId());
+pst.setString(4, dbName);
+pst.setString(5, tblName);
+pst.setString(6, partition);
+pst.setString(7, tableObj);
+pst.setString(8, partitionObj);
+pst.setString(9, files);
+pst.setInt(10, currentTime);
+LOG.info("Going to execute insert <" + s.replaceAll("\\?", "{}") + 
">", nextNLId
+, acidWriteEvent.getTxnId(), acidWriteEvent.getWriteId(), 
quoteString(dbName), quoteString(tblName),
+quoteString(partition), quoteString(tableObj), 
quoteString(partitionObj), quoteString(files), currentTime);
+pst.execute();
   } else {
 String existingFiles = rs.getString(1);
 if (existingFiles.contains(sqlGenerator.addEscapeCharacters(files))) {
@@ -774,20 +786,29 @@ public class DbNotificationListener extends TransactionalMetaStoreEventListener
   return

hive git commit: HIVE-20598: Fix typos in HiveAlgorithmsUtil calculations (Zoltan Haindrich reviewed by Ashutosh Chauhan)

2018-09-21 Thread kgyrtkirk
Repository: hive
Updated Branches:
  refs/heads/master bd453b8e1 -> f404b0ddf


HIVE-20598: Fix typos in HiveAlgorithmsUtil calculations (Zoltan Haindrich 
reviewed by Ashutosh Chauhan)

Signed-off-by: Zoltan Haindrich 


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/f404b0dd
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/f404b0dd
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/f404b0dd

Branch: refs/heads/master
Commit: f404b0ddf9b0432429177351818da7649edf48bd
Parents: bd453b8
Author: Zoltan Haindrich 
Authored: Fri Sep 21 17:28:21 2018 +0200
Committer: Zoltan Haindrich 
Committed: Fri Sep 21 17:28:21 2018 +0200

--
 .../calcite/cost/HiveAlgorithmsUtil.java| 30 ++--
 .../calcite/cost/HiveOnTezCostModel.java| 12 
 2 files changed, 21 insertions(+), 21 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/f404b0dd/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/cost/HiveAlgorithmsUtil.java
--
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/cost/HiveAlgorithmsUtil.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/cost/HiveAlgorithmsUtil.java
index 8b87ae4..579a2e7 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/cost/HiveAlgorithmsUtil.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/cost/HiveAlgorithmsUtil.java
@@ -39,7 +39,7 @@ import com.google.common.collect.ImmutableList;
 
 public class HiveAlgorithmsUtil {
 
-  private final double cpuCost;
+  private final double cpuUnitCost;
   private final double netCost;
   private final double localFSWrite;
   private final double localFSRead;
@@ -47,8 +47,8 @@ public class HiveAlgorithmsUtil {
   private final double hdfsRead;
 
   HiveAlgorithmsUtil(HiveConf conf) {
-cpuCost = Double.parseDouble(HiveConf.getVar(conf, HiveConf.ConfVars.HIVE_CBO_COST_MODEL_CPU));
-netCost = cpuCost
+cpuUnitCost = Double.parseDouble(HiveConf.getVar(conf, HiveConf.ConfVars.HIVE_CBO_COST_MODEL_CPU));
+netCost = cpuUnitCost
 * Double.parseDouble(HiveConf.getVar(conf, HiveConf.ConfVars.HIVE_CBO_COST_MODEL_NET));
 localFSWrite = netCost
 * Double.parseDouble(HiveConf.getVar(conf, HiveConf.ConfVars.HIVE_CBO_COST_MODEL_LFS_WRITE));
@@ -80,13 +80,13 @@ public class HiveAlgorithmsUtil {
 cpuCost += computeSortCPUCost(cardinality);
   }
   // Merge cost
-  cpuCost += cardinality * cpuCost;
+  cpuCost += cardinality * cpuUnitCost;
 }
 return cpuCost;
   }
 
   public double computeSortCPUCost(Double cardinality) {
-return cardinality * Math.log(cardinality) * cpuCost;
+return cardinality * Math.log(cardinality) * cpuUnitCost;
   }
 
   public double computeSortMergeIOCost(
@@ -113,7 +113,7 @@ public class HiveAlgorithmsUtil {
 return ioCost;
   }
 
-  public static double computeMapJoinCPUCost(
+  public double computeMapJoinCPUCost(
   ImmutableList<Double> cardinalities,
   ImmutableBitSet streaming) {
 // Hash-join
@@ -123,7 +123,7 @@ public class HiveAlgorithmsUtil {
   if (!streaming.get(i)) {
 cpuCost += cardinality;
   }
-  cpuCost += cardinality * cpuCost;
+  cpuCost += cardinality * cpuUnitCost;
 }
 return cpuCost;
   }
@@ -151,9 +151,9 @@ public class HiveAlgorithmsUtil {
 for (int i=0; i cardinalities) {
 // Hash-join
 double cpuCost = 0.0;
 for (int i=0; i

http://git-wip-us.apache.org/repos/asf/hive/blob/f404b0dd/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/cost/HiveOnTezCostModel.java
--
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/cost/HiveOnTezCostModel.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/cost/HiveOnTezCostModel.java
index 1fc9ea4..b451b13 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/cost/HiveOnTezCostModel.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/cost/HiveOnTezCostModel.java
@@ -23,22 +23,22 @@ import java.util.List;
 import org.apache.calcite.plan.RelOptCost;
 import org.apache.calcite.rel.RelCollation;
 import org.apache.calcite.rel.RelDistribution;
-import org.apache.calcite.rel.RelDistributions;
 import org.apache.calcite.rel.RelDistribution.Type;
+import org.apache.calcite.rel.RelDistributions;
 import org.apache.calcite.rel.RelNode;
 import org.apache.calcite.rel.metadata.RelMetadataQuery;
 import org.apache.calcite.util.ImmutableBitSet;
 import org.apache.calcite.util.ImmutableIntList;
 import org.apache.calcite.util.Pair;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
 import org.apache.hadoop.hive.conf.HiveConf;
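
The HiveAlgorithmsUtil typo fixed above comes from the local accumulator shadowing the cpuCost field: the merge/probe term multiplied the cardinality by the running total instead of by the configured per-tuple cost, so renaming the field to cpuUnitCost and using it keeps the cost linear in the input size. A small standalone sketch (simplified from computeMapJoinCPUCost, with the streaming check dropped; the numbers are arbitrary) contrasts the two accumulations:

public class CostTypoSketch {
  public static void main(String[] args) {
    double cpuUnitCost = 1e-6;               // configured cost of processing one tuple
    double[] cardinalities = {1_000d, 2_000d};

    // Buggy accumulation (pre-HIVE-20598): the merge term multiplies the
    // cardinality by the running total itself, so the cost explodes.
    double buggy = 0.0;
    for (double card : cardinalities) {
      buggy += card;                         // hash/probe part (simplified)
      buggy += card * buggy;                 // should have been card * cpuUnitCost
    }

    // Fixed accumulation: each input adds card plus card * cpuUnitCost.
    double fixed = 0.0;
    for (double card : cardinalities) {
      fixed += card;
      fixed += card * cpuUnitCost;
    }

    System.out.printf("buggy=%.3f fixed=%.3f%n", buggy, fixed);
  }
}

With these figures the buggy total is already past two billion after two inputs, while the fixed total stays close to the 3,000-tuple sum.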