hive git commit: HIVE-16188: beeline should block the connection if given invalid database name. (Sahil Takiar, reviewed by Sergio Pena, Vihang Karajgaonkar, Pavas Garg)

2017-03-27 Thread spena
Repository: hive
Updated Branches:
  refs/heads/branch-2 982606e96 -> 0273823fb


HIVE-16188: beeline should block the connection if given invalid database name. 
(Sahil Takiar, reviewed by Sergio Pena, Vihang Karajgaonkar, Pavas Garg)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/0273823f
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/0273823f
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/0273823f

Branch: refs/heads/branch-2
Commit: 0273823fb1f0e3e5a453bf25968f2595a9dde37f
Parents: 982606e
Author: Sahil Takiar 
Authored: Mon Mar 27 15:58:11 2017 -0700
Committer: Sergio Pena 
Committed: Mon Mar 27 17:00:35 2017 -0700

--
 .../test/java/org/apache/hive/jdbc/TestJdbcDriver2.java |  9 -
 .../java/org/apache/hive/jdbc/TestJdbcWithMiniHS2.java  | 12 
 .../hive/service/cli/session/HiveSessionImpl.java   |  7 +++
 3 files changed, 27 insertions(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/0273823f/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcDriver2.java
--
diff --git 
a/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcDriver2.java 
b/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcDriver2.java
index 35aad6d..6e9223a 100644
--- a/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcDriver2.java
+++ b/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcDriver2.java
@@ -30,6 +30,7 @@ import org.apache.hadoop.hive.ql.exec.UDF;
 import org.apache.hadoop.hive.ql.processors.DfsProcessor;
 import org.apache.hive.common.util.HiveVersionInfo;
 import org.apache.hive.jdbc.Utils.JdbcConnectionParams;
+import org.apache.hive.service.cli.HiveSQLException;
 import org.apache.hive.service.cli.operation.ClassicTableTypeMapping;
 import 
org.apache.hive.service.cli.operation.ClassicTableTypeMapping.ClassicTableTypes;
 import org.apache.hive.service.cli.operation.HiveTableTypeMapping;
@@ -577,7 +578,7 @@ public class TestJdbcDriver2 {
 
   @Test
   public void testSetOnConnection() throws Exception {
-Connection connection = 
getConnection("test?conf1=conf2;conf3=conf4#var1=var2;var3=var4");
+Connection connection = getConnection(testDbName + 
"?conf1=conf2;conf3=conf4#var1=var2;var3=var4");
 try {
   verifyConfValue(connection, "conf1", "conf2");
   verifyConfValue(connection, "conf3", "conf4");
@@ -2922,4 +2923,10 @@ public class TestJdbcDriver2 {
 assertEquals(rowCount, dataFileRowCount);
 stmt.execute("drop table " + tblName);
   }
+
+  // Test that opening a JDBC connection to a non-existent database throws a 
HiveSQLException
+  @Test(expected = HiveSQLException.class)
+  public void testConnectInvalidDatabase() throws SQLException {
+DriverManager.getConnection("jdbc:hive2:///databasedoesnotexist", "", "");
+  }
 }

http://git-wip-us.apache.org/repos/asf/hive/blob/0273823f/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcWithMiniHS2.java
--
diff --git 
a/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcWithMiniHS2.java 
b/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcWithMiniHS2.java
index 3780b4e..fc2cb08 100644
--- 
a/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcWithMiniHS2.java
+++ 
b/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcWithMiniHS2.java
@@ -112,6 +112,14 @@ public class TestJdbcWithMiniHS2 {
 stmt.execute("drop database if exists " + testDbName + " cascade");
 stmt.execute("create database " + testDbName);
 stmt.close();
+
+try {
+  openTestConnections();
+} catch (Exception e) {
+  System.out.println("Unable to open default connections to MiniHS2: " + 
e);
+  throw e;
+}
+
 // tables in test db
 createTestTables(conTestDb, testDbName);
   }
@@ -183,6 +191,7 @@ public class TestJdbcWithMiniHS2 {
 HiveConf conf = new HiveConf();
 startMiniHS2(conf);
 openDefaultConnections();
+openTestConnections();
   }
 
   private static void startMiniHS2(HiveConf conf) throws Exception {
@@ -208,6 +217,9 @@ public class TestJdbcWithMiniHS2 {
 
   private static void openDefaultConnections() throws Exception {
 conDefault = getConnection();
+  }
+
+  private static void openTestConnections() throws Exception {
 conTestDb = getConnection(testDbName);
   }
 

http://git-wip-us.apache.org/repos/asf/hive/blob/0273823f/service/src/java/org/apache/hive/service/cli/session/HiveSessionImpl.java
--
diff --git 
a/service/src/java/org/apache/hive/service/cli/session/HiveSessionImpl.java 
b/service/src/java/org/apache/hive/service/cli/session/HiveSessionImpl.java

hive git commit: HIVE-16188: beeline should block the connection if given invalid database name. (Sahil Takiar, reviewed by Sergio Pena, Vihang Karajgaonkar, Pavas Garg)

2017-03-27 Thread spena
Repository: hive
Updated Branches:
  refs/heads/master 84f4e3a3b -> 267633a24


HIVE-16188: beeline should block the connection if given invalid database name. 
(Sahil Takiar, reviewed by Sergio Pena, Vihang Karajgaonkar, Pavas Garg)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/267633a2
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/267633a2
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/267633a2

Branch: refs/heads/master
Commit: 267633a24461af41915e1aa0657c22beafdad84f
Parents: 84f4e3a
Author: Sahil Takiar 
Authored: Mon Mar 27 15:58:11 2017 -0700
Committer: Sergio Pena 
Committed: Mon Mar 27 16:04:28 2017 -0700

--
 .../test/java/org/apache/hive/jdbc/TestJdbcDriver2.java |  9 -
 .../java/org/apache/hive/jdbc/TestJdbcWithMiniHS2.java  | 12 
 .../hive/service/cli/session/HiveSessionImpl.java   |  7 +++
 3 files changed, 27 insertions(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/267633a2/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcDriver2.java
--
diff --git 
a/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcDriver2.java 
b/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcDriver2.java
index 35aad6d..6e9223a 100644
--- a/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcDriver2.java
+++ b/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcDriver2.java
@@ -30,6 +30,7 @@ import org.apache.hadoop.hive.ql.exec.UDF;
 import org.apache.hadoop.hive.ql.processors.DfsProcessor;
 import org.apache.hive.common.util.HiveVersionInfo;
 import org.apache.hive.jdbc.Utils.JdbcConnectionParams;
+import org.apache.hive.service.cli.HiveSQLException;
 import org.apache.hive.service.cli.operation.ClassicTableTypeMapping;
 import 
org.apache.hive.service.cli.operation.ClassicTableTypeMapping.ClassicTableTypes;
 import org.apache.hive.service.cli.operation.HiveTableTypeMapping;
@@ -577,7 +578,7 @@ public class TestJdbcDriver2 {
 
   @Test
   public void testSetOnConnection() throws Exception {
-Connection connection = 
getConnection("test?conf1=conf2;conf3=conf4#var1=var2;var3=var4");
+Connection connection = getConnection(testDbName + 
"?conf1=conf2;conf3=conf4#var1=var2;var3=var4");
 try {
   verifyConfValue(connection, "conf1", "conf2");
   verifyConfValue(connection, "conf3", "conf4");
@@ -2922,4 +2923,10 @@ public class TestJdbcDriver2 {
 assertEquals(rowCount, dataFileRowCount);
 stmt.execute("drop table " + tblName);
   }
+
+  // Test that opening a JDBC connection to a non-existent database throws a 
HiveSQLException
+  @Test(expected = HiveSQLException.class)
+  public void testConnectInvalidDatabase() throws SQLException {
+DriverManager.getConnection("jdbc:hive2:///databasedoesnotexist", "", "");
+  }
 }

http://git-wip-us.apache.org/repos/asf/hive/blob/267633a2/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcWithMiniHS2.java
--
diff --git 
a/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcWithMiniHS2.java 
b/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcWithMiniHS2.java
index 3780b4e..fc2cb08 100644
--- 
a/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcWithMiniHS2.java
+++ 
b/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcWithMiniHS2.java
@@ -112,6 +112,14 @@ public class TestJdbcWithMiniHS2 {
 stmt.execute("drop database if exists " + testDbName + " cascade");
 stmt.execute("create database " + testDbName);
 stmt.close();
+
+try {
+  openTestConnections();
+} catch (Exception e) {
+  System.out.println("Unable to open default connections to MiniHS2: " + 
e);
+  throw e;
+}
+
 // tables in test db
 createTestTables(conTestDb, testDbName);
   }
@@ -183,6 +191,7 @@ public class TestJdbcWithMiniHS2 {
 HiveConf conf = new HiveConf();
 startMiniHS2(conf);
 openDefaultConnections();
+openTestConnections();
   }
 
   private static void startMiniHS2(HiveConf conf) throws Exception {
@@ -208,6 +217,9 @@ public class TestJdbcWithMiniHS2 {
 
   private static void openDefaultConnections() throws Exception {
 conDefault = getConnection();
+  }
+
+  private static void openTestConnections() throws Exception {
 conTestDb = getConnection(testDbName);
   }
 

http://git-wip-us.apache.org/repos/asf/hive/blob/267633a2/service/src/java/org/apache/hive/service/cli/session/HiveSessionImpl.java
--
diff --git 
a/service/src/java/org/apache/hive/service/cli/session/HiveSessionImpl.java 
b/service/src/java/org/apache/hive/service/cli/session/HiveSessionImpl.java

hive git commit: HIVE-16219: metastore notification_log contains serialized message with non functional fields (Anishek Agarwal reviewed by Vaibhav Gumashta)

2017-03-27 Thread vgumashta
Repository: hive
Updated Branches:
  refs/heads/master 4d9810afb -> 84f4e3a3b


HIVE-16219: metastore notification_log contains serialized message with non 
functional fields (Anishek Agarwal reviewed by Vaibhav Gumashta)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/84f4e3a3
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/84f4e3a3
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/84f4e3a3

Branch: refs/heads/master
Commit: 84f4e3a3bf8ccc924fc6c260a47a30b6e4f1adcc
Parents: 4d9810a
Author: Vaibhav Gumashta 
Authored: Mon Mar 27 15:54:41 2017 -0700
Committer: Vaibhav Gumashta 
Committed: Mon Mar 27 15:57:20 2017 -0700

--
 metastore/pom.xml   |  6 ++
 .../metastore/messaging/PartitionFiles.java |  3 +
 .../messaging/json/JSONMessageDeserializer.java |  4 +
 .../json/JSONMessageDeserializerTest.java   | 89 
 pom.xml |  6 ++
 5 files changed, 108 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/84f4e3a3/metastore/pom.xml
--
diff --git a/metastore/pom.xml b/metastore/pom.xml
index 35752ff..ef908ca 100644
--- a/metastore/pom.xml
+++ b/metastore/pom.xml
@@ -255,6 +255,12 @@
   ${disruptor.version}
   test
 
+
+  org.skyscreamer
+  jsonassert
+  1.4.0
+  test
+
   
 
   

http://git-wip-us.apache.org/repos/asf/hive/blob/84f4e3a3/metastore/src/java/org/apache/hadoop/hive/metastore/messaging/PartitionFiles.java
--
diff --git 
a/metastore/src/java/org/apache/hadoop/hive/metastore/messaging/PartitionFiles.java
 
b/metastore/src/java/org/apache/hadoop/hive/metastore/messaging/PartitionFiles.java
index b10b8a8..4fd7f8c 100644
--- 
a/metastore/src/java/org/apache/hadoop/hive/metastore/messaging/PartitionFiles.java
+++ 
b/metastore/src/java/org/apache/hadoop/hive/metastore/messaging/PartitionFiles.java
@@ -22,10 +22,13 @@ import java.util.Iterator;
 import java.util.List;
 
 import com.google.common.collect.Lists;
+import org.codehaus.jackson.annotate.JsonProperty;
 
 public class PartitionFiles {
 
+  @JsonProperty
   private String partitionName;
+  @JsonProperty
   private List&lt;String&gt; files;
 
  public PartitionFiles(String partitionName, Iterator&lt;String&gt; files) {

http://git-wip-us.apache.org/repos/asf/hive/blob/84f4e3a3/metastore/src/java/org/apache/hadoop/hive/metastore/messaging/json/JSONMessageDeserializer.java
--
diff --git 
a/metastore/src/java/org/apache/hadoop/hive/metastore/messaging/json/JSONMessageDeserializer.java
 
b/metastore/src/java/org/apache/hadoop/hive/metastore/messaging/json/JSONMessageDeserializer.java
index 41732c7..40ef5fb 100644
--- 
a/metastore/src/java/org/apache/hadoop/hive/metastore/messaging/json/JSONMessageDeserializer.java
+++ 
b/metastore/src/java/org/apache/hadoop/hive/metastore/messaging/json/JSONMessageDeserializer.java
@@ -36,6 +36,7 @@ import 
org.apache.hadoop.hive.metastore.messaging.InsertMessage;
 import org.apache.hadoop.hive.metastore.messaging.MessageDeserializer;
 import org.codehaus.jackson.map.DeserializationConfig;
 import org.codehaus.jackson.map.ObjectMapper;
+import org.codehaus.jackson.map.SerializationConfig;
 
 /**
  * MessageDeserializer implementation, for deserializing from JSON strings.
@@ -46,6 +47,9 @@ public class JSONMessageDeserializer extends 
MessageDeserializer {
 
   static {
 mapper.configure(DeserializationConfig.Feature.FAIL_ON_UNKNOWN_PROPERTIES, 
false);
+mapper.configure(SerializationConfig.Feature.AUTO_DETECT_GETTERS, false);
+mapper.configure(SerializationConfig.Feature.AUTO_DETECT_IS_GETTERS, 
false);
+mapper.configure(SerializationConfig.Feature.AUTO_DETECT_FIELDS, false);
   }
 
   @Override

http://git-wip-us.apache.org/repos/asf/hive/blob/84f4e3a3/metastore/src/test/org/apache/hadoop/hive/metastore/messaging/json/JSONMessageDeserializerTest.java
--
diff --git 
a/metastore/src/test/org/apache/hadoop/hive/metastore/messaging/json/JSONMessageDeserializerTest.java
 
b/metastore/src/test/org/apache/hadoop/hive/metastore/messaging/json/JSONMessageDeserializerTest.java
new file mode 100644
index 000..b7c6304
--- /dev/null
+++ 
b/metastore/src/test/org/apache/hadoop/hive/metastore/messaging/json/JSONMessageDeserializerTest.java
@@ -0,0 +1,89 @@
+package org.apache.hadoop.hive.metastore.messaging.json;
+
+import org.codehaus.jackson.annotate.JsonProperty;
+import org.json.JSONException;
+import org.junit.Test;
+import org.skyscreamer.jsonassert.JSONAssert;
+
+import java.io.IOException;
+import java.util.HashMap;

hive git commit: HIVE-16292: SparkUtilities upload to HDFS doesn't work with viewfs (Jimmy Xiang, reviewed by Rui Li)

2017-03-27 Thread jxiang
Repository: hive
Updated Branches:
  refs/heads/master 94ed34fa4 -> 4d9810afb


HIVE-16292: SparkUtilities upload to HDFS doesn't work with viewfs (Jimmy 
Xiang, reviewed by Rui Li)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/4d9810af
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/4d9810af
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/4d9810af

Branch: refs/heads/master
Commit: 4d9810afba996eadb506864ef0bd0d5e3617186d
Parents: 94ed34f
Author: Jimmy Xiang 
Authored: Fri Mar 24 09:24:59 2017 -0700
Committer: Jimmy Xiang 
Committed: Mon Mar 27 09:31:32 2017 -0700

--
 .../org/apache/hadoop/hive/ql/exec/spark/SparkUtilities.java | 4 ++--
 .../java/org/apache/hive/spark/client/SparkClientUtilities.java  | 3 ++-
 2 files changed, 4 insertions(+), 3 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/4d9810af/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkUtilities.java
--
diff --git 
a/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkUtilities.java 
b/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkUtilities.java
index 7d18c0a..eb9883a 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkUtilities.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkUtilities.java
@@ -78,7 +78,7 @@ public class SparkUtilities {
 Path localFile = new Path(source.getPath());
 Path remoteFile = new 
Path(SessionState.get().getSparkSession().getHDFSSessionDir(),
 getFileName(source));
-FileSystem fileSystem = FileSystem.get(conf);
+FileSystem fileSystem = FileSystem.get(remoteFile.toUri(), conf);
 // Overwrite if the remote file already exists. Whether the file can be 
added
 // on executor is up to spark, i.e. spark.files.overwrite
 fileSystem.copyFromLocalFile(false, true, localFile, remoteFile);
@@ -92,7 +92,7 @@ public class SparkUtilities {
 String deployMode = sparkConf.contains("spark.submit.deployMode") ?
 sparkConf.get("spark.submit.deployMode") : null;
 return SparkClientUtilities.isYarnClusterMode(master, deployMode) &&
-!source.getScheme().equals("hdfs");
+!(source.getScheme().equals("hdfs") || 
source.getScheme().equals("viewfs"));
   }
 
   private static String getFileName(URI uri) {

http://git-wip-us.apache.org/repos/asf/hive/blob/4d9810af/spark-client/src/main/java/org/apache/hive/spark/client/SparkClientUtilities.java
--
diff --git 
a/spark-client/src/main/java/org/apache/hive/spark/client/SparkClientUtilities.java
 
b/spark-client/src/main/java/org/apache/hive/spark/client/SparkClientUtilities.java
index 9ef3f38..210da2a 100644
--- 
a/spark-client/src/main/java/org/apache/hive/spark/client/SparkClientUtilities.java
+++ 
b/spark-client/src/main/java/org/apache/hive/spark/client/SparkClientUtilities.java
@@ -83,7 +83,8 @@ public class SparkClientUtilities {
 try {
   if (StringUtils.indexOf(path, "file:/") == 0) {
 url = new URL(path);
-  } else if (StringUtils.indexOf(path, "hdfs:/") == 0) {
+  } else if (StringUtils.indexOf(path, "hdfs:/") == 0
+  || StringUtils.indexOf(path, "viewfs:/") == 0) {
 Path remoteFile = new Path(path);
 Path localFile =
 new Path(localTmpDir.getAbsolutePath() + File.separator + 
remoteFile.getName());