[1/2] hive git commit: HIVE-16266 : Enable function metadata to be written during bootstrap (Anishek Agarwal, reviewed by Sushanth Sowmyan)

2017-04-03 Thread khorgath
Repository: hive
Updated Branches:
  refs/heads/master 9945b5d5d -> 2985262b8


http://git-wip-us.apache.org/repos/asf/hive/blob/2985262b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/events/EventHandlerFactory.java
--
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/events/EventHandlerFactory.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/events/EventHandlerFactory.java
new file mode 100644
index 0000000..53adea8
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/events/EventHandlerFactory.java
@@ -0,0 +1,75 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.ql.parse.repl.events;
+
+import org.apache.hadoop.hive.metastore.api.NotificationEvent;
+import org.apache.hadoop.hive.metastore.messaging.MessageFactory;
+
+import java.lang.reflect.Constructor;
+import java.lang.reflect.InvocationTargetException;
+import java.lang.reflect.Modifier;
+import java.util.HashMap;
+import java.util.Map;
+
+public class EventHandlerFactory {
+  private EventHandlerFactory() {
+  }
+
+  private static Map<String, Class<? extends EventHandler>> registeredHandlers = new HashMap<>();
+
+  static {
+    register(MessageFactory.ADD_PARTITION_EVENT, AddPartitionHandler.class);
+    register(MessageFactory.ALTER_PARTITION_EVENT, AlterPartitionHandler.class);
+    register(MessageFactory.ALTER_TABLE_EVENT, AlterTableHandler.class);
+    register(MessageFactory.CREATE_TABLE_EVENT, CreateTableHandler.class);
+    register(MessageFactory.DROP_PARTITION_EVENT, DropPartitionHandler.class);
+    register(MessageFactory.DROP_TABLE_EVENT, DropTableHandler.class);
+    register(MessageFactory.INSERT_EVENT, InsertHandler.class);
+  }
+
+  static void register(String event, Class<? extends EventHandler> handlerClazz) {
+    try {
+      Constructor<? extends EventHandler> constructor =
+          handlerClazz.getDeclaredConstructor(NotificationEvent.class);
+      assert constructor != null;
+      assert !Modifier.isPrivate(constructor.getModifiers());
+      registeredHandlers.put(event, handlerClazz);
+    } catch (NoSuchMethodException e) {
+      throw new IllegalArgumentException("handler class: " + handlerClazz.getCanonicalName()
+          + " does not have a constructor with a single parameter of type: "
+          + NotificationEvent.class.getCanonicalName(), e);
+    }
+  }
+
+  public static EventHandler handlerFor(NotificationEvent event) {
+    if (registeredHandlers.containsKey(event.getEventType())) {
+      Class<? extends EventHandler> handlerClazz = registeredHandlers.get(event.getEventType());
+      try {
+        Constructor<? extends EventHandler> constructor =
+            handlerClazz.getDeclaredConstructor(NotificationEvent.class);
+        return constructor.newInstance(event);
+      } catch (NoSuchMethodException | IllegalAccessException | InstantiationException
+          | InvocationTargetException e) {
+        // this should never happen. however we want to make sure we propagate the exception
+        throw new RuntimeException("failed when creating handler for " + event.getEventType()
+            + " with the responsible class being " + handlerClazz.getCanonicalName(), e);
+      }
+    }
+    return new DefaultHandler(event);
+  }
+}

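For orientation, a minimal sketch of how a caller could use the factory above. The NotificationEvent values are illustrative only (assuming the thrift-generated constructor taking eventId, eventTime, eventType, message), not taken from the actual replication dump code:

import org.apache.hadoop.hive.metastore.api.NotificationEvent;
import org.apache.hadoop.hive.metastore.messaging.MessageFactory;
import org.apache.hadoop.hive.ql.parse.repl.events.EventHandler;
import org.apache.hadoop.hive.ql.parse.repl.events.EventHandlerFactory;

public class HandlerDispatchSketch {
  public static void main(String[] args) {
    // A registered event type resolves to its handler through the static map.
    NotificationEvent create =
        new NotificationEvent(42L, 0, MessageFactory.CREATE_TABLE_EVENT, "{}");
    EventHandler handler = EventHandlerFactory.handlerFor(create);

    // Unregistered types fall back to DefaultHandler instead of failing.
    NotificationEvent unknown = new NotificationEvent(43L, 0, "SOME_FUTURE_EVENT", "{}");
    EventHandler fallback = EventHandlerFactory.handlerFor(unknown);
    System.out.println(handler.getClass() + " / " + fallback.getClass());
  }
}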
http://git-wip-us.apache.org/repos/asf/hive/blob/2985262b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/events/InsertHandler.java
--
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/events/InsertHandler.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/events/InsertHandler.java
new file mode 100644
index 0000000..1346276
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/events/InsertHandler.java
@@ -0,0 +1,96 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * 

[2/2] hive git commit: HIVE-16266 : Enable function metadata to be written during bootstrap (Anishek Agarwal, reviewed by Sushanth Sowmyan)

2017-04-03 Thread khorgath
HIVE-16266 : Enable function metadata to be written during bootstrap (Anishek 
Agarwal, reviewed by Sushanth Sowmyan)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/2985262b
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/2985262b
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/2985262b

Branch: refs/heads/master
Commit: 2985262b895cb00b22935d0ff660a6003082b631
Parents: 9945b5d
Author: Sushanth Sowmyan 
Authored: Mon Apr 3 15:30:58 2017 -0700
Committer: Sushanth Sowmyan 
Committed: Mon Apr 3 15:32:19 2017 -0700

--
 .../hive/ql/TestReplicationScenarios.java   |   8 +-
 .../apache/hadoop/hive/ql/parse/EximUtil.java   | 104 +-
 .../hive/ql/parse/FunctionSemanticAnalyzer.java |   8 +-
 .../ql/parse/ReplicationSemanticAnalyzer.java   | 355 +--
 .../hive/ql/parse/repl/dump/DBSerializer.java   |  54 +++
 .../ql/parse/repl/dump/FunctionSerializer.java  |  48 +++
 .../hive/ql/parse/repl/dump/JsonWriter.java |  54 +++
 .../ql/parse/repl/dump/PartitionSerializer.java |  64 
 .../repl/dump/ReplicationSpecSerializer.java|  36 ++
 .../ql/parse/repl/dump/TableSerializer.java | 113 ++
 .../repl/dump/VersionCompatibleSerializer.java  |  37 ++
 .../ql/parse/repl/events/AbstractHandler.java   |  46 +++
 .../parse/repl/events/AddPartitionHandler.java  | 114 ++
 .../repl/events/AlterPartitionHandler.java  | 103 ++
 .../ql/parse/repl/events/AlterTableHandler.java |  92 +
 .../parse/repl/events/CreateTableHandler.java   |  86 +
 .../ql/parse/repl/events/DefaultHandler.java|  43 +++
 .../parse/repl/events/DropPartitionHandler.java |  43 +++
 .../ql/parse/repl/events/DropTableHandler.java  |  43 +++
 .../hive/ql/parse/repl/events/EventHandler.java |  62 
 .../parse/repl/events/EventHandlerFactory.java  |  75 
 .../ql/parse/repl/events/InsertHandler.java |  96 +
 .../repl/events/TestEventHandlerFactory.java|  44 +++
 23 files changed, 1357 insertions(+), 371 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/2985262b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/TestReplicationScenarios.java
--
diff --git a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/TestReplicationScenarios.java b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/TestReplicationScenarios.java
index 9e79b6a..2688f35 100644
--- a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/TestReplicationScenarios.java
+++ b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/TestReplicationScenarios.java
@@ -1251,8 +1251,6 @@ public class TestReplicationScenarios {
     assertFalse(EventUtils.andFilter(no, yes, no).accept(dummyEvent));
     assertFalse(EventUtils.andFilter(no, no, yes).accept(dummyEvent));
     assertFalse(EventUtils.andFilter(no, no, no).accept(dummyEvent));
-
-
   }
 
   private NotificationEvent createDummyEvent(String dbname, String tblname, long evid) {
@@ -1283,7 +1281,7 @@ public class TestReplicationScenarios {
     if (tblName != null){
       verifyRun("REPL STATUS " + dbName + "_dupe." + tblName, lastReplDumpId);
     }
-    assertTrue(lastReplDumpId.compareTo(prevReplDumpId) > 0);
+    assertTrue(Long.parseLong(lastReplDumpId) > Long.parseLong(prevReplDumpId));
     return lastReplDumpId;
   }
 
@@ -1298,7 +1296,7 @@ public class TestReplicationScenarios {
     run("REPL LOAD " + dbName + "_dupe." + tblName + " FROM '" + lastDumpLocn + "'");
     verifyRun("REPL STATUS " + dbName + "_dupe", lastDbReplDumpId);
     verifyRun("REPL STATUS " + dbName + "_dupe." + tblName, lastReplDumpId);
-    assertTrue(lastReplDumpId.compareTo(prevReplDumpId) > 0);
+    assertTrue(Long.parseLong(lastReplDumpId) > Long.parseLong(prevReplDumpId));
     return lastReplDumpId;
   }
 
@@ -1392,7 +1390,7 @@ public class TestReplicationScenarios {
     return success;
   }
 
-  public static void createTestDataFile(String filename, String[] lines) throws IOException {
+  private static void createTestDataFile(String filename, String[] lines) throws IOException {
    FileWriter writer = null;
    try {
      File file = new File(filename);

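The compareTo-to-parseLong switch above fixes a real ordering bug: replication dump ids are numeric strings, and lexicographic comparison misorders them once the digit counts differ. A standalone illustration:

public class DumpIdCompareSketch {
  public static void main(String[] args) {
    String prev = "9";
    String last = "10";
    // Lexicographically "10" sorts before "9", so the old assertion could
    // fail even though the dump id actually advanced.
    System.out.println(last.compareTo(prev) > 0);                      // false
    System.out.println(Long.parseLong(last) > Long.parseLong(prev));  // true
  }
}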
http://git-wip-us.apache.org/repos/asf/hive/blob/2985262b/ql/src/java/org/apache/hadoop/hive/ql/parse/EximUtil.java
--
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/EximUtil.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/EximUtil.java
index 10cc286..1ea5182 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/EximUtil.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/EximUtil.java
@@ -19,7 +19,6 @@
 package org.apache.hadoop.hive.ql.parse;
 
 import 

hive git commit: HIVE-16347 : HiveMetastoreChecker should skip listing partitions which are not valid when hive.msck.path.validation is set to skip or ignore (Vihang Karajgaonkar via Ashutosh Chauhan)

2017-04-03 Thread hashutosh
Repository: hive
Updated Branches:
  refs/heads/master 91d25b48a -> 9945b5d5d


HIVE-16347 : HiveMetastoreChecker should skip listing partitions which are not 
valid when hive.msck.path.validation is set to skip or ignore (Vihang 
Karajgaonkar via Ashutosh Chauhan)

Signed-off-by: Ashutosh Chauhan 


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/9945b5d5
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/9945b5d5
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/9945b5d5

Branch: refs/heads/master
Commit: 9945b5d5d284677a2bcf042f236b43c2647c50e4
Parents: 91d25b4
Author: Vihang Karajgaonkar 
Authored: Mon Apr 3 14:36:46 2017 -0700
Committer: Ashutosh Chauhan 
Committed: Mon Apr 3 14:36:46 2017 -0700

--
 .../hive/ql/metadata/HiveMetaStoreChecker.java  |  5 +-
 .../ql/metadata/TestHiveMetaStoreChecker.java   | 52 
 2 files changed, 55 insertions(+), 2 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/9945b5d5/ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveMetaStoreChecker.java
--
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveMetaStoreChecker.java b/ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveMetaStoreChecker.java
index da24c70..4add836 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveMetaStoreChecker.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveMetaStoreChecker.java
@@ -498,9 +498,10 @@ public class HiveMetaStoreChecker {
         } else if (!parts[0].equalsIgnoreCase(partColNames.get(currentDepth))) {
           logOrThrowExceptionWithMsg(
               "Unexpected partition key " + parts[0] + " found at " + nextPath);
+        } else {
+          // add sub-directory to the work queue if maxDepth is not yet reached
+          pendingPaths.add(new PathDepthInfo(nextPath, currentDepth + 1));
         }
-        // add sub-directory to the work queue if maxDepth is not yet reached
-        pendingPaths.add(new PathDepthInfo(nextPath, currentDepth + 1));
       }
     }
     if (currentDepth == partColNames.size()) {

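The hunk above changes the traversal so a directory is enqueued for deeper scanning only when its name matches the expected partition key; in skip/ignore mode an invalid name is logged and dropped rather than walked or thrown on. A self-contained sketch of that loop shape, with toy data standing in for the FileSystem listing (PathDepthInfo here is a simplified stand-in for the checker's inner class):

import java.util.ArrayDeque;
import java.util.Arrays;
import java.util.Deque;
import java.util.List;

public class MsckSkipSketch {
  static class PathDepthInfo {
    final String path;
    final int depth;
    PathDepthInfo(String path, int depth) { this.path = path; this.depth = depth; }
  }

  public static void main(String[] args) {
    List<String> partColNames = Arrays.asList("fakedate", "fakecity");
    // Toy directory listing standing in for the FileSystem calls in the real checker.
    List<String> children = Arrays.asList("fakedate=2009-01-01", "garbage", "city=sanjose");
    Deque<PathDepthInfo> pendingPaths = new ArrayDeque<>();
    pendingPaths.add(new PathDepthInfo("warehouse/t", 0));
    while (!pendingPaths.isEmpty()) {
      PathDepthInfo info = pendingPaths.poll();
      if (info.depth == partColNames.size()) continue;
      for (String child : children) {
        String[] parts = child.split("=");
        if (parts.length != 2 || !parts[0].equalsIgnoreCase(partColNames.get(info.depth))) {
          // skip/ignore mode: log and drop, instead of descending (or throwing).
          System.out.println("ignoring invalid partition dir: " + child);
        } else {
          // The fix: only well-formed, correctly-keyed dirs are enqueued for traversal.
          pendingPaths.add(new PathDepthInfo(info.path + "/" + child, info.depth + 1));
        }
      }
    }
  }
}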
http://git-wip-us.apache.org/repos/asf/hive/blob/9945b5d5/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHiveMetaStoreChecker.java
--
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHiveMetaStoreChecker.java b/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHiveMetaStoreChecker.java
index d7fbbce..90e6781 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHiveMetaStoreChecker.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHiveMetaStoreChecker.java
@@ -235,6 +235,34 @@ public class TestHiveMetaStoreChecker {
     checker.checkMetastore(dbName, tableName, null, new CheckResult());
   }
 
+  /*
+   * skip mode should not throw exception when an invalid partition directory
+   * is found. It should just ignore it
+   */
+  @Test
+  public void testSkipInvalidPartitionKeyName()
+      throws HiveException, AlreadyExistsException, IOException {
+    hive.getConf().set(HiveConf.ConfVars.HIVE_MSCK_PATH_VALIDATION.varname, "skip");
+    checker = new HiveMetaStoreChecker(hive);
+    Table table = createTestTable();
+    List<Partition> partitions = hive.getPartitions(table);
+    assertEquals(2, partitions.size());
+    // add a fake partition dir on fs
+    fs = partitions.get(0).getDataLocation().getFileSystem(hive.getConf());
+    Path fakePart =
+        new Path(table.getDataLocation().toString(), "fakedate=2009-01-01/fakecity=sanjose");
+    fs.mkdirs(fakePart);
+    fs.deleteOnExit(fakePart);
+    createPartitionsDirectoriesOnFS(table, 2);
+    CheckResult result = new CheckResult();
+    checker.checkMetastore(dbName, tableName, null, result);
+    assertEquals(Collections.<String> emptySet(), result.getTablesNotInMs());
+    assertEquals(Collections.<String> emptySet(), result.getTablesNotOnFs());
+    assertEquals(Collections.<PartitionResult> emptySet(), result.getPartitionsNotOnFs());
+    // only 2 valid partitions should be added
+    assertEquals(2, result.getPartitionsNotInMs().size());
+  }
+
+
   private Table createTestTable() throws AlreadyExistsException, HiveException {
 Database db = new Database();
 db.setName(dbName);
@@ -487,6 +515,30 @@ public class TestHiveMetaStoreChecker {
 CheckResult result = new CheckResult();
 checker.checkMetastore(dbName, tableName, null, result);
   }
+
+  /*
+   * In skip mode msck should ignore invalid partitions instead of
+   * throwing an exception
+   */
+  @Test
+  public void testSkipInvalidOrderForPartitionKeysOnFS()
+  throws AlreadyExistsException, 

[hive] Git Push Summary

2017-04-03 Thread pxiong
Repository: hive
Updated Branches:
  refs/heads/branch-2.3 [created] e28330535


[2/2] hive git commit: HIVE-16061: Some of console output is not printed to the beeline console

2017-04-03 Thread aihuaxu
HIVE-16061: Some of console output is not printed to the beeline console


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/91d25b48
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/91d25b48
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/91d25b48

Branch: refs/heads/master
Commit: 91d25b48abe43642eebec640f3d093bb8410f768
Parents: a4a0ae1
Author: Aihua Xu 
Authored: Wed Mar 22 14:22:34 2017 -0400
Committer: Aihua Xu 
Committed: Mon Apr 3 15:59:00 2017 -0400

--
 .../org/apache/hadoop/hive/common/LogUtils.java |  30 ++
 .../operation/TestOperationLoggingLayout.java   |  16 -
 .../java/org/apache/hadoop/hive/ql/Driver.java  |  16 -
 .../apache/hadoop/hive/ql/exec/TaskRunner.java  |   7 -
 .../hadoop/hive/ql/exec/mr/ExecDriver.java  |   4 +
 .../hadoop/hive/ql/exec/mr/MapredLocalTask.java |  14 +-
 .../hadoop/hive/ql/log/LogDivertAppender.java   | 249 ++
 .../ql/parse/ColumnStatsSemanticAnalyzer.java   |   5 -
 .../hadoop/hive/ql/session/OperationLog.java|  70 +--
 .../beeline/drop_with_concurrency.q.out |  94 ++--
 .../beeline/escape_comments.q.out   | 478 +--
 .../cli/operation/HiveCommandOperation.java |   8 +-
 .../cli/operation/LogDivertAppender.java| 249 --
 .../hive/service/cli/operation/Operation.java   |  73 +--
 .../service/cli/operation/OperationManager.java |  30 +-
 .../service/cli/operation/SQLOperation.java |  20 +-
 16 files changed, 585 insertions(+), 778 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/91d25b48/common/src/java/org/apache/hadoop/hive/common/LogUtils.java
--
diff --git a/common/src/java/org/apache/hadoop/hive/common/LogUtils.java b/common/src/java/org/apache/hadoop/hive/common/LogUtils.java
index 01b2e7c..1d6b55e 100644
--- a/common/src/java/org/apache/hadoop/hive/common/LogUtils.java
+++ b/common/src/java/org/apache/hadoop/hive/common/LogUtils.java
@@ -24,9 +24,11 @@ import java.net.URL;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
+import org.apache.log4j.MDC;
 import org.apache.logging.log4j.LogManager;
 import org.apache.logging.log4j.core.config.Configurator;
 import org.apache.logging.log4j.core.impl.Log4jContextFactory;
+import org.apache.logging.log4j.spi.DefaultThreadContextMap;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -47,6 +49,13 @@ public class LogUtils {
   private static final String KEY_TO_MASK_WITH = "password";
   private static final String MASKED_VALUE = "###_MASKED_###";
 
+  /**
+   * Constants of the key strings for the logging ThreadContext.
+   */
+  public static final String SESSIONID_LOG_KEY = "sessionId";
+  public static final String QUERYID_LOG_KEY = "queryId";
+  public static final String OPERATIONLOG_LEVEL_KEY = "operationLogLevel";
+
   @SuppressWarnings("serial")
   public static class LogInitializationException extends Exception {
 public LogInitializationException(String msg) {
@@ -109,6 +118,8 @@ public class LogUtils {
       System.setProperty(HiveConf.ConfVars.HIVEQUERYID.toString(), queryId);
     }
     final boolean async = checkAndSetAsyncLogging(conf);
+    // required for MDC based routing appender so that child threads can inherit the MDC context
+    System.setProperty(DefaultThreadContextMap.INHERITABLE_MAP, "true");
     Configurator.initialize(null, log4jFileName);
     logConfigLocation(conf);
     return "Logging initialized using configuration in " + log4jConfigFile + " Async: " + async;
@@ -151,6 +162,7 @@ public class LogUtils {
     }
     if (hive_l4j != null) {
       final boolean async = checkAndSetAsyncLogging(conf);
+      System.setProperty(DefaultThreadContextMap.INHERITABLE_MAP, "true");
       Configurator.initialize(null, hive_l4j.toString());
       logConfigLocation(conf);
       return (logMessage + "\n" + "Logging initialized using configuration in " + hive_l4j +
@@ -192,4 +204,22 @@ public class LogUtils {
     }
     return value;
   }
+
+  /**
+   * Register logging context so that log system can print QueryId, SessionId, etc for each message
+   */
+  public static void registerLoggingContext(Configuration conf) {
+    MDC.put(SESSIONID_LOG_KEY, HiveConf.getVar(conf, HiveConf.ConfVars.HIVESESSIONID));
+    MDC.put(QUERYID_LOG_KEY, HiveConf.getVar(conf, HiveConf.ConfVars.HIVEQUERYID));
+    if (HiveConf.getBoolVar(conf, HiveConf.ConfVars.HIVE_SERVER2_LOGGING_OPERATION_ENABLED)) {
+      MDC.put(OPERATIONLOG_LEVEL_KEY, HiveConf.getVar(conf, HiveConf.ConfVars.HIVE_SERVER2_LOGGING_OPERATION_LEVEL));
+    }
+  }
+
+  /**
+   * 

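To make the MDC plumbing concrete: a minimal sketch of how the registered keys could surface in log output, assuming a log4j2 pattern layout that references them (the pattern string is illustrative; Hive's real layouts live in its log4j2 properties files):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.common.LogUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class MdcLoggingSketch {
  private static final Logger LOG = LoggerFactory.getLogger(MdcLoggingSketch.class);

  public static void main(String[] args) {
    Configuration conf = new Configuration();
    // Pushes sessionId/queryId (and the operation log level, when HS2 operation
    // logging is enabled) into the MDC, so a layout such as
    // "%d %p [%X{sessionId}] [%X{queryId}] %m%n" can print them per message.
    LogUtils.registerLoggingContext(conf);
    LOG.info("this message carries the MDC context registered above");
  }
}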
[1/2] hive git commit: HIVE-16061: Some of console output is not printed to the beeline console

2017-04-03 Thread aihuaxu
Repository: hive
Updated Branches:
  refs/heads/master a4a0ae1ff -> 91d25b48a


http://git-wip-us.apache.org/repos/asf/hive/blob/91d25b48/service/src/java/org/apache/hive/service/cli/operation/Operation.java
--
diff --git a/service/src/java/org/apache/hive/service/cli/operation/Operation.java b/service/src/java/org/apache/hive/service/cli/operation/Operation.java
index 11a820f..0b27608 100644
--- a/service/src/java/org/apache/hive/service/cli/operation/Operation.java
+++ b/service/src/java/org/apache/hive/service/cli/operation/Operation.java
@@ -27,6 +27,7 @@ import java.util.Set;
 import java.util.concurrent.Future;
 import java.util.concurrent.TimeUnit;
 
+import org.apache.hadoop.hive.common.LogUtils;
 import org.apache.hadoop.hive.common.metrics.common.Metrics;
 import org.apache.hadoop.hive.common.metrics.common.MetricsConstant;
 import org.apache.hadoop.hive.common.metrics.common.MetricsFactory;
@@ -35,7 +36,6 @@ import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.QueryState;
 import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse;
 import org.apache.hadoop.hive.ql.session.OperationLog;
-import org.apache.hadoop.hive.ql.session.SessionState;
 import org.apache.hive.service.cli.FetchOrientation;
 import org.apache.hive.service.cli.HiveSQLException;
 import org.apache.hive.service.cli.OperationHandle;
@@ -46,17 +46,12 @@ import org.apache.hive.service.cli.RowSet;
 import org.apache.hive.service.cli.TableSchema;
 import org.apache.hive.service.cli.session.HiveSession;
 import org.apache.hive.service.rpc.thrift.TProtocolVersion;
-import org.apache.logging.log4j.ThreadContext;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 import com.google.common.collect.Sets;
 
 public abstract class Operation {
-  // Constants of the key strings for the log4j ThreadContext.
-  public static final String SESSIONID_LOG_KEY = "sessionId";
-  public static final String QUERYID_LOG_KEY = "queryId";
-
   protected final HiveSession parentSession;
   private volatile OperationState state = OperationState.INITIALIZED;
   private volatile MetricsScope currentStateScope;
@@ -212,44 +207,9 @@ public abstract class Operation {
 
   protected void createOperationLog() {
     if (parentSession.isOperationLogEnabled()) {
-      File operationLogFile = new File(parentSession.getOperationLogSessionDir(),
-          opHandle.getHandleIdentifier().toString());
+      File operationLogFile = new File(parentSession.getOperationLogSessionDir(), queryState.getQueryId());
       isOperationLogEnabled = true;
 
-      // create log file
-      try {
-        if (operationLogFile.exists()) {
-          LOG.warn("The operation log file should not exist, but it is already there: " +
-              operationLogFile.getAbsolutePath());
-          operationLogFile.delete();
-        }
-        if (!operationLogFile.getParentFile().exists()) {
-          LOG.warn("Operations log directory for this session does not exist, it could have been deleted " +
-              "externally. Recreating the directory for future queries in this session but the older operation " +
-              "logs for this session are no longer available");
-          if (!operationLogFile.getParentFile().mkdir()) {
-            LOG.warn("Log directory for this session could not be created, disabling " +
-                "operation logs: " + operationLogFile.getParentFile().getAbsolutePath());
-            isOperationLogEnabled = false;
-            return;
-          }
-        }
-        if (!operationLogFile.createNewFile()) {
-          // the log file already exists and cannot be deleted.
-          // If it can be read/written, keep its contents and use it.
-          if (!operationLogFile.canRead() || !operationLogFile.canWrite()) {
-            LOG.warn("The already existed operation log file cannot be recreated, " +
-                "and it cannot be read or written: " + operationLogFile.getAbsolutePath());
-            isOperationLogEnabled = false;
-            return;
-          }
-        }
-      } catch (Exception e) {
-        LOG.warn("Unable to create operation log file: " + operationLogFile.getAbsolutePath(), e);
-        isOperationLogEnabled = false;
-        return;
-      }
-
       // create OperationLog object with above log file
       try {
         operationLog = new OperationLog(opHandle.toString(), operationLogFile, parentSession.getHiveConf());
@@ -259,15 +219,6 @@ public abstract class Operation {
         isOperationLogEnabled = false;
         return;
       }
-
-      // register this operationLog to current thread
-      OperationLog.setCurrentOperationLog(operationLog);
-    }
-  }
-
-  protected void unregisterOperationLog() {
-    if (isOperationLogEnabled) {
-      OperationLog.removeCurrentOperationLog();
     }
   }
 
@@ -277,22 +228,7 @@ public abstract class Operation {
    */
   protected void 

hive git commit: HIVE-16225: Memory leak in webhcat service (FileSystem CACHE entries) (Daniel Dai, reviewed by Thejas Nair)

2017-04-03 Thread daijy
Repository: hive
Updated Branches:
  refs/heads/master 5bf52be60 -> a4a0ae1ff


HIVE-16225: Memory leak in webhcat service (FileSystem CACHE entries) (Daniel Dai, reviewed by Thejas Nair)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/a4a0ae1f
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/a4a0ae1f
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/a4a0ae1f

Branch: refs/heads/master
Commit: a4a0ae1ffd3e7d12e3ab4d94c7cf2af0b22c02bd
Parents: 5bf52be
Author: Daniel Dai 
Authored: Mon Apr 3 12:01:26 2017 -0700
Committer: Daniel Dai 
Committed: Mon Apr 3 12:01:26 2017 -0700

--
 .../hcatalog/templeton/DeleteDelegator.java |  6 ++-
 .../hcatalog/templeton/LauncherDelegator.java   | 47 +---
 .../hive/hcatalog/templeton/ListDelegator.java  |  6 ++-
 .../hcatalog/templeton/SecureProxySupport.java  |  3 ++
 .../hcatalog/templeton/StatusDelegator.java |  6 ++-
 .../hcatalog/templeton/tool/TempletonUtils.java |  1 +
 6 files changed, 51 insertions(+), 18 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/a4a0ae1f/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/DeleteDelegator.java
--
diff --git a/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/DeleteDelegator.java b/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/DeleteDelegator.java
index 4b2dfec..622f92d 100644
--- a/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/DeleteDelegator.java
+++ b/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/DeleteDelegator.java
@@ -24,6 +24,7 @@ import java.util.List;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.apache.hadoop.hive.shims.HadoopShims.WebHCatJTShim;
+import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.hive.shims.ShimLoader;
 import org.apache.hadoop.mapred.JobID;
 import org.apache.hadoop.security.UserGroupInformation;
@@ -41,10 +42,11 @@ public class DeleteDelegator {
   public QueueStatusBean run(String user, String id)
     throws NotAuthorizedException, BadParam, IOException, InterruptedException
   {
-    UserGroupInformation ugi = UgiFactory.getUgi(user);
+    UserGroupInformation ugi = null;
     WebHCatJTShim tracker = null;
     JobState state = null;
     try {
+      ugi = UgiFactory.getUgi(user);
       tracker = ShimLoader.getHadoopShims().getWebHCatShim(appConf, ugi);
       JobID jobid = StatusDelegator.StringToJobID(id);
       if (jobid == null)
@@ -69,6 +71,8 @@ public class DeleteDelegator {
       tracker.close();
       if (state != null)
         state.close();
+      if (ugi != null)
+        FileSystem.closeAllForUGI(ugi);
     }
   }
 }

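The recurring shape of this patch: acquire the UGI inside the try block and always call FileSystem.closeAllForUGI in a finally, so the FileSystem CACHE entries keyed by that per-request UGI are released. A generic sketch of the idiom (the work placeholder is not WebHCat code):

import java.io.IOException;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.security.UserGroupInformation;

public class UgiCleanupSketch {
  static void runAs(String user) throws IOException {
    UserGroupInformation ugi = null;
    try {
      ugi = UserGroupInformation.createRemoteUser(user);
      // ... do work that opens FileSystem objects under this UGI ...
    } finally {
      if (ugi != null) {
        // Evicts this UGI's entries from the FileSystem CACHE; without this,
        // every request leaked one cache entry (the bug in HIVE-16225).
        FileSystem.closeAllForUGI(ugi);
      }
    }
  }

  public static void main(String[] args) throws IOException {
    runAs("alice");
  }
}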
http://git-wip-us.apache.org/repos/asf/hive/blob/a4a0ae1f/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/LauncherDelegator.java
--
diff --git a/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/LauncherDelegator.java b/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/LauncherDelegator.java
index 1455316..9bea897 100644
--- a/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/LauncherDelegator.java
+++ b/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/LauncherDelegator.java
@@ -28,6 +28,7 @@ import java.util.concurrent.TimeoutException;
 
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
+import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.shims.HadoopShimsSecure;
@@ -178,8 +179,9 @@ public class LauncherDelegator {
                List<String> args, TempletonControllerJob controllerJob)
     throws NotAuthorizedException, BusyException,
     IOException, QueueException {
+    UserGroupInformation ugi = null;
     try {
-      UserGroupInformation ugi = UgiFactory.getUgi(user);
+      ugi = UgiFactory.getUgi(user);
 
       final long startTime = System.nanoTime();
 
@@ -197,6 +199,10 @@ public class LauncherDelegator {
       return new EnqueueBean(id);
     } catch (InterruptedException e) {
       throw new QueueException("Unable to launch job " + e);
+    } finally {
+      if (ugi != null) {
+        FileSystem.closeAllForUGI(ugi);
+      }
     }
   }
 
 
@@ -344,24 +350,35 @@ public class LauncherDelegator {
    */
   private String getShimLibjars() {
     WebHCatJTShim shim = null;
+    UserGroupInformation ugi = null;

hive git commit: HIVE-15923: Hive default partition causes errors in get partitions (Sergey Shelukhin, reviewed by Pengcheng Xiong, Aihua Xu)

2017-04-03 Thread pxiong
Repository: hive
Updated Branches:
  refs/heads/branch-2 a5d6d31be -> e28330535


HIVE-15923: Hive default partition causes errors in get partitions (Sergey 
Shelukhin, reviewed by Pengcheng Xiong, Aihua Xu)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/e2833053
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/e2833053
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/e2833053

Branch: refs/heads/branch-2
Commit: e28330535978d93f093f7f4208358d57d688e76a
Parents: a5d6d31
Author: Pengcheng Xiong 
Authored: Mon Apr 3 11:43:35 2017 -0700
Committer: Pengcheng Xiong 
Committed: Mon Apr 3 11:44:16 2017 -0700

--
 .../exec/ExprNodeConstantDefaultEvaluator.java  | 55 -
 .../hive/ql/exec/ExprNodeEvaluatorFactory.java  |  6 --
 .../hadoop/hive/ql/exec/FunctionRegistry.java   | 14 
 .../hive/ql/optimizer/ppr/PartitionPruner.java  |  3 +-
 .../hive/ql/parse/DDLSemanticAnalyzer.java  | 27 +-
 .../ql/parse/ReplicationSemanticAnalyzer.java   |  5 +-
 .../ql/plan/ExprNodeConstantDefaultDesc.java| 86 
 .../hive/ql/udf/generic/GenericUDFOPEqual.java  |  9 --
 .../ql/udf/generic/GenericUDFOPNotEqual.java|  9 --
 .../drop_default_partition_filter.q |  7 ++
 .../clientpositive/drop_partitions_filter4.q| 10 +++
 .../clientpositive/partitions_filter_default.q  | 14 
 .../drop_default_partition_filter.q.out | 23 ++
 .../drop_partitions_filter4.q.out   | 71 
 .../partitions_filter_default.q.out | 67 +++
 15 files changed, 232 insertions(+), 174 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/e2833053/ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeConstantDefaultEvaluator.java
--
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeConstantDefaultEvaluator.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeConstantDefaultEvaluator.java
deleted file mode 100644
index f53c3e3..0000000
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeConstantDefaultEvaluator.java
+++ /dev/null
@@ -1,55 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.hive.ql.exec;
-
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
-import org.apache.hadoop.hive.ql.plan.ExprNodeConstantDefaultDesc;
-import org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc;
-import org.apache.hadoop.hive.serde2.objectinspector.ConstantObjectInspector;
-import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
-
-/**
- * ExprNodeConstantEvaluator.
- *
- */
-public class ExprNodeConstantDefaultEvaluator extends ExprNodeEvaluator<ExprNodeConstantDefaultDesc> {
-
-  transient ObjectInspector writableObjectInspector;
-
-  public ExprNodeConstantDefaultEvaluator(ExprNodeConstantDefaultDesc expr) {
-    this(expr, null);
-  }
-
-  public ExprNodeConstantDefaultEvaluator(ExprNodeConstantDefaultDesc expr, Configuration conf) {
-    super(expr, conf);
-    writableObjectInspector = expr.getWritableObjectInspector();
-  }
-
-  @Override
-  public ObjectInspector initialize(ObjectInspector rowInspector) throws HiveException {
-    return writableObjectInspector;
-  }
-
-  @Override
-  protected Object _evaluate(Object row, int version) throws HiveException {
-    return expr;
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/hive/blob/e2833053/ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeEvaluatorFactory.java
--
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeEvaluatorFactory.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeEvaluatorFactory.java
index 34aec55..cc40cae 100755
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeEvaluatorFactory.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeEvaluatorFactory.java
@@ -24,7 +24,6 @@

hive git commit: HIVE-15923: Hive default partition causes errors in get partitions (Sergey Shelukhin, reviewed by Pengcheng Xiong, Aihua Xu)

2017-04-03 Thread pxiong
Repository: hive
Updated Branches:
  refs/heads/master bf98700a7 -> 5bf52be60


HIVE-15923: Hive default partition causes errors in get partitions (Sergey 
Shelukhin, reviewed by Pengcheng Xiong, Aihua Xu)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/5bf52be6
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/5bf52be6
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/5bf52be6

Branch: refs/heads/master
Commit: 5bf52be60cb54d66133b336308344780f0a82c77
Parents: bf98700
Author: Pengcheng Xiong 
Authored: Mon Apr 3 11:43:35 2017 -0700
Committer: Pengcheng Xiong 
Committed: Mon Apr 3 11:43:35 2017 -0700

--
 .../exec/ExprNodeConstantDefaultEvaluator.java  | 55 -
 .../hive/ql/exec/ExprNodeEvaluatorFactory.java  |  6 --
 .../hadoop/hive/ql/exec/FunctionRegistry.java   | 14 
 .../hive/ql/optimizer/ppr/PartitionPruner.java  |  3 +-
 .../hive/ql/parse/DDLSemanticAnalyzer.java  | 27 +-
 .../ql/parse/ReplicationSemanticAnalyzer.java   |  5 +-
 .../ql/plan/ExprNodeConstantDefaultDesc.java| 86 
 .../hive/ql/udf/generic/GenericUDFOPEqual.java  |  9 --
 .../ql/udf/generic/GenericUDFOPNotEqual.java|  9 --
 .../drop_default_partition_filter.q |  7 ++
 .../clientpositive/drop_partitions_filter4.q| 10 +++
 .../clientpositive/partitions_filter_default.q  | 14 
 .../drop_default_partition_filter.q.out | 23 ++
 .../drop_partitions_filter4.q.out   | 71 
 .../partitions_filter_default.q.out | 67 +++
 15 files changed, 232 insertions(+), 174 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/5bf52be6/ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeConstantDefaultEvaluator.java
--
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeConstantDefaultEvaluator.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeConstantDefaultEvaluator.java
deleted file mode 100644
index f53c3e3..0000000
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeConstantDefaultEvaluator.java
+++ /dev/null
@@ -1,55 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.hive.ql.exec;
-
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
-import org.apache.hadoop.hive.ql.plan.ExprNodeConstantDefaultDesc;
-import org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc;
-import org.apache.hadoop.hive.serde2.objectinspector.ConstantObjectInspector;
-import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
-
-/**
- * ExprNodeConstantEvaluator.
- *
- */
-public class ExprNodeConstantDefaultEvaluator extends ExprNodeEvaluator<ExprNodeConstantDefaultDesc> {
-
-  transient ObjectInspector writableObjectInspector;
-
-  public ExprNodeConstantDefaultEvaluator(ExprNodeConstantDefaultDesc expr) {
-    this(expr, null);
-  }
-
-  public ExprNodeConstantDefaultEvaluator(ExprNodeConstantDefaultDesc expr, Configuration conf) {
-    super(expr, conf);
-    writableObjectInspector = expr.getWritableObjectInspector();
-  }
-
-  @Override
-  public ObjectInspector initialize(ObjectInspector rowInspector) throws HiveException {
-    return writableObjectInspector;
-  }
-
-  @Override
-  protected Object _evaluate(Object row, int version) throws HiveException {
-    return expr;
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/hive/blob/5bf52be6/ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeEvaluatorFactory.java
--
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeEvaluatorFactory.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeEvaluatorFactory.java
index 34aec55..cc40cae 100755
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeEvaluatorFactory.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeEvaluatorFactory.java
@@ -24,7 +24,6 @@ 

hive git commit: HIVE-16206: Make Codahale metrics reporters pluggable (Sunitha Beeram via Carl Steinbach)

2017-04-03 Thread cws
Repository: hive
Updated Branches:
  refs/heads/master 6a82a898e -> bf98700a7


HIVE-16206: Make Codahale metrics reporters pluggable (Sunitha Beeram via Carl 
Steinbach)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/bf98700a
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/bf98700a
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/bf98700a

Branch: refs/heads/master
Commit: bf98700a760ea1cca30f236e7dce10272fffdd48
Parents: 6a82a89
Author: Carl Steinbach 
Authored: Mon Apr 3 10:09:53 2017 -0700
Committer: Carl Steinbach 
Committed: Mon Apr 3 10:09:53 2017 -0700

--
 .../metrics/metrics2/CodahaleMetrics.java   | 192 +--
 .../metrics/metrics2/CodahaleReporter.java  |  29 +++
 .../metrics2/ConsoleMetricsReporter.java|  55 ++
 .../metrics/metrics2/JmxMetricsReporter.java|  56 ++
 .../metrics2/JsonFileMetricsReporter.java   | 136 +
 .../metrics/metrics2/Metrics2Reporter.java  |  62 ++
 .../org/apache/hadoop/hive/conf/HiveConf.java   |  22 ++-
 .../metrics/metrics2/TestCodahaleMetrics.java   |   7 +-
 .../metrics2/TestCodahaleReportersConf.java | 145 ++
 9 files changed, 589 insertions(+), 115 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/bf98700a/common/src/java/org/apache/hadoop/hive/common/metrics/metrics2/CodahaleMetrics.java
--
diff --git a/common/src/java/org/apache/hadoop/hive/common/metrics/metrics2/CodahaleMetrics.java b/common/src/java/org/apache/hadoop/hive/common/metrics/metrics2/CodahaleMetrics.java
index e8abf6c..2d6c1b4 100644
--- a/common/src/java/org/apache/hadoop/hive/common/metrics/metrics2/CodahaleMetrics.java
+++ b/common/src/java/org/apache/hadoop/hive/common/metrics/metrics2/CodahaleMetrics.java
@@ -44,6 +44,8 @@ import com.google.common.cache.CacheLoader;
 import com.google.common.cache.LoadingCache;
 import com.google.common.collect.Lists;
 
+import java.lang.reflect.Constructor;
+import java.lang.reflect.InvocationTargetException;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.permission.FsPermission;
@@ -190,22 +192,8 @@ public class CodahaleMetrics implements org.apache.hadoop.hive.common.metrics.co
     registerAll("threads", new ThreadStatesGaugeSet());
     registerAll("classLoading", new ClassLoadingGaugeSet());
 
-    //Metrics reporter
-    Set<MetricsReporting> finalReporterList = new HashSet<MetricsReporting>();
-    List<String> metricsReporterNames = Lists.newArrayList(
-        Splitter.on(",").trimResults().omitEmptyStrings().split(conf.getVar(HiveConf.ConfVars.HIVE_METRICS_REPORTER)));
-
-    if(metricsReporterNames != null) {
-      for (String metricsReportingName : metricsReporterNames) {
-        try {
-          MetricsReporting reporter = MetricsReporting.valueOf(metricsReportingName.trim().toUpperCase());
-          finalReporterList.add(reporter);
-        } catch (IllegalArgumentException e) {
-          LOGGER.warn("Metrics reporter skipped due to invalid configured reporter: " + metricsReportingName);
-        }
-      }
-    }
-    initReporting(finalReporterList);
+    //initialize reporters
+    initReporting();
   }
 
 
@@ -385,107 +373,99 @@ public class CodahaleMetrics implements org.apache.hadoop.hive.common.metrics.co
   }
 
   /**
-   * Should be only called once to initialize the reporters
+   * Initializes reporters from HIVE_CODAHALE_METRICS_REPORTER_CLASSES or HIVE_METRICS_REPORTER if the former is not defined.
+   * Note: if both confs are defined, only HIVE_CODAHALE_METRICS_REPORTER_CLASSES will be used.
    */
-  private void initReporting(Set<MetricsReporting> reportingSet) {
-    for (MetricsReporting reporting : reportingSet) {
-      switch(reporting) {
-        case CONSOLE:
-          final ConsoleReporter consoleReporter = ConsoleReporter.forRegistry(metricRegistry)
-              .convertRatesTo(TimeUnit.SECONDS)
-              .convertDurationsTo(TimeUnit.MILLISECONDS)
-              .build();
-          consoleReporter.start(1, TimeUnit.SECONDS);
-          reporters.add(consoleReporter);
-          break;
-        case JMX:
-          final JmxReporter jmxReporter = JmxReporter.forRegistry(metricRegistry)
-              .convertRatesTo(TimeUnit.SECONDS)
-              .convertDurationsTo(TimeUnit.MILLISECONDS)
-              .build();
-          jmxReporter.start();
-          reporters.add(jmxReporter);
-          break;
-        case JSON_FILE:
-          final JsonFileReporter jsonFileReporter = new JsonFileReporter();
-          jsonFileReporter.start();
-          reporters.add(jsonFileReporter);
-          break;
-        case HADOOP2:
-          String applicationName = 

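A minimal sketch of exercising the new configuration, assuming the conf vars this patch introduces (HIVE_CODAHALE_METRICS_REPORTER_CLASSES taking a comma-separated list of CodahaleReporter implementation classes, and taking precedence over the legacy HIVE_METRICS_REPORTER):

import org.apache.hadoop.hive.conf.HiveConf;

public class CodahaleReporterConfSketch {
  public static void main(String[] args) {
    HiveConf conf = new HiveConf();
    // New style: fully qualified reporter classes, instantiated reflectively.
    conf.setVar(HiveConf.ConfVars.HIVE_CODAHALE_METRICS_REPORTER_CLASSES,
        "org.apache.hadoop.hive.common.metrics.metrics2.JsonFileMetricsReporter,"
            + "org.apache.hadoop.hive.common.metrics.metrics2.JmxMetricsReporter");
    // Legacy style: still honored when the class list above is unset.
    conf.setVar(HiveConf.ConfVars.HIVE_METRICS_REPORTER, "JSON_FILE,JMX");
  }
}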
hive git commit: HIVE-16312 : Flaky test: TestHCatClient.testTransportFailure (Barna Zsombor Klara via Ashutosh Chauhan)

2017-04-03 Thread hashutosh
Repository: hive
Updated Branches:
  refs/heads/master 7a0a39767 -> 6a82a898e


HIVE-16312 : Flaky test: TestHCatClient.testTransportFailure (Barna Zsombor 
Klara via Ashutosh Chauhan)

Signed-off-by: Ashutosh Chauhan 


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/6a82a898
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/6a82a898
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/6a82a898

Branch: refs/heads/master
Commit: 6a82a898e51f805ee49442d59beedf7b7c718fe7
Parents: 7a0a397
Author: Barna Zsombor Klara 
Authored: Mon Apr 3 10:02:14 2017 -0700
Committer: Ashutosh Chauhan 
Committed: Mon Apr 3 10:02:14 2017 -0700

--
 .../src/test/java/org/apache/hive/hcatalog/api/TestHCatClient.java | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/6a82a898/hcatalog/webhcat/java-client/src/test/java/org/apache/hive/hcatalog/api/TestHCatClient.java
--
diff --git a/hcatalog/webhcat/java-client/src/test/java/org/apache/hive/hcatalog/api/TestHCatClient.java b/hcatalog/webhcat/java-client/src/test/java/org/apache/hive/hcatalog/api/TestHCatClient.java
index b9cb067..86d3acb 100644
--- a/hcatalog/webhcat/java-client/src/test/java/org/apache/hive/hcatalog/api/TestHCatClient.java
+++ b/hcatalog/webhcat/java-client/src/test/java/org/apache/hive/hcatalog/api/TestHCatClient.java
@@ -434,7 +434,7 @@ public class TestHCatClient {
     HCatClient client = HCatClient.create(new Configuration(hcatConf));
     boolean isExceptionCaught = false;
     // Table creation with a long table name causes ConnectionFailureException
-    final String tableName = "Temptable" + new BigInteger(200, new Random()).toString(2);
+    final String tableName = "Temptable" + new BigInteger(260, new Random()).toString(2);
 
     ArrayList<HCatFieldSchema> cols = new ArrayList<HCatFieldSchema>();
     cols.add(new HCatFieldSchema("id", Type.INT, "id columns"));

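Why widening 200 bits to 260 helps: new BigInteger(numBits, rnd) is uniform over [0, 2^numBits), so toString(2) yields at most numBits binary digits and occasionally fewer when the high bits come up zero, which is what made the old name length flaky. The exact server-side limit isn't visible in this hunk, but 260 bits keeps the generated name past it even in the short-draw cases. A standalone illustration of the length distribution:

import java.math.BigInteger;
import java.util.Random;

public class TableNameLengthSketch {
  public static void main(String[] args) {
    Random rnd = new Random();
    for (int numBits : new int[] {200, 260}) {
      int min = Integer.MAX_VALUE;
      int max = 0;
      for (int i = 0; i < 100_000; i++) {
        // Same construction as the test: fixed prefix plus a random binary string.
        int len = ("Temptable" + new BigInteger(numBits, rnd).toString(2)).length();
        min = Math.min(min, len);
        max = Math.max(max, len);
      }
      System.out.println(numBits + " bits -> name length range [" + min + ", " + max + "]");
    }
  }
}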


hive git commit: HIVE-16359 : Update golden file for subquery_select.q

2017-04-03 Thread hashutosh
Repository: hive
Updated Branches:
  refs/heads/master 5e84e4999 -> 7a0a39767


HIVE-16359 : Update golden file for subquery_select.q

Signed-off-by: Ashutosh Chauhan 


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/7a0a3976
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/7a0a3976
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/7a0a3976

Branch: refs/heads/master
Commit: 7a0a39767541f81a2031cb3d250f44921d5da4c4
Parents: 5e84e49
Author: Ashutosh Chauhan 
Authored: Mon Apr 3 09:58:56 2017 -0700
Committer: Ashutosh Chauhan 
Committed: Mon Apr 3 09:58:56 2017 -0700

--
 .../test/results/clientpositive/llap/subquery_select.q.out   | 8 
 1 file changed, 4 insertions(+), 4 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/7a0a3976/ql/src/test/results/clientpositive/llap/subquery_select.q.out
--
diff --git a/ql/src/test/results/clientpositive/llap/subquery_select.q.out b/ql/src/test/results/clientpositive/llap/subquery_select.q.out
index 8e6cd8b..cbd01b8 100644
--- a/ql/src/test/results/clientpositive/llap/subquery_select.q.out
+++ b/ql/src/test/results/clientpositive/llap/subquery_select.q.out
@@ -4788,9 +4788,9 @@ STAGE PLANS:
                   Statistics: Num rows: 26 Data size: 104 Basic stats: COMPLETE Column stats: COMPLETE
                   Filter Operator
                     predicate: p_size BETWEEN 1 AND 20 (type: boolean)
-                    Statistics: Num rows: 2 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
+                    Statistics: Num rows: 8 Data size: 32 Basic stats: COMPLETE Column stats: COMPLETE
                     Select Operator
-                      Statistics: Num rows: 2 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
+                      Statistics: Num rows: 8 Data size: 32 Basic stats: COMPLETE Column stats: COMPLETE
                       Group By Operator
                         aggregations: count()
                         mode: hash
@@ -4889,11 +4889,11 @@ STAGE PLANS:
                   Statistics: Num rows: 26 Data size: 208 Basic stats: COMPLETE Column stats: COMPLETE
                   Filter Operator
                     predicate: p_size BETWEEN 1 AND 20 (type: boolean)
-                    Statistics: Num rows: 2 Data size: 16 Basic stats: COMPLETE Column stats: COMPLETE
+                    Statistics: Num rows: 8 Data size: 64 Basic stats: COMPLETE Column stats: COMPLETE
                     Select Operator
                       expressions: p_partkey (type: int)
                       outputColumnNames: p_partkey
-                      Statistics: Num rows: 2 Data size: 16 Basic stats: COMPLETE Column stats: COMPLETE
+                      Statistics: Num rows: 8 Data size: 64 Basic stats: COMPLETE Column stats: COMPLETE
                       Group By Operator
                         aggregations: count(p_partkey)
                         mode: hash