Repository: hive Updated Branches: refs/heads/master 34ba81ae7 -> 553374447
http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/fast/LazyBinaryDeserializeRead.java ---------------------------------------------------------------------- diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/fast/LazyBinaryDeserializeRead.java b/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/fast/LazyBinaryDeserializeRead.java index 1f3806e..56434a7 100644 --- a/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/fast/LazyBinaryDeserializeRead.java +++ b/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/fast/LazyBinaryDeserializeRead.java @@ -21,8 +21,8 @@ package org.apache.hadoop.hive.serde2.lazybinary.fast; import java.io.EOFException; import java.io.IOException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.common.type.HiveDecimal; import org.apache.hadoop.hive.serde2.fast.DeserializeRead; import org.apache.hadoop.hive.serde2.io.DateWritable; @@ -57,7 +57,7 @@ import org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeInfo; * called. 
*/ public class LazyBinaryDeserializeRead implements DeserializeRead { - public static final Log LOG = LogFactory.getLog(LazyBinaryDeserializeRead.class.getName()); + public static final Logger LOG = LoggerFactory.getLogger(LazyBinaryDeserializeRead.class.getName()); private PrimitiveTypeInfo[] primitiveTypeInfos; http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/fast/LazyBinarySerializeWrite.java ---------------------------------------------------------------------- diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/fast/LazyBinarySerializeWrite.java b/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/fast/LazyBinarySerializeWrite.java index 253b514..ebe4181 100644 --- a/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/fast/LazyBinarySerializeWrite.java +++ b/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/fast/LazyBinarySerializeWrite.java @@ -22,8 +22,8 @@ import java.io.IOException; import java.sql.Date; import java.sql.Timestamp; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.common.type.HiveChar; import org.apache.hadoop.hive.common.type.HiveDecimal; import org.apache.hadoop.hive.common.type.HiveIntervalDayTime; @@ -46,7 +46,7 @@ import org.apache.hive.common.util.DateUtils; * This is an alternative way to serialize than what is provided by LazyBinarySerDe. 
*/ public class LazyBinarySerializeWrite implements SerializeWrite { - public static final Log LOG = LogFactory.getLog(LazyBinarySerializeWrite.class.getName()); + public static final Logger LOG = LoggerFactory.getLogger(LazyBinarySerializeWrite.class.getName()); private Output output; http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/ObjectInspectorUtils.java ---------------------------------------------------------------------- diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/ObjectInspectorUtils.java b/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/ObjectInspectorUtils.java index 09e9108..56597a2 100644 --- a/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/ObjectInspectorUtils.java +++ b/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/ObjectInspectorUtils.java @@ -27,8 +27,8 @@ import java.util.Iterator; import java.util.List; import java.util.Map; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.serde.serdeConstants; import org.apache.hadoop.hive.serde2.SerDeException; import org.apache.hadoop.hive.serde2.io.DateWritable; @@ -89,7 +89,7 @@ import org.apache.hadoop.util.StringUtils; */ public final class ObjectInspectorUtils { - protected final static Log LOG = LogFactory.getLog(ObjectInspectorUtils.class.getName()); + private static final Logger LOG = LoggerFactory.getLogger(ObjectInspectorUtils.class.getName()); /** * This enum controls how we copy primitive objects. 
http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/StandardStructObjectInspector.java ---------------------------------------------------------------------- diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/StandardStructObjectInspector.java b/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/StandardStructObjectInspector.java index 87a072c..227e8a9 100644 --- a/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/StandardStructObjectInspector.java +++ b/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/StandardStructObjectInspector.java @@ -22,8 +22,8 @@ import java.util.ArrayList; import java.util.Arrays; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * ListStructObjectInspector works on struct data that is stored as a Java List @@ -39,8 +39,8 @@ import org.apache.commons.logging.LogFactory; public class StandardStructObjectInspector extends SettableStructObjectInspector { - public static final Log LOG = LogFactory - .getLog(StandardStructObjectInspector.class.getName()); + public static final Logger LOG = LoggerFactory + .getLogger(StandardStructObjectInspector.class.getName()); protected static class MyField implements StructField { protected int fieldID; http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/PrimitiveObjectInspectorUtils.java ---------------------------------------------------------------------- diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/PrimitiveObjectInspectorUtils.java b/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/PrimitiveObjectInspectorUtils.java index 24ab4d2..932ae0b 100644 --- 
a/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/PrimitiveObjectInspectorUtils.java +++ b/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/PrimitiveObjectInspectorUtils.java @@ -27,8 +27,8 @@ import java.sql.Timestamp; import java.util.HashMap; import java.util.Map; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.common.type.HiveChar; import org.apache.hadoop.hive.common.type.HiveDecimal; import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth; @@ -69,7 +69,7 @@ import org.apache.hadoop.io.WritableUtils; * ObjectInspector to return to the caller of SerDe2.getObjectInspector(). */ public final class PrimitiveObjectInspectorUtils { - private static Log LOG = LogFactory.getLog(PrimitiveObjectInspectorUtils.class); + private static final Logger LOG = LoggerFactory.getLogger(PrimitiveObjectInspectorUtils.class); /** * TypeEntry stores information about a Hive Primitive TypeInfo. 
http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableHiveVarcharObjectInspector.java ---------------------------------------------------------------------- diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableHiveVarcharObjectInspector.java b/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableHiveVarcharObjectInspector.java index 8a54512..8ac2d84 100644 --- a/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableHiveVarcharObjectInspector.java +++ b/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableHiveVarcharObjectInspector.java @@ -17,9 +17,9 @@ */ package org.apache.hadoop.hive.serde2.objectinspector.primitive; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hive.common.type.HiveChar; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.common.type.HiveVarchar; import org.apache.hadoop.hive.serde2.io.HiveCharWritable; import org.apache.hadoop.hive.serde2.io.HiveVarcharWritable; @@ -31,7 +31,7 @@ import org.apache.hive.common.util.HiveStringUtils; public class WritableHiveVarcharObjectInspector extends AbstractPrimitiveWritableObjectInspector implements SettableHiveVarcharObjectInspector { - private static final Log LOG = LogFactory.getLog(WritableHiveVarcharObjectInspector.class); + private static final Logger LOG = LoggerFactory.getLogger(WritableHiveVarcharObjectInspector.class); // no-arg ctor required for Kyro serialization public WritableHiveVarcharObjectInspector() { http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/serde/src/java/org/apache/hadoop/hive/serde2/thrift/TBinarySortableProtocol.java ---------------------------------------------------------------------- diff --git 
a/serde/src/java/org/apache/hadoop/hive/serde2/thrift/TBinarySortableProtocol.java b/serde/src/java/org/apache/hadoop/hive/serde2/thrift/TBinarySortableProtocol.java index 61f770d..7344ec1 100644 --- a/serde/src/java/org/apache/hadoop/hive/serde2/thrift/TBinarySortableProtocol.java +++ b/serde/src/java/org/apache/hadoop/hive/serde2/thrift/TBinarySortableProtocol.java @@ -23,8 +23,8 @@ import java.nio.ByteBuffer; import java.util.Arrays; import java.util.Properties; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.serde.serdeConstants; import org.apache.hadoop.io.Text; @@ -71,7 +71,7 @@ import org.apache.thrift.transport.TTransport; public class TBinarySortableProtocol extends TProtocol implements ConfigurableTProtocol, WriteNullsProtocol, WriteTextProtocol { - static final Log LOG = LogFactory.getLog(TBinarySortableProtocol.class + static final Logger LOG = LoggerFactory.getLogger(TBinarySortableProtocol.class .getName()); static byte ORDERED_TYPE = (byte) -1; http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/serde/src/java/org/apache/hadoop/hive/serde2/thrift/TCTLSeparatedProtocol.java ---------------------------------------------------------------------- diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/thrift/TCTLSeparatedProtocol.java b/serde/src/java/org/apache/hadoop/hive/serde2/thrift/TCTLSeparatedProtocol.java index 63f3287..6144052 100644 --- a/serde/src/java/org/apache/hadoop/hive/serde2/thrift/TCTLSeparatedProtocol.java +++ b/serde/src/java/org/apache/hadoop/hive/serde2/thrift/TCTLSeparatedProtocol.java @@ -28,8 +28,8 @@ import java.util.StringTokenizer; import java.util.regex.Matcher; import java.util.regex.Pattern; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import 
org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.serde.serdeConstants; import org.apache.hadoop.io.Text; @@ -55,7 +55,7 @@ import org.apache.thrift.transport.TTransportException; public class TCTLSeparatedProtocol extends TProtocol implements ConfigurableTProtocol, WriteNullsProtocol, SkippableTProtocol { - static final Log LOG = LogFactory.getLog(TCTLSeparatedProtocol.class + static final Logger LOG = LoggerFactory.getLogger(TCTLSeparatedProtocol.class .getName()); static byte ORDERED_TYPE = (byte) -1; http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/serde/src/test/org/apache/hadoop/hive/serde2/avro/TestTypeInfoToSchema.java ---------------------------------------------------------------------- diff --git a/serde/src/test/org/apache/hadoop/hive/serde2/avro/TestTypeInfoToSchema.java b/serde/src/test/org/apache/hadoop/hive/serde2/avro/TestTypeInfoToSchema.java index abbf038..ac0a8ee 100644 --- a/serde/src/test/org/apache/hadoop/hive/serde2/avro/TestTypeInfoToSchema.java +++ b/serde/src/test/org/apache/hadoop/hive/serde2/avro/TestTypeInfoToSchema.java @@ -33,9 +33,10 @@ import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo; import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory; import org.apache.hadoop.hive.serde2.typeinfo.UnionTypeInfo; import org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeInfo; -import org.apache.log4j.Logger; import org.junit.Before; import org.junit.Test; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import java.io.IOException; import java.util.ArrayList; @@ -44,7 +45,7 @@ import java.util.List; public class TestTypeInfoToSchema { - private static Logger LOGGER = Logger.getLogger(TestTypeInfoToSchema.class); + private static Logger LOGGER = LoggerFactory.getLogger(TestTypeInfoToSchema.class); private static final List<String> COLUMN_NAMES = Arrays.asList("testCol"); private static final TypeInfo STRING = TypeInfoFactory.getPrimitiveTypeInfo( serdeConstants.STRING_TYPE_NAME); @@ -434,4 +435,4 @@ 
public class TestTypeInfoToSchema { Assert.assertEquals("Test for nested struct's avro schema failed", expectedSchema, getAvroSchemaString(superStructTypeInfo)); } -} \ No newline at end of file +} http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/service/pom.xml ---------------------------------------------------------------------- diff --git a/service/pom.xml b/service/pom.xml index d7ab5bf..7095448 100644 --- a/service/pom.xml +++ b/service/pom.xml @@ -67,11 +67,6 @@ <version>${commons-lang.version}</version> </dependency> <dependency> - <groupId>commons-logging</groupId> - <artifactId>commons-logging</artifactId> - <version>${commons-logging.version}</version> - </dependency> - <dependency> <groupId>org.eclipse.jetty.aggregate</groupId> <artifactId>jetty-all</artifactId> <version>${jetty.version}</version> http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/service/src/java/org/apache/hive/service/AbstractService.java ---------------------------------------------------------------------- diff --git a/service/src/java/org/apache/hive/service/AbstractService.java b/service/src/java/org/apache/hive/service/AbstractService.java index c2a2b2d..adf0667 100644 --- a/service/src/java/org/apache/hive/service/AbstractService.java +++ b/service/src/java/org/apache/hive/service/AbstractService.java @@ -21,8 +21,8 @@ package org.apache.hive.service; import java.util.ArrayList; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.conf.HiveConf; /** @@ -31,7 +31,7 @@ import org.apache.hadoop.hive.conf.HiveConf; */ public abstract class AbstractService implements Service { - private static final Log LOG = LogFactory.getLog(AbstractService.class); + private static final Logger LOG = LoggerFactory.getLogger(AbstractService.class); /** * Service state: initially {@link STATE#NOTINITED}. 
http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/service/src/java/org/apache/hive/service/CompositeService.java ---------------------------------------------------------------------- diff --git a/service/src/java/org/apache/hive/service/CompositeService.java b/service/src/java/org/apache/hive/service/CompositeService.java index 8979118..e1f10f7 100644 --- a/service/src/java/org/apache/hive/service/CompositeService.java +++ b/service/src/java/org/apache/hive/service/CompositeService.java @@ -23,8 +23,8 @@ import java.util.Collection; import java.util.Collections; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.conf.HiveConf; /** @@ -33,7 +33,7 @@ import org.apache.hadoop.hive.conf.HiveConf; */ public class CompositeService extends AbstractService { - private static final Log LOG = LogFactory.getLog(CompositeService.class); + private static final Logger LOG = LoggerFactory.getLogger(CompositeService.class); private final List<Service> serviceList = new ArrayList<Service>(); http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/service/src/java/org/apache/hive/service/CookieSigner.java ---------------------------------------------------------------------- diff --git a/service/src/java/org/apache/hive/service/CookieSigner.java b/service/src/java/org/apache/hive/service/CookieSigner.java index ee51c24..1cb11c2 100644 --- a/service/src/java/org/apache/hive/service/CookieSigner.java +++ b/service/src/java/org/apache/hive/service/CookieSigner.java @@ -19,8 +19,8 @@ package org.apache.hive.service; import org.apache.commons.codec.binary.Base64; -import org.apache.commons.logging.LogFactory; -import org.apache.commons.logging.Log; +import org.slf4j.LoggerFactory; +import org.slf4j.Logger; import java.security.MessageDigest; import java.security.NoSuchAlgorithmException; @@ -34,7 +34,7 @@ public class CookieSigner 
{ private static final String SIGNATURE = "&s="; private static final String SHA_STRING = "SHA"; private byte[] secretBytes; - private static final Log LOG = LogFactory.getLog(CookieSigner.class); + private static final Logger LOG = LoggerFactory.getLogger(CookieSigner.class); /** * Constructor http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/service/src/java/org/apache/hive/service/ServiceOperations.java ---------------------------------------------------------------------- diff --git a/service/src/java/org/apache/hive/service/ServiceOperations.java b/service/src/java/org/apache/hive/service/ServiceOperations.java index 8946219..f65dc51 100644 --- a/service/src/java/org/apache/hive/service/ServiceOperations.java +++ b/service/src/java/org/apache/hive/service/ServiceOperations.java @@ -18,8 +18,8 @@ package org.apache.hive.service; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.conf.HiveConf; /** @@ -27,7 +27,7 @@ import org.apache.hadoop.hive.conf.HiveConf; * */ public final class ServiceOperations { - private static final Log LOG = LogFactory.getLog(AbstractService.class); + private static final Logger LOG = LoggerFactory.getLogger(AbstractService.class); private ServiceOperations() { } http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/service/src/java/org/apache/hive/service/ServiceUtils.java ---------------------------------------------------------------------- diff --git a/service/src/java/org/apache/hive/service/ServiceUtils.java b/service/src/java/org/apache/hive/service/ServiceUtils.java index e712aaf..11cbfef 100644 --- a/service/src/java/org/apache/hive/service/ServiceUtils.java +++ b/service/src/java/org/apache/hive/service/ServiceUtils.java @@ -17,6 +17,10 @@ */ package org.apache.hive.service; +import java.io.IOException; + +import org.slf4j.Logger; + public class ServiceUtils { /* @@ -41,4 +45,25 @@ public 
class ServiceUtils { } return endIdx; } + + /** + * Close the Closeable objects and <b>ignore</b> any {@link IOException} or + * null pointers. Must only be used for cleanup in exception handlers. + * + * @param log the log to record problems to at debug level. Can be null. + * @param closeables the objects to close + */ + public static void cleanup(Logger log, java.io.Closeable... closeables) { + for (java.io.Closeable c : closeables) { + if (c != null) { + try { + c.close(); + } catch(IOException e) { + if (log != null && log.isDebugEnabled()) { + log.debug("Exception in closing " + c, e); + } + } + } + } + } } \ No newline at end of file http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/service/src/java/org/apache/hive/service/auth/HttpAuthUtils.java ---------------------------------------------------------------------- diff --git a/service/src/java/org/apache/hive/service/auth/HttpAuthUtils.java b/service/src/java/org/apache/hive/service/auth/HttpAuthUtils.java index a58db9c..0620c64 100644 --- a/service/src/java/org/apache/hive/service/auth/HttpAuthUtils.java +++ b/service/src/java/org/apache/hive/service/auth/HttpAuthUtils.java @@ -32,8 +32,8 @@ import java.util.StringTokenizer; import javax.security.auth.Subject; import org.apache.commons.codec.binary.Base64; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.shims.ShimLoader; import org.apache.hadoop.security.UserGroupInformation; import org.apache.http.protocol.BasicHttpContext; @@ -51,7 +51,7 @@ public final class HttpAuthUtils { public static final String AUTHORIZATION = "Authorization"; public static final String BASIC = "Basic"; public static final String NEGOTIATE = "Negotiate"; - private static final Log LOG = LogFactory.getLog(HttpAuthUtils.class); + private static final Logger LOG = LoggerFactory.getLogger(HttpAuthUtils.class); private static final String 
COOKIE_ATTR_SEPARATOR = "&"; private static final String COOKIE_CLIENT_USER_NAME = "cu"; private static final String COOKIE_CLIENT_RAND_NUMBER = "rn"; http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/service/src/java/org/apache/hive/service/auth/LdapAuthenticationProviderImpl.java ---------------------------------------------------------------------- diff --git a/service/src/java/org/apache/hive/service/auth/LdapAuthenticationProviderImpl.java b/service/src/java/org/apache/hive/service/auth/LdapAuthenticationProviderImpl.java index f2a4a5b..31e3854 100644 --- a/service/src/java/org/apache/hive/service/auth/LdapAuthenticationProviderImpl.java +++ b/service/src/java/org/apache/hive/service/auth/LdapAuthenticationProviderImpl.java @@ -17,8 +17,8 @@ */ package org.apache.hive.service.auth; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hive.service.ServiceUtils; @@ -39,7 +39,7 @@ import javax.security.sasl.AuthenticationException; public class LdapAuthenticationProviderImpl implements PasswdAuthenticationProvider { - private static final Log LOG = LogFactory.getLog(LdapAuthenticationProviderImpl.class); + private static final Logger LOG = LoggerFactory.getLogger(LdapAuthenticationProviderImpl.class); private static final String DN_ATTR = "distinguishedName"; private final String ldapURL; http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/service/src/java/org/apache/hive/service/cli/CLIService.java ---------------------------------------------------------------------- diff --git a/service/src/java/org/apache/hive/service/cli/CLIService.java b/service/src/java/org/apache/hive/service/cli/CLIService.java index 4c7d7f4..adc9809 100644 --- a/service/src/java/org/apache/hive/service/cli/CLIService.java +++ b/service/src/java/org/apache/hive/service/cli/CLIService.java @@ -28,12 +28,10 @@ import 
java.util.concurrent.TimeoutException; import javax.security.auth.login.LoginException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.conf.HiveConf.ConfVars; -import org.apache.hadoop.hive.metastore.HiveMetaStoreClient; -import org.apache.hadoop.hive.metastore.IMetaStoreClient; import org.apache.hadoop.hive.metastore.api.MetaException; import org.apache.hadoop.hive.ql.exec.FunctionRegistry; import org.apache.hadoop.hive.ql.metadata.Hive; @@ -62,7 +60,7 @@ public class CLIService extends CompositeService implements ICLIService { SERVER_VERSION = protocols[protocols.length - 1]; } - private final Log LOG = LogFactory.getLog(CLIService.class.getName()); + private final Logger LOG = LoggerFactory.getLogger(CLIService.class.getName()); private HiveConf hiveConf; private SessionManager sessionManager; http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/service/src/java/org/apache/hive/service/cli/operation/HiveCommandOperation.java ---------------------------------------------------------------------- diff --git a/service/src/java/org/apache/hive/service/cli/operation/HiveCommandOperation.java b/service/src/java/org/apache/hive/service/cli/operation/HiveCommandOperation.java index 807f010..22c55f1 100644 --- a/service/src/java/org/apache/hive/service/cli/operation/HiveCommandOperation.java +++ b/service/src/java/org/apache/hive/service/cli/operation/HiveCommandOperation.java @@ -35,7 +35,7 @@ import org.apache.hadoop.hive.metastore.api.Schema; import org.apache.hadoop.hive.ql.processors.CommandProcessor; import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse; import org.apache.hadoop.hive.ql.session.SessionState; -import org.apache.hadoop.io.IOUtils; +import org.apache.hive.service.ServiceUtils; import org.apache.hive.service.cli.FetchOrientation; import 
org.apache.hive.service.cli.HiveSQLException; import org.apache.hive.service.cli.OperationState; @@ -48,7 +48,7 @@ import org.apache.hive.service.cli.session.HiveSession; * Executes a HiveCommand */ public class HiveCommandOperation extends ExecuteStatementOperation { - private CommandProcessor commandProcessor; + private final CommandProcessor commandProcessor; private TableSchema resultSchema = null; private boolean closeSessionStreams = true; // Only close file based streams, not System.out and System.err. @@ -79,7 +79,7 @@ public class HiveCommandOperation extends ExecuteStatementOperation { LOG.error("Error in creating temp output file ", e); // Close file streams to avoid resource leaking - IOUtils.cleanup(LOG, parentSession.getSessionState().out, parentSession.getSessionState().err); + ServiceUtils.cleanup(LOG, parentSession.getSessionState().out, parentSession.getSessionState().err); closeSessionStreams = false; try { @@ -98,7 +98,7 @@ public class HiveCommandOperation extends ExecuteStatementOperation { private void tearDownSessionIO() { if (closeSessionStreams) { - IOUtils.cleanup(LOG, parentSession.getSessionState().out, parentSession.getSessionState().err); + ServiceUtils.cleanup(LOG, parentSession.getSessionState().out, parentSession.getSessionState().err); } } @@ -214,7 +214,7 @@ public class HiveCommandOperation extends ExecuteStatementOperation { private void resetResultReader() { if (resultReader != null) { - IOUtils.cleanup(LOG, resultReader); + ServiceUtils.cleanup(LOG, resultReader); resultReader = null; } } http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/service/src/java/org/apache/hive/service/cli/operation/LogDivertAppender.java ---------------------------------------------------------------------- diff --git a/service/src/java/org/apache/hive/service/cli/operation/LogDivertAppender.java b/service/src/java/org/apache/hive/service/cli/operation/LogDivertAppender.java index c1bc547..9cb6439 100644 --- 
a/service/src/java/org/apache/hive/service/cli/operation/LogDivertAppender.java +++ b/service/src/java/org/apache/hive/service/cli/operation/LogDivertAppender.java @@ -39,6 +39,7 @@ import org.apache.logging.log4j.core.appender.OutputStreamManager; import org.apache.logging.log4j.core.config.Configuration; import org.apache.logging.log4j.core.filter.AbstractFilter; import org.apache.logging.log4j.core.layout.PatternLayout; +import org.slf4j.LoggerFactory; import com.google.common.base.Joiner; @@ -47,7 +48,7 @@ import com.google.common.base.Joiner; */ public class LogDivertAppender extends AbstractOutputStreamAppender<LogDivertAppender.StringOutputStreamManager> { - private static final Logger LOG = LogManager.getLogger(LogDivertAppender.class.getName()); + private static final org.slf4j.Logger LOG = LoggerFactory.getLogger(LogDivertAppender.class.getName()); private static LoggerContext context = (LoggerContext) LogManager.getContext(false); private static Configuration configuration = context.getConfiguration(); public static final Layout<? extends Serializable> verboseLayout = PatternLayout.createLayout( @@ -56,7 +57,7 @@ public class LogDivertAppender "%-5p : %m%n", configuration, null, null, true, false, null, null); private final OperationManager operationManager; - private StringOutputStreamManager manager; + private final StringOutputStreamManager manager; private boolean isVerbose; private final Layout<? extends Serializable> layout; @@ -105,7 +106,7 @@ public class LogDivertAppender private static class NameFilter extends AbstractFilter { private Pattern namePattern; private OperationLog.LoggingLevel loggingMode; - private OperationManager operationManager; + private final OperationManager operationManager; /* Patterns that are excluded in verbose logging level. * Filter out messages coming from log processing classes, or we'll run an infinite loop. 
http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/service/src/java/org/apache/hive/service/cli/operation/Operation.java ---------------------------------------------------------------------- diff --git a/service/src/java/org/apache/hive/service/cli/operation/Operation.java b/service/src/java/org/apache/hive/service/cli/operation/Operation.java index 515299c..4ca0561 100644 --- a/service/src/java/org/apache/hive/service/cli/operation/Operation.java +++ b/service/src/java/org/apache/hive/service/cli/operation/Operation.java @@ -23,11 +23,11 @@ import java.util.EnumSet; import java.util.concurrent.Future; import java.util.concurrent.TimeUnit; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hive.common.metrics.common.Metrics; import org.apache.hadoop.hive.common.metrics.common.MetricsConstant; import org.apache.hadoop.hive.common.metrics.common.MetricsFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse; import org.apache.hadoop.hive.ql.session.OperationLog; @@ -47,8 +47,8 @@ public abstract class Operation { private OperationState state = OperationState.INITIALIZED; private final OperationHandle opHandle; private HiveConf configuration; - public static final Log LOG = LogFactory.getLog(Operation.class.getName()); public static final FetchOrientation DEFAULT_FETCH_ORIENTATION = FetchOrientation.FETCH_NEXT; + public static final Logger LOG = LoggerFactory.getLogger(Operation.class.getName()); public static final long DEFAULT_FETCH_MAX_ROWS = 100; protected boolean hasResultSet; protected volatile HiveSQLException operationException; http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/service/src/java/org/apache/hive/service/cli/operation/OperationManager.java ---------------------------------------------------------------------- diff --git 
a/service/src/java/org/apache/hive/service/cli/operation/OperationManager.java b/service/src/java/org/apache/hive/service/cli/operation/OperationManager.java index e29b4b6..b0bd351 100644 --- a/service/src/java/org/apache/hive/service/cli/operation/OperationManager.java +++ b/service/src/java/org/apache/hive/service/cli/operation/OperationManager.java @@ -24,8 +24,8 @@ import java.util.HashMap; import java.util.List; import java.util.Map; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.common.metrics.common.Metrics; import org.apache.hadoop.hive.common.metrics.common.MetricsConstant; import org.apache.hadoop.hive.common.metrics.common.MetricsFactory; @@ -49,13 +49,14 @@ import org.apache.logging.log4j.core.Appender; import org.apache.logging.log4j.core.LoggerContext; import org.apache.logging.log4j.core.config.Configuration; import org.apache.logging.log4j.core.config.LoggerConfig; +import org.slf4j.LoggerFactory; /** * OperationManager. 
* */ public class OperationManager extends AbstractService { - private final Log LOG = LogFactory.getLog(OperationManager.class.getName()); + private final Logger LOG = LoggerFactory.getLogger(OperationManager.class.getName()); private final Map<OperationHandle, Operation> handleToOperation = new HashMap<OperationHandle, Operation>(); @@ -91,7 +92,7 @@ public class OperationManager extends AbstractService { Appender ap = LogDivertAppender.createInstance(this, OperationLog.getLoggingLevel(loggingMode)); LoggerContext context = (LoggerContext) LogManager.getContext(false); Configuration configuration = context.getConfiguration(); - LoggerConfig loggerConfig = configuration.getLoggerConfig(LogManager.getLogger().getName()); + LoggerConfig loggerConfig = configuration.getLoggerConfig(LoggerFactory.getLogger(getClass()).getName()); loggerConfig.addAppender(ap, null, null); context.updateLoggers(); ap.start(); http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/service/src/java/org/apache/hive/service/cli/session/HiveSessionImpl.java ---------------------------------------------------------------------- diff --git a/service/src/java/org/apache/hive/service/cli/session/HiveSessionImpl.java b/service/src/java/org/apache/hive/service/cli/session/HiveSessionImpl.java index 3eaab9a..50e938e 100644 --- a/service/src/java/org/apache/hive/service/cli/session/HiveSessionImpl.java +++ b/service/src/java/org/apache/hive/service/cli/session/HiveSessionImpl.java @@ -31,10 +31,10 @@ import java.util.Set; import org.apache.commons.io.FileUtils; import org.apache.commons.lang.StringUtils; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hive.common.cli.HiveFileProcessor; import org.apache.hadoop.hive.common.cli.IHiveFileProcessor; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.conf.HiveConf.ConfVars; import 
org.apache.hadoop.hive.metastore.IMetaStoreClient; @@ -77,9 +77,6 @@ import org.apache.hive.service.server.ThreadWithGarbageCleanup; * */ public class HiveSessionImpl implements HiveSession { - private static final String FETCH_WORK_SERDE_CLASS = - "org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe"; - private static final Log LOG = LogFactory.getLog(HiveSessionImpl.class); // Shared between threads (including SessionState!) private final SessionHandle sessionHandle; @@ -94,6 +91,11 @@ public class HiveSessionImpl implements HiveSession { // 2) Some parts of session state, like mrStats and vars, need proper synchronization. private SessionState sessionState; private String ipAddress; + + private static final String FETCH_WORK_SERDE_CLASS = + "org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe"; + private static final Logger LOG = LoggerFactory.getLogger(HiveSessionImpl.class); + private SessionManager sessionManager; private OperationManager operationManager; // Synchronized by locking on itself. 
http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/service/src/java/org/apache/hive/service/cli/session/HiveSessionImplwithUGI.java ---------------------------------------------------------------------- diff --git a/service/src/java/org/apache/hive/service/cli/session/HiveSessionImplwithUGI.java b/service/src/java/org/apache/hive/service/cli/session/HiveSessionImplwithUGI.java index 0f4f680..441db7c 100644 --- a/service/src/java/org/apache/hive/service/cli/session/HiveSessionImplwithUGI.java +++ b/service/src/java/org/apache/hive/service/cli/session/HiveSessionImplwithUGI.java @@ -20,8 +20,8 @@ package org.apache.hive.service.cli.session; import java.io.IOException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.metastore.IMetaStoreClient; @@ -42,7 +42,7 @@ import org.apache.hive.service.cli.thrift.TProtocolVersion; */ public class HiveSessionImplwithUGI extends HiveSessionImpl { public static final String HS2TOKEN = "HiveServer2ImpersonationToken"; - static final Log LOG = LogFactory.getLog(HiveSessionImplwithUGI.class); + static final Logger LOG = LoggerFactory.getLogger(HiveSessionImplwithUGI.class); private UserGroupInformation sessionUgi = null; private String hmsDelegationTokenStr = null; http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/service/src/java/org/apache/hive/service/cli/session/SessionManager.java ---------------------------------------------------------------------- diff --git a/service/src/java/org/apache/hive/service/cli/session/SessionManager.java b/service/src/java/org/apache/hive/service/cli/session/SessionManager.java index 1119fd3..a9b4334 100644 --- a/service/src/java/org/apache/hive/service/cli/session/SessionManager.java +++ b/service/src/java/org/apache/hive/service/cli/session/SessionManager.java @@ -32,8 
+32,8 @@ import java.util.concurrent.ThreadPoolExecutor; import java.util.concurrent.TimeUnit; import org.apache.commons.io.FileUtils; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.conf.HiveConf.ConfVars; import org.apache.hadoop.hive.ql.hooks.HookUtils; @@ -51,8 +51,8 @@ import org.apache.hive.service.server.ThreadFactoryWithGarbageCleanup; */ public class SessionManager extends CompositeService { - private static final Log LOG = LogFactory.getLog(CompositeService.class); public static final String HIVERCFILE = ".hiverc"; + private static final Logger LOG = LoggerFactory.getLogger(SessionManager.class); private HiveConf hiveConf; private final Map<SessionHandle, HiveSession> handleToSession = new ConcurrentHashMap<SessionHandle, HiveSession>(); http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/service/src/java/org/apache/hive/service/cli/thrift/RetryingThriftCLIServiceClient.java ---------------------------------------------------------------------- diff --git a/service/src/java/org/apache/hive/service/cli/thrift/RetryingThriftCLIServiceClient.java b/service/src/java/org/apache/hive/service/cli/thrift/RetryingThriftCLIServiceClient.java index 4bd7336..529eaa4 100644 --- a/service/src/java/org/apache/hive/service/cli/thrift/RetryingThriftCLIServiceClient.java +++ b/service/src/java/org/apache/hive/service/cli/thrift/RetryingThriftCLIServiceClient.java @@ -18,8 +18,8 @@ package org.apache.hive.service.cli.thrift; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hive.service.auth.HiveAuthFactory; @@ -49,7 +49,7 @@ import java.util.concurrent.TimeUnit; * implementation and retries 
calls to it on failure. */ public class RetryingThriftCLIServiceClient implements InvocationHandler { - public static final Log LOG = LogFactory.getLog(RetryingThriftCLIServiceClient.class); + public static final Logger LOG = LoggerFactory.getLogger(RetryingThriftCLIServiceClient.class); private ThriftCLIServiceClient base; private final int retryLimit; private final int retryDelaySeconds; http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/service/src/java/org/apache/hive/service/cli/thrift/ThriftBinaryCLIService.java ---------------------------------------------------------------------- diff --git a/service/src/java/org/apache/hive/service/cli/thrift/ThriftBinaryCLIService.java b/service/src/java/org/apache/hive/service/cli/thrift/ThriftBinaryCLIService.java index 54f9914..cf575a4 100644 --- a/service/src/java/org/apache/hive/service/cli/thrift/ThriftBinaryCLIService.java +++ b/service/src/java/org/apache/hive/service/cli/thrift/ThriftBinaryCLIService.java @@ -100,7 +100,7 @@ public class ThriftBinaryCLIService extends ThriftCLIService { LOG.info(msg); server.serve(); } catch (Throwable t) { - LOG.fatal( + LOG.error( "Error starting HiveServer2: could not start " + ThriftBinaryCLIService.class.getSimpleName(), t); System.exit(-1); http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/service/src/java/org/apache/hive/service/cli/thrift/ThriftCLIService.java ---------------------------------------------------------------------- diff --git a/service/src/java/org/apache/hive/service/cli/thrift/ThriftCLIService.java b/service/src/java/org/apache/hive/service/cli/thrift/ThriftCLIService.java index 0532d79..8434965 100644 --- a/service/src/java/org/apache/hive/service/cli/thrift/ThriftCLIService.java +++ b/service/src/java/org/apache/hive/service/cli/thrift/ThriftCLIService.java @@ -28,11 +28,11 @@ import java.util.concurrent.atomic.AtomicInteger; import javax.security.auth.login.LoginException; -import org.apache.commons.logging.Log; -import 
org.apache.commons.logging.LogFactory; import org.apache.hadoop.hive.common.metrics.common.Metrics; import org.apache.hadoop.hive.common.metrics.common.MetricsConstant; import org.apache.hadoop.hive.common.metrics.common.MetricsFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.conf.HiveConf.ConfVars; import org.apache.hive.service.AbstractService; @@ -66,7 +66,7 @@ import org.apache.thrift.transport.TTransport; */ public abstract class ThriftCLIService extends AbstractService implements TCLIService.Iface, Runnable { - public static final Log LOG = LogFactory.getLog(ThriftCLIService.class.getName()); + public static final Logger LOG = LoggerFactory.getLogger(ThriftCLIService.class.getName()); protected CLIService cliService; private static final TStatus OK_STATUS = new TStatus(TStatusCode.SUCCESS_STATUS); http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/service/src/java/org/apache/hive/service/cli/thrift/ThriftHttpCLIService.java ---------------------------------------------------------------------- diff --git a/service/src/java/org/apache/hive/service/cli/thrift/ThriftHttpCLIService.java b/service/src/java/org/apache/hive/service/cli/thrift/ThriftHttpCLIService.java index a940bd6..b7756dd 100644 --- a/service/src/java/org/apache/hive/service/cli/thrift/ThriftHttpCLIService.java +++ b/service/src/java/org/apache/hive/service/cli/thrift/ThriftHttpCLIService.java @@ -21,7 +21,6 @@ package org.apache.hive.service.cli.thrift; import java.util.Arrays; import java.util.concurrent.ExecutorService; import java.util.concurrent.SynchronousQueue; -import java.util.concurrent.ThreadPoolExecutor; import java.util.concurrent.TimeUnit; import org.apache.hadoop.hive.conf.HiveConf; @@ -143,7 +142,7 @@ public class ThriftHttpCLIService extends ThriftCLIService { LOG.info(msg); httpServer.join(); } catch (Throwable t) { - LOG.fatal( + LOG.error( "Error starting HiveServer2: could not 
start " + ThriftHttpCLIService.class.getSimpleName(), t); System.exit(-1); http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/service/src/java/org/apache/hive/service/cli/thrift/ThriftHttpServlet.java ---------------------------------------------------------------------- diff --git a/service/src/java/org/apache/hive/service/cli/thrift/ThriftHttpServlet.java b/service/src/java/org/apache/hive/service/cli/thrift/ThriftHttpServlet.java index 56c8cb6..0b8cf31 100644 --- a/service/src/java/org/apache/hive/service/cli/thrift/ThriftHttpServlet.java +++ b/service/src/java/org/apache/hive/service/cli/thrift/ThriftHttpServlet.java @@ -35,8 +35,8 @@ import javax.ws.rs.core.NewCookie; import org.apache.commons.codec.binary.Base64; import org.apache.commons.codec.binary.StringUtils; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.conf.HiveConf.ConfVars; import org.apache.hadoop.hive.shims.HadoopShims.KerberosNameShim; @@ -68,7 +68,7 @@ import org.ietf.jgss.Oid; public class ThriftHttpServlet extends TServlet { private static final long serialVersionUID = 1L; - public static final Log LOG = LogFactory.getLog(ThriftHttpServlet.class.getName()); + public static final Logger LOG = LoggerFactory.getLogger(ThriftHttpServlet.class.getName()); private final String authType; private final UserGroupInformation serviceUGI; private final UserGroupInformation httpUGI; http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/service/src/java/org/apache/hive/service/server/HiveServer2.java ---------------------------------------------------------------------- diff --git a/service/src/java/org/apache/hive/service/server/HiveServer2.java b/service/src/java/org/apache/hive/service/server/HiveServer2.java index 601c5db..b30b6a2 100644 --- a/service/src/java/org/apache/hive/service/server/HiveServer2.java +++ 
b/service/src/java/org/apache/hive/service/server/HiveServer2.java @@ -34,8 +34,6 @@ import org.apache.commons.cli.Option; import org.apache.commons.cli.OptionBuilder; import org.apache.commons.cli.Options; import org.apache.commons.cli.ParseException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.curator.framework.CuratorFramework; import org.apache.curator.framework.CuratorFrameworkFactory; import org.apache.curator.framework.api.ACLProvider; @@ -45,6 +43,8 @@ import org.apache.curator.framework.api.CuratorEventType; import org.apache.curator.framework.recipes.nodes.PersistentEphemeralNode; import org.apache.curator.retry.ExponentialBackoffRetry; import org.apache.hadoop.hive.common.JvmPauseMonitor; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.common.LogUtils; import org.apache.hadoop.hive.common.LogUtils.LogInitializationException; import org.apache.hadoop.hive.common.metrics.common.MetricsFactory; @@ -78,9 +78,8 @@ import com.google.common.base.Joiner; * */ public class HiveServer2 extends CompositeService { - private static final Log LOG = LogFactory.getLog(HiveServer2.class); private static CountDownLatch deleteSignal; - + private static final Logger LOG = LoggerFactory.getLogger(HiveServer2.class); private CLIService cliService; private ThriftCLIService thriftCLIService; private PersistentEphemeralNode znode; @@ -208,7 +207,7 @@ public class HiveServer2 extends CompositeService { LOG.info("Created the root name space: " + rootNamespace + " on ZooKeeper for HiveServer2"); } catch (KeeperException e) { if (e.code() != KeeperException.Code.NODEEXISTS) { - LOG.fatal("Unable to create HiveServer2 namespace: " + rootNamespace + " on ZooKeeper", e); + LOG.error("Unable to create HiveServer2 namespace: " + rootNamespace + " on ZooKeeper", e); throw e; } } @@ -241,7 +240,7 @@ public class HiveServer2 extends CompositeService { } LOG.info("Created a znode on 
ZooKeeper for HiveServer2 uri: " + instanceURI); } catch (Exception e) { - LOG.fatal("Unable to create a znode for this server instance", e); + LOG.error("Unable to create a znode for this server instance", e); if (znode != null) { znode.close(); } @@ -550,7 +549,7 @@ public class HiveServer2 extends CompositeService { LOG.debug(initLog4jMessage); HiveStringUtils.startupShutdownMessage(HiveServer2.class, args, LOG); - // Log debug message from "oproc" after log4j initialize properly + // Log debug message from "oproc" after log4j initialize properly LOG.debug(oproc.getDebugMessage().toString()); // Call the executor which will execute the appropriate command based on the parsed options @@ -683,7 +682,7 @@ public class HiveServer2 extends CompositeService { try { startHiveServer2(); } catch (Throwable t) { - LOG.fatal("Error starting HiveServer2", t); + LOG.error("Error starting HiveServer2", t); System.exit(-1); } } @@ -705,7 +704,7 @@ public class HiveServer2 extends CompositeService { try { deleteServerInstancesFromZooKeeper(versionNumber); } catch (Exception e) { - LOG.fatal("Error deregistering HiveServer2 instances for version: " + versionNumber + LOG.error("Error deregistering HiveServer2 instances for version: " + versionNumber + " from ZooKeeper", e); System.out.println("Error deregistering HiveServer2 instances for version: " + versionNumber + " from ZooKeeper." 
+ e); http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/service/src/java/org/apache/hive/service/server/ThreadWithGarbageCleanup.java ---------------------------------------------------------------------- diff --git a/service/src/java/org/apache/hive/service/server/ThreadWithGarbageCleanup.java b/service/src/java/org/apache/hive/service/server/ThreadWithGarbageCleanup.java index 8ee9810..8c2a49e 100644 --- a/service/src/java/org/apache/hive/service/server/ThreadWithGarbageCleanup.java +++ b/service/src/java/org/apache/hive/service/server/ThreadWithGarbageCleanup.java @@ -21,8 +21,8 @@ package org.apache.hive.service.server; import java.util.Map; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.metastore.HiveMetaStore; import org.apache.hadoop.hive.metastore.RawStore; @@ -32,7 +32,7 @@ import org.apache.hadoop.hive.metastore.RawStore; * when killed by its corresponding ExecutorService. 
*/ public class ThreadWithGarbageCleanup extends Thread { - private static final Log LOG = LogFactory.getLog(ThreadWithGarbageCleanup.class); + private static final Logger LOG = LoggerFactory.getLogger(ThreadWithGarbageCleanup.class); Map<Long, RawStore> threadRawStoreMap = ThreadFactoryWithGarbageCleanup.getThreadRawStoreMap(); http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/service/src/test/org/apache/hive/service/cli/CLIServiceTest.java ---------------------------------------------------------------------- diff --git a/service/src/test/org/apache/hive/service/cli/CLIServiceTest.java b/service/src/test/org/apache/hive/service/cli/CLIServiceTest.java index c73d152..d90002b 100644 --- a/service/src/test/org/apache/hive/service/cli/CLIServiceTest.java +++ b/service/src/test/org/apache/hive/service/cli/CLIServiceTest.java @@ -33,8 +33,8 @@ import java.util.concurrent.Executors; import java.util.concurrent.FutureTask; import java.util.concurrent.TimeUnit; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.ql.session.SessionState; import org.apache.hive.service.server.HiveServer2; @@ -47,7 +47,7 @@ import org.junit.Test; * */ public abstract class CLIServiceTest { - private static final Log LOG = LogFactory.getLog(CLIServiceTest.class); + private static final Logger LOG = LoggerFactory.getLogger(CLIServiceTest.class); protected static CLIServiceClient client; http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/shims/0.23/pom.xml ---------------------------------------------------------------------- diff --git a/shims/0.23/pom.xml b/shims/0.23/pom.xml index eee594e..17efde8 100644 --- a/shims/0.23/pom.xml +++ b/shims/0.23/pom.xml @@ -47,11 +47,6 @@ <version>${commons-lang.version}</version> </dependency> <dependency> - <groupId>commons-logging</groupId> - 
<artifactId>commons-logging</artifactId> - <version>${commons-logging.version}</version> - </dependency> - <dependency> <groupId>org.apache.hadoop</groupId> <artifactId>hadoop-common</artifactId> <version>${hadoop.version}</version> http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/shims/0.23/src/main/java/org/apache/hadoop/mapred/WebHCatJTShim23.java ---------------------------------------------------------------------- diff --git a/shims/0.23/src/main/java/org/apache/hadoop/mapred/WebHCatJTShim23.java b/shims/0.23/src/main/java/org/apache/hadoop/mapred/WebHCatJTShim23.java index c85a739..288043f 100644 --- a/shims/0.23/src/main/java/org/apache/hadoop/mapred/WebHCatJTShim23.java +++ b/shims/0.23/src/main/java/org/apache/hadoop/mapred/WebHCatJTShim23.java @@ -17,8 +17,8 @@ */ package org.apache.hadoop.mapred; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.mapreduce.Job; import org.apache.hadoop.mapreduce.TypeConverter; @@ -43,7 +43,7 @@ import java.util.List; import java.util.Set; public class WebHCatJTShim23 implements WebHCatJTShim { - private static final Log LOG = LogFactory.getLog(WebHCatJTShim23.class); + private static final Logger LOG = LoggerFactory.getLogger(WebHCatJTShim23.class); private JobClient jc; private final Configuration conf; http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/shims/common/pom.xml ---------------------------------------------------------------------- diff --git a/shims/common/pom.xml b/shims/common/pom.xml index 76d8da5..001c96b 100644 --- a/shims/common/pom.xml +++ b/shims/common/pom.xml @@ -36,11 +36,6 @@ <!-- dependencies are always listed in sorted order by groupId, artifectId --> <!-- inter-project --> <dependency> - <groupId>commons-logging</groupId> - <artifactId>commons-logging</artifactId> - <version>${commons-logging.version}</version> - 
</dependency> - <dependency> <groupId>org.apache.logging.log4j</groupId> <artifactId>log4j-1.2-api</artifactId> <version>${log4j2.version}</version> @@ -51,11 +46,6 @@ <version>${log4j2.version}</version> </dependency> <dependency> - <groupId>org.apache.logging.log4j</groupId> - <artifactId>log4j-jcl</artifactId> - <version>${log4j2.version}</version> - </dependency> - <dependency> <groupId>com.google.guava</groupId> <artifactId>guava</artifactId> <version>${guava.version}</version> @@ -65,6 +55,12 @@ <artifactId>hadoop-client</artifactId> <version>${hadoop.version}</version> <optional>true</optional> + <exclusions> + <exclusion> + <groupId>commons-logging</groupId> + <artifactId>commons-logging</artifactId> + </exclusion> + </exclusions> </dependency> <dependency> <groupId>commons-lang</groupId> http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/shims/common/src/main/java/org/apache/hadoop/fs/DefaultFileAccess.java ---------------------------------------------------------------------- diff --git a/shims/common/src/main/java/org/apache/hadoop/fs/DefaultFileAccess.java b/shims/common/src/main/java/org/apache/hadoop/fs/DefaultFileAccess.java index 45ca210..3cc2d1a 100644 --- a/shims/common/src/main/java/org/apache/hadoop/fs/DefaultFileAccess.java +++ b/shims/common/src/main/java/org/apache/hadoop/fs/DefaultFileAccess.java @@ -28,8 +28,8 @@ import java.util.List; import javax.security.auth.login.LoginException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.permission.FsAction; @@ -45,7 +45,7 @@ import org.apache.hadoop.security.UserGroupInformation; */ public class DefaultFileAccess { - private static Log LOG = LogFactory.getLog(DefaultFileAccess.class); + private static Logger LOG = LoggerFactory.getLogger(DefaultFileAccess.class); private static 
List<String> emptyGroups = new ArrayList<String>(0); http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/shims/common/src/main/java/org/apache/hadoop/hive/shims/HadoopShims.java ---------------------------------------------------------------------- diff --git a/shims/common/src/main/java/org/apache/hadoop/hive/shims/HadoopShims.java b/shims/common/src/main/java/org/apache/hadoop/hive/shims/HadoopShims.java index dae9a1d..47b3caa 100644 --- a/shims/common/src/main/java/org/apache/hadoop/hive/shims/HadoopShims.java +++ b/shims/common/src/main/java/org/apache/hadoop/hive/shims/HadoopShims.java @@ -25,19 +25,14 @@ import java.net.URI; import java.nio.ByteBuffer; import java.security.AccessControlException; import java.security.NoSuchAlgorithmException; -import java.security.PrivilegedExceptionAction; import java.util.Comparator; import java.util.List; import java.util.Map; import java.util.Set; import java.util.TreeMap; -import javax.security.auth.login.LoginException; - import com.google.common.annotations.VisibleForTesting; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.BlockLocation; import org.apache.hadoop.fs.FSDataInputStream; @@ -48,7 +43,6 @@ import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.PathFilter; import org.apache.hadoop.fs.permission.FsAction; import org.apache.hadoop.fs.permission.FsPermission; -import org.apache.hadoop.hive.shims.HadoopShims.StoragePolicyValue; import org.apache.hadoop.io.LongWritable; import org.apache.hadoop.io.Text; import org.apache.hadoop.mapred.ClusterStatus; @@ -80,10 +74,10 @@ import org.apache.hadoop.util.Progressable; public interface HadoopShims { /** - * Constructs and Returns TaskAttempt Log Url + * Constructs and Returns TaskAttempt Log Url * or null if the TaskLogServlet is not available * - * @return TaskAttempt Log Url + * @return TaskAttempt Log Url */ String 
getTaskAttemptLogUrl(JobConf conf, String taskTrackerHttpAddress, @@ -418,11 +412,11 @@ public interface HadoopShims { public FileSystem createProxyFileSystem(FileSystem fs, URI uri); public Map<String, String> getHadoopConfNames(); - + /** * Create a shim for DFS storage policy. */ - + public enum StoragePolicyValue { MEMORY, /* 1-replica memory */ SSD, /* 3-replica ssd */ @@ -435,11 +429,11 @@ public interface HadoopShims { return StoragePolicyValue.valueOf(name.toUpperCase().trim()); } }; - + public interface StoragePolicyShim { void setStoragePolicy(Path path, StoragePolicyValue policy) throws IOException; } - + /** * obtain a storage policy shim associated with the filesystem. * Returns null when the filesystem has no storage policies. http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/shims/common/src/main/java/org/apache/hadoop/hive/shims/HadoopShimsSecure.java ---------------------------------------------------------------------- diff --git a/shims/common/src/main/java/org/apache/hadoop/hive/shims/HadoopShimsSecure.java b/shims/common/src/main/java/org/apache/hadoop/hive/shims/HadoopShimsSecure.java index c6b7c9d..0a0f52d 100644 --- a/shims/common/src/main/java/org/apache/hadoop/hive/shims/HadoopShimsSecure.java +++ b/shims/common/src/main/java/org/apache/hadoop/hive/shims/HadoopShimsSecure.java @@ -31,8 +31,8 @@ import java.util.HashSet; import java.util.Set; import org.apache.commons.lang.ArrayUtils; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.DefaultFileAccess; import org.apache.hadoop.fs.FileStatus; @@ -60,7 +60,7 @@ import org.apache.hadoop.util.Progressable; */ public abstract class HadoopShimsSecure implements HadoopShims { - static final Log LOG = LogFactory.getLog(HadoopShimsSecure.class); + static final Logger LOG = LoggerFactory.getLogger(HadoopShimsSecure.class); 
public static class InputSplitShim extends CombineFileSplit { long shrinkedLength; http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/shims/common/src/main/java/org/apache/hadoop/hive/thrift/HadoopThriftAuthBridge.java ---------------------------------------------------------------------- diff --git a/shims/common/src/main/java/org/apache/hadoop/hive/thrift/HadoopThriftAuthBridge.java b/shims/common/src/main/java/org/apache/hadoop/hive/thrift/HadoopThriftAuthBridge.java index 6b0bd10..20dec9a 100644 --- a/shims/common/src/main/java/org/apache/hadoop/hive/thrift/HadoopThriftAuthBridge.java +++ b/shims/common/src/main/java/org/apache/hadoop/hive/thrift/HadoopThriftAuthBridge.java @@ -40,8 +40,8 @@ import javax.security.sasl.SaslServer; import org.apache.commons.codec.binary.Base64; import org.apache.commons.lang.StringUtils; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.hive.shims.ShimLoader; @@ -76,7 +76,7 @@ import org.apache.thrift.transport.TTransportFactory; * to avoid maintenance errors. 
*/ public abstract class HadoopThriftAuthBridge { - private static final Log LOG = LogFactory.getLog(HadoopThriftAuthBridge.class); + private static final Logger LOG = LoggerFactory.getLogger(HadoopThriftAuthBridge.class); public Client createClient() { return new Client(); http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/shims/scheduler/pom.xml ---------------------------------------------------------------------- diff --git a/shims/scheduler/pom.xml b/shims/scheduler/pom.xml index 276b6cb..cf9d077 100644 --- a/shims/scheduler/pom.xml +++ b/shims/scheduler/pom.xml @@ -42,11 +42,6 @@ </dependency> <!-- inter-project --> <dependency> - <groupId>commons-logging</groupId> - <artifactId>commons-logging</artifactId> - <version>${commons-logging.version}</version> - </dependency> - <dependency> <groupId>org.apache.hadoop</groupId> <artifactId>hadoop-common</artifactId> <version>${hadoop.version}</version> http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/shims/scheduler/src/main/java/org/apache/hadoop/hive/schshim/FairSchedulerShim.java ---------------------------------------------------------------------- diff --git a/shims/scheduler/src/main/java/org/apache/hadoop/hive/schshim/FairSchedulerShim.java b/shims/scheduler/src/main/java/org/apache/hadoop/hive/schshim/FairSchedulerShim.java index 41c34aa..372244d 100644 --- a/shims/scheduler/src/main/java/org/apache/hadoop/hive/schshim/FairSchedulerShim.java +++ b/shims/scheduler/src/main/java/org/apache/hadoop/hive/schshim/FairSchedulerShim.java @@ -21,8 +21,8 @@ import java.io.IOException; import java.util.concurrent.atomic.AtomicReference; import org.apache.commons.lang.StringUtils; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.shims.SchedulerShim; import org.apache.hadoop.yarn.conf.YarnConfiguration; @@ -31,7 +31,7 @@ import 
org.apache.hadoop.yarn.server.resourcemanager.scheduler.fair.AllocationFi import org.apache.hadoop.yarn.server.resourcemanager.scheduler.fair.QueuePlacementPolicy; public class FairSchedulerShim implements SchedulerShim { - private static final Log LOG = LogFactory.getLog(FairSchedulerShim.class); + private static final Logger LOG = LoggerFactory.getLogger(FairSchedulerShim.class); private static final String MR2_JOB_QUEUE_PROPERTY = "mapreduce.job.queuename"; @Override http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/spark-client/src/main/java/org/apache/hive/spark/client/SparkClientUtilities.java ---------------------------------------------------------------------- diff --git a/spark-client/src/main/java/org/apache/hive/spark/client/SparkClientUtilities.java b/spark-client/src/main/java/org/apache/hive/spark/client/SparkClientUtilities.java index 589436d..cd38346 100644 --- a/spark-client/src/main/java/org/apache/hive/spark/client/SparkClientUtilities.java +++ b/spark-client/src/main/java/org/apache/hive/spark/client/SparkClientUtilities.java @@ -27,14 +27,14 @@ import java.util.List; import java.util.Set; import org.apache.commons.lang.StringUtils; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; public class SparkClientUtilities { - protected static final transient Log LOG = LogFactory.getLog(SparkClientUtilities.class); + protected static final transient Logger LOG = LoggerFactory.getLogger(SparkClientUtilities.class); /** * Add new elements to the classpath. 
http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/spark-client/src/main/java/org/apache/hive/spark/counter/SparkCounters.java ---------------------------------------------------------------------- diff --git a/spark-client/src/main/java/org/apache/hive/spark/counter/SparkCounters.java b/spark-client/src/main/java/org/apache/hive/spark/counter/SparkCounters.java index 5523333..a65cc93 100644 --- a/spark-client/src/main/java/org/apache/hive/spark/counter/SparkCounters.java +++ b/spark-client/src/main/java/org/apache/hive/spark/counter/SparkCounters.java @@ -21,8 +21,8 @@ import java.io.Serializable; import java.util.HashMap; import java.util.Map; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.spark.api.java.JavaSparkContext; /** @@ -40,7 +40,7 @@ import org.apache.spark.api.java.JavaSparkContext; public class SparkCounters implements Serializable { private static final long serialVersionUID = 1L; - private static final Log LOG = LogFactory.getLog(SparkCounters.class); + private static final Logger LOG = LoggerFactory.getLogger(SparkCounters.class); private Map<String, SparkCounterGroup> sparkCounterGroups; http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/storage-api/src/java/org/apache/hadoop/hive/common/io/DiskRangeList.java ---------------------------------------------------------------------- diff --git a/storage-api/src/java/org/apache/hadoop/hive/common/io/DiskRangeList.java b/storage-api/src/java/org/apache/hadoop/hive/common/io/DiskRangeList.java index fe4e64e..b84aeb5 100644 --- a/storage-api/src/java/org/apache/hadoop/hive/common/io/DiskRangeList.java +++ b/storage-api/src/java/org/apache/hadoop/hive/common/io/DiskRangeList.java @@ -17,14 +17,14 @@ */ package org.apache.hadoop.hive.common.io; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import 
org.slf4j.LoggerFactory; /** Java linked list iterator interface is convoluted, and moreover concurrent modifications * of the same list by multiple iterators are impossible. Hence, this. * Java also doesn't support multiple inheritance, so this cannot be done as "aspect"... */ public class DiskRangeList extends DiskRange { - private static final Log LOG = LogFactory.getLog(DiskRangeList.class); + private static final Logger LOG = LoggerFactory.getLogger(DiskRangeList.class); public DiskRangeList prev, next; public DiskRangeList(long offset, long end) { http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/storage-api/src/java/org/apache/hadoop/hive/ql/io/sarg/SearchArgumentImpl.java ---------------------------------------------------------------------- diff --git a/storage-api/src/java/org/apache/hadoop/hive/ql/io/sarg/SearchArgumentImpl.java b/storage-api/src/java/org/apache/hadoop/hive/ql/io/sarg/SearchArgumentImpl.java index 4c3dd5a..eeff131 100644 --- a/storage-api/src/java/org/apache/hadoop/hive/ql/io/sarg/SearchArgumentImpl.java +++ b/storage-api/src/java/org/apache/hadoop/hive/ql/io/sarg/SearchArgumentImpl.java @@ -27,15 +27,10 @@ import java.util.HashMap; import java.util.List; import java.util.Map; -import org.apache.commons.codec.binary.Base64; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; - /** * The implementation of SearchArguments. 
*/ final class SearchArgumentImpl implements SearchArgument { - public static final Log LOG = LogFactory.getLog(SearchArgumentImpl.class); static final class PredicateLeafImpl implements PredicateLeaf { private final Operator operator; http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/storage-api/src/java/org/apache/hadoop/hive/serde2/io/HiveDecimalWritable.java ---------------------------------------------------------------------- diff --git a/storage-api/src/java/org/apache/hadoop/hive/serde2/io/HiveDecimalWritable.java b/storage-api/src/java/org/apache/hadoop/hive/serde2/io/HiveDecimalWritable.java index 9890771..41452da 100644 --- a/storage-api/src/java/org/apache/hadoop/hive/serde2/io/HiveDecimalWritable.java +++ b/storage-api/src/java/org/apache/hadoop/hive/serde2/io/HiveDecimalWritable.java @@ -22,8 +22,6 @@ import java.io.DataOutput; import java.io.IOException; import java.math.BigInteger; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hive.common.type.HiveDecimal; import org.apache.hadoop.io.WritableComparable; @@ -31,8 +29,6 @@ import org.apache.hadoop.io.WritableUtils; public class HiveDecimalWritable implements WritableComparable<HiveDecimalWritable> { - static final private Log LOG = LogFactory.getLog(HiveDecimalWritable.class); - private byte[] internalStorage = new byte[0]; private int scale; http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/testutils/ptest2/pom.xml ---------------------------------------------------------------------- diff --git a/testutils/ptest2/pom.xml b/testutils/ptest2/pom.xml index fade125..51a0aaf 100644 --- a/testutils/ptest2/pom.xml +++ b/testutils/ptest2/pom.xml @@ -80,11 +80,6 @@ limitations under the License. 
<version>${log4j2.version}</version> </dependency> <dependency> - <groupId>org.apache.logging.log4j</groupId> - <artifactId>log4j-jcl</artifactId> - <version>${log4j2.version}</version> - </dependency> - <dependency> <groupId>org.apache.httpcomponents</groupId> <artifactId>httpclient</artifactId> <version>4.2.5</version>