Author: brock Date: Sat Sep 6 17:20:46 2014 New Revision: 1622892 URL: http://svn.apache.org/r1622892 Log: HIVE-7553 - avoid the scheduling maintenance window for every jar change (Ferdinand Xu via Brock)
Added: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/processors/ReloadProcessor.java hive/trunk/ql/src/test/resources/SessionStateTest.jar.v1 (with props) hive/trunk/ql/src/test/resources/SessionStateTest.jar.v2 (with props) Modified: hive/trunk/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/common/HCatUtil.java hive/trunk/hcatalog/webhcat/java-client/src/main/java/org/apache/hive/hcatalog/api/HCatClient.java hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/DefaultFetchFormatter.java hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionTask.java hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ListSinkOperator.java hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/HadoopJobExecHelper.java hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/hooks/HookUtils.java hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/HivePassThroughOutputFormat.java hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveUtils.java hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/Partition.java hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/Table.java hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ConstantPropagateProcFactory.java hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ImportSemanticAnalyzer.java hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseUtils.java hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/AggregationDesc.java hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/CreateTableDesc.java hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/TableDesc.java hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/processors/CommandProcessorFactory.java 
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/processors/HiveCommand.java hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/stats/StatsFactory.java hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFBridge.java hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/TestUtilities.java hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/session/TestSessionState.java hive/trunk/service/src/java/org/apache/hive/service/cli/session/HiveSessionImpl.java Modified: hive/trunk/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java URL: http://svn.apache.org/viewvc/hive/trunk/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java?rev=1622892&r1=1622891&r2=1622892&view=diff ============================================================================== --- hive/trunk/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java (original) +++ hive/trunk/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java Sat Sep 6 17:20:46 2014 @@ -581,6 +581,11 @@ public class HiveConf extends Configurat HIVEJAR("hive.jar.path", "", ""), HIVEAUXJARS("hive.aux.jars.path", "", ""), + // reloadable jars + HIVERELOADABLEJARS("hive.reloadable.aux.jars.path", "", + "Jars can be renewed by executing reload command. 
And these jars can be " + + "used as the auxiliary classes like creating a UDF or SerDe."), + // hive added files and jars HIVEADDEDFILES("hive.added.files.path", "", ""), HIVEADDEDJARS("hive.added.jars.path", "", ""), @@ -1613,7 +1618,7 @@ public class HiveConf extends Configurat HIVE_SERVER2_SSL_KEYSTORE_PATH("hive.server2.keystore.path", "", ""), HIVE_SERVER2_SSL_KEYSTORE_PASSWORD("hive.server2.keystore.password", "", ""), - HIVE_SECURITY_COMMAND_WHITELIST("hive.security.command.whitelist", "set,reset,dfs,add,list,delete,compile", + HIVE_SECURITY_COMMAND_WHITELIST("hive.security.command.whitelist", "set,reset,dfs,add,list,delete,reload,compile", "Comma separated list of non-SQL Hive commands users are authorized to execute"), HIVE_SERVER2_SESSION_CHECK_INTERVAL("hive.server2.session.check.interval", "0ms", Modified: hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/common/HCatUtil.java URL: http://svn.apache.org/viewvc/hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/common/HCatUtil.java?rev=1622892&r1=1622891&r2=1622892&view=diff ============================================================================== --- hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/common/HCatUtil.java (original) +++ hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/common/HCatUtil.java Sat Sep 6 17:20:46 2014 @@ -44,6 +44,7 @@ import org.apache.hadoop.hive.metastore. import org.apache.hadoop.hive.metastore.api.FieldSchema; import org.apache.hadoop.hive.metastore.api.MetaException; import org.apache.hadoop.hive.metastore.api.NoSuchObjectException; +import org.apache.hadoop.hive.ql.exec.Utilities; import org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat; import org.apache.hadoop.hive.ql.metadata.HiveStorageHandler; import org.apache.hadoop.hive.ql.metadata.Partition; @@ -425,7 +426,7 @@ public class HCatUtil { try { Class<? extends HiveStorageHandler> handlerClass = (Class<? 
extends HiveStorageHandler>) Class - .forName(storageHandler, true, JavaUtils.getClassLoader()); + .forName(storageHandler, true, Utilities.getSessionSpecifiedClassLoader()); return (HiveStorageHandler) ReflectionUtils.newInstance( handlerClass, conf); } catch (ClassNotFoundException e) { Modified: hive/trunk/hcatalog/webhcat/java-client/src/main/java/org/apache/hive/hcatalog/api/HCatClient.java URL: http://svn.apache.org/viewvc/hive/trunk/hcatalog/webhcat/java-client/src/main/java/org/apache/hive/hcatalog/api/HCatClient.java?rev=1622892&r1=1622891&r2=1622892&view=diff ============================================================================== --- hive/trunk/hcatalog/webhcat/java-client/src/main/java/org/apache/hive/hcatalog/api/HCatClient.java (original) +++ hive/trunk/hcatalog/webhcat/java-client/src/main/java/org/apache/hive/hcatalog/api/HCatClient.java Sat Sep 6 17:20:46 2014 @@ -22,8 +22,8 @@ import java.util.List; import java.util.Map; import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.hive.common.JavaUtils; import org.apache.hadoop.hive.metastore.api.PartitionEventType; +import org.apache.hadoop.hive.ql.exec.Utilities; import org.apache.hive.hcatalog.common.HCatException; import org.apache.hive.hcatalog.data.schema.HCatFieldSchema; @@ -49,7 +49,7 @@ public abstract class HCatClient { HCatClientHMSImpl.class.getName()); try { Class<? 
extends HCatClient> clientClass = Class.forName(className, - true, JavaUtils.getClassLoader()).asSubclass( + true, Utilities.getSessionSpecifiedClassLoader()).asSubclass( HCatClient.class); client = (HCatClient) clientClass.newInstance(); } catch (ClassNotFoundException e) { Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/DefaultFetchFormatter.java URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/DefaultFetchFormatter.java?rev=1622892&r1=1622891&r2=1622892&view=diff ============================================================================== --- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/DefaultFetchFormatter.java (original) +++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/DefaultFetchFormatter.java Sat Sep 6 17:20:46 2014 @@ -53,7 +53,7 @@ public class DefaultFetchFormatter<T> im private SerDe initializeSerde(Configuration conf, Properties props) throws Exception { String serdeName = HiveConf.getVar(conf, HiveConf.ConfVars.HIVEFETCHOUTPUTSERDE); Class<? 
extends SerDe> serdeClass = Class.forName(serdeName, true, - JavaUtils.getClassLoader()).asSubclass(SerDe.class); + Utilities.getSessionSpecifiedClassLoader()).asSubclass(SerDe.class); // cast only needed for Hadoop 0.17 compatibility SerDe serde = ReflectionUtils.newInstance(serdeClass, null); Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java?rev=1622892&r1=1622891&r2=1622892&view=diff ============================================================================== --- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java (original) +++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java Sat Sep 6 17:20:46 2014 @@ -39,7 +39,6 @@ import javax.xml.parsers.DocumentBuilder import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; -import org.apache.hadoop.hive.common.JavaUtils; import org.apache.hadoop.hive.common.type.HiveDecimal; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.metastore.api.Function; @@ -562,7 +561,7 @@ public final class FunctionRegistry { return null; } - Class<?> udfClass = Class.forName(func.getClassName(), true, JavaUtils.getClassLoader()); + Class<?> udfClass = Class.forName(func.getClassName(), true, Utilities.getSessionSpecifiedClassLoader()); if (registerTemporaryFunction(functionName, udfClass)) { ret = mFunctions.get(functionName); } else { @@ -610,7 +609,7 @@ public final class FunctionRegistry { // Even if we have a reference to the class (which will be the case for GenericUDFs), // the classloader may not be able to resolve the class, which would mean reflection-based // methods would fail such as for plan deserialization. Make sure this works too. 
- Class.forName(udfClass.getName(), true, JavaUtils.getClassLoader()); + Class.forName(udfClass.getName(), true, Utilities.getSessionSpecifiedClassLoader()); } private static void loadFunctionResourcesIfNecessary(String functionName, CommonFunctionInfo cfi) { Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionTask.java URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionTask.java?rev=1622892&r1=1622891&r2=1622892&view=diff ============================================================================== --- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionTask.java (original) +++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionTask.java Sat Sep 6 17:20:46 2014 @@ -21,7 +21,6 @@ package org.apache.hadoop.hive.ql.exec; import static org.apache.hadoop.util.StringUtils.stringifyException; import java.io.IOException; -import java.net.URI; import java.util.List; import org.apache.commons.logging.Log; @@ -33,10 +32,8 @@ import org.apache.hadoop.hive.metastore. 
import org.apache.hadoop.hive.metastore.api.PrincipalType; import org.apache.hadoop.hive.metastore.api.ResourceType; import org.apache.hadoop.hive.metastore.api.ResourceUri; -import org.apache.hadoop.hive.ql.Context; import org.apache.hadoop.hive.ql.DriverContext; import org.apache.hadoop.hive.ql.QueryPlan; -import org.apache.hadoop.hive.ql.exec.FunctionUtils.FunctionType; import org.apache.hadoop.hive.ql.exec.FunctionUtils.UDFClassType; import org.apache.hadoop.hive.ql.metadata.Hive; import org.apache.hadoop.hive.ql.metadata.HiveException; @@ -47,10 +44,6 @@ import org.apache.hadoop.hive.ql.plan.Dr import org.apache.hadoop.hive.ql.plan.FunctionWork; import org.apache.hadoop.hive.ql.plan.api.StageType; import org.apache.hadoop.hive.ql.session.SessionState; -import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFResolver; -import org.apache.hadoop.hive.ql.udf.generic.GenericUDF; -import org.apache.hadoop.hive.ql.udf.generic.GenericUDTF; -import org.apache.hadoop.util.ReflectionUtils; import org.apache.hadoop.util.StringUtils; /** @@ -308,9 +301,10 @@ public class FunctionTask extends Task<F } } - @SuppressWarnings("unchecked") private Class<?> getUdfClass(CreateFunctionDesc desc) throws ClassNotFoundException { - return Class.forName(desc.getClassName(), true, JavaUtils.getClassLoader()); + // get the session specified class loader from SessionState + ClassLoader classLoader = Utilities.getSessionSpecifiedClassLoader(); + return Class.forName(desc.getClassName(), true, classLoader); } @Override Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ListSinkOperator.java URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ListSinkOperator.java?rev=1622892&r1=1622891&r2=1622892&view=diff ============================================================================== --- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ListSinkOperator.java (original) +++ 
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ListSinkOperator.java Sat Sep 6 17:20:46 2014 @@ -57,7 +57,7 @@ public class ListSinkOperator extends Op FetchFormatter fetcher; if (formatterName != null && !formatterName.isEmpty()) { Class<? extends FetchFormatter> fetcherClass = Class.forName(formatterName, true, - JavaUtils.getClassLoader()).asSubclass(FetchFormatter.class); + Utilities.getSessionSpecifiedClassLoader()).asSubclass(FetchFormatter.class); fetcher = ReflectionUtils.newInstance(fetcherClass, null); } else { fetcher = new DefaultFetchFormatter(); Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java?rev=1622892&r1=1622891&r2=1622892&view=diff ============================================================================== --- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java (original) +++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java Sat Sep 6 17:20:46 2014 @@ -1975,6 +1975,26 @@ public final class Utilities { } /** + * Get the session-specified class loader, or fall back to the current thread's class loader if it is unavailable. + * + * @return + */ + public static ClassLoader getSessionSpecifiedClassLoader() { + SessionState state = SessionState.get(); + if (state == null || state.getConf() == null) { + LOG.debug("Hive Conf not found or Session not initiated, use thread based class loader instead"); + return JavaUtils.getClassLoader(); + } + ClassLoader sessionCL = state.getConf().getClassLoader(); + if (sessionCL != null){ + LOG.debug("Use session specified class loader"); + return sessionCL; + } + LOG.debug("Session specified class loader not found, use thread based class loader"); + return JavaUtils.getClassLoader(); + } + + /** + * Create a URL from a string representing a path to a local file.
* The path string can be just a path, or can start with file:/, file:/// * @param onestr path string @@ -1994,6 +2014,33 @@ public final class Utilities { return oneurl; } + /** + * get the jar files from specified directory or get jar files by several jar names separated by comma + * @param path + * @return + */ + public static Set<String> getJarFilesByPath(String path){ + Set<String> result = new HashSet<String>(); + if (path == null || path.isEmpty()) { + return result; + } + + File paths = new File(path); + if (paths.exists() && paths.isDirectory()) { + // add all jar files under the reloadable auxiliary jar paths + Set<File> jarFiles = new HashSet<File>(); + jarFiles.addAll(org.apache.commons.io.FileUtils.listFiles( + paths, new String[]{"jar"}, true)); + for (File f : jarFiles) { + result.add(f.getAbsolutePath()); + } + } else { + String[] files = path.split(","); + Collections.addAll(result, files); + } + return result; + } + /** * Add new elements to the classpath. * Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/HadoopJobExecHelper.java URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/HadoopJobExecHelper.java?rev=1622892&r1=1622891&r2=1622892&view=diff ============================================================================== --- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/HadoopJobExecHelper.java (original) +++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/HadoopJobExecHelper.java Sat Sep 6 17:20:46 2014 @@ -635,7 +635,7 @@ public class HadoopJobExecHelper { for (String clientStatsPublisherClass : clientStatsPublisherClasses) { try { clientStatsPublishers.add((ClientStatsPublisher) Class.forName( - clientStatsPublisherClass.trim(), true, JavaUtils.getClassLoader()).newInstance()); + clientStatsPublisherClass.trim(), true, Utilities.getSessionSpecifiedClassLoader()).newInstance()); } catch (Exception e) { LOG.warn(e.getClass().getName() + " occured when trying to 
create class: " + clientStatsPublisherClass.trim() + " implementing ClientStatsPublisher interface"); Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/hooks/HookUtils.java URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/hooks/HookUtils.java?rev=1622892&r1=1622891&r2=1622892&view=diff ============================================================================== --- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/hooks/HookUtils.java (original) +++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/hooks/HookUtils.java Sat Sep 6 17:20:46 2014 @@ -24,6 +24,7 @@ import java.util.List; import org.apache.hadoop.hive.common.JavaUtils; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.conf.HiveConf.ConfVars; +import org.apache.hadoop.hive.ql.exec.Utilities; public class HookUtils { /** @@ -57,7 +58,7 @@ public class HookUtils { String[] hookClasses = csHooks.split(","); for (String hookClass : hookClasses) { T hook = (T) Class.forName(hookClass.trim(), true, - JavaUtils.getClassLoader()).newInstance(); + Utilities.getSessionSpecifiedClassLoader()).newInstance(); hooks.add(hook); } Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/HivePassThroughOutputFormat.java URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/HivePassThroughOutputFormat.java?rev=1622892&r1=1622891&r2=1622892&view=diff ============================================================================== --- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/HivePassThroughOutputFormat.java (original) +++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/HivePassThroughOutputFormat.java Sat Sep 6 17:20:46 2014 @@ -26,6 +26,7 @@ import org.apache.hadoop.conf.Configurat import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hive.common.JavaUtils; +import org.apache.hadoop.hive.ql.exec.Utilities; import org.apache.hadoop.io.Writable; import 
org.apache.hadoop.io.WritableComparable; import org.apache.hadoop.mapred.JobConf; @@ -65,7 +66,7 @@ public class HivePassThroughOutputFormat { cls = (Class<? extends OutputFormat>) Class.forName(actualOutputFormatClass, true, - JavaUtils.getClassLoader()); + Utilities.getSessionSpecifiedClassLoader()); } else { throw new RuntimeException("Null pointer detected in actualOutputFormatClass"); } Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveUtils.java URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveUtils.java?rev=1622892&r1=1622891&r2=1622892&view=diff ============================================================================== --- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveUtils.java (original) +++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveUtils.java Sat Sep 6 17:20:46 2014 @@ -27,6 +27,7 @@ import org.apache.hadoop.conf.Configurat import org.apache.hadoop.hive.common.JavaUtils; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.metastore.api.FieldSchema; +import org.apache.hadoop.hive.ql.exec.Utilities; import org.apache.hadoop.hive.ql.index.HiveIndexHandler; import org.apache.hadoop.hive.ql.security.HadoopDefaultAuthenticator; import org.apache.hadoop.hive.ql.security.HiveAuthenticationProvider; @@ -307,7 +308,7 @@ public final class HiveUtils { try { Class<? extends HiveStorageHandler> handlerClass = (Class<? extends HiveStorageHandler>) - Class.forName(className, true, JavaUtils.getClassLoader()); + Class.forName(className, true, Utilities.getSessionSpecifiedClassLoader()); HiveStorageHandler storageHandler = ReflectionUtils.newInstance(handlerClass, conf); return storageHandler; } catch (ClassNotFoundException e) { @@ -329,7 +330,7 @@ public final class HiveUtils { try { Class<? extends HiveIndexHandler> handlerClass = (Class<? 
extends HiveIndexHandler>) - Class.forName(indexHandlerClass, true, JavaUtils.getClassLoader()); + Class.forName(indexHandlerClass, true, Utilities.getSessionSpecifiedClassLoader()); HiveIndexHandler indexHandler = ReflectionUtils.newInstance(handlerClass, conf); return indexHandler; } catch (ClassNotFoundException e) { Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/Partition.java URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/Partition.java?rev=1622892&r1=1622891&r2=1622892&view=diff ============================================================================== --- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/Partition.java (original) +++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/Partition.java Sat Sep 6 17:20:46 2014 @@ -302,7 +302,7 @@ public class Partition implements Serial } try { inputFormatClass = ((Class<? extends InputFormat>) Class.forName(clsName, true, - JavaUtils.getClassLoader())); + Utilities.getSessionSpecifiedClassLoader())); } catch (ClassNotFoundException e) { throw new HiveException("Class not found: " + clsName, e); } @@ -322,7 +322,7 @@ public class Partition implements Serial } try { Class<?> c = (Class.forName(clsName, true, - JavaUtils.getClassLoader())); + Utilities.getSessionSpecifiedClassLoader())); // Replace FileOutputFormat for backward compatibility if (!HiveOutputFormat.class.isAssignableFrom(c)) { outputFormatClass = HiveFileFormatUtils.getOutputFormatSubstitute(c,false); Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/Table.java URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/Table.java?rev=1622892&r1=1622891&r2=1622892&view=diff ============================================================================== --- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/Table.java (original) +++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/Table.java Sat Sep 6 
17:20:46 2014 @@ -48,6 +48,7 @@ import org.apache.hadoop.hive.metastore. import org.apache.hadoop.hive.metastore.api.SkewedInfo; import org.apache.hadoop.hive.metastore.api.StorageDescriptor; import org.apache.hadoop.hive.metastore.api.hive_metastoreConstants; +import org.apache.hadoop.hive.ql.exec.Utilities; import org.apache.hadoop.hive.ql.io.HiveFileFormatUtils; import org.apache.hadoop.hive.ql.io.HiveOutputFormat; import org.apache.hadoop.hive.ql.io.HivePassThroughOutputFormat; @@ -293,7 +294,7 @@ public class Table implements Serializab inputFormatClass = getStorageHandler().getInputFormatClass(); } else { inputFormatClass = (Class<? extends InputFormat>) - Class.forName(className, true, JavaUtils.getClassLoader()); + Class.forName(className, true, Utilities.getSessionSpecifiedClassLoader()); } } catch (ClassNotFoundException e) { throw new RuntimeException(e); @@ -329,7 +330,7 @@ public class Table implements Serializab } else { c = Class.forName(className, true, - JavaUtils.getClassLoader()); + Utilities.getSessionSpecifiedClassLoader()); } } if (!HiveOutputFormat.class.isAssignableFrom(c)) { @@ -677,7 +678,7 @@ public class Table implements Serializab } try { setInputFormatClass((Class<? 
extends InputFormat<WritableComparable, Writable>>) Class - .forName(name, true, JavaUtils.getClassLoader())); + .forName(name, true, Utilities.getSessionSpecifiedClassLoader())); } catch (ClassNotFoundException e) { throw new HiveException("Class not found: " + name, e); } @@ -690,7 +691,7 @@ public class Table implements Serializab return; } try { - Class<?> origin = Class.forName(name, true, JavaUtils.getClassLoader()); + Class<?> origin = Class.forName(name, true, Utilities.getSessionSpecifiedClassLoader()); setOutputFormatClass(HiveFileFormatUtils .getOutputFormatSubstitute(origin,false)); } catch (ClassNotFoundException e) { Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ConstantPropagateProcFactory.java URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ConstantPropagateProcFactory.java?rev=1622892&r1=1622891&r2=1622892&view=diff ============================================================================== --- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ConstantPropagateProcFactory.java (original) +++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ConstantPropagateProcFactory.java Sat Sep 6 17:20:46 2014 @@ -29,7 +29,6 @@ import java.util.Stack; import org.apache.commons.lang.StringUtils; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; -import org.apache.hadoop.hive.common.JavaUtils; import org.apache.hadoop.hive.ql.exec.ColumnInfo; import org.apache.hadoop.hive.ql.exec.FileSinkOperator; import org.apache.hadoop.hive.ql.exec.FilterOperator; @@ -41,6 +40,7 @@ import org.apache.hadoop.hive.ql.exec.Ro import org.apache.hadoop.hive.ql.exec.SelectOperator; import org.apache.hadoop.hive.ql.exec.TableScanOperator; import org.apache.hadoop.hive.ql.exec.UDF; +import org.apache.hadoop.hive.ql.exec.Utilities; import org.apache.hadoop.hive.ql.lib.Node; import org.apache.hadoop.hive.ql.lib.NodeProcessor; import 
org.apache.hadoop.hive.ql.lib.NodeProcessorCtx; @@ -275,7 +275,7 @@ public final class ConstantPropagateProc String udfClassName = bridge.getUdfClassName(); try { UDF udfInternal = - (UDF) Class.forName(bridge.getUdfClassName(), true, JavaUtils.getClassLoader()) + (UDF) Class.forName(bridge.getUdfClassName(), true, Utilities.getSessionSpecifiedClassLoader()) .newInstance(); files = udfInternal.getRequiredFiles(); jars = udf.getRequiredJars(); Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ImportSemanticAnalyzer.java URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ImportSemanticAnalyzer.java?rev=1622892&r1=1622891&r2=1622892&view=diff ============================================================================== --- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ImportSemanticAnalyzer.java (original) +++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ImportSemanticAnalyzer.java Sat Sep 6 17:20:46 2014 @@ -445,7 +445,7 @@ public class ImportSemanticAnalyzer exte * substitute OutputFormat name based on HiveFileFormatUtils.outputFormatSubstituteMap */ try { - Class<?> origin = Class.forName(importedofc, true, JavaUtils.getClassLoader()); + Class<?> origin = Class.forName(importedofc, true, Utilities.getSessionSpecifiedClassLoader()); Class<? 
extends HiveOutputFormat> replaced = HiveFileFormatUtils .getOutputFormatSubstitute(origin,false); if (replaced == null) { Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseUtils.java URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseUtils.java?rev=1622892&r1=1622891&r2=1622892&view=diff ============================================================================== --- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseUtils.java (original) +++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseUtils.java Sat Sep 6 17:20:46 2014 @@ -26,6 +26,7 @@ import org.apache.hadoop.hive.common.Jav import org.apache.hadoop.hive.common.type.HiveDecimal; import org.apache.hadoop.hive.metastore.api.FieldSchema; import org.apache.hadoop.hive.ql.ErrorMsg; +import org.apache.hadoop.hive.ql.exec.Utilities; import org.apache.hadoop.hive.ql.plan.ExprNodeDesc; import org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo; import org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo; @@ -221,7 +222,7 @@ public final class ParseUtils { return null; } try { - Class.forName(className, true, JavaUtils.getClassLoader()); + Class.forName(className, true, Utilities.getSessionSpecifiedClassLoader()); } catch (ClassNotFoundException e) { throw new SemanticException("Cannot find class '" + className + "'", e); } Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java?rev=1622892&r1=1622891&r2=1622892&view=diff ============================================================================== --- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java (original) +++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java Sat Sep 6 17:20:46 2014 @@ -2534,7 +2534,7 @@ public class SemanticAnalyzer extends Ba try { serdeClass = (Class<? 
extends Deserializer>) Class.forName(serdeName, - true, JavaUtils.getClassLoader()); + true, Utilities.getSessionSpecifiedClassLoader()); } catch (ClassNotFoundException e) { throw new SemanticException(e); } @@ -2723,7 +2723,7 @@ public class SemanticAnalyzer extends Ba try { serde = (Class<? extends Deserializer>) Class.forName(defaultSerdeName, - true, JavaUtils.getClassLoader()); + true, Utilities.getSessionSpecifiedClassLoader()); } catch (ClassNotFoundException e) { throw new SemanticException(e); } @@ -2790,7 +2790,7 @@ public class SemanticAnalyzer extends Ba try { return (Class<? extends RecordReader>) Class.forName(name, true, - JavaUtils.getClassLoader()); + Utilities.getSessionSpecifiedClassLoader()); } catch (ClassNotFoundException e) { throw new SemanticException(e); } @@ -2804,7 +2804,7 @@ public class SemanticAnalyzer extends Ba try { return (Class<? extends RecordReader>) Class.forName(name, true, - JavaUtils.getClassLoader()); + Utilities.getSessionSpecifiedClassLoader()); } catch (ClassNotFoundException e) { throw new SemanticException(e); } @@ -2822,7 +2822,7 @@ public class SemanticAnalyzer extends Ba try { return (Class<? 
extends RecordWriter>) Class.forName(name, true, - JavaUtils.getClassLoader()); + Utilities.getSessionSpecifiedClassLoader()); } catch (ClassNotFoundException e) { throw new SemanticException(e); } Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/AggregationDesc.java URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/AggregationDesc.java?rev=1622892&r1=1622891&r2=1622892&view=diff ============================================================================== --- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/AggregationDesc.java (original) +++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/AggregationDesc.java Sat Sep 6 17:20:46 2014 @@ -23,6 +23,7 @@ import java.io.Serializable; import org.apache.hadoop.hive.common.JavaUtils; import org.apache.hadoop.hive.ql.exec.PTFUtils; +import org.apache.hadoop.hive.ql.exec.Utilities; import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFEvaluator; import org.apache.hadoop.util.ReflectionUtils; @@ -93,7 +94,7 @@ public class AggregationDesc implements try { return genericUDAFEvaluator = ReflectionUtils.newInstance(Class.forName(genericUDAFEvaluatorClassName, true, - JavaUtils.getClassLoader()).asSubclass(GenericUDAFEvaluator.class), null); + Utilities.getSessionSpecifiedClassLoader()).asSubclass(GenericUDAFEvaluator.class), null); } catch (ClassNotFoundException e) { throw new RuntimeException(e); } Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/CreateTableDesc.java URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/CreateTableDesc.java?rev=1622892&r1=1622891&r2=1622892&view=diff ============================================================================== --- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/CreateTableDesc.java (original) +++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/CreateTableDesc.java Sat Sep 6 17:20:46 2014 @@ -419,7 +419,7 @@ public class CreateTableDesc extends DDL 
if (this.getStorageHandler() == null) { try { Class<?> origin = Class.forName(this.getOutputFormat(), true, - JavaUtils.getClassLoader()); + Utilities.getSessionSpecifiedClassLoader()); Class<? extends HiveOutputFormat> replaced = HiveFileFormatUtils .getOutputFormatSubstitute(origin,false); if (replaced == null) { Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/TableDesc.java URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/TableDesc.java?rev=1622892&r1=1622891&r2=1622892&view=diff ============================================================================== --- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/TableDesc.java (original) +++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/TableDesc.java Sat Sep 6 17:20:46 2014 @@ -26,6 +26,7 @@ import java.util.Properties; import org.apache.hadoop.hive.common.JavaUtils; import org.apache.hadoop.hive.metastore.api.hive_metastoreConstants; +import org.apache.hadoop.hive.ql.exec.Utilities; import org.apache.hadoop.hive.ql.io.HiveFileFormatUtils; import org.apache.hadoop.hive.ql.io.HiveOutputFormat; import org.apache.hadoop.hive.ql.io.HivePassThroughOutputFormat; @@ -65,7 +66,7 @@ public class TableDesc implements Serial public Class<? extends Deserializer> getDeserializerClass() { try { return (Class<? 
extends Deserializer>) Class.forName( - getSerdeClassName(), true, JavaUtils.getClassLoader()); + getSerdeClassName(), true, Utilities.getSessionSpecifiedClassLoader()); } catch (ClassNotFoundException e) { throw new RuntimeException(e); } Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/processors/CommandProcessorFactory.java URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/processors/CommandProcessorFactory.java?rev=1622892&r1=1622891&r2=1622892&view=diff ============================================================================== --- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/processors/CommandProcessorFactory.java (original) +++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/processors/CommandProcessorFactory.java Sat Sep 6 17:20:46 2014 @@ -80,6 +80,8 @@ public final class CommandProcessorFacto return new DeleteResourceProcessor(); case COMPILE: return new CompileProcessor(); + case RELOAD: + return new ReloadProcessor(); default: throw new AssertionError("Unknown HiveCommand " + hiveCommand); } Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/processors/HiveCommand.java URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/processors/HiveCommand.java?rev=1622892&r1=1622891&r2=1622892&view=diff ============================================================================== --- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/processors/HiveCommand.java (original) +++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/processors/HiveCommand.java Sat Sep 6 17:20:46 2014 @@ -31,6 +31,7 @@ public enum HiveCommand { DFS(), ADD(), LIST(), + RELOAD(), DELETE(), COMPILE(); private static final Set<String> COMMANDS = new HashSet<String>(); Added: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/processors/ReloadProcessor.java URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/processors/ReloadProcessor.java?rev=1622892&view=auto 
============================================================================== --- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/processors/ReloadProcessor.java (added) +++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/processors/ReloadProcessor.java Sat Sep 6 17:20:46 2014 @@ -0,0 +1,49 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.hadoop.hive.ql.processors; + +import java.io.IOException; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.apache.hadoop.hive.ql.CommandNeedRetryException; +import org.apache.hadoop.hive.ql.session.SessionState; + +/** + * Used for reloading auxiliary jars without restarting HiveServer2. + */ +public class ReloadProcessor implements CommandProcessor{ + private static final Log LOG = LogFactory.getLog(ReloadProcessor.class); + + @Override + public void init() { + } + + @Override + public CommandProcessorResponse run(String command) throws CommandNeedRetryException { + SessionState ss = SessionState.get(); + try { + ss.reloadAuxJars(); + } catch (IOException e) { + LOG.error("fail to reload auxiliary jar files", e); + return CommandProcessorResponse.create(e); + } + return new CommandProcessorResponse(0); + } +} Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java?rev=1622892&r1=1622891&r2=1622892&view=diff ============================================================================== --- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java (original) +++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java Sat Sep 6 17:20:46 2014 @@ -24,14 +24,8 @@ import java.io.IOException; import java.io.InputStream; import java.io.PrintStream; import java.net.URI; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.HashMap; -import java.util.HashSet; -import java.util.List; -import java.util.Map; -import java.util.Set; -import java.util.UUID; +import java.net.URLClassLoader; +import java.util.*; import org.apache.commons.io.FileUtils; import org.apache.commons.lang.StringUtils; @@ -237,6 +231,11 @@ public class SessionState { private boolean txnAutoCommit = true; /** + * store the jars loaded last
time + */ + private final Set<String> preReloadableAuxJars = new HashSet<String>(); + + /** * Get the lineage state stored in this session. * * @return LineageState @@ -830,7 +829,6 @@ public class SessionState { SessionState ss = SessionState.get(); Configuration conf = (ss == null) ? new Configuration() : ss.getConf(); - LogHelper console = getConsole(); for (String newFile : newFiles) { try { if (Utilities.realFile(newFile, conf) == null) { @@ -844,6 +842,52 @@ public class SessionState { } } + // reloading the jars under the path specified in hive.reloadable.aux.jars.path property + public void reloadAuxJars() throws IOException { + final Set<String> reloadedAuxJars = new HashSet<String>(); + + final String renewableJarPath = conf.getVar(ConfVars.HIVERELOADABLEJARS); + // do nothing if this property is not specified or empty + if (renewableJarPath == null || renewableJarPath.isEmpty()) { + return; + } + + Set<String> jarPaths = Utilities.getJarFilesByPath(renewableJarPath); + + // load jars under the hive.reloadable.aux.jars.path + if(!jarPaths.isEmpty()){ + reloadedAuxJars.addAll(jarPaths); + } + + // remove the previous renewable jars + try { + if (preReloadableAuxJars != null && !preReloadableAuxJars.isEmpty()) { + Utilities.removeFromClassPath(preReloadableAuxJars.toArray(new String[0])); + } + } catch (Exception e) { + String msg = "Fail to remove the reloaded jars loaded last time: " + e; + throw new IOException(msg, e); + } + + try { + if (reloadedAuxJars != null && !reloadedAuxJars.isEmpty()) { + URLClassLoader currentCLoader = + (URLClassLoader) SessionState.get().getConf().getClassLoader(); + currentCLoader = + (URLClassLoader) Utilities.addToClassPath(currentCLoader, + reloadedAuxJars.toArray(new String[0])); + conf.setClassLoader(currentCLoader); + Thread.currentThread().setContextClassLoader(currentCLoader); + } + preReloadableAuxJars.clear(); + preReloadableAuxJars.addAll(reloadedAuxJars); + } catch (Exception e) { + String msg = + "Fail to add 
jars from the path specified in hive.reloadable.aux.jars.path property: " + e; + throw new IOException(msg, e); + } + } + static void registerJars(List<String> newJars) throws IllegalArgumentException { LogHelper console = getConsole(); try { Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/stats/StatsFactory.java URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/stats/StatsFactory.java?rev=1622892&r1=1622891&r2=1622892&view=diff ============================================================================== --- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/stats/StatsFactory.java (original) +++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/stats/StatsFactory.java Sat Sep 6 17:20:46 2014 @@ -26,6 +26,7 @@ import org.apache.hadoop.conf.Configurat import org.apache.hadoop.hive.common.JavaUtils; import org.apache.hadoop.hive.common.StatsSetupConst.StatDB; import org.apache.hadoop.hive.conf.HiveConf; +import org.apache.hadoop.hive.ql.exec.Utilities; import org.apache.hadoop.util.ReflectionUtils; import static org.apache.hadoop.hive.conf.HiveConf.ConfVars.HIVESTATSDBCLASS; @@ -87,7 +88,7 @@ public final class StatsFactory { } private boolean initialize(String type) { - ClassLoader classLoader = JavaUtils.getClassLoader(); + ClassLoader classLoader = Utilities.getSessionSpecifiedClassLoader(); try { StatDB statDB = type.startsWith("jdbc") ? StatDB.jdbc : StatDB.valueOf(type); publisherImplementation = (Class<? 
extends Serializable>) Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFBridge.java URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFBridge.java?rev=1622892&r1=1622891&r2=1622892&view=diff ============================================================================== --- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFBridge.java (original) +++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFBridge.java Sat Sep 6 17:20:46 2014 @@ -27,6 +27,7 @@ import org.apache.hadoop.hive.common.typ import org.apache.hadoop.hive.ql.exec.FunctionRegistry; import org.apache.hadoop.hive.ql.exec.UDF; import org.apache.hadoop.hive.ql.exec.UDFArgumentException; +import org.apache.hadoop.hive.ql.exec.Utilities; import org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.hadoop.hive.ql.udf.generic.GenericUDFUtils.ConversionHelper; import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable; @@ -128,7 +129,7 @@ public class GenericUDFBridge extends Ge public Class<? extends UDF> getUdfClass() { try { - return (Class<? extends UDF>) Class.forName(udfClassName, true, JavaUtils.getClassLoader()); + return (Class<? 
extends UDF>) Class.forName(udfClassName, true, Utilities.getSessionSpecifiedClassLoader()); } catch (ClassNotFoundException e) { throw new RuntimeException(e); } @@ -138,7 +139,7 @@ public class GenericUDFBridge extends Ge public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException { try { - udf = (UDF) Class.forName(udfClassName, true, JavaUtils.getClassLoader()).newInstance(); + udf = (UDF) Class.forName(udfClassName, true, Utilities.getSessionSpecifiedClassLoader()).newInstance(); } catch (Exception e) { throw new UDFArgumentException( "Unable to instantiate UDF implementation class " + udfClassName + ": " + e); Modified: hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/TestUtilities.java URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/TestUtilities.java?rev=1622892&r1=1622891&r2=1622892&view=diff ============================================================================== --- hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/TestUtilities.java (original) +++ hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/TestUtilities.java Sat Sep 6 17:20:46 2014 @@ -20,12 +20,22 @@ package org.apache.hadoop.hive.ql.exec; import static org.apache.hadoop.hive.ql.exec.Utilities.getFileExtension; +import java.io.File; +import java.io.IOException; import java.sql.Timestamp; import java.util.ArrayList; +import java.util.HashSet; import java.util.List; +import java.util.Set; +import com.google.common.collect.Sets; +import com.google.common.io.Files; +import junit.framework.Assert; import junit.framework.TestCase; +import org.apache.commons.io.FileUtils; +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat; import org.apache.hadoop.hive.ql.metadata.HiveException; @@ -38,6 +48,7 @@ import org.apache.hadoop.hive.serde2.typ import 
org.apache.hadoop.mapred.JobConf; public class TestUtilities extends TestCase { + public static final Log LOG = LogFactory.getLog(TestUtilities.class); public void testGetFileExtension() { JobConf jc = new JobConf(); @@ -105,4 +116,28 @@ public class TestUtilities extends TestC assertEquals("Invalid table name " + tablename, ex.getMessage()); } } + + public void testGetJarFilesByPath() { + File f = Files.createTempDir(); + String jarFileName1 = f.getAbsolutePath() + File.separator + "a.jar"; + String jarFileName2 = f.getAbsolutePath() + File.separator + "b.jar"; + File jarFile = new File(jarFileName1); + try { + FileUtils.touch(jarFile); + HashSet<String> jars = (HashSet) Utilities.getJarFilesByPath(f.getAbsolutePath()); + Assert.assertEquals(Sets.newHashSet(jarFile.getAbsolutePath()),jars); + + File jarFile2 = new File(jarFileName2); + FileUtils.touch(jarFile2); + String newPath = "file://" + jarFileName1 + "," + "file://" + jarFileName2; + jars = (HashSet) Utilities.getJarFilesByPath(newPath); + + Assert.assertEquals(Sets.newHashSet("file://" + jarFileName1, "file://" + jarFileName2), jars); + } catch (IOException e) { + LOG.error("failed to copy file to reloading folder", e); + Assert.fail(e.getMessage()); + } finally { + FileUtils.deleteQuietly(f); + } + } } Modified: hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/session/TestSessionState.java URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/session/TestSessionState.java?rev=1622892&r1=1622891&r2=1622892&view=diff ============================================================================== --- hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/session/TestSessionState.java (original) +++ hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/session/TestSessionState.java Sat Sep 6 17:20:46 2014 @@ -20,18 +20,29 @@ package org.apache.hadoop.hive.ql.sessio import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNull; +import java.io.File; +import 
java.io.IOException; +import java.lang.reflect.Method; import java.util.Arrays; import java.util.Collection; +import org.apache.commons.io.FileUtils; +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.conf.HiveConf.ConfVars; import org.apache.hadoop.hive.metastore.MetaStoreUtils; +import org.apache.hive.common.util.HiveTestUtils; +import org.junit.After; +import org.junit.Assert; import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.Parameterized; import org.junit.runners.Parameterized.Parameters; +import com.google.common.io.Files; + /** * Test SessionState */ @@ -39,6 +50,14 @@ import org.junit.runners.Parameterized.P public class TestSessionState { private final boolean prewarm; + private final static String clazzDistFileName = "SessionStateTest.jar.v1"; + private final static String clazzV2FileName = "SessionStateTest.jar.v2"; + private final static String reloadClazzFileName = "reloadingClazz.jar"; + private final static String reloadClazzName = "org.apache.test.RefreshedJarClass"; + private final static String versionMethodName = "version"; + private static String hiveReloadPath; + private File reloadFolder; + public static final Log LOG = LogFactory.getLog(TestSessionState.class); public TestSessionState(Boolean mode) { this.prewarm = mode.booleanValue(); @@ -50,8 +69,20 @@ public class TestSessionState { } @Before - public void setup() { + public void setUp() { HiveConf conf = new HiveConf(); + String tmp = System.getProperty("java.io.tmpdir"); + File tmpDir = new File(tmp); + if (!tmpDir.exists()) { + tmpDir.mkdir(); + } + hiveReloadPath = Files.createTempDir().getAbsolutePath(); + // create the reloading folder to place jar files if not exist + reloadFolder = new File(hiveReloadPath); + if (!reloadFolder.exists()) { + reloadFolder.mkdir(); + } + if (prewarm) { HiveConf.setBoolVar(conf, 
ConfVars.HIVE_PREWARM_ENABLED, true); HiveConf.setIntVar(conf, ConfVars.HIVE_PREWARM_NUM_CONTAINERS, 1); @@ -59,6 +90,11 @@ public class TestSessionState { SessionState.start(conf); } + @After + public void tearDown(){ + FileUtils.deleteQuietly(reloadFolder); + } + /** * test set and get db */ @@ -129,4 +165,81 @@ public class TestSessionState { assertEquals("Other thread loader and current thread loader", otherThread.loader, Thread.currentThread().getContextClassLoader()); } + + private String getReloadedClazzVersion(ClassLoader cl) throws Exception { + Class addedClazz = Class.forName(reloadClazzName, true, cl); + Method versionMethod = addedClazz.getMethod(versionMethodName); + return (String) versionMethod.invoke(addedClazz.newInstance()); + } + + @Test + public void testReloadAuxJars2() { + HiveConf conf = new HiveConf(); + HiveConf.setVar(conf, ConfVars.HIVERELOADABLEJARS, hiveReloadPath); + SessionState ss = new SessionState(conf); + SessionState.start(ss); + + ss = SessionState.get(); + File dist = null; + try { + dist = new File(reloadFolder.getAbsolutePath() + File.separator + reloadClazzFileName); + Files.copy(new File(HiveTestUtils.getFileFromClasspath(clazzDistFileName)), dist); + ss.reloadAuxJars(); + Assert.assertEquals("version1", getReloadedClazzVersion(ss.getConf().getClassLoader())); + } catch (Exception e) { + LOG.error("Reload auxiliary jar test fail with message: ", e); + Assert.fail(e.getMessage()); + } finally { + FileUtils.deleteQuietly(dist); + try { + ss.close(); + } catch (IOException ioException) { + Assert.fail(ioException.getMessage()); + LOG.error("Fail to close the created session: ", ioException); + } + } + } + + @Test + public void testReloadExistingAuxJars2() { + HiveConf conf = new HiveConf(); + HiveConf.setVar(conf, ConfVars.HIVERELOADABLEJARS, hiveReloadPath); + + SessionState ss = new SessionState(conf); + SessionState.start(ss); + File dist = null; + + try { + ss = SessionState.get(); + + LOG.info("copy jar file 1"); + dist 
= new File(reloadFolder.getAbsolutePath() + File.separator + reloadClazzFileName); + + Files.copy(new File(HiveTestUtils.getFileFromClasspath(clazzDistFileName)), dist); + ss.reloadAuxJars(); + + Assert.assertEquals("version1", getReloadedClazzVersion(ss.getConf().getClassLoader())); + + LOG.info("copy jar file 2"); + FileUtils.deleteQuietly(dist); + Files.copy(new File(HiveTestUtils.getFileFromClasspath(clazzV2FileName)), dist); + + ss.reloadAuxJars(); + Assert.assertEquals("version2", getReloadedClazzVersion(ss.getConf().getClassLoader())); + + FileUtils.deleteQuietly(dist); + ss.reloadAuxJars(); + } catch (Exception e) { + LOG.error("refresh existing jar file case failed with message: ", e); + Assert.fail(e.getMessage()); + } finally { + FileUtils.deleteQuietly(dist); + try { + ss.close(); + } catch (IOException ioException) { + Assert.fail(ioException.getMessage()); + LOG.error("Fail to close the created session: ", ioException); + } + } + } } Added: hive/trunk/ql/src/test/resources/SessionStateTest.jar.v1 URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/resources/SessionStateTest.jar.v1?rev=1622892&view=auto ============================================================================== Binary file - no diff available. Propchange: hive/trunk/ql/src/test/resources/SessionStateTest.jar.v1 ------------------------------------------------------------------------------ svn:mime-type = application/octet-stream Added: hive/trunk/ql/src/test/resources/SessionStateTest.jar.v2 URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/resources/SessionStateTest.jar.v2?rev=1622892&view=auto ============================================================================== Binary file - no diff available. 
Propchange: hive/trunk/ql/src/test/resources/SessionStateTest.jar.v2 ------------------------------------------------------------------------------ svn:mime-type = application/octet-stream Modified: hive/trunk/service/src/java/org/apache/hive/service/cli/session/HiveSessionImpl.java URL: http://svn.apache.org/viewvc/hive/trunk/service/src/java/org/apache/hive/service/cli/session/HiveSessionImpl.java?rev=1622892&r1=1622891&r2=1622892&view=diff ============================================================================== --- hive/trunk/service/src/java/org/apache/hive/service/cli/session/HiveSessionImpl.java (original) +++ hive/trunk/service/src/java/org/apache/hive/service/cli/session/HiveSessionImpl.java Sat Sep 6 17:20:46 2014 @@ -120,6 +120,15 @@ public class HiveSessionImpl implements public void initialize(Map<String, String> sessionConfMap) throws Exception { // Process global init file: .hiverc processGlobalInitFile(); + try { + sessionState.reloadAuxJars(); + } catch (IOException e) { + String msg = "fail to load reloadable jar file path" + e; + LOG.error(msg, e); + throw new Exception(msg, e); + } + SessionState.setCurrentSessionState(sessionState); + // Set conf properties specified by user from client side if (sessionConfMap != null) { configureSession(sessionConfMap);