HIVE-13513: cleardanglingscratchdir does not work in some versions of HDFS (Daniel Dai, reviewed by Thejas Nair)
Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/b4489936
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/b4489936
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/b4489936

Branch: refs/heads/branch-2.1
Commit: b4489936e84b5f9fdc13bc9d12f152c111e779f9
Parents: 3648d35
Author: Daniel Dai <da...@hortonworks.com>
Authored: Wed May 25 15:23:57 2016 -0700
Committer: Daniel Dai <da...@hortonworks.com>
Committed: Thu May 26 09:50:44 2016 -0700

----------------------------------------------------------------------
 .../hadoop/hive/ql/session/ClearDanglingScratchDir.java | 6 ++++++
 .../org/apache/hadoop/hive/ql/session/SessionState.java | 9 ++++++---
 2 files changed, 12 insertions(+), 3 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hive/blob/b4489936/ql/src/java/org/apache/hadoop/hive/ql/session/ClearDanglingScratchDir.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/session/ClearDanglingScratchDir.java b/ql/src/java/org/apache/hadoop/hive/ql/session/ClearDanglingScratchDir.java
index ee012c2..725f954 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/session/ClearDanglingScratchDir.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/session/ClearDanglingScratchDir.java
@@ -30,6 +30,8 @@ import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hdfs.protocol.AlreadyBeingCreatedException;
+import org.apache.hadoop.hive.common.LogUtils;
+import org.apache.hadoop.hive.common.LogUtils.LogInitializationException;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.ipc.RemoteException;
@@ -52,6 +54,10 @@ import org.apache.hadoop.ipc.RemoteException;
 public class ClearDanglingScratchDir {
 
   public static void main(String[] args) throws Exception {
+    try {
+      LogUtils.initHiveLog4j();
+    } catch (LogInitializationException e) {
+    }
     Options opts = createOptions();
     CommandLine cli = new GnuParser().parse(opts, args);
 

http://git-wip-us.apache.org/repos/asf/hive/blob/b4489936/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java b/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java
index 37ef165..ce43f7d 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java
@@ -109,6 +109,7 @@ public class SessionState {
   private static final String HDFS_SESSION_PATH_KEY = "_hive.hdfs.session.path";
   private static final String TMP_TABLE_SPACE_KEY = "_hive.tmp_table_space";
   static final String LOCK_FILE_NAME = "inuse.lck";
+  static final String INFO_FILE_NAME = "inuse.info";
 
   private final Map<String, Map<String, Table>> tempTables = new HashMap<String, Map<String, Table>>();
   private final Map<String, Map<String, ColumnStatisticsObj>> tempTableColStats =
@@ -643,10 +644,12 @@ public class SessionState {
     // 5. hold a lock file in HDFS session dir to indicate the it is in use
     if (conf.getBoolVar(HiveConf.ConfVars.HIVE_SCRATCH_DIR_LOCK)) {
       FileSystem fs = hdfsSessionPath.getFileSystem(conf);
+      FSDataOutputStream hdfsSessionPathInfoFile = fs.create(new Path(hdfsSessionPath, INFO_FILE_NAME),
+          true);
+      hdfsSessionPathInfoFile.writeUTF("process: " + ManagementFactory.getRuntimeMXBean().getName()
+          +"\n");
+      hdfsSessionPathInfoFile.close();
       hdfsSessionPathLockFile = fs.create(new Path(hdfsSessionPath, LOCK_FILE_NAME), true);
-      hdfsSessionPathLockFile.writeUTF("hostname: " + InetAddress.getLocalHost().getHostName() + "\n");
-      hdfsSessionPathLockFile.writeUTF("process: " + ManagementFactory.getRuntimeMXBean().getName() + "\n");
-      hdfsSessionPathLockFile.hsync();
     }
     // 6. Local session path
     localSessionPath = new Path(HiveConf.getVar(conf, HiveConf.ConfVars.LOCALSCRATCHDIR), sessionId);
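
----------------------------------------------------------------------
For context: the SessionState change above stops writing hostname/process details into the
always-open inuse.lck file (which, per the issue title, was not readable reliably on some
HDFS versions even after hsync()) and instead writes the process information to a separate
inuse.info file that is closed immediately. The lock file now stays empty; it is the open
lease on inuse.lck, rather than its contents, that marks the scratch directory as in use.
Below is a minimal, self-contained sketch of that pattern, not the actual SessionState code;
it assumes the default FileSystem from a plain Configuration, and ScratchDirMarkerSketch and
the /tmp/hive/example_session path are hypothetical.

import java.lang.management.ManagementFactory;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class ScratchDirMarkerSketch {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    Path sessionDir = new Path("/tmp/hive/example_session");   // hypothetical session dir
    FileSystem fs = sessionDir.getFileSystem(conf);
    fs.mkdirs(sessionDir);

    // Identifying info goes into a file that is closed right away, so any reader
    // (for example a cleanup tool) can see its full contents on any HDFS version.
    FSDataOutputStream info = fs.create(new Path(sessionDir, "inuse.info"), true);
    info.writeUTF("process: " + ManagementFactory.getRuntimeMXBean().getName() + "\n");
    info.close();

    // The lock file is created and deliberately left open for the life of the session:
    // the open lease, not the file contents, is the "in use" signal.
    FSDataOutputStream lock = fs.create(new Path(sessionDir, "inuse.lck"), true);

    // ... session work happens while 'lock' stays open ...

    lock.close();   // closing (or lease expiry after process death) frees the dir for cleanup
  }
}

The AlreadyBeingCreatedException import visible in the ClearDanglingScratchDir hunk suggests
how the cleanup tool consumes this: it probes whether the lock file is still held open, and
only scratch directories whose lock is no longer in use are treated as dangling.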