Author: hashutosh
Date: Sun Jan 19 19:35:50 2014
New Revision: 1559568

URL: http://svn.apache.org/r1559568
Log:
HIVE-6228 : Use paths consistently - VII (Ashutosh Chauhan via Xuefu Zhang)
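
The HIVE-6228 series replaces String-typed directory handling in ql with org.apache.hadoop.fs.Path. As a rough illustration of the recurring pattern in the hunks below (a sketch only, not code from this commit; the names are invented):

    import org.apache.hadoop.fs.Path;

    public class PathMigrationSketch {
      public static void main(String[] args) {
        Path scratchDir = new Path("/tmp/hive/scratch");

        // Before: directories were carried around as Strings, concatenated
        // with Path.SEPARATOR, and re-parsed into a Path at every use site.
        String oldStyle = scratchDir.toString() + Path.SEPARATOR + "local-0001";
        Path reparsed = new Path(oldStyle);

        // After: compose once with the (parent, child) constructor and pass
        // the Path object end to end, converting to String only at the edges.
        Path newStyle = new Path(scratchDir, "local-0001");

        System.out.println(reparsed.equals(newStyle)); // prints: true
      }
    }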

Modified:
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/Context.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/FileSinkOperator.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/JoinOperator.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/SkewJoinHandler.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecDriver.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/MapRedTask.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/MapredLocalTask.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/merge/RCFileMergeMapper.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/truncate/ColumnTruncateMapper.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/GenMRSkewJoinProcessor.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/MapJoinResolver.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ExplainSemanticAnalyzer.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ExportSemanticAnalyzer.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ConditionalResolverSkewJoin.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/HashTableSinkDesc.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/JoinDesc.java

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/Context.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/Context.java?rev=1559568&r1=1559567&r2=1559568&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/Context.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/Context.java Sun Jan 19 19:35:50 2014
@@ -23,10 +23,8 @@ import java.io.FileNotFoundException;
 import java.io.IOException;
 import java.net.URI;
 import java.text.SimpleDateFormat;
-import java.util.ArrayList;
 import java.util.Date;
 import java.util.HashMap;
-import java.util.Iterator;
 import java.util.List;
 import java.util.Map;
 import java.util.Random;
@@ -316,40 +314,13 @@ public class Context {
       nextPathId());
   }
 
-
-  /**
-   * Given a URI for mapreduce intermediate output, swizzle the
-   * it to point to the local file system. This can be called in
-   * case the caller decides to run in local mode (in which case
-   * all intermediate data can be stored locally)
-   *
-   * @param originalURI uri to localize
-   * @return localized path for map-red intermediate data
-   */
-  public String localizeMRTmpFileURI(String originalURI) {
-    Path o = new Path(originalURI);
-    Path mrbase = getMRScratchDir();
-
-    URI relURI = mrbase.toUri().relativize(o.toUri());
-    if (relURI.equals(o.toUri())) {
-      throw new RuntimeException
-        ("Invalid URI: " + originalURI + ", cannot relativize against" +
-         mrbase.toString());
-    }
-
-    return getLocalScratchDir(!explain) + Path.SEPARATOR +
-      relURI.getPath();
-  }
-
-
   /**
    * Get a tmp path on local host to store intermediate data.
    *
    * @return next available tmp path on local fs
    */
-  public String getLocalTmpFileURI() {
-    return getLocalScratchDir(true) + Path.SEPARATOR + LOCAL_PREFIX +
-      nextPathId();
+  public Path getLocalTmpPath() {
+    return new Path(getLocalScratchDir(true), LOCAL_PREFIX + nextPathId());
   }
 
   /**
@@ -595,38 +566,6 @@ public class Context {
   }
 
   /**
-   * Given a mapping from paths to objects, localize any MR tmp paths
-   * @param map mapping from paths to objects
-   */
-  public void localizeKeys(Map<String, Object> map) {
-    for (Map.Entry<String, Object> entry: map.entrySet()) {
-      String path = entry.getKey();
-      if (isMRTmpFileURI(path)) {
-        Object val = entry.getValue();
-        map.remove(path);
-        map.put(localizeMRTmpFileURI(path), val);
-      }
-    }
-  }
-
-  /**
-   * Given a list of paths, localize any MR tmp paths contained therein
-   * @param paths list of paths to be localized
-   */
-  public void localizePaths(List<String> paths) {
-    Iterator<String> iter = paths.iterator();
-    List<String> toAdd = new ArrayList<String> ();
-    while(iter.hasNext()) {
-      String path = iter.next();
-      if (isMRTmpFileURI(path)) {
-        iter.remove();
-        toAdd.add(localizeMRTmpFileURI(path));
-      }
-    }
-    paths.addAll(toAdd);
-  }
-
-  /**
    * @return the isHDFSCleanup
    */
   public boolean isHDFSCleanup() {

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/FileSinkOperator.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/FileSinkOperator.java?rev=1559568&r1=1559567&r2=1559568&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/FileSinkOperator.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/FileSinkOperator.java Sun Jan 19 19:35:50 2014
@@ -27,7 +27,6 @@ import java.util.HashSet;
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
-import java.util.Stack;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
@@ -63,7 +62,6 @@ import org.apache.hadoop.hive.serde2.obj
 import org.apache.hadoop.hive.serde2.objectinspector.StructField;
 import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.SubStructObjectInspector;
-import org.apache.hadoop.io.BytesWritable;
 import org.apache.hadoop.io.LongWritable;
 import org.apache.hadoop.io.Writable;
 import org.apache.hadoop.mapred.JobConf;
@@ -816,10 +814,10 @@ public class FileSinkOperator extends Te
       throws HiveException {
     try {
       if ((conf != null) && isNativeTable) {
-        String specPath = conf.getDirName().toString();
+        Path specPath = conf.getDirName();
         DynamicPartitionCtx dpCtx = conf.getDynPartCtx();
         if (conf.isLinkedFileSink() && (dpCtx != null)) {
-          specPath = conf.getParentDir().toString();
+          specPath = conf.getParentDir();
         }
         Utilities.mvFileToFinalPath(specPath, hconf, success, LOG, dpCtx, conf,
           reporter);

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/JoinOperator.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/JoinOperator.java?rev=1559568&r1=1559567&r2=1559568&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/JoinOperator.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/JoinOperator.java Sun Jan 19 19:35:50 2014
@@ -20,7 +20,6 @@ package org.apache.hadoop.hive.ql.exec;
 
 import java.io.IOException;
 import java.io.Serializable;
-import java.util.ArrayList;
 import java.util.List;
 
 import org.apache.commons.logging.Log;
@@ -150,7 +149,7 @@ public class JoinOperator extends Common
     if (conf.getHandleSkewJoin()) {
       try {
         for (int i = 0; i < numAliases; i++) {
-          String specPath = conf.getBigKeysDirMap().get((byte) i);
+          Path specPath = conf.getBigKeysDirMap().get((byte) i);
           mvFileToFinalPath(specPath, hconf, success, LOG);
           for (int j = 0; j < numAliases; j++) {
             if (j == i) {
@@ -165,7 +164,7 @@ public class JoinOperator extends Common
         if (success) {
           // move up files
           for (int i = 0; i < numAliases; i++) {
-            String specPath = conf.getBigKeysDirMap().get((byte) i);
+            Path specPath = conf.getBigKeysDirMap().get((byte) i);
             moveUpFiles(specPath, hconf, LOG);
             for (int j = 0; j < numAliases; j++) {
               if (j == i) {
@@ -184,16 +183,15 @@ public class JoinOperator extends Common
     super.jobCloseOp(hconf, success);
   }
 
-  private void moveUpFiles(String specPath, Configuration hconf, Log log)
+  private void moveUpFiles(Path specPath, Configuration hconf, Log log)
       throws IOException, HiveException {
-    FileSystem fs = (new Path(specPath)).getFileSystem(hconf);
-    Path finalPath = new Path(specPath);
+    FileSystem fs = specPath.getFileSystem(hconf);
 
-    if (fs.exists(finalPath)) {
-      FileStatus[] taskOutputDirs = fs.listStatus(finalPath);
+    if (fs.exists(specPath)) {
+      FileStatus[] taskOutputDirs = fs.listStatus(specPath);
       if (taskOutputDirs != null) {
         for (FileStatus dir : taskOutputDirs) {
-          Utilities.renameOrMoveFiles(fs, dir.getPath(), finalPath);
+          Utilities.renameOrMoveFiles(fs, dir.getPath(), specPath);
           fs.delete(dir.getPath(), true);
         }
       }
@@ -210,15 +208,13 @@ public class JoinOperator extends Common
    * @throws IOException
    * @throws HiveException
    */
-  private void  mvFileToFinalPath(String specPath, Configuration hconf,
+  private void  mvFileToFinalPath(Path specPath, Configuration hconf,
       boolean success, Log log) throws IOException, HiveException {
 
-    FileSystem fs = (new Path(specPath)).getFileSystem(hconf);
+    FileSystem fs = specPath.getFileSystem(hconf);
     Path tmpPath = Utilities.toTempPath(specPath);
     Path intermediatePath = new Path(tmpPath.getParent(), tmpPath.getName()
         + ".intermediate");
-    Path finalPath = new Path(specPath);
-    ArrayList<String> emptyBuckets = null;
     if (success) {
       if (fs.exists(tmpPath)) {
         // Step1: rename tmp output folder to intermediate path. After this
@@ -229,8 +225,8 @@ public class JoinOperator extends Common
         // Step2: remove any tmp file or double-committed output files
         Utilities.removeTempOrDuplicateFiles(fs, intermediatePath);
         // Step3: move to the file destination
-        log.info("Moving tmp dir: " + intermediatePath + " to: " + finalPath);
-        Utilities.renameOrMoveFiles(fs, intermediatePath, finalPath);
+        log.info("Moving tmp dir: " + intermediatePath + " to: " + specPath);
+        Utilities.renameOrMoveFiles(fs, intermediatePath, specPath);
       }
     } else {
       fs.delete(tmpPath, true);

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/SkewJoinHandler.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/SkewJoinHandler.java?rev=1559568&r1=1559567&r2=1559568&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/SkewJoinHandler.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/SkewJoinHandler.java Sun Jan 19 19:35:50 2014
@@ -177,7 +177,7 @@ public class SkewJoinHandler {
   void endGroup() throws IOException, HiveException {
     if (skewKeyInCurrentGroup) {
 
-      String specPath = conf.getBigKeysDirMap().get((byte) currBigKeyTag);
+      Path specPath = conf.getBigKeysDirMap().get((byte) currBigKeyTag);
      RowContainer<ArrayList<Object>> bigKey = (RowContainer)joinOp.storage[currBigKeyTag];
       Path outputPath = getOperatorOutputPath(specPath);
       FileSystem destFs = outputPath.getFileSystem(hconf);
@@ -258,7 +258,7 @@ public class SkewJoinHandler {
         }
 
         try {
-          String specPath = conf.getBigKeysDirMap().get((byte) bigKeyTbl);
+          Path specPath = conf.getBigKeysDirMap().get((byte) bigKeyTbl);
           Path bigKeyPath = getOperatorOutputPath(specPath);
           FileSystem fs = bigKeyPath.getFileSystem(hconf);
           delete(bigKeyPath, fs);
@@ -295,7 +295,7 @@ public class SkewJoinHandler {
         continue;
       }
 
-      String specPath = conf.getBigKeysDirMap().get(
+      Path specPath = conf.getBigKeysDirMap().get(
           Byte.valueOf((byte) bigKeyTbl));
       commitOutputPathToFinalPath(specPath, false);
       for (int smallKeyTbl = 0; smallKeyTbl < numAliases; smallKeyTbl++) {
@@ -311,7 +311,7 @@ public class SkewJoinHandler {
     }
   }
 
-  private void commitOutputPathToFinalPath(String specPath,
+  private void commitOutputPathToFinalPath(Path specPath,
       boolean ignoreNonExisting) throws IOException {
     Path outPath = getOperatorOutputPath(specPath);
     Path finalPath = getOperatorFinalPath(specPath);
@@ -334,14 +334,12 @@ public class SkewJoinHandler {
     }
   }
 
-  private Path getOperatorOutputPath(String specPath) throws IOException {
-    Path tmpPath = Utilities.toTempPath(specPath);
-    return new Path(tmpPath, Utilities.toTempPath(taskId));
+  private Path getOperatorOutputPath(Path specPath) throws IOException {
+    return new Path(Utilities.toTempPath(specPath), Utilities.toTempPath(taskId));
   }
 
-  private Path getOperatorFinalPath(String specPath) throws IOException {
-    Path tmpPath = Utilities.toTempPath(specPath);
-    return new Path(tmpPath, taskId);
+  private Path getOperatorFinalPath(Path specPath) throws IOException {
+    return new Path(Utilities.toTempPath(specPath), taskId);
   }
 
   public void setSkewJoinJobCounter(LongWritable skewjoinFollowupJobs) {

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java?rev=1559568&r1=1559567&r2=1559568&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java Sun Jan 19 19:35:50 2014
@@ -1424,10 +1424,6 @@ public final class Utilities {
     return new Path(orig.getParent(), taskTmpPrefix + orig.getName());
   }
 
-  public static Path toTaskTempPath(String orig) {
-    return toTaskTempPath(new Path(orig));
-  }
-
   public static Path toTempPath(Path orig) {
     if (orig.getName().indexOf(tmpPrefix) == 0) {
       return orig;
@@ -1686,15 +1682,14 @@ public final class Utilities {
     }
   }
 
-  public static void mvFileToFinalPath(String specPath, Configuration hconf,
+  public static void mvFileToFinalPath(Path specPath, Configuration hconf,
       boolean success, Log log, DynamicPartitionCtx dpCtx, FileSinkDesc conf,
       Reporter reporter) throws IOException,
       HiveException {
 
-    FileSystem fs = (new Path(specPath)).getFileSystem(hconf);
+    FileSystem fs = specPath.getFileSystem(hconf);
     Path tmpPath = Utilities.toTempPath(specPath);
     Path taskTmpPath = Utilities.toTaskTempPath(specPath);
-    Path finalPath = new Path(specPath);
     if (success) {
       if (fs.exists(tmpPath)) {
         // remove any tmp file or double-committed output files
@@ -1706,8 +1701,8 @@ public final class Utilities {
         }
 
         // move to the file destination
-        log.info("Moving tmp dir: " + tmpPath + " to: " + finalPath);
-        Utilities.renameOrMoveFiles(fs, tmpPath, finalPath);
+        log.info("Moving tmp dir: " + tmpPath + " to: " + specPath);
+        Utilities.renameOrMoveFiles(fs, tmpPath, specPath);
       }
     } else {
       fs.delete(tmpPath, true);
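
The Utilities hunk above also shows the other recurring move in this commit: once specPath is a Path, the owning FileSystem is obtained from the Path itself rather than from a freshly parsed String. A minimal sketch under the same assumptions (illustrative only, not code from the commit):

    import java.io.IOException;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;

    class FsLookupSketch {
      // A Path carries its own scheme and authority, so the FileSystem that
      // owns it can be resolved directly; no new Path(specPathString) round
      // trip is needed, mirroring specPath.getFileSystem(hconf) above.
      static FileSystem resolve(Path specPath, Configuration conf) throws IOException {
        return specPath.getFileSystem(conf);
      }
    }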

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecDriver.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecDriver.java?rev=1559568&r1=1559567&r2=1559568&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecDriver.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecDriver.java Sun Jan 19 19:35:50 2014
@@ -739,7 +739,7 @@ public class ExecDriver extends Task<Map
   public static String generateCmdLine(HiveConf hconf, Context ctx)
       throws IOException {
     HiveConf tempConf = new HiveConf();
-    Path hConfFilePath = new Path(ctx.getLocalTmpFileURI(), JOBCONF_FILENAME);
+    Path hConfFilePath = new Path(ctx.getLocalTmpPath(), JOBCONF_FILENAME);
     OutputStream out = null;
 
     Properties deltaP = hconf.getChangedProperties();

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/MapRedTask.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/MapRedTask.java?rev=1559568&r1=1559567&r2=1559568&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/MapRedTask.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/MapRedTask.java Sun Jan 19 19:35:50 2014
@@ -169,7 +169,7 @@ public class MapRedTask extends ExecDriv
       String hiveConfArgs = generateCmdLine(conf, ctx);
 
       // write out the plan to a local file
-      Path planPath = new Path(ctx.getLocalTmpFileURI(), "plan.xml");
+      Path planPath = new Path(ctx.getLocalTmpPath(), "plan.xml");
       OutputStream out = FileSystem.getLocal(conf).create(planPath);
       MapredWork plan = getWork();
       LOG.info("Generating plan file " + planPath.toString());
@@ -188,7 +188,7 @@ public class MapRedTask extends ExecDriv
       if (!files.isEmpty()) {
         cmdLine = cmdLine + " -files " + files;
 
-        workDir = (new Path(ctx.getLocalTmpFileURI())).toUri().getPath();
+        workDir = ctx.getLocalTmpPath().toUri().getPath();
 
         if (! (new File(workDir)).mkdir()) {
           throw new IOException ("Cannot create tmp working dir: " + workDir);

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/MapredLocalTask.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/MapredLocalTask.java?rev=1559568&r1=1559567&r2=1559568&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/MapredLocalTask.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/MapredLocalTask.java Sun Jan 19 19:35:50 2014
@@ -136,7 +136,7 @@ public class MapredLocalTask extends Tas
       String hadoopExec = conf.getVar(HiveConf.ConfVars.HADOOPBIN);
 
       // write out the plan to a local file
-      Path planPath = new Path(ctx.getLocalTmpFileURI(), "plan.xml");
+      Path planPath = new Path(ctx.getLocalTmpPath(), "plan.xml");
       OutputStream out = FileSystem.getLocal(conf).create(planPath);
       MapredLocalWork plan = getWork();
       LOG.info("Generating plan file " + planPath.toString());
@@ -157,7 +157,7 @@ public class MapredLocalTask extends Tas
       if (!files.isEmpty()) {
         cmdLine = cmdLine + " -files " + files;
 
-        workDir = (new Path(ctx.getLocalTmpFileURI())).toUri().getPath();
+        workDir = ctx.getLocalTmpPath().toUri().getPath();
 
         if (!(new File(workDir)).mkdir()) {
           throw new IOException("Cannot create tmp working dir: " + workDir);

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/merge/RCFileMergeMapper.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/merge/RCFileMergeMapper.java?rev=1559568&r1=1559567&r2=1559568&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/merge/RCFileMergeMapper.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/merge/RCFileMergeMapper.java Sun Jan 19 19:35:50 2014
@@ -32,7 +32,6 @@ import org.apache.hadoop.hive.ql.metadat
 import org.apache.hadoop.hive.ql.plan.DynamicPartitionCtx;
 import org.apache.hadoop.hive.ql.session.SessionState.LogHelper;
 import org.apache.hadoop.hive.shims.CombineHiveKey;
-import org.apache.hadoop.hive.shims.ShimLoader;
 import org.apache.hadoop.io.Writable;
 import org.apache.hadoop.io.compress.CompressionCodec;
 import org.apache.hadoop.mapred.JobConf;
@@ -84,13 +83,12 @@ public class RCFileMergeMapper extends M
     listBucketingDepth = HiveConf.getIntVar(job,
         HiveConf.ConfVars.HIVEMERGECURRENTJOBCONCATENATELISTBUCKETINGDEPTH);
 
-    String specPath = RCFileBlockMergeOutputFormat.getMergeOutputPath(job)
-        .toString();
+    Path specPath = RCFileBlockMergeOutputFormat.getMergeOutputPath(job);
     Path tmpPath = Utilities.toTempPath(specPath);
     Path taskTmpPath = Utilities.toTaskTempPath(specPath);
     updatePaths(tmpPath, taskTmpPath);
     try {
-      fs = (new Path(specPath)).getFileSystem(job);
+      fs = specPath.getFileSystem(job);
       autoDelete = fs.deleteOnExit(outPath);
     } catch (IOException e) {
       this.exception = true;
@@ -316,7 +314,7 @@ public class RCFileMergeMapper extends M
       ) throws HiveException, IOException {
     FileSystem fs = outputPath.getFileSystem(job);
     Path backupPath = backupOutputPath(fs, outputPath, job);
-    Utilities.mvFileToFinalPath(outputPath.toUri().toString(), job, success, LOG, dynPartCtx, null,
+    Utilities.mvFileToFinalPath(outputPath, job, success, LOG, dynPartCtx, null,
       reporter);
     fs.delete(backupPath, true);
   }

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/truncate/ColumnTruncateMapper.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/truncate/ColumnTruncateMapper.java?rev=1559568&r1=1559567&r2=1559568&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/truncate/ColumnTruncateMapper.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/truncate/ColumnTruncateMapper.java Sun Jan 19 19:35:50 2014
@@ -34,7 +34,6 @@ import org.apache.hadoop.hive.ql.metadat
 import org.apache.hadoop.hive.ql.plan.DynamicPartitionCtx;
 import org.apache.hadoop.hive.ql.session.SessionState.LogHelper;
 import org.apache.hadoop.hive.shims.CombineHiveKey;
-import org.apache.hadoop.hive.shims.ShimLoader;
 import org.apache.hadoop.io.Writable;
 import org.apache.hadoop.io.compress.CompressionCodec;
 import org.apache.hadoop.mapred.JobConf;
@@ -234,7 +233,7 @@ public class ColumnTruncateMapper extend
       ) throws HiveException, IOException {
     FileSystem fs = outputPath.getFileSystem(job);
     Path backupPath = backupOutputPath(fs, outputPath, job);
-    Utilities.mvFileToFinalPath(outputPath.toUri().toString(), job, success, LOG, dynPartCtx, null,
+    Utilities.mvFileToFinalPath(outputPath, job, success, LOG, dynPartCtx, null,
       reporter);
     fs.delete(backupPath, true);
   }

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/GenMRSkewJoinProcessor.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/GenMRSkewJoinProcessor.java?rev=1559568&r1=1559567&r2=1559568&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/GenMRSkewJoinProcessor.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/GenMRSkewJoinProcessor.java Sun Jan 19 19:35:50 2014
@@ -123,21 +123,20 @@ public final class GenMRSkewJoinProcesso
     Task<? extends Serializable> child =
         children != null && children.size() == 1 ? children.get(0) : null;
 
-    String baseTmpDir = parseCtx.getContext().getMRTmpPath().toUri().toString();
+    Path baseTmpDir = parseCtx.getContext().getMRTmpPath();
 
     JoinDesc joinDescriptor = joinOp.getConf();
     Map<Byte, List<ExprNodeDesc>> joinValues = joinDescriptor.getExprs();
     int numAliases = joinValues.size();
 
-    Map<Byte, String> bigKeysDirMap = new HashMap<Byte, String>();
-    Map<Byte, Map<Byte, String>> smallKeysDirMap = new HashMap<Byte, Map<Byte, String>>();
-    Map<Byte, String> skewJoinJobResultsDir = new HashMap<Byte, String>();
+    Map<Byte, Path> bigKeysDirMap = new HashMap<Byte, Path>();
+    Map<Byte, Map<Byte, Path>> smallKeysDirMap = new HashMap<Byte, Map<Byte, Path>>();
+    Map<Byte, Path> skewJoinJobResultsDir = new HashMap<Byte, Path>();
     Byte[] tags = joinDescriptor.getTagOrder();
     for (int i = 0; i < numAliases; i++) {
       Byte alias = tags[i];
-      String bigKeysDir = getBigKeysDir(baseTmpDir, alias);
-      bigKeysDirMap.put(alias, bigKeysDir);
-      Map<Byte, String> smallKeysMap = new HashMap<Byte, String>();
+      bigKeysDirMap.put(alias, getBigKeysDir(baseTmpDir, alias));
+      Map<Byte, Path> smallKeysMap = new HashMap<Byte, Path>();
       smallKeysDirMap.put(alias, smallKeysMap);
       for (Byte src2 : tags) {
         if (!src2.equals(alias)) {
@@ -154,8 +153,8 @@ public final class GenMRSkewJoinProcesso
     joinDescriptor.setSkewKeyDefinition(HiveConf.getIntVar(parseCtx.getConf(),
         HiveConf.ConfVars.HIVESKEWJOINKEY));
 
-    HashMap<String, Task<? extends Serializable>> bigKeysDirToTaskMap =
-      new HashMap<String, Task<? extends Serializable>>();
+    HashMap<Path, Task<? extends Serializable>> bigKeysDirToTaskMap =
+      new HashMap<Path, Task<? extends Serializable>>();
     List<Serializable> listWorks = new ArrayList<Serializable>();
     List<Task<? extends Serializable>> listTasks = new ArrayList<Task<? extends Serializable>>();
     MapredWork currPlan = (MapredWork) currTask.getWork();
@@ -272,13 +271,13 @@ public final class GenMRSkewJoinProcesso
       ArrayList<String> aliases = new ArrayList<String>();
       String alias = src.toString();
       aliases.add(alias);
-      String bigKeyDirPath = bigKeysDirMap.get(src);
-      newPlan.getPathToAliases().put(bigKeyDirPath, aliases);
+      Path bigKeyDirPath = bigKeysDirMap.get(src);
+      newPlan.getPathToAliases().put(bigKeyDirPath.toString(), aliases);
 
       newPlan.getAliasToWork().put(alias, tblScan_op);
       PartitionDesc part = new PartitionDesc(tableDescList.get(src), null);
 
-      newPlan.getPathToPartitionInfo().put(bigKeyDirPath, part);
+      newPlan.getPathToPartitionInfo().put(bigKeyDirPath.toString(), part);
       newPlan.getAliasToPartnInfo().put(alias, part);
 
      Operator<? extends OperatorDesc> reducer = clonePlan.getReduceWork().getReducer();
@@ -297,7 +296,7 @@ public final class GenMRSkewJoinProcesso
       MapredLocalWork localPlan = new MapredLocalWork(
           new LinkedHashMap<String, Operator<? extends OperatorDesc>>(),
           new LinkedHashMap<String, FetchWork>());
-      Map<Byte, String> smallTblDirs = smallKeysDirMap.get(src);
+      Map<Byte, Path> smallTblDirs = smallKeysDirMap.get(src);
 
       for (int j = 0; j < numAliases; j++) {
         if (j == i) {
@@ -306,7 +305,7 @@ public final class GenMRSkewJoinProcesso
         Byte small_alias = tags[j];
         Operator<? extends OperatorDesc> tblScan_op2 = parentOps[j];
         localPlan.getAliasToWork().put(small_alias.toString(), tblScan_op2);
-        Path tblDir = new Path(smallTblDirs.get(small_alias));
+        Path tblDir = smallTblDirs.get(small_alias);
         localPlan.getAliasToFetchWork().put(small_alias.toString(),
             new FetchWork(tblDir, tableDescList.get(small_alias)));
       }
@@ -393,20 +392,19 @@ public final class GenMRSkewJoinProcesso
   private static String SMALLKEYS = "smallkeys";
   private static String RESULTS = "results";
 
-  static String getBigKeysDir(String baseDir, Byte srcTbl) {
-    return baseDir + Path.SEPARATOR + skewJoinPrefix + UNDERLINE + BIGKEYS
-        + UNDERLINE + srcTbl;
+  static Path getBigKeysDir(Path baseDir, Byte srcTbl) {
+    return new Path(baseDir, skewJoinPrefix + UNDERLINE + BIGKEYS + UNDERLINE + srcTbl);
   }
 
-  static String getBigKeysSkewJoinResultDir(String baseDir, Byte srcTbl) {
-    return baseDir + Path.SEPARATOR + skewJoinPrefix + UNDERLINE + BIGKEYS
-        + UNDERLINE + RESULTS + UNDERLINE + srcTbl;
+  static Path getBigKeysSkewJoinResultDir(Path baseDir, Byte srcTbl) {
+    return new Path(baseDir, skewJoinPrefix + UNDERLINE + BIGKEYS
+        + UNDERLINE + RESULTS + UNDERLINE + srcTbl);
   }
 
-  static String getSmallKeysDir(String baseDir, Byte srcTblBigTbl,
+  static Path getSmallKeysDir(Path baseDir, Byte srcTblBigTbl,
       Byte srcTblSmallTbl) {
-    return baseDir + Path.SEPARATOR + skewJoinPrefix + UNDERLINE + SMALLKEYS
-        + UNDERLINE + srcTblBigTbl + UNDERLINE + srcTblSmallTbl;
+    return new Path(baseDir, skewJoinPrefix + UNDERLINE + SMALLKEYS
+        + UNDERLINE + srcTblBigTbl + UNDERLINE + srcTblSmallTbl);
   }
 
 }

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/MapJoinResolver.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/MapJoinResolver.java?rev=1559568&r1=1559567&r2=1559568&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/MapJoinResolver.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/MapJoinResolver.java Sun Jan 19 19:35:50 2014
@@ -102,7 +102,7 @@ public class MapJoinResolver implements 
       if (localwork != null) {
         // get the context info and set up the shared tmp URI
         Context ctx = physicalContext.getContext();
-        Path tmpPath = Utilities.generateTmpPath(new Path(ctx.getLocalTmpFileURI()), currTask.getId());
+        Path tmpPath = Utilities.generateTmpPath(ctx.getLocalTmpPath(), currTask.getId());
         localwork.setTmpPath(tmpPath);
         mapredWork.getMapWork().setTmpHDFSPath(Utilities.generateTmpPath(
           ctx.getMRTmpPath(), currTask.getId()));
@@ -167,15 +167,15 @@ public class MapJoinResolver implements 
               // get bigKeysDirToTaskMap
               ConditionalResolverSkewJoinCtx context = (ConditionalResolverSkewJoinCtx) conditionalTask
                   .getResolverCtx();
-              HashMap<String, Task<? extends Serializable>> bigKeysDirToTaskMap = context
+              HashMap<Path, Task<? extends Serializable>> bigKeysDirToTaskMap = context
                   .getDirToTaskMap();
               // to avoid concurrent modify the hashmap
-              HashMap<String, Task<? extends Serializable>> newbigKeysDirToTaskMap = new HashMap<String, Task<? extends Serializable>>();
+              HashMap<Path, Task<? extends Serializable>> newbigKeysDirToTaskMap = new HashMap<Path, Task<? extends Serializable>>();
               // reset the resolver
-              for (Map.Entry<String, Task<? extends Serializable>> entry : bigKeysDirToTaskMap
+              for (Map.Entry<Path, Task<? extends Serializable>> entry : bigKeysDirToTaskMap
                   .entrySet()) {
                 Task<? extends Serializable> task = entry.getValue();
-                String key = entry.getKey();
+                Path key = entry.getKey();
                 if (task.equals(currTask)) {
                   newbigKeysDirToTaskMap.put(key, localTask);
                 } else {

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java?rev=1559568&r1=1559567&r2=1559568&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java Sun Jan 19 19:35:50 2014
@@ -267,51 +267,51 @@ public class DDLSemanticAnalyzer extends
       analyzeDropIndex(ast);
       break;
     case HiveParser.TOK_DESCTABLE:
-      ctx.setResFile(new Path(ctx.getLocalTmpFileURI()));
+      ctx.setResFile(ctx.getLocalTmpPath());
       analyzeDescribeTable(ast);
       break;
     case HiveParser.TOK_SHOWDATABASES:
-      ctx.setResFile(new Path(ctx.getLocalTmpFileURI()));
+      ctx.setResFile(ctx.getLocalTmpPath());
       analyzeShowDatabases(ast);
       break;
     case HiveParser.TOK_SHOWTABLES:
-      ctx.setResFile(new Path(ctx.getLocalTmpFileURI()));
+      ctx.setResFile(ctx.getLocalTmpPath());
       analyzeShowTables(ast);
       break;
     case HiveParser.TOK_SHOWCOLUMNS:
-      ctx.setResFile(new Path(ctx.getLocalTmpFileURI()));
+      ctx.setResFile(ctx.getLocalTmpPath());
       analyzeShowColumns(ast);
       break;
     case HiveParser.TOK_SHOW_TABLESTATUS:
-      ctx.setResFile(new Path(ctx.getLocalTmpFileURI()));
+      ctx.setResFile(ctx.getLocalTmpPath());
       analyzeShowTableStatus(ast);
       break;
     case HiveParser.TOK_SHOW_TBLPROPERTIES:
-      ctx.setResFile(new Path(ctx.getLocalTmpFileURI()));
+      ctx.setResFile(ctx.getLocalTmpPath());
       analyzeShowTableProperties(ast);
       break;
     case HiveParser.TOK_SHOWFUNCTIONS:
-      ctx.setResFile(new Path(ctx.getLocalTmpFileURI()));
+      ctx.setResFile(ctx.getLocalTmpPath());
       analyzeShowFunctions(ast);
       break;
     case HiveParser.TOK_SHOWLOCKS:
-      ctx.setResFile(new Path(ctx.getLocalTmpFileURI()));
+      ctx.setResFile(ctx.getLocalTmpPath());
       analyzeShowLocks(ast);
       break;
     case HiveParser.TOK_SHOWDBLOCKS:
-      ctx.setResFile(new Path(ctx.getLocalTmpFileURI()));
+      ctx.setResFile(ctx.getLocalTmpPath());
       analyzeShowDbLocks(ast);
       break;
     case HiveParser.TOK_DESCFUNCTION:
-      ctx.setResFile(new Path(ctx.getLocalTmpFileURI()));
+      ctx.setResFile(ctx.getLocalTmpPath());
       analyzeDescFunction(ast);
       break;
     case HiveParser.TOK_DESCDATABASE:
-      ctx.setResFile(new Path(ctx.getLocalTmpFileURI()));
+      ctx.setResFile(ctx.getLocalTmpPath());
       analyzeDescDatabase(ast);
       break;
     case HiveParser.TOK_MSCK:
-      ctx.setResFile(new Path(ctx.getLocalTmpFileURI()));
+      ctx.setResFile(ctx.getLocalTmpPath());
       analyzeMetastoreCheck(ast);
       break;
     case HiveParser.TOK_DROPVIEW:
@@ -381,15 +381,15 @@ public class DDLSemanticAnalyzer extends
       analyzeAlterIndexProps(ast);
       break;
     case HiveParser.TOK_SHOWPARTITIONS:
-      ctx.setResFile(new Path(ctx.getLocalTmpFileURI()));
+      ctx.setResFile(ctx.getLocalTmpPath());
       analyzeShowPartitions(ast);
       break;
     case HiveParser.TOK_SHOW_CREATETABLE:
-      ctx.setResFile(new Path(ctx.getLocalTmpFileURI()));
+      ctx.setResFile(ctx.getLocalTmpPath());
       analyzeShowCreateTable(ast);
       break;
     case HiveParser.TOK_SHOWINDEXES:
-      ctx.setResFile(new Path(ctx.getLocalTmpFileURI()));
+      ctx.setResFile(ctx.getLocalTmpPath());
       analyzeShowIndexes(ast);
       break;
     case HiveParser.TOK_LOCKTABLE:
@@ -423,11 +423,11 @@ public class DDLSemanticAnalyzer extends
       analyzeDropRole(ast);
       break;
     case HiveParser.TOK_SHOW_ROLE_GRANT:
-      ctx.setResFile(new Path(ctx.getLocalTmpFileURI()));
+      ctx.setResFile(ctx.getLocalTmpPath());
       analyzeShowRoleGrant(ast);
       break;
     case HiveParser.TOK_SHOW_ROLES:
-      ctx.setResFile(new Path(ctx.getLocalTmpFileURI()));
+      ctx.setResFile(ctx.getLocalTmpPath());
       analyzeShowRoles(ast);
       break;
     case HiveParser.TOK_GRANT_ROLE:
@@ -440,7 +440,7 @@ public class DDLSemanticAnalyzer extends
       analyzeGrant(ast);
       break;
     case HiveParser.TOK_SHOW_GRANT:
-      ctx.setResFile(new Path(ctx.getLocalTmpFileURI()));
+      ctx.setResFile(ctx.getLocalTmpPath());
       analyzeShowGrant(ast);
       break;
     case HiveParser.TOK_REVOKE:

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ExplainSemanticAnalyzer.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ExplainSemanticAnalyzer.java?rev=1559568&r1=1559567&r2=1559568&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ExplainSemanticAnalyzer.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ExplainSemanticAnalyzer.java Sun Jan 19 19:35:50 2014
@@ -22,7 +22,6 @@ import java.io.Serializable;
 import java.util.Collections;
 import java.util.List;
 
-import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.apache.hadoop.hive.ql.exec.Task;
@@ -65,7 +64,7 @@ public class ExplainSemanticAnalyzer ext
     sem.analyze(input, ctx);
     sem.validate();
 
-    ctx.setResFile(new Path(ctx.getLocalTmpFileURI()));
+    ctx.setResFile(ctx.getLocalTmpPath());
     List<Task<? extends Serializable>> tasks = sem.getRootTasks();
     Task<? extends Serializable> fetchTask = sem.getFetchTask();
     if (tasks == null) {

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ExportSemanticAnalyzer.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ExportSemanticAnalyzer.java?rev=1559568&r1=1559567&r2=1559568&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ExportSemanticAnalyzer.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ExportSemanticAnalyzer.java Sun Jan 19 19:35:50 2014
@@ -87,8 +87,7 @@ public class ExportSemanticAnalyzer exte
       if (ts.tableHandle.isPartitioned()) {
        partitions = (ts.partitions != null) ? ts.partitions : db.getPartitions(ts.tableHandle);
       }
-      String tmpfile = ctx.getLocalTmpFileURI();
-      Path path = new Path(tmpfile, "_metadata");
+      Path path = new Path(ctx.getLocalTmpPath(), "_metadata");
      EximUtil.createExportDump(FileSystem.getLocal(conf), path, ts.tableHandle, partitions);
       Task<? extends Serializable> rTask = TaskFactory.get(new CopyWork(
           path, new Path(toURI), false), conf);

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ConditionalResolverSkewJoin.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ConditionalResolverSkewJoin.java?rev=1559568&r1=1559567&r2=1559568&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ConditionalResolverSkewJoin.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ConditionalResolverSkewJoin.java Sun Jan 19 19:35:50 2014
@@ -50,7 +50,7 @@ public class ConditionalResolverSkewJoin
     // tables into corresponding different dirs (one dir per table).
     // this map stores mapping from "big key dir" to its corresponding mapjoin
     // task.
-    private HashMap<String, Task<? extends Serializable>> dirToTaskMap;
+    private HashMap<Path, Task<? extends Serializable>> dirToTaskMap;
     private Task<? extends Serializable> noSkewTask;
 
     /**
@@ -60,19 +60,19 @@ public class ConditionalResolverSkewJoin
     }
 
     public ConditionalResolverSkewJoinCtx(
-        HashMap<String, Task<? extends Serializable>> dirToTaskMap,
+        HashMap<Path, Task<? extends Serializable>> dirToTaskMap,
         Task<? extends Serializable> noSkewTask) {
       super();
       this.dirToTaskMap = dirToTaskMap;
       this.noSkewTask = noSkewTask;
     }
 
-    public HashMap<String, Task<? extends Serializable>> getDirToTaskMap() {
+    public HashMap<Path, Task<? extends Serializable>> getDirToTaskMap() {
       return dirToTaskMap;
     }
 
     public void setDirToTaskMap(
-        HashMap<String, Task<? extends Serializable>> dirToTaskMap) {
+        HashMap<Path, Task<? extends Serializable>> dirToTaskMap) {
       this.dirToTaskMap = dirToTaskMap;
     }
 
@@ -94,16 +94,14 @@ public class ConditionalResolverSkewJoin
     ConditionalResolverSkewJoinCtx ctx = (ConditionalResolverSkewJoinCtx) objCtx;
     List<Task<? extends Serializable>> resTsks = new ArrayList<Task<? extends Serializable>>();
 
-    Map<String, Task<? extends Serializable>> dirToTaskMap = ctx
+    Map<Path, Task<? extends Serializable>> dirToTaskMap = ctx
         .getDirToTaskMap();
-    Iterator<Entry<String, Task<? extends Serializable>>> bigKeysPathsIter = dirToTaskMap
+    Iterator<Entry<Path, Task<? extends Serializable>>> bigKeysPathsIter = dirToTaskMap
         .entrySet().iterator();
     try {
       while (bigKeysPathsIter.hasNext()) {
-        Entry<String, Task<? extends Serializable>> entry = bigKeysPathsIter
-            .next();
-        String path = entry.getKey();
-        Path dirPath = new Path(path);
+        Entry<Path, Task<? extends Serializable>> entry = bigKeysPathsIter.next();
+        Path dirPath = entry.getKey();
         FileSystem inpFs = dirPath.getFileSystem(conf);
         FileStatus[] fstatus = Utilities.listStatusIfExists(dirPath, inpFs);
         if (fstatus != null && fstatus.length > 0) {

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/HashTableSinkDesc.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/HashTableSinkDesc.java?rev=1559568&r1=1559567&r2=1559568&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/HashTableSinkDesc.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/HashTableSinkDesc.java Sun Jan 19 19:35:50 2014
@@ -20,15 +20,15 @@ package org.apache.hadoop.hive.ql.plan;
 
 import java.io.Serializable;
 import java.util.ArrayList;
-import java.util.Arrays;
 import java.util.HashMap;
 import java.util.Iterator;
-import java.util.LinkedHashMap;
 import java.util.List;
 import java.util.Map;
 import java.util.Map.Entry;
 import java.util.Set;
 
+import org.apache.hadoop.fs.Path;
+
 /**
  * Map Join operator Descriptor implementation.
  *
@@ -41,8 +41,8 @@ public class HashTableSinkDesc extends J
   // used to handle skew join
   private boolean handleSkewJoin = false;
   private int skewKeyDefinition = -1;
-  private Map<Byte, String> bigKeysDirMap;
-  private Map<Byte, Map<Byte, String>> smallKeysDirMap;
+  private Map<Byte, Path> bigKeysDirMap;
+  private Map<Byte, Map<Byte, Path>> smallKeysDirMap;
   private Map<Byte, TableDesc> skewKeysValuesTables;
 
   // alias to key mapping
@@ -173,22 +173,22 @@ public class HashTableSinkDesc extends J
   }
 
   @Override
-  public Map<Byte, String> getBigKeysDirMap() {
+  public Map<Byte, Path> getBigKeysDirMap() {
     return bigKeysDirMap;
   }
 
   @Override
-  public void setBigKeysDirMap(Map<Byte, String> bigKeysDirMap) {
+  public void setBigKeysDirMap(Map<Byte, Path> bigKeysDirMap) {
     this.bigKeysDirMap = bigKeysDirMap;
   }
 
   @Override
-  public Map<Byte, Map<Byte, String>> getSmallKeysDirMap() {
+  public Map<Byte, Map<Byte, Path>> getSmallKeysDirMap() {
     return smallKeysDirMap;
   }
 
   @Override
-  public void setSmallKeysDirMap(Map<Byte, Map<Byte, String>> smallKeysDirMap) {
+  public void setSmallKeysDirMap(Map<Byte, Map<Byte, Path>> smallKeysDirMap) {
     this.smallKeysDirMap = smallKeysDirMap;
   }
 

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/JoinDesc.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/JoinDesc.java?rev=1559568&r1=1559567&r2=1559568&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/JoinDesc.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/JoinDesc.java Sun Jan 19 19:35:50 2014
@@ -26,6 +26,8 @@ import java.util.LinkedHashMap;
 import java.util.List;
 import java.util.Map;
 
+import org.apache.hadoop.fs.Path;
+
 
 /**
  * Join operator Descriptor implementation.
@@ -44,8 +46,8 @@ public class JoinDesc extends AbstractOp
   // used to handle skew join
   private boolean handleSkewJoin = false;
   private int skewKeyDefinition = -1;
-  private Map<Byte, String> bigKeysDirMap;
-  private Map<Byte, Map<Byte, String>> smallKeysDirMap;
+  private Map<Byte, Path> bigKeysDirMap;
+  private Map<Byte, Map<Byte, Path>> smallKeysDirMap;
   private Map<Byte, TableDesc> skewKeysValuesTables;
 
   // alias to key mapping
@@ -128,12 +130,12 @@ public class JoinDesc extends AbstractOp
     }
 
     if (getBigKeysDirMap() != null) {
-      Map<Byte, String> cloneBigKeysDirMap = new HashMap<Byte, String>();
+      Map<Byte, Path> cloneBigKeysDirMap = new HashMap<Byte, Path>();
       cloneBigKeysDirMap.putAll(getBigKeysDirMap());
       ret.setBigKeysDirMap(cloneBigKeysDirMap);
     }
     if (getSmallKeysDirMap() != null) {
-      Map<Byte, Map<Byte, String>> cloneSmallKeysDirMap = new HashMap<Byte, Map<Byte,String>> ();
+      Map<Byte, Map<Byte, Path>> cloneSmallKeysDirMap = new HashMap<Byte, Map<Byte,Path>> ();
       cloneSmallKeysDirMap.putAll(getSmallKeysDirMap());
       ret.setSmallKeysDirMap(cloneSmallKeysDirMap);
     }
@@ -364,7 +366,7 @@ public class JoinDesc extends AbstractOp
   /**
    * @return mapping from tbl to dir for big keys.
    */
-  public Map<Byte, String> getBigKeysDirMap() {
+  public Map<Byte, Path> getBigKeysDirMap() {
     return bigKeysDirMap;
   }
 
@@ -373,14 +375,14 @@ public class JoinDesc extends AbstractOp
    *
    * @param bigKeysDirMap
    */
-  public void setBigKeysDirMap(Map<Byte, String> bigKeysDirMap) {
+  public void setBigKeysDirMap(Map<Byte, Path> bigKeysDirMap) {
     this.bigKeysDirMap = bigKeysDirMap;
   }
 
   /**
    * @return mapping from tbl to dir for small keys
    */
-  public Map<Byte, Map<Byte, String>> getSmallKeysDirMap() {
+  public Map<Byte, Map<Byte, Path>> getSmallKeysDirMap() {
     return smallKeysDirMap;
   }
 
@@ -389,7 +391,7 @@ public class JoinDesc extends AbstractOp
    *
    * @param smallKeysDirMap
    */
-  public void setSmallKeysDirMap(Map<Byte, Map<Byte, String>> smallKeysDirMap) {
+  public void setSmallKeysDirMap(Map<Byte, Map<Byte, Path>> smallKeysDirMap) {
     this.smallKeysDirMap = smallKeysDirMap;
   }
 

