Repository: hive
Updated Branches:
  refs/heads/master d2d50e694 -> 94152c997
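
The change repeated across every file below is the removal of the HiveConf argument from TaskFactory.get(...) call sites; a few replication call sites instead keep conf but drop a trailing boolean flag. A minimal sketch of the call-site pattern, with `work` standing in for any work descriptor such as DDLWork or MoveWork and `conf` for the session HiveConf (names illustrative, not from the commit):

    // Before this commit: each caller threaded a HiveConf through the factory.
    Task<?> before = TaskFactory.get(work, conf);
    // After this commit: the work object alone is enough to build the task.
    Task<?> after = TaskFactory.get(work);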


http://git-wip-us.apache.org/repos/asf/hive/blob/94152c99/ql/src/java/org/apache/hadoop/hive/ql/parse/ImportSemanticAnalyzer.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/ImportSemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/ImportSemanticAnalyzer.java
index b7fbea4..9a91e3f 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/ImportSemanticAnalyzer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/ImportSemanticAnalyzer.java
@@ -417,14 +417,14 @@ public class ImportSemanticAnalyzer extends BaseSemanticAnalyzer {
     } else {
       CopyWork cw = new CopyWork(dataPath, destPath, false);
       cw.setSkipSourceMmDirs(isSourceMm);
-      copyTask = TaskFactory.get(cw, x.getConf());
+      copyTask = TaskFactory.get(cw);
     }
 
     LoadTableDesc loadTableWork = new LoadTableDesc(
         loadPath, Utilities.getTableDesc(table), new TreeMap<>(), lft, writeId);
     loadTableWork.setStmtId(stmtId);
     MoveWork mv = new MoveWork(x.getInputs(), x.getOutputs(), loadTableWork, null, false);
-    Task<?> loadTableTask = TaskFactory.get(mv, x.getConf());
+    Task<?> loadTableTask = TaskFactory.get(mv);
     copyTask.addDependentTask(loadTableTask);
     x.getTasks().add(copyTask);
     return loadTableTask;
@@ -470,7 +470,7 @@ public class ImportSemanticAnalyzer extends BaseSemanticAnalyzer {
         x.getInputs(),
         x.getOutputs(),
         addPartitionDesc
-    ), x.getConf());
+    ));
   }
 
 private static Task<?> addSinglePartition(URI fromURI, FileSystem fs, ImportTableDesc tblDesc,
@@ -485,7 +485,7 @@ public class ImportSemanticAnalyzer extends BaseSemanticAnalyzer {
       // addPartitionDesc already has the right partition location
       @SuppressWarnings("unchecked")
       Task<?> addPartTask = TaskFactory.get(new DDLWork(x.getInputs(),
-          x.getOutputs(), addPartitionDesc), x.getConf());
+          x.getOutputs(), addPartitionDesc));
       return addPartTask;
     } else {
       String srcLocation = partSpec.getLocation();
@@ -515,11 +515,11 @@ public class ImportSemanticAnalyzer extends BaseSemanticAnalyzer {
       } else {
         CopyWork cw = new CopyWork(new Path(srcLocation), destPath, false);
         cw.setSkipSourceMmDirs(isSourceMm);
-        copyTask = TaskFactory.get(cw, x.getConf());
+        copyTask = TaskFactory.get(cw);
       }
 
       Task<?> addPartTask = TaskFactory.get(new DDLWork(x.getInputs(),
-          x.getOutputs(), addPartitionDesc), x.getConf());
+          x.getOutputs(), addPartitionDesc));
       // Note: this sets LoadFileType incorrectly for ACID; is that relevant for import?
       //       See setLoadFileType and setIsAcidIow calls elsewhere for an example.
       LoadTableDesc loadTableWork = new LoadTableDesc(moveTaskSrc, Utilities.getTableDesc(table),
@@ -529,7 +529,7 @@ public class ImportSemanticAnalyzer extends BaseSemanticAnalyzer {
       loadTableWork.setStmtId(stmtId);
       loadTableWork.setInheritTableSpecs(false);
       Task<?> loadPartTask = TaskFactory.get(new MoveWork(
-          x.getInputs(), x.getOutputs(), loadTableWork, null, false), x.getConf(), true);
+          x.getInputs(), x.getOutputs(), loadTableWork, null, false));
       copyTask.addDependentTask(loadPartTask);
       addPartTask.addDependentTask(loadPartTask);
       x.getTasks().add(copyTask);
@@ -831,7 +831,7 @@ public class ImportSemanticAnalyzer extends BaseSemanticAnalyzer {
       if (table.isPartitioned()) {
         x.getLOG().debug("table partitioned");
         Task<?> ict = createImportCommitTask(
-            table.getDbName(), table.getTableName(), writeId, stmtId, x.getConf(),
+            table.getDbName(), table.getTableName(), writeId, stmtId,
             AcidUtils.isInsertOnlyTable(table.getParameters()));
 
         for (AddPartitionDesc addPartitionDesc : partitionDescs) {
@@ -868,7 +868,7 @@ public class ImportSemanticAnalyzer extends BaseSemanticAnalyzer {
 
       if (isPartitioned(tblDesc)) {
         Task<?> ict = createImportCommitTask(
-            tblDesc.getDatabaseName(), tblDesc.getTableName(), writeId, stmtId, x.getConf(),
+            tblDesc.getDatabaseName(), tblDesc.getTableName(), writeId, stmtId,
             AcidUtils.isInsertOnlyTable(tblDesc.getTblProps()));
         for (AddPartitionDesc addPartitionDesc : partitionDescs) {
          t.addDependentTask(addSinglePartition(fromURI, fs, tblDesc, table, wh, addPartitionDesc,
@@ -903,11 +903,10 @@ public class ImportSemanticAnalyzer extends BaseSemanticAnalyzer {
   }
 
   private static Task<?> createImportCommitTask(
-      String dbName, String tblName, Long writeId, int stmtId, HiveConf conf, boolean isMmTable) {
+      String dbName, String tblName, Long writeId, int stmtId, boolean isMmTable) {
     // TODO: noop, remove?
-    @SuppressWarnings("unchecked")
     Task<ImportCommitWork> ict = (!isMmTable) ? null : TaskFactory.get(
-        new ImportCommitWork(dbName, tblName, writeId, stmtId), conf);
+        new ImportCommitWork(dbName, tblName, writeId, stmtId));
     return ict;
   }
 
@@ -996,7 +995,7 @@ public class ImportSemanticAnalyzer extends BaseSemanticAnalyzer {
       if (!replicationSpec.isMetadataOnly()) {
         if (isPartitioned(tblDesc)) {
           Task<?> ict = createImportCommitTask(
-              tblDesc.getDatabaseName(), tblDesc.getTableName(), writeId, stmtId, x.getConf(),
+              tblDesc.getDatabaseName(), tblDesc.getTableName(), writeId, stmtId,
               AcidUtils.isInsertOnlyTable(tblDesc.getTblProps()));
           for (AddPartitionDesc addPartitionDesc : partitionDescs) {
             addPartitionDesc.setReplicationSpec(replicationSpec);
@@ -1022,7 +1021,7 @@ public class ImportSemanticAnalyzer extends BaseSemanticAnalyzer {
          Map<String, String> partSpec = addPartitionDesc.getPartition(0).getPartSpec();
           org.apache.hadoop.hive.ql.metadata.Partition ptn = null;
          Task<?> ict = replicationSpec.isMetadataOnly() ? null : createImportCommitTask(
-              tblDesc.getDatabaseName(), tblDesc.getTableName(), writeId, stmtId, x.getConf(),
+              tblDesc.getDatabaseName(), tblDesc.getTableName(), writeId, stmtId,
               AcidUtils.isInsertOnlyTable(tblDesc.getTblProps()));
          if ((ptn = x.getHive().getPartition(table, partSpec, false)) == null) {
             if (!replicationSpec.isMetadataOnly()){

http://git-wip-us.apache.org/repos/asf/hive/blob/94152c99/ql/src/java/org/apache/hadoop/hive/ql/parse/LoadSemanticAnalyzer.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/LoadSemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/LoadSemanticAnalyzer.java
index fb3bfda..d5aace0 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/LoadSemanticAnalyzer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/LoadSemanticAnalyzer.java
@@ -344,7 +344,7 @@ public class LoadSemanticAnalyzer extends BaseSemanticAnalyzer {
 
     Task<? extends Serializable> childTask = TaskFactory.get(
         new MoveWork(getInputs(), getOutputs(), loadTableWork, null, true,
-            isLocal), conf
+            isLocal)
     );
     if (rTask != null) {
       rTask.addDependentTask(childTask);
@@ -364,7 +364,7 @@ public class LoadSemanticAnalyzer extends BaseSemanticAnalyzer {
       basicStatsWork.setNoStatsAggregator(true);
       basicStatsWork.setClearAggregatorStats(true);
      StatsWork columnStatsWork = new StatsWork(ts.tableHandle, basicStatsWork, conf);
-      statTask = TaskFactory.get(columnStatsWork, conf);
+      statTask = TaskFactory.get(columnStatsWork);
     }
 
     if (statTask != null) {

http://git-wip-us.apache.org/repos/asf/hive/blob/94152c99/ql/src/java/org/apache/hadoop/hive/ql/parse/MacroSemanticAnalyzer.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/MacroSemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/MacroSemanticAnalyzer.java
index a4c32f4..762e438 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/MacroSemanticAnalyzer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/MacroSemanticAnalyzer.java
@@ -140,7 +140,7 @@ public class MacroSemanticAnalyzer extends BaseSemanticAnalyzer {
         body = sa.genExprNodeDesc((ASTNode)ast.getChild(2), rowResolver);
     }
    CreateMacroDesc desc = new CreateMacroDesc(functionName, macroColNames, macroColTypes, body);
-    rootTasks.add(TaskFactory.get(new FunctionWork(desc), conf));
+    rootTasks.add(TaskFactory.get(new FunctionWork(desc)));
 
     addEntities();
   }
@@ -164,7 +164,7 @@ public class MacroSemanticAnalyzer extends BaseSemanticAnalyzer {
     }
 
     DropMacroDesc desc = new DropMacroDesc(functionName);
-    rootTasks.add(TaskFactory.get(new FunctionWork(desc), conf));
+    rootTasks.add(TaskFactory.get(new FunctionWork(desc)));
 
     addEntities();
   }

http://git-wip-us.apache.org/repos/asf/hive/blob/94152c99/ql/src/java/org/apache/hadoop/hive/ql/parse/ProcessAnalyzeTable.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/ProcessAnalyzeTable.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/ProcessAnalyzeTable.java
index 6117034..5961059 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/ProcessAnalyzeTable.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/ProcessAnalyzeTable.java
@@ -103,7 +103,7 @@ public class ProcessAnalyzeTable implements NodeProcessor {
          PrunedPartitionList partList = new PrunedPartitionList(table, confirmedParts, partCols, false);
           statWork.addInputPartitions(partList.getPartitions());
         }
-        Task<StatsWork> snjTask = TaskFactory.get(statWork, parseContext.getConf());
+        Task<StatsWork> snjTask = TaskFactory.get(statWork);
         snjTask.setParentTasks(null);
         context.rootTasks.remove(context.currentTask);
         context.rootTasks.add(snjTask);
@@ -120,7 +120,7 @@ public class ProcessAnalyzeTable implements NodeProcessor {
         columnStatsWork.collectStatsFromAggregator(tableScan.getConf());
 
         columnStatsWork.setSourceTask(context.currentTask);
-        Task<StatsWork> statsTask = TaskFactory.get(columnStatsWork, parseContext.getConf());
+        Task<StatsWork> statsTask = TaskFactory.get(columnStatsWork);
         context.currentTask.addDependentTask(statsTask);
 
         // ANALYZE TABLE T [PARTITION (...)] COMPUTE STATISTICS noscan;

http://git-wip-us.apache.org/repos/asf/hive/blob/94152c99/ql/src/java/org/apache/hadoop/hive/ql/parse/ReplicationSemanticAnalyzer.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/ReplicationSemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/ReplicationSemanticAnalyzer.java
index 796ab0d..753f039 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/ReplicationSemanticAnalyzer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/ReplicationSemanticAnalyzer.java
@@ -185,7 +185,7 @@ public class ReplicationSemanticAnalyzer extends BaseSemanticAnalyzer {
               ErrorMsg.INVALID_PATH.getMsg(ast),
               maxEventLimit,
               ctx.getResFile().toUri().toString()
-          ), conf, true);
+      ), conf);
       rootTasks.add(replDumpWorkTask);
       if (dbNameOrPattern != null) {
         for (String dbName : Utils.matchesDb(db, dbNameOrPattern)) {
@@ -323,7 +323,7 @@ public class ReplicationSemanticAnalyzer extends BaseSemanticAnalyzer {
         ReplLoadWork replLoadWork =
            new ReplLoadWork(conf, loadPath.toString(), dbNameOrPattern, tblNameOrPattern,
                queryState.getLineageState(), SessionState.get().getTxnMgr().getCurrentTxnId());
-        rootTasks.add(TaskFactory.get(replLoadWork, conf, true));
+        rootTasks.add(TaskFactory.get(replLoadWork, conf));
         return;
       }
 
@@ -354,7 +354,7 @@ public class ReplicationSemanticAnalyzer extends BaseSemanticAnalyzer {
 
        ReplLoadWork replLoadWork = new ReplLoadWork(conf, loadPath.toString(), dbNameOrPattern,
            queryState.getLineageState(), SessionState.get().getTxnMgr().getCurrentTxnId());
-        rootTasks.add(TaskFactory.get(replLoadWork, conf, true));
+        rootTasks.add(TaskFactory.get(replLoadWork, conf));
         //
         //        for (FileStatus dir : dirsInLoadPath) {
         //          analyzeDatabaseLoad(dbNameOrPattern, fs, dir);
@@ -364,7 +364,7 @@ public class ReplicationSemanticAnalyzer extends BaseSemanticAnalyzer {
        // We need to guarantee that the directory listing we got is in order of evid.
         Arrays.sort(dirsInLoadPath, new EventDumpDirComparator());
 
-        Task<? extends Serializable> evTaskRoot = TaskFactory.get(new DependencyCollectionWork(), conf);
+        Task<? extends Serializable> evTaskRoot = TaskFactory.get(new DependencyCollectionWork());
         Task<? extends Serializable> taskChainTail = evTaskRoot;
 
         ReplLogger replLogger = new IncrementalLoadLogger(dbNameOrPattern,
@@ -403,7 +403,7 @@ public class ReplicationSemanticAnalyzer extends BaseSemanticAnalyzer {
            ReplStateLogWork replStateLogWork = new ReplStateLogWork(replLogger,
                                                          dir.getPath().getName(),
                                                          eventDmd.getDumpType().toString());
-            Task<? extends Serializable> barrierTask = TaskFactory.get(replStateLogWork, conf);
+            Task<? extends Serializable> barrierTask = TaskFactory.get(replStateLogWork);
             for (Task<? extends Serializable> t : evTasks){
               t.addDependentTask(barrierTask);
               LOG.debug("Added {}:{} as a precursor of barrier task {}:{}",
@@ -420,7 +420,7 @@ public class ReplicationSemanticAnalyzer extends BaseSemanticAnalyzer {
           Map<String, String> dbProps = new HashMap<>();
          dbProps.put(ReplicationSpec.KEY.CURR_STATE_ID.toString(), String.valueOf(dmd.getEventTo()));
          ReplStateLogWork replStateLogWork = new ReplStateLogWork(replLogger, dbProps);
-          Task<? extends Serializable> barrierTask = TaskFactory.get(replStateLogWork, conf);
+          Task<? extends Serializable> barrierTask = TaskFactory.get(replStateLogWork);
           taskChainTail.addDependentTask(barrierTask);
           LOG.debug("Added {}:{} as a precursor of barrier task {}:{}",
                   taskChainTail.getClass(), taskChainTail.getId(),
@@ -473,7 +473,7 @@ public class ReplicationSemanticAnalyzer extends BaseSemanticAnalyzer {
     alterTblDesc.setPartSpec((HashMap<String, String>)partSpec);
 
     Task<? extends Serializable> updateReplIdTask = TaskFactory.get(
-                      new DDLWork(inputs, outputs, alterTblDesc), conf, true);
+        new DDLWork(inputs, outputs, alterTblDesc), conf);
 
     // Link the update repl state task with dependency collection task
     if (preCursor != null) {
@@ -495,7 +495,7 @@ public class ReplicationSemanticAnalyzer extends BaseSemanticAnalyzer {
     AlterDatabaseDesc alterDbDesc = new AlterDatabaseDesc(
                            dbName, mapProp, new ReplicationSpec(replState, replState));
     Task<? extends Serializable> updateReplIdTask = TaskFactory.get(
-                            new DDLWork(inputs, outputs, alterDbDesc), conf, true);
+        new DDLWork(inputs, outputs, alterDbDesc), conf);
 
     // Link the update repl state task with dependency collection task
     if (preCursor != null) {
@@ -523,7 +523,7 @@ public class ReplicationSemanticAnalyzer extends BaseSemanticAnalyzer {
     }
 
     // Create a barrier task for dependency collection of import tasks
-    Task<? extends Serializable> barrierTask = TaskFactory.get(new DependencyCollectionWork(), conf);
+    Task<? extends Serializable> barrierTask = TaskFactory.get(new DependencyCollectionWork());
 
    // Link import tasks to the barrier task which will in-turn linked with repl state update tasks
     for (Task<? extends Serializable> t : importTasks){

http://git-wip-us.apache.org/repos/asf/hive/blob/94152c99/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
index 2342fff..aa2d060 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
@@ -6701,7 +6701,7 @@ public class SemanticAnalyzer extends BaseSemanticAnalyzer {
     alterTblDesc.setOldName(tableName);
     alterTblDesc.setProps(mapProp);
     alterTblDesc.setDropIfExists(true);
-    this.rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), alterTblDesc), conf));
+    this.rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), alterTblDesc)));
   }
 
  private ImmutableBitSet getEnabledNotNullConstraints(Table tbl) throws HiveException{
@@ -7567,7 +7567,7 @@ public class SemanticAnalyzer extends BaseSemanticAnalyzer {
    PreInsertTableDesc preInsertTableDesc = new PreInsertTableDesc(table, overwrite);
     InsertTableDesc insertTableDesc = new InsertTableDesc(table, overwrite);
     this.rootTasks
-        .add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), preInsertTableDesc), conf));
+        .add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), preInsertTableDesc)));
     TaskFactory
        .getAndMakeChild(new DDLWork(getInputs(), getOutputs(), insertTableDesc), conf, tasks);
   }
@@ -12746,7 +12746,7 @@ public class SemanticAnalyzer extends BaseSemanticAnalyzer {
       // outputs is empty, which means this create table happens in the current
       // database.
       rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(),
-          crtTblDesc), conf));
+            crtTblDesc)));
       break;
 
     case CTLT: // create table like <tbl_name>
@@ -12766,7 +12766,7 @@ public class SemanticAnalyzer extends BaseSemanticAnalyzer {
          storageFormat.getSerde(), storageFormat.getSerdeProps(), tblProps, ifNotExists,
           likeTableName, isUserStorageFormat);
       rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(),
-          crtTblLikeDesc), conf));
+            crtTblLikeDesc)));
       break;
 
     case CTAS: // create table as select
@@ -12970,7 +12970,7 @@ public class SemanticAnalyzer extends BaseSemanticAnalyzer {
          ifNotExists, orReplace, isAlterViewAs, storageFormat.getInputFormat(),
           storageFormat.getOutputFormat(), storageFormat.getSerde());
       rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(),
-          createVwDesc), conf));
+          createVwDesc)));
       addDbAndTabToOutputs(qualTabName, TableType.VIRTUAL_VIEW, tblProps);
       queryState.setCommandType(HiveOperation.CREATEVIEW);
     }
@@ -14207,7 +14207,7 @@ public class SemanticAnalyzer extends BaseSemanticAnalyzer {
    */
   private void useCachedResult(QueryResultsCache.CacheEntry cacheEntry) {
     // Change query FetchTask to use new location specified in results cache.
-    FetchTask fetchTask = (FetchTask) TaskFactory.get(cacheEntry.getFetchWork(), conf);
+    FetchTask fetchTask = (FetchTask) TaskFactory.get(cacheEntry.getFetchWork());
     setFetchTask(fetchTask);
 
     queryState.setCommandType(cacheEntry.getQueryInfo().getHiveOperation());

http://git-wip-us.apache.org/repos/asf/hive/blob/94152c99/ql/src/java/org/apache/hadoop/hive/ql/parse/TaskCompiler.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/TaskCompiler.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/TaskCompiler.java
index 5e94bb7..c268ddc 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/TaskCompiler.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/TaskCompiler.java
@@ -198,7 +198,7 @@ public abstract class TaskCompiler {
           fetch.setIsUsingThriftJDBCBinarySerDe(false);
       }
 
-      pCtx.setFetchTask((FetchTask) TaskFactory.get(fetch, conf));
+      pCtx.setFetchTask((FetchTask) TaskFactory.get(fetch));
 
      // For the FetchTask, the limit optimization requires we fetch all the rows
       // in memory and count how many rows we get. It's not practical if the
@@ -219,8 +219,7 @@ public abstract class TaskCompiler {
     } else if (!isCStats) {
       for (LoadTableDesc ltd : loadTableWork) {
         Task<MoveWork> tsk = TaskFactory
-            .get(new MoveWork(null, null, ltd, null, false),
-                conf);
+            .get(new MoveWork(null, null, ltd, null, false));
         mvTask.add(tsk);
       }
 
@@ -235,8 +234,7 @@ public abstract class TaskCompiler {
           oneLoadFileForCtas = false;
         }
         mvTask.add(TaskFactory
-            .get(new MoveWork(null, null, null, lfd, false),
-                conf));
+            .get(new MoveWork(null, null, null, lfd, false)));
       }
     }
 
@@ -326,13 +324,13 @@ public abstract class TaskCompiler {
       CreateTableDesc crtTblDesc = pCtx.getCreateTable();
       crtTblDesc.validate(conf);
       Task<? extends Serializable> crtTblTask = TaskFactory.get(new DDLWork(
-          inputs, outputs, crtTblDesc), conf);
+          inputs, outputs, crtTblDesc));
       patchUpAfterCTASorMaterializedView(rootTasks, outputs, crtTblTask);
     } else if (pCtx.getQueryProperties().isMaterializedView()) {
       // generate a DDL task and make it a dependent task of the leaf
       CreateViewDesc viewDesc = pCtx.getCreateViewDesc();
       Task<? extends Serializable> crtViewTask = TaskFactory.get(new DDLWork(
-          inputs, outputs, viewDesc), conf);
+          inputs, outputs, viewDesc));
       patchUpAfterCTASorMaterializedView(rootTasks, outputs, crtViewTask);
     } else if (pCtx.getMaterializedViewUpdateDesc() != null) {
      // If there is a materialized view update desc, we create introduce it at the end
@@ -391,13 +389,13 @@ public abstract class TaskCompiler {
      if (partitions.size() > 0) {
        columnStatsWork.addInputPartitions(parseContext.getPrunedPartitions(tableScan).getPartitions());
       }
-      return TaskFactory.get(columnStatsWork, parseContext.getConf());
+      return TaskFactory.get(columnStatsWork);
     } else {
      BasicStatsWork statsWork = new BasicStatsWork(tableScan.getConf().getTableMetadata().getTableSpec());
      StatsWork columnStatsWork = new StatsWork(table, statsWork, parseContext.getConf());
       columnStatsWork.collectStatsFromAggregator(tableScan.getConf());
       columnStatsWork.setSourceTask(currentTask);
-      return TaskFactory.get(columnStatsWork, parseContext.getConf());
+      return TaskFactory.get(columnStatsWork);
     }
   }
 

http://git-wip-us.apache.org/repos/asf/hive/blob/94152c99/ql/src/java/org/apache/hadoop/hive/ql/parse/authorization/HiveAuthorizationTaskFactoryImpl.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/authorization/HiveAuthorizationTaskFactoryImpl.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/authorization/HiveAuthorizationTaskFactoryImpl.java
index 4cc57e5..18ed6fb 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/authorization/HiveAuthorizationTaskFactoryImpl.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/authorization/HiveAuthorizationTaskFactoryImpl.java
@@ -57,15 +57,12 @@ import org.apache.hadoop.hive.ql.session.SessionState;
 /**
  * Default implementation of HiveAuthorizationTaskFactory
  */
-@SuppressWarnings("unchecked")
 public class HiveAuthorizationTaskFactoryImpl implements HiveAuthorizationTaskFactory {
 
-  private final HiveConf conf;
   // Assumes one instance of this + single-threaded compilation for each query.
   private final Hive db;
 
-  public HiveAuthorizationTaskFactoryImpl(HiveConf conf, Hive db) {
-    this.conf = conf;
+    public HiveAuthorizationTaskFactoryImpl(HiveConf conf, Hive db) {
     this.db = db;
   }
 
@@ -74,14 +71,14 @@ public class HiveAuthorizationTaskFactoryImpl implements HiveAuthorizationTaskFa
       HashSet<WriteEntity> outputs) {
    String roleName = BaseSemanticAnalyzer.unescapeIdentifier(ast.getChild(0).getText());
    RoleDDLDesc roleDesc = new RoleDDLDesc(roleName, PrincipalType.ROLE, RoleDDLDesc.RoleOperation.CREATE_ROLE, null);
-    return TaskFactory.get(new DDLWork(inputs, outputs, roleDesc), conf);
+    return TaskFactory.get(new DDLWork(inputs, outputs, roleDesc));
   }
   @Override
  public Task<? extends Serializable> createDropRoleTask(ASTNode ast, HashSet<ReadEntity> inputs,
      HashSet<WriteEntity> outputs) {
    String roleName = BaseSemanticAnalyzer.unescapeIdentifier(ast.getChild(0).getText());
    RoleDDLDesc roleDesc = new RoleDDLDesc(roleName, PrincipalType.ROLE, RoleDDLDesc.RoleOperation.DROP_ROLE, null);
-    return TaskFactory.get(new DDLWork(inputs, outputs, roleDesc), conf);
+    return TaskFactory.get(new DDLWork(inputs, outputs, roleDesc));
   }
   @Override
  public Task<? extends Serializable> createShowRoleGrantTask(ASTNode ast, Path resultFile,
@@ -103,7 +100,7 @@ public class HiveAuthorizationTaskFactoryImpl implements HiveAuthorizationTaskFa
     RoleDDLDesc roleDesc = new RoleDDLDesc(principalName, principalType,
         RoleDDLDesc.RoleOperation.SHOW_ROLE_GRANT, null);
     roleDesc.setResFile(resultFile.toString());
-    return TaskFactory.get(new DDLWork(inputs, outputs,  roleDesc), conf);
+    return TaskFactory.get(new DDLWork(inputs, outputs, roleDesc));
   }
   @Override
  public Task<? extends Serializable> createGrantTask(ASTNode ast, HashSet<ReadEntity> inputs,
@@ -130,7 +127,7 @@ public class HiveAuthorizationTaskFactoryImpl implements HiveAuthorizationTaskFa
 
     GrantDesc grantDesc = new GrantDesc(privilegeObj, privilegeDesc,
         principalDesc, userName, PrincipalType.USER, grantOption);
-    return TaskFactory.get(new DDLWork(inputs, outputs, grantDesc), conf);
+    return TaskFactory.get(new DDLWork(inputs, outputs, grantDesc));
   }
 
   @Override
@@ -149,7 +146,7 @@ public class HiveAuthorizationTaskFactoryImpl implements HiveAuthorizationTaskFa
       }
     }
    RevokeDesc revokeDesc = new RevokeDesc(privilegeDesc, principalDesc, hiveObj, grantOption);
-    return TaskFactory.get(new DDLWork(inputs, outputs, revokeDesc), conf);
+    return TaskFactory.get(new DDLWork(inputs, outputs, revokeDesc));
   }
   @Override
  public Task<? extends Serializable> createGrantRoleTask(ASTNode ast, HashSet<ReadEntity> inputs,
@@ -182,7 +179,7 @@ public class HiveAuthorizationTaskFactoryImpl implements HiveAuthorizationTaskFa
 
     ShowGrantDesc showGrant = new ShowGrantDesc(resultFile.toString(),
         principalDesc, privHiveObj);
-    return TaskFactory.get(new DDLWork(inputs, outputs, showGrant), conf);
+    return TaskFactory.get(new DDLWork(inputs, outputs, showGrant));
   }
 
   @Override
@@ -217,7 +214,7 @@ public class HiveAuthorizationTaskFactoryImpl implements HiveAuthorizationTaskFa
 
     GrantRevokeRoleDDL grantRevokeRoleDDL = new GrantRevokeRoleDDL(isGrant,
         roles, principalDesc, roleOwnerName, PrincipalType.USER, isAdmin);
-    return TaskFactory.get(new DDLWork(inputs, outputs, grantRevokeRoleDDL), 
conf);
+    return TaskFactory.get(new DDLWork(inputs, outputs, grantRevokeRoleDDL));
   }
 
   private PrivilegeObjectDesc analyzePrivilegeObject(ASTNode ast,
@@ -333,7 +330,7 @@ public class HiveAuthorizationTaskFactoryImpl implements HiveAuthorizationTaskFa
       HashSet<ReadEntity> inputs, HashSet<WriteEntity> outputs)
       throws SemanticException {
    return TaskFactory.get(new DDLWork(inputs, outputs, new RoleDDLDesc(roleName, PrincipalType.ROLE,
-      RoleDDLDesc.RoleOperation.SET_ROLE, null)), conf);
+        RoleDDLDesc.RoleOperation.SET_ROLE, null)));
   }
 
   @Override
@@ -342,7 +339,7 @@ public class HiveAuthorizationTaskFactoryImpl implements HiveAuthorizationTaskFa
       throws SemanticException {
    RoleDDLDesc ddlDesc = new RoleDDLDesc(null, RoleDDLDesc.RoleOperation.SHOW_CURRENT_ROLE);
     ddlDesc.setResFile(resFile.toString());
-    return TaskFactory.get(new DDLWork(inputs, outputs, ddlDesc), conf);
+    return TaskFactory.get(new DDLWork(inputs, outputs, ddlDesc));
   }
 
   @Override
@@ -360,7 +357,7 @@ public class HiveAuthorizationTaskFactoryImpl implements HiveAuthorizationTaskFa
     RoleDDLDesc roleDDLDesc = new RoleDDLDesc(roleName, PrincipalType.ROLE,
      RoleOperation.SHOW_ROLE_PRINCIPALS, null);
     roleDDLDesc.setResFile(resFile.toString());
-    return TaskFactory.get(new DDLWork(inputs, outputs, roleDDLDesc), conf);
+    return TaskFactory.get(new DDLWork(inputs, outputs, roleDDLDesc));
   }
 
   @Override
@@ -369,7 +366,7 @@ public class HiveAuthorizationTaskFactoryImpl implements HiveAuthorizationTaskFa
    RoleDDLDesc showRolesDesc = new RoleDDLDesc(null, null, RoleDDLDesc.RoleOperation.SHOW_ROLES,
         null);
     showRolesDesc.setResFile(resFile.toString());
-    return TaskFactory.get(new DDLWork(inputs, outputs, showRolesDesc), conf);
+    return TaskFactory.get(new DDLWork(inputs, outputs, showRolesDesc));
   }
 
 }

http://git-wip-us.apache.org/repos/asf/hive/blob/94152c99/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/AddForeignKeyHandler.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/AddForeignKeyHandler.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/AddForeignKeyHandler.java
index 0fd970a..8f76230 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/AddForeignKeyHandler.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/AddForeignKeyHandler.java
@@ -69,7 +69,7 @@ public class AddForeignKeyHandler extends AbstractMessageHandler {
 
    AlterTableDesc addConstraintsDesc = new AlterTableDesc(actualDbName + "." + actualTblName, new ArrayList<SQLPrimaryKey>(), fks,
        new ArrayList<SQLUniqueConstraint>(), context.eventOnlyReplicationSpec());
-    Task<DDLWork> addConstraintsTask = TaskFactory.get(new DDLWork(readEntitySet, writeEntitySet, addConstraintsDesc), context.hiveConf);
+    Task<DDLWork> addConstraintsTask = TaskFactory.get(new DDLWork(readEntitySet, writeEntitySet, addConstraintsDesc));
     tasks.add(addConstraintsTask);
    context.log.debug("Added add constrains task : {}:{}", addConstraintsTask.getId(), actualTblName);
    updatedMetadata.set(context.dmd.getEventTo().toString(), actualDbName, actualTblName, null);

http://git-wip-us.apache.org/repos/asf/hive/blob/94152c99/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/AddNotNullConstraintHandler.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/AddNotNullConstraintHandler.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/AddNotNullConstraintHandler.java
index 3425858..7889e03 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/AddNotNullConstraintHandler.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/AddNotNullConstraintHandler.java
@@ -66,7 +66,7 @@ public class AddNotNullConstraintHandler extends AbstractMessageHandler {
 
    AlterTableDesc addConstraintsDesc = new AlterTableDesc(actualDbName + "." + actualTblName, new ArrayList<SQLPrimaryKey>(), new ArrayList<SQLForeignKey>(),
        new ArrayList<SQLUniqueConstraint>(), nns, new ArrayList<SQLDefaultConstraint>(), context.eventOnlyReplicationSpec());
-    Task<DDLWork> addConstraintsTask = TaskFactory.get(new DDLWork(readEntitySet, writeEntitySet, addConstraintsDesc), context.hiveConf);
+    Task<DDLWork> addConstraintsTask = TaskFactory.get(new DDLWork(readEntitySet, writeEntitySet, addConstraintsDesc));
     tasks.add(addConstraintsTask);
    context.log.debug("Added add constrains task : {}:{}", addConstraintsTask.getId(), actualTblName);
    updatedMetadata.set(context.dmd.getEventTo().toString(), actualDbName, actualTblName, null);

http://git-wip-us.apache.org/repos/asf/hive/blob/94152c99/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/AddPrimaryKeyHandler.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/AddPrimaryKeyHandler.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/AddPrimaryKeyHandler.java
index d7ee223..f9a615a 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/AddPrimaryKeyHandler.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/AddPrimaryKeyHandler.java
@@ -64,7 +64,7 @@ public class AddPrimaryKeyHandler extends AbstractMessageHandler {
 
    AlterTableDesc addConstraintsDesc = new AlterTableDesc(actualDbName + "." + actualTblName, pks, new ArrayList<SQLForeignKey>(),
        new ArrayList<SQLUniqueConstraint>(), context.eventOnlyReplicationSpec());
-    Task<DDLWork> addConstraintsTask = TaskFactory.get(new DDLWork(readEntitySet, writeEntitySet, addConstraintsDesc), context.hiveConf);
+    Task<DDLWork> addConstraintsTask = TaskFactory.get(new DDLWork(readEntitySet, writeEntitySet, addConstraintsDesc));
     tasks.add(addConstraintsTask);
    context.log.debug("Added add constrains task : {}:{}", addConstraintsTask.getId(), actualTblName);
    updatedMetadata.set(context.dmd.getEventTo().toString(), actualDbName, actualTblName, null);

http://git-wip-us.apache.org/repos/asf/hive/blob/94152c99/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/AddUniqueConstraintHandler.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/AddUniqueConstraintHandler.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/AddUniqueConstraintHandler.java
index 0d9c700..757381a 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/AddUniqueConstraintHandler.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/AddUniqueConstraintHandler.java
@@ -64,7 +64,7 @@ public class AddUniqueConstraintHandler extends AbstractMessageHandler {
 
    AlterTableDesc addConstraintsDesc = new AlterTableDesc(actualDbName + "." + actualTblName, new ArrayList<SQLPrimaryKey>(), new ArrayList<SQLForeignKey>(),
        uks, context.eventOnlyReplicationSpec());
-    Task<DDLWork> addConstraintsTask = TaskFactory.get(new DDLWork(readEntitySet, writeEntitySet, addConstraintsDesc), context.hiveConf);
+    Task<DDLWork> addConstraintsTask = TaskFactory.get(new DDLWork(readEntitySet, writeEntitySet, addConstraintsDesc));
     tasks.add(addConstraintsTask);
    context.log.debug("Added add constrains task : {}:{}", addConstraintsTask.getId(), actualTblName);
    updatedMetadata.set(context.dmd.getEventTo().toString(), actualDbName, actualTblName, null);

http://git-wip-us.apache.org/repos/asf/hive/blob/94152c99/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/AlterDatabaseHandler.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/AlterDatabaseHandler.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/AlterDatabaseHandler.java
index 00ce977..6c6ee02 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/AlterDatabaseHandler.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/AlterDatabaseHandler.java
@@ -80,7 +80,7 @@ public class AlterDatabaseHandler extends AbstractMessageHandler {
       }
 
       Task<DDLWork> alterDbTask = TaskFactory.get(
-          new DDLWork(readEntitySet, writeEntitySet, alterDbDesc), context.hiveConf);
+          new DDLWork(readEntitySet, writeEntitySet, alterDbDesc));
       context.log.debug("Added alter database task : {}:{}",
               alterDbTask.getId(), actualDbName);
 

http://git-wip-us.apache.org/repos/asf/hive/blob/94152c99/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/CreateDatabaseHandler.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/CreateDatabaseHandler.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/CreateDatabaseHandler.java
index f8d8d1a..0dc72e0 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/CreateDatabaseHandler.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/CreateDatabaseHandler.java
@@ -61,12 +61,12 @@ public class CreateDatabaseHandler extends AbstractMessageHandler {
        new CreateDatabaseDesc(destinationDBName, db.getDescription(), null, true);
     createDatabaseDesc.setDatabaseProperties(db.getParameters());
     Task<DDLWork> createDBTask = TaskFactory.get(
-        new DDLWork(new HashSet<>(), new HashSet<>(), createDatabaseDesc), context.hiveConf);
+        new DDLWork(new HashSet<>(), new HashSet<>(), createDatabaseDesc));
     if (!db.getParameters().isEmpty()) {
      AlterDatabaseDesc alterDbDesc = new AlterDatabaseDesc(destinationDBName, db.getParameters(),
           context.eventOnlyReplicationSpec());
       Task<DDLWork> alterDbProperties = TaskFactory
-          .get(new DDLWork(new HashSet<>(), new HashSet<>(), alterDbDesc), context.hiveConf);
+          .get(new DDLWork(new HashSet<>(), new HashSet<>(), alterDbDesc));
       createDBTask.addDependentTask(alterDbProperties);
     }
     if (StringUtils.isNotEmpty(db.getOwnerName())) {
@@ -74,7 +74,7 @@ public class CreateDatabaseHandler extends AbstractMessageHandler {
           new PrincipalDesc(db.getOwnerName(), db.getOwnerType()),
           context.eventOnlyReplicationSpec());
       Task<DDLWork> alterDbTask = TaskFactory
-          .get(new DDLWork(new HashSet<>(), new HashSet<>(), alterDbOwner), context.hiveConf);
+          .get(new DDLWork(new HashSet<>(), new HashSet<>(), alterDbOwner));
       createDBTask.addDependentTask(alterDbTask);
     }
     updatedMetadata

http://git-wip-us.apache.org/repos/asf/hive/blob/94152c99/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/CreateFunctionHandler.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/CreateFunctionHandler.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/CreateFunctionHandler.java
index caf6f3f..77c2dd2 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/CreateFunctionHandler.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/CreateFunctionHandler.java
@@ -65,8 +65,7 @@ public class CreateFunctionHandler extends AbstractMessageHandler {
 
       context.log.debug("Loading function desc : {}", descToLoad.toString());
       Task<FunctionWork> createTask = TaskFactory.get(
-          new FunctionWork(descToLoad), context.hiveConf
-      );
+          new FunctionWork(descToLoad));
      context.log.debug("Added create function task : {}:{},{}", createTask.getId(),
           descToLoad.getFunctionName(), descToLoad.getClassName());
      // This null check is specifically done as the same class is used to handle both incremental and
@@ -93,7 +92,7 @@ public class CreateFunctionHandler extends AbstractMessageHandler {
         *  which should only happen when the last task is finished, at which point the child of the barrier task is picked up.
          */
         Task<? extends Serializable> barrierTask =
-            TaskFactory.get(new DependencyCollectionWork(), context.hiveConf);
+            TaskFactory.get(new DependencyCollectionWork());
         builder.replCopyTasks.forEach(t -> t.addDependentTask(barrierTask));
         barrierTask.addDependentTask(createTask);
         return builder.replCopyTasks;

http://git-wip-us.apache.org/repos/asf/hive/blob/94152c99/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/DropConstraintHandler.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/DropConstraintHandler.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/DropConstraintHandler.java
index d9d185b..7c7ce1d 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/DropConstraintHandler.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/DropConstraintHandler.java
@@ -39,7 +39,7 @@ public class DropConstraintHandler extends AbstractMessageHandler {
 
    AlterTableDesc dropConstraintsDesc = new AlterTableDesc(actualDbName + "." + actualTblName, constraintName,
         context.eventOnlyReplicationSpec());
-    Task<DDLWork> dropConstraintsTask = TaskFactory.get(new DDLWork(readEntitySet, writeEntitySet, dropConstraintsDesc), context.hiveConf);
+    Task<DDLWork> dropConstraintsTask = TaskFactory.get(new DDLWork(readEntitySet, writeEntitySet, dropConstraintsDesc));
    context.log.debug("Added drop constrain task : {}:{}", dropConstraintsTask.getId(), actualTblName);
    updatedMetadata.set(context.dmd.getEventTo().toString(), actualDbName, actualTblName, null);
     return Collections.singletonList(dropConstraintsTask);    

http://git-wip-us.apache.org/repos/asf/hive/blob/94152c99/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/DropDatabaseHandler.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/DropDatabaseHandler.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/DropDatabaseHandler.java
index 8b11a9e..363f08c 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/DropDatabaseHandler.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/DropDatabaseHandler.java
@@ -40,7 +40,7 @@ public class DropDatabaseHandler extends AbstractMessageHandler {
        new DropDatabaseDesc(actualDbName, true, context.eventOnlyReplicationSpec());
     Task<? extends Serializable> dropDBTask =
         TaskFactory
-            .get(new DDLWork(new HashSet<>(), new HashSet<>(), desc), context.hiveConf);
+            .get(new DDLWork(new HashSet<>(), new HashSet<>(), desc));
     context.log.info(
         "Added drop database task : {}:{}", dropDBTask.getId(), 
desc.getDatabaseName());
     updatedMetadata.set(context.dmd.getEventTo().toString(), actualDbName, 
null, null);

http://git-wip-us.apache.org/repos/asf/hive/blob/94152c99/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/DropFunctionHandler.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/DropFunctionHandler.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/DropFunctionHandler.java
index fee2bb5..1fc7e13 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/DropFunctionHandler.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/DropFunctionHandler.java
@@ -39,7 +39,7 @@ public class DropFunctionHandler extends AbstractMessageHandler {
         FunctionUtils.qualifyFunctionName(msg.getFunctionName(), actualDbName);
     DropFunctionDesc desc = new DropFunctionDesc(
             qualifiedFunctionName, false, context.eventOnlyReplicationSpec());
-    Task<FunctionWork> dropFunctionTask = TaskFactory.get(new FunctionWork(desc), context.hiveConf);
+    Task<FunctionWork> dropFunctionTask = TaskFactory.get(new FunctionWork(desc));
     context.log.debug(
         "Added drop function task : {}:{}", dropFunctionTask.getId(), 
desc.getFunctionName()
     );

http://git-wip-us.apache.org/repos/asf/hive/blob/94152c99/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/DropPartitionHandler.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/DropPartitionHandler.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/DropPartitionHandler.java
index a88916e..1a28eec 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/DropPartitionHandler.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/DropPartitionHandler.java
@@ -53,8 +53,7 @@ public class DropPartitionHandler extends AbstractMessageHandler {
        DropTableDesc dropPtnDesc = new DropTableDesc(actualDbName + "." + actualTblName,
             partSpecs, null, true, context.eventOnlyReplicationSpec());
         Task<DDLWork> dropPtnTask = TaskFactory.get(
-            new DDLWork(readEntitySet, writeEntitySet, dropPtnDesc),
-            context.hiveConf
+            new DDLWork(readEntitySet, writeEntitySet, dropPtnDesc)
         );
        context.log.debug("Added drop ptn task : {}:{},{}", dropPtnTask.getId(),
             dropPtnDesc.getTableName(), msg.getPartitions());

http://git-wip-us.apache.org/repos/asf/hive/blob/94152c99/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/DropTableHandler.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/DropTableHandler.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/DropTableHandler.java
index 4d400f4..3e567e9 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/DropTableHandler.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/DropTableHandler.java
@@ -40,8 +40,7 @@ public class DropTableHandler extends AbstractMessageHandler {
         null, true, true, context.eventOnlyReplicationSpec(), false
     );
     Task<DDLWork> dropTableTask = TaskFactory.get(
-        new DDLWork(readEntitySet, writeEntitySet, dropTableDesc),
-        context.hiveConf
+        new DDLWork(readEntitySet, writeEntitySet, dropTableDesc)
     );
     context.log.debug(
         "Added drop tbl task : {}:{}", dropTableTask.getId(), 
dropTableDesc.getTableName()

http://git-wip-us.apache.org/repos/asf/hive/blob/94152c99/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/RenamePartitionHandler.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/RenamePartitionHandler.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/RenamePartitionHandler.java
index 43f2cbc..396b7ee 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/RenamePartitionHandler.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/RenamePartitionHandler.java
@@ -62,7 +62,7 @@ public class RenamePartitionHandler extends AbstractMessageHandler {
     RenamePartitionDesc renamePtnDesc = new RenamePartitionDesc(
            tableName, oldPartSpec, newPartSpec, context.eventOnlyReplicationSpec());
     Task<DDLWork> renamePtnTask = TaskFactory.get(
-        new DDLWork(readEntitySet, writeEntitySet, renamePtnDesc), context.hiveConf);
+        new DDLWork(readEntitySet, writeEntitySet, renamePtnDesc));
     context.log.debug("Added rename ptn task : {}:{}->{}",
                       renamePtnTask.getId(), oldPartSpec, newPartSpec);
    updatedMetadata.set(context.dmd.getEventTo().toString(), actualDbName, actualTblName, newPartSpec);

http://git-wip-us.apache.org/repos/asf/hive/blob/94152c99/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/RenameTableHandler.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/RenameTableHandler.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/RenameTableHandler.java
index 83433d7..98bf625 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/RenameTableHandler.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/RenameTableHandler.java
@@ -61,7 +61,7 @@ public class RenameTableHandler extends AbstractMessageHandler {
       AlterTableDesc renameTableDesc = new AlterTableDesc(
               oldName, newName, false, context.eventOnlyReplicationSpec());
       Task<DDLWork> renameTableTask = TaskFactory.get(
-          new DDLWork(readEntitySet, writeEntitySet, renameTableDesc), context.hiveConf);
+          new DDLWork(readEntitySet, writeEntitySet, renameTableDesc));
       context.log.debug("Added rename table task : {}:{}->{}",
                         renameTableTask.getId(), oldName, newName);
 

http://git-wip-us.apache.org/repos/asf/hive/blob/94152c99/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/TruncatePartitionHandler.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/TruncatePartitionHandler.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/TruncatePartitionHandler.java
index b983f95..1e7fa10 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/TruncatePartitionHandler.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/TruncatePartitionHandler.java
@@ -58,8 +58,7 @@ public class TruncatePartitionHandler extends AbstractMessageHandler {
             actualDbName + "." + actualTblName, partSpec,
             context.eventOnlyReplicationSpec());
     Task<DDLWork> truncatePtnTask = TaskFactory.get(
-            new DDLWork(readEntitySet, writeEntitySet, truncateTableDesc),
-            context.hiveConf);
+        new DDLWork(readEntitySet, writeEntitySet, truncateTableDesc));
    context.log.debug("Added truncate ptn task : {}:{}", truncatePtnTask.getId(),
        truncateTableDesc.getTableName());
    updatedMetadata.set(context.dmd.getEventTo().toString(), actualDbName, actualTblName, partSpec);

http://git-wip-us.apache.org/repos/asf/hive/blob/94152c99/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/TruncateTableHandler.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/TruncateTableHandler.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/TruncateTableHandler.java
index c6d7739..bdef67f 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/TruncateTableHandler.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/TruncateTableHandler.java
@@ -39,8 +39,7 @@ public class TruncateTableHandler extends AbstractMessageHandler {
             actualDbName + "." + actualTblName,
             null, context.eventOnlyReplicationSpec());
     Task<DDLWork> truncateTableTask = TaskFactory.get(
-        new DDLWork(readEntitySet, writeEntitySet, truncateTableDesc),
-        context.hiveConf);
+        new DDLWork(readEntitySet, writeEntitySet, truncateTableDesc));
 
    context.log.debug("Added truncate tbl task : {}:{}", truncateTableTask.getId(),
         truncateTableDesc.getTableName());

http://git-wip-us.apache.org/repos/asf/hive/blob/94152c99/ql/src/java/org/apache/hadoop/hive/ql/parse/spark/GenSparkProcContext.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/spark/GenSparkProcContext.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/spark/GenSparkProcContext.java
index c9bb6a5..ede9abc 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/spark/GenSparkProcContext.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/spark/GenSparkProcContext.java
@@ -179,7 +179,7 @@ public class GenSparkProcContext implements NodeProcessorCtx {
    this.linkChildOpWithDummyOp = new LinkedHashMap<Operator<?>, List<Operator<?>>>();
     this.dependencyTask = conf.getBoolVar(
         HiveConf.ConfVars.HIVE_MULTI_INSERT_MOVE_TASKS_SHARE_DEPENDENCIES)
-        ? (DependencyCollectionTask) TaskFactory.get(new DependencyCollectionWork(), conf)
+            ? (DependencyCollectionTask) TaskFactory.get(new DependencyCollectionWork())
         : null;
     this.unionWorkMap = new LinkedHashMap<Operator<?>, BaseWork>();
     this.currentUnionOperators = new LinkedList<UnionOperator>();

http://git-wip-us.apache.org/repos/asf/hive/blob/94152c99/ql/src/java/org/apache/hadoop/hive/ql/parse/spark/SparkProcessAnalyzeTable.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/spark/SparkProcessAnalyzeTable.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/spark/SparkProcessAnalyzeTable.java
index fd7bd3d..e81d6f3 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/spark/SparkProcessAnalyzeTable.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/spark/SparkProcessAnalyzeTable.java
@@ -111,7 +111,7 @@ public class SparkProcessAnalyzeTable implements NodeProcessor {
          PrunedPartitionList partList = new PrunedPartitionList(table, confirmedParts, partCols, false);
           statWork.addInputPartitions(partList.getPartitions());
         }
-        Task<StatsWork> snjTask = TaskFactory.get(statWork, parseContext.getConf());
+        Task<StatsWork> snjTask = TaskFactory.get(statWork);
         snjTask.setParentTasks(null);
         context.rootTasks.remove(context.currentTask);
         context.rootTasks.add(snjTask);
@@ -127,7 +127,7 @@ public class SparkProcessAnalyzeTable implements NodeProcessor {
        StatsWork columnStatsWork = new StatsWork(table, basicStatsWork, parseContext.getConf());
         columnStatsWork.collectStatsFromAggregator(tableScan.getConf());
         columnStatsWork.setSourceTask(context.currentTask);
-        Task<StatsWork> statsTask = TaskFactory.get(columnStatsWork, parseContext.getConf());
+        Task<StatsWork> statsTask = TaskFactory.get(columnStatsWork);
         context.currentTask.addDependentTask(statsTask);
 
         // ANALYZE TABLE T [PARTITION (...)] COMPUTE STATISTICS noscan;

http://git-wip-us.apache.org/repos/asf/hive/blob/94152c99/ql/src/java/org/apache/hadoop/hive/ql/plan/ImportTableDesc.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/ImportTableDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/ImportTableDesc.java
index a4e21c1..7a955bc 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/plan/ImportTableDesc.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/ImportTableDesc.java
@@ -321,9 +321,9 @@ public class ImportTableDesc {
       HiveConf conf) {
     switch (getDescType()) {
     case TABLE:
-      return TaskFactory.get(new DDLWork(inputs, outputs, createTblDesc), conf, true);
+        return TaskFactory.get(new DDLWork(inputs, outputs, createTblDesc), conf);
     case VIEW:
-      return TaskFactory.get(new DDLWork(inputs, outputs, createViewDesc), conf, true);
+        return TaskFactory.get(new DDLWork(inputs, outputs, createViewDesc), conf);
     }
     return null;
   }

http://git-wip-us.apache.org/repos/asf/hive/blob/94152c99/ql/src/test/org/apache/hadoop/hive/ql/exec/TestUtilities.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/TestUtilities.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/TestUtilities.java
index 788ac52..fdc268c 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/exec/TestUtilities.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/TestUtilities.java
@@ -792,7 +792,7 @@ public class TestUtilities {
   }
 
   private Task<? extends Serializable> getDependencyCollectionTask(){
-    return TaskFactory.get(new DependencyCollectionWork(), new HiveConf());
+    return TaskFactory.get(new DependencyCollectionWork());
   }
 
   /**
@@ -911,7 +911,7 @@ public class TestUtilities {
   }
 
   private static Task<MapredWork> getMapredWork() {
-    return TaskFactory.get(MapredWork.class, new HiveConf());
+    return TaskFactory.get(MapredWork.class);
   }
 
   @Test

http://git-wip-us.apache.org/repos/asf/hive/blob/94152c99/ql/src/test/org/apache/hadoop/hive/ql/exec/repl/bootstrap/AddDependencyToLeavesTest.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/repl/bootstrap/AddDependencyToLeavesTest.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/repl/bootstrap/AddDependencyToLeavesTest.java
index aa24d29..309debe 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/exec/repl/bootstrap/AddDependencyToLeavesTest.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/repl/bootstrap/AddDependencyToLeavesTest.java
@@ -46,11 +46,11 @@ public class AddDependencyToLeavesTest {
   @Test
   public void shouldNotSkipIntermediateDependencyCollectionTasks() {
     Task<DependencyCollectionWork> collectionWorkTaskOne =
-        TaskFactory.get(new DependencyCollectionWork(), hiveConf);
+        TaskFactory.get(new DependencyCollectionWork());
     Task<DependencyCollectionWork> collectionWorkTaskTwo =
-        TaskFactory.get(new DependencyCollectionWork(), hiveConf);
+        TaskFactory.get(new DependencyCollectionWork());
     Task<DependencyCollectionWork> collectionWorkTaskThree =
-        TaskFactory.get(new DependencyCollectionWork(), hiveConf);
+        TaskFactory.get(new DependencyCollectionWork());
 
    @SuppressWarnings("unchecked") Task<? extends Serializable> rootTask = mock(Task.class);
     when(rootTask.getDependentTasks())

Reply via email to