http://git-wip-us.apache.org/repos/asf/drill/blob/acc5ed92/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/agg/TestHashAggrSpill.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/agg/TestHashAggrSpill.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/agg/TestHashAggrSpill.java
index 2e81acd..7be6195 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/agg/TestHashAggrSpill.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/agg/TestHashAggrSpill.java
@@ -19,13 +19,13 @@
 package org.apache.drill.exec.physical.impl.agg;
 
 import ch.qos.logback.classic.Level;
-import org.apache.drill.BaseTestQuery;
 import org.apache.drill.categories.OperatorTest;
 import org.apache.drill.common.exceptions.UserRemoteException;
 import org.apache.drill.exec.ExecConstants;
 import org.apache.drill.exec.physical.impl.aggregate.HashAggTemplate;
 import org.apache.drill.exec.planner.physical.PlannerSettings;
 import org.apache.drill.exec.proto.UserBitShared;
+import org.apache.drill.test.BaseDirTestWatcher;
 import org.apache.drill.test.ClientFixture;
 import org.apache.drill.test.ClusterFixture;
 import org.apache.drill.test.ClusterFixtureBuilder;
@@ -33,6 +33,7 @@ import org.apache.drill.test.LogFixture;
 import org.apache.drill.test.ProfileParser;
 import org.apache.drill.test.QueryBuilder;
 import org.apache.drill.categories.SlowTest;
+import org.junit.Rule;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
 
@@ -43,43 +44,13 @@ import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertTrue;
 
 /**
- *  Test spilling for the Hash Aggr operator (using the mock reader)
+ * Test spilling for the Hash Aggr operator (using the mock reader)
  */
 @Category({SlowTest.class, OperatorTest.class})
 public class TestHashAggrSpill {
 
-    private void runAndDump(ClientFixture client, String sql, long expectedRows, long spillCycle, long fromSpilledPartitions, long toSpilledPartitions) throws Exception {
-        String plan = client.queryBuilder().sql(sql).explainJson();
-
-        QueryBuilder.QuerySummary summary = client.queryBuilder().sql(sql).run();
-        if ( expectedRows > 0 ) {
-            assertEquals(expectedRows, summary.recordCount());
-        }
-        // System.out.println(String.format("======== \n Results: %,d records, %d batches, %,d ms\n ========", summary.recordCount(), summary.batchCount(), summary.runTimeMs() ) );
-
-        //System.out.println("Query ID: " + summary.queryIdString());
-        ProfileParser profile = client.parseProfile(summary.queryIdString());
-        //profile.print();
-        List<ProfileParser.OperatorProfile> ops = profile.getOpsOfType(UserBitShared.CoreOperatorType.HASH_AGGREGATE_VALUE);
-
-        assertTrue( ! ops.isEmpty() );
-        // check for the first op only
-        ProfileParser.OperatorProfile hag0 = ops.get(0);
-        long opCycle = hag0.getMetric(HashAggTemplate.Metric.SPILL_CYCLE.ordinal());
-        assertEquals(spillCycle, opCycle);
-        long op_spilled_partitions = hag0.getMetric(HashAggTemplate.Metric.SPILLED_PARTITIONS.ordinal());
-        assertTrue( op_spilled_partitions >= fromSpilledPartitions && op_spilled_partitions <= toSpilledPartitions );
-        /* assertEquals(3, ops.size());
-        for ( int i = 0; i < ops.size(); i++ ) {
-            ProfileParser.OperatorProfile hag = ops.get(i);
-            long cycle = hag.getMetric(HashAggTemplate.Metric.SPILL_CYCLE.ordinal());
-            long num_partitions = hag.getMetric(HashAggTemplate.Metric.NUM_PARTITIONS.ordinal());
-            long spilled_partitions = hag.getMetric(HashAggTemplate.Metric.SPILLED_PARTITIONS.ordinal());
-            long mb_spilled = hag.getMetric(HashAggTemplate.Metric.SPILL_MB.ordinal());
-            System.out.println(String.format("(%d) Spill cycle: %d, num partitions: %d, spilled partitions: %d, MB spilled: %d", i,cycle, num_partitions, spilled_partitions,
-                mb_spilled));
-        } */
-    }
+  @Rule
+  public final BaseDirTestWatcher dirTestWatcher = new BaseDirTestWatcher();
 
     /**
      *  A template for Hash Aggr spilling tests
@@ -90,24 +61,18 @@ public class TestHashAggrSpill {
                           String sql, long expectedRows, int cycle, int fromPart, int toPart) throws Exception {
         LogFixture.LogFixtureBuilder logBuilder = LogFixture.builder()
           .toConsole()
-          //.logger("org.apache.drill.exec.physical.impl.aggregate", Level.INFO)
-          .logger("org.apache.drill", Level.WARN)
-          ;
+          .logger("org.apache.drill", Level.WARN);
 
-        ClusterFixtureBuilder builder = ClusterFixture.builder()
+        ClusterFixtureBuilder builder = ClusterFixture.builder(dirTestWatcher)
           .sessionOption(ExecConstants.HASHAGG_MAX_MEMORY_KEY,maxMem)
           .sessionOption(ExecConstants.HASHAGG_NUM_PARTITIONS_KEY,numPartitions)
           .sessionOption(ExecConstants.HASHAGG_MIN_BATCHES_PER_PARTITION_KEY,minBatches)
           .configProperty(ExecConstants.SYS_STORE_PROVIDER_LOCAL_ENABLE_WRITE, false)
           .sessionOption(PlannerSettings.FORCE_2PHASE_AGGR_KEY,true)
-          // .sessionOption(PlannerSettings.EXCHANGE.getOptionName(), true)
           .sessionOption(ExecConstants.HASHAGG_FALLBACK_ENABLED_KEY, fallback)
           .sessionOption(ExecConstants.HASHAGG_USE_MEMORY_PREDICTION_KEY,predict)
-
           .maxParallelization(maxParallel)
-          .saveProfiles()
-          //.keepLocalFiles()
-          ;
+          .saveProfiles();
         String sqlStr = sql != null ? sql :  // if null then use this default query
           "SELECT empid_s17, dept_i, branch_i, AVG(salary_i) FROM `mock`.`employee_1200K` GROUP BY empid_s17, dept_i, branch_i";
 
@@ -137,52 +102,68 @@ public class TestHashAggrSpill {
      */
     @Test
     public void testNoPredictHashAggrSpill() throws Exception {
-        testSpill(58_000_000, 16, 2, 2, false,false /* no prediction */,
-             null,1_200_000, 1,1, 1
-        );
+      testSpill(58_000_000, 16, 2, 2, false, false /* no prediction */, null,
+        1_200_000, 1, 1, 1);
     }
-    /**
-     * Test Secondary and Tertiary spill cycles - Happens when some of the spilled partitions cause more spilling as they are read back
-     *
-     * @throws Exception
-     */
-    @Test
-    public void testHashAggrSecondaryTertiarySpill() throws Exception {
 
-        testSpill(58_000_000, 16, 3, 1, false,true,
-          "SELECT empid_s44, dept_i, branch_i, AVG(salary_i) FROM `mock`.`employee_1100K` GROUP BY empid_s44, dept_i, branch_i",
-          1_100_000, 3,2, 2
-        );
+  private void runAndDump(ClientFixture client, String sql, long expectedRows, long spillCycle, long fromSpilledPartitions, long toSpilledPartitions) throws Exception {
+    QueryBuilder.QuerySummary summary = client.queryBuilder().sql(sql).run();
+    if (expectedRows > 0) {
+      assertEquals(expectedRows, summary.recordCount());
     }
-    /**
-     * Test with the "fallback" option disabled: When not enough memory available to allow spilling, then fail (Resource error) !!
-     *
-     * @throws Exception
-     */
-    @Test
-    public void testHashAggrFailWithFallbackDisabed() throws Exception {
-
-        try {
-            testSpill(34_000_000, 4, 5, 2, false /* no fallback */, true, null,
-              1_200_000, 0 /* no spill due to fallback to pre-1.11 */, 0, 0);
-            fail(); // in case the above test did not throw
-        } catch (Exception ex) {
-            assertTrue(ex instanceof UserRemoteException);
-            assertTrue(((UserRemoteException)ex).getErrorType() == UserBitShared.DrillPBError.ErrorType.RESOURCE);
-            // must get here for the test to succeed ...
-        }
-    }
-    /**
-     * Test with the "fallback" option ON: When not enough memory is available to allow spilling (internally need enough memory to
-     * create multiple partitions), then behave like the pre-1.11 Hash Aggregate: Allocate unlimited memory, no spill.
-     *
-     * @throws Exception
-     */
-    @Test
-    public void testHashAggrSuccessWithFallbackEnabled() throws Exception {
 
-        testSpill(34_000_000, 4, 5, 2, true /* do fallback */,true, null,
-          1_200_000, 0 /* no spill due to fallback to pre-1.11 */,0, 0
-        );
+    ProfileParser profile = client.parseProfile(summary.queryIdString());
+    List<ProfileParser.OperatorProfile> ops = profile.getOpsOfType(UserBitShared.CoreOperatorType.HASH_AGGREGATE_VALUE);
+
+    assertTrue(!ops.isEmpty());
+    // check for the first op only
+    ProfileParser.OperatorProfile hag0 = ops.get(0);
+    long opCycle = hag0.getMetric(HashAggTemplate.Metric.SPILL_CYCLE.ordinal());
+    assertEquals(spillCycle, opCycle);
+    long op_spilled_partitions = hag0.getMetric(HashAggTemplate.Metric.SPILLED_PARTITIONS.ordinal());
+    assertTrue(op_spilled_partitions >= fromSpilledPartitions && op_spilled_partitions <= toSpilledPartitions);
+  }
+
+  /**
+   * Test Secondary and Tertiary spill cycles - Happens when some of the spilled partitions cause more spilling as they are read back
+   *
+   * @throws Exception
+   */
+  @Test
+  public void testHashAggrSecondaryTertiarySpill() throws Exception {
+
+    testSpill(58_000_000, 16, 3, 1, false, true, "SELECT empid_s44, dept_i, branch_i, AVG(salary_i) FROM `mock`.`employee_1100K` GROUP BY empid_s44, dept_i, branch_i",
+      1_100_000, 3, 2, 2);
+  }
+
+  /**
+   * Test with the "fallback" option disabled: When not enough memory available to allow spilling, then fail (Resource error) !!
+   *
+   * @throws Exception
+   */
+  @Test
+  public void testHashAggrFailWithFallbackDisabed() throws Exception {
+
+    try {
+      testSpill(34_000_000, 4, 5, 2, false /* no fallback */, true, null,
+        1_200_000, 0 /* no spill due to fallback to pre-1.11 */, 0, 0);
+      fail(); // in case the above test did not throw
+    } catch (Exception ex) {
+      assertTrue(ex instanceof UserRemoteException);
+      assertTrue(((UserRemoteException) ex).getErrorType() == UserBitShared.DrillPBError.ErrorType.RESOURCE);
+      // must get here for the test to succeed ...
     }
+  }
+
+  /**
+   * Test with the "fallback" option ON: When not enough memory is available to allow spilling (internally need enough memory to
+   * create multiple partitions), then behave like the pre-1.11 Hash Aggregate: Allocate unlimited memory, no spill.
+   *
+   * @throws Exception
+   */
+  @Test
+  public void testHashAggrSuccessWithFallbackEnabled() throws Exception {
+    testSpill(34_000_000, 4, 5, 2, true /* do fallback */, true, null,
+      1_200_000, 0 /* no spill due to fallback to pre-1.11 */, 0, 0);
+  }
 }
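
The hunks above replace per-test temp and profile directories with a JUnit-managed BaseDirTestWatcher handed to ClusterFixture.builder(...). A minimal sketch of the resulting test shape follows; it is not part of this commit, and the builder.build()/cluster.clientFixture() calls are assumed from the surrounding Drill ClusterFixture test API:

  @Rule
  public final BaseDirTestWatcher dirTestWatcher = new BaseDirTestWatcher();

  @Test
  public void exampleFixtureTest() throws Exception {
    ClusterFixtureBuilder builder = ClusterFixture.builder(dirTestWatcher)
      .maxParallelization(2)
      .saveProfiles(); // profiles land in the watcher-managed directory
    try (ClusterFixture cluster = builder.build();
         ClientFixture client = cluster.clientFixture()) {
      QueryBuilder.QuerySummary summary = client.queryBuilder()
        .sql("SELECT 1 FROM (VALUES(1))").run();
      assertEquals(1, summary.recordCount());
    }
  }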

http://git-wip-us.apache.org/repos/asf/drill/blob/acc5ed92/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/broadcastsender/TestBroadcast.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/broadcastsender/TestBroadcast.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/broadcastsender/TestBroadcast.java
index 5b869e1..970f4a4 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/broadcastsender/TestBroadcast.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/broadcastsender/TestBroadcast.java
@@ -17,35 +17,40 @@
  */
 package org.apache.drill.exec.physical.impl.broadcastsender;
 
-import org.apache.drill.BaseTestQuery;
+import org.apache.drill.test.BaseTestQuery;
 import org.apache.drill.categories.OperatorTest;
+import org.junit.BeforeClass;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
 
+import java.nio.file.Paths;
+
 @Category(OperatorTest.class)
 public class TestBroadcast extends BaseTestQuery {
-  static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(TestBroadcast.class);
+  public static final String BROAD_CAST_QUERY = "select * from "
+    + "dfs.`broadcast/sales` s "
+    + "INNER JOIN "
+    + "dfs.`broadcast/customer` c "
+    + "ON s.id = c.id";
 
-  String broadcastQuery = "select * from "
-      + "dfs.`${WORKING_PATH}/src/test/resources/broadcast/sales` s "
-      + "INNER JOIN "
-      + "dfs.`${WORKING_PATH}/src/test/resources/broadcast/customer` c "
-      + "ON s.id = c.id";
+  @BeforeClass
+  public static void setupFiles() {
+    dirTestWatcher.copyResourceToRoot(Paths.get("broadcast"));
+  }
 
   @Test
   public void plansWithBroadcast() throws Exception {
     //TODO: actually verify that this plan has a broadcast exchange in it once plan tools are enabled.
     setup();
-    test("explain plan for " + broadcastQuery);
+    test("explain plan for %s", BROAD_CAST_QUERY);
   }
 
   @Test
   public void broadcastExecuteWorks() throws Exception {
     setup();
-    test(broadcastQuery);
+    test(BROAD_CAST_QUERY);
   }
 
-
   private void setup() throws Exception{
     testNoResult("alter session set `planner.slice_target` = 1");
     testNoResult("alter session set `planner.enable_broadcast_join` = true");
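
The change above swaps ${WORKING_PATH} interpolation for dirTestWatcher.copyResourceToRoot(...), after which test resources are addressed by dfs-relative paths. A condensed, hypothetical example of the same pattern (the query and test method name are illustrative):

  @BeforeClass
  public static void setupFiles() {
    // copy src/test/resources/broadcast into the watcher-managed dfs root
    dirTestWatcher.copyResourceToRoot(Paths.get("broadcast"));
  }

  @Test
  public void queryCopiedResource() throws Exception {
    // the copied tree is now visible under a workspace-relative path
    test("select count(*) from dfs.`broadcast/sales`");
  }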

http://git-wip-us.apache.org/repos/asf/drill/blob/acc5ed92/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/filter/TestLargeInClause.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/filter/TestLargeInClause.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/filter/TestLargeInClause.java
index 353c201..a68dffc 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/filter/TestLargeInClause.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/filter/TestLargeInClause.java
@@ -17,7 +17,7 @@
  */
 package org.apache.drill.exec.physical.impl.filter;
 
-import org.apache.drill.BaseTestQuery;
+import org.apache.drill.test.BaseTestQuery;
 import org.apache.drill.categories.OperatorTest;
 import org.apache.drill.categories.UnlikelyTest;
 import org.junit.Test;

http://git-wip-us.apache.org/repos/asf/drill/blob/acc5ed92/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/filter/TestSimpleFilter.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/filter/TestSimpleFilter.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/filter/TestSimpleFilter.java
index 499ac5b..750276c 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/filter/TestSimpleFilter.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/filter/TestSimpleFilter.java
@@ -22,7 +22,7 @@ import static org.junit.Assert.assertTrue;
 
 import org.apache.drill.categories.OperatorTest;
 import org.apache.drill.common.config.DrillConfig;
-import org.apache.drill.common.util.FileUtils;
+import org.apache.drill.common.util.DrillFileUtils;
 import org.apache.drill.exec.ExecTest;
 import org.apache.drill.exec.expr.fn.FunctionImplementationRegistry;
 import org.apache.drill.exec.ops.FragmentContext;
@@ -55,7 +55,7 @@ public class TestSimpleFilter extends ExecTest {
     mockDrillbitContext(bitContext);
 
     final PhysicalPlanReader reader = PhysicalPlanReaderTestFactory.defaultPhysicalPlanReader(c);
-    final PhysicalPlan plan = reader.readPhysicalPlan(Files.toString(FileUtils.getResourceAsFile("/filter/test1.json"), Charsets.UTF_8));
+    final PhysicalPlan plan = reader.readPhysicalPlan(Files.toString(DrillFileUtils.getResourceAsFile("/filter/test1.json"), Charsets.UTF_8));
     final FunctionImplementationRegistry registry = new FunctionImplementationRegistry(c);
     final FragmentContext context = new FragmentContext(bitContext, PlanFragment.getDefaultInstance(), connection, registry);
     final SimpleRootExec exec = new SimpleRootExec(ImplCreator.getExec(context, (FragmentRoot) plan.getSortedOperators(false).iterator().next()));
@@ -77,7 +77,7 @@ public class TestSimpleFilter extends ExecTest {
     mockDrillbitContext(bitContext);
 
     final PhysicalPlanReader reader = PhysicalPlanReaderTestFactory.defaultPhysicalPlanReader(c);
-    final PhysicalPlan plan = reader.readPhysicalPlan(Files.toString(FileUtils.getResourceAsFile("/filter/test_sv4.json"), Charsets.UTF_8));
+    final PhysicalPlan plan = reader.readPhysicalPlan(Files.toString(DrillFileUtils.getResourceAsFile("/filter/test_sv4.json"), Charsets.UTF_8));
     final FunctionImplementationRegistry registry = new FunctionImplementationRegistry(c);
     final FragmentContext context = new FragmentContext(bitContext, PlanFragment.getDefaultInstance(), connection, registry);
     final SimpleRootExec exec = new SimpleRootExec(ImplCreator.getExec(context, (FragmentRoot) plan.getSortedOperators(false).iterator().next()));
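
The FileUtils to DrillFileUtils rename above recurs through the rest of this commit. The call pattern being preserved, sketched here for reference (assuming DrillFileUtils keeps the getResourceAsFile(String) signature shown in the hunks):

  // load a physical plan definition from the test classpath
  final PhysicalPlan plan = reader.readPhysicalPlan(
      Files.toString(DrillFileUtils.getResourceAsFile("/filter/test1.json"), Charsets.UTF_8));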

http://git-wip-us.apache.org/repos/asf/drill/blob/acc5ed92/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/flatten/TestFlatten.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/flatten/TestFlatten.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/flatten/TestFlatten.java
index fc954be..1dc07df 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/flatten/TestFlatten.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/flatten/TestFlatten.java
@@ -17,35 +17,38 @@
 ******************************************************************************/
 package org.apache.drill.exec.physical.impl.flatten;
 
-import static org.apache.commons.io.FileUtils.deleteQuietly;
-import static org.apache.drill.TestBuilder.listOf;
-import static org.apache.drill.TestBuilder.mapOf;
+import static org.apache.drill.test.TestBuilder.listOf;
+import static org.apache.drill.test.TestBuilder.mapOf;
 import static org.junit.Assert.assertEquals;
 
 import java.io.BufferedWriter;
 import java.io.File;
 import java.io.FileWriter;
+import java.nio.file.Path;
+import java.nio.file.Paths;
 import java.util.List;
 
-import org.apache.drill.BaseTestQuery;
+import org.apache.drill.test.BaseTestQuery;
 import org.apache.drill.categories.OperatorTest;
-import org.apache.drill.TestBuilder;
+import org.apache.drill.test.TestBuilder;
 import org.apache.drill.categories.UnlikelyTest;
-import org.apache.drill.common.util.FileUtils;
 import org.apache.drill.exec.fn.interp.TestConstantFolding;
 import org.apache.drill.exec.store.easy.json.JSONRecordReader;
 import org.apache.drill.exec.util.JsonStringHashMap;
+import org.apache.drill.test.SubDirTestWatcher;
+import org.junit.BeforeClass;
 import org.junit.Ignore;
 import org.junit.Rule;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
-import org.junit.rules.TemporaryFolder;
 
 import com.google.common.collect.Lists;
 
 @Category(OperatorTest.class)
 public class TestFlatten extends BaseTestQuery {
 
+  private static final Path TEST_DIR = Paths.get("test");
+
   /**
    *  enable this if you have the following files:
    *    - /tmp/yelp_academic_dataset_business.json
@@ -55,23 +58,33 @@ public class TestFlatten extends BaseTestQuery {
    */
   public static boolean RUN_ADVANCED_TESTS = false;
 
+  private static File pathDir;
+
+  @BeforeClass
+  public static void initFile() {
+    pathDir = dirTestWatcher.getRootDir()
+      .toPath()
+      .resolve(TEST_DIR)
+      .toFile();
+  }
+
   @Rule
-  public TemporaryFolder folder = new TemporaryFolder();
+  public final SubDirTestWatcher subDirTestWatcher =
+    new SubDirTestWatcher.Builder(dirTestWatcher.getRootDir())
+    .addSubDir(TEST_DIR)
+    .build();
 
   @Test
   public void testFlattenFailure() throws Exception {
-    test("select flatten(complex), rownum from cp.`/store/json/test_flatten_mappify2.json`");
-//    test("select complex, rownum from cp.`/store/json/test_flatten_mappify2.json`");
+    test("select flatten(complex), rownum from cp.`store/json/test_flatten_mappify2.json`");
   }
 
   @Test
   @Category(UnlikelyTest.class)
   public void testFlatten_Drill2162_complex() throws Exception {
-    String path = folder.getRoot().toPath().toString();
-
     String jsonRecords = BaseTestQuery.getFile("flatten/complex_transaction_example_data.json");
     int numCopies = 700;
-    new TestConstantFolding.SmallFileCreator(folder)
+    new TestConstantFolding.SmallFileCreator(pathDir)
         .setRecord(jsonRecords)
         .createFiles(1, numCopies, "json");
 
@@ -97,7 +110,7 @@ public class TestFlatten extends BaseTestQuery {
 
     TestBuilder builder = testBuilder()
         .sqlQuery("select uid, flatten(d.lst_lst[1]) lst1, flatten(d.lst_lst[0]) lst0, flatten(d.lst_lst) lst from " +
-                  "dfs.`" + path + "/bigfile/bigfile.json` d")
+                  "dfs.`%s/bigfile/bigfile.json` d", TEST_DIR)
         .unOrdered()
         .baselineColumns("uid", "lst1", "lst0", "lst");
     for (int i = 0; i < numCopies; i++) {
@@ -168,8 +181,6 @@ public class TestFlatten extends BaseTestQuery {
 
   @Test
   public void testFlatten_Drill2162_simple() throws Exception {
-    String path = folder.getRoot().toPath().toString();
-
     List<Long> inputList = Lists.newArrayList();
     String jsonRecord = "{ \"int_list\" : [";
     final int listSize = 30;
@@ -180,7 +191,7 @@ public class TestFlatten extends BaseTestQuery {
     jsonRecord += listSize + "] }";
     inputList.add((long) listSize);
     int numRecords = 3000;
-    new TestConstantFolding.SmallFileCreator(folder)
+    new TestConstantFolding.SmallFileCreator(pathDir)
         .setRecord(jsonRecord)
         .createFiles(1, numRecords, "json");
 
@@ -192,7 +203,7 @@ public class TestFlatten extends BaseTestQuery {
     List<JsonStringHashMap<String, Object>> result = flatten(data, "int_list");
 
     TestBuilder builder = testBuilder()
-        .sqlQuery("select flatten(int_list) as int_list from dfs.`" + path + "/bigfile/bigfile.json`")
+        .sqlQuery("select flatten(int_list) as int_list from dfs.`%s/bigfile/bigfile.json`", TEST_DIR)
         .unOrdered()
         .baselineColumns("int_list");
 
@@ -207,7 +218,9 @@ public class TestFlatten extends BaseTestQuery {
   @Test
   @Category(UnlikelyTest.class)
   public void drill1671() throws Exception{
-    int rowCount = testSql("select * from (select count(*) as cnt from (select id, flatten(evnts1), flatten(evnts2), flatten(evnts3), flatten(evnts4), flatten(evnts5), flatten(evnts6), flatten(evnts7), flatten(evnts8), flatten(evnts9), flatten(evnts10), flatten(evnts11) from cp.`/flatten/many-arrays-50.json`)x )y where cnt = 2048");
+    int rowCount = testSql("select * from (select count(*) as cnt from (select id, flatten(evnts1), flatten(evnts2), flatten(evnts3), flatten(evnts4), " +
+      "flatten(evnts5), flatten(evnts6), flatten(evnts7), flatten(evnts8), flatten(evnts9), flatten(evnts10), flatten(evnts11) " +
+      "from cp.`flatten/many-arrays-50.json`)x )y where cnt = 2048");
     assertEquals(rowCount, 1);
   }
 
@@ -215,7 +228,7 @@ public class TestFlatten extends BaseTestQuery {
   @Category(UnlikelyTest.class)
   public void drill3370() throws Exception {
     testBuilder()
-        .sqlQuery("select a from (select flatten(arr) as a from cp.`/flatten/drill-3370.json`) where a > 100")
+        .sqlQuery("select a from (select flatten(arr) as a from cp.`flatten/drill-3370.json`) where a > 100")
         .unOrdered()
         .baselineColumns("a")
         .baselineValues(131l)
@@ -226,24 +239,22 @@ public class TestFlatten extends BaseTestQuery {
   @Test
   @Ignore("not yet fixed")
   public void drill1660() throws Exception {
-    test("select * from cp.`/flatten/empty-rm.json`");
+    test("select * from cp.`flatten/empty-rm.json`");
   }
 
   @Test // repeated list within a repeated map
   @Category(UnlikelyTest.class)
   public void drill1673() throws Exception {
-    String path = folder.getRoot().toPath().toString();
-
     String jsonRecords = BaseTestQuery.getFile("store/json/1673.json");
     int numCopies = 25000;
-    new TestConstantFolding.SmallFileCreator(folder)
+    new TestConstantFolding.SmallFileCreator(pathDir)
         .setRecord(jsonRecords)
         .createFiles(1, numCopies, "json");
 
     TestBuilder builder = testBuilder()
         .sqlQuery("select t.fixed_column as fixed_column, " +
                   "flatten(t.list_column) as list_col " +
-                  "from dfs.`" + path + "/bigfile/bigfile.json` as t")
+                  "from dfs.`%s/bigfile/bigfile.json` as t", TEST_DIR)
         .baselineColumns("fixed_column", "list_col")
         .unOrdered();
     Object map1 = mapOf("id1", "1",
@@ -263,7 +274,7 @@ public class TestFlatten extends BaseTestQuery {
   @Test
   @Category(UnlikelyTest.class)
   public void drill1653() throws Exception{
-    int rowCount = testSql("select * from (select sum(t.flat.`value`) as sm from (select id, flatten(kvgen(m)) as flat from cp.`/flatten/missing-map.json`)t) where sm = 10 ");
+    int rowCount = testSql("select * from (select sum(t.flat.`value`) as sm from (select id, flatten(kvgen(m)) as flat from cp.`flatten/missing-map.json`)t) where sm = 10 ");
     assertEquals(1, rowCount);
   }
 
@@ -271,7 +282,7 @@ public class TestFlatten extends BaseTestQuery {
   @Category(UnlikelyTest.class)
   public void drill1652() throws Exception {
     if(RUN_ADVANCED_TESTS){
-      test("select uid, flatten(transactions) from dfs.`/tmp/bigfile.json`");
+      test("select uid, flatten(transactions) from dfs.`tmp/bigfile.json`");
     }
   }
 
@@ -282,7 +293,7 @@ public class TestFlatten extends BaseTestQuery {
         "from (\n" +
         " select userinfo.transaction.trans_id trans_id, max(userinfo.event.event_time) max_event_time\n" +
         " from (\n" +
-        "     select uid, flatten(events) event, flatten(transactions) transaction from cp.`/flatten/single-user-transactions.json`\n" +
+        "     select uid, flatten(events) event, flatten(transactions) transaction from cp.`flatten/single-user-transactions.json`\n" +
         " ) userinfo\n" +
         " where userinfo.transaction.trans_time >= userinfo.event.event_time\n" +
         " group by userinfo.transaction.trans_id\n" +
@@ -290,7 +301,7 @@ public class TestFlatten extends BaseTestQuery {
         "inner join\n" +
         "(\n" +
         " select uid, flatten(events) event\n" +
-        " from cp.`/flatten/single-user-transactions.json`\n" +
+        " from cp.`flatten/single-user-transactions.json`\n" +
         ") event_info\n" +
         "on transaction_info.max_event_time = event_info.event.event_time;");
   }
@@ -299,25 +310,25 @@ public class TestFlatten extends BaseTestQuery {
   public void testKVGenFlatten1() throws Exception {
     // works - TODO and verify results
     test("select flatten(kvgen(f1)) as monkey, x " +
-        "from cp.`/store/json/test_flatten_mapify.json`");
+        "from cp.`store/json/test_flatten_mapify.json`");
   }
 
   @Test
   public void testTwoFlattens() throws Exception {
     // second re-write rule has been added to test the fixes together, this now runs
-    test("select `integer`, `float`, x, flatten(z), flatten(l) from cp.`/jsoninput/input2_modified.json`");
+    test("select `integer`, `float`, x, flatten(z), flatten(l) from cp.`jsoninput/input2_modified.json`");
   }
 
   @Test
   public void testFlattenRepeatedMap() throws Exception {
-    test("select `integer`, `float`, x, flatten(z) from cp.`/jsoninput/input2.json`");
+    test("select `integer`, `float`, x, flatten(z) from cp.`jsoninput/input2.json`");
   }
 
   @Test
   public void testFlattenKVGenFlatten() throws Exception {
     // currently does not fail, but produces incorrect results, requires second re-write rule to split up expressions
     // with complex outputs
-    test("select `integer`, `float`, x, flatten(kvgen(flatten(z))) from cp.`/jsoninput/input2.json`");
+    test("select `integer`, `float`, x, flatten(kvgen(flatten(z))) from cp.`jsoninput/input2.json`");
   }
 
   @Test
@@ -325,7 +336,7 @@ public class TestFlatten extends BaseTestQuery {
     // currently runs
     // TODO - re-verify results by hand
     if(RUN_ADVANCED_TESTS){
-      test("select flatten(kvgen(visited_cellid_counts)) as mytb from dfs.`/tmp/mapkv.json`") ;
+      test("select flatten(kvgen(visited_cellid_counts)) as mytb from dfs.`tmp/mapkv.json`") ;
     }
   }
 
@@ -334,7 +345,7 @@ public class TestFlatten extends BaseTestQuery {
     // WORKS!!
     // TODO - hand verify results
     test("select t2.key from (select t.monkey.`value` as val, t.monkey.key as key from (select flatten(kvgen(f1)) as monkey, x " +
-        "from cp.`/store/json/test_flatten_mapify.json`) as t) as t2 where t2.val > 1");
+        "from cp.`store/json/test_flatten_mapify.json`) as t) as t2 where t2.val > 1");
   }
 
   @Test
@@ -347,7 +358,7 @@ public class TestFlatten extends BaseTestQuery {
     // these types of problems are being solved more generally as we develp better support for chaning schema
     if(RUN_ADVANCED_TESTS){
       test("select celltbl.catl from (\n" +
-          "        select flatten(categories) catl from dfs.`/tmp/yelp_academic_dataset_business.json` b limit 100\n" +
+          "        select flatten(categories) catl from dfs.`tmp/yelp_academic_dataset_business.json` b limit 100\n" +
           "    )  celltbl where celltbl.catl = 'Doctors'");
     }
   }
@@ -356,23 +367,22 @@ public class TestFlatten extends BaseTestQuery {
   public void countAggFlattened() throws Exception {
     if(RUN_ADVANCED_TESTS){
       test("select celltbl.catl, count(celltbl.catl) from ( " +
-          "select business_id, flatten(categories) catl from dfs.`/tmp/yelp_academic_dataset_business.json` b limit 100 " +
+          "select business_id, flatten(categories) catl from dfs.`tmp/yelp_academic_dataset_business.json` b limit 100 " +
           ")  celltbl group by celltbl.catl limit 10 ");
     }
   }
 
-
   @Test
   public void flattenAndAdditionalColumn() throws Exception {
     if(RUN_ADVANCED_TESTS){
-      test("select business_id, flatten(categories) from dfs.`/tmp/yelp_academic_dataset_business.json` b");
+      test("select business_id, flatten(categories) from dfs.`tmp/yelp_academic_dataset_business.json` b");
     }
   }
 
   @Test
   public void testFailingFlattenAlone() throws Exception {
     if(RUN_ADVANCED_TESTS){
-      test("select flatten(categories) from dfs.`/tmp/yelp_academic_dataset_business.json` b  ");
+      test("select flatten(categories) from dfs.`tmp/yelp_academic_dataset_business.json` b  ");
     }
   }
 
@@ -380,7 +390,7 @@ public class TestFlatten extends BaseTestQuery {
   public void testDistinctAggrFlattened() throws Exception {
     if(RUN_ADVANCED_TESTS){
       test(" select distinct(celltbl.catl) from (\n" +
-          "        select flatten(categories) catl from dfs.`/tmp/yelp_academic_dataset_business.json` b\n" +
+          "        select flatten(categories) catl from dfs.`tmp/yelp_academic_dataset_business.json` b\n" +
           "    )  celltbl");
     }
 
@@ -390,24 +400,24 @@ public class TestFlatten extends BaseTestQuery {
   @Category(UnlikelyTest.class)
   public void testDrill1665() throws Exception {
     if(RUN_ADVANCED_TESTS){
-      test("select id, flatten(evnts) as rpt from dfs.`/tmp/drill1665.json`");
+      test("select id, flatten(evnts) as rpt from dfs.`tmp/drill1665.json`");
     }
 
   }
 
   @Test
   public void testFlattenComplexRepeatedMap() throws Exception {
-    test("select a, flatten(r_map_1), flatten(r_map_2) from cp.`/store/json/complex_repeated_map.json`");
+    test("select a, flatten(r_map_1), flatten(r_map_2) from cp.`store/json/complex_repeated_map.json`");
   }
 
   @Test
   public void testFlatten2_levelRepeatedMap() throws Exception {
-    test("select flatten(rm) from cp.`/store/json/2_level_repeated_map.json`");
+    test("select flatten(rm) from cp.`store/json/2_level_repeated_map.json`");
   }
 
   @Test
   public void testDrill_1770() throws Exception {
-    test("select flatten(sub.fk.`value`) from (select flatten(kvgen(map)) fk from cp.`/store/json/nested_repeated_map.json`) sub");
+    test("select flatten(sub.fk.`value`) from (select flatten(kvgen(map)) fk from cp.`store/json/nested_repeated_map.json`) sub");
   }
 
 
@@ -455,7 +465,7 @@ public class TestFlatten extends BaseTestQuery {
   @Category(UnlikelyTest.class)
   public void testDrill_2013() throws Exception {
     testBuilder()
-            .sqlQuery("select flatten(complex), rownum from cp.`/store/json/test_flatten_mappify2.json` where rownum > 5")
+            .sqlQuery("select flatten(complex), rownum from cp.`store/json/test_flatten_mappify2.json` where rownum > 5")
             .expectsEmptyResultSet()
             .build().run();
   }
@@ -480,41 +490,25 @@ public class TestFlatten extends BaseTestQuery {
   @Test // see DRILL-2146
   @Category(UnlikelyTest.class)
   public void testFalttenWithStar() throws Exception {
-    String root = FileUtils.getResourceAsFile("/store/text/sample.json").toURI().toString();
-    String q1 = String.format("select *, flatten(j.topping) tt, flatten(j.batters.batter) bb, j.id " +
-        "from dfs_test.`%s` j " +
-        "where j.type = 'donut'", root);
-    String q2 = String.format("select *, flatten(j.topping) tt, flatten(j.batters.batter) bb, j.id, j.type " +
-        "from dfs_test.`%s` j " +
-        "where j.type = 'donut'", root);
-
-    test(q1);
-    test(q2);
+    test("select *, flatten(j.topping) tt, flatten(j.batters.batter) bb, j.id " +
+      "from cp.`store/text/sample.json` j where j.type = 'donut'");
+    test("select *, flatten(j.topping) tt, flatten(j.batters.batter) bb, j.id, j.type " +
+      "from cp.`store/text/sample.json` j where j.type = 'donut'");
   }
 
   @Test // see DRILL-2012
   @Category(UnlikelyTest.class)
   public void testMultipleFalttenWithWhereClause() throws Exception {
-    String root = FileUtils.getResourceAsFile("/store/text/sample.json").toURI().toString();
-    String q1 = String.format("select flatten(j.topping) tt " +
-        "from dfs_test.`%s` j " +
-        "where j.type = 'donut'", root);
-    String q2 = String.format("select j.type, flatten(j.topping) tt " +
-         "from dfs_test.`%s` j " +
-         "where j.type = 'donut'", root);
-
-    test(q1);
-    test(q2);
+    test("select flatten(j.topping) tt from cp.`store/text/sample.json` j where j.type = 'donut'");
+    test("select j.type, flatten(j.topping) tt from cp.`store/text/sample.json` j where j.type = 'donut'");
   }
 
   @Test //DRILL-2099
   @Category(UnlikelyTest.class)
   public void testFlattenAfterSort() throws Exception {
-    String query = "select flatten(s1.rms.rptd) rptds from " +
-        "(select d.uid uid, flatten(d.map.rm) rms from cp.`jsoninput/flatten_post_sort.json` d order by d.uid) s1";
-
     testBuilder()
-        .sqlQuery(query)
+        .sqlQuery("select flatten(s1.rms.rptd) rptds from " +
+          "(select d.uid uid, flatten(d.map.rm) rms from cp.`jsoninput/flatten_post_sort.json` d order by d.uid) s1")
         .unOrdered()
         .jsonBaselineFile("flatten/drill-2099-result.json")
         .go();
@@ -523,12 +517,10 @@ public class TestFlatten extends BaseTestQuery {
   @Test //DRILL-2268
   @Category(UnlikelyTest.class)
   public void testFlattenAfterJoin1() throws Exception {
-    String query = "select flatten(sub1.events) flat_events  from "+
-      "(select t1.events events from cp.`complex/json/flatten_join.json` t1 "+
-      "inner join cp.`complex/json/flatten_join.json` t2 on t1.id=t2.id) sub1";
-
     testBuilder()
-      .sqlQuery(query)
+      .sqlQuery("select flatten(sub1.events) flat_events  from "+
+        "(select t1.events events from cp.`complex/json/flatten_join.json` t1 "+
+        "inner join cp.`complex/json/flatten_join.json` t2 on t1.id=t2.id) sub1")
       .unOrdered()
       .jsonBaselineFile("complex/drill-2268-1-result.json")
       .go();
@@ -537,11 +529,9 @@ public class TestFlatten extends BaseTestQuery {
   @Test //DRILL-2268
   @Category(UnlikelyTest.class)
   public void testFlattenAfterJoin2() throws Exception {
-    String query = "select flatten(t1.events) flat_events from cp.`complex/json/flatten_join.json` t1 " +
-      "inner join cp.`complex/json/flatten_join.json` t2 on t1.id=t2.id";
-
     testBuilder()
-      .sqlQuery(query)
+      .sqlQuery("select flatten(t1.events) flat_events from cp.`complex/json/flatten_join.json` t1 " +
+        "inner join cp.`complex/json/flatten_join.json` t2 on t1.id=t2.id")
       .unOrdered()
       .jsonBaselineFile("complex/drill-2268-2-result.json")
       .go();
@@ -550,12 +540,10 @@ public class TestFlatten extends BaseTestQuery {
   @Test //DRILL-2268
   @Category(UnlikelyTest.class)
   public void testFlattenAfterJoin3() throws Exception {
-    String query = "select flatten(sub1.lst_lst) flat_lst_lst from "+
-      "(select t1.lst_lst lst_lst from cp.`complex/json/flatten_join.json` t1 "+
-      "inner join cp.`complex/json/flatten_join.json` t2 on t1.id=t2.id) sub1";
-
     testBuilder()
-      .sqlQuery(query)
+      .sqlQuery("select flatten(sub1.lst_lst) flat_lst_lst from "+
+        "(select t1.lst_lst lst_lst from cp.`complex/json/flatten_join.json` t1 "+
+        "inner join cp.`complex/json/flatten_join.json` t2 on t1.id=t2.id) sub1")
       .unOrdered()
       .jsonBaselineFile("complex/drill-2268-3-result.json")
       .go();
@@ -563,51 +551,40 @@ public class TestFlatten extends BaseTestQuery {
 
   @Test
   public void testFlattenWithScalarFunc() throws Exception {
-    final String query = "select flatten(t.l) + 1  as c1 from cp.`/jsoninput/input2.json` t";
-
     testBuilder()
-        .sqlQuery(query)
-        .unOrdered()
-        .baselineColumns("c1")
-        .baselineValues(5L)
-        .baselineValues(3L)
-        .baselineValues(5L)
-        .baselineValues(3L)
-        .baselineValues(5L)
-        .baselineValues(3L)
-        .go();
-
+      .sqlQuery("select flatten(t.l) + 1  as c1 from cp.`jsoninput/input2.json` t")
+      .unOrdered()
+      .baselineColumns("c1")
+      .baselineValues(5L)
+      .baselineValues(3L)
+      .baselineValues(5L)
+      .baselineValues(3L)
+      .baselineValues(5L)
+      .baselineValues(3L)
+      .go();
   }
 
   @Test
   public void testFlattenOnEmptyArrayAndNestedMap() throws Exception {
-    File path = new File(BaseTestQuery.getTempDir("json/input"));
-    try {
-      path.mkdirs();
-      String pathString = path.toPath().toString();
-
-      try (BufferedWriter writer = new BufferedWriter(new FileWriter(new File(path, "empty_arrays.json")))) {
-        writer.write("{\"a\" : {\"a1\" : \"a1\"}, \"b\" : [1]}\n");
-        for (int i = 0; i < JSONRecordReader.DEFAULT_ROWS_PER_BATCH; i++) {
-          writer.write("{\"a\" : {\"a1\" : \"a1\"}, \"b\" : [], \"c\" : 1}\n");
-        }
-        writer.write("{\"a\" : {\"a1\" : \"a1\"}, \"b\" : [1], \"c\" : 1}");
+    final Path path = Paths.get("json", "input");
+    final File dir = dirTestWatcher.makeRootSubDir(path);
+
+    try (BufferedWriter writer = new BufferedWriter(new FileWriter(new File(dir, "empty_arrays.json")))) {
+      writer.write("{\"a\" : {\"a1\" : \"a1\"}, \"b\" : [1]}\n");
+      for (int i = 0; i < JSONRecordReader.DEFAULT_ROWS_PER_BATCH; i++) {
+        writer.write("{\"a\" : {\"a1\" : \"a1\"}, \"b\" : [], \"c\" : 1}\n");
       }
+      writer.write("{\"a\" : {\"a1\" : \"a1\"}, \"b\" : [1], \"c\" : 1}");
+    }
 
-      String query = "select typeof(t1.a.a1) as col from " +
-        "(select t.*, flatten(t.b) as b from dfs_test.`%s/empty_arrays.json` t where t.c is not null) t1";
+    String query = "select typeof(t1.a.a1) as col from " +
+      "(select t.*, flatten(t.b) as b from dfs.`%s/empty_arrays.json` t where t.c is not null) t1";
 
-      testBuilder()
-        .sqlQuery(query, pathString)
-        .unOrdered()
-        .baselineColumns("col")
-        .baselineValues("VARCHAR")
-        .go();
-
-    } finally {
-      deleteQuietly(path);
-    }
+    testBuilder()
+      .sqlQuery(query, path)
+      .unOrdered()
+      .baselineColumns("col")
+      .baselineValues("VARCHAR")
+      .go();
   }
-
 }
-
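
Two fixture patterns recur in this file: a SubDirTestWatcher rule that pre-creates writable subdirectories under the dfs root, and dirTestWatcher.makeRootSubDir(...) for ad-hoc directories. A condensed sketch of the second pattern (illustrative only, mirroring testFlattenOnEmptyArrayAndNestedMap above; the test name is hypothetical):

  @Test
  public void writeThenQuery() throws Exception {
    // create dfs-root/json/input and generate a small JSON file inside it
    File dir = dirTestWatcher.makeRootSubDir(Paths.get("json", "input"));
    try (BufferedWriter writer = new BufferedWriter(
        new FileWriter(new File(dir, "rows.json")))) {
      writer.write("{\"a\": 1}\n");
    }
    // the generated file is addressable by a workspace-relative path
    test("select a from dfs.`json/input/rows.json`");
  }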

http://git-wip-us.apache.org/repos/asf/drill/blob/acc5ed92/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/flatten/TestFlattenPlanning.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/flatten/TestFlattenPlanning.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/flatten/TestFlattenPlanning.java
index 75537af..1a5117f 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/flatten/TestFlattenPlanning.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/flatten/TestFlattenPlanning.java
@@ -30,7 +30,7 @@ public class TestFlattenPlanning extends PlanTestBase {
   public void testFlattenPlanningAvoidUnnecessaryProject() throws Exception {
     // Because of Java7 vs Java8 map ordering differences, we check for both 
cases
     // See DRILL-4331 for details
-    testPlanMatchingPatterns("select flatten(complex), rownum from cp.`/store/json/test_flatten_mappify2.json`",
+    testPlanMatchingPatterns("select flatten(complex), rownum from cp.`store/json/test_flatten_mappify2.json`",
         new String[]{"\\QProject(EXPR$0=[$1], rownum=[$0])\\E|\\QProject(EXPR$0=[$0], rownum=[$1])\\E"},
         new String[]{"\\QProject(EXPR$0=[$0], EXPR$1=[$1], EXPR$3=[$1])\\E|\\QProject(EXPR$0=[$1], EXPR$1=[$0], EXPR$3=[$0])\\E"});
   }
@@ -40,7 +40,7 @@ public class TestFlattenPlanning extends PlanTestBase {
     final String query =
         " select comp, rownum " +
         " from (select flatten(complex) comp, rownum " +
-        "      from cp.`/store/json/test_flatten_mappify2.json`) " +
+        "      from cp.`store/json/test_flatten_mappify2.json`) " +
         " where comp > 1 " +   // should not be pushed down
         "   and rownum = 100"; // should be pushed down.
 
@@ -54,7 +54,7 @@ public class TestFlattenPlanning extends PlanTestBase {
     final String query =
         " select comp, rownum " +
             " from (select flatten(complex) comp, rownum " +
-            "      from cp.`/store/json/test_flatten_mappify2.json`) " +
+            "      from cp.`store/json/test_flatten_mappify2.json`) " +
             " where comp > 1 " +   // should NOT be pushed down
             "   OR rownum = 100";  // should NOT be pushed down.
 

http://git-wip-us.apache.org/repos/asf/drill/blob/acc5ed92/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/join/TestHashJoin.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/join/TestHashJoin.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/join/TestHashJoin.java
index a9fb80d..cee84dc 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/join/TestHashJoin.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/join/TestHashJoin.java
@@ -26,8 +26,8 @@ import java.util.List;
 import org.apache.drill.categories.OperatorTest;
 import org.apache.drill.categories.SlowTest;
 import org.apache.drill.common.config.DrillConfig;
-import org.apache.drill.common.util.FileUtils;
-import org.apache.drill.common.util.TestTools;
+import org.apache.drill.common.util.DrillFileUtils;
+import org.apache.drill.test.TestTools;
 import org.apache.drill.exec.client.DrillClient;
 import org.apache.drill.exec.expr.fn.FunctionImplementationRegistry;
 import org.apache.drill.exec.ops.FragmentContext;
@@ -70,7 +70,7 @@ public class TestHashJoin extends PopUnitTestBase {
     mockDrillbitContext(bitContext);
 
     final PhysicalPlanReader reader = PhysicalPlanReaderTestFactory.defaultPhysicalPlanReader(c);
-    final PhysicalPlan plan = reader.readPhysicalPlan(Files.toString(FileUtils.getResourceAsFile(physicalPlan), Charsets.UTF_8));
+    final PhysicalPlan plan = reader.readPhysicalPlan(Files.toString(DrillFileUtils.getResourceAsFile(physicalPlan), Charsets.UTF_8));
     final FunctionImplementationRegistry registry = new FunctionImplementationRegistry(c);
     final FragmentContext context = new FragmentContext(bitContext, PlanFragment.getDefaultInstance(), connection, registry);
     final SimpleRootExec exec = new SimpleRootExec(ImplCreator.getExec(context, (FragmentRoot) plan.getSortedOperators(false).iterator().next()));
@@ -120,9 +120,9 @@ public class TestHashJoin extends PopUnitTestBase {
       bit.run();
       client.connect();
      List<QueryDataBatch> results = client.runQuery(org.apache.drill.exec.proto.UserBitShared.QueryType.PHYSICAL,
-              Files.toString(FileUtils.getResourceAsFile("/join/hash_join.json"), Charsets.UTF_8)
-                      .replace("#{TEST_FILE_1}", FileUtils.getResourceAsFile("/build_side_input.json").toURI().toString())
-                      .replace("#{TEST_FILE_2}", FileUtils.getResourceAsFile("/probe_side_input.json").toURI().toString()));
+              Files.toString(DrillFileUtils.getResourceAsFile("/join/hash_join.json"), Charsets.UTF_8)
+                      .replace("#{TEST_FILE_1}", DrillFileUtils.getResourceAsFile("/build_side_input.json").toURI().toString())
+                      .replace("#{TEST_FILE_2}", DrillFileUtils.getResourceAsFile("/probe_side_input.json").toURI().toString()));
 
       RecordBatchLoader batchLoader = new RecordBatchLoader(bit.getContext().getAllocator());
 
@@ -163,7 +163,7 @@ public class TestHashJoin extends PopUnitTestBase {
       bit.run();
       client.connect();
       final List<QueryDataBatch> results = client.runQuery(org.apache.drill.exec.proto.UserBitShared.QueryType.PHYSICAL,
-              Files.toString(FileUtils.getResourceAsFile("/join/hj_exchanges.json"), Charsets.UTF_8));
+              Files.toString(DrillFileUtils.getResourceAsFile("/join/hj_exchanges.json"), Charsets.UTF_8));
 
       int count = 0;
       for (final QueryDataBatch b : results) {
@@ -191,9 +191,9 @@ public class TestHashJoin extends PopUnitTestBase {
       bit.run();
       client.connect();
       final List<QueryDataBatch> results = client.runQuery(org.apache.drill.exec.proto.UserBitShared.QueryType.PHYSICAL,
-              Files.toString(FileUtils.getResourceAsFile("/join/hj_multi_condition_join.json"), Charsets.UTF_8)
-                      .replace("#{TEST_FILE_1}", FileUtils.getResourceAsFile("/build_side_input.json").toURI().toString())
-                      .replace("#{TEST_FILE_2}", FileUtils.getResourceAsFile("/probe_side_input.json").toURI().toString()));
+              Files.toString(DrillFileUtils.getResourceAsFile("/join/hj_multi_condition_join.json"), Charsets.UTF_8)
+                      .replace("#{TEST_FILE_1}", DrillFileUtils.getResourceAsFile("/build_side_input.json").toURI().toString())
+                      .replace("#{TEST_FILE_2}", DrillFileUtils.getResourceAsFile("/probe_side_input.json").toURI().toString()));
 
       final RecordBatchLoader batchLoader = new RecordBatchLoader(bit.getContext().getAllocator());
 
@@ -237,7 +237,7 @@ public class TestHashJoin extends PopUnitTestBase {
       bit.run();
       client.connect();
       final List<QueryDataBatch> results = client.runQuery(org.apache.drill.exec.proto.UserBitShared.QueryType.PHYSICAL,
-          Files.toString(FileUtils.getResourceAsFile("/join/hj_exchanges1.json"), Charsets.UTF_8));
+          Files.toString(DrillFileUtils.getResourceAsFile("/join/hj_exchanges1.json"), Charsets.UTF_8));
 
       int count = 0;
       for (final QueryDataBatch b : results) {
@@ -262,7 +262,7 @@ public class TestHashJoin extends PopUnitTestBase {
       bit1.run();
       client.connect();
       final List<QueryDataBatch> results = client.runQuery(org.apache.drill.exec.proto.UserBitShared.QueryType.PHYSICAL,
-              Files.toString(FileUtils.getResourceAsFile("/join/hashJoinExpr.json"), Charsets.UTF_8));
+              Files.toString(DrillFileUtils.getResourceAsFile("/join/hashJoinExpr.json"), Charsets.UTF_8));
       int count = 0;
       for (final QueryDataBatch b : results) {
         if (b.getHeader().getRowCount() != 0) {

http://git-wip-us.apache.org/repos/asf/drill/blob/acc5ed92/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/join/TestHashJoinAdvanced.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/join/TestHashJoinAdvanced.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/join/TestHashJoinAdvanced.java
index 205daa2..49aefe6 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/join/TestHashJoinAdvanced.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/join/TestHashJoinAdvanced.java
@@ -19,7 +19,7 @@
 package org.apache.drill.exec.physical.impl.join;
 
 
-import org.apache.drill.BaseTestQuery;
+import org.apache.drill.test.BaseTestQuery;
 import org.apache.drill.categories.OperatorTest;
 import org.apache.drill.categories.UnlikelyTest;
 import org.junit.AfterClass;
@@ -141,32 +141,23 @@ public class TestHashJoinAdvanced extends BaseTestQuery {
   @Test //DRILL-2197 Left Join with complex type in projection
   @Category(UnlikelyTest.class)
   public void testJoinWithMapAndDotField() throws Exception {
-    File directory = new File(BaseTestQuery.getTempDir("json/input"));
-    try {
-      directory.mkdirs();
-      String fileName = "table.json";
-      try (BufferedWriter writer = new BufferedWriter(new FileWriter(new File(directory, fileName)))) {
-        writer.write("{\"rk.q\": \"a\", \"m\": {\"a.b\":\"1\", \"a\":{\"b\":\"2\"}, \"c\":\"3\"}}");
-      }
-
-      String query = String.format("select t1.m.`a.b` as a,\n" +
-                                          "t2.m.a.b as b,\n" +
-                                          "t1.m['a.b'] as c,\n" +
-                                          "t2.rk.q as d,\n" +
-                                          "t1.`rk.q` as e\n" +
-                                   "from dfs_test.`%1$s/%2$s` t1,\n" +
-                                        "dfs_test.`%1$s/%2$s` t2\n" +
-                                  "where t1.m.`a.b`=t2.m.`a.b` and t1.m.a.b=t2.m.a.b",
-                                   directory.toPath().toString(), fileName);
-      testBuilder()
-        .sqlQuery(query)
-        .unOrdered()
-        .baselineColumns("a", "b", "c", "d", "e")
-        .baselineValues("1", "2", "1", null, "a")
-        .go();
-
-    } finally {
-      org.apache.commons.io.FileUtils.deleteQuietly(directory);
+    String fileName = "table.json";
+    try (BufferedWriter writer = new BufferedWriter(new FileWriter(new File(dirTestWatcher.getRootDir(), fileName)))) {
+      writer.write("{\"rk.q\": \"a\", \"m\": {\"a.b\":\"1\", \"a\":{\"b\":\"2\"}, \"c\":\"3\"}}");
     }
+
+    testBuilder()
+      .sqlQuery("select t1.m.`a.b` as a,\n" +
+        "t2.m.a.b as b,\n" +
+        "t1.m['a.b'] as c,\n" +
+        "t2.rk.q as d,\n" +
+        "t1.`rk.q` as e\n" +
+        "from dfs.`%1$s` t1,\n" +
+        "dfs.`%1$s` t2\n" +
+        "where t1.m.`a.b`=t2.m.`a.b` and t1.m.a.b=t2.m.a.b", fileName)
+      .unOrdered()
+      .baselineColumns("a", "b", "c", "d", "e")
+      .baselineValues("1", "2", "1", null, "a")
+      .go();
   }
 }
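
The rewritten testJoinWithMapAndDotField drops the temp-directory bookkeeping: files written into dirTestWatcher.getRootDir() are visible to the dfs plugin and addressable by bare file name. A hypothetical minimal self-join in the same style (test name and data are illustrative; JSON integers are assumed to read back as BIGINT):

  @Test
  public void selfJoinOnGeneratedFile() throws Exception {
    String fileName = "example.json";
    try (BufferedWriter writer = new BufferedWriter(
        new FileWriter(new File(dirTestWatcher.getRootDir(), fileName)))) {
      writer.write("{\"id\": 1}");
    }
    testBuilder()
      .sqlQuery("select t1.id from dfs.`%1$s` t1, dfs.`%1$s` t2 where t1.id = t2.id", fileName)
      .unOrdered()
      .baselineColumns("id")
      .baselineValues(1L) // JSON integer read as BIGINT
      .go();
  }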

http://git-wip-us.apache.org/repos/asf/drill/blob/acc5ed92/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/join/TestMergeJoin.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/join/TestMergeJoin.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/join/TestMergeJoin.java
index 25e8ba5..1c16fa4 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/join/TestMergeJoin.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/join/TestMergeJoin.java
@@ -25,7 +25,7 @@ import java.util.List;
 
 import org.apache.drill.categories.OperatorTest;
 import org.apache.drill.common.config.DrillConfig;
-import org.apache.drill.common.util.FileUtils;
+import org.apache.drill.common.util.DrillFileUtils;
 import org.apache.drill.exec.client.DrillClient;
 import org.apache.drill.exec.expr.fn.FunctionImplementationRegistry;
 import org.apache.drill.exec.ops.FragmentContext;
@@ -68,7 +68,7 @@ public class TestMergeJoin extends PopUnitTestBase {
     mockDrillbitContext(bitContext);
 
     final PhysicalPlanReader reader = PhysicalPlanReaderTestFactory.defaultPhysicalPlanReader(c);
-    final PhysicalPlan plan = reader.readPhysicalPlan(Files.toString(FileUtils.getResourceAsFile("/join/merge_join.json"), Charsets.UTF_8));
+    final PhysicalPlan plan = reader.readPhysicalPlan(Files.toString(DrillFileUtils.getResourceAsFile("/join/merge_join.json"), Charsets.UTF_8));
     final FunctionImplementationRegistry registry = new FunctionImplementationRegistry(c);
     final FragmentContext context = new FragmentContext(bitContext, PlanFragment.getDefaultInstance(), connection, registry);
     final SimpleRootExec exec = new SimpleRootExec(ImplCreator.getExec(context, (FragmentRoot) plan.getSortedOperators(false).iterator().next()));
@@ -118,9 +118,9 @@ public class TestMergeJoin extends PopUnitTestBase {
         new StoragePluginRegistryImpl(bitContext));
     final PhysicalPlan plan = reader.readPhysicalPlan(
         Files.toString(
-            FileUtils.getResourceAsFile("/join/merge_single_batch.json"), Charsets.UTF_8)
-            .replace("#{LEFT_FILE}", FileUtils.getResourceAsFile("/join/merge_single_batch.left.json").toURI().toString())
-            .replace("#{RIGHT_FILE}", FileUtils.getResourceAsFile("/join/merge_single_batch.right.json").toURI().toString()));
+            DrillFileUtils.getResourceAsFile("/join/merge_single_batch.json"), Charsets.UTF_8)
+            .replace("#{LEFT_FILE}", DrillFileUtils.getResourceAsFile("/join/merge_single_batch.left.json").toURI().toString())
+            .replace("#{RIGHT_FILE}", DrillFileUtils.getResourceAsFile("/join/merge_single_batch.right.json").toURI().toString()));
     final FunctionImplementationRegistry registry = new FunctionImplementationRegistry(c);
     final FragmentContext context = new FragmentContext(bitContext, PlanFragment.getDefaultInstance(), connection, registry);
     final SimpleRootExec exec = new SimpleRootExec(ImplCreator.getExec(context, (FragmentRoot) plan.getSortedOperators(false).iterator().next()));
@@ -169,9 +169,9 @@ public class TestMergeJoin extends PopUnitTestBase {
         new StoragePluginRegistryImpl(bitContext));
     final PhysicalPlan plan = reader.readPhysicalPlan(
         Files.toString(
-            FileUtils.getResourceAsFile("/join/merge_inner_single_batch.json"), Charsets.UTF_8)
-            .replace("#{LEFT_FILE}", FileUtils.getResourceAsFile("/join/merge_single_batch.left.json").toURI().toString())
-            .replace("#{RIGHT_FILE}", FileUtils.getResourceAsFile("/join/merge_single_batch.right.json").toURI().toString()));
+            DrillFileUtils.getResourceAsFile("/join/merge_inner_single_batch.json"), Charsets.UTF_8)
+            .replace("#{LEFT_FILE}", DrillFileUtils.getResourceAsFile("/join/merge_single_batch.left.json").toURI().toString())
+            .replace("#{RIGHT_FILE}", DrillFileUtils.getResourceAsFile("/join/merge_single_batch.right.json").toURI().toString()));
     final FunctionImplementationRegistry registry = new FunctionImplementationRegistry(c);
     final FragmentContext context = new FragmentContext(bitContext, PlanFragment.getDefaultInstance(), connection, registry);
     final SimpleRootExec exec = new SimpleRootExec(ImplCreator.getExec(context, (FragmentRoot) plan.getSortedOperators(false).iterator().next()));
@@ -220,9 +220,9 @@ public class TestMergeJoin extends PopUnitTestBase {
         new StoragePluginRegistryImpl(bitContext));
     final PhysicalPlan plan = reader.readPhysicalPlan(
         Files.toString(
-            FileUtils.getResourceAsFile("/join/merge_multi_batch.json"), Charsets.UTF_8)
-            .replace("#{LEFT_FILE}", FileUtils.getResourceAsFile("/join/merge_multi_batch.left.json").toURI().toString())
-            .replace("#{RIGHT_FILE}", FileUtils.getResourceAsFile("/join/merge_multi_batch.right.json").toURI().toString()));
+            DrillFileUtils.getResourceAsFile("/join/merge_multi_batch.json"), Charsets.UTF_8)
+            .replace("#{LEFT_FILE}", DrillFileUtils.getResourceAsFile("/join/merge_multi_batch.left.json").toURI().toString())
+            .replace("#{RIGHT_FILE}", DrillFileUtils.getResourceAsFile("/join/merge_multi_batch.right.json").toURI().toString()));
     final FunctionImplementationRegistry registry = new FunctionImplementationRegistry(c);
     final FragmentContext context = new FragmentContext(bitContext, PlanFragment.getDefaultInstance(), connection, registry);
     final SimpleRootExec exec = new SimpleRootExec(ImplCreator.getExec(context, (FragmentRoot) plan.getSortedOperators(false).iterator().next()));
@@ -265,7 +265,7 @@ public class TestMergeJoin extends PopUnitTestBase {
     mockDrillbitContext(bitContext);
 
    final PhysicalPlanReader reader = PhysicalPlanReaderTestFactory.defaultPhysicalPlanReader(c);
-    final PhysicalPlan plan = reader.readPhysicalPlan(Files.toString(FileUtils.getResourceAsFile("/join/join_batchsize.json"), Charsets.UTF_8));
+    final PhysicalPlan plan = reader.readPhysicalPlan(Files.toString(DrillFileUtils.getResourceAsFile("/join/join_batchsize.json"), Charsets.UTF_8));
     final FunctionImplementationRegistry registry = new FunctionImplementationRegistry(c);
     final FragmentContext context = new FragmentContext(bitContext, PlanFragment.getDefaultInstance(), connection, registry);
     final SimpleRootExec exec = new SimpleRootExec(ImplCreator.getExec(context, (FragmentRoot) plan.getSortedOperators(false).iterator().next()));
@@ -290,7 +290,7 @@ public class TestMergeJoin extends PopUnitTestBase {
       bit1.run();
       client.connect();
      final List<QueryDataBatch> results = client.runQuery(org.apache.drill.exec.proto.UserBitShared.QueryType.PHYSICAL,
-              Files.toString(FileUtils.getResourceAsFile("/join/merge_join_empty_batch.json"),
+              Files.toString(DrillFileUtils.getResourceAsFile("/join/merge_join_empty_batch.json"),
                       Charsets.UTF_8)
                       .replace("${JOIN_TYPE}", "INNER"));
       int count = 0;
@@ -314,7 +314,7 @@ public class TestMergeJoin extends PopUnitTestBase {
       bit1.run();
       client.connect();
      final List<QueryDataBatch> results = client.runQuery(org.apache.drill.exec.proto.UserBitShared.QueryType.PHYSICAL,
-          Files.toString(FileUtils.getResourceAsFile("/join/merge_join_empty_batch.json"),
+          Files.toString(DrillFileUtils.getResourceAsFile("/join/merge_join_empty_batch.json"),
               Charsets.UTF_8)
               .replace("${JOIN_TYPE}", "LEFT"));
       int count = 0;
@@ -338,7 +338,7 @@ public class TestMergeJoin extends PopUnitTestBase {
       bit1.run();
       client.connect();
      final List<QueryDataBatch> results = client.runQuery(org.apache.drill.exec.proto.UserBitShared.QueryType.PHYSICAL,
-              Files.toString(FileUtils.getResourceAsFile("/join/merge_join_empty_batch.json"),
+              Files.toString(DrillFileUtils.getResourceAsFile("/join/merge_join_empty_batch.json"),
                       Charsets.UTF_8)
                       .replace("${JOIN_TYPE}", "RIGHT"));
       int count = 0;
@@ -362,7 +362,7 @@ public class TestMergeJoin extends PopUnitTestBase {
       bit1.run();
       client.connect();
      final List<QueryDataBatch> results = client.runQuery(org.apache.drill.exec.proto.UserBitShared.QueryType.PHYSICAL,
-              Files.toString(FileUtils.getResourceAsFile("/join/mergeJoinExpr.json"), Charsets.UTF_8));
+              Files.toString(DrillFileUtils.getResourceAsFile("/join/mergeJoinExpr.json"), Charsets.UTF_8));
       int count = 0;
       for (final QueryDataBatch b : results) {
         if (b.getHeader().getRowCount() != 0) {
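
The hunks above are a mechanical rename: classpath resource lookup moves from
org.apache.drill.common.util.FileUtils to DrillFileUtils. A minimal sketch of
the resulting load pattern (the helper name loadPlan is illustrative only;
reader and the resource path come from the tests above, and Guava's
Files/Charsets are assumed on the classpath as they are in these tests):

    // Load a physical plan JSON from the test classpath and parse it.
    private static PhysicalPlan loadPlan(PhysicalPlanReader reader, String resource) throws Exception {
      return reader.readPhysicalPlan(
          Files.toString(DrillFileUtils.getResourceAsFile(resource), Charsets.UTF_8));
    }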

http://git-wip-us.apache.org/repos/asf/drill/blob/acc5ed92/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/join/TestMergeJoinAdvanced.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/join/TestMergeJoinAdvanced.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/join/TestMergeJoinAdvanced.java
index d01b030..310b331 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/join/TestMergeJoinAdvanced.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/join/TestMergeJoinAdvanced.java
@@ -17,12 +17,11 @@
  */
 package org.apache.drill.exec.physical.impl.join;
 
-import org.apache.drill.BaseTestQuery;
+import org.apache.drill.test.BaseTestQuery;
 import org.apache.drill.categories.OperatorTest;
-import org.apache.drill.common.util.TestTools;
+import org.apache.drill.test.TestTools;
 import org.apache.drill.exec.ExecConstants;
 import org.apache.drill.exec.planner.physical.PlannerSettings;
-import org.apache.drill.test.OperatorFixture;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
 import org.junit.Ignore;
@@ -35,10 +34,17 @@ import java.io.BufferedWriter;
 import java.io.File;
 import java.io.FileWriter;
 import java.io.IOException;
+import java.nio.file.Paths;
 import java.util.Random;
 
 @Category(OperatorTest.class)
 public class TestMergeJoinAdvanced extends BaseTestQuery {
+  private static final String LEFT = "merge-join-left.json";
+  private static final String RIGHT = "merge-join-right.json";
+
+  private static File leftFile;
+  private static File rightFile;
+
   @Rule
  public final TestRule TIMEOUT = TestTools.getTimeoutRule(120000); // Longer timeout than usual.
 
@@ -46,6 +52,11 @@ public class TestMergeJoinAdvanced extends BaseTestQuery {
   @BeforeClass
   public static void disableMergeJoin() throws Exception {
     test("alter session set `planner.enable_hashjoin` = false");
+
+    leftFile = new File(dirTestWatcher.getRootDir(), LEFT);
+    rightFile = new File(dirTestWatcher.getRootDir(), RIGHT);
+
+    dirTestWatcher.copyResourceToRoot(Paths.get("join"));
   }
 
   @AfterClass
@@ -100,11 +111,8 @@ public class TestMergeJoinAdvanced extends BaseTestQuery {
     setSessionOption(ExecConstants.SLICE_TARGET, "1");
     setSessionOption(ExecConstants.MAX_WIDTH_PER_NODE_KEY, "23");
 
-    final String TEST_RES_PATH = TestTools.getWorkingPath() + "/src/test/resources";
-
     try {
-      test("select * from dfs_test.`%s/join/j1` j1 left outer join dfs_test.`%s/join/j2` j2 on (j1.c_varchar = j2.c_varchar)",
-        TEST_RES_PATH, TEST_RES_PATH);
+      test("select * from dfs.`join/j1` j1 left outer join dfs.`join/j2` j2 on (j1.c_varchar = j2.c_varchar)");
     } finally {
       test("ALTER SESSION RESET ALL");
     }
@@ -131,13 +139,11 @@ public class TestMergeJoinAdvanced extends BaseTestQuery {
 
  private static void testMultipleBatchJoin(final long right, final long left,
                                            final String joinType, final long expected) throws Exception {
-    final String leftSide = BaseTestQuery.getTempDir("merge-join-left.json");
-    final String rightSide = BaseTestQuery.getTempDir("merge-join-right.json");
-    final BufferedWriter leftWriter = new BufferedWriter(new FileWriter(new File(leftSide)));
-    final BufferedWriter rightWriter = new BufferedWriter(new FileWriter(new File(rightSide)));
+    final BufferedWriter leftWriter = new BufferedWriter(new FileWriter(leftFile));
+    final BufferedWriter rightWriter = new BufferedWriter(new FileWriter(rightFile));
     generateData(leftWriter, rightWriter, left, right);
-    final String query1 = String.format("select count(*) c1 from dfs_test.`%s` L %s join dfs_test.`%s` R on L.k=R.k1",
-      leftSide, joinType, rightSide);
+    final String query1 = String.format("select count(*) c1 from dfs.`%s` L %s join dfs.`%s` R on L.k=R.k1",
+      LEFT, joinType, RIGHT);
     testBuilder()
       .sqlQuery(query1)
       .optionSettingQueriesForTestQuery("alter session set 
`planner.enable_hashjoin` = false")
@@ -218,10 +224,8 @@ public class TestMergeJoinAdvanced extends BaseTestQuery {
 
   @Test
   public void testDrill4196() throws Exception {
-    final String leftSide = BaseTestQuery.getTempDir("merge-join-left.json");
-    final String rightSide = BaseTestQuery.getTempDir("merge-join-right.json");
-    final BufferedWriter leftWriter = new BufferedWriter(new FileWriter(new 
File(leftSide)));
-    final BufferedWriter rightWriter = new BufferedWriter(new FileWriter(new 
File(rightSide)));
+    final BufferedWriter leftWriter = new BufferedWriter(new 
FileWriter(leftFile));
+    final BufferedWriter rightWriter = new BufferedWriter(new 
FileWriter(rightFile));
 
     // output batch is 32k, create 60k left batch
     leftWriter.write(String.format("{ \"k\" : %d , \"v\": %d }", 9999, 9999));
@@ -239,8 +243,8 @@ public class TestMergeJoinAdvanced extends BaseTestQuery {
     leftWriter.close();
     rightWriter.close();
 
-    final String query1 = String.format("select count(*) c1 from dfs_test.`%s` L %s join dfs_test.`%s` R on L.k=R.k1",
-      leftSide, "inner", rightSide);
+    final String query1 = String.format("select count(*) c1 from dfs.`%s` L %s join dfs.`%s` R on L.k=R.k1",
+      LEFT, "inner", RIGHT);
     testBuilder()
       .sqlQuery(query1)
       .optionSettingQueriesForTestQuery("alter session set 
`planner.enable_hashjoin` = false")
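
This file's rewrite drops BaseTestQuery.getTempDir() and absolute dfs_test
paths in favor of files created under dirTestWatcher's root directory, which
the dfs workspace points at. A sketch of the pattern under those assumptions
(writeLeftInput is an illustrative helper, not part of the commit):

    // Write a generated JSON input under the watcher root; the dfs workspace
    // is rooted there, so queries address the file by its bare name.
    private static void writeLeftInput() throws IOException {
      File leftFile = new File(dirTestWatcher.getRootDir(), "merge-join-left.json");
      try (BufferedWriter writer = new BufferedWriter(new FileWriter(leftFile))) {
        writer.write("{ \"k\" : 1 , \"v\": 1 }\n");
      }
    }
    // Usage: test("select count(*) c1 from dfs.`merge-join-left.json`");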

http://git-wip-us.apache.org/repos/asf/drill/blob/acc5ed92/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/join/TestMergeJoinMulCondition.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/join/TestMergeJoinMulCondition.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/join/TestMergeJoinMulCondition.java
index 48ad5f1..54b7bea 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/join/TestMergeJoinMulCondition.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/join/TestMergeJoinMulCondition.java
@@ -23,8 +23,8 @@ import java.util.List;
 
 import org.apache.drill.categories.OperatorTest;
 import org.apache.drill.categories.SlowTest;
-import org.apache.drill.common.util.FileUtils;
-import org.apache.drill.common.util.TestTools;
+import org.apache.drill.common.util.DrillFileUtils;
+import org.apache.drill.test.TestTools;
 import org.apache.drill.exec.client.DrillClient;
 import org.apache.drill.exec.pop.PopUnitTestBase;
 import org.apache.drill.exec.rpc.user.QueryDataBatch;
@@ -60,7 +60,7 @@ public class TestMergeJoinMulCondition extends PopUnitTestBase {
       bit1.run();
       client.connect();
      List<QueryDataBatch> results = client.runQuery(org.apache.drill.exec.proto.UserBitShared.QueryType.PHYSICAL,
-          Files.toString(FileUtils.getResourceAsFile("/join/mj_multi_condition.json"),
+          Files.toString(DrillFileUtils.getResourceAsFile("/join/mj_multi_condition.json"),
               Charsets.UTF_8));
       int count = 0;
       for (QueryDataBatch b : results) {
@@ -85,7 +85,7 @@ public class TestMergeJoinMulCondition extends PopUnitTestBase {
       bit1.run();
       client.connect();
      List<QueryDataBatch> results = client.runQuery(org.apache.drill.exec.proto.UserBitShared.QueryType.PHYSICAL,
-          Files.toString(FileUtils.getResourceAsFile("/join/merge_join_nullkey.json"), Charsets.UTF_8).replace("${JOIN_TYPE}", "INNER"));
+          Files.toString(DrillFileUtils.getResourceAsFile("/join/merge_join_nullkey.json"), Charsets.UTF_8).replace("${JOIN_TYPE}", "INNER"));
       int count = 0;
       for (QueryDataBatch b : results) {
         if (b.getHeader().getRowCount() != 0) {
@@ -110,7 +110,7 @@ public class TestMergeJoinMulCondition extends PopUnitTestBase {
       bit1.run();
       client.connect();
      List<QueryDataBatch> results = client.runQuery(org.apache.drill.exec.proto.UserBitShared.QueryType.PHYSICAL,
-          Files.toString(FileUtils.getResourceAsFile("/join/merge_join_nullkey.json"), Charsets.UTF_8).replace("${JOIN_TYPE}", "LEFT"));
+          Files.toString(DrillFileUtils.getResourceAsFile("/join/merge_join_nullkey.json"), Charsets.UTF_8).replace("${JOIN_TYPE}", "LEFT"));
       int count = 0;
       for (QueryDataBatch b : results) {
         if (b.getHeader().getRowCount() != 0) {
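
These tests share one template-substitution pattern: the physical plan JSON
carries a ${JOIN_TYPE} placeholder that each test binds before submitting the
plan. A sketch of that pattern (runNullKeyJoin is an illustrative name; client
is the DrillClient built in the surrounding tests):

    // Load the plan template, bind the join type, and run it as a PHYSICAL query.
    private static List<QueryDataBatch> runNullKeyJoin(DrillClient client, String joinType) throws Exception {
      String plan = Files.toString(
          DrillFileUtils.getResourceAsFile("/join/merge_join_nullkey.json"), Charsets.UTF_8)
          .replace("${JOIN_TYPE}", joinType);
      return client.runQuery(org.apache.drill.exec.proto.UserBitShared.QueryType.PHYSICAL, plan);
    }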

http://git-wip-us.apache.org/repos/asf/drill/blob/acc5ed92/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/join/TestMergeJoinWithSchemaChanges.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/join/TestMergeJoinWithSchemaChanges.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/join/TestMergeJoinWithSchemaChanges.java
index 86a136c..5136628 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/join/TestMergeJoinWithSchemaChanges.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/join/TestMergeJoinWithSchemaChanges.java
@@ -17,56 +17,78 @@
  */
 package org.apache.drill.exec.physical.impl.join;
 
-import org.apache.drill.BaseTestQuery;
+import org.apache.drill.test.BaseTestQuery;
 import org.apache.drill.categories.OperatorTest;
-import org.apache.drill.TestBuilder;
+import org.apache.drill.test.TestBuilder;
+import org.apache.drill.test.SubDirTestWatcher;
+import org.junit.BeforeClass;
 import org.junit.Ignore;
+import org.junit.Rule;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
 
 import java.io.BufferedWriter;
 import java.io.File;
 import java.io.FileWriter;
+import java.nio.file.Path;
+import java.nio.file.Paths;
 
 @Category(OperatorTest.class)
 public class TestMergeJoinWithSchemaChanges extends BaseTestQuery {
+  public static final Path LEFT_DIR = Paths.get("mergejoin-schemachanges-left");
+  public static final Path RIGHT_DIR = Paths.get("mergejoin-schemachanges-right");
+
+  private static File leftDir;
+  private static File rightDir;
+
+  @BeforeClass
+  public static void getDirs() {
+    leftDir = dirTestWatcher.getRootDir()
+      .toPath()
+      .resolve(LEFT_DIR)
+      .toFile();
+    rightDir = dirTestWatcher.getRootDir()
+      .toPath()
+      .resolve(RIGHT_DIR)
+      .toFile();
+  }
+
+  @Rule
+  public final SubDirTestWatcher subDirTestWatcher = new SubDirTestWatcher.Builder(dirTestWatcher.getRootDir())
+    .addSubDir(LEFT_DIR)
+    .addSubDir(RIGHT_DIR)
+    .build();
 
   @Test
-  //@Ignore
   public void testNumericTypes() throws Exception {
-    final File left_dir = new File(BaseTestQuery.getTempDir("mergejoin-schemachanges-left"));
-    final File right_dir = new File(BaseTestQuery.getTempDir("mergejoin-schemachanges-right"));
-    left_dir.mkdirs();
-    right_dir.mkdirs();
-
     // First create data for numeric types.
     // left side int and float vs right side float
-    BufferedWriter writer = new BufferedWriter(new FileWriter(new File(left_dir, "l1.json")));
+    BufferedWriter writer = new BufferedWriter(new FileWriter(new File(leftDir, "l1.json")));
     for (int i = 0; i < 5000; ++i) {
       writer.write(String.format("{ \"kl\" : %d , \"vl\": %d }\n", i, i));
     }
     writer.close();
-    writer = new BufferedWriter(new FileWriter(new File(left_dir, "l2.json")));
+    writer = new BufferedWriter(new FileWriter(new File(leftDir, "l2.json")));
     for (int i = 1000; i < 6000; ++i) {
      writer.write(String.format("{ \"kl\" : %f , \"vl\": %f }\n", (float) i, (float) i));
     }
     writer.close();
 
     // right side is int and float
-    writer = new BufferedWriter(new FileWriter(new File(right_dir, "r1.json")));
+    writer = new BufferedWriter(new FileWriter(new File(rightDir, "r1.json")));
     for (int i = 2000; i < 7000; ++i) {
       writer.write(String.format("{ \"kr\" : %d , \"vr\": %d }\n", i, i));
     }
     writer.close();
-    writer = new BufferedWriter(new FileWriter(new File(right_dir, "r2.json")));
+    writer = new BufferedWriter(new FileWriter(new File(rightDir, "r2.json")));
     for (int i = 3000; i < 8000; ++i) {
      writer.write(String.format("{ \"kr\" : %f, \"vr\": %f }\n", (float) i, (float) i));
     }
     writer.close();
 
     // INNER JOIN
-    String query = String.format("select * from dfs_test.`%s` L %s join dfs_test.`%s` R on L.kl=R.kr",
-      left_dir.toPath().toString(), "inner", right_dir.toPath().toString());
+    String query = String.format("select * from dfs.`%s` L %s join dfs.`%s` R on L.kl=R.kr",
+      LEFT_DIR, "inner", RIGHT_DIR);
 
     TestBuilder builder = testBuilder()
       .sqlQuery(query)
@@ -92,8 +114,8 @@ public class TestMergeJoinWithSchemaChanges extends BaseTestQuery {
     builder.go();
 
     // LEFT JOIN
-    query = String.format("select * from dfs_test.`%s` L %s join dfs_test.`%s` R on L.kl=R.kr",
-      left_dir.toPath().toString(), "left", right_dir.toPath().toString());
+    query = String.format("select * from dfs.`%s` L %s join dfs.`%s` R on L.kl=R.kr",
+      LEFT_DIR, "left", RIGHT_DIR);
 
     builder = testBuilder()
       .sqlQuery(query)
@@ -125,40 +147,34 @@ public class TestMergeJoinWithSchemaChanges extends BaseTestQuery {
   }
 
   @Test
-  //@Ignore
   public void testNumericStringTypes() throws Exception {
-    final File left_dir = new File(BaseTestQuery.getTempDir("mergejoin-schemachanges-left"));
-    final File right_dir = new File(BaseTestQuery.getTempDir("mergejoin-schemachanges-right"));
-    left_dir.mkdirs();
-    right_dir.mkdirs();
-
     // left side int and strings
-    BufferedWriter writer = new BufferedWriter(new FileWriter(new File(left_dir, "l1.json")));
+    BufferedWriter writer = new BufferedWriter(new FileWriter(new File(leftDir, "l1.json")));
     for (int i = 0; i < 5000; ++i) {
       writer.write(String.format("{ \"kl\" : %d , \"vl\": %d }\n", i, i));
     }
     writer.close();
-    writer = new BufferedWriter(new FileWriter(new File(left_dir, "l2.json")));
+    writer = new BufferedWriter(new FileWriter(new File(leftDir, "l2.json")));
     for (int i = 1000; i < 6000; ++i) {
      writer.write(String.format("{ \"kl\" : \"%s\" , \"vl\": \"%s\" }\n", i, i));
     }
     writer.close();
 
     // right side is float and strings
-    writer = new BufferedWriter(new FileWriter(new File(right_dir, "r1.json")));
+    writer = new BufferedWriter(new FileWriter(new File(rightDir, "r1.json")));
     for (int i = 2000; i < 7000; ++i) {
      writer.write(String.format("{ \"kr\" : %f , \"vr\": %f }\n", (float)i, (float)i));
     }
     writer.close();
-    writer = new BufferedWriter(new FileWriter(new File(right_dir, "r2.json")));
+    writer = new BufferedWriter(new FileWriter(new File(rightDir, "r2.json")));
     for (int i = 3000; i < 8000; ++i) {
      writer.write(String.format("{ \"kr\" : \"%s\", \"vr\": \"%s\" }\n", i, i));
     }
     writer.close();
 
     // INNER JOIN
-    String query = String.format("select * from dfs_test.`%s` L %s join dfs_test.`%s` R on L.kl=R.kr",
-      left_dir.toPath().toString(), "inner", right_dir.toPath().toString());
+    String query = String.format("select * from dfs.`%s` L %s join dfs.`%s` R on L.kl=R.kr",
+      LEFT_DIR, "inner", RIGHT_DIR);
 
     TestBuilder builder = testBuilder()
       .sqlQuery(query)
@@ -176,8 +192,8 @@ public class TestMergeJoinWithSchemaChanges extends BaseTestQuery {
     builder.go();
 
     // RIGHT JOIN
-    query = String.format("select * from dfs_test.`%s` L %s join dfs_test.`%s` R on L.kl=R.kr",
-      left_dir.toPath().toString(), "right", right_dir.toPath().toString());
+    query = String.format("select * from dfs.`%s` L %s join dfs.`%s` R on L.kl=R.kr",
+      LEFT_DIR, "right", RIGHT_DIR);
 
     builder = testBuilder()
       .sqlQuery(query)
@@ -205,54 +221,47 @@ public class TestMergeJoinWithSchemaChanges extends BaseTestQuery {
   @Ignore("DRILL-5612")
   @Test
   public void testMissingAndNewColumns() throws Exception {
-    final File left_dir = new File(BaseTestQuery.getTempDir("mergejoin-schemachanges-left"));
-    final File right_dir = new File(BaseTestQuery.getTempDir("mergejoin-schemachanges-right"));
-    left_dir.mkdirs();
-    right_dir.mkdirs();
-    System.out.println(left_dir);
-    System.out.println(right_dir);
-
     // missing column kl
-    BufferedWriter writer = new BufferedWriter(new FileWriter(new File(left_dir, "l1.json")));
+    BufferedWriter writer = new BufferedWriter(new FileWriter(new File(leftDir, "l1.json")));
     for (int i = 0; i < 50; ++i) {
       writer.write(String.format("{ \"kl1\" : %d , \"vl1\": %d }\n", i, i));
     }
     writer.close();
 
-    writer = new BufferedWriter(new FileWriter(new File(left_dir, "l2.json")));
+    writer = new BufferedWriter(new FileWriter(new File(leftDir, "l2.json")));
     for (int i = 50; i < 100; ++i) {
       writer.write(String.format("{ \"kl\" : %d , \"vl\": %d }\n", i, i));
     }
     writer.close();
 
-    writer = new BufferedWriter(new FileWriter(new File(left_dir, "l3.json")));
+    writer = new BufferedWriter(new FileWriter(new File(leftDir, "l3.json")));
     for (int i = 100; i < 150; ++i) {
       writer.write(String.format("{ \"kl2\" : %d , \"vl2\": %d }\n", i, i));
     }
     writer.close();
 
     // right missing column kr
-    writer = new BufferedWriter(new FileWriter(new File(right_dir, "r1.json")));
+    writer = new BufferedWriter(new FileWriter(new File(rightDir, "r1.json")));
     for (int i = 0; i < 50; ++i) {
      writer.write(String.format("{ \"kr1\" : %f , \"vr1\": %f }\n", (float)i, (float)i));
     }
     writer.close();
 
-    writer = new BufferedWriter(new FileWriter(new File(right_dir, "r2.json")));
+    writer = new BufferedWriter(new FileWriter(new File(rightDir, "r2.json")));
     for (int i = 50; i < 100; ++i) {
      writer.write(String.format("{ \"kr\" : %f , \"vr\": %f }\n", (float)i, (float)i));
     }
     writer.close();
 
-    writer = new BufferedWriter(new FileWriter(new File(right_dir, "r3.json")));
+    writer = new BufferedWriter(new FileWriter(new File(rightDir, "r3.json")));
     for (int i = 100; i < 150; ++i) {
      writer.write(String.format("{ \"kr2\" : %f , \"vr2\": %f }\n", (float)i, (float)i));
     }
     writer.close();
 
     // INNER JOIN
-    String query = String.format("select * from dfs_test.`%s` L %s join dfs_test.`%s` R on L.kl=R.kr",
-      left_dir.toPath().toString(), "inner", right_dir.toPath().toString());
+    String query = String.format("select * from dfs.`%s` L %s join dfs.`%s` R on L.kl=R.kr",
+      LEFT_DIR, "inner", RIGHT_DIR);
 
     TestBuilder builder = testBuilder()
       .sqlQuery(query)
@@ -266,8 +275,8 @@ public class TestMergeJoinWithSchemaChanges extends BaseTestQuery {
     builder.go();
 
     // LEFT JOIN
-    query = String.format("select * from dfs_test.`%s` L %s join dfs_test.`%s` R on L.kl=R.kr",
-      left_dir.toPath().toString(), "left", right_dir.toPath().toString());
+    query = String.format("select * from dfs.`%s` L %s join dfs.`%s` R on L.kl=R.kr",
+      LEFT_DIR, "left", RIGHT_DIR);
 
     builder = testBuilder()
       .sqlQuery(query)
@@ -287,8 +296,8 @@ public class TestMergeJoinWithSchemaChanges extends BaseTestQuery {
     builder.go();
 
     // RIGHT JOIN
-    query = String.format("select * from dfs_test.`%s` L %s join dfs_test.`%s` R on L.kl=R.kr",
-      left_dir.toPath().toString(), "right", right_dir.toPath().toString());
+    query = String.format("select * from dfs.`%s` L %s join dfs.`%s` R on L.kl=R.kr",
+      LEFT_DIR, "right", RIGHT_DIR);
 
     builder = testBuilder()
       .sqlQuery(query)
@@ -309,16 +318,8 @@ public class TestMergeJoinWithSchemaChanges extends BaseTestQuery {
   }
 
   @Test
-  //@Ignore
   public void testOneSideSchemaChanges() throws Exception {
-    final File left_dir = new File(BaseTestQuery.getTempDir("mergejoin-schemachanges-left"));
-    final File right_dir = new File(BaseTestQuery.getTempDir("mergejoin-schemachanges-right"));
-    left_dir.mkdirs();
-    right_dir.mkdirs();
-    System.out.println(left_dir);
-    System.out.println(right_dir);
-
-    BufferedWriter writer = new BufferedWriter(new FileWriter(new File(left_dir, "l1.json")));
+    BufferedWriter writer = new BufferedWriter(new FileWriter(new File(leftDir, "l1.json")));
     for (int i = 0; i < 50; ++i) {
       writer.write(String.format("{ \"kl\" : %d , \"vl\": %d }\n", i, i));
     }
@@ -327,14 +328,14 @@ public class TestMergeJoinWithSchemaChanges extends BaseTestQuery {
     }
     writer.close();
 
-    writer = new BufferedWriter(new FileWriter(new File(right_dir, "r1.json")));
+    writer = new BufferedWriter(new FileWriter(new File(rightDir, "r1.json")));
     for (int i = 0; i < 50; ++i) {
       writer.write(String.format("{ \"kl\" : %d , \"vl\": %d }\n", i, i));
     }
     writer.close();
 
-    String query = String.format("select * from dfs_test.`%s` L %s join dfs_test.`%s` R on L.kl=R.kl",
-      left_dir.toPath().toString(), "inner", right_dir.toPath().toString());
+    String query = String.format("select * from dfs.`%s` L %s join dfs.`%s` R on L.kl=R.kl",
+      LEFT_DIR, "inner", RIGHT_DIR);
     TestBuilder builder = testBuilder()
       .sqlQuery(query)
       .optionSettingQueriesForTestQuery("alter session set 
`planner.enable_hashjoin` = false; alter session set `exec.enable_union_type` = 
true")

http://git-wip-us.apache.org/repos/asf/drill/blob/acc5ed92/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/join/TestNestedLoopJoin.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/join/TestNestedLoopJoin.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/join/TestNestedLoopJoin.java
index 87c9501..a67a484 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/join/TestNestedLoopJoin.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/join/TestNestedLoopJoin.java
@@ -21,10 +21,12 @@ package org.apache.drill.exec.physical.impl.join;
 import org.apache.drill.categories.OperatorTest;
 import org.apache.drill.PlanTestBase;
 import org.apache.drill.common.exceptions.UserRemoteException;
-import org.apache.drill.common.util.TestTools;
+import org.junit.BeforeClass;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
 
+import java.nio.file.Paths;
+
 import static org.hamcrest.CoreMatchers.containsString;
 import static org.junit.Assert.assertThat;
 
@@ -32,8 +34,6 @@ import static org.junit.Assert.assertThat;
 public class TestNestedLoopJoin extends PlanTestBase {
 
   private static String nlpattern = "NestedLoopJoin";
-  private static final String WORKING_PATH = TestTools.getWorkingPath();
-  private static final String TEST_RES_PATH = WORKING_PATH + "/src/test/resources";
 
   private static final String DISABLE_HJ = "alter session set 
`planner.enable_hashjoin` = false";
   private static final String ENABLE_HJ = "alter session set 
`planner.enable_hashjoin` = true";
@@ -79,6 +79,11 @@ public class TestNestedLoopJoin extends PlanTestBase {
   private static final String testNlJoinWithLargeRightInput = "select * from 
cp.`tpch/region.parquet`r " +
       "left join cp.`tpch/nation.parquet` n on r.r_regionkey <> n.n_regionkey";
 
+  @BeforeClass
+  public static void setupTestFiles() {
+    dirTestWatcher.copyResourceToRoot(Paths.get("multilevel", "parquet"));
+  }
+
   @Test
   public void testNlJoinExists_1_planning() throws Exception {
    testPlanMatchingPatterns(testNlJoinExists_1, new String[]{nlpattern}, new String[]{});
@@ -157,9 +162,9 @@ public class TestNestedLoopJoin extends PlanTestBase {
 
  @Test // equality join and non-scalar right input, hj and mj disabled, enforce exchanges
   public void testNlJoinEqualityNonScalar_2_planning() throws Exception {
-    String query = String.format("select n.n_nationkey from 
cp.`tpch/nation.parquet` n, "
-        + " dfs_test.`%s/multilevel/parquet` o "
-        + " where n.n_regionkey = o.o_orderkey and o.o_custkey > 5", 
TEST_RES_PATH);
+    String query = "select n.n_nationkey from cp.`tpch/nation.parquet` n, "
+        + " dfs.`multilevel/parquet` o "
+        + " where n.n_regionkey = o.o_orderkey and o.o_custkey > 5";
     test("alter session set `planner.slice_target` = 1");
     test(DISABLE_HJ);
     test(DISABLE_MJ);
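
Here the fix swaps the absolute TEST_RES_PATH/dfs_test addressing for
resources staged once per class under the dfs workspace root. A sketch of the
staging step, restated from the hunk above:

    // Copy src/test/resources/multilevel/parquet under the dfs workspace root
    // so queries can reference it as dfs.`multilevel/parquet`.
    @BeforeClass
    public static void setupTestFiles() {
      dirTestWatcher.copyResourceToRoot(Paths.get("multilevel", "parquet"));
    }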
