Yingyi Bu has uploaded a new change for review.

  https://asterix-gerrit.ics.uci.edu/325

Change subject: This change includes the following: 1. Allow the pregelix statement to span two different dataverses. 2. Change TestsUtils to TestsExecutor, which no longer relies on static methods, so that Pregelix can reuse its functionality. 3. Add ioDeviceId to the result returned by ConnectorAPIServlet.
......................................................................

This change includes the following:
1. Allow the pregelix statement to span two different dataverses.
2. Change TestsUtils to TestsExecutor, which no longer relies on static methods, so that Pregelix can reuse its functionality.
3. Add ioDeviceId to the result returned by ConnectorAPIServlet (see the sketch below).
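
For item 3, a minimal illustrative sketch of the partition entry that ConnectorAPIServlet now emits; the field values are made up, and only the key names ("ip", "path", "ioDeviceId") come from the updated FilePartition.toJSONObject() in this patch:

    import org.json.JSONObject;

    // Illustrative only: builds one partition entry the way the updated
    // FilePartition.toJSONObject() does; the values are placeholders.
    public class PartitionJsonExample {
        public static void main(String[] args) throws Exception {
            JSONObject partition = new JSONObject();
            partition.put("ip", "127.0.0.1");                    // node IP address
            partition.put("path", "/tmp/nc1/iodevice0/storage"); // local file path (hypothetical)
            partition.put("ioDeviceId", 0);                      // newly returned I/O device id
            System.out.println(partition.toString(2));
        }
    }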

Change-Id: I7d167b64bf9ec754182b5b2fe44dfc7e5908c686
---
M asterix-app/pom.xml
M asterix-app/src/main/java/edu/uci/ics/asterix/api/common/AsterixHyracksIntegrationUtil.java
M asterix-app/src/main/java/edu/uci/ics/asterix/api/http/servlet/ConnectorAPIServlet.java
M asterix-app/src/main/java/edu/uci/ics/asterix/aql/translator/AqlTranslator.java
A asterix-app/src/main/java/edu/uci/ics/asterix/util/FlushDatasetUtils.java
A asterix-app/src/main/java/edu/uci/ics/asterix/util/JobUtils.java
M asterix-app/src/test/java/edu/uci/ics/asterix/api/http/servlet/ConnectorAPIServletTest.java
A asterix-app/src/test/java/edu/uci/ics/asterix/aql/translator/AqlTranslatorTest.java
M asterix-app/src/test/java/edu/uci/ics/asterix/test/dml/DmlTest.java
M asterix-app/src/test/java/edu/uci/ics/asterix/test/metadata/MetadataTest.java
M asterix-app/src/test/java/edu/uci/ics/asterix/test/runtime/ExecutionTest.java
M asterix-app/src/test/java/edu/uci/ics/asterix/test/runtime/RepeatedTest.java
R asterix-common/src/test/java/edu/uci/ics/asterix/test/aql/TestsExecutor.java
M asterix-installer/src/test/java/edu/uci/ics/asterix/installer/test/AsterixClusterLifeCycleIT.java
M asterix-installer/src/test/java/edu/uci/ics/asterix/installer/test/AsterixExternalLibraryIT.java
M asterix-installer/src/test/java/edu/uci/ics/asterix/installer/test/AsterixLifecycleIT.java
M asterix-installer/src/test/java/edu/uci/ics/asterix/installer/transaction/DmlRecoveryIT.java
M asterix-installer/src/test/java/edu/uci/ics/asterix/installer/transaction/RecoveryIT.java
M asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/utils/MetadataLockManager.java
M asterix-tools/pom.xml
M asterix-yarn/src/test/java/edu/uci/ics/asterix/aoya/test/AsterixYARNLibraryTestIT.java
M asterix-yarn/src/test/java/edu/uci/ics/asterix/aoya/test/AsterixYARNLifecycleIT.java
M pom.xml
23 files changed, 805 insertions(+), 632 deletions(-)


  git pull ssh://asterix-gerrit.ics.uci.edu:29418/asterixdb refs/changes/25/325/1

diff --git a/asterix-app/pom.xml b/asterix-app/pom.xml
index 25f1ad9..9386f9e 100644
--- a/asterix-app/pom.xml
+++ b/asterix-app/pom.xml
@@ -7,226 +7,223 @@
        WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. ! See 
the 
        License for the specific language governing permissions and ! 
limitations 
        under the License. ! -->
-<project
-    xmlns="http://maven.apache.org/POM/4.0.0"
-    xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
-    xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
-    <modelVersion>4.0.0</modelVersion>
-    <parent>
-        <artifactId>asterix</artifactId>
-        <groupId>edu.uci.ics.asterix</groupId>
-        <version>0.8.7-SNAPSHOT</version>
-    </parent>
-    <artifactId>asterix-app</artifactId>
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+       xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+       <modelVersion>4.0.0</modelVersion>
+       <parent>
+               <artifactId>asterix</artifactId>
+               <groupId>edu.uci.ics.asterix</groupId>
+               <version>0.8.7-SNAPSHOT</version>
+       </parent>
+       <artifactId>asterix-app</artifactId>
 
-    <licenses>
-        <license>
-            <name>Apache License, Version 2.0</name>
-            <url>http://www.apache.org/licenses/LICENSE-2.0.txt</url>
-            <distribution>repo</distribution>
-            <comments>A business-friendly OSS license</comments>
-        </license>
-    </licenses>
+       <licenses>
+               <license>
+                       <name>Apache License, Version 2.0</name>
+                       
<url>http://www.apache.org/licenses/LICENSE-2.0.txt</url>
+                       <distribution>repo</distribution>
+                       <comments>A business-friendly OSS license</comments>
+               </license>
+       </licenses>
 
-    <build>
-        <plugins>
-            <plugin>
-                <groupId>org.apache.maven.plugins</groupId>
-                <artifactId>maven-compiler-plugin</artifactId>
-                <version>2.3.2</version>
-                <configuration>
-                    <source>1.7</source>
-                    <target>1.7</target>
-                    <fork>true</fork>
-                </configuration>
-            </plugin>
-            <plugin>
-                <groupId>org.codehaus.mojo</groupId>
-                <artifactId>appassembler-maven-plugin</artifactId>
-                <version>1.0</version>
-                <executions>
-                    <execution>
-                        <configuration>
-                            <programs>
-                                <program>
-                                    
<mainClass>edu.uci.ics.asterix.drivers.AsterixWebServer</mainClass>
-                                    <name>asterix-web</name>
-                                </program>
-                                <program>
-                                    
<mainClass>edu.uci.ics.asterix.drivers.AsterixClientDriver</mainClass>
-                                    <name>asterix-cmd</name>
-                                </program>
-                                <program>
-                                    
<mainClass>edu.uci.ics.asterix.drivers.AsterixCLI</mainClass>
-                                    <name>asterix-cli</name>
-                                </program>
-                            </programs>
-                            <repositoryLayout>flat</repositoryLayout>
-                            <repositoryName>lib</repositoryName>
-                        </configuration>
-                        <phase>package</phase>
-                        <goals>
-                            <goal>assemble</goal>
-                        </goals>
-                    </execution>
-                </executions>
-            </plugin>
-            <plugin>
-                <artifactId>maven-assembly-plugin</artifactId>
-                <version>2.2-beta-5</version>
-                <executions>
-                    <execution>
-                        <configuration>
-                            <descriptors>
-                                
<descriptor>src/main/assembly/binary-assembly.xml</descriptor>
-                            </descriptors>
-                        </configuration>
-                        <phase>package</phase>
-                        <goals>
-                            <goal>attached</goal>
-                        </goals>
-                    </execution>
-                </executions>
-            </plugin>
-        </plugins>
-    </build>
+       <build>
+               <plugins>
+                       <plugin>
+                               <groupId>org.apache.maven.plugins</groupId>
+                               <artifactId>maven-compiler-plugin</artifactId>
+                               <version>2.3.2</version>
+                               <configuration>
+                                       <source>1.7</source>
+                                       <target>1.7</target>
+                                       <fork>true</fork>
+                               </configuration>
+                       </plugin>
+                       <plugin>
+                               <groupId>org.codehaus.mojo</groupId>
+                               
<artifactId>appassembler-maven-plugin</artifactId>
+                               <version>1.0</version>
+                               <executions>
+                                       <execution>
+                                               <configuration>
+                                                       <programs>
+                                                               <program>
+                                                                       
<mainClass>edu.uci.ics.asterix.drivers.AsterixWebServer</mainClass>
+                                                                       
<name>asterix-web</name>
+                                                               </program>
+                                                               <program>
+                                                                       
<mainClass>edu.uci.ics.asterix.drivers.AsterixClientDriver</mainClass>
+                                                                       
<name>asterix-cmd</name>
+                                                               </program>
+                                                               <program>
+                                                                       
<mainClass>edu.uci.ics.asterix.drivers.AsterixCLI</mainClass>
+                                                                       
<name>asterix-cli</name>
+                                                               </program>
+                                                       </programs>
+                                                       
<repositoryLayout>flat</repositoryLayout>
+                                                       
<repositoryName>lib</repositoryName>
+                                               </configuration>
+                                               <phase>package</phase>
+                                               <goals>
+                                                       <goal>assemble</goal>
+                                               </goals>
+                                       </execution>
+                               </executions>
+                       </plugin>
+                       <plugin>
+                               <artifactId>maven-assembly-plugin</artifactId>
+                               <version>2.2-beta-5</version>
+                               <executions>
+                                       <execution>
+                                               <configuration>
+                                                       <descriptors>
+                                                               
<descriptor>src/main/assembly/binary-assembly.xml</descriptor>
+                                                       </descriptors>
+                                               </configuration>
+                                               <phase>package</phase>
+                                               <goals>
+                                                       <goal>attached</goal>
+                                               </goals>
+                                       </execution>
+                               </executions>
+                       </plugin>
+               </plugins>
+       </build>
 
-    <dependencies>
-        <dependency>
-            <groupId>javax.servlet</groupId>
-            <artifactId>servlet-api</artifactId>
-            <type>jar</type>
-        </dependency>
-        <dependency>
-            <groupId>org.eclipse.jetty</groupId>
-            <artifactId>jetty-server</artifactId>
-            <version>8.0.0.M2</version>
-            <type>jar</type>
-            <scope>compile</scope>
-        </dependency>
-        <dependency>
-            <groupId>org.eclipse.jetty</groupId>
-            <artifactId>jetty-servlet</artifactId>
-            <version>8.0.0.M2</version>
-            <type>jar</type>
-            <scope>compile</scope>
-        </dependency>
-        <dependency>
-            <groupId>edu.uci.ics.hyracks</groupId>
-            <artifactId>hyracks-control-cc</artifactId>
-        </dependency>
-        <dependency>
-            <groupId>edu.uci.ics.hyracks</groupId>
-            <artifactId>hyracks-control-nc</artifactId>
-        </dependency>
-        <dependency>
-            <groupId>edu.uci.ics.hyracks</groupId>
-            <artifactId>algebricks-compiler</artifactId>
-        </dependency>
-        <dependency>
-            <groupId>edu.uci.ics.hyracks</groupId>
-            <artifactId>hyracks-client</artifactId>
-        </dependency>
-        <dependency>
-            <groupId>edu.uci.ics.asterix</groupId>
-            <artifactId>asterix-algebra</artifactId>
-            <version>0.8.7-SNAPSHOT</version>
-            <scope>compile</scope>
-        </dependency>
-        <dependency>
-            <groupId>edu.uci.ics.asterix</groupId>
-            <artifactId>asterix-aql</artifactId>
-            <version>0.8.7-SNAPSHOT</version>
-            <type>jar</type>
-            <scope>compile</scope>
-        </dependency>
-        <dependency>
-            <groupId>edu.uci.ics.asterix</groupId>
-            <artifactId>asterix-om</artifactId>
-            <version>0.8.7-SNAPSHOT</version>
-            <type>jar</type>
-            <scope>compile</scope>
-        </dependency>
-        <dependency>
-            <groupId>edu.uci.ics.asterix</groupId>
-            <artifactId>asterix-metadata</artifactId>
-            <version>0.8.7-SNAPSHOT</version>
-            <type>jar</type>
-            <scope>compile</scope>
-        </dependency>
-        <dependency>
-            <groupId>edu.uci.ics.asterix</groupId>
-            <artifactId>asterix-tools</artifactId>
-            <version>0.8.7-SNAPSHOT</version>
-            <type>jar</type>
-            <scope>compile</scope>
-        </dependency>
-        <dependency>
-            <groupId>edu.uci.ics.asterix</groupId>
-            <artifactId>asterix-common</artifactId>
-            <version>0.8.7-SNAPSHOT</version>
-            <type>jar</type>
-            <scope>compile</scope>
-        </dependency>
-        <dependency>
-            <groupId>edu.uci.ics.asterix</groupId>
-            <artifactId>asterix-common</artifactId>
-            <version>0.8.7-SNAPSHOT</version>
-            <type>test-jar</type>
-            <scope>test</scope>
-        </dependency>
+       <dependencies>
+               <dependency>
+                       <groupId>javax.servlet</groupId>
+                       <artifactId>servlet-api</artifactId>
+                       <type>jar</type>
+               </dependency>
+               <dependency>
+                       <groupId>org.eclipse.jetty</groupId>
+                       <artifactId>jetty-server</artifactId>
+                       <version>8.0.0.M2</version>
+                       <type>jar</type>
+                       <scope>compile</scope>
+               </dependency>
+               <dependency>
+                       <groupId>org.eclipse.jetty</groupId>
+                       <artifactId>jetty-servlet</artifactId>
+                       <version>8.0.0.M2</version>
+                       <type>jar</type>
+                       <scope>compile</scope>
+               </dependency>
+               <dependency>
+                       <groupId>edu.uci.ics.hyracks</groupId>
+                       <artifactId>hyracks-control-cc</artifactId>
+               </dependency>
+               <dependency>
+                       <groupId>edu.uci.ics.hyracks</groupId>
+                       <artifactId>hyracks-control-nc</artifactId>
+               </dependency>
+               <dependency>
+                       <groupId>edu.uci.ics.hyracks</groupId>
+                       <artifactId>algebricks-compiler</artifactId>
+               </dependency>
+               <dependency>
+                       <groupId>edu.uci.ics.hyracks</groupId>
+                       <artifactId>hyracks-client</artifactId>
+               </dependency>
+               <dependency>
+                       <groupId>edu.uci.ics.asterix</groupId>
+                       <artifactId>asterix-algebra</artifactId>
+                       <version>0.8.7-SNAPSHOT</version>
+                       <scope>compile</scope>
+               </dependency>
+               <dependency>
+                       <groupId>edu.uci.ics.asterix</groupId>
+                       <artifactId>asterix-aql</artifactId>
+                       <version>0.8.7-SNAPSHOT</version>
+                       <type>jar</type>
+                       <scope>compile</scope>
+               </dependency>
+               <dependency>
+                       <groupId>edu.uci.ics.asterix</groupId>
+                       <artifactId>asterix-om</artifactId>
+                       <version>0.8.7-SNAPSHOT</version>
+                       <type>jar</type>
+                       <scope>compile</scope>
+               </dependency>
+               <dependency>
+                       <groupId>edu.uci.ics.asterix</groupId>
+                       <artifactId>asterix-metadata</artifactId>
+                       <version>0.8.7-SNAPSHOT</version>
+                       <type>jar</type>
+                       <scope>compile</scope>
+               </dependency>
+               <dependency>
+                       <groupId>edu.uci.ics.asterix</groupId>
+                       <artifactId>asterix-tools</artifactId>
+                       <version>0.8.7-SNAPSHOT</version>
+                       <type>jar</type>
+                       <scope>compile</scope>
+               </dependency>
+               <dependency>
+                       <groupId>edu.uci.ics.asterix</groupId>
+                       <artifactId>asterix-common</artifactId>
+                       <version>0.8.7-SNAPSHOT</version>
+                       <type>jar</type>
+                       <scope>compile</scope>
+               </dependency>
+               <dependency>
+                       <groupId>edu.uci.ics.asterix</groupId>
+                       <artifactId>asterix-common</artifactId>
+                       <version>0.8.7-SNAPSHOT</version>
+                       <type>test-jar</type>
+                       <scope>test</scope>
+               </dependency>
                <!-- posssible remove this <dependency> 
<groupId>com.kenai.nbpwr</groupId> 
                        <artifactId>org-apache-commons-io</artifactId> 
<version>1.3.1-201002241208</version> 
                        <scope>test</scope> </dependency> -->
-        <dependency>
-            <groupId>edu.uci.ics.asterix</groupId>
-            <artifactId>asterix-transactions</artifactId>
-            <version>0.8.7-SNAPSHOT</version>
-            <scope>compile</scope>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.hadoop</groupId>
-            <artifactId>hadoop-client</artifactId>
-            <type>jar</type>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.hadoop</groupId>
-            <artifactId>hadoop-hdfs</artifactId>
-            <type>jar</type>
-            <scope>test</scope>
-        </dependency>
-        <dependency>
-            <groupId>xerces</groupId>
-            <artifactId>xercesImpl</artifactId>
-            <version>2.9.1</version>
-            <type>jar</type>
-            <scope>compile</scope>
-        </dependency>
-        <dependency>
-            <groupId>xalan</groupId>
-            <artifactId>xalan</artifactId>
-            <version>2.7.1</version>
-            <type>jar</type>
-            <scope>test</scope>
-        </dependency>
-        <dependency>
-            <groupId>edu.uci.ics.asterix</groupId>
-            <artifactId>asterix-test-framework</artifactId>
-            <version>0.8.7-SNAPSHOT</version>
-            <scope>test</scope>
-        </dependency>
-        <dependency>
-            <groupId>org.mockito</groupId>
-            <artifactId>mockito-all</artifactId>
-            <version>1.10.19</version>
-            <scope>test</scope>
-        </dependency>
-        <dependency>
-            <groupId>com.e-movimento.tinytools</groupId>
-            <artifactId>privilegedaccessor</artifactId>
-            <version>1.2.2</version>
-            <scope>test</scope>
-        </dependency>
-    </dependencies>
-
+               <dependency>
+                       <groupId>edu.uci.ics.asterix</groupId>
+                       <artifactId>asterix-transactions</artifactId>
+                       <version>0.8.7-SNAPSHOT</version>
+                       <scope>compile</scope>
+               </dependency>
+               <dependency>
+                       <groupId>org.apache.hadoop</groupId>
+                       <artifactId>hadoop-client</artifactId>
+                       <type>jar</type>
+               </dependency>
+               <dependency>
+                       <groupId>org.apache.hadoop</groupId>
+                       <artifactId>hadoop-hdfs</artifactId>
+                       <type>jar</type>
+                       <scope>test</scope>
+               </dependency>
+               <dependency>
+                       <groupId>xerces</groupId>
+                       <artifactId>xercesImpl</artifactId>
+                       <version>2.9.1</version>
+                       <type>jar</type>
+                       <scope>compile</scope>
+               </dependency>
+               <dependency>
+                       <groupId>xalan</groupId>
+                       <artifactId>xalan</artifactId>
+                       <version>2.7.1</version>
+                       <type>jar</type>
+                       <scope>test</scope>
+               </dependency>
+               <dependency>
+                       <groupId>edu.uci.ics.asterix</groupId>
+                       <artifactId>asterix-test-framework</artifactId>
+                       <version>0.8.7-SNAPSHOT</version>
+                       <scope>test</scope>
+               </dependency>
+               <dependency>
+                       <groupId>org.mockito</groupId>
+                       <artifactId>mockito-all</artifactId>
+                       <version>1.10.19</version>
+                       <scope>test</scope>
+               </dependency>
+               <dependency>
+                       <groupId>com.e-movimento.tinytools</groupId>
+                       <artifactId>privilegedaccessor</artifactId>
+                       <version>1.2.2</version>
+                       <scope>test</scope>
+               </dependency>
+       </dependencies>
 </project>
diff --git 
a/asterix-app/src/main/java/edu/uci/ics/asterix/api/common/AsterixHyracksIntegrationUtil.java
 
b/asterix-app/src/main/java/edu/uci/ics/asterix/api/common/AsterixHyracksIntegrationUtil.java
index 91aa897..e6a1be8 100644
--- 
a/asterix-app/src/main/java/edu/uci/ics/asterix/api/common/AsterixHyracksIntegrationUtil.java
+++ 
b/asterix-app/src/main/java/edu/uci/ics/asterix/api/common/AsterixHyracksIntegrationUtil.java
@@ -17,6 +17,8 @@
 import java.io.File;
 import java.util.EnumSet;
 
+import org.apache.commons.io.FileUtils;
+
 import edu.uci.ics.asterix.common.config.GlobalConfig;
 import edu.uci.ics.asterix.hyracks.bootstrap.CCApplicationEntryPoint;
 import edu.uci.ics.asterix.hyracks.bootstrap.NCApplicationEntryPoint;
@@ -57,6 +59,13 @@
         cc = new ClusterControllerService(ccConfig);
         cc.start();
 
+        // Removes the storage directory.
+        File dir = new File(System.getProperty("java.io.tmpdir") + 
File.separator + "nc1");
+        if (dir.exists()) {
+            FileUtils.forceDelete(dir);
+        }
+
+        // Starts ncs.
         int n = 0;
         for (String ncName : getNcNames()) {
             NCConfig ncConfig1 = new NCConfig();
@@ -82,7 +91,6 @@
             ncs[n].start();
             ++n;
         }
-
         hcc = new HyracksConnection(cc.getConfig().clientNetIpAddress, 
cc.getConfig().clientNetPort);
     }
 
@@ -132,6 +140,7 @@
      */
     public static void main(String[] args) {
         Runtime.getRuntime().addShutdownHook(new Thread() {
+            @Override
             public void run() {
                 try {
                     deinit();
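
The hunk above deletes java.io.tmpdir/nc1 before the node controllers start, presumably so that storage files left over from a previous embedded-cluster run cannot leak into the next one. A self-contained sketch of that cleanup step (the same calls as in the diff, wrapped in a runnable class):

    import java.io.File;

    import org.apache.commons.io.FileUtils;

    public class StorageDirCleanupSketch {
        public static void main(String[] args) throws Exception {
            // Same logic as the added code: remove the scratch NC storage directory
            // under java.io.tmpdir if it exists; forceDelete removes directories recursively.
            File dir = new File(System.getProperty("java.io.tmpdir") + File.separator + "nc1");
            if (dir.exists()) {
                FileUtils.forceDelete(dir);
            }
        }
    }
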
diff --git 
a/asterix-app/src/main/java/edu/uci/ics/asterix/api/http/servlet/ConnectorAPIServlet.java
 
b/asterix-app/src/main/java/edu/uci/ics/asterix/api/http/servlet/ConnectorAPIServlet.java
index 267aac5..b1a1ef0 100644
--- 
a/asterix-app/src/main/java/edu/uci/ics/asterix/api/http/servlet/ConnectorAPIServlet.java
+++ 
b/asterix-app/src/main/java/edu/uci/ics/asterix/api/http/servlet/ConnectorAPIServlet.java
@@ -36,6 +36,8 @@
 import edu.uci.ics.asterix.metadata.entities.Dataset;
 import edu.uci.ics.asterix.metadata.utils.DatasetUtils;
 import edu.uci.ics.asterix.om.types.ARecordType;
+import edu.uci.ics.asterix.util.FlushDatasetUtils;
+
 import edu.uci.ics.hyracks.api.client.IHyracksClientConnection;
 import edu.uci.ics.hyracks.api.client.NodeControllerInfo;
 import edu.uci.ics.hyracks.dataflow.std.file.FileSplit;
@@ -105,6 +107,10 @@
 
             // Constructs the returned json object.
             formResponseObject(jsonResponse, fileSplits, recordType, 
pkStrBuf.toString(), hcc.getNodeControllerInfos());
+
+            // Flush the cached contents of the dataset to file system.
+            FlushDatasetUtils.flushDataset(hcc, metadataProvider, mdTxnCtx, 
dataverseName, datasetName, datasetName);
+
             // Metadata transaction commits.
             MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
             // Writes file splits.
@@ -129,7 +135,7 @@
         for (FileSplit split : fileSplits) {
             String ipAddress = 
nodeMap.get(split.getNodeName()).getNetworkAddress().getAddress().toString();
             String path = split.getLocalFile().getFile().getAbsolutePath();
-            FilePartition partition = new FilePartition(ipAddress, path);
+            FilePartition partition = new FilePartition(ipAddress, path, 
split.getIODeviceId());
             partititons.put(partition.toJSONObject());
         }
         // Generates the response object which contains the splits.
@@ -140,10 +146,12 @@
 class FilePartition {
     private final String ipAddress;
     private final String path;
+    private final int ioDeviceId;
 
-    public FilePartition(String ipAddress, String path) {
+    public FilePartition(String ipAddress, String path, int ioDeviceId) {
         this.ipAddress = ipAddress;
         this.path = path;
+        this.ioDeviceId = ioDeviceId;
     }
 
     public String getIPAddress() {
@@ -152,6 +160,10 @@
 
     public String getPath() {
         return path;
+    }
+
+    public int getIODeviceId() {
+        return ioDeviceId;
     }
 
     @Override
@@ -163,6 +175,7 @@
         JSONObject partition = new JSONObject();
         partition.put("ip", ipAddress);
         partition.put("path", path);
+        partition.put("ioDeviceId", ioDeviceId);
         return partition;
     }
-}
\ No newline at end of file
+}
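
A consumer-side sketch of the enriched connector response, assuming the servlet exposes the partition array under a top-level key such as "splits" (that key name is not visible in this excerpt and is assumed here); the per-partition keys "ip", "path", and "ioDeviceId" match what FilePartition.toJSONObject() above writes:

    import org.json.JSONArray;
    import org.json.JSONObject;

    public class ConnectorResponseReaderSketch {
        public static void main(String[] args) throws Exception {
            // Hypothetical response payload; real values come from a running cluster.
            String response = "{\"splits\": [{\"ip\": \"127.0.0.1\", "
                    + "\"path\": \"/tmp/nc1/iodevice0/storage\", \"ioDeviceId\": 0}]}";
            JSONArray splits = new JSONObject(response).getJSONArray("splits");
            for (int i = 0; i < splits.length(); i++) {
                JSONObject partition = splits.getJSONObject(i);
                // ioDeviceId lets a client map each local path to the I/O device that hosts it.
                System.out.println(partition.getString("ip") + " " + partition.getString("path")
                        + " ioDeviceId=" + partition.getInt("ioDeviceId"));
            }
        }
    }
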
diff --git 
a/asterix-app/src/main/java/edu/uci/ics/asterix/aql/translator/AqlTranslator.java
 
b/asterix-app/src/main/java/edu/uci/ics/asterix/aql/translator/AqlTranslator.java
index dad5975..25616ce 100644
--- 
a/asterix-app/src/main/java/edu/uci/ics/asterix/aql/translator/AqlTranslator.java
+++ 
b/asterix-app/src/main/java/edu/uci/ics/asterix/aql/translator/AqlTranslator.java
@@ -17,9 +17,9 @@
 import java.io.BufferedReader;
 import java.io.File;
 import java.io.FileInputStream;
+import java.io.IOException;
 import java.io.InputStream;
 import java.io.InputStreamReader;
-import java.io.PrintWriter;
 import java.rmi.RemoteException;
 import java.util.ArrayList;
 import java.util.Date;
@@ -41,7 +41,6 @@
 import org.json.JSONObject;
 
 import edu.uci.ics.asterix.api.common.APIFramework;
-import edu.uci.ics.asterix.api.common.Job;
 import edu.uci.ics.asterix.api.common.SessionConfig;
 import edu.uci.ics.asterix.api.common.SessionConfig.OutputFormat;
 import edu.uci.ics.asterix.aql.base.Statement;
@@ -83,7 +82,7 @@
 import edu.uci.ics.asterix.aql.expression.TypeExpression;
 import edu.uci.ics.asterix.aql.expression.WriteStatement;
 import edu.uci.ics.asterix.aql.util.FunctionUtils;
-import edu.uci.ics.asterix.common.config.AsterixCompilerProperties;
+import edu.uci.ics.asterix.common.config.AsterixExternalProperties;
 import edu.uci.ics.asterix.common.config.DatasetConfig.DatasetType;
 import 
edu.uci.ics.asterix.common.config.DatasetConfig.ExternalDatasetTransactionState;
 import edu.uci.ics.asterix.common.config.DatasetConfig.ExternalFilePendingOp;
@@ -147,13 +146,11 @@
 import edu.uci.ics.asterix.om.types.IAType;
 import edu.uci.ics.asterix.om.types.TypeSignature;
 import edu.uci.ics.asterix.om.util.AsterixAppContextInfo;
+import edu.uci.ics.asterix.om.util.AsterixClusterProperties;
 import 
edu.uci.ics.asterix.optimizer.rules.IntroduceSecondaryIndexInsertDeleteRule;
 import edu.uci.ics.asterix.result.ResultReader;
 import edu.uci.ics.asterix.result.ResultUtils;
-import edu.uci.ics.asterix.runtime.job.listener.JobEventListenerFactory;
-import 
edu.uci.ics.asterix.runtime.operators.std.FlushDatasetOperatorDescriptor;
 import 
edu.uci.ics.asterix.transaction.management.service.transaction.DatasetIdFactory;
-import 
edu.uci.ics.asterix.transaction.management.service.transaction.JobIdFactory;
 import edu.uci.ics.asterix.translator.AbstractAqlTranslator;
 import 
edu.uci.ics.asterix.translator.CompiledStatements.CompiledConnectFeedStatement;
 import 
edu.uci.ics.asterix.translator.CompiledStatements.CompiledCreateIndexStatement;
@@ -166,30 +163,23 @@
 import 
edu.uci.ics.asterix.translator.CompiledStatements.CompiledSubscribeFeedStatement;
 import edu.uci.ics.asterix.translator.CompiledStatements.ICompiledDmlStatement;
 import edu.uci.ics.asterix.translator.TypeTranslator;
-import 
edu.uci.ics.hyracks.algebricks.common.constraints.AlgebricksPartitionConstraint;
-import 
edu.uci.ics.hyracks.algebricks.common.constraints.AlgebricksPartitionConstraintHelper;
+import edu.uci.ics.asterix.util.FlushDatasetUtils;
+import edu.uci.ics.asterix.util.JobUtils;
 import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
 import edu.uci.ics.hyracks.algebricks.common.utils.Pair;
 import 
edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AbstractFunctionCallExpression.FunctionKind;
 import edu.uci.ics.hyracks.algebricks.data.IAWriterFactory;
 import edu.uci.ics.hyracks.algebricks.data.IResultSerializerFactoryProvider;
-import edu.uci.ics.hyracks.algebricks.runtime.base.IPushRuntimeFactory;
-import 
edu.uci.ics.hyracks.algebricks.runtime.operators.meta.AlgebricksMetaOperatorDescriptor;
-import 
edu.uci.ics.hyracks.algebricks.runtime.operators.std.EmptyTupleSourceRuntimeFactory;
 import 
edu.uci.ics.hyracks.algebricks.runtime.serializer.ResultSerializerFactoryProvider;
 import 
edu.uci.ics.hyracks.algebricks.runtime.writers.PrinterBasedWriterFactory;
 import edu.uci.ics.hyracks.api.client.IHyracksClientConnection;
-import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
 import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
-import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
 import edu.uci.ics.hyracks.api.dataset.IHyracksDataset;
 import edu.uci.ics.hyracks.api.dataset.ResultSetId;
 import edu.uci.ics.hyracks.api.io.FileReference;
 import edu.uci.ics.hyracks.api.job.JobId;
 import edu.uci.ics.hyracks.api.job.JobSpecification;
-import edu.uci.ics.hyracks.dataflow.std.connectors.OneToOneConnectorDescriptor;
 import edu.uci.ics.hyracks.dataflow.std.file.FileSplit;
-import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
 import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMMergePolicyFactory;
 
 /*
@@ -235,7 +225,7 @@
 
     /**
      * Compiles and submits for execution a list of AQL statements.
-     * 
+     *
      * @param hcc
      *            A Hyracks client connection that is used to submit a jobspec 
to Hyracks.
      * @param hdc
@@ -627,7 +617,7 @@
                 progress = ProgressState.ADDED_PENDINGOP_RECORD_TO_METADATA;
 
                 //#. runJob
-                runJob(hcc, jobSpec, true);
+                JobUtils.runJob(hcc, jobSpec, true);
 
                 //#. begin new metadataTxn
                 mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
@@ -660,8 +650,7 @@
                     JobSpecification jobSpec = 
DatasetOperations.createDropDatasetJobSpec(cds, metadataProvider);
                     MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
                     bActiveTxn = false;
-
-                    runJob(hcc, jobSpec, true);
+                    JobUtils.runJob(hcc, jobSpec, true);
                 } catch (Exception e2) {
                     e.addSuppressed(e2);
                     if (bActiveTxn) {
@@ -944,7 +933,7 @@
                                 "Failed to create job spec for replicating 
Files Index For external dataset");
                     }
                     filesIndexReplicated = true;
-                    runJob(hcc, spec, true);
+                    JobUtils.runJob(hcc, spec, true);
                 }
             }
 
@@ -987,7 +976,7 @@
             progress = ProgressState.ADDED_PENDINGOP_RECORD_TO_METADATA;
 
             //#. create the index artifact in NC.
-            runJob(hcc, spec, true);
+            JobUtils.runJob(hcc, spec, true);
 
             mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
             bActiveTxn = true;
@@ -1001,7 +990,7 @@
             MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
             bActiveTxn = false;
 
-            runJob(hcc, spec, true);
+            JobUtils.runJob(hcc, spec, true);
 
             //#. begin new metadataTxn
             mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
@@ -1040,7 +1029,7 @@
                             metadataProvider, ds);
                     MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
                     bActiveTxn = false;
-                    runJob(hcc, jobSpec, true);
+                    JobUtils.runJob(hcc, jobSpec, true);
                 } catch (Exception e2) {
                     e.addSuppressed(e2);
                     if (bActiveTxn) {
@@ -1062,7 +1051,7 @@
 
                     MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
                     bActiveTxn = false;
-                    runJob(hcc, jobSpec, true);
+                    JobUtils.runJob(hcc, jobSpec, true);
                 } catch (Exception e2) {
                     e.addSuppressed(e2);
                     if (bActiveTxn) {
@@ -1260,7 +1249,7 @@
             progress = ProgressState.ADDED_PENDINGOP_RECORD_TO_METADATA;
 
             for (JobSpecification jobSpec : jobsToExecute) {
-                runJob(hcc, jobSpec, true);
+                JobUtils.runJob(hcc, jobSpec, true);
             }
 
             mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
@@ -1287,7 +1276,7 @@
                 //   remove the all indexes in NC
                 try {
                     for (JobSpecification jobSpec : jobsToExecute) {
-                        runJob(hcc, jobSpec, true);
+                        JobUtils.runJob(hcc, jobSpec, true);
                     }
                 } catch (Exception e2) {
                     //do no throw exception since still the metadata needs to 
be compensated.
@@ -1380,12 +1369,12 @@
 
                 //# disconnect the feeds
                 for (Pair<JobSpecification, Boolean> p : 
disconnectJobList.values()) {
-                    runJob(hcc, p.first, true);
+                    JobUtils.runJob(hcc, p.first, true);
                 }
 
                 //#. run the jobs
                 for (JobSpecification jobSpec : jobsToExecute) {
-                    runJob(hcc, jobSpec, true);
+                    JobUtils.runJob(hcc, jobSpec, true);
                 }
 
                 mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
@@ -1422,7 +1411,7 @@
 
                 //#. run the jobs
                 for (JobSpecification jobSpec : jobsToExecute) {
-                    runJob(hcc, jobSpec, true);
+                    JobUtils.runJob(hcc, jobSpec, true);
                 }
                 if (indexes.size() > 0) {
                     ExternalDatasetsRegistry.INSTANCE.removeDatasetInfo(ds);
@@ -1451,7 +1440,7 @@
                 //   remove the all indexes in NC
                 try {
                     for (JobSpecification jobSpec : jobsToExecute) {
-                        runJob(hcc, jobSpec, true);
+                        JobUtils.runJob(hcc, jobSpec, true);
                     }
                 } catch (Exception e2) {
                     //do no throw exception since still the metadata needs to 
be compensated.
@@ -1521,7 +1510,6 @@
                 }
             }
 
-
             if (ds.getDatasetType() == DatasetType.INTERNAL) {
                 indexName = stmtIndexDrop.getIndexName().getValue();
                 Index index = MetadataManager.INSTANCE.getIndex(mdTxnCtx, 
dataverseName, datasetName, indexName);
@@ -1551,7 +1539,7 @@
                 progress = ProgressState.ADDED_PENDINGOP_RECORD_TO_METADATA;
 
                 for (JobSpecification jobSpec : jobsToExecute) {
-                    runJob(hcc, jobSpec, true);
+                    JobUtils.runJob(hcc, jobSpec, true);
                 }
 
                 //#. begin a new transaction
@@ -1616,7 +1604,7 @@
                 progress = ProgressState.ADDED_PENDINGOP_RECORD_TO_METADATA;
 
                 for (JobSpecification jobSpec : jobsToExecute) {
-                    runJob(hcc, jobSpec, true);
+                    JobUtils.runJob(hcc, jobSpec, true);
                 }
 
                 //#. begin a new transaction
@@ -1646,7 +1634,7 @@
                 //   remove the all indexes in NC
                 try {
                     for (JobSpecification jobSpec : jobsToExecute) {
-                        runJob(hcc, jobSpec, true);
+                        JobUtils.runJob(hcc, jobSpec, true);
                     }
                 } catch (Exception e2) {
                     //do no throw exception since still the metadata needs to 
be compensated.
@@ -1802,7 +1790,7 @@
             MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
             bActiveTxn = false;
             if (spec != null) {
-                runJob(hcc, spec, true);
+                JobUtils.runJob(hcc, spec, true);
             }
         } catch (Exception e) {
             if (bActiveTxn) {
@@ -1836,7 +1824,7 @@
             bActiveTxn = false;
 
             if (compiled != null) {
-                runJob(hcc, compiled, true);
+                JobUtils.runJob(hcc, compiled, true);
             }
 
         } catch (Exception e) {
@@ -1873,7 +1861,7 @@
             bActiveTxn = false;
 
             if (compiled != null) {
-                runJob(hcc, compiled, true);
+                JobUtils.runJob(hcc, compiled, true);
             }
 
         } catch (Exception e) {
@@ -1951,7 +1939,7 @@
             MetadataLockManager.INSTANCE.createFeedEnd(dataverseName, 
dataverseName + "." + feedName);
         }
     }
-    
+
     private void handleCreateFeedPolicyStatement(AqlMetadataProvider 
metadataProvider, Statement stmt,
             IHyracksClientConnection hcc) throws Exception {
         MetadataTransactionContext mdTxnCtx = 
MetadataManager.INSTANCE.beginTransaction();
@@ -2049,7 +2037,6 @@
             }
             MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
 
-
         } catch (Exception e) {
             abort(e, e, mdTxnCtx);
             throw e;
@@ -2057,7 +2044,7 @@
             MetadataLockManager.INSTANCE.dropFeedEnd(dataverseName, 
dataverseName + "." + feedName);
         }
     }
-    
+
     private void handleDropFeedPolicyStatement(AqlMetadataProvider 
metadataProvider, Statement stmt,
             IHyracksClientConnection hcc) throws Exception {
 
@@ -2084,7 +2071,6 @@
             MetadataLockManager.INSTANCE.dropFeedPolicyEnd(dataverseName, 
dataverseName + "." + policyName);
         }
     }
-
 
     private void handleConnectFeedStatement(AqlMetadataProvider 
metadataProvider, Statement stmt,
             IHyracksClientConnection hcc) throws Exception {
@@ -2142,7 +2128,7 @@
                         feedId.getDataverse(), feedId.getFeedName());
                 Pair<JobSpecification, IFeedAdapterFactory> pair = 
FeedOperations.buildFeedIntakeJobSpec(primaryFeed,
                         metadataProvider, policyAccessor);
-                runJob(hcc, pair.first, false);
+                JobUtils.runJob(hcc, pair.first, false);
                 IFeedAdapterFactory adapterFactory = pair.second;
                 if (adapterFactory.isRecordTrackingEnabled()) {
                     
FeedLifecycleListener.INSTANCE.registerFeedIntakeProgressTracker(feedConnId,
@@ -2180,11 +2166,11 @@
             }
         }
     }
-    
+
     /**
      * Generates a subscription request corresponding to a connect feed 
request. In addition, provides a boolean
      * flag indicating if feed intake job needs to be started (source primary 
feed not found to be active).
-     * 
+     *
      * @param dataverse
      * @param feed
      * @param dataset
@@ -2209,7 +2195,7 @@
             sourceFeedJoint = 
FeedLifecycleListener.INSTANCE.getAvailableFeedJoint(feedJointKey);
             if (sourceFeedJoint == null) { // the feed is currently not being 
ingested, i.e., it is unavailable.
                 connectionLocation = 
ConnectionLocation.SOURCE_FEED_INTAKE_STAGE;
-                FeedId sourceFeedId = feedJointKey.getFeedId(); // the 
root/primary feedId 
+                FeedId sourceFeedId = feedJointKey.getFeedId(); // the 
root/primary feedId
                 Feed primaryFeed = MetadataManager.INSTANCE.getFeed(mdTxnCtx, 
dataverse, sourceFeedId.getFeedName());
                 FeedJointKey intakeFeedJointKey = new 
FeedJointKey(sourceFeedId, new ArrayList<String>());
                 sourceFeedJoint = new FeedJoint(intakeFeedJointKey, 
primaryFeed.getFeedId(), connectionLocation,
@@ -2228,7 +2214,7 @@
                 }
             }
             // register the compute feed point that represents the final 
output from the collection of
-            // functions that will be applied. 
+            // functions that will be applied.
             if (!functionsToApply.isEmpty()) {
                 FeedJointKey computeFeedJointKey = new 
FeedJointKey(feed.getFeedId(), functionsToApply);
                 IFeedJoint computeFeedJoint = new 
FeedJoint(computeFeedJointKey, feed.getFeedId(),
@@ -2252,9 +2238,9 @@
         sourceFeedJoint.addConnectionRequest(request);
         return new Pair<FeedConnectionRequest, Boolean>(request, 
needIntakeJob);
     }
-    
+
     /*
-     * Gets the feed joint corresponding to the feed definition. Tuples 
constituting the feed are 
+     * Gets the feed joint corresponding to the feed definition. Tuples 
constituting the feed are
      * available at this feed joint.
      */
     private FeedJointKey getFeedJointKey(Feed feed, MetadataTransactionContext 
ctx) throws MetadataException {
@@ -2275,7 +2261,7 @@
 
         return new FeedJointKey(sourceFeed.getFeedId(), appliedFunctions);
     }
-    
+
     private void handleDisconnectFeedStatement(AqlMetadataProvider 
metadataProvider, Statement stmt,
             IHyracksClientConnection hcc) throws Exception {
         DisconnectFeedStatement cfs = (DisconnectFeedStatement) stmt;
@@ -2311,7 +2297,7 @@
             JobSpecification jobSpec = specDisconnectType.first;
             MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
             bActiveTxn = false;
-            runJob(hcc, jobSpec, true);
+            JobUtils.runJob(hcc, jobSpec, true);
 
             if (!specDisconnectType.second) {
                 
CentralFeedManager.getInstance().getFeedLoadManager().removeFeedActivity(connectionId);
@@ -2328,7 +2314,7 @@
                     dataverseName + "." + cfs.getFeedName());
         }
     }
-    
+
     private void handleSubscribeFeedStatement(AqlMetadataProvider 
metadataProvider, Statement stmt,
             IHyracksClientConnection hcc) throws Exception {
 
@@ -2360,14 +2346,13 @@
 
         try {
 
-          
             JobSpecification alteredJobSpec = 
FeedUtil.alterJobSpecificationForFeed(compiled, feedConnectionId, bfs
                     .getSubscriptionRequest().getPolicyParameters());
             MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
             bActiveTxn = false;
 
             if (compiled != null) {
-                runJob(hcc, alteredJobSpec, false);
+                JobUtils.runJob(hcc, alteredJobSpec, false);
             }
 
         } catch (Exception e) {
@@ -2452,7 +2437,7 @@
 
             //#. run the jobs
             for (JobSpecification jobSpec : jobsToExecute) {
-                runJob(hcc, jobSpec, true);
+                JobUtils.runJob(hcc, jobSpec, true);
             }
         } catch (Exception e) {
             if (bActiveTxn) {
@@ -2480,7 +2465,7 @@
 
             if (sessionConfig.isExecuteQuery() && compiled != null) {
                 
GlobalConfig.ASTERIX_LOGGER.info(compiled.toJSON().toString(1));
-                JobId jobId = runJob(hcc, compiled, false);
+                JobId jobId = JobUtils.runJob(hcc, compiled, false);
 
                 JSONObject response = new JSONObject();
                 switch (resultDelivery) {
@@ -2658,14 +2643,14 @@
             transactionState = ExternalDatasetTransactionState.BEGIN;
 
             //run the files update job
-            runJob(hcc, spec, true);
+            JobUtils.runJob(hcc, spec, true);
 
             for (Index index : indexes) {
                 if (!ExternalIndexingOperations.isFileIndex(index)) {
                     spec = ExternalIndexingOperations.buildIndexUpdateOp(ds, 
index, metadataFiles, deletedFiles,
                             addedFiles, appendedFiles, metadataProvider);
                     //run the files update job
-                    runJob(hcc, spec, true);
+                    JobUtils.runJob(hcc, spec, true);
                 }
             }
 
@@ -2684,7 +2669,7 @@
             bActiveTxn = false;
             transactionState = ExternalDatasetTransactionState.READY_TO_COMMIT;
             // We don't release the latch since this job is expected to be 
quick
-            runJob(hcc, spec, true);
+            JobUtils.runJob(hcc, spec, true);
             // Start a new metadata transaction to record the final state of 
the transaction
             mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
             metadataProvider.setMetadataTxnContext(mdTxnCtx);
@@ -2754,7 +2739,7 @@
                 MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
                 bActiveTxn = false;
                 try {
-                    runJob(hcc, spec, true);
+                    JobUtils.runJob(hcc, spec, true);
                 } catch (Exception e2) {
                     // This should never happen -- fix throw illegal
                     e.addSuppressed(e2);
@@ -2806,175 +2791,41 @@
     }
 
     private void handlePregelixStatement(AqlMetadataProvider metadataProvider, 
Statement stmt,
-            IHyracksClientConnection hcc) throws AsterixException, Exception {
-
+            IHyracksClientConnection hcc) throws Exception {
         RunStatement pregelixStmt = (RunStatement) stmt;
         boolean bActiveTxn = true;
-
         String dataverseNameFrom = 
getActiveDataverse(pregelixStmt.getDataverseNameFrom());
         String dataverseNameTo = 
getActiveDataverse(pregelixStmt.getDataverseNameTo());
         String datasetNameFrom = pregelixStmt.getDatasetNameFrom().getValue();
         String datasetNameTo = pregelixStmt.getDatasetNameTo().getValue();
 
-        if (dataverseNameFrom != dataverseNameTo) {
-            throw new AlgebricksException("Pregelix statements across 
different dataverses are not supported.");
-        }
-
         MetadataTransactionContext mdTxnCtx = 
MetadataManager.INSTANCE.beginTransaction();
         metadataProvider.setMetadataTxnContext(mdTxnCtx);
-
-        MetadataLockManager.INSTANCE.pregelixBegin(dataverseNameFrom, 
datasetNameFrom, datasetNameTo);
-
+        List<String> readDataverses = new ArrayList<String>();
+        readDataverses.add(dataverseNameFrom);
+        List<String> readDatasets = new ArrayList<String>();
+        readDatasets.add(datasetNameFrom);
+        MetadataLockManager.INSTANCE.insertDeleteBegin(dataverseNameTo, 
datasetNameTo, readDataverses, readDatasets);
         try {
+            prepareRunExternalRuntime(metadataProvider, hcc, pregelixStmt, 
dataverseNameFrom, dataverseNameTo,
+                    datasetNameFrom, datasetNameTo, mdTxnCtx);
 
-            // construct input paths
-            Index fromIndex = null;
-            List<Index> indexes = 
MetadataManager.INSTANCE.getDatasetIndexes(mdTxnCtx, dataverseNameFrom, 
pregelixStmt
-                    .getDatasetNameFrom().getValue());
-            for (Index ind : indexes) {
-                if (ind.isPrimaryIndex())
-                    fromIndex = ind;
-            }
-
-            if (fromIndex == null) {
-                throw new AlgebricksException("Tried to access non-existing 
dataset: " + datasetNameFrom);
-            }
-
-            Dataset datasetFrom = 
MetadataManager.INSTANCE.getDataset(mdTxnCtx, dataverseNameFrom, 
datasetNameFrom);
-            IFileSplitProvider fromSplits = 
metadataProvider.splitProviderAndPartitionConstraintsForDataset(
-                    dataverseNameFrom, datasetNameFrom, 
fromIndex.getIndexName(), datasetFrom.getDatasetDetails()
-                            .isTemp()).first;
-            StringBuilder fromSplitsPaths = new StringBuilder();
-
-            for (FileSplit f : fromSplits.getFileSplits()) {
-                fromSplitsPaths.append("asterix://" + f.getNodeName() + 
f.getLocalFile().getFile().getAbsolutePath());
-                fromSplitsPaths.append(",");
-            }
-            fromSplitsPaths.setLength(fromSplitsPaths.length() - 1);
-
-            // Construct output paths
-            Index toIndex = null;
-            indexes = MetadataManager.INSTANCE.getDatasetIndexes(mdTxnCtx, 
dataverseNameTo, pregelixStmt
-                    .getDatasetNameTo().getValue());
-            for (Index ind : indexes) {
-                if (ind.isPrimaryIndex())
-                    toIndex = ind;
-            }
-
-            if (toIndex == null) {
-                throw new AlgebricksException("Tried to access non-existing 
dataset: " + datasetNameTo);
-            }
-
-            Dataset datasetTo = MetadataManager.INSTANCE.getDataset(mdTxnCtx, 
dataverseNameTo, datasetNameTo);
-            IFileSplitProvider toSplits = 
metadataProvider.splitProviderAndPartitionConstraintsForDataset(
-                    dataverseNameTo, datasetNameTo, toIndex.getIndexName(), 
datasetTo.getDatasetDetails().isTemp()).first;
-            StringBuilder toSplitsPaths = new StringBuilder();
-
-            for (FileSplit f : toSplits.getFileSplits()) {
-                toSplitsPaths.append("asterix://" + f.getNodeName() + 
f.getLocalFile().getFile().getAbsolutePath());
-                toSplitsPaths.append(",");
-            }
-            toSplitsPaths.setLength(toSplitsPaths.length() - 1);
-
-            try {
-                Dataset toDataset = 
MetadataManager.INSTANCE.getDataset(mdTxnCtx, dataverseNameTo, datasetNameTo);
-                DropStatement dropStmt = new DropStatement(new 
Identifier(dataverseNameTo),
-                        pregelixStmt.getDatasetNameTo(), true);
-                this.handleDatasetDropStatement(metadataProvider, dropStmt, 
hcc);
-
-                IDatasetDetailsDecl idd = new InternalDetailsDecl(new 
Identifier(toDataset.getDatasetDetails()
-                        .getNodeGroupName()), toIndex.getKeyFieldNames(), 
false, toDataset.getDatasetDetails()
-                        .getCompactionPolicy(), 
toDataset.getDatasetDetails().getCompactionPolicyProperties(), null,
-                        toDataset.getDatasetDetails().isTemp());
-                DatasetDecl createToDataset = new DatasetDecl(new 
Identifier(dataverseNameTo),
-                        pregelixStmt.getDatasetNameTo(), new 
Identifier(toDataset.getItemTypeName()),
-                        toDataset.getHints(), toDataset.getDatasetType(), idd, 
false);
-                this.handleCreateDatasetStatement(metadataProvider, 
createToDataset, hcc);
-            } catch (Exception e) {
-                e.printStackTrace();
-                throw new AlgebricksException("Error cleaning the result 
dataset. This should not happen.");
-            }
-
-            // Flush source dataset
-            flushDataset(hcc, metadataProvider, mdTxnCtx, dataverseNameFrom, 
datasetNameFrom, fromIndex.getIndexName());
-
-            // call Pregelix
+            // Finds the PREGELIX_HOME.
             String pregelix_home = System.getenv("PREGELIX_HOME");
             if (pregelix_home == null) {
                 throw new AlgebricksException("PREGELIX_HOME is not defined!");
             }
 
-            // construct command
-            ArrayList<String> cmd = new ArrayList<String>();
-            cmd.add("bin/pregelix");
-            cmd.add(pregelixStmt.getParameters().get(0)); // jar
-            cmd.add(pregelixStmt.getParameters().get(1)); // class
-            for (String s : pregelixStmt.getParameters().get(2).split(" ")) {
-                cmd.add(s);
-            }
-            cmd.add("-inputpaths");
-            cmd.add(fromSplitsPaths.toString());
-            cmd.add("-outputpath");
-            cmd.add(toSplitsPaths.toString());
-
-            StringBuilder command = new StringBuilder();
-            for (String s : cmd) {
-                command.append(s);
-                command.append(" ");
-            }
-            LOGGER.info("Running Pregelix Command: " + command.toString());
-
+            // Constructs the pregelix command line.
+            List<String> cmd = constructPregelixCommand(pregelixStmt, 
dataverseNameFrom, datasetNameTo,
+                    dataverseNameFrom, dataverseNameTo);
             ProcessBuilder pb = new ProcessBuilder(cmd);
             pb.directory(new File(pregelix_home));
             pb.redirectErrorStream(true);
 
             MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
             bActiveTxn = false;
-
-            Process pr = pb.start();
-
-            int resultState = 0;
-
-            BufferedReader in = new BufferedReader(new 
InputStreamReader(pr.getInputStream()));
-            String line;
-            while ((line = in.readLine()) != null) {
-                System.out.println(line);
-                if (line.contains("job finished")) {
-                    resultState = 1;
-                }
-                if (line.contains("Exception") || line.contains("Error")) {
-
-                    if (line.contains("Connection refused")) {
-                        throw new AlgebricksException(
-                                "The connection to your Pregelix cluster was 
refused. Is it running? Is the port in the query correct?");
-                    }
-
-                    if (line.contains("Could not find or load main class")) {
-                        throw new AlgebricksException(
-                                "The main class of your Pregelix query was not 
found. Is the path to your .jar file correct?");
-                    }
-
-                    if (line.contains("ClassNotFoundException")) {
-                        throw new AlgebricksException(
-                                "The vertex class of your Pregelix query was 
not found. Does it exist? Is the spelling correct?");
-                    }
-
-                    if (line.contains("HyracksException")) {
-                        throw new AlgebricksException(
-                                "Something went wrong executing your Pregelix 
Job (HyracksException). Check the configuration of STORAGE_BUFFERCACHE_PAGESIZE 
and STORAGE_MEMORYCOMPONENT_PAGESIZE."
-                                        + "It must match the one of Asterix. 
You can use managix describe -admin to find out the right configuration. "
-                                        + "Check also if your datatypes in 
Pregelix and Asterix are matching.");
-                    }
-
-                    throw new AlgebricksException(
-                            "Something went wrong executing your Pregelix Job. 
Perhaps the Pregelix cluster needs to be restartet. "
-                                    + "Check the following things: Are the 
datatypes of Asterix and Pregelix matching? "
-                                    + "Is the server configuration correct 
(node names, buffer sizes, framesize)? Check the logfiles for more details.");
-                }
-            }
-            pr.waitFor();
-            in.close();
-
+            int resultState = executeExternalShellProgram(pb);
             if (resultState != 1) {
                 throw new AlgebricksException(
                         "Something went wrong executing your Pregelix Job. 
Perhaps the Pregelix cluster needs to be restartet. "
@@ -2987,60 +2838,155 @@
             }
             throw e;
         } finally {
-            MetadataLockManager.INSTANCE.pregelixEnd(dataverseNameFrom, 
datasetNameFrom, datasetNameTo);
+            MetadataLockManager.INSTANCE.insertDeleteEnd(dataverseNameTo, 
datasetNameTo, readDataverses, readDatasets);
         }
     }
 
-    private void flushDataset(IHyracksClientConnection hcc, 
AqlMetadataProvider metadataProvider,
-            MetadataTransactionContext mdTxnCtx, String dataverseName, String 
datasetName, String indexName)
-            throws Exception {
-        AsterixCompilerProperties compilerProperties = 
AsterixAppContextInfo.getInstance().getCompilerProperties();
-        int frameSize = compilerProperties.getFrameSize();
-        JobSpecification spec = new JobSpecification(frameSize);
+    // Prepares the source and sink datasets for running a program on an external runtime.
+    private void prepareRunExternalRuntime(AqlMetadataProvider metadataProvider, IHyracksClientConnection hcc,
+            RunStatement pregelixStmt, String dataverseNameFrom, String dataverseNameTo, String datasetNameFrom,
+            String datasetNameTo, MetadataTransactionContext mdTxnCtx) throws Exception {
+        // Validates the source/sink dataverses and datasets.
+        Dataset fromDataset = metadataProvider.findDataset(dataverseNameFrom, datasetNameFrom);
+        if (fromDataset == null) {
+            throw new AsterixException("The source dataset " + dataverseNameFrom + "." + datasetNameFrom
+                    + " could not be found for the Run command");
+        }
+        Dataset toDataset = metadataProvider.findDataset(dataverseNameTo, datasetNameTo);
+        if (toDataset == null) {
+            throw new AsterixException("The sink dataset " + dataverseNameTo + "." + datasetNameTo
+                    + " could not be found for the Run command");
+        }
 
-        RecordDescriptor[] rDescs = new RecordDescriptor[] { new 
RecordDescriptor(new ISerializerDeserializer[] {}) };
-        AlgebricksMetaOperatorDescriptor emptySource = new 
AlgebricksMetaOperatorDescriptor(spec, 0, 1,
-                new IPushRuntimeFactory[] { new 
EmptyTupleSourceRuntimeFactory() }, rDescs);
+        try {
+            // Cleans up the sink dataset -- Drop and then Create.
+            DropStatement dropStmt = new DropStatement(new 
Identifier(dataverseNameTo),
+                    pregelixStmt.getDatasetNameTo(), true);
+            this.handleDatasetDropStatement(metadataProvider, dropStmt, hcc);
+            IDatasetDetailsDecl idd = new InternalDetailsDecl(new 
Identifier(toDataset.getDatasetDetails()
+                    .getNodeGroupName()),
+                    ((InternalDatasetDetails) 
toDataset.getDatasetDetails()).getPartitioningKey(), false, toDataset
+                            .getDatasetDetails().getCompactionPolicy(), 
toDataset.getDatasetDetails()
+                            .getCompactionPolicyProperties(), null, 
toDataset.getDatasetDetails().isTemp());
+            DatasetDecl createToDataset = new DatasetDecl(new 
Identifier(dataverseNameTo),
+                    pregelixStmt.getDatasetNameTo(), new 
Identifier(toDataset.getItemTypeName()), toDataset.getHints(),
+                    toDataset.getDatasetType(), idd, false);
+            this.handleCreateDatasetStatement(metadataProvider, 
createToDataset, hcc);
+        } catch (Exception e) {
+            e.printStackTrace();
+            throw new AlgebricksException("Error cleaning the result dataset. 
This should not happen.");
+        }
 
-        edu.uci.ics.asterix.common.transactions.JobId jobId = 
JobIdFactory.generateJobId();
-        Dataset dataset = MetadataManager.INSTANCE.getDataset(mdTxnCtx, 
dataverseName, datasetName);
-        FlushDatasetOperatorDescriptor flushOperator = new 
FlushDatasetOperatorDescriptor(spec, jobId,
-                dataset.getDatasetId());
-
-        spec.connect(new OneToOneConnectorDescriptor(spec), emptySource, 0, 
flushOperator, 0);
-
-        Pair<IFileSplitProvider, AlgebricksPartitionConstraint> 
primarySplitsAndConstraint = metadataProvider
-                .splitProviderAndPartitionConstraintsForDataset(dataverseName, 
datasetName, indexName, dataset
-                        .getDatasetDetails().isTemp());
-        AlgebricksPartitionConstraint primaryPartitionConstraint = 
primarySplitsAndConstraint.second;
-
-        
AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(spec, 
emptySource,
-                primaryPartitionConstraint);
-
-        JobEventListenerFactory jobEventListenerFactory = new 
JobEventListenerFactory(jobId, true);
-        spec.setJobletEventListenerFactory(jobEventListenerFactory);
-        runJob(hcc, spec, true);
+        // Flushes the source dataset so that the external job reads all committed records from disk.
+        FlushDatasetUtils.flushDataset(hcc, metadataProvider, mdTxnCtx, 
dataverseNameFrom, datasetNameFrom,
+                datasetNameFrom);
     }
 
-    private JobId runJob(IHyracksClientConnection hcc, JobSpecification spec, 
boolean waitForCompletion)
-            throws Exception {
-        JobId[] jobIds = executeJobArray(hcc, new Job[] { new Job(spec) }, 
sessionConfig.out(), waitForCompletion);
-        return jobIds[0];
-    }
+    // Executes an external shell command and returns 1 if its output reports "job finished", 0 otherwise.
+    private int executeExternalShellProgram(ProcessBuilder pb) throws 
IOException, AlgebricksException,
+            InterruptedException {
+        Process pr = pb.start();
+        int resultState = 0;
+        BufferedReader in = new BufferedReader(new 
InputStreamReader(pr.getInputStream()));
+        String line;
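+        // Scans the process output line by line: "job finished" marks success; known failure patterns are surfaced as AlgebricksExceptions with hints.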
+        while ((line = in.readLine()) != null) {
+            System.out.println(line);
+            if (line.contains("job finished")) {
+                resultState = 1;
+            }
+            if (line.contains("Exception") || line.contains("Error")) {
 
-    public JobId[] executeJobArray(IHyracksClientConnection hcc, Job[] jobs, 
PrintWriter out, boolean waitForCompletion)
-            throws Exception {
-        JobId[] startedJobIds = new JobId[jobs.length];
-        for (int i = 0; i < jobs.length; i++) {
-            JobSpecification spec = jobs[i].getJobSpec();
-            spec.setMaxReattempts(0);
-            JobId jobId = hcc.startJob(spec);
-            startedJobIds[i] = jobId;
-            if (waitForCompletion) {
-                hcc.waitForCompletion(jobId);
+                if (line.contains("Connection refused")) {
+                    throw new AlgebricksException(
+                            "The connection to your Pregelix cluster was 
refused. Is it running? Is the port in the query correct?");
+                }
+
+                if (line.contains("Could not find or load main class")) {
+                    throw new AlgebricksException(
+                            "The main class of your Pregelix query was not 
found. Is the path to your .jar file correct?");
+                }
+
+                if (line.contains("ClassNotFoundException")) {
+                    throw new AlgebricksException(
+                            "The vertex class of your Pregelix query was not 
found. Does it exist? Is the spelling correct?");
+                }
+
+                if (line.contains("HyracksException")) {
+                    throw new AlgebricksException(
+                            "Something went wrong executing your Pregelix Job (HyracksException). Check the configuration of STORAGE_BUFFERCACHE_PAGESIZE and STORAGE_MEMORYCOMPONENT_PAGESIZE. "
+                                    + "They must match those of Asterix. You can use managix describe -admin to find out the right configuration. "
+                                    + "Also check that your datatypes in Pregelix and Asterix match.");
+                }
+
+                throw new AlgebricksException(
+                        "Something went wrong executing your Pregelix Job. 
Perhaps the Pregelix cluster needs to be restartet. "
+                                + "Check the following things: Are the 
datatypes of Asterix and Pregelix matching? "
+                                + "Is the server configuration correct (node 
names, buffer sizes, framesize)? Check the logfiles for more details.");
             }
         }
-        return startedJobIds;
+        pr.waitFor();
+        in.close();
+        return resultState;
+    }
+
+    // Constructs a Pregelix command line.
+    private List<String> constructPregelixCommand(RunStatement pregelixStmt, 
String fromDataverseName,
+            String fromDatasetName, String toDataverseName, String 
toDatasetName) {
+        // construct command
+        List<String> cmd = new ArrayList<String>();
+        cmd.add("bin/pregelix");
+        cmd.add(pregelixStmt.getParameters().get(0)); // jar
+        cmd.add(pregelixStmt.getParameters().get(1)); // class
+
+        // User parameters.
+        for (String s : pregelixStmt.getParameters().get(2).split(" ")) {
+            cmd.add(s);
+        }
+
+        // Constructs AsterixDB parameters, e.g., URL, source dataset and sink 
dataset.
+        AsterixExternalProperties externalProperties = 
AsterixAppContextInfo.getInstance().getExternalProperties();
+        AsterixClusterProperties clusterProperties = 
AsterixClusterProperties.INSTANCE;
+        String clientIP = 
clusterProperties.getCluster().getMasterNode().getClientIp();
+        StringBuilder asterixdbParameterBuilder = new StringBuilder();
+        asterixdbParameterBuilder.append("pregelix.asterixdb.url=" + "http://"; 
+ clientIP + ":"
+                + externalProperties.getWebInterfacePort() + ",");
+        asterixdbParameterBuilder.append("pregelix.asterixdb.source=true,");
+        asterixdbParameterBuilder.append("pregelix.asterixdb.sink=true,");
+        asterixdbParameterBuilder.append("pregelix.asterixdb.input.dataverse=" 
+ fromDataverseName + ",");
+        asterixdbParameterBuilder.append("pregelix.asterixdb.input.dataset=" + 
fromDatasetName + ",");
+        
asterixdbParameterBuilder.append("pregelix.asterixdb.output.dataverse=" + 
toDataverseName + ",");
+        asterixdbParameterBuilder.append("pregelix.asterixdb.output.dataset=" 
+ toDatasetName + ",");
+
+        // Constructs the final execution command.
+        StringBuilder command = new StringBuilder();
+        boolean custPropAdded = false;
+        boolean meetCustProp = false;
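+        // If the user already passed "-cust-prop", splice the AsterixDB source/sink properties in right after it; otherwise append a "-cust-prop" clause with default converter classes below.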
+        for (String s : cmd) {
+            if (meetCustProp) {
+                // Appends the AsterixDB data source/sink parameters.
+                command.append(asterixdbParameterBuilder.toString());
+                meetCustProp = false;
+                custPropAdded = true;
+            }
+            command.append(s);
+            command.append(" ");
+            if (s.equals("-cust-prop")) {
+                meetCustProp = true;
+            }
+        }
+        if (!custPropAdded) {
+            command.append("-cust-prop ");
+            // Appends the AsterixDB data source/sink parameters.
+            command.append(asterixdbParameterBuilder.toString());
+            // Appends default converter classes.
+            
command.append("pregelix.asterixdb.input.converterclass=edu.uci.ics.pregelix.example.converter.VLongIdInputVertexConverter,");
+            
command.append("pregelix.asterixdb.output.converterclass=edu.uci.ics.pregelix.example.converter.VLongIdOutputVertexConverter");
+        }
+
+        String commandStr = command.toString().trim();
+        LOGGER.info("Running Pregelix Command: " + commandStr);
+        List<String> resultCommand = new ArrayList<String>();
+        resultCommand.add(commandStr);
+        return resultCommand;
     }
 
     private String getActiveDataverseName(String dataverse) throws 
AlgebricksException {
diff --git 
a/asterix-app/src/main/java/edu/uci/ics/asterix/util/FlushDatasetUtils.java 
b/asterix-app/src/main/java/edu/uci/ics/asterix/util/FlushDatasetUtils.java
new file mode 100644
index 0000000..3dc9dbf
--- /dev/null
+++ b/asterix-app/src/main/java/edu/uci/ics/asterix/util/FlushDatasetUtils.java
@@ -0,0 +1,72 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.asterix.util;
+
+import edu.uci.ics.asterix.common.config.AsterixCompilerProperties;
+import edu.uci.ics.asterix.metadata.MetadataTransactionContext;
+import edu.uci.ics.asterix.metadata.declared.AqlMetadataProvider;
+import edu.uci.ics.asterix.metadata.entities.Dataset;
+import edu.uci.ics.asterix.om.util.AsterixAppContextInfo;
+import edu.uci.ics.asterix.runtime.job.listener.JobEventListenerFactory;
+import 
edu.uci.ics.asterix.runtime.operators.std.FlushDatasetOperatorDescriptor;
+import 
edu.uci.ics.asterix.transaction.management.service.transaction.JobIdFactory;
+import 
edu.uci.ics.hyracks.algebricks.common.constraints.AlgebricksPartitionConstraint;
+import 
edu.uci.ics.hyracks.algebricks.common.constraints.AlgebricksPartitionConstraintHelper;
+import edu.uci.ics.hyracks.algebricks.common.utils.Pair;
+import edu.uci.ics.hyracks.algebricks.runtime.base.IPushRuntimeFactory;
+import 
edu.uci.ics.hyracks.algebricks.runtime.operators.meta.AlgebricksMetaOperatorDescriptor;
+import 
edu.uci.ics.hyracks.algebricks.runtime.operators.std.EmptyTupleSourceRuntimeFactory;
+import edu.uci.ics.hyracks.api.client.IHyracksClientConnection;
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
+import edu.uci.ics.hyracks.api.job.JobSpecification;
+import edu.uci.ics.hyracks.dataflow.std.connectors.OneToOneConnectorDescriptor;
+import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
+
+public class FlushDatasetUtils {
+
+    public static void flushDataset(IHyracksClientConnection hcc, 
AqlMetadataProvider metadataProvider,
+            MetadataTransactionContext mdTxnCtx, String dataverseName, String 
datasetName, String indexName)
+            throws Exception {
+        AsterixCompilerProperties compilerProperties = 
AsterixAppContextInfo.getInstance().getCompilerProperties();
+        int frameSize = compilerProperties.getFrameSize();
+        JobSpecification spec = new JobSpecification(frameSize);
+
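+        // Wires an empty tuple source into a FlushDatasetOperatorDescriptor to flush the dataset's in-memory component to disk.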
+        RecordDescriptor[] rDescs = new RecordDescriptor[] { new 
RecordDescriptor(new ISerializerDeserializer[] {}) };
+        AlgebricksMetaOperatorDescriptor emptySource = new 
AlgebricksMetaOperatorDescriptor(spec, 0, 1,
+                new IPushRuntimeFactory[] { new 
EmptyTupleSourceRuntimeFactory() }, rDescs);
+
+        edu.uci.ics.asterix.common.transactions.JobId jobId = 
JobIdFactory.generateJobId();
+        Dataset dataset = metadataProvider.findDataset(dataverseName, 
datasetName);
+        FlushDatasetOperatorDescriptor flushOperator = new 
FlushDatasetOperatorDescriptor(spec, jobId,
+                dataset.getDatasetId());
+
+        spec.connect(new OneToOneConnectorDescriptor(spec), emptySource, 0, 
flushOperator, 0);
+
+        Pair<IFileSplitProvider, AlgebricksPartitionConstraint> 
primarySplitsAndConstraint = metadataProvider
+                .splitProviderAndPartitionConstraintsForDataset(dataverseName, 
datasetName, indexName, dataset
+                        .getDatasetDetails().isTemp());
+        AlgebricksPartitionConstraint primaryPartitionConstraint = 
primarySplitsAndConstraint.second;
+
+        
AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(spec, 
emptySource,
+                primaryPartitionConstraint);
+
+        JobEventListenerFactory jobEventListenerFactory = new 
JobEventListenerFactory(jobId, true);
+        spec.setJobletEventListenerFactory(jobEventListenerFactory);
+        JobUtils.runJob(hcc, spec, true);
+    }
+
+}
diff --git a/asterix-app/src/main/java/edu/uci/ics/asterix/util/JobUtils.java 
b/asterix-app/src/main/java/edu/uci/ics/asterix/util/JobUtils.java
new file mode 100644
index 0000000..72977e1
--- /dev/null
+++ b/asterix-app/src/main/java/edu/uci/ics/asterix/util/JobUtils.java
@@ -0,0 +1,60 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.asterix.util;
+
+import edu.uci.ics.asterix.api.common.Job;
+import edu.uci.ics.hyracks.api.client.IHyracksClientConnection;
+import edu.uci.ics.hyracks.api.job.JobId;
+import edu.uci.ics.hyracks.api.job.JobSpecification;
+
+public class JobUtils {
+
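+    // Submits a single job specification to Hyracks and optionally waits for its completion.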
+    public static JobId runJob(IHyracksClientConnection hcc, JobSpecification 
spec, boolean waitForCompletion)
+            throws Exception {
+        JobId[] jobIds = executeJobArray(hcc, new Job[] { new Job(spec) }, 
waitForCompletion);
+        return jobIds[0];
+    }
+
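+    // Starts each job with reattempts disabled and returns the started JobIds in submission order.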
+    public static JobId[] executeJobArray(IHyracksClientConnection hcc, Job[] 
jobs, boolean waitForCompletion)
+            throws Exception {
+        JobId[] startedJobIds = new JobId[jobs.length];
+        for (int i = 0; i < jobs.length; i++) {
+            JobSpecification spec = jobs[i].getJobSpec();
+            spec.setMaxReattempts(0);
+            JobId jobId = hcc.startJob(spec);
+            startedJobIds[i] = jobId;
+            if (waitForCompletion) {
+                hcc.waitForCompletion(jobId);
+            }
+        }
+        return startedJobIds;
+    }
+
+}
diff --git 
a/asterix-app/src/test/java/edu/uci/ics/asterix/api/http/servlet/ConnectorAPIServletTest.java
 
b/asterix-app/src/test/java/edu/uci/ics/asterix/api/http/servlet/ConnectorAPIServletTest.java
index 7f5f480..8497f0b 100644
--- 
a/asterix-app/src/test/java/edu/uci/ics/asterix/api/http/servlet/ConnectorAPIServletTest.java
+++ 
b/asterix-app/src/test/java/edu/uci/ics/asterix/api/http/servlet/ConnectorAPIServletTest.java
@@ -151,9 +151,11 @@
         JSONObject element1 = new JSONObject();
         element1.put("ip", "127.0.0.1");
         element1.put("path", 
splits[0].getLocalFile().getFile().getAbsolutePath());
+        element1.put("ioDeviceId", 0);
         JSONObject element2 = new JSONObject();
         element2.put("ip", "127.0.0.2");
         element2.put("path", 
splits[1].getLocalFile().getFile().getAbsolutePath());
+        element2.put("ioDeviceId", 0);
         splitsArray.put(element1);
         splitsArray.put(element2);
         expectedResponse.put("splits", splitsArray);
diff --git 
a/asterix-app/src/test/java/edu/uci/ics/asterix/aql/translator/AqlTranslatorTest.java
 
b/asterix-app/src/test/java/edu/uci/ics/asterix/aql/translator/AqlTranslatorTest.java
new file mode 100644
index 0000000..806525a
--- /dev/null
+++ 
b/asterix-app/src/test/java/edu/uci/ics/asterix/aql/translator/AqlTranslatorTest.java
@@ -0,0 +1,116 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.asterix.aql.translator;
+
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.when;
+
+import java.lang.reflect.Field;
+import java.lang.reflect.Modifier;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+
+import junit.extensions.PA;
+import junit.framework.Assert;
+
+import org.junit.Test;
+
+import edu.uci.ics.asterix.api.common.SessionConfig;
+import edu.uci.ics.asterix.aql.base.Statement;
+import edu.uci.ics.asterix.aql.expression.RunStatement;
+import edu.uci.ics.asterix.common.config.AsterixExternalProperties;
+import edu.uci.ics.asterix.event.schema.cluster.Cluster;
+import edu.uci.ics.asterix.event.schema.cluster.MasterNode;
+import edu.uci.ics.asterix.om.util.AsterixAppContextInfo;
+import edu.uci.ics.asterix.om.util.AsterixClusterProperties;
+
+@SuppressWarnings({ "unchecked", "deprecation" })
+public class AqlTranslatorTest {
+
+    @Test
+    public void test() throws Exception {
+        List<Statement> statements = new ArrayList<Statement>();
+        SessionConfig mockSessionConfig = mock(SessionConfig.class);
+        RunStatement mockRunStatement = mock(RunStatement.class);
+
+        // Mocks AsterixAppContextInfo.
+        AsterixAppContextInfo mockAsterixAppContextInfo = 
mock(AsterixAppContextInfo.class);
+        
setFinalStaticField(AsterixAppContextInfo.class.getDeclaredField("INSTANCE"), 
mockAsterixAppContextInfo);
+        AsterixExternalProperties mockAsterixExternalProperties = 
mock(AsterixExternalProperties.class);
+        
when(mockAsterixAppContextInfo.getExternalProperties()).thenReturn(mockAsterixExternalProperties);
+        
when(mockAsterixExternalProperties.getWebInterfacePort()).thenReturn(19001);
+
+        // Mocks AsterixClusterProperties.
+        Cluster mockCluster = mock(Cluster.class);
+        MasterNode mockMasterNode = mock(MasterNode.class);
+        AsterixClusterProperties mockClusterProperties = 
mock(AsterixClusterProperties.class);
+        
setFinalStaticField(AsterixClusterProperties.class.getDeclaredField("INSTANCE"),
 mockClusterProperties);
+        when(mockClusterProperties.getCluster()).thenReturn(mockCluster);
+        when(mockCluster.getMasterNode()).thenReturn(mockMasterNode);
+        when(mockMasterNode.getClientIp()).thenReturn("127.0.0.1");
+
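+        // Builds the translator under test and the run-statement parameters (jar, vertex class, user arguments).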
+        AqlTranslator aqlTranslator = new AqlTranslator(statements, 
mockSessionConfig);
+        List<String> parameters = new ArrayList<String>();
+        
parameters.add("examples/pregelix-example-0.2.13-jar-with-dependencies.jar");
+        parameters.add("edu.uci.ics.pregelix.example.PageRankVertex");
+        parameters.add("-ip 10.0.2.15 -port 3199");
+        when(mockRunStatement.getParameters()).thenReturn(parameters);
+        // Tests command construction when the user does not pass "-cust-prop".
+        List<String> cmds = (List<String>) PA.invokeMethod(aqlTranslator,
+                
"constructPregelixCommand(edu.uci.ics.asterix.aql.expression.RunStatement,"
+                        + "String,String,String,String)", mockRunStatement, 
"fromDataverse", "fromDataset",
+                "toDataverse", "toDataset");
+        List<String> expectedCmds = Arrays
+                .asList(new String[] { "bin/pregelix 
examples/pregelix-example-0.2.13-jar-with-dependencies.jar "
+                        + "edu.uci.ics.pregelix.example.PageRankVertex -ip 
10.0.2.15 -port 3199 -cust-prop "
+                        + "pregelix.asterixdb.url=http://127.0.0.1:19001,pregelix.asterixdb.source=true,"
+                        + 
"pregelix.asterixdb.sink=true,pregelix.asterixdb.input.dataverse=fromDataverse,"
+                        + 
"pregelix.asterixdb.input.dataset=fromDataset,pregelix.asterixdb.output.dataverse=toDataverse,"
+                        + 
"pregelix.asterixdb.output.dataset=toDataset,pregelix.asterixdb.input.converterclass=edu.uci.ics.pregelix.example.converter.VLongIdInputVertexConverter,"
+                        + 
"pregelix.asterixdb.output.converterclass=edu.uci.ics.pregelix.example.converter.VLongIdOutputVertexConverter"
 });
+        Assert.assertEquals(expectedCmds, cmds);
+
+        parameters.remove(parameters.size() - 1);
+        parameters
+                .add("-ip 10.0.2.15 -port 3199 -cust-prop "
+                        + 
"pregelix.asterixdb.input.converterclass=edu.uci.ics.pregelix.example.converter.TestInputVertexConverter,"
+                        + 
"pregelix.asterixdb.output.converterclass=edu.uci.ics.pregelix.example.converter.TestOutputVertexConverter");
+        // Tests command construction when the user passes "-cust-prop" explicitly.
+        cmds = (List<String>) PA.invokeMethod(aqlTranslator,
+                
"constructPregelixCommand(edu.uci.ics.asterix.aql.expression.RunStatement,"
+                        + "String,String,String,String)", mockRunStatement, 
"fromDataverse", "fromDataset",
+                "toDataverse", "toDataset");
+        expectedCmds = Arrays
+                .asList(new String[] { "bin/pregelix 
examples/pregelix-example-0.2.13-jar-with-dependencies.jar "
+                        + "edu.uci.ics.pregelix.example.PageRankVertex -ip 
10.0.2.15 -port 3199 -cust-prop "
+                        + "pregelix.asterixdb.url=http://127.0.0.1:19001,pregelix.asterixdb.source=true,"
+                        + 
"pregelix.asterixdb.sink=true,pregelix.asterixdb.input.dataverse=fromDataverse,"
+                        + 
"pregelix.asterixdb.input.dataset=fromDataset,pregelix.asterixdb.output.dataverse=toDataverse,"
+                        + 
"pregelix.asterixdb.output.dataset=toDataset,pregelix.asterixdb.input.converterclass=edu.uci.ics.pregelix.example.converter.TestInputVertexConverter,"
+                        + 
"pregelix.asterixdb.output.converterclass=edu.uci.ics.pregelix.example.converter.TestOutputVertexConverter"
 });
+        Assert.assertEquals(expectedCmds, cmds);
+    }
+
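+    // Replaces a final static field (the INSTANCE singletons mocked above) with a new value via reflection.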
+    private void setFinalStaticField(Field field, Object newValue) throws 
Exception {
+        field.setAccessible(true);
+        // remove final modifier from field
+        Field modifiersField = Field.class.getDeclaredField("modifiers");
+        modifiersField.setAccessible(true);
+        modifiersField.setInt(field, field.getModifiers() & ~Modifier.FINAL);
+        field.set(null, newValue);
+    }
+}
diff --git 
a/asterix-app/src/test/java/edu/uci/ics/asterix/test/dml/DmlTest.java 
b/asterix-app/src/test/java/edu/uci/ics/asterix/test/dml/DmlTest.java
index 9222081..c6c2e56 100644
--- a/asterix-app/src/test/java/edu/uci/ics/asterix/test/dml/DmlTest.java
+++ b/asterix-app/src/test/java/edu/uci/ics/asterix/test/dml/DmlTest.java
@@ -26,7 +26,7 @@
 import edu.uci.ics.asterix.api.common.AsterixHyracksIntegrationUtil;
 import edu.uci.ics.asterix.api.java.AsterixJavaClient;
 import edu.uci.ics.asterix.common.exceptions.AsterixException;
-import edu.uci.ics.asterix.test.aql.TestsUtils;
+import edu.uci.ics.asterix.test.aql.TestsExecutor;
 import edu.uci.ics.asterix.test.base.AsterixTestHelper;
 
 public class DmlTest {
@@ -36,12 +36,11 @@
     private static final String SEPARATOR = File.separator;
     private static final String PATH_BASE = "src" + SEPARATOR + "test" + 
SEPARATOR + "resources" + SEPARATOR + "dmlts"
             + SEPARATOR;
-    private static final String PATH_EXPECTED = PATH_BASE + "results" + 
SEPARATOR;
     private static final String PATH_SCRIPTS = PATH_BASE + "scripts" + 
SEPARATOR;
     private static final String LOAD_FOR_ENLIST_FILE = PATH_SCRIPTS + 
"load-cust.aql";
-    private static final String ENLIST_FILE = PATH_SCRIPTS + 
"enlist-scan-cust.aql";
 
     private static final PrintWriter ERR = new PrintWriter(System.err);
+    private static final TestsExecutor testExecutor = new TestsExecutor();
 
     @Test
     public void enlistTest() throws Exception {
@@ -64,18 +63,10 @@
             loadReader.close();
         }
         asterixLoad.execute();
-        File enlistFile = new File(ENLIST_FILE);
-        int dot = enlistFile.getName().lastIndexOf('.');
-        String resultFileName = enlistFile.getName().substring(0, dot + 1) + 
".adm";
-        File expectedFile = new File(PATH_EXPECTED + SEPARATOR + 
resultFileName);
-        File actualFile = new File(PATH_ACTUAL + SEPARATOR + resultFileName);
-        // Khurram
-        
//TestsUtils.runScriptAndCompareWithResult(AsterixHyracksIntegrationUtil.getHyracksClientConnection(),
-                //enlistFile, ERR, expectedFile, actualFile);
 
         AsterixHyracksIntegrationUtil.deinit();
         for (String d : ASTERIX_DATA_DIRS) {
-            TestsUtils.deleteRec(new File(d));
+            testExecutor.deleteRec(new File(d));
         }
         outdir.delete();
     }
diff --git 
a/asterix-app/src/test/java/edu/uci/ics/asterix/test/metadata/MetadataTest.java 
b/asterix-app/src/test/java/edu/uci/ics/asterix/test/metadata/MetadataTest.java
index 069c5aa..8235655 100644
--- 
a/asterix-app/src/test/java/edu/uci/ics/asterix/test/metadata/MetadataTest.java
+++ 
b/asterix-app/src/test/java/edu/uci/ics/asterix/test/metadata/MetadataTest.java
@@ -31,7 +31,7 @@
 import edu.uci.ics.asterix.common.config.AsterixPropertiesAccessor;
 import edu.uci.ics.asterix.common.config.AsterixTransactionProperties;
 import edu.uci.ics.asterix.common.config.GlobalConfig;
-import edu.uci.ics.asterix.test.aql.TestsUtils;
+import edu.uci.ics.asterix.test.aql.TestsExecutor;
 import edu.uci.ics.asterix.testframework.context.TestCaseContext;
 
 /**
@@ -46,6 +46,7 @@
     private static final String PATH_BASE = StringUtils.join(new String[] { 
"src", "test", "resources",
             "metadata" + File.separator }, File.separator);
     private static final String TEST_CONFIG_FILE_NAME = 
"asterix-build-configuration.xml";
+    private static final TestsExecutor testsExecutor = new TestsExecutor();
 
     private static AsterixTransactionProperties txnProperties;
 
@@ -74,7 +75,7 @@
 
         // clean up the files written by the ASTERIX storage manager
         for (String d : AsterixHyracksIntegrationUtil.getDataDirs()) {
-            TestsUtils.deleteRec(new File(d));
+            testsExecutor.deleteRec(new File(d));
         }
     }
 
@@ -103,7 +104,7 @@
 
     @Test
     public void test() throws Exception {
-        TestsUtils.executeTest(PATH_ACTUAL, tcCtx, null, false);
+        testsExecutor.executeTest(PATH_ACTUAL, tcCtx, null, false);
     }
 
 }
diff --git 
a/asterix-app/src/test/java/edu/uci/ics/asterix/test/runtime/ExecutionTest.java 
b/asterix-app/src/test/java/edu/uci/ics/asterix/test/runtime/ExecutionTest.java
index 8547eb0..eabd5db 100644
--- 
a/asterix-app/src/test/java/edu/uci/ics/asterix/test/runtime/ExecutionTest.java
+++ 
b/asterix-app/src/test/java/edu/uci/ics/asterix/test/runtime/ExecutionTest.java
@@ -35,7 +35,7 @@
 import edu.uci.ics.asterix.common.config.GlobalConfig;
 import edu.uci.ics.asterix.external.dataset.adapter.FileSystemBasedAdapter;
 import edu.uci.ics.asterix.external.util.IdentitiyResolverFactory;
-import edu.uci.ics.asterix.test.aql.TestsUtils;
+import edu.uci.ics.asterix.test.aql.TestsExecutor;
 import edu.uci.ics.asterix.testframework.context.TestCaseContext;
 
 /**
@@ -51,6 +51,7 @@
             new String[] { "src", "test", "resources", "runtimets" }, 
File.separator);
 
     protected static final String TEST_CONFIG_FILE_NAME = 
"asterix-build-configuration.xml";
+    protected static final TestsExecutor testsExecutor = new TestsExecutor();
 
     protected static AsterixTransactionProperties txnProperties;
 
@@ -96,7 +97,7 @@
         }
         // clean up the files written by the ASTERIX storage manager
         for (String d : AsterixHyracksIntegrationUtil.getDataDirs()) {
-            TestsUtils.deleteRec(new File(d));
+            testsExecutor.deleteRec(new File(d));
         }
         HDFSCluster.getInstance().cleanup();
     }
@@ -137,6 +138,6 @@
 
     @Test
     public void test() throws Exception {
-            TestsUtils.executeTest(PATH_ACTUAL, tcCtx, null, false);
+        testsExecutor.executeTest(PATH_ACTUAL, tcCtx, null, false);
     }
 }
diff --git 
a/asterix-app/src/test/java/edu/uci/ics/asterix/test/runtime/RepeatedTest.java 
b/asterix-app/src/test/java/edu/uci/ics/asterix/test/runtime/RepeatedTest.java
index faeb688..26d4964 100644
--- 
a/asterix-app/src/test/java/edu/uci/ics/asterix/test/runtime/RepeatedTest.java
+++ 
b/asterix-app/src/test/java/edu/uci/ics/asterix/test/runtime/RepeatedTest.java
@@ -28,13 +28,12 @@
 import org.junit.runners.model.FrameworkMethod;
 import org.junit.runners.model.Statement;
 
-import edu.uci.ics.asterix.test.aql.TestsUtils;
+import edu.uci.ics.asterix.test.aql.TestsExecutor;
 import edu.uci.ics.asterix.test.runtime.RepeatRule.Repeat;
 import edu.uci.ics.asterix.testframework.context.TestCaseContext;
 
 /**
  * Runs runtime test cases that have been identified in the 
repeatedtestsuite.xml.
- * 
  * Each test is run 10000 times.
  */
 class RepeatRule implements MethodRule {
@@ -80,8 +79,9 @@
 @RunWith(Parameterized.class)
 public class RepeatedTest extends ExecutionTest {
 
+    private final TestsExecutor testsExecutor = new TestsExecutor();
     private int count;
-    
+
     @Parameters
     public static Collection<Object[]> tests() throws Exception {
         Collection<Object[]> testArgs = 
buildTestsInXml(TestCaseContext.DEFAULT_REPEADED_TESTSUITE_XML_NAME);
@@ -96,10 +96,11 @@
     @Rule
     public RepeatRule repeatRule = new RepeatRule();
 
+    @Override
     @Test
     @Repeat(times = 10000)
     public void test() throws Exception {
         System.err.println("***** Test Count: " + (++count) + " ******");
-        TestsUtils.executeTest(PATH_ACTUAL, tcCtx, null, false);
+        testsExecutor.executeTest(PATH_ACTUAL, tcCtx, null, false);
     }
 }
diff --git 
a/asterix-common/src/test/java/edu/uci/ics/asterix/test/aql/TestsUtils.java 
b/asterix-common/src/test/java/edu/uci/ics/asterix/test/aql/TestsExecutor.java
similarity index 90%
rename from 
asterix-common/src/test/java/edu/uci/ics/asterix/test/aql/TestsUtils.java
rename to 
asterix-common/src/test/java/edu/uci/ics/asterix/test/aql/TestsExecutor.java
index 1da2e83..a5e9419 100644
--- a/asterix-common/src/test/java/edu/uci/ics/asterix/test/aql/TestsUtils.java
+++ 
b/asterix-common/src/test/java/edu/uci/ics/asterix/test/aql/TestsExecutor.java
@@ -49,15 +49,15 @@
 import edu.uci.ics.asterix.testframework.context.TestFileContext;
 import edu.uci.ics.asterix.testframework.xml.TestCase.CompilationUnit;
 
-public class TestsUtils {
+public class TestsExecutor {
 
-    private static final Logger LOGGER = 
Logger.getLogger(TestsUtils.class.getName());
-    private static Method managixExecuteMethod = null;
+    private static final Logger LOGGER = 
Logger.getLogger(TestsExecutor.class.getName());
+    private Method managixExecuteMethod = null;
 
     /**
      * Probably does not work well with symlinks.
      */
-    public static boolean deleteRec(File path) {
+    public boolean deleteRec(File path) {
         if (path.isDirectory()) {
             for (File f : path.listFiles()) {
                 if (!deleteRec(f)) {
@@ -68,8 +68,8 @@
         return path.delete();
     }
 
-    private static void runScriptAndCompareWithResult(File scriptFile, 
PrintWriter print, File expectedFile,
-            File actualFile) throws Exception {
+    private void runScriptAndCompareWithResult(File scriptFile, PrintWriter 
print, File expectedFile, File actualFile)
+            throws Exception {
         System.err.println("Expected results file: " + 
expectedFile.toString());
         BufferedReader readerExpected = new BufferedReader(new 
InputStreamReader(new FileInputStream(expectedFile),
                 "UTF-8"));
@@ -109,7 +109,7 @@
 
     }
 
-    private static boolean equalStrings(String s1, String s2) {
+    private boolean equalStrings(String s1, String s2) {
         String[] rowsOne = s1.split("\n");
         String[] rowsTwo = s2.split("\n");
 
@@ -175,7 +175,7 @@
     }
 
     // For tests where you simply want the byte-for-byte output.
-    private static void writeOutputToFile(File actualFile, InputStream 
resultStream) throws Exception {
+    private void writeOutputToFile(File actualFile, InputStream resultStream) 
throws Exception {
         byte[] buffer = new byte[10240];
         int len;
         java.io.FileOutputStream out = new 
java.io.FileOutputStream(actualFile);
@@ -188,7 +188,7 @@
         }
     }
 
-    private static int executeHttpMethod(HttpMethod method) throws Exception {
+    private int executeHttpMethod(HttpMethod method) throws Exception {
         HttpClient client = new HttpClient();
         int statusCode;
         try {
@@ -214,7 +214,7 @@
     }
 
     // Executes Query and returns results as JSONArray
-    public static InputStream executeQuery(String str, OutputFormat fmt) 
throws Exception {
+    public InputStream executeQuery(String str, OutputFormat fmt) throws 
Exception {
         final String url = "http://localhost:19002/query";;
 
         // Create a method instance.
@@ -230,7 +230,7 @@
 
     // To execute Update statements
     // Insert and Delete statements are executed here
-    public static void executeUpdate(String str) throws Exception {
+    public void executeUpdate(String str) throws Exception {
         final String url = "http://localhost:19002/update";;
 
         // Create a method instance.
@@ -245,7 +245,7 @@
     }
 
     //Executes AQL in either async or async-defer mode.
-    public static InputStream executeAnyAQLAsync(String str, boolean defer, 
OutputFormat fmt) throws Exception {
+    public InputStream executeAnyAQLAsync(String str, boolean defer, 
OutputFormat fmt) throws Exception {
         final String url = "http://localhost:19002/aql";;
 
         // Create a method instance.
@@ -265,12 +265,12 @@
 
         String theHandle = IOUtils.toString(resultStream, "UTF-8");
 
-        //take the handle and parse it so results can be retrieved 
+        //take the handle and parse it so results can be retrieved
         InputStream handleResult = getHandleResult(theHandle, fmt);
         return handleResult;
     }
 
-    private static InputStream getHandleResult(String handle, OutputFormat 
fmt) throws Exception {
+    private InputStream getHandleResult(String handle, OutputFormat fmt) 
throws Exception {
         final String url = "http://localhost:19002/query/result";;
 
         // Create a method instance.
@@ -291,7 +291,7 @@
     // create index statement
     // create dataverse statement
     // create function statement
-    public static void executeDDL(String str) throws Exception {
+    public void executeDDL(String str) throws Exception {
         final String url = "http://localhost:19002/ddl";;
 
         // Create a method instance.
@@ -317,11 +317,11 @@
             stringBuilder.append(line);
             stringBuilder.append(ls);
         }
-
+        reader.close();
         return stringBuilder.toString();
     }
 
-    public static void executeManagixCommand(String command) throws 
ClassNotFoundException, NoSuchMethodException,
+    public void executeManagixCommand(String command) throws 
ClassNotFoundException, NoSuchMethodException,
             SecurityException, IllegalAccessException, 
IllegalArgumentException, InvocationTargetException {
         if (managixExecuteMethod == null) {
             Class<?> clazz = 
Class.forName("edu.uci.ics.asterix.installer.test.AsterixInstallerIntegrationUtil");
@@ -370,8 +370,8 @@
         return s.toString();
     }
 
-    public static void executeTest(String actualPath, TestCaseContext 
testCaseCtx, ProcessBuilder pb,
-            boolean isDmlRecoveryTest) throws Exception {
+    public void executeTest(String actualPath, TestCaseContext testCaseCtx, 
ProcessBuilder pb, boolean isDmlRecoveryTest)
+            throws Exception {
 
         File testFile;
         File expectedResultFile;
@@ -389,12 +389,12 @@
             expectedResultFileCtxs = testCaseCtx.getExpectedResultFiles(cUnit);
             for (TestFileContext ctx : testFileCtxs) {
                 testFile = ctx.getFile();
-                statement = TestsUtils.readTestFile(testFile);
+                statement = TestsExecutor.readTestFile(testFile);
                 boolean failed = false;
                 try {
                     switch (ctx.getType()) {
                         case "ddl":
-                            TestsUtils.executeDDL(statement);
+                            executeDDL(statement);
                             break;
                         case "update":
                             //isDmlRecoveryTest: set IP address
@@ -403,7 +403,7 @@
                                         .replaceAll("nc1://", 
"127.0.0.1://../../../../../../asterix-app/");
                             }
 
-                            TestsUtils.executeUpdate(statement);
+                            executeUpdate(statement);
                             break;
                         case "query":
                         case "async":
@@ -431,10 +431,10 @@
 
                             File actualResultFile = 
testCaseCtx.getActualResultFile(cUnit, new File(actualPath));
                             actualResultFile.getParentFile().mkdirs();
-                            TestsUtils.writeOutputToFile(actualResultFile, 
resultStream);
+                            writeOutputToFile(actualResultFile, resultStream);
 
-                            TestsUtils.runScriptAndCompareWithResult(testFile, 
new PrintWriter(System.err),
-                                    expectedResultFile, actualResultFile);
+                            runScriptAndCompareWithResult(testFile, new 
PrintWriter(System.err), expectedResultFile,
+                                    actualResultFile);
                             LOGGER.info("[TEST]: " + 
testCaseCtx.getTestCase().getFilePath() + "/" + cUnit.getName()
                                     + " PASSED ");
 
@@ -449,7 +449,7 @@
                                     + 
testCaseCtx.getTestCase().getFilePath().replace(File.separator, "_") + "_"
                                     + cUnit.getName() + "_qbc.adm");
                             qbcFile.getParentFile().mkdirs();
-                            TestsUtils.writeOutputToFile(qbcFile, 
resultStream);
+                            writeOutputToFile(qbcFile, resultStream);
                             break;
                         case "txnqar": //qar represents query after recovery
                             resultStream = executeQuery(statement, 
OutputFormat.forCompilationUnit(cUnit));
@@ -457,16 +457,15 @@
                                     + 
testCaseCtx.getTestCase().getFilePath().replace(File.separator, "_") + "_"
                                     + cUnit.getName() + "_qar.adm");
                             qarFile.getParentFile().mkdirs();
-                            TestsUtils.writeOutputToFile(qarFile, 
resultStream);
-                            TestsUtils.runScriptAndCompareWithResult(testFile, 
new PrintWriter(System.err), qbcFile,
-                                    qarFile);
+                            writeOutputToFile(qarFile, resultStream);
+                            runScriptAndCompareWithResult(testFile, new 
PrintWriter(System.err), qbcFile, qarFile);
 
                             LOGGER.info("[TEST]: " + 
testCaseCtx.getTestCase().getFilePath() + "/" + cUnit.getName()
                                     + " PASSED ");
                             break;
                         case "txneu": //eu represents erroneous update
                             try {
-                                TestsUtils.executeUpdate(statement);
+                                executeUpdate(statement);
                             } catch (Exception e) {
                                 //An exception is expected.
                                 failed = true;
@@ -496,7 +495,7 @@
                             break;
                         case "errddl": // a ddlquery that expects error
                             try {
-                                TestsUtils.executeDDL(statement);
+                                executeDDL(statement);
                             } catch (Exception e) {
                                 // expected error happens
                                 failed = true;
@@ -509,7 +508,7 @@
                             System.err.println("...but that was expected.");
                             break;
                         default:
-                            throw new IllegalArgumentException("No statements 
of type " + ctx.getType());
+                            executeExternalRuntime(ctx);
                     }
 
                 } catch (Exception e) {
@@ -527,4 +526,9 @@
             }
         }
     }
+
+    // Executes external runtimes.
+    protected void executeExternalRuntime(TestFileContext ctx) throws 
Exception {
+        throw new IllegalArgumentException("No statements of type " + 
ctx.getType());
+    }
 }
diff --git 
a/asterix-installer/src/test/java/edu/uci/ics/asterix/installer/test/AsterixClusterLifeCycleIT.java
 
b/asterix-installer/src/test/java/edu/uci/ics/asterix/installer/test/AsterixClusterLifeCycleIT.java
index 8481ef7..38bbfbe 100644
--- 
a/asterix-installer/src/test/java/edu/uci/ics/asterix/installer/test/AsterixClusterLifeCycleIT.java
+++ 
b/asterix-installer/src/test/java/edu/uci/ics/asterix/installer/test/AsterixClusterLifeCycleIT.java
@@ -15,25 +15,24 @@
 package edu.uci.ics.asterix.installer.test;
 
 import java.io.File;
+import java.io.FilenameFilter;
 import java.io.IOException;
+import java.io.InputStream;
+import java.nio.charset.StandardCharsets;
 import java.util.ArrayList;
 import java.util.Collection;
-import java.util.logging.Logger;
 import java.util.List;
-import java.io.InputStream;
-import java.io.FilenameFilter;
-import java.lang.ProcessBuilder;
-import java.nio.charset.StandardCharsets;
+import java.util.logging.Logger;
 
-import org.junit.AfterClass;
-import org.junit.BeforeClass;
-import org.junit.Test;
-import org.junit.Assert;
-import org.junit.runners.Parameterized.Parameters;
 import org.apache.commons.io.IOUtils;
 import org.apache.commons.lang3.StringUtils;
+import org.junit.AfterClass;
+import org.junit.Assert;
+import org.junit.BeforeClass;
+import org.junit.Test;
+import org.junit.runners.Parameterized.Parameters;
 
-import edu.uci.ics.asterix.test.aql.TestsUtils;
+import edu.uci.ics.asterix.test.aql.TestsExecutor;
 import edu.uci.ics.asterix.testframework.context.TestCaseContext;
 
 public class AsterixClusterLifeCycleIT {
@@ -47,6 +46,7 @@
     private static final Logger LOGGER = 
Logger.getLogger(AsterixClusterLifeCycleIT.class.getName());
     private static List<TestCaseContext> testCaseCollection;
     private static File asterixProjectDir = new 
File(System.getProperty("user.dir"));
+    private static final TestsExecutor testsExecutor = new TestsExecutor();
 
     @BeforeClass
     public static void setUp() throws Exception {
@@ -79,7 +79,7 @@
         String pout = processOut(p);
         LOGGER.info(pout);
         Assert.assertTrue(checkOutput(pout, "ACTIVE"));
-        //TODO: I should check for 'WARNING' here, but issue 764 stops this 
from being reliable 
+        //TODO: I should check for 'WARNING' here, but issue 764 stops this 
from being reliable
         LOGGER.info("Test start active cluster instance PASSED");
 
         Process stop = managixInvoke("stop -n vagrant-ssh");
@@ -168,7 +168,7 @@
 
     public void test() throws Exception {
         for (TestCaseContext testCaseCtx : testCaseCollection) {
-            TestsUtils.executeTest(PATH_ACTUAL, testCaseCtx, null, false);
+            testsExecutor.executeTest(PATH_ACTUAL, testCaseCtx, null, false);
         }
     }
 
diff --git 
a/asterix-installer/src/test/java/edu/uci/ics/asterix/installer/test/AsterixExternalLibraryIT.java
 
b/asterix-installer/src/test/java/edu/uci/ics/asterix/installer/test/AsterixExternalLibraryIT.java
index bb41b05..135276d 100644
--- 
a/asterix-installer/src/test/java/edu/uci/ics/asterix/installer/test/AsterixExternalLibraryIT.java
+++ 
b/asterix-installer/src/test/java/edu/uci/ics/asterix/installer/test/AsterixExternalLibraryIT.java
@@ -23,7 +23,7 @@
 import org.junit.Test;
 
 import edu.uci.ics.asterix.event.model.AsterixInstance.State;
-import edu.uci.ics.asterix.test.aql.TestsUtils;
+import edu.uci.ics.asterix.test.aql.TestsExecutor;
 import edu.uci.ics.asterix.testframework.context.TestCaseContext;
 
 public class AsterixExternalLibraryIT {
@@ -36,6 +36,7 @@
             + "testlib-zip-binary-assembly.zip";
     private static final Logger LOGGER = 
Logger.getLogger(AsterixExternalLibraryIT.class.getName());
     private static List<TestCaseContext> testCaseCollection;
+    private static final TestsExecutor testsExecutor = new TestsExecutor();
 
     @BeforeClass
     public static void setUp() throws Exception {
@@ -59,7 +60,7 @@
     @Test
     public void test() throws Exception {
         for (TestCaseContext testCaseCtx : testCaseCollection) {
-            TestsUtils.executeTest(PATH_ACTUAL, testCaseCtx, null, false);
+            testsExecutor.executeTest(PATH_ACTUAL, testCaseCtx, null, false);
         }
     }
 
diff --git 
a/asterix-installer/src/test/java/edu/uci/ics/asterix/installer/test/AsterixLifecycleIT.java
 
b/asterix-installer/src/test/java/edu/uci/ics/asterix/installer/test/AsterixLifecycleIT.java
index fde30a9..8563f29 100644
--- 
a/asterix-installer/src/test/java/edu/uci/ics/asterix/installer/test/AsterixLifecycleIT.java
+++ 
b/asterix-installer/src/test/java/edu/uci/ics/asterix/installer/test/AsterixLifecycleIT.java
@@ -22,10 +22,10 @@
 
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
-import org.junit.Test;
-import org.junit.runners.Parameterized.Parameters;
 import org.junit.FixMethodOrder;
+import org.junit.Test;
 import org.junit.runners.MethodSorters;
+import org.junit.runners.Parameterized.Parameters;
 
 import edu.uci.ics.asterix.event.error.VerificationUtil;
 import edu.uci.ics.asterix.event.model.AsterixInstance;
@@ -33,11 +33,10 @@
 import edu.uci.ics.asterix.event.model.AsterixRuntimeState;
 import edu.uci.ics.asterix.event.service.ServiceProvider;
 import edu.uci.ics.asterix.installer.command.CommandHandler;
-import edu.uci.ics.asterix.test.aql.TestsUtils;
+import edu.uci.ics.asterix.test.aql.TestsExecutor;
 import edu.uci.ics.asterix.testframework.context.TestCaseContext;
 
 @FixMethodOrder(MethodSorters.NAME_ASCENDING)
-
 public class AsterixLifecycleIT {
 
     private static final int NUM_NC = 1;
@@ -46,6 +45,7 @@
     private static final String PATH_ACTUAL = "ittest/";
     private static final Logger LOGGER = 
Logger.getLogger(AsterixLifecycleIT.class.getName());
     private static List<TestCaseContext> testCaseCollection;
+    private static final TestsExecutor testsExecutor = new TestsExecutor();
 
     @BeforeClass
     public static void setUp() throws Exception {
@@ -125,7 +125,7 @@
     @Test
     public void test() throws Exception {
         for (TestCaseContext testCaseCtx : testCaseCollection) {
-            TestsUtils.executeTest(PATH_ACTUAL, testCaseCtx, null, false);
+            testsExecutor.executeTest(PATH_ACTUAL, testCaseCtx, null, false);
         }
     }
 
diff --git 
a/asterix-installer/src/test/java/edu/uci/ics/asterix/installer/transaction/DmlRecoveryIT.java
 
b/asterix-installer/src/test/java/edu/uci/ics/asterix/installer/transaction/DmlRecoveryIT.java
index c464e68..fe16bbc 100644
--- 
a/asterix-installer/src/test/java/edu/uci/ics/asterix/installer/transaction/DmlRecoveryIT.java
+++ 
b/asterix-installer/src/test/java/edu/uci/ics/asterix/installer/transaction/DmlRecoveryIT.java
@@ -16,16 +16,12 @@
 
 import java.io.File;
 import java.io.FilenameFilter;
-import java.io.InputStream;
-import java.io.PrintWriter;
 import java.util.ArrayList;
 import java.util.Collection;
-import java.util.List;
 import java.util.Map;
 import java.util.logging.Logger;
 
 import org.apache.commons.io.FileUtils;
-import org.codehaus.jackson.map.JsonMappingException;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
 import org.junit.Test;
@@ -33,10 +29,8 @@
 import org.junit.runners.Parameterized;
 import org.junit.runners.Parameterized.Parameters;
 
-import edu.uci.ics.asterix.test.aql.TestsUtils;
+import edu.uci.ics.asterix.test.aql.TestsExecutor;
 import edu.uci.ics.asterix.testframework.context.TestCaseContext;
-import edu.uci.ics.asterix.testframework.context.TestFileContext;
-import edu.uci.ics.asterix.testframework.xml.TestCase.CompilationUnit;
 
 @RunWith(Parameterized.class)
 public class DmlRecoveryIT {
@@ -50,14 +44,13 @@
 
     private TestCaseContext tcCtx;
     private static File asterixInstallerPath;
-    private static File asterixAppPath;
-    private static File asterixDBPath;
     private static File installerTargetPath;
     private static String managixHomeDirName;
     private static String managixHomePath;
     private static String scriptHomePath;
     private static ProcessBuilder pb;
     private static Map<String, String> env;
+    private static final TestsExecutor testsExecutor = new TestsExecutor();
 
     @BeforeClass
     public static void setUp() throws Exception {
@@ -65,8 +58,6 @@
         outdir.mkdirs();
 
         asterixInstallerPath = new File(System.getProperty("user.dir"));
-        asterixDBPath = new File(asterixInstallerPath.getParent());
-        asterixAppPath = new File(asterixDBPath.getAbsolutePath() + 
File.separator + "asterix-app");
         installerTargetPath = new File(asterixInstallerPath, "target");
         managixHomeDirName = installerTargetPath.list(new FilenameFilter() {
             @Override
@@ -85,12 +76,12 @@
                 + "resources" + File.separator + "transactionts" + 
File.separator + "scripts";
         env.put("SCRIPT_HOME", scriptHomePath);
 
-        TestsUtils.executeScript(pb, scriptHomePath + File.separator + 
"dml_recovery" + File.separator
+        TestsExecutor.executeScript(pb, scriptHomePath + File.separator + 
"dml_recovery" + File.separator
                 + "configure_and_validate.sh");
-        TestsUtils.executeScript(pb, scriptHomePath + File.separator + 
"dml_recovery" + File.separator
+        TestsExecutor.executeScript(pb, scriptHomePath + File.separator + 
"dml_recovery" + File.separator
                 + "stop_and_delete.sh");
 
-        TestsUtils.executeScript(pb, scriptHomePath + File.separator + 
"dml_recovery" + File.separator
+        TestsExecutor.executeScript(pb, scriptHomePath + File.separator + 
"dml_recovery" + File.separator
                 + "create_and_start.sh");
 
     }
@@ -99,9 +90,10 @@
     public static void tearDown() throws Exception {
         File outdir = new File(PATH_ACTUAL);
         FileUtils.deleteDirectory(outdir);
-        TestsUtils.executeScript(pb, scriptHomePath + File.separator + 
"dml_recovery" + File.separator
+        TestsExecutor.executeScript(pb, scriptHomePath + File.separator + 
"dml_recovery" + File.separator
                 + "stop_and_delete.sh");
-        TestsUtils.executeScript(pb, scriptHomePath + File.separator + "dml_recovery" + File.separator + "shutdown.sh");
+        TestsExecutor.executeScript(pb, scriptHomePath + File.separator + "dml_recovery" + File.separator
+                + "shutdown.sh");
 
     }
 
@@ -123,8 +115,6 @@
 
     @Test
     public void test() throws Exception {
-
-        TestsUtils.executeTest(PATH_ACTUAL, tcCtx, pb, true);
-
+        testsExecutor.executeTest(PATH_ACTUAL, tcCtx, pb, true);
     }
 }
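
Hunks like the two above are the mechanical side of item 2 in the change description: call sites that used the static TestsUtils.executeTest now go through a TestsExecutor instance, while executeScript is still invoked statically. The TestsExecutor class itself is not shown in this excerpt; the sketch below only illustrates the shape implied by these call sites (the method bodies and exact signatures are assumptions, not the committed code):

    package edu.uci.ics.asterix.test.aql;

    import edu.uci.ics.asterix.testframework.context.TestCaseContext;

    // Illustrative sketch only -- not the committed class. An instance-based
    // executor lets downstream test suites (e.g. Pregelix) subclass or compose
    // it instead of depending on static helpers.
    public class TestsExecutor {

        // Instance method, matching calls such as
        // testsExecutor.executeTest(PATH_ACTUAL, tcCtx, pb, true).
        public void executeTest(String actualPath, TestCaseContext tcCtx, ProcessBuilder pb,
                boolean isDmlRecoveryTest) throws Exception {
            // run the test case's compilation units and compare actual vs. expected results
        }

        // Still called statically at the call sites above.
        public static void executeScript(ProcessBuilder pb, String scriptPath) throws Exception {
            pb.command(scriptPath);
            pb.start().waitFor();
        }
    }
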
diff --git a/asterix-installer/src/test/java/edu/uci/ics/asterix/installer/transaction/RecoveryIT.java b/asterix-installer/src/test/java/edu/uci/ics/asterix/installer/transaction/RecoveryIT.java
index 018cb18..6187613 100644
--- a/asterix-installer/src/test/java/edu/uci/ics/asterix/installer/transaction/RecoveryIT.java
+++ b/asterix-installer/src/test/java/edu/uci/ics/asterix/installer/transaction/RecoveryIT.java
@@ -29,7 +29,7 @@
 import org.junit.runners.Parameterized;
 import org.junit.runners.Parameterized.Parameters;
 
-import edu.uci.ics.asterix.test.aql.TestsUtils;
+import edu.uci.ics.asterix.test.aql.TestsExecutor;
 import edu.uci.ics.asterix.testframework.context.TestCaseContext;
 
 @RunWith(Parameterized.class)
@@ -46,6 +46,7 @@
     private static String scriptHomePath;
     private static ProcessBuilder pb;
     private static Map<String, String> env;
+    private static final TestsExecutor testsExecutor = new TestsExecutor();
 
     @BeforeClass
     public static void setUp() throws Exception {
@@ -71,9 +72,9 @@
                 + "resources" + File.separator + "transactionts" + 
File.separator + "scripts";
         env.put("SCRIPT_HOME", scriptHomePath);
 
-        TestsUtils.executeScript(pb, scriptHomePath + File.separator + "setup_teardown" + File.separator
+        TestsExecutor.executeScript(pb, scriptHomePath + File.separator + "setup_teardown" + File.separator
                 + "configure_and_validate.sh");
-        TestsUtils.executeScript(pb, scriptHomePath + File.separator + "setup_teardown" + File.separator
+        TestsExecutor.executeScript(pb, scriptHomePath + File.separator + "setup_teardown" + File.separator
                 + "stop_and_delete.sh");
     }
 
@@ -84,9 +85,9 @@
         File dataCopyDir = new File(managixHomePath + File.separator + ".." + File.separator + ".." + File.separator
                 + "data");
         FileUtils.deleteDirectory(dataCopyDir);
-        TestsUtils.executeScript(pb, scriptHomePath + File.separator + "setup_teardown" + File.separator
+        TestsExecutor.executeScript(pb, scriptHomePath + File.separator + "setup_teardown" + File.separator
                 + "stop_and_delete.sh");
-        TestsUtils.executeScript(pb, scriptHomePath + File.separator + "setup_teardown" + File.separator
+        TestsExecutor.executeScript(pb, scriptHomePath + File.separator + "setup_teardown" + File.separator
                 + "shutdown.sh");
     }
 
@@ -106,7 +107,7 @@
 
     @Test
     public void test() throws Exception {
-        TestsUtils.executeTest(PATH_ACTUAL, tcCtx, pb, false);
+        testsExecutor.executeTest(PATH_ACTUAL, tcCtx, pb, false);
     }
 
 }
diff --git a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/utils/MetadataLockManager.java b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/utils/MetadataLockManager.java
index e3c9425..a5c03c2 100644
--- a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/utils/MetadataLockManager.java
+++ b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/utils/MetadataLockManager.java
@@ -221,7 +221,7 @@
     public void releaseFeedWriteLock(String feedName) {
         feedsLocks.get(feedName).writeLock().unlock();
     }
-    
+
     public void acquireFeedPolicyWriteLock(String policyName) {
         ReentrantReadWriteLock fLock = feedPolicyLocks.get(policyName);
         if (fLock == null) {
@@ -440,7 +440,7 @@
         releaseFeedWriteLock(feedFullyQualifiedName);
         releaseDataverseReadLock(dataverseName);
     }
-    
+
     public void dropFeedPolicyBegin(String dataverseName, String policyName) {
         releaseFeedWriteLock(policyName);
         releaseDataverseReadLock(dataverseName);
@@ -482,7 +482,7 @@
         releaseFeedPolicyWriteLock(policyName);
         releaseDataverseReadLock(dataverseName);
     }
-    
+
     public void disconnectFeedBegin(String dataverseName, String datasetFullyQualifiedName,
             String feedFullyQualifiedName) {
         acquireDataverseReadLock(dataverseName);
@@ -495,14 +495,13 @@
         releaseDatasetReadLock(datasetFullyQualifiedName);
         releaseDataverseReadLock(dataverseName);
     }
-    
-    public void subscribeFeedBegin(String dataverseName, String datasetFullyQualifiedNameFrom,
-            String feedFullyQualifiedName) {
+
+    public void subscribeFeedBegin(String dataverseName, String datasetFullyQualifiedName, String feedFullyQualifiedName) {
         acquireDataverseReadLock(dataverseName);
         acquireDatasetReadLock(datasetFullyQualifiedName);
         acquireFeedReadLock(feedFullyQualifiedName);
     }
-    
+
     public void subscribeFeedEnd(String dataverseName, String datasetFullyQualifiedName, String feedFullyQualifiedName) {
         releaseFeedReadLock(feedFullyQualifiedName);
         releaseDatasetReadLock(datasetFullyQualifiedName);
@@ -569,27 +568,6 @@
 
     public void refreshDatasetEnd(String dataverseName, String datasetFullyQualifiedName) {
         releaseExternalDatasetRefreshLock(datasetFullyQualifiedName);
-        releaseDataverseReadLock(dataverseName);
-    }
-
-    public void pregelixBegin(String dataverseName, String datasetFullyQualifiedNameFrom,
-            String datasetFullyQualifiedNameTo) {
-        acquireDataverseReadLock(dataverseName);
-
-        if (datasetFullyQualifiedNameFrom.compareTo(datasetFullyQualifiedNameTo) < 0) {
-            acquireDatasetReadLock(datasetFullyQualifiedNameFrom);
-            acquireDatasetWriteLock(datasetFullyQualifiedNameTo);
-        } else {
-            acquireDatasetWriteLock(datasetFullyQualifiedNameTo);
-            acquireDatasetReadLock(datasetFullyQualifiedNameFrom);
-        }
-    }
-
-    public void pregelixEnd(String dataverseName, String datasetFullyQualifiedNameFrom,
-            String datasetFullyQualifiedNameTo) {
-
-        releaseDatasetReadLock(datasetFullyQualifiedNameFrom);
-        releaseDatasetWriteLock(datasetFullyQualifiedNameTo);
         releaseDataverseReadLock(dataverseName);
     }
 }
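
The pregelixBegin/pregelixEnd pair removed above (presumably superseded elsewhere in this change by a variant that can span two dataverses, which is not shown in this section) encodes a standard deadlock-avoidance idiom: when one statement must lock two datasets, the locks are acquired in a fixed lexicographic order so that two concurrent statements touching the same pair cannot wait on each other. A self-contained illustration of that idiom (not AsterixDB code):

    import java.util.concurrent.locks.ReentrantReadWriteLock;

    // Illustration only: take a read lock on the source and a write lock on the
    // target in a fixed name order, so concurrent callers that lock the same
    // pair of datasets cannot deadlock.
    public class OrderedLockExample {

        public static void lockPair(String fromName, ReentrantReadWriteLock fromLock,
                String toName, ReentrantReadWriteLock toLock) {
            if (fromName.compareTo(toName) < 0) {
                fromLock.readLock().lock();
                toLock.writeLock().lock();
            } else {
                toLock.writeLock().lock();
                fromLock.readLock().lock();
            }
        }

        public static void unlockPair(ReentrantReadWriteLock fromLock, ReentrantReadWriteLock toLock) {
            // Release order does not affect deadlock avoidance.
            fromLock.readLock().unlock();
            toLock.writeLock().unlock();
        }
    }
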
diff --git a/asterix-tools/pom.xml b/asterix-tools/pom.xml
index 7daa940..72f2d09 100644
--- a/asterix-tools/pom.xml
+++ b/asterix-tools/pom.xml
@@ -230,13 +230,13 @@
                <dependency>
                        <groupId>org.apache.httpcomponents</groupId>
                        <artifactId>httpclient</artifactId>
-                       <version>4.2.2</version>
+                       <version>4.3</version>
                        <scope>compile</scope>
                </dependency>
                <dependency>
                        <groupId>org.apache.httpcomponents</groupId>
                        <artifactId>httpcore</artifactId>
-                       <version>4.2.2</version>
+                       <version>4.3</version>
                        <scope>compile</scope>
                </dependency>
        </dependencies>
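
The asterix-tools hunk above only bumps httpclient and httpcore from 4.2.2 to 4.3; the patch does not state the motivation. For reference, 4.3 is the first HttpClient release with the builder-style HttpClients/CloseableHttpClient entry points, sketched below (the query endpoint URL is a placeholder, not something this change configures):

    import org.apache.http.client.methods.CloseableHttpResponse;
    import org.apache.http.client.methods.HttpGet;
    import org.apache.http.impl.client.CloseableHttpClient;
    import org.apache.http.impl.client.HttpClients;
    import org.apache.http.util.EntityUtils;

    // Illustration of 4.3-style client construction; the URL is a placeholder.
    public class HttpClient43Example {
        public static void main(String[] args) throws Exception {
            try (CloseableHttpClient client = HttpClients.createDefault()) {
                CloseableHttpResponse response = client.execute(new HttpGet("http://localhost:19002/query"));
                try {
                    System.out.println(EntityUtils.toString(response.getEntity()));
                } finally {
                    response.close();
                }
            }
        }
    }
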
diff --git a/asterix-yarn/src/test/java/edu/uci/ics/asterix/aoya/test/AsterixYARNLibraryTestIT.java b/asterix-yarn/src/test/java/edu/uci/ics/asterix/aoya/test/AsterixYARNLibraryTestIT.java
index 0b4dc63..3845344 100644
--- a/asterix-yarn/src/test/java/edu/uci/ics/asterix/aoya/test/AsterixYARNLibraryTestIT.java
+++ b/asterix-yarn/src/test/java/edu/uci/ics/asterix/aoya/test/AsterixYARNLibraryTestIT.java
@@ -24,11 +24,10 @@
 import org.junit.Test;
 
 import edu.uci.ics.asterix.aoya.AsterixYARNClient;
-import edu.uci.ics.asterix.test.aql.TestsUtils;
+import edu.uci.ics.asterix.test.aql.TestsExecutor;
 import edu.uci.ics.asterix.testframework.context.TestCaseContext;
 
 public class AsterixYARNLibraryTestIT {
-    private static final String LIBRARY_NAME = "testlib";
     private static final String LIBRARY_DATAVERSE = "externallibtest";
     private static final String INSTANCE_NAME = "asterix-lib-test";
     private static final String PATH_BASE = "src/test/resources/library";
@@ -42,6 +41,7 @@
     private static List<TestCaseContext> testCaseCollection;
     private static final String LIBRARY_PATH = "asterix-external-data" + File.separator + "target" + File.separator
             + "testlib-zip-binary-assembly.zip";
+    private static final TestsExecutor testsExecutor = new TestsExecutor();
 
     @BeforeClass
     public static void setUp() throws Exception {
@@ -60,7 +60,8 @@
 
         String asterixExternalLibraryPath = new File(System.getProperty("user.dir")).getParentFile().getAbsolutePath()
                 + File.separator + LIBRARY_PATH;
-        command = "-n " + INSTANCE_NAME + " -l " + asterixExternalLibraryPath + " -ld " + LIBRARY_DATAVERSE + " -bc " + parameterPath + " libinstall";
+        command = "-n " + INSTANCE_NAME + " -l " + asterixExternalLibraryPath + " -ld " + LIBRARY_DATAVERSE + " -bc "
+                + parameterPath + " libinstall";
         executeAoyaCommand(command);
 
         command = "-n " + INSTANCE_NAME + " -bc " + parameterPath + " start";
@@ -72,7 +73,8 @@
 
     @AfterClass
     public static void tearDown() throws Exception {
-        String command = "-n " + INSTANCE_NAME + " -zip " +  aoyaServerPath + " -f" + " -bc " + parameterPath + " destroy";
+        String command = "-n " + INSTANCE_NAME + " -zip " + aoyaServerPath + " -f" + " -bc " + parameterPath
+                + " destroy";
         executeAoyaCommand(command);
         instance.tearDown();
     }
@@ -80,7 +82,7 @@
     @Test
     public void test() throws Exception {
         for (TestCaseContext testCaseCtx : testCaseCollection) {
-            TestsUtils.executeTest(PATH_ACTUAL, testCaseCtx, null, false);
+            testsExecutor.executeTest(PATH_ACTUAL, testCaseCtx, null, false);
         }
     }
 
diff --git a/asterix-yarn/src/test/java/edu/uci/ics/asterix/aoya/test/AsterixYARNLifecycleIT.java b/asterix-yarn/src/test/java/edu/uci/ics/asterix/aoya/test/AsterixYARNLifecycleIT.java
index 1407e78..f1a6cc0 100644
--- a/asterix-yarn/src/test/java/edu/uci/ics/asterix/aoya/test/AsterixYARNLifecycleIT.java
+++ b/asterix-yarn/src/test/java/edu/uci/ics/asterix/aoya/test/AsterixYARNLifecycleIT.java
@@ -15,45 +15,21 @@
 package edu.uci.ics.asterix.aoya.test;
 
 import java.io.File;
-import java.io.FileOutputStream;
-import java.io.FilenameFilter;
 import java.util.ArrayList;
 import java.util.Collection;
 import java.util.List;
 import java.util.logging.Logger;
 
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.FileUtil;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hdfs.MiniDFSCluster;
-import org.apache.hadoop.yarn.api.ApplicationConstants;
-import org.apache.hadoop.yarn.api.ApplicationConstants.Environment;
 import org.apache.hadoop.yarn.conf.YarnConfiguration;
-import org.apache.hadoop.yarn.server.MiniYARNCluster;
-import org.apache.hadoop.yarn.server.resourcemanager.scheduler.ResourceScheduler;
-import org.apache.hadoop.yarn.server.resourcemanager.scheduler.fifo.FifoScheduler;
 import org.junit.AfterClass;
-import org.junit.Assert;
 import org.junit.BeforeClass;
-import org.junit.Test;
-import org.junit.runners.Parameterized.Parameters;
 import org.junit.FixMethodOrder;
+import org.junit.Test;
 import org.junit.runners.MethodSorters;
+import org.junit.runners.Parameterized.Parameters;
 
 import edu.uci.ics.asterix.aoya.AsterixYARNClient;
 import edu.uci.ics.asterix.aoya.Utils;
-import edu.uci.ics.asterix.event.error.VerificationUtil;
-import edu.uci.ics.asterix.event.model.AsterixInstance;
-import edu.uci.ics.asterix.event.model.AsterixInstance.State;
-import edu.uci.ics.asterix.event.model.AsterixRuntimeState;
-import edu.uci.ics.asterix.event.schema.yarnCluster.Cluster;
-import edu.uci.ics.asterix.event.schema.yarnCluster.Node;
-import edu.uci.ics.asterix.event.service.ServiceProvider;
-import edu.uci.ics.asterix.test.aql.TestsUtils;
-import edu.uci.ics.asterix.aoya.test.YARNCluster;
-import edu.uci.ics.asterix.common.configuration.AsterixConfiguration;
-import edu.uci.ics.asterix.testframework.context.TestCaseContext;
 
 @FixMethodOrder(MethodSorters.NAME_ASCENDING)
 public class AsterixYARNLifecycleIT {
@@ -137,7 +113,8 @@
 
     @Test
     public void test_8_DeleteActiveInstance() throws Exception {
-        String command = "-n " + INSTANCE_NAME + " -zip " + aoyaServerPath + " -f" + " -bc " + parameterPath + " destroy";
+        String command = "-n " + INSTANCE_NAME + " -zip " + aoyaServerPath + " -f" + " -bc " + parameterPath
+                + " destroy";
         executeAoyaCommand(command);
     }
 
diff --git a/pom.xml b/pom.xml
index 6f4217e..fa6c65e 100644
--- a/pom.xml
+++ b/pom.xml
@@ -144,7 +144,18 @@
                     </includes>
                 </configuration>
             </plugin>
-
+            <plugin>
+                <groupId>org.apache.maven.plugins</groupId>
+                <artifactId>maven-jar-plugin</artifactId>
+                <version>2.6</version>
+                <executions>
+                    <execution>
+                        <goals>
+                            <goal>test-jar</goal>
+                        </goals>
+                    </execution>
+                </executions>
+            </plugin>
         </plugins>
     </build>
 

-- 
To view, visit https://asterix-gerrit.ics.uci.edu/325
To unsubscribe, visit https://asterix-gerrit.ics.uci.edu/settings

Gerrit-MessageType: newchange
Gerrit-Change-Id: I7d167b64bf9ec754182b5b2fe44dfc7e5908c686
Gerrit-PatchSet: 1
Gerrit-Project: asterixdb
Gerrit-Branch: master
Gerrit-Owner: Yingyi Bu <[email protected]>
