Repository: airavata
Updated Branches:
  refs/heads/master 742edee5c -> 702399169


http://git-wip-us.apache.org/repos/asf/airavata/blob/70239916/modules/gfac/gfac-gram/src/test/java/org/apache/airavata/core/gfac/services/impl/GramProviderTestWithMyProxyAuth.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-gram/src/test/java/org/apache/airavata/core/gfac/services/impl/GramProviderTestWithMyProxyAuth.java b/modules/gfac/gfac-gram/src/test/java/org/apache/airavata/core/gfac/services/impl/GramProviderTestWithMyProxyAuth.java
deleted file mode 100644
index 6d5427a..0000000
--- a/modules/gfac/gfac-gram/src/test/java/org/apache/airavata/core/gfac/services/impl/GramProviderTestWithMyProxyAuth.java
+++ /dev/null
@@ -1,225 +0,0 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- *
-*/
-package org.apache.airavata.core.gfac.services.impl;
-
-import java.io.File;
-import java.net.URL;
-import java.util.ArrayList;
-import java.util.Date;
-import java.util.List;
-import java.util.UUID;
-
-import org.apache.airavata.commons.gfac.type.ActualParameter;
-import org.apache.airavata.commons.gfac.type.ApplicationDescription;
-import org.apache.airavata.commons.gfac.type.HostDescription;
-import org.apache.airavata.commons.gfac.type.MappingFactory;
-import org.apache.airavata.commons.gfac.type.ServiceDescription;
-import org.apache.airavata.gfac.GFacConfiguration;
-import org.apache.airavata.gfac.GFacException;
-import org.apache.airavata.gfac.core.context.ApplicationContext;
-import org.apache.airavata.gfac.core.context.JobExecutionContext;
-import org.apache.airavata.gfac.core.context.MessageContext;
-import org.apache.airavata.gfac.core.cpi.BetterGfacImpl;
-import org.apache.airavata.gfac.gram.security.GSISecurityContext;
-import org.apache.airavata.schemas.gfac.ApplicationDeploymentDescriptionType;
-import org.apache.airavata.schemas.gfac.GlobusHostType;
-import org.apache.airavata.schemas.gfac.HpcApplicationDeploymentType;
-import org.apache.airavata.schemas.gfac.InputParameterType;
-import org.apache.airavata.schemas.gfac.JobTypeType;
-import org.apache.airavata.schemas.gfac.OutputParameterType;
-import org.apache.airavata.schemas.gfac.ProjectAccountType;
-import org.apache.airavata.schemas.gfac.QueueType;
-import org.apache.airavata.schemas.gfac.StringParameterType;
-import org.apache.airavata.schemas.gfac.URIParameterType;
-import org.junit.Assert;
-import org.junit.Before;
-import org.junit.Test;
-
-public class GramProviderTestWithMyProxyAuth extends GFacBaseTestWithMyProxyAuth {
-    private JobExecutionContext jobExecutionContext;
-
-
-//    private static final String hostAddress = "blacklight.psc.teragrid.org";
-//    private static final String hostName = "Blacklight";
-//    private static final String gridftpAddress = "gsiftp://gridftp.blacklight.psc.teragrid.org:2812";
-//    private static final String gramAddress = "";
-
-    //FIXME: move job properties to configuration file
-    private static final String hostAddress = "trestles.sdsc.edu";
-    private static final String hostName = "trestles";
-    private static final String gridftpAddress = "gsiftp://trestles.sdsc.edu:2811/";
-    private static final String gramAddress = "trestles-login2.sdsc.edu:2119/jobmanager-pbstest2";
-
-    @Before
-    public void setUp() throws Exception {
-        URL resource = GramProviderTestWithMyProxyAuth.class.getClassLoader().getResource(org.apache.airavata.common.utils.Constants.GFAC_CONFIG_XML);
-        assert resource != null;
-        System.out.println(resource.getFile());
-        GFacConfiguration gFacConfiguration = GFacConfiguration.create(new File(resource.getPath()),null);
-//        gFacConfiguration.setMyProxyLifeCycle(3600);
-//        gFacConfiguration.setMyProxyServer("myproxy.teragrid.org");
-//        gFacConfiguration.setMyProxyUser("*****");
-//        gFacConfiguration.setMyProxyPassphrase("*****");
-//        gFacConfiguration.setTrustedCertLocation("./certificates");
-//        // have to set InFlow handlers and OutFlow handlers
-//        gFacConfiguration.setInHandlers(Arrays.asList(new String[] {"GramDirectorySetupHandler","GridFTPInputHandler"}));
-//        gFacConfiguration.setOutHandlers(Arrays.asList(new String[] {"GridFTPOutputHandler"}));
-
-        /*
-           * Host
-           */
-        HostDescription host = new HostDescription(GlobusHostType.type);
-        host.getType().setHostAddress(hostAddress);
-        host.getType().setHostName(hostName);
-        ((GlobusHostType)host.getType()).setGlobusGateKeeperEndPointArray(new String[]{gramAddress});
-        ((GlobusHostType)host.getType()).setGridFTPEndPointArray(new String[]{gridftpAddress});
-        /*
-           * App
-           */
-        ApplicationDescription appDesc = new ApplicationDescription(HpcApplicationDeploymentType.type);
-        HpcApplicationDeploymentType app = (HpcApplicationDeploymentType)appDesc.getType();
-        ApplicationDeploymentDescriptionType.ApplicationName name = ApplicationDeploymentDescriptionType.ApplicationName.Factory.newInstance();
-        name.setStringValue("EchoLocal");
-        app.setApplicationName(name);
-        ProjectAccountType projectAccountType = app.addNewProjectAccount();
-        projectAccountType.setProjectAccountNumber("sds128");
-
-        QueueType queueType = app.addNewQueue();
-        queueType.setQueueName("development");
-
-        app.setCpuCount(1);
-        app.setJobType(JobTypeType.SERIAL);
-        app.setNodeCount(1);
-        app.setProcessorsPerNode(1);
-
-        /*
-           * Use bat file if it is compiled on Windows
-           */
-        app.setExecutableLocation("/bin/echo");
-
-        /*
-           * Default tmp location
-           */
-        String tempDir = "/scratch/01437/ogce/test/";
-        String date = (new Date()).toString();
-        date = date.replaceAll(" ", "_");
-        date = date.replaceAll(":", "_");
-
-        tempDir = tempDir + File.separator
-                + "SimpleEcho" + "_" + date + "_" + UUID.randomUUID();
-
-        System.out.println(tempDir);
-        app.setScratchWorkingDirectory(tempDir);
-        app.setStaticWorkingDirectory(tempDir);
-        app.setInputDataDirectory(tempDir + File.separator + "inputData");
-        app.setOutputDataDirectory(tempDir + File.separator + "outputData");
-        app.setStandardOutput(tempDir + File.separator + app.getApplicationName().getStringValue() + ".stdout");
-        app.setStandardError(tempDir + File.separator + app.getApplicationName().getStringValue() + ".stderr");
-
-
-        /*
-           * Service
-           */
-        ServiceDescription serv = new ServiceDescription();
-        serv.getType().setName("SimpleEcho");
-
-        List<InputParameterType> inputList = new ArrayList<InputParameterType>();
-
-        InputParameterType input = InputParameterType.Factory.newInstance();
-        input.setParameterName("echo_input");
-        input.setParameterType(StringParameterType.Factory.newInstance());
-        inputList.add(input);
-
-        InputParameterType input1 = InputParameterType.Factory.newInstance();
-        input.setParameterName("myinput");
-        URIParameterType uriType = URIParameterType.Factory.newInstance();
-        uriType.setValue("gsiftp://gridftp1.ls4.tacc.utexas.edu:2811//home1/01437/ogce/gram_20130215.log");
-        input.setParameterType(uriType);
-        inputList.add(input1);
-
-
-        InputParameterType[] inputParamList = inputList.toArray(new InputParameterType[inputList.size()]);
-        List<OutputParameterType> outputList = new ArrayList<OutputParameterType>();
-        OutputParameterType output = OutputParameterType.Factory.newInstance();
-        output.setParameterName("echo_output");
-        output.setParameterType(StringParameterType.Factory.newInstance());
-        outputList.add(output);
-
-        OutputParameterType[] outputParamList = outputList
-                .toArray(new OutputParameterType[outputList.size()]);
-
-        serv.getType().setInputParametersArray(inputParamList);
-        serv.getType().setOutputParametersArray(outputParamList);
-
-        jobExecutionContext = new JobExecutionContext(gFacConfiguration,serv.getType().getName());
-        // Adding security context
-        jobExecutionContext.addSecurityContext(GSISecurityContext.GSI_SECURITY_CONTEXT, getSecurityContext());
-        ApplicationContext applicationContext = new ApplicationContext();
-        jobExecutionContext.setApplicationContext(applicationContext);
-        applicationContext.setServiceDescription(serv);
-        applicationContext.setApplicationDeploymentDescription(appDesc);
-        applicationContext.setHostDescription(host);
-
-        MessageContext inMessage = new MessageContext();
-        ActualParameter echo_input = new ActualParameter();
-        ((StringParameterType)echo_input.getType()).setValue("echo_output=hello");
-        inMessage.addParameter("echo_input", echo_input);
-
-        // added extra
-        ActualParameter copy_input = new ActualParameter();
-        copy_input.getType().changeType(URIParameterType.type);
-        ((URIParameterType)copy_input.getType()).setValue("file:///tmp/tmpstrace");
-
-        ActualParameter outlocation = new ActualParameter();
-        ((StringParameterType)outlocation.getType()).setValue("./outputData/.");
-        inMessage.addParameter("copy_input", copy_input);
-        inMessage.addParameter("outputlocation", outlocation);
-
-        // added extra
-
-
-
-        jobExecutionContext.setInMessageContext(inMessage);
-
-        MessageContext outMessage = new MessageContext();
-        ActualParameter echo_out = new ActualParameter();
-//             ((StringParameterType)echo_input.getType()).setValue("echo_output=hello");
-        outMessage.addParameter("echo_output", echo_out);
-
-        jobExecutionContext.setOutMessageContext(outMessage);
-
-    }
-
-    @Test
-    public void testGramProvider() throws GFacException {
-        BetterGfacImpl gFacAPI = new BetterGfacImpl();
-        gFacAPI.submitJob(jobExecutionContext.getExperimentID(), jobExecutionContext.getTaskData().getTaskID(), jobExecutionContext.getGatewayID());
-        MessageContext outMessageContext = jobExecutionContext.getOutMessageContext();
-        Assert.assertEquals(MappingFactory.toString((ActualParameter)outMessageContext.getParameter("echo_output")), "hello");
-    }
-
-    @Test
-    public void testGetJdbcUrl()  {
-        System.out.println(getJDBCUrl());
-    }
-}
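
For reference, the deleted test exercised the full GRAM submission path: build a GFacConfiguration from gfac-config.xml, assemble a JobExecutionContext with host, application, and service descriptions, attach a GSISecurityContext, and hand the job to BetterGfacImpl. Condensed to its essentials (method-body sketch; getSecurityContext() came from the GFacBaseTestWithMyProxyAuth base class, and none of these gfac-gram APIs exist on master after this commit):

    // Sketch only: condensed from the deleted test above, pre-removal APIs.
    GFacConfiguration conf = GFacConfiguration.create(new File("gfac-config.xml"), null);
    JobExecutionContext context = new JobExecutionContext(conf, "SimpleEcho");
    context.addSecurityContext(GSISecurityContext.GSI_SECURITY_CONTEXT, getSecurityContext());
    BetterGfacImpl gfac = new BetterGfacImpl();
    gfac.submitJob(context.getExperimentID(), context.getTaskData().getTaskID(), context.getGatewayID());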

http://git-wip-us.apache.org/repos/asf/airavata/blob/70239916/modules/gfac/gfac-gram/src/test/resources/PBSTemplate.xslt
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-gram/src/test/resources/PBSTemplate.xslt b/modules/gfac/gfac-gram/src/test/resources/PBSTemplate.xslt
deleted file mode 100644
index e749e9c..0000000
--- a/modules/gfac/gfac-gram/src/test/resources/PBSTemplate.xslt
+++ /dev/null
@@ -1,73 +0,0 @@
-<!--Licensed to the Apache Software Foundation (ASF) under one or more contributor license agreements. See the NOTICE file
-       distributed with this work for additional information regarding copyright ownership. The ASF licenses this file to you under
-       the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may
-       obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to
-       in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
-       ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under
-       the License. -->
-<xsl:stylesheet version="1.0" xmlns:xsl="http://www.w3.org/1999/XSL/Transform"; 
xmlns:ns="http://airavata.apache.org/gsi/ssh/2012/12";>
-<xsl:output method="text" />
-<xsl:template match="/ns:JobDescriptor">
-#! /bin/sh
-# PBS batch job script built by Globus job manager
-#   <xsl:choose>
-    <xsl:when test="ns:shellName">
-##PBS -S <xsl:value-of select="ns:shellName"/>
-    </xsl:when></xsl:choose>
-    <xsl:choose>
-    <xsl:when test="ns:queueName">
-#PBS -q <xsl:value-of select="ns:queueName"/>
-    </xsl:when>
-    </xsl:choose>
-    <xsl:choose>
-    <xsl:when test="ns:mailOptions">
-#PBS -m <xsl:value-of select="ns:mailOptions"/>
-    </xsl:when>
-    </xsl:choose>
-    <xsl:choose>
-<xsl:when test="ns:acountString">
-#PBS -A <xsl:value-of select="ns:acountString"/>
-    </xsl:when>
-    </xsl:choose>
-    <xsl:choose>
-    <xsl:when test="ns:maxWallTime">
-#PBS -l walltime=<xsl:value-of select="ns:maxWallTime"/>
-    </xsl:when>
-    </xsl:choose>
-    <xsl:choose>
-    <xsl:when test="ns:standardOutFile">
-#PBS -o <xsl:value-of select="ns:standardOutFile"/>
-    </xsl:when>
-    </xsl:choose>
-    <xsl:choose>
-    <xsl:when test="ns:standardOutFile">
-#PBS -e <xsl:value-of select="ns:standardErrorFile"/>
-    </xsl:when>
-    </xsl:choose>
-    <xsl:choose>
-    <xsl:when test="(ns:nodes) and (ns:processesPerNode)">
-#PBS -l nodes=<xsl:value-of select="ns:nodes"/>:ppn=<xsl:value-of select="ns:processesPerNode"/>
-<xsl:text>&#xa;</xsl:text>
-    </xsl:when>
-    </xsl:choose>
-<xsl:for-each select="ns:exports/ns:name">
-<xsl:value-of select="."/>=<xsl:value-of 
select="./@value"/><xsl:text>&#xa;</xsl:text>
-export<xsl:text>   </xsl:text><xsl:value-of select="."/>
-<xsl:text>&#xa;</xsl:text>
-</xsl:for-each>
-<xsl:for-each select="ns:preJobCommands/ns:command">
-      <xsl:value-of select="."/><xsl:text>   </xsl:text>
-    </xsl:for-each>
-cd <xsl:text>   </xsl:text><xsl:value-of select="ns:workingDirectory"/><xsl:text>&#xa;</xsl:text>
-    <xsl:choose><xsl:when test="ns:jobSubmitterCommand">
-<xsl:value-of select="ns:jobSubmitterCommand"/><xsl:text>   
</xsl:text></xsl:when></xsl:choose><xsl:value-of 
select="ns:executablePath"/><xsl:text>   </xsl:text>
-<xsl:for-each select="ns:inputs/ns:input">
-      <xsl:value-of select="."/><xsl:text>   </xsl:text>
-    </xsl:for-each>
-<xsl:for-each select="ns:postJobCommands/ns:command">
-      <xsl:value-of select="."/><xsl:text>   </xsl:text>
-</xsl:for-each>
-
-</xsl:template>
-
-</xsl:stylesheet>
\ No newline at end of file
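
For reference, the deleted stylesheet rendered a ns:JobDescriptor document into a PBS batch script. Applying such a template needs nothing beyond the standard javax.xml.transform API; a minimal sketch, where the input and output file names are placeholders:

    import java.io.File;
    import javax.xml.transform.Transformer;
    import javax.xml.transform.TransformerFactory;
    import javax.xml.transform.stream.StreamResult;
    import javax.xml.transform.stream.StreamSource;

    public class RenderPbsScript {
        public static void main(String[] args) throws Exception {
            // File names are placeholders for illustration.
            Transformer transformer = TransformerFactory.newInstance()
                    .newTransformer(new StreamSource(new File("PBSTemplate.xslt")));
            transformer.transform(new StreamSource(new File("job-descriptor.xml")),
                    new StreamResult(new File("job.pbs")));
        }
    }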

http://git-wip-us.apache.org/repos/asf/airavata/blob/70239916/modules/gfac/gfac-gram/src/test/resources/logging.properties
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-gram/src/test/resources/logging.properties b/modules/gfac/gfac-gram/src/test/resources/logging.properties
deleted file mode 100644
index 0584d38..0000000
--- a/modules/gfac/gfac-gram/src/test/resources/logging.properties
+++ /dev/null
@@ -1,42 +0,0 @@
-#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements. See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership. The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied. See the License for the
-# specific language governing permissions and limitations
-# under the License.
-#
-#
-#default/fallback log4j configuration
-#
-
-# Set root logger level to INFO and its appenders to A1 and A2.
-log4j.rootLogger=INFO, A1, A2
-
-# A1 is set to be a rolling file appender with default params
-log4j.appender.A1=org.apache.log4j.RollingFileAppender
-log4j.appender.A1.File=target/seclogs.txt
-
-# A1 uses PatternLayout.
-log4j.appender.A1.layout=org.apache.log4j.PatternLayout
-log4j.appender.A1.layout.ConversionPattern=%d [%t] %-5p %c %x - %m%n
-
-# A2 is a console appender
-log4j.appender.A2=org.apache.log4j.ConsoleAppender
-
-# A2 uses PatternLayout.
-log4j.appender.A2.layout=org.apache.log4j.PatternLayout
-log4j.appender.A2.layout.ConversionPattern=%d [%t] %-5p %c{1} %x - %m%n
-
-log4j.logger.unicore.security=INFO
-

http://git-wip-us.apache.org/repos/asf/airavata/blob/70239916/modules/gfac/gfac-hadoop/pom.xml
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-hadoop/pom.xml b/modules/gfac/gfac-hadoop/pom.xml
deleted file mode 100644
index 3cd412c..0000000
--- a/modules/gfac/gfac-hadoop/pom.xml
+++ /dev/null
@@ -1,116 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-
-<!--Licensed to the Apache Software Foundation (ASF) under one or more contributor license agreements. See the NOTICE file
-    distributed with this work for additional information regarding copyright ownership. The ASF licenses this file to you under
-    the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may
-    obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to
-    in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
-    ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under
-    the License. -->
-
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
-    <parent>
-        <groupId>org.apache.airavata</groupId>
-        <artifactId>gfac</artifactId>
-        <version>0.14-SNAPSHOT</version>
-        <relativePath>../pom.xml</relativePath>
-    </parent>
-
-    <modelVersion>4.0.0</modelVersion>
-    <artifactId>airavata-gfac-hadoop</artifactId>
-    <name>Airavata GFac Hadoop implementation</name>
-    <description>This is the extension of GFAC to use Hadoop.</description>
-    <url>http://airavata.apache.org/</url>
-
-    <dependencies>
-        <!-- Logging -->
-        <dependency>
-            <groupId>org.slf4j</groupId>
-            <artifactId>slf4j-api</artifactId>
-        </dependency>
-
-        <!-- GFAC schemas -->
-        <dependency>
-            <groupId>org.apache.airavata</groupId>
-            <artifactId>airavata-gfac-core</artifactId>
-            <version>${project.version}</version>
-        </dependency>
-        <!-- Test -->
-        <dependency>
-            <groupId>junit</groupId>
-            <artifactId>junit</artifactId>
-            <scope>test</scope>
-        </dependency>
-        <dependency>
-            <groupId>org.testng</groupId>
-            <artifactId>testng</artifactId>
-            <version>6.1.1</version>
-            <scope>test</scope>
-        </dependency>
-        <dependency>
-            <groupId>org.slf4j</groupId>
-            <artifactId>jcl-over-slf4j</artifactId>
-            <scope>test</scope>
-        </dependency>
-        <dependency>
-            <groupId>org.slf4j</groupId>
-            <artifactId>slf4j-log4j12</artifactId>
-            <scope>test</scope>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.airavata</groupId>
-            <artifactId>airavata-server-configuration</artifactId>
-           <scope>test</scope>
-        </dependency>
-           <dependency>
-            <groupId>org.apache.airavata</groupId>
-            <artifactId>airavata-client-configuration</artifactId>
-            <scope>test</scope>
-        </dependency>
-
-
-
-        <!-- Hadoop provider related dependencies -->
-        <dependency>
-            <groupId>org.apache.hadoop</groupId>
-            <artifactId>hadoop-core</artifactId>
-            <version>1.0.3</version>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.hadoop</groupId>
-            <artifactId>hadoop-client</artifactId>
-            <version>1.0.3</version>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.whirr</groupId>
-            <artifactId>whirr-core</artifactId>
-            <version>0.7.1</version>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.whirr</groupId>
-            <artifactId>whirr-hadoop</artifactId>
-            <version>0.7.1</version>
-        </dependency>
-        <dependency>
-            <groupId>org.hamcrest</groupId>
-            <artifactId>hamcrest-all</artifactId>
-            <version>1.1</version>
-        </dependency>
-        <dependency>
-            <groupId>org.mockito</groupId>
-            <artifactId>mockito-all</artifactId>
-            <version>1.8.5</version>
-        </dependency>
-        <dependency>
-            <groupId>commons-configuration</groupId>
-            <artifactId>commons-configuration</artifactId>
-            <version>1.7</version>
-        </dependency>
-        <dependency>
-            <groupId>net.sf.jopt-simple</groupId>
-            <artifactId>jopt-simple</artifactId>
-            <version>3.2</version>
-        </dependency>
-    </dependencies>
-
-</project>

http://git-wip-us.apache.org/repos/asf/airavata/blob/70239916/modules/gfac/gfac-hadoop/src/main/java/org/apache/airavata/gfac/hadoop/handler/HDFSDataMovementHandler.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-hadoop/src/main/java/org/apache/airavata/gfac/hadoop/handler/HDFSDataMovementHandler.java b/modules/gfac/gfac-hadoop/src/main/java/org/apache/airavata/gfac/hadoop/handler/HDFSDataMovementHandler.java
deleted file mode 100644
index db75cb1..0000000
--- a/modules/gfac/gfac-hadoop/src/main/java/org/apache/airavata/gfac/hadoop/handler/HDFSDataMovementHandler.java
+++ /dev/null
@@ -1,103 +0,0 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- *
-*/
-
-package org.apache.airavata.gfac.hadoop.handler;
-
-import org.apache.airavata.gfac.core.context.JobExecutionContext;
-import org.apache.airavata.gfac.core.context.MessageContext;
-import org.apache.airavata.gfac.core.handler.GFacHandler;
-import org.apache.airavata.gfac.core.handler.GFacHandlerException;
-import org.apache.airavata.gfac.hadoop.provider.utils.HadoopUtils;
-import org.apache.airavata.schemas.gfac.ApplicationDeploymentDescriptionType;
-import org.apache.airavata.schemas.gfac.HadoopApplicationDeploymentDescriptionType;
-import org.apache.commons.io.FileUtils;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.io.File;
-import java.io.IOException;
-import java.util.Properties;
-
-public class HDFSDataMovementHandler implements GFacHandler {
-    private static final Logger logger = LoggerFactory.getLogger(HDFSDataMovementHandler.class);
-
-    private boolean isWhirrBasedDeployment = false;
-    private File hadoopConfigDir;
-
-    public void invoke(JobExecutionContext jobExecutionContext) throws GFacHandlerException {
-        MessageContext inMessageContext = jobExecutionContext.getInMessageContext();
-        if(inMessageContext.getParameter("HADOOP_DEPLOYMENT_TYPE").equals("WHIRR")){
-            isWhirrBasedDeployment = true;
-        } else {
-            String hadoopConfigDirPath = (String)inMessageContext.getParameter("HADOOP_CONFIG_DIR");
-            File hadoopConfigDir = new File(hadoopConfigDirPath);
-            if (!hadoopConfigDir.exists()){
-                throw new GFacHandlerException("Specified hadoop configuration 
directory doesn't exist.");
-            } else if (FileUtils.listFiles(hadoopConfigDir, null, null).size() 
<= 0){
-                throw new GFacHandlerException("Cannot find any hadoop 
configuration files inside specified directory.");
-            }
-
-            this.hadoopConfigDir = hadoopConfigDir;
-        }
-
-        if(jobExecutionContext.isInPath()){
-            try {
-                handleInPath(jobExecutionContext);
-            } catch (IOException e) {
-                throw new GFacHandlerException("Error while copying input data 
from local file system to HDFS.",e);
-            }
-        } else {
-            handleOutPath(jobExecutionContext);
-        }
-    }
-
-    private void handleInPath(JobExecutionContext jobExecutionContext) throws GFacHandlerException, IOException {
-        ApplicationDeploymentDescriptionType appDepDesc =
-                jobExecutionContext.getApplicationContext().getApplicationDeploymentDescription().getType();
-        HadoopApplicationDeploymentDescriptionType hadoopAppDesc =
-                (HadoopApplicationDeploymentDescriptionType)appDepDesc;
-        if(appDepDesc.isSetInputDataDirectory() && isInputDataDirectoryLocal(appDepDesc)){
-            Configuration hadoopConf = HadoopUtils.createHadoopConfiguration(jobExecutionContext, isWhirrBasedDeployment, hadoopConfigDir);
-            FileSystem hdfs = FileSystem.get(hadoopConf);
-            hdfs.copyFromLocalFile(new Path(appDepDesc.getInputDataDirectory()),
-                    new Path(hadoopAppDesc.getHadoopJobConfiguration().getHdfsInputDirectory()));
-        }
-    }
-
-    private boolean isInputDataDirectoryLocal(ApplicationDeploymentDescriptionType appDepDesc){
-        String inputDataDirectoryPath = appDepDesc.getInputDataDirectory();
-        File inputDataDirectory = new File(inputDataDirectoryPath);
-        if(inputDataDirectory.exists() && FileUtils.listFiles(inputDataDirectory, null, null).size() > 0){
-            return true;
-        }
-
-        return false;
-    }
-
-    private void handleOutPath(JobExecutionContext jobExecutionContext){}
-
-    public void initProperties(Properties properties) throws GFacHandlerException {
-
-    }
-}
\ No newline at end of file
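
For reference, the in-path branch of the deleted handler is the entire data-movement story: build a Hadoop Configuration, then copyFromLocalFile into the job's HDFS input directory. A stand-alone sketch using only stock Hadoop 1.x APIs (all paths are placeholders; the handler derived them from the application deployment description):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;

    public class StageInputToHdfs {
        public static void main(String[] args) throws Exception {
            // Paths below are placeholders for illustration.
            Configuration hadoopConf = new Configuration();
            hadoopConf.addResource(new Path("/etc/hadoop/conf/core-site.xml"));
            FileSystem hdfs = FileSystem.get(hadoopConf);
            hdfs.copyFromLocalFile(new Path("/tmp/inputData"),      // local source
                    new Path("/user/airavata/inputData"));          // HDFS destination
        }
    }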

http://git-wip-us.apache.org/repos/asf/airavata/blob/70239916/modules/gfac/gfac-hadoop/src/main/java/org/apache/airavata/gfac/hadoop/handler/HadoopDeploymentHandler.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-hadoop/src/main/java/org/apache/airavata/gfac/hadoop/handler/HadoopDeploymentHandler.java b/modules/gfac/gfac-hadoop/src/main/java/org/apache/airavata/gfac/hadoop/handler/HadoopDeploymentHandler.java
deleted file mode 100644
index 1d49a84..0000000
--- a/modules/gfac/gfac-hadoop/src/main/java/org/apache/airavata/gfac/hadoop/handler/HadoopDeploymentHandler.java
+++ /dev/null
@@ -1,276 +0,0 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- *
-*/
-
-package org.apache.airavata.gfac.hadoop.handler;
-
-import com.google.common.io.Files;
-import org.apache.airavata.commons.gfac.type.HostDescription;
-import org.apache.airavata.gfac.core.context.JobExecutionContext;
-import org.apache.airavata.gfac.core.context.MessageContext;
-import org.apache.airavata.gfac.core.handler.GFacHandler;
-import org.apache.airavata.gfac.core.handler.GFacHandlerException;
-import org.apache.airavata.schemas.gfac.HadoopHostType;
-import org.apache.commons.configuration.CompositeConfiguration;
-import org.apache.commons.configuration.Configuration;
-import org.apache.commons.configuration.ConfigurationException;
-import org.apache.commons.configuration.PropertiesConfiguration;
-import org.apache.commons.io.FileUtils;
-import org.apache.whirr.Cluster;
-import org.apache.whirr.ClusterController;
-import org.apache.whirr.ClusterControllerFactory;
-import org.apache.whirr.ClusterSpec;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-import org.w3c.dom.Document;
-import org.w3c.dom.Element;
-
-import javax.xml.parsers.DocumentBuilder;
-import javax.xml.parsers.DocumentBuilderFactory;
-import javax.xml.parsers.ParserConfigurationException;
-import javax.xml.transform.*;
-import javax.xml.transform.dom.DOMSource;
-import javax.xml.transform.stream.StreamResult;
-import java.io.File;
-import java.io.FileOutputStream;
-import java.io.IOException;
-import java.util.EnumSet;
-import java.util.Map;
-import java.util.Properties;
-
-import static org.apache.whirr.ClusterSpec.Property.*;
-import static org.apache.whirr.ClusterSpec.Property.INSTANCE_TEMPLATES;
-import static org.apache.whirr.ClusterSpec.Property.PRIVATE_KEY_FILE;
-
-/**
- * This handler takes care of deploying Hadoop in the cloud (in cloud-bursting scenarios) and
- * of deploying Hadoop in a local cluster. If there is an existing Hadoop cluster, it skips
- * cluster setup and just uses the Hadoop configuration provided by the user.
- */
-public class HadoopDeploymentHandler implements GFacHandler {
-    private static final Logger logger = LoggerFactory.getLogger("hadoop-dep-handler");
-
-    /**
-     * Once invoked, this method deploys Hadoop in a local cluster or in the cloud, based on the
-     * configuration provided. If there is an already deployed Hadoop cluster, it skips
-     * deployment.
-     *
-     * @param jobExecutionContext job execution context containing all the required configurations
-     *                            and runtime information.
-     * @throws org.apache.airavata.gfac.core.handler.GFacHandlerException
-     */
-    public void invoke(JobExecutionContext jobExecutionContext) throws GFacHandlerException {
-        if(jobExecutionContext.isInPath()){
-            handleInPath(jobExecutionContext);
-        } else {
-            handleOutPath(jobExecutionContext);
-        }
-    }
-
-    private void handleInPath(JobExecutionContext jobExecutionContext) throws GFacHandlerException {
-        HostDescription hostDescription =
-                jobExecutionContext.getApplicationContext().getHostDescription();
-        if (!isHadoopDeploymentAvailable(hostDescription)) {
-            // Temp directory to keep generated configuration files.
-            File tempDirectory = Files.createTempDir();
-            try {
-                File hadoopSiteXML = launchHadoopCluster(hostDescription, tempDirectory);
-                jobExecutionContext.getInMessageContext().addParameter("HADOOP_SITE_XML", hadoopSiteXML.getAbsolutePath());
-                jobExecutionContext.getInMessageContext().addParameter("HADOOP_DEPLOYMENT_TYPE", "WHIRR");
-                // TODO: Add hadoop-site.xml to job execution context.
-            } catch (IOException e) {
-                throw new GFacHandlerException("IO Error while processing 
configurations.",e);
-            } catch (ConfigurationException e) {
-                throw  new GFacHandlerException("Whirr configuration error.", 
e);
-            } catch (InterruptedException e) {
-                throw new GFacHandlerException("Hadoop cluster launch 
interrupted.", e);
-            } catch (TransformerException e) {
-                throw new GFacHandlerException("Error while creating 
hadoop-site.xml", e);
-            } catch (ParserConfigurationException e) {
-                throw new GFacHandlerException("Error while creating 
hadoop-site.xml", e);
-            }
-        } else {
-            jobExecutionContext.getInMessageContext().addParameter("HADOOP_DEPLOYMENT_TYPE",
-                    "MANUAL");
-            jobExecutionContext.getInMessageContext().addParameter("HADOOP_CONFIG_DIR",
-                    ((HadoopHostType)hostDescription.getType()).getHadoopConfigurationDirectory());
-            logger.info("Hadoop configuration is available. Skipping hadoop deployment.");
-            if(logger.isDebugEnabled()){
-                logger.debug("Hadoop configuration directory: " +
-                        getHadoopConfigDirectory(hostDescription));
-            }
-        }
-    }
-
-    private void handleOutPath(JobExecutionContext jobExecutionContext){
-        MessageContext inMessageContext = jobExecutionContext.getInMessageContext();
-        if(((String)inMessageContext.getParameter("HADOOP_DEPLOYMENT_TYPE")).equals("WHIRR")){
-            // TODO: Shutdown hadoop cluster.
-            logger.info("Shutdown hadoop cluster.");
-        }
-    }
-
-    private File launchHadoopCluster(HostDescription hostDescription, File workingDirectory)
-            throws IOException, GFacHandlerException, ConfigurationException, InterruptedException, TransformerException, ParserConfigurationException {
-        ClusterSpec hadoopClusterSpec =
-                whirrConfigurationToClusterSpec(hostDescription, workingDirectory);
-        ClusterController hadoopClusterController =
-                createClusterController(hadoopClusterSpec.getServiceName());
-        Cluster hadoopCluster = hadoopClusterController.launchCluster(hadoopClusterSpec);
-
-        logger.info(String.format("Started cluster of %s instances.\n",
-                hadoopCluster.getInstances().size()));
-
-        File siteXML = new File(workingDirectory, "hadoop-site.xml");
-        clusterPropertiesToHadoopSiteXml(hadoopCluster.getConfiguration(), siteXML);
-
-        return siteXML;
-    }
-
-    private ClusterController createClusterController(String serviceName){
-        ClusterControllerFactory factory = new ClusterControllerFactory();
-        ClusterController controller = factory.create(serviceName);
-
-        if(controller == null){
-            logger.warn("Unable to find the service {0}, using default.", serviceName);
-            controller = factory.create(null);
-        }
-
-        return controller;
-    }
-
-    private ClusterSpec whirrConfigurationToClusterSpec(HostDescription hostDescription,
-                                                        File workingDirectory) throws IOException, GFacHandlerException, ConfigurationException {
-        File whirrConfig = getWhirrConfigurationFile(hostDescription, workingDirectory);
-        CompositeConfiguration compositeConfiguration = new CompositeConfiguration();
-        Configuration configuration = new PropertiesConfiguration(whirrConfig);
-        compositeConfiguration.addConfiguration(configuration);
-
-        ClusterSpec hadoopClusterSpec = new ClusterSpec(compositeConfiguration);
-
-        for (ClusterSpec.Property required : EnumSet.of(CLUSTER_NAME, PROVIDER, IDENTITY, CREDENTIAL,
-                INSTANCE_TEMPLATES, PRIVATE_KEY_FILE)) {
-            if (hadoopClusterSpec.getConfiguration().getString(required.getConfigName()) == null) {
-                throw new IllegalArgumentException(String.format("Option '%s' not set.",
-                        required.getSimpleName()));
-            }
-        }
-
-        return hadoopClusterSpec;
-    }
-
-    private File getWhirrConfigurationFile(HostDescription hostDescription, File workingDirectory)
-            throws GFacHandlerException, IOException {
-        HadoopHostType hadoopHostDesc = (HadoopHostType)hostDescription;
-        if(hadoopHostDesc.isSetWhirrConfiguration()){
-            HadoopHostType.WhirrConfiguration whirrConfig = hadoopHostDesc.getWhirrConfiguration();
-            if(whirrConfig.isSetConfigurationFile()){
-                File whirrConfigFile = new File(whirrConfig.getConfigurationFile());
-                if(!whirrConfigFile.exists()){
-                    throw new GFacHandlerException(
-                            "Specified whirr configuration file doesn't 
exists.");
-                }
-
-                FileUtils.copyFileToDirectory(whirrConfigFile, workingDirectory);
-
-                return new File(workingDirectory, whirrConfigFile.getName());
-            } else if(whirrConfig.isSetConfiguration()){
-                Properties whirrConfigProps =
-                        whirrConfigurationsToProperties(whirrConfig.getConfiguration());
-                File whirrConfigFile = new File(workingDirectory, "whirr-hadoop.config");
-                whirrConfigProps.store(
-                        new FileOutputStream(whirrConfigFile), null);
-
-                return whirrConfigFile;
-            }
-        }
-
-        throw new GFacHandlerException("Cannot find Whirr configurations. 
Whirr configuration "
-                + "is required if you don't have already running Hadoop 
deployment.");
-    }
-
-    private Properties whirrConfigurationsToProperties(
-            HadoopHostType.WhirrConfiguration.Configuration configuration){
-        Properties whirrConfigProps = new Properties();
-
-        for(HadoopHostType.WhirrConfiguration.Configuration.Property property:
-                configuration.getPropertyArray()) {
-            whirrConfigProps.put(property.getName(), property.getValue());
-        }
-
-        return whirrConfigProps;
-    }
-
-    private void clusterPropertiesToHadoopSiteXml(Properties props, File hadoopSiteXml) throws ParserConfigurationException, TransformerException {
-        DocumentBuilderFactory domFactory = DocumentBuilderFactory.newInstance();
-        DocumentBuilder documentBuilder = domFactory.newDocumentBuilder();
-
-        Document hadoopSiteXmlDoc = documentBuilder.newDocument();
-
-        hadoopSiteXmlDoc.setXmlVersion("1.0");
-        hadoopSiteXmlDoc.setXmlStandalone(true);
-        hadoopSiteXmlDoc.createProcessingInstruction("xml-stylesheet", "type=\"text/xsl\" href=\"configuration.xsl\"");
-
-        Element configEle = hadoopSiteXmlDoc.createElement("configuration");
-
-        hadoopSiteXmlDoc.appendChild(configEle);
-
-        for(Map.Entry<Object, Object> entry : props.entrySet()){
-            addPropertyToConfiguration(entry, configEle, hadoopSiteXmlDoc);
-        }
-
-        saveDomToFile(hadoopSiteXmlDoc, hadoopSiteXml);
-    }
-
-    private void saveDomToFile(Document dom, File destFile) throws TransformerException {
-        Source source = new DOMSource(dom);
-
-        Result result = new StreamResult(destFile);
-
-        Transformer transformer = TransformerFactory.newInstance().newTransformer();
-        transformer.transform(source, result);
-    }
-
-    private void addPropertyToConfiguration(Map.Entry<Object, Object> entry, Element configElement, Document doc){
-        Element property = doc.createElement("property");
-        configElement.appendChild(property);
-
-        Element nameEle = doc.createElement("name");
-        nameEle.setTextContent(entry.getKey().toString());
-        property.appendChild(nameEle);
-
-        Element valueEle = doc.createElement("value");
-        valueEle.setTextContent(entry.getValue().toString());
-        property.appendChild(valueEle);
-    }
-
-    private boolean isHadoopDeploymentAvailable(HostDescription hostDescription) {
-        return ((HadoopHostType) hostDescription.getType()).isSetHadoopConfigurationDirectory();
-    }
-
-    private String getHadoopConfigDirectory(HostDescription hostDescription){
-        return ((HadoopHostType)hostDescription.getType()).getHadoopConfigurationDirectory();
-    }
-
-    public void initProperties(Properties properties) throws GFacHandlerException {
-
-    }
-}
\ No newline at end of file
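
For reference, the cloud-bursting path of the deleted handler reduces to the Whirr launch sequence: build a ClusterSpec from a properties file, create a ClusterController for its service name, and launch. A sketch under those assumptions (the config file name is a placeholder; the handler generated it from the host description's WhirrConfiguration):

    import org.apache.commons.configuration.CompositeConfiguration;
    import org.apache.commons.configuration.PropertiesConfiguration;
    import org.apache.whirr.Cluster;
    import org.apache.whirr.ClusterController;
    import org.apache.whirr.ClusterControllerFactory;
    import org.apache.whirr.ClusterSpec;

    public class LaunchWhirrCluster {
        public static void main(String[] args) throws Exception {
            // "whirr-hadoop.config" is a placeholder Whirr properties file.
            CompositeConfiguration config = new CompositeConfiguration();
            config.addConfiguration(new PropertiesConfiguration("whirr-hadoop.config"));
            ClusterSpec spec = new ClusterSpec(config);
            ClusterController controller = new ClusterControllerFactory().create(spec.getServiceName());
            Cluster cluster = controller.launchCluster(spec);
            System.out.printf("Started cluster of %d instances.%n", cluster.getInstances().size());
        }
    }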

http://git-wip-us.apache.org/repos/asf/airavata/blob/70239916/modules/gfac/gfac-hadoop/src/main/java/org/apache/airavata/gfac/hadoop/provider/impl/HadoopProvider.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-hadoop/src/main/java/org/apache/airavata/gfac/hadoop/provider/impl/HadoopProvider.java b/modules/gfac/gfac-hadoop/src/main/java/org/apache/airavata/gfac/hadoop/provider/impl/HadoopProvider.java
deleted file mode 100644
index 30a1bf9..0000000
--- a/modules/gfac/gfac-hadoop/src/main/java/org/apache/airavata/gfac/hadoop/provider/impl/HadoopProvider.java
+++ /dev/null
@@ -1,154 +0,0 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- *
-*/
-
-package org.apache.airavata.gfac.hadoop.provider.impl;
-
-import java.io.File;
-import java.net.URL;
-import java.net.URLClassLoader;
-import java.util.ArrayList;
-import java.util.Map;
-
-import org.apache.airavata.commons.gfac.type.ActualParameter;
-import org.apache.airavata.gfac.GFacException;
-import org.apache.airavata.gfac.core.context.JobExecutionContext;
-import org.apache.airavata.gfac.core.context.MessageContext;
-import org.apache.airavata.gfac.core.provider.AbstractProvider;
-import org.apache.airavata.gfac.core.provider.GFacProviderException;
-import org.apache.airavata.gfac.hadoop.provider.utils.HadoopUtils;
-import org.apache.airavata.schemas.gfac.HadoopApplicationDeploymentDescriptionType;
-import org.apache.airavata.schemas.gfac.OutputParameterType;
-import org.apache.airavata.schemas.gfac.StringParameterType;
-import org.apache.commons.io.FileUtils;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.mapreduce.InputFormat;
-import org.apache.hadoop.mapreduce.Job;
-import org.apache.hadoop.mapreduce.Mapper;
-import org.apache.hadoop.mapreduce.OutputFormat;
-import org.apache.hadoop.mapreduce.Reducer;
-import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
-import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import sun.reflect.generics.reflectiveObjects.NotImplementedException;
-
-/**
- * Executes a Hadoop job using the cluster configuration provided by handlers in the
- * in-flow.
- */
-public class HadoopProvider extends AbstractProvider {
-    private static final Logger logger = LoggerFactory.getLogger(HadoopProvider.class);
-
-    private boolean isWhirrBasedDeployment = false;
-    private File hadoopConfigDir;
-
-    public void initialize(JobExecutionContext jobExecutionContext) throws GFacProviderException {
-        MessageContext inMessageContext = jobExecutionContext.getInMessageContext();
-        if(inMessageContext.getParameter("HADOOP_DEPLOYMENT_TYPE").equals("WHIRR")){
-            isWhirrBasedDeployment = true;
-        } else {
-            String hadoopConfigDirPath = (String)inMessageContext.getParameter("HADOOP_CONFIG_DIR");
-            File hadoopConfigDir = new File(hadoopConfigDirPath);
-            if (!hadoopConfigDir.exists()){
-                throw new GFacProviderException("Specified hadoop 
configuration directory doesn't exist.");
-            } else if (FileUtils.listFiles(hadoopConfigDir, null, null).size() 
<= 0){
-                throw new GFacProviderException("Cannot find any hadoop 
configuration files inside specified directory.");
-            }
-
-            this.hadoopConfigDir = hadoopConfigDir;
-        }
-    }
-
-    public void execute(JobExecutionContext jobExecutionContext) throws GFacProviderException {
-        HadoopApplicationDeploymentDescriptionType hadoopAppDesc =
-                (HadoopApplicationDeploymentDescriptionType)jobExecutionContext
-                        .getApplicationContext().getApplicationDeploymentDescription().getType();
-        MessageContext inMessageContext = jobExecutionContext.getInMessageContext();
-        HadoopApplicationDeploymentDescriptionType.HadoopJobConfiguration jobConf = hadoopAppDesc.getHadoopJobConfiguration();
-
-        try{
-            // Preparing Hadoop configuration
-            Configuration hadoopConf = HadoopUtils.createHadoopConfiguration(
-                    jobExecutionContext, isWhirrBasedDeployment, hadoopConfigDir);
-
-            // Load jar containing map-reduce job implementation
-            ArrayList<URL> mapRedJars = new ArrayList<URL>();
-            mapRedJars.add(new File(jobConf.getJarLocation()).toURL());
-            URLClassLoader childClassLoader = new URLClassLoader(mapRedJars.toArray(new URL[mapRedJars.size()]),
-                    this.getClass().getClassLoader());
-
-            Job job = new Job(hadoopConf);
-
-            job.setJobName(jobConf.getJobName());
-
-            job.setOutputKeyClass(Class.forName(jobConf.getOutputKeyClass(), true, childClassLoader));
-            job.setOutputValueClass(Class.forName(jobConf.getOutputValueClass(), true, childClassLoader));
-
-            job.setMapperClass((Class<? extends Mapper>)Class.forName(jobConf.getMapperClass(), true, childClassLoader));
-            job.setCombinerClass((Class<? extends Reducer>) Class.forName(jobConf.getCombinerClass(), true, childClassLoader));
-            job.setReducerClass((Class<? extends Reducer>) Class.forName(jobConf.getCombinerClass(), true, childClassLoader));
-
-            job.setInputFormatClass((Class<? extends InputFormat>)Class.forName(jobConf.getInputFormatClass(), true, childClassLoader));
-            job.setOutputFormatClass((Class<? extends OutputFormat>) Class.forName(jobConf.getOutputFormatClass(), true, childClassLoader));
-
-            FileInputFormat.setInputPaths(job, new Path(hadoopAppDesc.getInputDataDirectory()));
-            FileOutputFormat.setOutputPath(job, new Path(hadoopAppDesc.getOutputDataDirectory()));
-
-            job.waitForCompletion(true);
-            System.out.println(job.getTrackingURL());
-            if(jobExecutionContext.getOutMessageContext() == null){
-                jobExecutionContext.setOutMessageContext(new MessageContext());
-            }
-
-            OutputParameterType[] outputParametersArray = jobExecutionContext.getApplicationContext().
-                    getServiceDescription().getType().getOutputParametersArray();
-            for(OutputParameterType outparamType : outputParametersArray){
-                String paramName = outparamType.getParameterName();
-                if(paramName.equals("test-hadoop")){
-                    ActualParameter outParam = new ActualParameter();
-                    outParam.getType().changeType(StringParameterType.type);
-                    ((StringParameterType) outParam.getType()).setValue(job.getTrackingURL());
-                    jobExecutionContext.getOutMessageContext().addParameter("test-hadoop", outParam);
-                }
-            }
-        } catch (Exception e) {
-            String errMessage = "Error occurred during Map-Reduce job execution.";
-            logger.error(errMessage, e);
-            throw new GFacProviderException(errMessage, e);
-        }
-    }
-
-    public void dispose(JobExecutionContext jobExecutionContext) throws GFacProviderException {
-        // TODO: How to handle cluster shutdown. Best way is to introduce inPath/outPath to handler.
-    }
-
-    @Override
-    public void cancelJob(JobExecutionContext jobExecutionContext) throws GFacException {
-        throw new NotImplementedException();
-    }
-
-
-    public void initProperties(Map<String, String> properties) throws GFacProviderException, GFacException {
-
-    }
-}
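
For reference, the core of the deleted provider's execute() is plain Hadoop 1.x job submission, with job classes loaded reflectively from a user-supplied jar. A sketch under those assumptions (the jar path, class names, and HDFS paths are placeholders; the provider read them from HadoopJobConfiguration):

    import java.io.File;
    import java.net.URL;
    import java.net.URLClassLoader;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.io.IntWritable;
    import org.apache.hadoop.io.Text;
    import org.apache.hadoop.mapreduce.Job;
    import org.apache.hadoop.mapreduce.Mapper;
    import org.apache.hadoop.mapreduce.Reducer;
    import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
    import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

    public class RunReflectiveMapReduceJob {
        @SuppressWarnings("unchecked")
        public static void main(String[] args) throws Exception {
            // Load map/reduce classes from a user jar (placeholder path and names).
            URLClassLoader loader = new URLClassLoader(
                    new URL[]{new File("/tmp/mapred-job.jar").toURI().toURL()},
                    RunReflectiveMapReduceJob.class.getClassLoader());

            Job job = new Job(new Configuration());
            job.setJobName("airavata-mr-sketch");
            job.setMapperClass((Class<? extends Mapper>) Class.forName("example.MyMapper", true, loader));
            job.setReducerClass((Class<? extends Reducer>) Class.forName("example.MyReducer", true, loader));
            job.setOutputKeyClass(Text.class);
            job.setOutputValueClass(IntWritable.class);

            FileInputFormat.setInputPaths(job, new Path("/user/airavata/input"));
            FileOutputFormat.setOutputPath(job, new Path("/user/airavata/output"));
            System.exit(job.waitForCompletion(true) ? 0 : 1);
        }
    }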

http://git-wip-us.apache.org/repos/asf/airavata/blob/70239916/modules/gfac/gfac-hadoop/src/main/java/org/apache/airavata/gfac/hadoop/provider/utils/HadoopUtils.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-hadoop/src/main/java/org/apache/airavata/gfac/hadoop/provider/utils/HadoopUtils.java b/modules/gfac/gfac-hadoop/src/main/java/org/apache/airavata/gfac/hadoop/provider/utils/HadoopUtils.java
deleted file mode 100644
index 9d46446..0000000
--- a/modules/gfac/gfac-hadoop/src/main/java/org/apache/airavata/gfac/hadoop/provider/utils/HadoopUtils.java
+++ /dev/null
@@ -1,60 +0,0 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- *
-*/
-
-package org.apache.airavata.gfac.hadoop.provider.utils;
-
-import org.apache.airavata.gfac.core.context.JobExecutionContext;
-import org.apache.airavata.gfac.core.context.MessageContext;
-import org.apache.commons.io.FileUtils;
-import org.apache.hadoop.conf.Configuration;
-
-import java.io.File;
-import java.io.FileInputStream;
-import java.io.FileNotFoundException;
-import java.util.Collection;
-
-public class HadoopUtils {
-    public static Configuration createHadoopConfiguration(
-            JobExecutionContext jobExecutionContext,
-            boolean isWhirrBasedDeployment,
-            File hadoopConfigDir) throws FileNotFoundException {
-        MessageContext inMessageContext = jobExecutionContext.getInMessageContext();
-        Configuration hadoopConf = new Configuration();
-
-        if(isWhirrBasedDeployment){
-            hadoopConf.addResource(new FileInputStream(
-                    new File((String)inMessageContext.getParameter("HADOOP_SITE_XML"))));
-        } else {
-            readHadoopClusterConfigurationFromDirectory(hadoopConfigDir, hadoopConf);
-        }
-
-        return hadoopConf;
-    }
-
-    private static void readHadoopClusterConfigurationFromDirectory(File localHadoopConfigurationDirectory, Configuration hadoopConf)
-            throws FileNotFoundException {
-        Collection hadoopConfigurationFiles =
-                FileUtils.listFiles(localHadoopConfigurationDirectory, null, false);
-        for (Object f : hadoopConfigurationFiles) {
-            hadoopConf.addResource(new FileInputStream((File)f));
-        }
-    }
-}
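
For reference, the manual-deployment branch of the deleted helper boils down to folding every file in a local Hadoop configuration directory into a Configuration. A stand-alone sketch (the directory path is a placeholder):

    import java.io.File;
    import java.io.FileInputStream;
    import org.apache.commons.io.FileUtils;
    import org.apache.hadoop.conf.Configuration;

    public class LoadHadoopConfigDir {
        public static void main(String[] args) throws Exception {
            Configuration hadoopConf = new Configuration();
            // Add every file in the (placeholder) config directory as a resource.
            for (Object f : FileUtils.listFiles(new File("/etc/hadoop/conf"), null, false)) {
                hadoopConf.addResource(new FileInputStream((File) f));
            }
            System.out.println(hadoopConf);
        }
    }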

http://git-wip-us.apache.org/repos/asf/airavata/blob/70239916/modules/gfac/gfac-hadoop/src/main/resources/errors.properties
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-hadoop/src/main/resources/errors.properties b/modules/gfac/gfac-hadoop/src/main/resources/errors.properties
deleted file mode 100644
index 88c41b8..0000000
--- a/modules/gfac/gfac-hadoop/src/main/resources/errors.properties
+++ /dev/null
@@ -1,197 +0,0 @@
-#
-#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-#
-
-# Directly copied from jglobus. Not a good way to manage error properties.
-1 = Parameter not supported
-2 = The RSL length is greater than the maximum allowed
-3 = No resources available
-4 = Bad directory specified
-5 = The executable does not exist
-6 = Insufficient funds
-7 = Authentication with the remote server failed
-8 = Job cancelled by user
-9 = Job cancelled by system
-
-10 = Data transfer to the server failed
-11 = The stdin file does not exist
-12 = The connection to the server failed (check host and port)
-13 = The provided RSL 'maxtime' value is invalid (not an integer or must be greater than 0)
-14 = The provided RSL 'count' value is invalid (not an integer or must be greater than 0)
-15 = The job manager received an invalid RSL
-16 = Could not connect to job manager
-17 = The job failed when the job manager attempted to run it
-18 = Paradyn error
-19 = The provided RSL 'jobtype' value is invalid
-
-20 = The provided RSL 'myjob' value is invalid
-21 = The job manager failed to locate an internal script argument file
-22 = The job manager failed to create an internal script argument file
-23 = The job manager detected an invalid job state
-24 = The job manager detected an invalid script response
-25 = The job manager detected an invalid job state
-26 = The provided RSL 'jobtype' value is not supported by this job manager
-27 = Unimplemented
-28 = The job manager failed to create an internal script submission file
-29 = The job manager cannot find the user proxy
-
-30 = The job manager failed to open the user proxy
-31 = The job manager failed to cancel the job as requested
-32 = System memory allocation failed
-33 = The interprocess job communication initialization failed
-34 = The interprocess job communication setup failed
-35 = The provided RSL 'host count' value is invalid
-36 = One of the provided RSL parameters is unsupported
-37 = The provided RSL 'queue' parameter is invalid
-38 = The provided RSL 'project' parameter is invalid
-39 = The provided RSL string includes variables that could not be identified
-
-40 = The provided RSL 'environment' parameter is invalid
-41 = The provided RSL 'dryrun' parameter is invalid
-42 = The provided RSL is invalid (an empty string)
-43 = The job manager failed to stage the executable
-44 = The job manager failed to stage the stdin file
-45 = The requested job manager type is invalid
-46 = The provided RSL 'arguments' parameter is invalid
-47 = The gatekeeper failed to run the job manager
-48 = The provided RSL could not be properly parsed
-49 = There is a version mismatch between GRAM components
-
-50 = The provided RSL 'arguments' parameter is invalid
-51 = The provided RSL 'count' parameter is invalid
-52 = The provided RSL 'directory' parameter is invalid
-53 = The provided RSL 'dryrun' parameter is invalid
-54 = The provided RSL 'environment' parameter is invalid
-55 = The provided RSL 'executable' parameter is invalid
-56 = The provided RSL 'host_count' parameter is invalid
-57 = The provided RSL 'jobtype' parameter is invalid
-58 = The provided RSL 'maxtime' parameter is invalid
-59 = The provided RSL 'myjob' parameter is invalid
-
-60 = The provided RSL 'paradyn' parameter is invalid
-61 = The provided RSL 'project' parameter is invalid
-62 = The provided RSL 'queue' parameter is invalid
-63 = The provided RSL 'stderr' parameter is invalid
-64 = The provided RSL 'stdin' parameter is invalid
-65 = The provided RSL 'stdout' parameter is invalid
-66 = The job manager failed to locate an internal script
-67 = The job manager failed on the system call pipe()
-68 = The job manager failed on the system call fcntl()
-69 = The job manager failed to create the temporary stdout filename
-
-70 = The job manager failed to create the temporary stderr filename
-71 = The job manager failed on the system call fork()
-72 = The executable file permissions do not allow execution
-73 = The job manager failed to open stdout
-74 = The job manager failed to open stderr
-75 = The cache file could not be opened in order to relocate the user proxy
-76 = Cannot access cache files in ~/.globus/.gass_cache, check permissions, quota, and disk space
-77 = The job manager failed to insert the contact in the client contact list
-78 = The contact was not found in the job manager's client contact list
-79 = Connecting to the job manager failed.  Possible reasons: job terminated, invalid job contact, network problems, ...
-
-80 = The syntax of the job contact is invalid
-81 = The executable parameter in the RSL is undefined
-82 = The job manager service is misconfigured.  condor arch undefined
-83 = The job manager service is misconfigured.  condor os undefined
-84 = The provided RSL 'min_memory' parameter is invalid
-85 = The provided RSL 'max_memory' parameter is invalid
-86 = The RSL 'min_memory' value is not zero or greater
-87 = The RSL 'max_memory' value is not zero or greater
-88 = The creation of a HTTP message failed
-89 = Parsing incoming HTTP message failed
-
-90 = The packing of information into a HTTP message failed
-91 = An incoming HTTP message did not contain the expected information
-92 = The job manager does not support the service that the client requested
-93 = The gatekeeper failed to find the requested service
-94 = The jobmanager does not accept any new requests (shutting down)
-95 = The client failed to close the listener associated with the callback URL
-96 = The gatekeeper contact cannot be parsed
-97 = The job manager could not find the 'poe' command
-98 = The job manager could not find the 'mpirun' command
-99 = The provided RSL 'start_time' parameter is invalid
-100 = The provided RSL 'reservation_handle' parameter is invalid
-
-101 = The provided RSL 'max_wall_time' parameter is invalid
-102 = The RSL 'max_wall_time' value is not zero or greater
-103 = The provided RSL 'max_cpu_time' parameter is invalid
-104 = The RSL 'max_cpu_time' value is not zero or greater
-105 = The job manager is misconfigured, a scheduler script is missing
-106 = The job manager is misconfigured, a scheduler script has invalid permissions
-107 = The job manager failed to signal the job
-108 = The job manager did not recognize/support the signal type
-109 = The job manager failed to get the job id from the local scheduler
-
-110 = The job manager is waiting for a commit signal
-111 = The job manager timed out while waiting for a commit signal
-112 = The provided RSL 'save_state' parameter is invalid
-113 = The provided RSL 'restart' parameter is invalid
-114 = The provided RSL 'two_phase' parameter is invalid
-115 = The RSL 'two_phase' value is not zero or greater
-116 = The provided RSL 'stdout_position' parameter is invalid
-117 = The RSL 'stdout_position' value is not zero or greater
-118 = The provided RSL 'stderr_position' parameter is invalid
-119 = The RSL 'stderr_position' value is not zero or greater
-
-120 = The job manager restart attempt failed
-121 = The job state file doesn't exist
-122 = Could not read the job state file
-123 = Could not write the job state file
-124 = The old job manager is still alive
-125 = The job manager state file TTL expired
-126 = It is unknown if the job was submitted
-127 = The provided RSL 'remote_io_url' parameter is invalid
-128 = Could not write the remote io url file
-129 = The standard output/error size is different
-
-130 = The job manager was sent a stop signal (job is still running)
-131 = The user proxy expired (job is still running)
-132 = The job was not submitted by original jobmanager
-133 = The job manager is not waiting for that commit signal
-134 = The provided RSL scheduler specific parameter is invalid
-135 = The job manager could not stage in a file
-136 = The scratch directory could not be created
-137 = The provided 'gass_cache' parameter is invalid
-138 = The RSL contains attributes which are not valid for job submission
-139 = The RSL contains attributes which are not valid for stdio update
-
-140 = The RSL contains attributes which are not valid for job restart
-141 = The provided RSL 'file_stage_in' parameter is invalid
-142 = The provided RSL 'file_stage_in_shared' parameter is invalid
-143 = The provided RSL 'file_stage_out' parameter is invalid
-144 = The provided RSL 'gass_cache' parameter is invalid
-145 = The provided RSL 'file_cleanup' parameter is invalid
-146 = The provided RSL 'scratch_dir' parameter is invalid
-147 = The provided scheduler-specific RSL parameter is invalid
-148 = A required RSL attribute was not defined in the RSL spec
-149 = The gass_cache attribute points to an invalid cache directory
-
-150 = The provided RSL 'save_state' parameter has an invalid value
-151 = The job manager could not open the RSL attribute validation file
-152 = The job manager could not read the RSL attribute validation file
-153 = The provided RSL 'proxy_timeout' is invalid
-154 = The RSL 'proxy_timeout' value is not greater than zero
-155 = The job manager could not stage out a file
-156 = The job contact string does not match any which the job manager is handling
-157 = Proxy delegation failed
-158 = The job manager could not lock the state lock file
-
-1000 = Failed to start up callback handler
-1003 = Job contact not set
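
Since the deleted file above is a plain java.util.Properties resource, a provider can resolve a numeric GRAM error code to its message at runtime. A minimal sketch of such a lookup, assuming errors.properties sits on the classpath (GramErrorMessages is a hypothetical helper, not an Airavata API):

    import java.io.IOException;
    import java.io.InputStream;
    import java.util.Properties;

    public final class GramErrorMessages {
        private static final Properties ERRORS = new Properties();

        static {
            try (InputStream in = GramErrorMessages.class
                    .getResourceAsStream("/errors.properties")) {
                if (in != null) {
                    ERRORS.load(in);
                }
            } catch (IOException e) {
                // leave ERRORS empty; lookup() then returns the fallback text
            }
        }

        /** Maps a numeric GRAM error code to its human-readable message. */
        public static String lookup(int code) {
            return ERRORS.getProperty(String.valueOf(code),
                    "Unknown GRAM error " + code);
        }
    }

For example, lookup(7) would return "Authentication with the remote server failed".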

http://git-wip-us.apache.org/repos/asf/airavata/blob/70239916/modules/gfac/gfac-hadoop/src/main/resources/service.properties
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-hadoop/src/main/resources/service.properties b/modules/gfac/gfac-hadoop/src/main/resources/service.properties
deleted file mode 100644
index 391bfea..0000000
--- a/modules/gfac/gfac-hadoop/src/main/resources/service.properties
+++ /dev/null
@@ -1,58 +0,0 @@
-#
-#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-#
-#
-
-
-#
-# Class which implements the Scheduler interface. It will be used to determine a Provider
-#
-scheduler.class= org.apache.airavata.core.gfac.scheduler.impl.SchedulerImpl
-
-#
-# Data Service Plugins classes
-#
-datachain.classes= org.apache.airavata.core.gfac.extension.data.RegistryDataService
-
-#
-# Pre execution Plugins classes. For example, GridFTP Input Staging
-#
-prechain.classes= org.apache.airavata.core.gfac.extension.pre.GridFtpInputStaging
-prechain.classes= org.apache.airavata.core.gfac.extension.pre.HttpInputStaging
-
-#
-# Post execution Plugins classes. For example, GridFTP Output Staging
-#
-postchain.classes= org.apache.airavata.core.gfac.extension.post.GridFtpOutputStaging
-postchain.classes= org.apache.airavata.core.gfac.extension.post.OutputRegister
-
-#
-# SSH private key location. It will be used by SSHProvider
-#
-# ssh.key=/home/user/.ssh/id_rsa
-# ssh.keypass=
-# ssh.username=usernameAtHost
-
-#
-# MyProxy credential. It will be used by GridFTP Plugins and GramProvider.
-#
-# myproxy.server=myproxy.teragrid.org
-# myproxy.user=username
-# myproxy.pass=password
-# myproxy.life=3600
\ No newline at end of file
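
One detail worth flagging in the file above: prechain.classes and postchain.classes are each assigned twice. When such a file is loaded with plain java.util.Properties, the later assignment silently replaces the earlier one, so only HttpInputStaging and OutputRegister would survive; whether the old GFac loader accumulated duplicate keys instead is not visible from this diff. A quick demonstration of the java.util.Properties behavior:

    import java.io.StringReader;
    import java.util.Properties;

    public class DuplicateKeyDemo {
        public static void main(String[] args) throws Exception {
            Properties p = new Properties();
            p.load(new StringReader(
                    "prechain.classes= org.apache.airavata.core.gfac.extension.pre.GridFtpInputStaging\n"
                  + "prechain.classes= org.apache.airavata.core.gfac.extension.pre.HttpInputStaging\n"));
            // Prints the HttpInputStaging class name: the last duplicate wins.
            System.out.println(p.getProperty("prechain.classes"));
        }
    }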

http://git-wip-us.apache.org/repos/asf/airavata/blob/70239916/modules/gfac/gfac-hadoop/src/test/resources/PBSTemplate.xslt
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-hadoop/src/test/resources/PBSTemplate.xslt b/modules/gfac/gfac-hadoop/src/test/resources/PBSTemplate.xslt
deleted file mode 100644
index e749e9c..0000000
--- a/modules/gfac/gfac-hadoop/src/test/resources/PBSTemplate.xslt
+++ /dev/null
@@ -1,73 +0,0 @@
-<!--Licensed to the Apache Software Foundation (ASF) under one or more contributor license agreements. See the NOTICE file
-       distributed with this work for additional information regarding copyright ownership. The ASF licenses this file to you under
-       the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may
-       obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to
-       in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
-       ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under
-       the License. -->
-<xsl:stylesheet version="1.0" xmlns:xsl="http://www.w3.org/1999/XSL/Transform" xmlns:ns="http://airavata.apache.org/gsi/ssh/2012/12">
-<xsl:output method="text" />
-<xsl:template match="/ns:JobDescriptor">
-#! /bin/sh
-# PBS batch job script built by Globus job manager
-#   <xsl:choose>
-    <xsl:when test="ns:shellName">
-##PBS -S <xsl:value-of select="ns:shellName"/>
-    </xsl:when></xsl:choose>
-    <xsl:choose>
-    <xsl:when test="ns:queueName">
-#PBS -q <xsl:value-of select="ns:queueName"/>
-    </xsl:when>
-    </xsl:choose>
-    <xsl:choose>
-    <xsl:when test="ns:mailOptions">
-#PBS -m <xsl:value-of select="ns:mailOptions"/>
-    </xsl:when>
-    </xsl:choose>
-    <xsl:choose>
-<xsl:when test="ns:acountString">
-#PBS -A <xsl:value-of select="ns:acountString"/>
-    </xsl:when>
-    </xsl:choose>
-    <xsl:choose>
-    <xsl:when test="ns:maxWallTime">
-#PBS -l walltime=<xsl:value-of select="ns:maxWallTime"/>
-    </xsl:when>
-    </xsl:choose>
-    <xsl:choose>
-    <xsl:when test="ns:standardOutFile">
-#PBS -o <xsl:value-of select="ns:standardOutFile"/>
-    </xsl:when>
-    </xsl:choose>
-    <xsl:choose>
-    <xsl:when test="ns:standardOutFile">
-#PBS -e <xsl:value-of select="ns:standardErrorFile"/>
-    </xsl:when>
-    </xsl:choose>
-    <xsl:choose>
-    <xsl:when test="(ns:nodes) and (ns:processesPerNode)">
-#PBS -l nodes=<xsl:value-of select="ns:nodes"/>:ppn=<xsl:value-of select="ns:processesPerNode"/>
-<xsl:text>&#xa;</xsl:text>
-    </xsl:when>
-    </xsl:choose>
-<xsl:for-each select="ns:exports/ns:name">
-<xsl:value-of select="."/>=<xsl:value-of 
select="./@value"/><xsl:text>&#xa;</xsl:text>
-export<xsl:text>   </xsl:text><xsl:value-of select="."/>
-<xsl:text>&#xa;</xsl:text>
-</xsl:for-each>
-<xsl:for-each select="ns:preJobCommands/ns:command">
-      <xsl:value-of select="."/><xsl:text>   </xsl:text>
-    </xsl:for-each>
-cd <xsl:text>   </xsl:text><xsl:value-of select="ns:workingDirectory"/><xsl:text>&#xa;</xsl:text>
-    <xsl:choose><xsl:when test="ns:jobSubmitterCommand">
-<xsl:value-of select="ns:jobSubmitterCommand"/><xsl:text>   
</xsl:text></xsl:when></xsl:choose><xsl:value-of 
select="ns:executablePath"/><xsl:text>   </xsl:text>
-<xsl:for-each select="ns:inputs/ns:input">
-      <xsl:value-of select="."/><xsl:text>   </xsl:text>
-    </xsl:for-each>
-<xsl:for-each select="ns:postJobCommands/ns:command">
-      <xsl:value-of select="."/><xsl:text>   </xsl:text>
-</xsl:for-each>
-
-</xsl:template>
-
-</xsl:stylesheet>
\ No newline at end of file
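
The deleted stylesheet turns an ns:JobDescriptor document into a PBS batch script ("#PBS ..." directives followed by the job command line). A minimal way to apply it with the standard JAXP transformer, sketched with placeholder file names (jobdescriptor.xml stands for any document in the http://airavata.apache.org/gsi/ssh/2012/12 namespace):

    import javax.xml.transform.Transformer;
    import javax.xml.transform.TransformerFactory;
    import javax.xml.transform.stream.StreamResult;
    import javax.xml.transform.stream.StreamSource;

    public class RenderPbsScript {
        public static void main(String[] args) throws Exception {
            Transformer t = TransformerFactory.newInstance()
                    .newTransformer(new StreamSource("PBSTemplate.xslt"));
            // Writes the generated PBS batch script to job.pbs
            t.transform(new StreamSource("jobdescriptor.xml"),
                    new StreamResult("job.pbs"));
        }
    }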

http://git-wip-us.apache.org/repos/asf/airavata/blob/70239916/modules/gfac/gfac-hadoop/src/test/resources/logging.properties
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-hadoop/src/test/resources/logging.properties b/modules/gfac/gfac-hadoop/src/test/resources/logging.properties
deleted file mode 100644
index 0584d38..0000000
--- a/modules/gfac/gfac-hadoop/src/test/resources/logging.properties
+++ /dev/null
@@ -1,42 +0,0 @@
-#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements. See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership. The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied. See the License for the
-# specific language governing permissions and limitations
-# under the License.
-#
-#
-#default/fallback log4j configuration
-#
-
-# Set root logger level to INFO with two appenders, A1 and A2.
-log4j.rootLogger=INFO, A1, A2
-
-# A1 is set to be a rolling file appender with default params
-log4j.appender.A1=org.apache.log4j.RollingFileAppender
-log4j.appender.A1.File=target/seclogs.txt
-
-# A1 uses PatternLayout.
-log4j.appender.A1.layout=org.apache.log4j.PatternLayout
-log4j.appender.A1.layout.ConversionPattern=%d [%t] %-5p %c %x - %m%n
-
-# A2 is a console appender
-log4j.appender.A2=org.apache.log4j.ConsoleAppender
-
-# A2 uses PatternLayout.
-log4j.appender.A2.layout=org.apache.log4j.PatternLayout
-log4j.appender.A2.layout.ConversionPattern=%d [%t] %-5p %c{1} %x - %m%n
-
-log4j.logger.unicore.security=INFO
-
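
Despite its name, the deleted file is a log4j 1.x configuration: every logger at INFO or above writes to both the rolling file target/seclogs.txt (A1) and the console (A2), and %c{1} in A2's pattern prints only the last component of the logger name. A sketch of how a test might have used it (file name per the diff; the logger name is illustrative):

    import org.apache.log4j.Logger;
    import org.apache.log4j.PropertyConfigurator;

    public class LoggingDemo {
        public static void main(String[] args) {
            PropertyConfigurator.configure("logging.properties");
            Logger log = Logger.getLogger("unicore.security.Handler");
            log.info("proxy validated");  // reaches both A1 and A2
        }
    }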

http://git-wip-us.apache.org/repos/asf/airavata/blob/70239916/modules/gfac/pom.xml
----------------------------------------------------------------------
diff --git a/modules/gfac/pom.xml b/modules/gfac/pom.xml
index 9454f37..7f8737f 100644
--- a/modules/gfac/pom.xml
+++ b/modules/gfac/pom.xml
@@ -32,11 +32,8 @@
             </activation>
             <modules>
                 <module>gfac-core</module>
-                <module>gfac-ec2</module>
                 <module>gfac-ssh</module>
                 <module>gfac-local</module>
-                <!--<module>gfac-hadoop</module>-->
-                <!--<module>gfac-gram</module>-->
                 <module>gfac-gsissh</module>
                 <module>gfac-bes</module>
                 <module>gfac-monitor</module>

http://git-wip-us.apache.org/repos/asf/airavata/blob/70239916/modules/workflow-model/workflow-engine/pom.xml
----------------------------------------------------------------------
diff --git a/modules/workflow-model/workflow-engine/pom.xml b/modules/workflow-model/workflow-engine/pom.xml
index 7de0b8d..66445ff 100644
--- a/modules/workflow-model/workflow-engine/pom.xml
+++ b/modules/workflow-model/workflow-engine/pom.xml
@@ -267,11 +267,11 @@
             <artifactId>airavata-gfac-core</artifactId>
             <version>${project.version}</version>
         </dependency>
-        <dependency>
+<!--        <dependency>
             <groupId>org.apache.airavata</groupId>
             <artifactId>airavata-gfac-ec2</artifactId>
             <version>${project.version}</version>
-        </dependency>
+        </dependency>-->
         <dependency>
             <groupId>org.slf4j</groupId>
             <artifactId>slf4j-api</artifactId>

http://git-wip-us.apache.org/repos/asf/airavata/blob/70239916/modules/xbaya-gui/pom.xml
----------------------------------------------------------------------
diff --git a/modules/xbaya-gui/pom.xml b/modules/xbaya-gui/pom.xml
index 9589084..1a39990 100644
--- a/modules/xbaya-gui/pom.xml
+++ b/modules/xbaya-gui/pom.xml
@@ -232,11 +232,11 @@
             <artifactId>airavata-gfac-core</artifactId>
             <version>${project.version}</version>
         </dependency>
-        <dependency>
+<!--        <dependency>
             <groupId>org.apache.airavata</groupId>
             <artifactId>airavata-gfac-ec2</artifactId>
             <version>${project.version}</version>
-        </dependency>
+        </dependency>-->
         <dependency>
             <groupId>org.slf4j</groupId>
             <artifactId>slf4j-api</artifactId>

http://git-wip-us.apache.org/repos/asf/airavata/blob/70239916/modules/xbaya-gui/src/main/java/org/apache/airavata/xbaya/XBayaConfiguration.java
----------------------------------------------------------------------
diff --git a/modules/xbaya-gui/src/main/java/org/apache/airavata/xbaya/XBayaConfiguration.java b/modules/xbaya-gui/src/main/java/org/apache/airavata/xbaya/XBayaConfiguration.java
index aab86a4..33012c5 100644
--- a/modules/xbaya-gui/src/main/java/org/apache/airavata/xbaya/XBayaConfiguration.java
+++ b/modules/xbaya-gui/src/main/java/org/apache/airavata/xbaya/XBayaConfiguration.java
@@ -31,7 +31,6 @@ import java.util.Observable;
 import java.util.Observer;
 
 import org.apache.airavata.api.Airavata.Client;
-import org.apache.airavata.gfac.ec2.AmazonSecurityContext;
 import org.apache.airavata.model.error.AiravataClientConnectException;
 import org.apache.airavata.schemas.wec.ContextHeaderDocument;
 import org.apache.airavata.workflow.model.component.registry.JCRComponentRegistry;
@@ -141,7 +140,6 @@ public class XBayaConfiguration extends Observable implements Observer {
 
     private boolean regURLSetByCMD = false;
 
-    private AmazonSecurityContext amazonSecurityContext = null;
 
     private ContextHeaderDocument.ContextHeader contextHeader;
 
@@ -330,22 +328,6 @@ public class XBayaConfiguration extends Observable implements Observer {
     }
 
     /**
-     * Get the AmazonSecurityContext needed for cloud job submission.
-     * @return AmazonSecurityContext
-     */
-    public AmazonSecurityContext getAmazonSecurityContext() {
-        return amazonSecurityContext;
-    }
-
-    /**
-     * Set the AmazonSecurityContext needed for cloud job submission.
-     * @param amazonSecurityContext amazon security context.
-     */
-    public void setAmazonSecurityContext(AmazonSecurityContext amazonSecurityContext) {
-        this.amazonSecurityContext = amazonSecurityContext;
-    }
-
-    /**
      * @return The DSC URL
      */
     public URI getDSCURL() {

http://git-wip-us.apache.org/repos/asf/airavata/blob/70239916/modules/xbaya-gui/src/main/java/org/apache/airavata/xbaya/ui/dialogs/amazon/ChangeCredentialWindow.java
----------------------------------------------------------------------
diff --git a/modules/xbaya-gui/src/main/java/org/apache/airavata/xbaya/ui/dialogs/amazon/ChangeCredentialWindow.java b/modules/xbaya-gui/src/main/java/org/apache/airavata/xbaya/ui/dialogs/amazon/ChangeCredentialWindow.java
index d67caad..53659fa 100644
--- a/modules/xbaya-gui/src/main/java/org/apache/airavata/xbaya/ui/dialogs/amazon/ChangeCredentialWindow.java
+++ b/modules/xbaya-gui/src/main/java/org/apache/airavata/xbaya/ui/dialogs/amazon/ChangeCredentialWindow.java
@@ -21,11 +21,6 @@
 
 package org.apache.airavata.xbaya.ui.dialogs.amazon;
 
-import com.amazonaws.auth.AWSCredentials;
-import com.amazonaws.auth.BasicAWSCredentials;
-import com.amazonaws.services.ec2.AmazonEC2Client;
-import org.apache.airavata.gfac.ec2.EC2Provider;
-import org.apache.airavata.gfac.ec2.util.EC2ProviderUtil;
 import org.apache.airavata.xbaya.XBayaEngine;
 import org.apache.airavata.xbaya.core.amazon.AmazonCredential;
 import org.apache.airavata.xbaya.ui.dialogs.XBayaDialog;
@@ -33,12 +28,12 @@ import org.apache.airavata.xbaya.ui.widgets.GridPanel;
 import org.apache.airavata.xbaya.ui.widgets.XBayaLabel;
 import org.apache.airavata.xbaya.ui.widgets.XBayaTextField;
 
-import javax.swing.*;
+import javax.swing.AbstractAction;
+import javax.swing.JButton;
+import javax.swing.JDialog;
+import javax.swing.JOptionPane;
+import javax.swing.JPanel;
 import java.awt.event.ActionEvent;
-import java.io.File;
-import java.io.IOException;
-import java.security.NoSuchAlgorithmException;
-import java.security.spec.InvalidKeySpecException;
 
 public class ChangeCredentialWindow {
     private XBayaEngine engine;
@@ -109,7 +104,7 @@ public class ChangeCredentialWindow {
 
             @Override
             public void actionPerformed(ActionEvent e) {
-                String accessID = ChangeCredentialWindow.this.accessKeyIDTextField.getText();
+               /* String accessID = ChangeCredentialWindow.this.accessKeyIDTextField.getText();
                 if (!"".equals(accessID)) {
                    String secretID = ChangeCredentialWindow.this.secretAccessKeyTextField.getText();
 
@@ -152,7 +147,7 @@ public class ChangeCredentialWindow {
                         hide();
                         return;
                     }
-                }
+                }*/
 
                JOptionPane.showMessageDialog(dialog.getDialog(),"SecretKey and AccessKey can not be empty!");
             }
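
With the validation block commented out above, the OK button's listener falls through to the error dialog on every click. The effective behavior after this change, restated from the hunk:

    @Override
    public void actionPerformed(ActionEvent e) {
        // The accessID/secretID validation now sits inside /* ... */,
        // so every click reaches this dialog:
        JOptionPane.showMessageDialog(dialog.getDialog(),
                "SecretKey and AccessKey can not be empty!");
    }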
