Author: aching
Date: Tue Apr 10 06:06:55 2012
New Revision: 1311583

URL: http://svn.apache.org/viewvc?rev=1311583&view=rev
Log:
GIRAPH-168: Simplify munge directive usage with new munge flag
HADOOP_SECURE (rather than HADOOP_FACEBOOK) and remove usage of
HADOOP (ekoontz via aching).
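
For readers unfamiliar with munge: the munge-maven-plugin does comment-based
conditional compilation, keeping or stripping code guarded by
/*if[SYMBOL] ... else[SYMBOL]*/ ... /*end[SYMBOL]*/ markers according to the
<symbols> list of whichever Maven profile is active. A minimal, hypothetical
sketch of the pattern this patch standardizes on (the class and method names
below are illustrative only, not part of the patch):

  // Hypothetical example; not part of this commit.
  public class MungeExample {
    /*if[HADOOP_NON_SECURE]
    // Compiled only when the active profile defines HADOOP_NON_SECURE
    // (e.g. hadoop_non_secure for Apache Hadoop 0.20.2).
    public String rpcMode() {
      return "non-SASL RPC, no job tokens";
    }
    else[HADOOP_NON_SECURE]*/
    // Default branch, compiled when HADOOP_NON_SECURE is undefined
    // (the secure Hadoop profiles).
    public String rpcMode() {
      return "SASL RPC with job tokens";
    }
    /*end[HADOOP_NON_SECURE]*/
  }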

Modified:
    incubator/giraph/trunk/CHANGELOG
    incubator/giraph/trunk/README
    incubator/giraph/trunk/pom.xml
    incubator/giraph/trunk/src/main/java/org/apache/giraph/bsp/ImmutableOutputCommitter.java
    incubator/giraph/trunk/src/main/java/org/apache/giraph/comm/BasicRPCCommunications.java
    incubator/giraph/trunk/src/main/java/org/apache/giraph/comm/CommunicationsInterface.java
    incubator/giraph/trunk/src/main/java/org/apache/giraph/comm/RPCCommunications.java
    incubator/giraph/trunk/src/test/java/org/apache/giraph/TestBspBasic.java

Modified: incubator/giraph/trunk/CHANGELOG
URL: http://svn.apache.org/viewvc/incubator/giraph/trunk/CHANGELOG?rev=1311583&r1=1311582&r2=1311583&view=diff
==============================================================================
--- incubator/giraph/trunk/CHANGELOG (original)
+++ incubator/giraph/trunk/CHANGELOG Tue Apr 10 06:06:55 2012
@@ -2,6 +2,10 @@ Giraph Change Log
 
 Release 0.2.0 - unreleased
 
+  GIRAPH-168: Simplify munge directive usage with new munge flag
+  HADOOP_SECURE (rather than HADOOP_FACEBOOK) and remove usage of
+  HADOOP (ekoontz via aching).
+
   GIRAPH-85: Simplify return expression in 
   RPCCommunications::getRPCProxy (Eli Reisman via jghoman)
 

Modified: incubator/giraph/trunk/README
URL: http://svn.apache.org/viewvc/incubator/giraph/trunk/README?rev=1311583&r1=1311582&r2=1311583&view=diff
==============================================================================
--- incubator/giraph/trunk/README (original)
+++ incubator/giraph/trunk/README Tue Apr 10 06:06:55 2012
@@ -35,17 +35,37 @@ automatically take over if the current a
 Hadoop versions for use with Giraph:
 
 Secure Hadoop versions:
-- Apache Hadoop 0.20.203, 0.20.204, other secure versions may work as well
--- Other versions reported working include:
----  Cloudera CDH3u0, CDH3u1
+
+- Apache Hadoop 0.20.203.0
+
+  This is the default version used by Giraph: if you do not specify a
+profile with the -P flag, maven will use this version. You may also
+explicitly specify it with "mvn -Phadoop_0.20.203 <goals>".
+
+- Apache Hadoop 0.23.1
+
+  You may tell maven to use this version with "mvn -Phadoop_0.23 <goals>".
+
+- Apache Hadoop 3.0.0-SNAPSHOT
+
+  You may tell maven to use this version with "mvn -Phadoop_trunk <goals>".
 
 Unsecure Hadoop versions:
+
 - Apache Hadoop 0.20.1, 0.20.2, 0.20.3
 
-Facebook Hadoop release (https://github.com/facebook/hadoop-20-warehouse):
-- GitHub master
+  You may tell maven to use 0.20.2 with "mvn -Phadoop_non_secure <goals>".
+
+- Facebook Hadoop releases: https://github.com/facebook/hadoop-20, Master branch
+
+  You may tell maven to use this version with:
+
+mvn -Phadoop_facebook -Dhadoop.jar.path=/path/to/repo/build/hadoop-0.20.1-dev-core.jar <goals>
+
+-- Other versions reported working include:
+---  Cloudera CDH3u0, CDH3u1
 
-While we provide support for the unsecure and Facebook versions of Hadoop
+While we provide support for unsecure and Facebook versions of Hadoop
 with the maven profiles 'hadoop_non_secure' and 'hadoop_facebook',
 respectively, we have been primarily focusing on secure Hadoop releases
 at this time.
@@ -67,14 +87,15 @@ Use the maven commands with secure Hadoo
 - test (i.e. mvn test)
 
 For the non-secure versions of Hadoop, run the maven commands with the
-additional argument '-Dhadoop=non_secure' to enable the maven profile
-'hadoop_non_secure'.  An example compilation command is
-'mvn -Dhadoop=non_secure compile'.
+additional argument '-Dhadoop=non_secure' or '-Phadoop_non_secure' to enable
+ the maven profile 'hadoop_non_secure'.  Example compilation commands are
+'mvn -Dhadoop=non_secure compile' and 'mvn -Phadoop_non_secure compile'.
 
 For the Facebook Hadoop release, run the maven commands with the
-additional arguments '-Dhadoop=facebook' to enable the maven profile
-'hadoop_facebook' as well as a location for the hadoop core jar file.  An
-example compilation command is 'mvn -Dhadoop=facebook
+additional arguments '-Dhadoop=facebook' or '-Phadoop_facebook' to enable 
+the maven profile 'hadoop_facebook' as well as a location for the hadoop
+core jar file.  Example compilation commands are 'mvn -Dhadoop=facebook
+-Dhadoop.jar.path=/tmp/hadoop-0.20.1-core.jar compile' or 'mvn -Phadoop_facebook
 -Dhadoop.jar.path=/tmp/hadoop-0.20.1-core.jar compile'.
 
 

Modified: incubator/giraph/trunk/pom.xml
URL: http://svn.apache.org/viewvc/incubator/giraph/trunk/pom.xml?rev=1311583&r1=1311582&r2=1311583&view=diff
==============================================================================
--- incubator/giraph/trunk/pom.xml (original)
+++ incubator/giraph/trunk/pom.xml Tue Apr 10 06:06:55 2012
@@ -455,6 +455,42 @@ under the License.
 
   <profiles>
     <profile>
+      <id>hadoop_0.20.203</id>
+      <activation>
+        <activeByDefault>true</activeByDefault>
+      </activation>
+      <dependencies>
+        <dependency>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-core</artifactId>
+          <version>${hadoop.version}</version>
+         <scope>provided</scope>
+        </dependency>
+      </dependencies>
+
+      <build>
+        <plugins>
+          <plugin>
+            <groupId>org.sonatype.plugins</groupId>
+            <artifactId>munge-maven-plugin</artifactId>
+            <version>${munge-maven-plugin.version}</version>
+            <executions>
+              <execution>
+                <id>munge</id>
+                <phase>generate-sources</phase>
+                <goals>
+                  <goal>munge</goal>
+                </goals>
+              </execution>
+            </executions>
+            <configuration>
+              <symbols>HADOOP_NON_SASL_RPC,HADOOP_NON_INTERVERSIONED_RPC</symbols>
+            </configuration>
+          </plugin>
+        </plugins>
+      </build>
+    </profile>
+    <profile>
       <id>hadoop_non_secure</id>
        <activation>
         <property>
@@ -465,15 +501,15 @@ under the License.
       <properties>
         <hadoop.version>0.20.2</hadoop.version>
       </properties>
+      <dependencies>
+        <dependency>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-core</artifactId>
+          <version>${hadoop.version}</version>
+         <scope>provided</scope>
+        </dependency>
+      </dependencies>
       <build>
-        <resources>
-          <resource>
-            <directory>src/main/java/org/apache/giraph/hadoop</directory>
-            <excludes>
-              <exclude>BspTokenSelector.java</exclude>
-            </excludes>
-          </resource>
-        </resources>
         <plugins>
           <plugin>
             <groupId>org.sonatype.plugins</groupId>
@@ -489,7 +525,7 @@ under the License.
               </execution>
             </executions>
             <configuration>
-              <symbols>HADOOP_NON_SECURE</symbols>
+              <symbols>HADOOP_NON_SECURE,HADOOP_NON_SASL_RPC,HADOOP_NON_INTERVERSIONED_RPC</symbols>
             </configuration>
           </plugin>
           <plugin>
@@ -502,22 +538,8 @@ under the License.
               </excludes>
               <source>${compileSource}</source>
               <target>${compileSource}</target>
-              <showWarnings>true</showWarnings>
             </configuration>
           </plugin>
-         <plugin>
-           <groupId>org.apache.maven.plugins</groupId>
-           <artifactId>maven-surefire-plugin</artifactId>
-           <version>2.6</version>
-           <configuration>
-             <systemProperties>
-               <property>
-                 <name>prop.jarLocation</name>
-                 <value>target/munged/giraph-${project.version}-jar-with-dependencies.jar</value>
-               </property>
-             </systemProperties>
-           </configuration>
-         </plugin>
         </plugins>
       </build>
     </profile>
@@ -538,6 +560,18 @@ under the License.
           <scope>system</scope>
           <systemPath>${hadoop.jar.path}</systemPath>
         </dependency>
+        <dependency>
+          <groupId>commons-logging</groupId>
+          <artifactId>commons-logging-api</artifactId>
+          <version>1.0.4</version>
+         <scope>runtime</scope>
+        </dependency>
+        <dependency>
+          <groupId>commons-httpclient</groupId>
+          <artifactId>commons-httpclient</artifactId>
+          <version>3.0.1</version>
+         <scope>runtime</scope>
+        </dependency>
       </dependencies>
       <build>
         <resources>
@@ -550,6 +584,18 @@ under the License.
         </resources>
         <plugins>
           <plugin>
+            <groupId>org.apache.maven.plugins</groupId>
+            <artifactId>maven-compiler-plugin</artifactId>
+            <version>${maven-compiler-plugin.version}</version>
+            <configuration>
+              <excludes>
+                <exclude>**/BspTokenSelector.java</exclude>
+              </excludes>
+              <source>${compileSource}</source>
+              <target>${compileSource}</target>
+            </configuration>
+          </plugin>
+          <plugin>
             <groupId>org.sonatype.plugins</groupId>
             <artifactId>munge-maven-plugin</artifactId>
             <version>${munge-maven-plugin.version}</version>
@@ -563,24 +609,107 @@ under the License.
               </execution>
             </executions>
             <configuration>
-              <symbols>HADOOP_FACEBOOK</symbols>
-            </configuration>
-          </plugin>
-          <plugin>
-            <groupId>org.apache.maven.plugins</groupId>
-            <artifactId>maven-compiler-plugin</artifactId>
-            <version>${maven-compiler-plugin.version}</version>
-            <configuration>
-              <excludes>
-                <exclude>**/BspTokenSelector.java</exclude>
-              </excludes>
-              <source>${compileSource}</source>
-              <target>${compileSource}</target>
+              <symbols>HADOOP_NON_SECURE,HADOOP_NON_SASL_RPC</symbols>
             </configuration>
           </plugin>
+         <plugin>
+           <groupId>org.apache.maven.plugins</groupId>
+           <artifactId>maven-surefire-plugin</artifactId>
+           <version>2.6</version>
+           <configuration>
+             <systemProperties>
+               <property>
+                 <name>prop.jarLocation</name>
+                 <value>../target/giraph-${project.version}-jar-with-dependencies.jar</value>
+               </property>
+             </systemProperties>
+           </configuration>
+         </plugin>
         </plugins>
       </build>
     </profile>
+
+    <profile>
+      <id>hadoop_0.23</id>
+       <activation>
+        <property>
+          <name>hadoop</name>
+          <value>0.23</value>
+        </property>
+      </activation>
+      <properties>
+        <hadoop.version>0.23.1</hadoop.version>
+      </properties>
+      <dependencies>
+       <dependency>
+         <groupId>org.apache.hadoop</groupId>
+         <artifactId>hadoop-common</artifactId>
+         <version>${hadoop.version}</version>
+         <scope>provided</scope>
+       </dependency>
+       <dependency>
+         <groupId>org.apache.hadoop</groupId>
+         <artifactId>hadoop-mapreduce-client-core</artifactId>
+         <version>${hadoop.version}</version>
+         <scope>provided</scope>
+       </dependency>
+       <dependency>
+         <groupId>org.apache.hadoop</groupId>
+         <artifactId>hadoop-mapreduce-client-common</artifactId>
+         <version>${hadoop.version}</version>
+       </dependency>
+      </dependencies>
+    </profile>
+    <profile>
+      <id>hadoop_trunk</id>
+       <activation>
+        <property>
+          <name>hadoop</name>
+          <value>trunk</value>
+        </property>
+      </activation>
+      <properties>
+        <hadoop.version>3.0.0-SNAPSHOT</hadoop.version>
+      </properties>
+      <dependencies>
+       <dependency>
+         <groupId>org.apache.hadoop</groupId>
+         <artifactId>hadoop-common</artifactId>
+         <version>${hadoop.version}</version>
+         <scope>provided</scope>
+       </dependency>
+       <dependency>
+         <groupId>org.apache.hadoop</groupId>
+         <artifactId>hadoop-mapreduce-client-core</artifactId>
+         <version>${hadoop.version}</version>
+         <scope>provided</scope>
+       </dependency>
+       <dependency>
+         <groupId>org.apache.hadoop</groupId>
+         <artifactId>hadoop-mapreduce-client-common</artifactId>
+         <version>${hadoop.version}</version>
+         <scope>provided</scope>
+       </dependency>
+       <dependency>
+         <groupId>org.apache.hadoop</groupId>
+         <artifactId>hadoop-auth</artifactId>
+         <version>${hadoop.version}</version>
+         <scope>provided</scope>
+       </dependency>
+       <dependency>
+         <groupId>commons-configuration</groupId>
+         <artifactId>commons-configuration</artifactId>
+         <version>1.6</version>
+         <scope>runtime</scope>
+       </dependency>
+        <dependency>
+          <groupId>commons-httpclient</groupId>
+          <artifactId>commons-httpclient</artifactId>
+          <version>3.0.1</version>
+         <scope>runtime</scope>
+        </dependency>
+     </dependencies>
+    </profile>
   </profiles>
 
   <dependencies>
@@ -591,12 +720,6 @@ under the License.
       <scope>test</scope>
     </dependency>
     <dependency>
-      <groupId>org.apache.hadoop</groupId>
-      <artifactId>hadoop-core</artifactId>
-      <version>${hadoop.version}</version>
-      <scope>provided</scope>
-    </dependency>
-    <dependency>
       <groupId>org.codehaus.jackson</groupId>
       <artifactId>jackson-core-asl</artifactId>
       <version>${jackson.version}</version>
@@ -661,5 +784,5 @@ under the License.
       <version>1.8.5</version>
       <scope>test</scope>
     </dependency>
-  </dependencies>
+ </dependencies>
 </project>

Modified: incubator/giraph/trunk/src/main/java/org/apache/giraph/bsp/ImmutableOutputCommitter.java
URL: http://svn.apache.org/viewvc/incubator/giraph/trunk/src/main/java/org/apache/giraph/bsp/ImmutableOutputCommitter.java?rev=1311583&r1=1311582&r2=1311583&view=diff
==============================================================================
--- incubator/giraph/trunk/src/main/java/org/apache/giraph/bsp/ImmutableOutputCommitter.java (original)
+++ incubator/giraph/trunk/src/main/java/org/apache/giraph/bsp/ImmutableOutputCommitter.java Tue Apr 10 06:06:55 2012
@@ -52,12 +52,11 @@ public class ImmutableOutputCommitter ex
   public void setupTask(TaskAttemptContext context) throws IOException {
   }
 
-  /*if[HADOOP_NON_SECURE]
-    @Override
-    public void cleanupJob(JobContext jobContext)  throws IOException {
-    }
-    else[HADOOP_NON_SECURE]*/
   @Override
+  /*if[HADOOP_NON_SECURE]
+  public void cleanupJob(JobContext jobContext) throws IOException {
+  }
+  else[HADOOP_NON_SECURE]*/
   /*end[HADOOP_NON_SECURE]*/
   public void commitJob(JobContext jobContext) throws IOException {
   }
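
For reference, the block above is a munge directive, so the method that is
actually compiled depends on the active profile. Roughly (an assumption based
on standard munge behavior, not generated output):

  // hadoop_non_secure profile (HADOOP_NON_SECURE defined):
  @Override
  public void cleanupJob(JobContext jobContext) throws IOException {
  }

  // secure profiles (HADOOP_NON_SECURE undefined):
  @Override
  public void commitJob(JobContext jobContext) throws IOException {
  }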

Modified: incubator/giraph/trunk/src/main/java/org/apache/giraph/comm/BasicRPCCommunications.java
URL: http://svn.apache.org/viewvc/incubator/giraph/trunk/src/main/java/org/apache/giraph/comm/BasicRPCCommunications.java?rev=1311583&r1=1311582&r2=1311583&view=diff
==============================================================================
--- incubator/giraph/trunk/src/main/java/org/apache/giraph/comm/BasicRPCCommunications.java (original)
+++ incubator/giraph/trunk/src/main/java/org/apache/giraph/comm/BasicRPCCommunications.java Tue Apr 10 06:06:55 2012
@@ -61,9 +61,10 @@ import org.apache.giraph.utils.MemoryUti
 
 import com.google.common.collect.Iterables;
 
-/*if[HADOOP_FACEBOOK]
+/*if[HADOOP_NON_INTERVERSIONED_RPC]
+else[HADOOP_NON_INTERVERSIONED_RPC]*/
 import org.apache.hadoop.ipc.ProtocolSignature;
-end[HADOOP_FACEBOOK]*/
+/*end[HADOOP_NON_INTERVERSIONED_RPC]*/
 
 /**
  * Basic RPC communications object that implements the lower level operations
@@ -693,14 +694,24 @@ public abstract class BasicRPCCommunicat
     return VERSION_ID;
   }
 
-  /*if[HADOOP_FACEBOOK]
-    public ProtocolSignature getProtocolSignature(
-            String protocol,
-            long clientVersion,
-            int clientMethodsHash) throws IOException {
-        return new ProtocolSignature(versionID, null);
-    }
-end[HADOOP_FACEBOOK]*/
+  /*if[HADOOP_NON_INTERVERSIONED_RPC]
+  else[HADOOP_NON_INTERVERSIONED_RPC]*/
+  /**
+   * Get the Protocol Signature for the given protocol,
+   * client version and method.
+   *
+   * @param protocol Protocol.
+   * @param clientVersion Version of Client.
+   * @param clientMethodsHash Hash of Client methods.
+   * @return ProtocolSignature for input parameters.
+   */
+  public ProtocolSignature getProtocolSignature(
+    String protocol,
+    long clientVersion,
+    int clientMethodsHash) throws IOException {
+    return new ProtocolSignature(VERSION_ID, null);
+  }
+  /*end[HADOOP_NON_INTERVERSIONED_RPC]*/
 
   @Override
   public void closeConnections() throws IOException {

Modified: incubator/giraph/trunk/src/main/java/org/apache/giraph/comm/CommunicationsInterface.java
URL: http://svn.apache.org/viewvc/incubator/giraph/trunk/src/main/java/org/apache/giraph/comm/CommunicationsInterface.java?rev=1311583&r1=1311582&r2=1311583&view=diff
==============================================================================
--- incubator/giraph/trunk/src/main/java/org/apache/giraph/comm/CommunicationsInterface.java (original)
+++ incubator/giraph/trunk/src/main/java/org/apache/giraph/comm/CommunicationsInterface.java Tue Apr 10 06:06:55 2012
@@ -22,11 +22,11 @@ import java.io.IOException;
 
 import org.apache.giraph.graph.Edge;
 import org.apache.giraph.graph.BasicVertex;
-/*if_not[HADOOP]
- else[HADOOP]*/
+/*if[HADOOP_NON_SECURE]
+ else[HADOOP_NON_SECURE]*/
 import org.apache.giraph.hadoop.BspTokenSelector;
 import org.apache.hadoop.security.token.TokenInfo;
-/*end[HADOOP]*/
+/*end[HADOOP_NON_SECURE]*/
 import org.apache.hadoop.io.Writable;
 import org.apache.hadoop.io.WritableComparable;
 import org.apache.hadoop.ipc.VersionedProtocol;
@@ -40,10 +40,10 @@ import org.apache.hadoop.ipc.VersionedPr
  * @param <M> Message data
  */
 @SuppressWarnings("rawtypes")
-/*if_not[HADOOP]
- else[HADOOP]*/
+/*if[HADOOP_NON_SECURE]
+ else[HADOOP_NON_SECURE]*/
 @TokenInfo(BspTokenSelector.class)
-/*end[HADOOP]*/
+/*end[HADOOP_NON_SECURE]*/
 public interface CommunicationsInterface<I extends WritableComparable,
     V extends Writable, E extends Writable, M extends Writable>
     extends VersionedProtocol {

Modified: incubator/giraph/trunk/src/main/java/org/apache/giraph/comm/RPCCommunications.java
URL: http://svn.apache.org/viewvc/incubator/giraph/trunk/src/main/java/org/apache/giraph/comm/RPCCommunications.java?rev=1311583&r1=1311582&r2=1311583&view=diff
==============================================================================
--- incubator/giraph/trunk/src/main/java/org/apache/giraph/comm/RPCCommunications.java (original)
+++ incubator/giraph/trunk/src/main/java/org/apache/giraph/comm/RPCCommunications.java Tue Apr 10 06:06:55 2012
@@ -22,9 +22,10 @@ import java.io.IOException;
 
 import java.net.InetSocketAddress;
 
-/*if_not[HADOOP]
-else[HADOOP]*/
+/*if[HADOOP_NON_SECURE]
+else[HADOOP_NON_SECURE]*/
 import java.security.PrivilegedExceptionAction;
+import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.mapreduce.security.TokenCache;
 import org.apache.hadoop.mapreduce.security.token.JobTokenIdentifier;
 import org.apache.hadoop.mapreduce.security.token.JobTokenSecretManager;
@@ -32,15 +33,21 @@ import org.apache.hadoop.security.Creden
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.security.authorize.ServiceAuthorizationManager;
 import org.apache.hadoop.security.token.Token;
-/*end[HADOOP]*/
+/*end[HADOOP_NON_SECURE]*/
 
 import org.apache.log4j.Logger;
 
 import org.apache.giraph.bsp.CentralizedServiceWorker;
 import org.apache.giraph.graph.GraphState;
+/*if[HADOOP_NON_SECURE]
+else[HADOOP_NON_SECURE]*/
 import org.apache.giraph.hadoop.BspPolicyProvider;
+/*end[HADOOP_NON_SECURE]*/
 import org.apache.hadoop.conf.Configuration;
+/*if[HADOOP_NON_SECURE]
+else[HADOOP_NON_SECURE]*/
 import org.apache.hadoop.io.Text;
+/*end[HADOOP_NON_SECURE]*/
 import org.apache.hadoop.io.Writable;
 import org.apache.hadoop.io.WritableComparable;
 import org.apache.hadoop.ipc.RPC;
@@ -58,11 +65,11 @@ import org.apache.hadoop.mapreduce.Mappe
 @SuppressWarnings("rawtypes")
 public class RPCCommunications<I extends WritableComparable,
     V extends Writable, E extends Writable, M extends Writable>
-  /*if_not[HADOOP]
+  /*if[HADOOP_NON_SASL_RPC]
     extends BasicRPCCommunications<I, V, E, M, Object> {
-    else[HADOOP]*/
+    else[HADOOP_NON_SASL_RPC]*/
     extends BasicRPCCommunications<I, V, E, M, Token<JobTokenIdentifier>> {
-  /*end[HADOOP]*/
+  /*end[HADOOP_NON_SASL_RPC]*/
 
   /** Class logger */
   public static final Logger LOG = Logger.getLogger(RPCCommunications.class);
@@ -84,27 +91,30 @@ public class RPCCommunications<I extends
     super(context, service);
   }
 
-  /*if_not[HADOOP]
-    protected Object createJobToken() throws IOException {
-        return null;
-    }
-    else[HADOOP]*/
   /**
-   * Create the job token.
-   *
-   * @return Job token.
-   */
-  protected Token<JobTokenIdentifier> createJobToken() throws IOException {
+    * Create the job token.
+    *
+    * @return Job token.
+    */
+  protected
+  /*if[HADOOP_NON_SECURE]
+  Object createJobToken() throws IOException {
+  else[HADOOP_NON_SECURE]*/
+  Token<JobTokenIdentifier> createJobToken() throws IOException {
+  /*end[HADOOP_NON_SECURE]*/
+  /*if[HADOOP_NON_SECURE]
+  else[HADOOP_NON_SECURE]*/
     String localJobTokenFile = System.getenv().get(
         UserGroupInformation.HADOOP_TOKEN_FILE_LOCATION);
     if (localJobTokenFile != null) {
+      JobConf jobConf = new JobConf(conf);
       Credentials credentials =
-          TokenCache.loadTokens(localJobTokenFile, conf);
+          TokenCache.loadTokens(localJobTokenFile, jobConf);
       return TokenCache.getJobToken(credentials);
     }
+  /*end[HADOOP_NON_SECURE]*/
     return null;
   }
-  /*end[HADOOP]*/
 
   /**
    * Get the RPC server.
@@ -115,23 +125,16 @@ public class RPCCommunications<I extends
    * @param jt Jobtoken indentifier.
    * @return RPC server.
    */
+  @Override
   protected Server getRPCServer(
       InetSocketAddress myAddress, int numHandlers, String jobId,
-      /*if_not[HADOOP]
-            Object jt) throws IOException {
-        return RPC.getServer(this, myAddress.getHostName(), myAddress.getPort(),
-            numHandlers, false, conf);
-    }
-      else[HADOOP]*/
+      /*if[HADOOP_NON_SASL_RPC]
+      Object jt) throws IOException {
+    return RPC.getServer(this, myAddress.getHostName(), myAddress.getPort(),
+        numHandlers, false, conf);
+    else[HADOOP_NON_SASL_RPC]*/
       Token<JobTokenIdentifier> jt) throws IOException {
     @SuppressWarnings("deprecation")
-    String hadoopSecurityAuthorization =
-      ServiceAuthorizationManager.SERVICE_AUTHORIZATION_CONFIG;
-    if (conf.getBoolean(
-        hadoopSecurityAuthorization,
-        false)) {
-      ServiceAuthorizationManager.refresh(conf, new BspPolicyProvider());
-    }
     JobTokenSecretManager jobTokenSecretManager =
         new JobTokenSecretManager();
     if (jt != null) { //could be null in the case of some unit tests
@@ -140,10 +143,18 @@ public class RPCCommunications<I extends
         LOG.info("getRPCServer: Added jobToken " + jt);
       }
     }
-    return RPC.getServer(this, myAddress.getHostName(), myAddress.getPort(),
-        numHandlers, false, conf, jobTokenSecretManager);
+    Server server = RPC.getServer(RPCCommunications.class, this,
+      myAddress.getHostName(), myAddress.getPort(),
+      numHandlers, false, conf, jobTokenSecretManager);
+    String hadoopSecurityAuthorization =
+      ServiceAuthorizationManager.SERVICE_AUTHORIZATION_CONFIG;
+    if (conf.getBoolean(hadoopSecurityAuthorization, false)) {
+      server.refreshServiceAcl(conf, new BspPolicyProvider());
+    }
+    return server;
+    /*end[HADOOP_NON_SASL_RPC]*/
   }
-  /*end[HADOOP]*/
+
 
   /**
    * Get the RPC proxy.
@@ -157,17 +168,17 @@ public class RPCCommunications<I extends
   protected CommunicationsInterface<I, V, E, M> getRPCProxy(
     final InetSocketAddress addr,
     String jobId,
-    /*if_not[HADOOP]
+    /*if[HADOOP_NON_SASL_RPC]
     Object jt)
-      else[HADOOP]*/
+      else[HADOOP_NON_SASL_RPC]*/
     Token<JobTokenIdentifier> jt)
-    /*end[HADOOP]*/
+    /*end[HADOOP_NON_SASL_RPC]*/
     throws IOException, InterruptedException {
     final Configuration config = new Configuration(conf);
-    /*if_not[HADOOP]
+    /*if[HADOOP_NON_SASL_RPC]
         return (CommunicationsInterface<I, V, E, M>)RPC.getProxy(
                  CommunicationsInterface.class, VERSION_ID, addr, config);
-      else[HADOOP]*/
+      else[HADOOP_NON_SASL_RPC]*/
     if (jt == null) {
       return (CommunicationsInterface<I, V, E, M>) RPC.getProxy(
           CommunicationsInterface.class, VERSION_ID, addr, config);
@@ -190,6 +201,6 @@ public class RPCCommunications<I extends
             CommunicationsInterface.class, VERSION_ID, addr, config);
         }
       });
-    /*end[HADOOP]*/
+    /*end[HADOOP_NON_SASL_RPC]*/
   }
 }

Modified: incubator/giraph/trunk/src/test/java/org/apache/giraph/TestBspBasic.java
URL: http://svn.apache.org/viewvc/incubator/giraph/trunk/src/test/java/org/apache/giraph/TestBspBasic.java?rev=1311583&r1=1311582&r2=1311583&view=diff
==============================================================================
--- incubator/giraph/trunk/src/test/java/org/apache/giraph/TestBspBasic.java (original)
+++ incubator/giraph/trunk/src/test/java/org/apache/giraph/TestBspBasic.java Tue Apr 10 06:06:55 2012
@@ -52,7 +52,10 @@ import org.apache.hadoop.io.Writable;
 import org.apache.hadoop.mapreduce.InputSplit;
 import org.apache.hadoop.mapreduce.JobContext;
 import org.apache.hadoop.mapreduce.JobID;
-
+/*if[HADOOP_NON_SASL_RPC]
+else[HADOOP_NON_SASL_RPC]*/
+import org.apache.hadoop.mapreduce.task.JobContextImpl;
+/*end[HADOOP_NON_SASL_RPC]*/
 import java.io.ByteArrayOutputStream;
 import java.io.DataOutputStream;
 import java.io.IOException;
@@ -112,9 +115,15 @@ public class TestBspBasic extends BspCas
         ", graphState" + gs);
     VertexInputFormat<LongWritable, IntWritable, FloatWritable, IntWritable>
     inputFormat = BspUtils.createVertexInputFormat(job.getConfiguration());
-    List<InputSplit> splitArray =
-        inputFormat.getSplits(
-            new JobContext(new Configuration(), new JobID()), 1);
+    /*if[HADOOP_NON_SASL_RPC]
+      List<InputSplit> splitArray =
+          inputFormat.getSplits(
+              new JobContext(new Configuration(), new JobID()), 1);
+    else[HADOOP_NON_SASL_RPC]*/
+      List<InputSplit> splitArray =
+          inputFormat.getSplits(
+              new JobContextImpl(new Configuration(), new JobID()), 1);
+      /*end[HADOOP_NON_SASL_RPC]*/
     ByteArrayOutputStream byteArrayOutputStream =
         new ByteArrayOutputStream();
     DataOutputStream outputStream =

