HDFS-12384. Fixing compilation issue with BanDuplicateClasses. Contributed by Inigo Goiri.
(cherry picked from commit fabe02c8fafa807198054da0c02b2ebaafda76aa)
(cherry picked from commit cc58e7a983d8f1351089462f531993f7b4f0a9c5)

Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/c778f9dd
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/c778f9dd
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/c778f9dd

Branch: refs/heads/branch-2
Commit: c778f9dd3739829333679f29bdf0147823af4aea
Parents: b71a753
Author: Inigo Goiri <inigo...@apache.org>
Authored: Thu Sep 7 13:53:08 2017 -0700
Committer: vrushali <vrush...@apache.org>
Committed: Fri Oct 20 11:22:32 2017 -0700

----------------------------------------------------------------------
 hadoop-hdfs-project/hadoop-hdfs/pom.xml            |  4 ----
 .../server/federation/router/RouterRpcServer.java | 15 +++++++++++++++
 2 files changed, 15 insertions(+), 4 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hadoop/blob/c778f9dd/hadoop-hdfs-project/hadoop-hdfs/pom.xml
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/pom.xml b/hadoop-hdfs-project/hadoop-hdfs/pom.xml
index 64ad6fb..8ae3db8 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/pom.xml
+++ b/hadoop-hdfs-project/hadoop-hdfs/pom.xml
@@ -222,10 +222,6 @@ http://maven.apache.org/xsd/maven-4.0.0.xsd">
     </dependency>
     <dependency>
       <groupId>org.apache.curator</groupId>
-      <artifactId>curator-framework</artifactId>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.curator</groupId>
       <artifactId>curator-test</artifactId>
       <scope>test</scope>
     </dependency>

http://git-wip-us.apache.org/repos/asf/hadoop/blob/c778f9dd/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/federation/router/RouterRpcServer.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/federation/router/RouterRpcServer.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/federation/router/RouterRpcServer.java
index c77d255..f9b4a5d 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/federation/router/RouterRpcServer.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/federation/router/RouterRpcServer.java
@@ -81,6 +81,7 @@ import org.apache.hadoop.hdfs.protocol.EncryptionZone;
 import org.apache.hadoop.hdfs.protocol.ErasureCodingPolicy;
 import org.apache.hadoop.hdfs.protocol.ExtendedBlock;
 import org.apache.hadoop.hdfs.protocol.HdfsConstants.DatanodeReportType;
+import org.apache.hadoop.hdfs.protocol.HdfsConstants.ReencryptAction;
 import org.apache.hadoop.hdfs.protocol.HdfsConstants.RollingUpgradeAction;
 import org.apache.hadoop.hdfs.protocol.HdfsConstants.SafeModeAction;
 import org.apache.hadoop.hdfs.protocol.HdfsFileStatus;
@@ -91,6 +92,7 @@ import org.apache.hadoop.hdfs.protocol.OpenFileEntry;
 import org.apache.hadoop.hdfs.protocol.RollingUpgradeInfo;
 import org.apache.hadoop.hdfs.protocol.SnapshotDiffReport;
 import org.apache.hadoop.hdfs.protocol.SnapshottableDirectoryStatus;
+import org.apache.hadoop.hdfs.protocol.ZoneReencryptionStatus;
 import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.ClientNamenodeProtocol;
 import org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolPB;
 import org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB;
@@ -1607,6 +1609,19 @@ public class RouterRpcServer extends AbstractService
     implements ClientProtocol {
   }
   @Override // ClientProtocol
+  public void reencryptEncryptionZone(String zone, ReencryptAction action)
+      throws IOException {
+    checkOperation(OperationCategory.WRITE, false);
+  }
+
+  @Override // ClientProtocol
+  public BatchedEntries<ZoneReencryptionStatus> listReencryptionStatus(
+      long prevId) throws IOException {
+    checkOperation(OperationCategory.READ, false);
+    return null;
+  }
+
+  @Override // ClientProtocol
   public void setXAttr(String src, XAttr xAttr, EnumSet<XAttrSetFlag> flag)
       throws IOException {
     checkOperation(OperationCategory.WRITE);
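
For context on the backport: the two new methods follow the stub pattern RouterRpcServer uses for ClientProtocol operations the Router does not implement yet, calling the two-argument checkOperation overload with supported=false. Below is a minimal sketch of that pattern, assuming the overload rejects unsupported operations by throwing locally; the exception type and message are assumptions for illustration, not part of this change, and OperationCategory/StandbyException come from the surrounding class.

  // Sketch of the stub pattern used above (class fragment, not the
  // verbatim Hadoop source): the two-argument overload runs the normal
  // check and then rejects operations the Router does not implement.
  private void checkOperation(OperationCategory op, boolean supported)
      throws StandbyException {
    // Usual per-operation check performed by every ClientProtocol method
    // in this class (one-argument overload).
    checkOperation(op);
    if (!supported) {
      // Assumed failure mode: reject the call locally instead of
      // forwarding it to a namenode. With this behavior the
      // "return null" in listReencryptionStatus above is never reached;
      // it only satisfies the compiler.
      throw new UnsupportedOperationException(
          "Operation not supported by the Router");
    }
  }

Under that assumption, a client invoking reencryptEncryptionZone or listReencryptionStatus through the Router gets an immediate rejection, while the stubs keep the class compiling against the ClientProtocol methods added by the re-encryption work (HDFS-10899 and related changes that introduced ReencryptAction and ZoneReencryptionStatus).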