http://git-wip-us.apache.org/repos/asf/hadoop/blob/651a05a1/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/container/ozoneimpl/OzoneContainer.java ---------------------------------------------------------------------- diff --git a/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/container/ozoneimpl/OzoneContainer.java b/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/container/ozoneimpl/OzoneContainer.java deleted file mode 100644 index 0ef9406..0000000 --- a/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/container/ozoneimpl/OzoneContainer.java +++ /dev/null @@ -1,270 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with this - * work for additional information regarding copyright ownership. The ASF - * licenses this file to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * <p> - * http://www.apache.org/licenses/LICENSE-2.0 - * <p> - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT - * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the - * License for the specific language governing permissions and limitations under - * the License. - */ - -package org.apache.hadoop.ozone.container.ozoneimpl; - -import com.google.common.annotations.VisibleForTesting; -import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.hdfs.server.datanode.StorageLocation; -import org.apache.hadoop.hdsl.protocol.DatanodeDetails; -import org.apache.hadoop.ozone.OzoneConfigKeys; -import org.apache.hadoop.ozone.container.common.helpers.ContainerData; -import org.apache.hadoop.ozone.container.common.impl.ChunkManagerImpl; -import org.apache.hadoop.ozone.container.common.impl.ContainerManagerImpl; -import org.apache.hadoop.ozone.container.common.impl.Dispatcher; -import org.apache.hadoop.ozone.container.common.impl.KeyManagerImpl; -import org.apache.hadoop.ozone.container.common.interfaces.ChunkManager; -import org.apache.hadoop.ozone.container.common.interfaces.ContainerDispatcher; -import org.apache.hadoop.ozone.container.common.interfaces.ContainerManager; -import org.apache.hadoop.ozone.container.common.interfaces.KeyManager; -import org.apache.hadoop.ozone.container.common.statemachine.background.BlockDeletingService; -import org.apache.hadoop.ozone.container.common.transport.server.XceiverServer; -import org.apache.hadoop.ozone.container.common.transport.server.ratis.XceiverServerRatis; - -import org.apache.hadoop.hdsl.protocol.proto.HdslProtos; -import org.apache.hadoop.hdsl.protocol.proto.StorageContainerDatanodeProtocolProtos.ContainerReportsRequestProto; -import org.apache.hadoop.hdsl.protocol.proto.StorageContainerDatanodeProtocolProtos.ReportState; -import org.apache.hadoop.hdsl.protocol.proto.StorageContainerDatanodeProtocolProtos.SCMNodeReport; -import org.apache.hadoop.ozone.container.common.transport.server.XceiverServerSpi; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.io.IOException; -import java.nio.file.Paths; -import java.util.LinkedList; -import java.util.List; -import java.util.concurrent.TimeUnit; - -import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_DATANODE_DATA_DIR_KEY; -import static 
org.apache.hadoop.ozone.OzoneConsts.CONTAINER_ROOT_PREFIX; -import static org.apache.hadoop.ozone.OzoneConfigKeys - .OZONE_BLOCK_DELETING_SERVICE_INTERVAL; -import static org.apache.hadoop.ozone.OzoneConfigKeys - .OZONE_BLOCK_DELETING_SERVICE_INTERVAL_DEFAULT; -import static org.apache.hadoop.ozone.OzoneConsts.INVALID_PORT; -import static org.apache.hadoop.ozone.OzoneConfigKeys.OZONE_BLOCK_DELETING_SERVICE_TIMEOUT; -import static org.apache.hadoop.ozone.OzoneConfigKeys.OZONE_BLOCK_DELETING_SERVICE_TIMEOUT_DEFAULT; - -/** - * Ozone main class sets up the network server and initializes the container - * layer. - */ -public class OzoneContainer { - private static final Logger LOG = - LoggerFactory.getLogger(OzoneContainer.class); - - private final Configuration ozoneConfig; - private final ContainerDispatcher dispatcher; - private final ContainerManager manager; - private final XceiverServerSpi[] server; - private final ChunkManager chunkManager; - private final KeyManager keyManager; - private final BlockDeletingService blockDeletingService; - - /** - * Creates a network endpoint and enables Ozone container. - * - * @param ozoneConfig - Config - * @throws IOException - */ - public OzoneContainer( - DatanodeDetails datanodeDetails, Configuration ozoneConfig) - throws IOException { - this.ozoneConfig = ozoneConfig; - List<StorageLocation> locations = new LinkedList<>(); - String[] paths = ozoneConfig.getStrings( - OzoneConfigKeys.OZONE_METADATA_DIRS); - if (paths != null && paths.length > 0) { - for (String p : paths) { - locations.add(StorageLocation.parse( - Paths.get(p).resolve(CONTAINER_ROOT_PREFIX).toString())); - } - } else { - getDataDir(locations); - } - - manager = new ContainerManagerImpl(); - manager.init(this.ozoneConfig, locations, datanodeDetails); - this.chunkManager = new ChunkManagerImpl(manager); - manager.setChunkManager(this.chunkManager); - - this.keyManager = new KeyManagerImpl(manager, ozoneConfig); - manager.setKeyManager(this.keyManager); - - long svcInterval = - ozoneConfig.getTimeDuration(OZONE_BLOCK_DELETING_SERVICE_INTERVAL, - OZONE_BLOCK_DELETING_SERVICE_INTERVAL_DEFAULT, TimeUnit.MILLISECONDS); - long serviceTimeout = ozoneConfig.getTimeDuration( - OZONE_BLOCK_DELETING_SERVICE_TIMEOUT, - OZONE_BLOCK_DELETING_SERVICE_TIMEOUT_DEFAULT, TimeUnit.MILLISECONDS); - this.blockDeletingService = new BlockDeletingService(manager, - svcInterval, serviceTimeout, ozoneConfig); - - this.dispatcher = new Dispatcher(manager, this.ozoneConfig); - - server = new XceiverServerSpi[]{ - new XceiverServer(datanodeDetails, this.ozoneConfig, this.dispatcher), - XceiverServerRatis - .newXceiverServerRatis(datanodeDetails, this.ozoneConfig, dispatcher) - }; - } - - /** - * Starts serving requests to ozone container. - * - * @throws IOException - */ - public void start() throws IOException { - for (XceiverServerSpi serverinstance : server) { - serverinstance.start(); - } - blockDeletingService.start(); - dispatcher.init(); - } - - /** - * Stops the ozone container. - * <p> - * Shutdown logic is not very obvious from the following code. if you need to - * modify the logic, please keep these comments in mind. Here is the shutdown - * sequence. - * <p> - * 1. We shutdown the network ports. - * <p> - * 2. Now we need to wait for all requests in-flight to finish. - * <p> - * 3. The container manager lock is a read-write lock with "Fairness" - * enabled. - * <p> - * 4. This means that the waiting threads are served in a "first-come-first - * -served" manner. 
Please note that this applies to waiting threads only. - * <p> - * 5. Since write locks are exclusive, if we are waiting to get a lock it - * implies that we are waiting for in-flight operations to complete. - * <p> - * 6. if there are other write operations waiting on the reader-writer lock, - * fairness guarantees that they will proceed before the shutdown lock - * request. - * <p> - * 7. Since all operations either take a reader or writer lock of container - * manager, we are guaranteed that we are the last operation since we have - * closed the network port, and we wait until close is successful. - * <p> - * 8. We take the writer lock and call shutdown on each of the managers in - * reverse order. That is chunkManager, keyManager and containerManager is - * shutdown. - */ - public void stop() { - LOG.info("Attempting to stop container services."); - for(XceiverServerSpi serverinstance: server) { - serverinstance.stop(); - } - dispatcher.shutdown(); - - try { - this.manager.writeLock(); - this.chunkManager.shutdown(); - this.keyManager.shutdown(); - this.manager.shutdown(); - this.blockDeletingService.shutdown(); - LOG.info("container services shutdown complete."); - } catch (IOException ex) { - LOG.warn("container service shutdown error:", ex); - } finally { - this.manager.writeUnlock(); - } - } - - /** - * Returns a paths to data dirs. - * - * @param pathList - List of paths. - * @throws IOException - */ - private void getDataDir(List<StorageLocation> pathList) throws IOException { - for (String dir : ozoneConfig.getStrings(DFS_DATANODE_DATA_DIR_KEY)) { - StorageLocation location = StorageLocation.parse(dir); - pathList.add(location); - } - } - - /** - * Returns node report of container storage usage. - */ - public SCMNodeReport getNodeReport() throws IOException { - return this.manager.getNodeReport(); - } - - private int getPortbyType(HdslProtos.ReplicationType replicationType) { - for (XceiverServerSpi serverinstance : server) { - if (serverinstance.getServerType() == replicationType) { - return serverinstance.getIPCPort(); - } - } - return INVALID_PORT; - } - - /** - * Returns the container server IPC port. - * - * @return Container server IPC port. - */ - public int getContainerServerPort() { - return getPortbyType(HdslProtos.ReplicationType.STAND_ALONE); - } - - /** - * Returns the Ratis container Server IPC port. - * - * @return Ratis port. - */ - public int getRatisContainerServerPort() { - return getPortbyType(HdslProtos.ReplicationType.RATIS); - } - - /** - * Returns container report. - * @return - container report. - * @throws IOException - */ - public ContainerReportsRequestProto getContainerReport() throws IOException { - return this.manager.getContainerReport(); - } - -// TODO: remove getContainerReports - /** - * Returns the list of closed containers. - * @return - List of closed containers. - * @throws IOException - */ - public List<ContainerData> getContainerReports() throws IOException { - return this.manager.getContainerReports(); - } - - @VisibleForTesting - public ContainerManager getContainerManager() { - return this.manager; - } - - /** - * Get the container report state to send via HB to SCM. - * @return the container report state. - */ - public ReportState getContainerReportState() { - return this.manager.getContainerReportState(); - } -}
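For readers following how the deleted OzoneContainer class above was meant to be driven, the sketch below shows the lifecycle its constructor and start()/stop() methods imply. It is illustrative only: the DatanodeDetails instance is assumed to be supplied by the datanode startup code, the metadata directory is a placeholder, and the package name is the pre-move one shown in this diff.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hdsl.protocol.DatanodeDetails;
import org.apache.hadoop.ozone.OzoneConfigKeys;
import org.apache.hadoop.ozone.container.ozoneimpl.OzoneContainer;

import java.io.IOException;

public final class OzoneContainerLifecycleSketch {

  private OzoneContainerLifecycleSketch() {
  }

  // Starts and stops an OzoneContainer. DatanodeDetails is passed in because
  // in the real datanode it comes from the state machine, not from this code.
  public static void runOnce(DatanodeDetails datanodeDetails,
      String metadataDir) throws IOException {
    Configuration conf = new Configuration();
    // The constructor reads OZONE_METADATA_DIRS first and only falls back to
    // dfs.datanode.data.dir when that key is unset, as the deleted source shows.
    conf.set(OzoneConfigKeys.OZONE_METADATA_DIRS, metadataDir);

    OzoneContainer container = new OzoneContainer(datanodeDetails, conf);
    container.start();  // starts the XceiverServer endpoints and the block deleting service
    try {
      // Serve requests; the bound ports are discoverable via
      // container.getContainerServerPort() and container.getRatisContainerServerPort().
    } finally {
      // stop() closes the network ports first, then takes the container
      // manager's write lock so in-flight operations drain before the
      // chunk, key and container managers are shut down.
      container.stop();
    }
  }
}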
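The shutdown comments in stop() above lean on the fairness property of a reentrant read-write lock: once the network ports are closed, a fair lock grants itself to waiting threads in arrival order, so the write-lock acquisition in stop() cannot overtake requests that were already admitted. The fragment below is a generic illustration of that pattern only; it is not the ContainerManagerImpl code, whose lock sits behind the writeLock()/writeUnlock() calls used above.

import java.util.concurrent.locks.ReentrantReadWriteLock;

public class FairShutdownPattern {
  // 'true' requests a fair lock: waiting threads acquire it in FIFO order.
  private final ReentrantReadWriteLock lock = new ReentrantReadWriteLock(true);
  private volatile boolean closed = false;

  // Request path: every in-flight operation holds the read lock while it runs.
  public void handleRequest(Runnable work) {
    lock.readLock().lock();
    try {
      if (!closed) {
        work.run();
      }
    } finally {
      lock.readLock().unlock();
    }
  }

  // Shutdown path: with the front end already stopped, acquiring the write
  // lock on a fair lock guarantees that every previously admitted request
  // has finished before resources are released, mirroring steps 5-8 above.
  public void shutdown() {
    lock.writeLock().lock();
    try {
      closed = true;
      // release managers in reverse order of creation here
    } finally {
      lock.writeLock().unlock();
    }
  }
}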
http://git-wip-us.apache.org/repos/asf/hadoop/blob/651a05a1/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/container/ozoneimpl/package-info.java ---------------------------------------------------------------------- diff --git a/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/container/ozoneimpl/package-info.java b/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/container/ozoneimpl/package-info.java deleted file mode 100644 index c99c038..0000000 --- a/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/container/ozoneimpl/package-info.java +++ /dev/null @@ -1,21 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.apache.hadoop.ozone.container.ozoneimpl; -/** - Ozone main that calls into the container layer -**/ \ No newline at end of file http://git-wip-us.apache.org/repos/asf/hadoop/blob/651a05a1/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/package-info.java ---------------------------------------------------------------------- diff --git a/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/package-info.java b/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/package-info.java deleted file mode 100644 index 1a51012..0000000 --- a/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/package-info.java +++ /dev/null @@ -1,23 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.hadoop.ozone; - -/** - * Generic ozone specific classes. 
- */ \ No newline at end of file http://git-wip-us.apache.org/repos/asf/hadoop/blob/651a05a1/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/protocol/StorageContainerDatanodeProtocol.java ---------------------------------------------------------------------- diff --git a/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/protocol/StorageContainerDatanodeProtocol.java b/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/protocol/StorageContainerDatanodeProtocol.java deleted file mode 100644 index fbb8426..0000000 --- a/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/protocol/StorageContainerDatanodeProtocol.java +++ /dev/null @@ -1,84 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with this - * work for additional information regarding copyright ownership. The ASF - * licenses this file to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT - * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the - * License for the specific language governing permissions and limitations under - * the License. - */ -package org.apache.hadoop.ozone.protocol; - -import org.apache.hadoop.classification.InterfaceAudience; -import org.apache.hadoop.hdsl.protocol.proto.HdslProtos.DatanodeDetailsProto; -import org.apache.hadoop.hdsl.protocol.proto.StorageContainerDatanodeProtocolProtos.ContainerReportsRequestProto; -import org.apache.hadoop.hdsl.protocol.proto.StorageContainerDatanodeProtocolProtos.ContainerReportsResponseProto; -import org.apache.hadoop.hdsl.protocol.proto.StorageContainerDatanodeProtocolProtos.ReportState; -import org.apache.hadoop.hdsl.protocol.proto.StorageContainerDatanodeProtocolProtos.SCMVersionRequestProto; -import org.apache.hadoop.hdsl.protocol.proto.StorageContainerDatanodeProtocolProtos.SCMHeartbeatResponseProto; -import org.apache.hadoop.hdsl.protocol.proto.StorageContainerDatanodeProtocolProtos.SCMVersionResponseProto; -import org.apache.hadoop.hdsl.protocol.proto.StorageContainerDatanodeProtocolProtos.SCMRegisteredCmdResponseProto; -import org.apache.hadoop.hdsl.protocol.proto.StorageContainerDatanodeProtocolProtos.SCMNodeReport; -import org.apache.hadoop.hdsl.protocol.proto.StorageContainerDatanodeProtocolProtos.ContainerBlocksDeletionACKProto; -import org.apache.hadoop.hdsl.protocol.proto.StorageContainerDatanodeProtocolProtos.ContainerBlocksDeletionACKResponseProto; -import java.io.IOException; - -/** - * The protocol spoken between datanodes and SCM. For specifics please the - * Protoc file that defines this protocol. - */ -@InterfaceAudience.Private -public interface StorageContainerDatanodeProtocol { - /** - * Returns SCM version. - * @return Version info. - */ - SCMVersionResponseProto getVersion(SCMVersionRequestProto versionRequest) - throws IOException; - - /** - * Used by data node to send a Heartbeat. - * @param datanodeDetails - Datanode Details. - * @param nodeReport - node report state - * @param reportState - container report state. 
- * @return - SCMHeartbeatResponseProto - * @throws IOException - */ - SCMHeartbeatResponseProto sendHeartbeat(DatanodeDetailsProto datanodeDetails, - SCMNodeReport nodeReport, ReportState reportState) throws IOException; - - /** - * Register Datanode. - * @param datanodeDetails - Datanode Details. - * @param scmAddresses - List of SCMs this datanode is configured to - * communicate. - * @return SCM Command. - */ - SCMRegisteredCmdResponseProto register(DatanodeDetailsProto datanodeDetails, - String[] scmAddresses) throws IOException; - - /** - * Send a container report. - * @param reports -- Container report. - * @return container reports response. - * @throws IOException - */ - ContainerReportsResponseProto sendContainerReport( - ContainerReportsRequestProto reports) throws IOException; - - /** - * Used by datanode to send block deletion ACK to SCM. - * @param request block deletion transactions. - * @return block deletion transaction response. - * @throws IOException - */ - ContainerBlocksDeletionACKResponseProto sendContainerBlocksDeletionACK( - ContainerBlocksDeletionACKProto request) throws IOException; -} http://git-wip-us.apache.org/repos/asf/hadoop/blob/651a05a1/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/protocol/StorageContainerNodeProtocol.java ---------------------------------------------------------------------- diff --git a/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/protocol/StorageContainerNodeProtocol.java b/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/protocol/StorageContainerNodeProtocol.java deleted file mode 100644 index fffbfd1..0000000 --- a/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/protocol/StorageContainerNodeProtocol.java +++ /dev/null @@ -1,68 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.apache.hadoop.ozone.protocol; - -import org.apache.hadoop.classification.InterfaceAudience; -import org.apache.hadoop.hdsl.protocol.proto.HdslProtos.DatanodeDetailsProto; -import org.apache.hadoop.ozone.protocol.commands.SCMCommand; -import org.apache.hadoop.hdsl.protocol.proto - .StorageContainerDatanodeProtocolProtos.ReportState; -import org.apache.hadoop.hdsl.protocol.proto - .StorageContainerDatanodeProtocolProtos.SCMVersionRequestProto; -import org.apache.hadoop.hdsl.protocol.proto - .StorageContainerDatanodeProtocolProtos.SCMNodeReport; - -import java.util.List; - -/** - * The protocol spoken between datanodes and SCM. - * - * Please note that the full protocol spoken between a datanode and SCM is - * separated into 2 interfaces. One interface that deals with node state and - * another interface that deals with containers. - * - * This interface has functions that deals with the state of datanode. 
- */ -@InterfaceAudience.Private -public interface StorageContainerNodeProtocol { - /** - * Gets the version info from SCM. - * @param versionRequest - version Request. - * @return - returns SCM version info and other required information needed - * by datanode. - */ - VersionResponse getVersion(SCMVersionRequestProto versionRequest); - - /** - * Register the node if the node finds that it is not registered with any SCM. - * @param datanodeDetails DatanodeDetails - * @return SCMHeartbeatResponseProto - */ - SCMCommand register(DatanodeDetailsProto datanodeDetails); - - /** - * Send heartbeat to indicate the datanode is alive and doing well. - * @param datanodeDetails - Datanode ID. - * @param nodeReport - node report. - * @param reportState - container report. - * @return SCMheartbeat response list - */ - List<SCMCommand> sendHeartbeat(DatanodeDetailsProto datanodeDetails, - SCMNodeReport nodeReport, ReportState reportState); - -} http://git-wip-us.apache.org/repos/asf/hadoop/blob/651a05a1/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/protocol/VersionResponse.java ---------------------------------------------------------------------- diff --git a/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/protocol/VersionResponse.java b/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/protocol/VersionResponse.java deleted file mode 100644 index c2dc2b3..0000000 --- a/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/protocol/VersionResponse.java +++ /dev/null @@ -1,150 +0,0 @@ - -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.hadoop.ozone.protocol; - -import org.apache.hadoop.hdsl.protocol.proto.HdslProtos.KeyValue; -import org.apache.hadoop.hdsl.protocol.proto - .StorageContainerDatanodeProtocolProtos.SCMVersionResponseProto; - -import java.util.HashMap; -import java.util.LinkedList; -import java.util.List; -import java.util.Map; -import java.util.stream.Collectors; - -/** - * Version response class. - */ -public class VersionResponse { - private final int version; - private final Map<String, String> values; - - /** - * Creates a version response class. - * @param version - * @param values - */ - public VersionResponse(int version, Map<String, String> values) { - this.version = version; - this.values = values; - } - - /** - * Creates a version Response class. - * @param version - */ - public VersionResponse(int version) { - this.version = version; - this.values = new HashMap<>(); - } - - /** - * Returns a new Builder. - * @return - Builder. - */ - public static Builder newBuilder() { - return new Builder(); - } - - /** - * Returns this class from protobuf message. 
- * @param response - SCMVersionResponseProto - * @return VersionResponse - */ - public static VersionResponse getFromProtobuf(SCMVersionResponseProto - response) { - return new VersionResponse(response.getSoftwareVersion(), - response.getKeysList().stream() - .collect(Collectors.toMap(KeyValue::getKey, - KeyValue::getValue))); - } - - /** - * Adds a value to version Response. - * @param key - String - * @param value - String - */ - public void put(String key, String value) { - if (this.values.containsKey(key)) { - throw new IllegalArgumentException("Duplicate key in version response"); - } - values.put(key, value); - } - - /** - * Return a protobuf message. - * @return SCMVersionResponseProto. - */ - public SCMVersionResponseProto getProtobufMessage() { - - List<KeyValue> list = new LinkedList<>(); - for (Map.Entry<String, String> entry : values.entrySet()) { - list.add(KeyValue.newBuilder().setKey(entry.getKey()). - setValue(entry.getValue()).build()); - } - return - SCMVersionResponseProto.newBuilder() - .setSoftwareVersion(this.version) - .addAllKeys(list).build(); - } - - /** - * Builder class. - */ - public static class Builder { - private int version; - private Map<String, String> values; - - Builder() { - values = new HashMap<>(); - } - - /** - * Sets the version. - * @param ver - version - * @return Builder - */ - public Builder setVersion(int ver) { - this.version = ver; - return this; - } - - /** - * Adds a value to version Response. - * @param key - String - * @param value - String - */ - public Builder addValue(String key, String value) { - if (this.values.containsKey(key)) { - throw new IllegalArgumentException("Duplicate key in version response"); - } - values.put(key, value); - return this; - } - - /** - * Builds the version response. - * @return VersionResponse. - */ - public VersionResponse build() { - return new VersionResponse(this.version, this.values); - } - } -} http://git-wip-us.apache.org/repos/asf/hadoop/blob/651a05a1/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/protocol/commands/CloseContainerCommand.java ---------------------------------------------------------------------- diff --git a/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/protocol/commands/CloseContainerCommand.java b/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/protocol/commands/CloseContainerCommand.java deleted file mode 100644 index f9571e9..0000000 --- a/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/protocol/commands/CloseContainerCommand.java +++ /dev/null @@ -1,75 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * <p> - * http://www.apache.org/licenses/LICENSE-2.0 - * <p> - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.apache.hadoop.ozone.protocol.commands; - -import com.google.common.base.Preconditions; -import org.apache.hadoop.hdsl.protocol.proto - .StorageContainerDatanodeProtocolProtos.SCMCloseContainerCmdResponseProto; -import org.apache.hadoop.hdsl.protocol.proto - .StorageContainerDatanodeProtocolProtos.SCMCmdType; -import static org.apache.hadoop.hdsl.protocol.proto - .StorageContainerDatanodeProtocolProtos.SCMCmdType.closeContainerCommand; - -/** - * Asks datanode to close a container. - */ -public class CloseContainerCommand - extends SCMCommand<SCMCloseContainerCmdResponseProto> { - - private String containerName; - - public CloseContainerCommand(String containerName) { - this.containerName = containerName; - } - - /** - * Returns the type of this command. - * - * @return Type - */ - @Override - public SCMCmdType getType() { - return closeContainerCommand; - } - - /** - * Gets the protobuf message of this object. - * - * @return A protobuf message. - */ - @Override - public byte[] getProtoBufMessage() { - return getProto().toByteArray(); - } - - public SCMCloseContainerCmdResponseProto getProto() { - return SCMCloseContainerCmdResponseProto.newBuilder() - .setContainerName(containerName).build(); - } - - public static CloseContainerCommand getFromProtobuf( - SCMCloseContainerCmdResponseProto closeContainerProto) { - Preconditions.checkNotNull(closeContainerProto); - return new CloseContainerCommand(closeContainerProto.getContainerName()); - - } - - public String getContainerName() { - return containerName; - } -} http://git-wip-us.apache.org/repos/asf/hadoop/blob/651a05a1/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/protocol/commands/DeleteBlocksCommand.java ---------------------------------------------------------------------- diff --git a/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/protocol/commands/DeleteBlocksCommand.java b/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/protocol/commands/DeleteBlocksCommand.java deleted file mode 100644 index c581658..0000000 --- a/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/protocol/commands/DeleteBlocksCommand.java +++ /dev/null @@ -1,63 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.apache.hadoop.ozone.protocol.commands; - -import org.apache.hadoop.hdsl.protocol.proto.StorageContainerDatanodeProtocolProtos.DeletedBlocksTransaction; -import org.apache.hadoop.hdsl.protocol.proto.StorageContainerDatanodeProtocolProtos.SCMDeleteBlocksCmdResponseProto; -import org.apache.hadoop.hdsl.protocol.proto.StorageContainerDatanodeProtocolProtos.SCMCmdType; - -import java.util.List; - -/** - * A SCM command asks a datanode to delete a number of blocks. 
- */ -public class DeleteBlocksCommand extends - SCMCommand<SCMDeleteBlocksCmdResponseProto> { - - private List<DeletedBlocksTransaction> blocksTobeDeleted; - - - public DeleteBlocksCommand(List<DeletedBlocksTransaction> blocks) { - this.blocksTobeDeleted = blocks; - } - - public List<DeletedBlocksTransaction> blocksTobeDeleted() { - return this.blocksTobeDeleted; - } - - @Override - public SCMCmdType getType() { - return SCMCmdType.deleteBlocksCommand; - } - - @Override - public byte[] getProtoBufMessage() { - return getProto().toByteArray(); - } - - public static DeleteBlocksCommand getFromProtobuf( - SCMDeleteBlocksCmdResponseProto deleteBlocksProto) { - return new DeleteBlocksCommand(deleteBlocksProto - .getDeletedBlocksTransactionsList()); - } - - public SCMDeleteBlocksCmdResponseProto getProto() { - return SCMDeleteBlocksCmdResponseProto.newBuilder() - .addAllDeletedBlocksTransactions(blocksTobeDeleted).build(); - } -} http://git-wip-us.apache.org/repos/asf/hadoop/blob/651a05a1/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/protocol/commands/RegisteredCommand.java ---------------------------------------------------------------------- diff --git a/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/protocol/commands/RegisteredCommand.java b/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/protocol/commands/RegisteredCommand.java deleted file mode 100644 index c4db6f7..0000000 --- a/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/protocol/commands/RegisteredCommand.java +++ /dev/null @@ -1,177 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.apache.hadoop.ozone.protocol.commands; - -import com.google.common.base.Preconditions; -import org.apache.hadoop.hdsl.protocol.proto - .StorageContainerDatanodeProtocolProtos.SCMRegisteredCmdResponseProto; -import org.apache.hadoop.hdsl.protocol.proto - .StorageContainerDatanodeProtocolProtos.SCMRegisteredCmdResponseProto - .ErrorCode; -import org.apache.hadoop.hdsl.protocol.proto - .StorageContainerDatanodeProtocolProtos.SCMCmdType; - -/** - * Response to Datanode Register call. - */ -public class RegisteredCommand extends - SCMCommand<SCMRegisteredCmdResponseProto> { - private String datanodeUUID; - private String clusterID; - private ErrorCode error; - - public RegisteredCommand(final ErrorCode error, final String datanodeUUID, - final String clusterID) { - this.datanodeUUID = datanodeUUID; - this.clusterID = clusterID; - this.error = error; - } - - /** - * Returns a new builder. - * - * @return - Builder - */ - public static Builder newBuilder() { - return new Builder(); - } - - /** - * Returns the type of this command. 
- * - * @return Type - */ - @Override - public SCMCmdType getType() { - return SCMCmdType.registeredCommand; - } - - /** - * Returns datanode UUID. - * - * @return - Datanode ID. - */ - public String getDatanodeUUID() { - return datanodeUUID; - } - - /** - * Returns cluster ID. - * - * @return -- ClusterID - */ - public String getClusterID() { - return clusterID; - } - - /** - * Returns ErrorCode. - * - * @return - ErrorCode - */ - public ErrorCode getError() { - return error; - } - - /** - * Gets the protobuf message of this object. - * - * @return A protobuf message. - */ - @Override - public byte[] getProtoBufMessage() { - return SCMRegisteredCmdResponseProto.newBuilder() - .setClusterID(this.clusterID) - .setDatanodeUUID(this.datanodeUUID) - .setErrorCode(this.error) - .build().toByteArray(); - } - - /** - * A builder class to verify all values are sane. - */ - public static class Builder { - private String datanodeUUID; - private String clusterID; - private ErrorCode error; - - /** - * sets UUID. - * - * @param dnUUID - datanode UUID - * @return Builder - */ - public Builder setDatanodeUUID(String dnUUID) { - this.datanodeUUID = dnUUID; - return this; - } - - /** - * Create this object from a Protobuf message. - * - * @param response - RegisteredCmdResponseProto - * @return RegisteredCommand - */ - public RegisteredCommand getFromProtobuf(SCMRegisteredCmdResponseProto - response) { - Preconditions.checkNotNull(response); - return new RegisteredCommand(response.getErrorCode(), - response.hasDatanodeUUID() ? response.getDatanodeUUID(): "", - response.hasClusterID() ? response.getClusterID(): ""); - } - - /** - * Sets cluster ID. - * - * @param cluster - clusterID - * @return Builder - */ - public Builder setClusterID(String cluster) { - this.clusterID = cluster; - return this; - } - - /** - * Sets Error code. - * - * @param errorCode - error code - * @return Builder - */ - public Builder setErrorCode(ErrorCode errorCode) { - this.error = errorCode; - return this; - } - - /** - * Build the command object. - * - * @return RegisteredCommand - */ - public RegisteredCommand build() { - if ((this.error == ErrorCode.success) && - (this.datanodeUUID == null || this.datanodeUUID.isEmpty()) || - (this.clusterID == null || this.clusterID.isEmpty())) { - throw new IllegalArgumentException("On success, RegisteredCommand " + - "needs datanodeUUID and ClusterID."); - } - - return new - RegisteredCommand(this.error, this.datanodeUUID, this.clusterID); - } - } -} http://git-wip-us.apache.org/repos/asf/hadoop/blob/651a05a1/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/protocol/commands/ReregisterCommand.java ---------------------------------------------------------------------- diff --git a/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/protocol/commands/ReregisterCommand.java b/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/protocol/commands/ReregisterCommand.java deleted file mode 100644 index 0c55a90..0000000 --- a/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/protocol/commands/ReregisterCommand.java +++ /dev/null @@ -1,58 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.apache.hadoop.ozone.protocol.commands; - -import org.apache.hadoop.hdsl.protocol.proto - .StorageContainerDatanodeProtocolProtos.SCMCmdType; -import static org.apache.hadoop.hdsl.protocol.proto - .StorageContainerDatanodeProtocolProtos.SCMReregisterCmdResponseProto; -import static org.apache.hadoop.hdsl.protocol.proto - .StorageContainerDatanodeProtocolProtos.SCMCmdType.reregisterCommand; - -/** - * Informs a datanode to register itself with SCM again. - */ -public class ReregisterCommand extends - SCMCommand<SCMReregisterCmdResponseProto>{ - - /** - * Returns the type of this command. - * - * @return Type - */ - @Override - public SCMCmdType getType() { - return reregisterCommand; - } - - /** - * Gets the protobuf message of this object. - * - * @return A protobuf message. - */ - @Override - public byte[] getProtoBufMessage() { - return getProto().toByteArray(); - } - - public SCMReregisterCmdResponseProto getProto() { - return SCMReregisterCmdResponseProto - .newBuilder() - .build(); - } -} http://git-wip-us.apache.org/repos/asf/hadoop/blob/651a05a1/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/protocol/commands/SCMCommand.java ---------------------------------------------------------------------- diff --git a/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/protocol/commands/SCMCommand.java b/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/protocol/commands/SCMCommand.java deleted file mode 100644 index e79a157..0000000 --- a/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/protocol/commands/SCMCommand.java +++ /dev/null @@ -1,41 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.apache.hadoop.ozone.protocol.commands; - -import org.apache.hadoop.hdsl.protocol.proto - .StorageContainerDatanodeProtocolProtos.SCMCmdType; -import com.google.protobuf.GeneratedMessage; - -/** - * A class that acts as the base class to convert between Java and SCM - * commands in protobuf format. - * @param <T> - */ -public abstract class SCMCommand<T extends GeneratedMessage> { - /** - * Returns the type of this command. - * @return Type - */ - public abstract SCMCmdType getType(); - - /** - * Gets the protobuf message of this object. - * @return A protobuf message. 
- */ - public abstract byte[] getProtoBufMessage(); -} http://git-wip-us.apache.org/repos/asf/hadoop/blob/651a05a1/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/protocol/commands/SendContainerCommand.java ---------------------------------------------------------------------- diff --git a/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/protocol/commands/SendContainerCommand.java b/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/protocol/commands/SendContainerCommand.java deleted file mode 100644 index fa2245b..0000000 --- a/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/protocol/commands/SendContainerCommand.java +++ /dev/null @@ -1,80 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * <p> - * http://www.apache.org/licenses/LICENSE-2.0 - * <p> - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.hadoop.ozone.protocol.commands; - -import org.apache.hadoop.hdsl.protocol.proto - .StorageContainerDatanodeProtocolProtos.SendContainerReportProto; -import org.apache.hadoop.hdsl.protocol.proto - .StorageContainerDatanodeProtocolProtos.SCMCmdType; - -/** - * Allows a Datanode to send in the container report. - */ -public class SendContainerCommand extends SCMCommand<SendContainerReportProto> { - /** - * Returns a NullCommand class from NullCommandResponse Proto. - * @param unused - unused - * @return NullCommand - */ - public static SendContainerCommand getFromProtobuf( - final SendContainerReportProto unused) { - return new SendContainerCommand(); - } - - /** - * returns a new builder. - * @return Builder - */ - public static SendContainerCommand.Builder newBuilder() { - return new SendContainerCommand.Builder(); - } - - /** - * Returns the type of this command. - * - * @return Type - */ - @Override - public SCMCmdType getType() { - return SCMCmdType.sendContainerReport; - } - - /** - * Gets the protobuf message of this object. - * - * @return A protobuf message. - */ - @Override - public byte[] getProtoBufMessage() { - return SendContainerReportProto.newBuilder().build().toByteArray(); - } - - /** - * A Builder class this is the standard pattern we are using for all commands. - */ - public static class Builder { - /** - * Return a null command. - * @return - NullCommand. 
- */ - public SendContainerCommand build() { - return new SendContainerCommand(); - } - } -} http://git-wip-us.apache.org/repos/asf/hadoop/blob/651a05a1/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/protocol/commands/package-info.java ---------------------------------------------------------------------- diff --git a/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/protocol/commands/package-info.java b/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/protocol/commands/package-info.java deleted file mode 100644 index 7083c1b..0000000 --- a/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/protocol/commands/package-info.java +++ /dev/null @@ -1,21 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.apache.hadoop.ozone.protocol.commands; -/** - Set of classes that help in protoc conversions. - **/ http://git-wip-us.apache.org/repos/asf/hadoop/blob/651a05a1/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/protocol/package-info.java ---------------------------------------------------------------------- diff --git a/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/protocol/package-info.java b/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/protocol/package-info.java deleted file mode 100644 index 03854a0..0000000 --- a/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/protocol/package-info.java +++ /dev/null @@ -1,23 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.hadoop.ozone.protocol; - -/** - * This package contains classes for HDSL protocol definitions. 
- */ http://git-wip-us.apache.org/repos/asf/hadoop/blob/651a05a1/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/protocolPB/StorageContainerDatanodeProtocolClientSideTranslatorPB.java ---------------------------------------------------------------------- diff --git a/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/protocolPB/StorageContainerDatanodeProtocolClientSideTranslatorPB.java b/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/protocolPB/StorageContainerDatanodeProtocolClientSideTranslatorPB.java deleted file mode 100644 index 4abd8a6..0000000 --- a/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/protocolPB/StorageContainerDatanodeProtocolClientSideTranslatorPB.java +++ /dev/null @@ -1,197 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with this - * work for additional information regarding copyright ownership. The ASF - * licenses this file to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * <p> - * http://www.apache.org/licenses/LICENSE-2.0 - * <p> - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT - * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the - * License for the specific language governing permissions and limitations under - * the License. - */ -package org.apache.hadoop.ozone.protocolPB; - -import com.google.protobuf.RpcController; -import com.google.protobuf.ServiceException; -import org.apache.hadoop.hdsl.protocol.proto.HdslProtos.DatanodeDetailsProto; -import org.apache.hadoop.ipc.ProtobufHelper; -import org.apache.hadoop.ipc.ProtocolTranslator; -import org.apache.hadoop.ipc.RPC; -import org.apache.hadoop.ozone.protocol.StorageContainerDatanodeProtocol; -import org.apache.hadoop.hdsl.protocol.proto.StorageContainerDatanodeProtocolProtos.ContainerReportsResponseProto; -import org.apache.hadoop.hdsl.protocol.proto.StorageContainerDatanodeProtocolProtos.ContainerReportsRequestProto; -import org.apache.hadoop.hdsl.protocol.proto - .StorageContainerDatanodeProtocolProtos.ReportState; -import org.apache.hadoop.hdsl.protocol.proto - .StorageContainerDatanodeProtocolProtos.SCMHeartbeatRequestProto; -import org.apache.hadoop.hdsl.protocol.proto - .StorageContainerDatanodeProtocolProtos.SCMHeartbeatResponseProto; -import org.apache.hadoop.hdsl.protocol.proto - .StorageContainerDatanodeProtocolProtos.SCMNodeReport; -import org.apache.hadoop.hdsl.protocol.proto - .StorageContainerDatanodeProtocolProtos.SCMRegisterRequestProto; -import org.apache.hadoop.hdsl.protocol.proto - .StorageContainerDatanodeProtocolProtos.SCMRegisteredCmdResponseProto; -import org.apache.hadoop.hdsl.protocol.proto.StorageContainerDatanodeProtocolProtos.SCMVersionRequestProto; -import org.apache.hadoop.hdsl.protocol.proto.StorageContainerDatanodeProtocolProtos.SCMVersionResponseProto; -import org.apache.hadoop.hdsl.protocol.proto.StorageContainerDatanodeProtocolProtos.ContainerBlocksDeletionACKProto; -import org.apache.hadoop.hdsl.protocol.proto.StorageContainerDatanodeProtocolProtos.ContainerBlocksDeletionACKResponseProto; - -import java.io.Closeable; -import java.io.IOException; - -/** - * This class is the client-side translator to translate the requests made on - * the {@link 
StorageContainerDatanodeProtocol} interface to the RPC server - * implementing {@link StorageContainerDatanodeProtocolPB}. - */ -public class StorageContainerDatanodeProtocolClientSideTranslatorPB - implements StorageContainerDatanodeProtocol, ProtocolTranslator, Closeable { - - /** - * RpcController is not used and hence is set to null. - */ - private static final RpcController NULL_RPC_CONTROLLER = null; - private final StorageContainerDatanodeProtocolPB rpcProxy; - - /** - * Constructs a Client side interface that calls into SCM datanode protocol. - * - * @param rpcProxy - Proxy for RPC. - */ - public StorageContainerDatanodeProtocolClientSideTranslatorPB( - StorageContainerDatanodeProtocolPB rpcProxy) { - this.rpcProxy = rpcProxy; - } - - /** - * Closes this stream and releases any system resources associated with it. If - * the stream is already closed then invoking this method has no effect. - * <p> - * <p> As noted in {@link AutoCloseable#close()}, cases where the close may - * fail require careful attention. It is strongly advised to relinquish the - * underlying resources and to internally <em>mark</em> the {@code Closeable} - * as closed, prior to throwing the {@code IOException}. - * - * @throws IOException if an I/O error occurs - */ - @Override - public void close() throws IOException { - RPC.stopProxy(rpcProxy); - } - - /** - * Return the proxy object underlying this protocol translator. - * - * @return the proxy object underlying this protocol translator. - */ - @Override - public Object getUnderlyingProxyObject() { - return rpcProxy; - } - - /** - * Returns SCM version. - * - * @param unused - set to null and unused. - * @return Version info. - */ - @Override - public SCMVersionResponseProto getVersion(SCMVersionRequestProto - unused) throws IOException { - SCMVersionRequestProto request = - SCMVersionRequestProto.newBuilder().build(); - final SCMVersionResponseProto response; - try { - response = rpcProxy.getVersion(NULL_RPC_CONTROLLER, request); - } catch (ServiceException ex) { - throw ProtobufHelper.getRemoteException(ex); - } - return response; - } - - /** - * Send by datanode to SCM. - * - * @param datanodeDetailsProto - Datanode Details - * @param nodeReport - node report - * @throws IOException - */ - - @Override - public SCMHeartbeatResponseProto sendHeartbeat( - DatanodeDetailsProto datanodeDetailsProto, - SCMNodeReport nodeReport, ReportState reportState) throws IOException { - SCMHeartbeatRequestProto.Builder req = SCMHeartbeatRequestProto - .newBuilder(); - req.setDatanodeDetails(datanodeDetailsProto); - req.setNodeReport(nodeReport); - req.setContainerReportState(reportState); - final SCMHeartbeatResponseProto resp; - try { - resp = rpcProxy.sendHeartbeat(NULL_RPC_CONTROLLER, req.build()); - } catch (ServiceException e) { - throw ProtobufHelper.getRemoteException(e); - } - return resp; - } - - /** - * Register Datanode. - * - * @param datanodeDetailsProto - Datanode Details - * @return SCM Command. - */ - @Override - public SCMRegisteredCmdResponseProto register( - DatanodeDetailsProto datanodeDetailsProto, - String[] scmAddresses) throws IOException { - SCMRegisterRequestProto.Builder req = - SCMRegisterRequestProto.newBuilder(); - req.setDatanodeDetails(datanodeDetailsProto); - final SCMRegisteredCmdResponseProto response; - try { - response = rpcProxy.register(NULL_RPC_CONTROLLER, req.build()); - } catch (ServiceException e) { - throw ProtobufHelper.getRemoteException(e); - } - return response; - } - - /** - * Send a container report. 
- * - * @param reports -- Container report - * @return HeartbeatRespose.nullcommand. - * @throws IOException - */ - @Override - public ContainerReportsResponseProto sendContainerReport( - ContainerReportsRequestProto reports) throws IOException { - final ContainerReportsResponseProto resp; - try { - resp = rpcProxy.sendContainerReport(NULL_RPC_CONTROLLER, reports); - } catch (ServiceException e) { - throw ProtobufHelper.getRemoteException(e); - } - return resp; - } - - @Override - public ContainerBlocksDeletionACKResponseProto sendContainerBlocksDeletionACK( - ContainerBlocksDeletionACKProto deletedBlocks) throws IOException { - final ContainerBlocksDeletionACKResponseProto resp; - try { - resp = rpcProxy.sendContainerBlocksDeletionACK(NULL_RPC_CONTROLLER, - deletedBlocks); - } catch (ServiceException e) { - throw ProtobufHelper.getRemoteException(e); - } - return resp; - } -} http://git-wip-us.apache.org/repos/asf/hadoop/blob/651a05a1/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/protocolPB/StorageContainerDatanodeProtocolPB.java ---------------------------------------------------------------------- diff --git a/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/protocolPB/StorageContainerDatanodeProtocolPB.java b/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/protocolPB/StorageContainerDatanodeProtocolPB.java deleted file mode 100644 index 677a364..0000000 --- a/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/protocolPB/StorageContainerDatanodeProtocolPB.java +++ /dev/null @@ -1,32 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with this - * work for additional information regarding copyright ownership. The ASF - * licenses this file to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * <p/> - * http://www.apache.org/licenses/LICENSE-2.0 - * <p/> - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT - * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the - * License for the specific language governing permissions and limitations under - * the License. - */ -package org.apache.hadoop.ozone.protocolPB; - -import org.apache.hadoop.ipc.ProtocolInfo; -import org.apache.hadoop.hdsl.protocol.proto.StorageContainerDatanodeProtocolProtos.StorageContainerDatanodeProtocolService; - -/** - * Protocol used from a datanode to StorageContainerManager. This extends - * the Protocol Buffers service interface to add Hadoop-specific annotations. 
- */ - -@ProtocolInfo(protocolName = - "org.apache.hadoop.ozone.protocol.StorageContainerDatanodeProtocol", - protocolVersion = 1) -public interface StorageContainerDatanodeProtocolPB extends - StorageContainerDatanodeProtocolService.BlockingInterface { -} http://git-wip-us.apache.org/repos/asf/hadoop/blob/651a05a1/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/protocolPB/StorageContainerDatanodeProtocolServerSideTranslatorPB.java ---------------------------------------------------------------------- diff --git a/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/protocolPB/StorageContainerDatanodeProtocolServerSideTranslatorPB.java b/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/protocolPB/StorageContainerDatanodeProtocolServerSideTranslatorPB.java deleted file mode 100644 index cd2fb59..0000000 --- a/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/protocolPB/StorageContainerDatanodeProtocolServerSideTranslatorPB.java +++ /dev/null @@ -1,111 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with this - * work for additional information regarding copyright ownership. The ASF - * licenses this file to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * <p> - * http://www.apache.org/licenses/LICENSE-2.0 - * <p> - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT - * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the - * License for the specific language governing permissions and limitations under - * the License. - */ -package org.apache.hadoop.ozone.protocolPB; - -import com.google.protobuf.RpcController; -import com.google.protobuf.ServiceException; -import org.apache.hadoop.ozone.protocol.StorageContainerDatanodeProtocol; -import org.apache.hadoop.hdsl.protocol.proto.StorageContainerDatanodeProtocolProtos; -import org.apache.hadoop.hdsl.protocol.proto.StorageContainerDatanodeProtocolProtos.ContainerReportsRequestProto; -import org.apache.hadoop.hdsl.protocol.proto.StorageContainerDatanodeProtocolProtos.ContainerReportsResponseProto; -import org.apache.hadoop.hdsl.protocol.proto.StorageContainerDatanodeProtocolProtos.SCMHeartbeatRequestProto; -import org.apache.hadoop.hdsl.protocol.proto.StorageContainerDatanodeProtocolProtos.SCMHeartbeatResponseProto; -import org.apache.hadoop.hdsl.protocol.proto.StorageContainerDatanodeProtocolProtos.ContainerBlocksDeletionACKResponseProto; -import org.apache.hadoop.hdsl.protocol.proto.StorageContainerDatanodeProtocolProtos.ContainerBlocksDeletionACKProto; - -import java.io.IOException; - -/** - * This class is the server-side translator that forwards requests received on - * {@link StorageContainerDatanodeProtocolPB} to the {@link - * StorageContainerDatanodeProtocol} server implementation. 
- */ -public class StorageContainerDatanodeProtocolServerSideTranslatorPB - implements StorageContainerDatanodeProtocolPB { - - private final StorageContainerDatanodeProtocol impl; - - public StorageContainerDatanodeProtocolServerSideTranslatorPB( - StorageContainerDatanodeProtocol impl) { - this.impl = impl; - } - - @Override - public StorageContainerDatanodeProtocolProtos.SCMVersionResponseProto - getVersion(RpcController controller, - StorageContainerDatanodeProtocolProtos.SCMVersionRequestProto request) - throws ServiceException { - try { - return impl.getVersion(request); - } catch (IOException e) { - throw new ServiceException(e); - } - } - - @Override - public StorageContainerDatanodeProtocolProtos.SCMRegisteredCmdResponseProto - register(RpcController controller, StorageContainerDatanodeProtocolProtos - .SCMRegisterRequestProto request) throws ServiceException { - String[] addressArray = null; - - if (request.hasAddressList()) { - addressArray = request.getAddressList().getAddressListList() - .toArray(new String[0]); - } - - try { - return impl.register(request.getDatanodeDetails(), addressArray); - } catch (IOException e) { - throw new ServiceException(e); - } - } - - @Override - public SCMHeartbeatResponseProto - sendHeartbeat(RpcController controller, - SCMHeartbeatRequestProto request) throws ServiceException { - try { - return impl.sendHeartbeat(request.getDatanodeDetails(), - request.getNodeReport(), - request.getContainerReportState()); - } catch (IOException e) { - throw new ServiceException(e); - } - } - - @Override - public ContainerReportsResponseProto sendContainerReport( - RpcController controller, ContainerReportsRequestProto request) - throws ServiceException { - try { - return impl.sendContainerReport(request); - } catch (IOException e) { - throw new ServiceException(e); - } - } - - @Override - public ContainerBlocksDeletionACKResponseProto sendContainerBlocksDeletionACK( - RpcController controller, ContainerBlocksDeletionACKProto request) - throws ServiceException { - try { - return impl.sendContainerBlocksDeletionACK(request); - } catch (IOException e) { - throw new ServiceException(e); - } - } -} \ No newline at end of file http://git-wip-us.apache.org/repos/asf/hadoop/blob/651a05a1/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/scm/HdslServerUtil.java ---------------------------------------------------------------------- diff --git a/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/scm/HdslServerUtil.java b/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/scm/HdslServerUtil.java deleted file mode 100644 index 21123eb..0000000 --- a/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/scm/HdslServerUtil.java +++ /dev/null @@ -1,304 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with this - * work for additional information regarding copyright ownership. The ASF - * licenses this file to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * <p> - * http://www.apache.org/licenses/LICENSE-2.0 - * <p> - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT - * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the - * License for the specific language governing permissions and limitations under - * the License. - */ - -package org.apache.hadoop.ozone.scm; - -import com.google.common.base.Optional; -import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.net.NetUtils; -import org.apache.hadoop.ozone.OzoneConfigKeys; -import org.apache.hadoop.scm.ScmConfigKeys; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.net.InetSocketAddress; -import java.util.HashMap; -import java.util.Map; -import java.util.concurrent.TimeUnit; - -import static org.apache.hadoop.hdsl.HdslUtils.*; -import static org.apache.hadoop.hdsl.server.ServerUtils.sanitizeUserArgs; -import static org.apache.hadoop.scm.ScmConfigKeys.*; - -/** - * Hdsl stateless helper functions for server side components. - */ -public class HdslServerUtil { - - private static final Logger LOG = LoggerFactory.getLogger( - HdslServerUtil.class); - - /** - * Retrieve the socket address that should be used by DataNodes to connect - * to the SCM. - * - * @param conf - * @return Target InetSocketAddress for the SCM service endpoint. - */ - public static InetSocketAddress getScmAddressForDataNodes( - Configuration conf) { - // We try the following settings in decreasing priority to retrieve the - // target host. - // - OZONE_SCM_DATANODE_ADDRESS_KEY - // - OZONE_SCM_CLIENT_ADDRESS_KEY - // - final Optional<String> host = getHostNameFromConfigKeys(conf, - ScmConfigKeys.OZONE_SCM_DATANODE_ADDRESS_KEY, - ScmConfigKeys.OZONE_SCM_CLIENT_ADDRESS_KEY); - - if (!host.isPresent()) { - throw new IllegalArgumentException( - ScmConfigKeys.OZONE_SCM_CLIENT_ADDRESS_KEY + - " must be defined. See" + - " https://wiki.apache.org/hadoop/Ozone#Configuration " - + "for details on configuring Ozone."); - } - - // If no port number is specified then we'll just try the defaultBindPort. - final Optional<Integer> port = getPortNumberFromConfigKeys(conf, - ScmConfigKeys.OZONE_SCM_DATANODE_ADDRESS_KEY); - - InetSocketAddress addr = NetUtils.createSocketAddr(host.get() + ":" + - port.or(ScmConfigKeys.OZONE_SCM_DATANODE_PORT_DEFAULT)); - - return addr; - } - - /** - * Retrieve the socket address that should be used by clients to connect - * to the SCM. - * - * @param conf - * @return Target InetSocketAddress for the SCM client endpoint. - */ - public static InetSocketAddress getScmClientBindAddress( - Configuration conf) { - final Optional<String> host = getHostNameFromConfigKeys(conf, - ScmConfigKeys.OZONE_SCM_CLIENT_BIND_HOST_KEY); - - final Optional<Integer> port = getPortNumberFromConfigKeys(conf, - ScmConfigKeys.OZONE_SCM_CLIENT_ADDRESS_KEY); - - return NetUtils.createSocketAddr( - host.or(ScmConfigKeys.OZONE_SCM_CLIENT_BIND_HOST_DEFAULT) + ":" + - port.or(ScmConfigKeys.OZONE_SCM_CLIENT_PORT_DEFAULT)); - } - - /** - * Retrieve the socket address that should be used by clients to connect - * to the SCM Block service. - * - * @param conf - * @return Target InetSocketAddress for the SCM block client endpoint. 
- */ - public static InetSocketAddress getScmBlockClientBindAddress( - Configuration conf) { - final Optional<String> host = getHostNameFromConfigKeys(conf, - ScmConfigKeys.OZONE_SCM_BLOCK_CLIENT_BIND_HOST_KEY); - - final Optional<Integer> port = getPortNumberFromConfigKeys(conf, - ScmConfigKeys.OZONE_SCM_BLOCK_CLIENT_ADDRESS_KEY); - - return NetUtils.createSocketAddr( - host.or(ScmConfigKeys.OZONE_SCM_BLOCK_CLIENT_BIND_HOST_DEFAULT) + - ":" + port.or(ScmConfigKeys.OZONE_SCM_BLOCK_CLIENT_PORT_DEFAULT)); - } - - /** - * Retrieve the socket address that should be used by DataNodes to connect - * to the SCM. - * - * @param conf - * @return Target InetSocketAddress for the SCM service endpoint. - */ - public static InetSocketAddress getScmDataNodeBindAddress( - Configuration conf) { - final Optional<String> host = getHostNameFromConfigKeys(conf, - ScmConfigKeys.OZONE_SCM_DATANODE_BIND_HOST_KEY); - - // If no port number is specified then we'll just try the defaultBindPort. - final Optional<Integer> port = getPortNumberFromConfigKeys(conf, - ScmConfigKeys.OZONE_SCM_DATANODE_ADDRESS_KEY); - - return NetUtils.createSocketAddr( - host.or(ScmConfigKeys.OZONE_SCM_DATANODE_BIND_HOST_DEFAULT) + ":" + - port.or(ScmConfigKeys.OZONE_SCM_DATANODE_PORT_DEFAULT)); - } - - - /** - * Returns the interval in which the heartbeat processor thread runs. - * - * @param conf - Configuration - * @return long in Milliseconds. - */ - public static long getScmheartbeatCheckerInterval(Configuration conf) { - return conf.getTimeDuration(OZONE_SCM_HEARTBEAT_PROCESS_INTERVAL, - ScmConfigKeys.OZONE_SCM_HEARTBEAT_PROCESS_INTERVAL_DEFAULT, - TimeUnit.MILLISECONDS); - } - - /** - * Heartbeat Interval - Defines the heartbeat frequency from a datanode to - * SCM. - * - * @param conf - Ozone Config - * @return - HB interval in seconds. - */ - public static long getScmHeartbeatInterval(Configuration conf) { - return conf.getTimeDuration(OZONE_SCM_HEARTBEAT_INTERVAL, - ScmConfigKeys.OZONE_SCM_HEARBEAT_INTERVAL_DEFAULT, - TimeUnit.SECONDS); - } - - /** - * Get the Stale Node interval, which is used by SCM to flag a datanode as - * stale, if the heartbeat from that node has been missing for this duration. - * - * @param conf - Configuration. - * @return - Long, Milliseconds to wait before flagging a node as stale. - */ - public static long getStaleNodeInterval(Configuration conf) { - - long staleNodeIntervalMs = - conf.getTimeDuration(OZONE_SCM_STALENODE_INTERVAL, - OZONE_SCM_STALENODE_INTERVAL_DEFAULT, TimeUnit.MILLISECONDS); - - long heartbeatThreadFrequencyMs = getScmheartbeatCheckerInterval(conf); - - long heartbeatIntervalMs = getScmHeartbeatInterval(conf) * 1000; - - - // Make sure that StaleNodeInterval is configured way above the frequency - // at which we run the heartbeat thread. - // - // Here we check that staleNodeInterval is at least five times more than the - // frequency at which the accounting thread is going to run. - try { - sanitizeUserArgs(staleNodeIntervalMs, heartbeatThreadFrequencyMs, - 5, 1000); - } catch (IllegalArgumentException ex) { - LOG.error("Stale Node Interval is cannot be honored due to " + - "mis-configured {}. ex: {}", - OZONE_SCM_HEARTBEAT_PROCESS_INTERVAL, ex); - throw ex; - } - - // Make sure that stale node value is greater than configured value that - // datanodes are going to send HBs. 
- try { - sanitizeUserArgs(staleNodeIntervalMs, heartbeatIntervalMs, 3, 1000); - } catch (IllegalArgumentException ex) { - LOG.error("Stale Node Interval MS is cannot be honored due to " + - "mis-configured {}. ex: {}", OZONE_SCM_HEARTBEAT_INTERVAL, ex); - throw ex; - } - return staleNodeIntervalMs; - } - - /** - * Gets the interval for dead node flagging. This has to be a value that is - * greater than stale node value, and by transitive relation we also know - * that this value is greater than heartbeat interval and heartbeatProcess - * Interval. - * - * @param conf - Configuration. - * @return - the interval for dead node flagging. - */ - public static long getDeadNodeInterval(Configuration conf) { - long staleNodeIntervalMs = getStaleNodeInterval(conf); - long deadNodeIntervalMs = conf.getTimeDuration(OZONE_SCM_DEADNODE_INTERVAL, - OZONE_SCM_DEADNODE_INTERVAL_DEFAULT, - TimeUnit.MILLISECONDS); - - try { - // Make sure that dead nodes Ms is at least twice the time for staleNodes - // with a max of 1000 times the staleNodes. - sanitizeUserArgs(deadNodeIntervalMs, staleNodeIntervalMs, 2, 1000); - } catch (IllegalArgumentException ex) { - LOG.error("Dead Node Interval MS is cannot be honored due to " + - "mis-configured {}. ex: {}", OZONE_SCM_STALENODE_INTERVAL, ex); - throw ex; - } - return deadNodeIntervalMs; - } - - /** - * Returns the maximum number of heartbeat to process per loop of the process - * thread. - * @param conf Configuration - * @return - int -- Number of HBs to process - */ - public static int getMaxHBToProcessPerLoop(Configuration conf) { - return conf.getInt(ScmConfigKeys.OZONE_SCM_MAX_HB_COUNT_TO_PROCESS, - ScmConfigKeys.OZONE_SCM_MAX_HB_COUNT_TO_PROCESS_DEFAULT); - } - - /** - * Timeout value for the RPC from Datanode to SCM, primarily used for - * Heartbeats and container reports. - * - * @param conf - Ozone Config - * @return - Rpc timeout in Milliseconds. - */ - public static long getScmRpcTimeOutInMilliseconds(Configuration conf) { - return conf.getTimeDuration(OZONE_SCM_HEARTBEAT_RPC_TIMEOUT, - OZONE_SCM_HEARTBEAT_RPC_TIMEOUT_DEFAULT, TimeUnit.MILLISECONDS); - } - - /** - * Log Warn interval. - * - * @param conf - Ozone Config - * @return - Log warn interval. - */ - public static int getLogWarnInterval(Configuration conf) { - return conf.getInt(OZONE_SCM_HEARTBEAT_LOG_WARN_INTERVAL_COUNT, - OZONE_SCM_HEARTBEAT_LOG_WARN_DEFAULT); - } - - /** - * returns the Container port. - * @param conf - Conf - * @return port number. - */ - public static int getContainerPort(Configuration conf) { - return conf.getInt(OzoneConfigKeys.DFS_CONTAINER_IPC_PORT, - OzoneConfigKeys.DFS_CONTAINER_IPC_PORT_DEFAULT); - } - - - /** - * Return the list of service addresses for the Ozone SCM. This method is used - * by the DataNodes to determine the service instances to connect to. - * - * @param conf - * @return list of SCM service addresses. - */ - public static Map<String, ? 
extends Map<String, InetSocketAddress>> - getScmServiceRpcAddresses(Configuration conf) { - - final Map<String, InetSocketAddress> serviceInstances = new HashMap<>(); - serviceInstances.put(OZONE_SCM_SERVICE_INSTANCE_ID, - getScmAddressForDataNodes(conf)); - - final Map<String, Map<String, InetSocketAddress>> services = - new HashMap<>(); - services.put(OZONE_SCM_SERVICE_ID, serviceInstances); - return services; - } -} http://git-wip-us.apache.org/repos/asf/hadoop/blob/651a05a1/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/scm/VersionInfo.java ---------------------------------------------------------------------- diff --git a/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/scm/VersionInfo.java b/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/scm/VersionInfo.java deleted file mode 100644 index 6bb3a22..0000000 --- a/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/scm/VersionInfo.java +++ /dev/null @@ -1,81 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.apache.hadoop.ozone.scm; - -/** - * This is a class that tracks versions of SCM. - */ -public final class VersionInfo { - - // We will just be normal and use positive counting numbers for versions. - private final static VersionInfo[] VERSION_INFOS = - {new VersionInfo("First version of SCM", 1)}; - - - public static final String DESCRIPTION_KEY = "Description"; - private final String description; - private final int version; - - /** - * Never created outside this class. - * - * @param description -- description - * @param version -- version number - */ - private VersionInfo(String description, int version) { - this.description = description; - this.version = version; - } - - /** - * Returns all versions. - * - * @return Version info array. - */ - public static VersionInfo[] getAllVersions() { - return VERSION_INFOS.clone(); - } - - /** - * Returns the latest version. - * - * @return versionInfo - */ - public static VersionInfo getLatestVersion() { - return VERSION_INFOS[VERSION_INFOS.length - 1]; - } - - /** - * Return description. - * - * @return String - */ - public String getDescription() { - return description; - } - - /** - * Return the version. - * - * @return int. - */ - public int getVersion() { - return version; - } - -}
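
For context on the protocol classes removed above, here is a minimal, hypothetical sketch of how a datanode-side caller typically obtains the client-side translator. It assumes Hadoop's ProtobufRpcEngine and the HdslServerUtil address helper from this same module; the client-side translator's package is assumed to be org.apache.hadoop.ozone.protocolPB, matching the other protocolPB classes in this commit, and this is not code taken from the commit itself.

  import java.io.IOException;
  import java.net.InetSocketAddress;

  import org.apache.hadoop.conf.Configuration;
  import org.apache.hadoop.ipc.ProtobufRpcEngine;
  import org.apache.hadoop.ipc.RPC;
  // Assumed package, consistent with the other protocolPB classes in this diff.
  import org.apache.hadoop.ozone.protocolPB.StorageContainerDatanodeProtocolClientSideTranslatorPB;
  import org.apache.hadoop.ozone.protocolPB.StorageContainerDatanodeProtocolPB;
  import org.apache.hadoop.ozone.scm.HdslServerUtil;

  public final class ScmDatanodeClientSketch {
    private ScmDatanodeClientSketch() { }

    /** Builds a StorageContainerDatanodeProtocol client backed by the PB translator. */
    public static StorageContainerDatanodeProtocolClientSideTranslatorPB connect(
        Configuration conf) throws IOException {
      // Route calls on the PB interface through the protobuf RPC engine.
      RPC.setProtocolEngine(conf, StorageContainerDatanodeProtocolPB.class,
          ProtobufRpcEngine.class);
      // Resolve the SCM endpoint that datanodes should talk to (see HdslServerUtil above).
      InetSocketAddress scmAddress = HdslServerUtil.getScmAddressForDataNodes(conf);
      StorageContainerDatanodeProtocolPB rpcProxy = RPC.getProxy(
          StorageContainerDatanodeProtocolPB.class,
          RPC.getProtocolVersion(StorageContainerDatanodeProtocolPB.class),
          scmAddress, conf);
      // The translator converts getVersion/register/sendHeartbeat calls into
      // protobuf requests and unwraps ServiceException back into IOException.
      return new StorageContainerDatanodeProtocolClientSideTranslatorPB(rpcProxy);
    }
  }

Because the translator implements Closeable and its close() stops the underlying proxy via RPC.stopProxy, callers are expected to close it when the endpoint shuts down.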
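
A companion sketch, equally hypothetical, for the server side: the server-side translator removed above is wrapped in the protoc-generated BlockingService and registered with a Hadoop RPC server. The bind address, port and handler count below are illustrative placeholders, not values taken from this commit.

  import java.io.IOException;

  import com.google.protobuf.BlockingService;
  import org.apache.hadoop.conf.Configuration;
  import org.apache.hadoop.hdsl.protocol.proto.StorageContainerDatanodeProtocolProtos.StorageContainerDatanodeProtocolService;
  import org.apache.hadoop.ipc.ProtobufRpcEngine;
  import org.apache.hadoop.ipc.RPC;
  import org.apache.hadoop.ozone.protocol.StorageContainerDatanodeProtocol;
  import org.apache.hadoop.ozone.protocolPB.StorageContainerDatanodeProtocolPB;
  import org.apache.hadoop.ozone.protocolPB.StorageContainerDatanodeProtocolServerSideTranslatorPB;

  public final class ScmDatanodeRpcServerSketch {
    private ScmDatanodeRpcServerSketch() { }

    /** Builds (but does not start) an RPC server for the datanode protocol. */
    public static RPC.Server build(Configuration conf,
        StorageContainerDatanodeProtocol scmImpl) throws IOException {
      RPC.setProtocolEngine(conf, StorageContainerDatanodeProtocolPB.class,
          ProtobufRpcEngine.class);
      // Wrap the SCM-side implementation in the server-side translator and the
      // blocking service generated from the datanode protocol .proto file.
      BlockingService service = StorageContainerDatanodeProtocolService
          .newReflectiveBlockingService(
              new StorageContainerDatanodeProtocolServerSideTranslatorPB(scmImpl));
      // Placeholder bind address, port and handler count.
      return new RPC.Builder(conf)
          .setProtocol(StorageContainerDatanodeProtocolPB.class)
          .setInstance(service)
          .setBindAddress("0.0.0.0")
          .setPort(9861)
          .setNumHandlers(10)
          .build();
    }
  }

On this path, any IOException thrown by the implementation is wrapped in a ServiceException by the server-side translator, and ProtobufHelper.getRemoteException on the client side turns it back into an IOException for the caller.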
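
Finally, a short hypothetical illustration of the HdslServerUtil helpers deleted above, which layer defaults and fallbacks on top of plain Configuration lookups; the host:port value is an example only.

  import java.net.InetSocketAddress;

  import org.apache.hadoop.conf.Configuration;
  import org.apache.hadoop.ozone.scm.HdslServerUtil;
  import org.apache.hadoop.scm.ScmConfigKeys;

  public final class HdslServerUtilSketch {
    public static void main(String[] args) {
      Configuration conf = new Configuration();
      // Example address only; in a real cluster this would come from the site configuration.
      conf.set(ScmConfigKeys.OZONE_SCM_DATANODE_ADDRESS_KEY, "scm.example.com:9861");
      // Falls back to OZONE_SCM_CLIENT_ADDRESS_KEY for the host and to the
      // default datanode port if no port is given.
      InetSocketAddress scm = HdslServerUtil.getScmAddressForDataNodes(conf);
      long heartbeatSeconds = HdslServerUtil.getScmHeartbeatInterval(conf);
      int containerPort = HdslServerUtil.getContainerPort(conf);
      System.out.println("SCM=" + scm + " heartbeat=" + heartbeatSeconds
          + "s containerPort=" + containerPort);
    }
  }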