timmylicheng commented on a change in pull request #28: HDDS-1569 Support creating multiple pipelines with same datanode
URL: https://github.com/apache/hadoop-ozone/pull/28#discussion_r335846893
 
 

 ##########
 File path: hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/pipeline/SCMPipelineManager.java
 ##########
 @@ -147,10 +152,33 @@ private void initializePipelineState() throws IOException {
     }
   }
 
+  private boolean exceedPipelineNumberLimit(ReplicationFactor factor) {
+    if (heavyNodeCriteria > 0 && factor == ReplicationFactor.THREE) {
+      return (stateManager.getPipelines(ReplicationType.RATIS, factor).size() -
+          stateManager.getPipelines(ReplicationType.RATIS, factor,
+              Pipeline.PipelineState.CLOSED).size()) >= heavyNodeCriteria *
+          nodeManager.getNodeCount(HddsProtos.NodeState.HEALTHY);
+    }
+
+    if (pipelineNumberLimit > 0) {
+      return (stateManager.getPipelines(ReplicationType.RATIS).size() -
+          stateManager.getPipelines(ReplicationType.RATIS,
+              Pipeline.PipelineState.CLOSED).size()) >= pipelineNumberLimit;
+    }
+
+    return false;
+  }
+
   @Override
   public synchronized Pipeline createPipeline(
       ReplicationType type, ReplicationFactor factor) throws IOException {
     lock.writeLock().lock();
+    if (type == ReplicationType.RATIS && exceedPipelineNumberLimit(factor)) {
+      lock.writeLock().unlock();
+      throw new SCMException("Pipeline number meets the limit: " +
+          pipelineNumberLimit,
+          SCMException.ResultCodes.FAILED_TO_FIND_HEALTHY_NODES);
 
 Review comment:
   Updated.
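
For context on the check quoted above, here is a minimal, self-contained sketch of the limit semantics (this is not code from the PR; the class, method, and parameter names are illustrative assumptions). The first branch caps open (non-CLOSED) RATIS factor-THREE pipelines at heavyNodeCriteria times the number of HEALTHY datanodes; the second branch caps all open RATIS pipelines at pipelineNumberLimit.

// Illustrative sketch only -- mirrors the two branches of
// exceedPipelineNumberLimit(); names and sample numbers are assumptions.
public class PipelineLimitSketch {

  // Per-datanode branch: open RATIS/THREE pipelines may not reach
  // heavyNodeCriteria * healthyNodeCount.
  static boolean exceedsFactorThreeLimit(int openFactorThreePipelines,
      int heavyNodeCriteria, int healthyNodeCount) {
    return heavyNodeCriteria > 0
        && openFactorThreePipelines >= heavyNodeCriteria * healthyNodeCount;
  }

  // Global branch: open RATIS pipelines may not reach pipelineNumberLimit.
  static boolean exceedsGlobalLimit(int openRatisPipelines,
      int pipelineNumberLimit) {
    return pipelineNumberLimit > 0 && openRatisPipelines >= pipelineNumberLimit;
  }

  public static void main(String[] args) {
    // Example: 3 healthy datanodes, heavyNodeCriteria = 2
    // -> at most 6 open RATIS/THREE pipelines before createPipeline() rejects.
    System.out.println(exceedsFactorThreeLimit(6, 2, 3)); // true
    System.out.println(exceedsFactorThreeLimit(5, 2, 3)); // false
  }
}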
