[ https://issues.apache.org/jira/browse/HDFS-15877?focusedWorklogId=598085&page=com.atlassian.jira.plugin.system.issuetabpanels:worklog-tabpanel#worklog-598085 ]
ASF GitHub Bot logged work on HDFS-15877:
-----------------------------------------

Author: ASF GitHub Bot
Created on: 17/May/21 18:30
Start Date: 17/May/21 18:30
Worklog Time Spent: 10m

Work Description: hadoop-yetus commented on pull request #2747:
URL: https://github.com/apache/hadoop/pull/2747#issuecomment-842490494

:confetti_ball: **+1 overall**

| Vote | Subsystem | Runtime | Logfile | Comment |
|:----:|----------:|--------:|:--------:|:-------:|
| +0 :ok: | reexec | 0m 37s | | Docker mode activated. |
|||| _ Prechecks _ |
| +1 :green_heart: | dupname | 0m 0s | | No case conflicting files found. |
| +0 :ok: | codespell | 0m 1s | | codespell was not available. |
| +1 :green_heart: | @author | 0m 0s | | The patch does not contain any @author tags. |
| +1 :green_heart: | test4tests | 0m 0s | | The patch appears to include 1 new or modified test files. |
|||| _ trunk Compile Tests _ |
| +1 :green_heart: | mvninstall | 37m 2s | | trunk passed |
| +1 :green_heart: | compile | 1m 32s | | trunk passed with JDK Ubuntu-11.0.10+9-Ubuntu-0ubuntu1.20.04 |
| +1 :green_heart: | compile | 1m 24s | | trunk passed with JDK Private Build-1.8.0_282-8u282-b08-0ubuntu1~20.04-b08 |
| +1 :green_heart: | checkstyle | 1m 6s | | trunk passed |
| +1 :green_heart: | mvnsite | 1m 32s | | trunk passed |
| +1 :green_heart: | javadoc | 0m 57s | | trunk passed with JDK Ubuntu-11.0.10+9-Ubuntu-0ubuntu1.20.04 |
| +1 :green_heart: | javadoc | 1m 30s | | trunk passed with JDK Private Build-1.8.0_282-8u282-b08-0ubuntu1~20.04-b08 |
| +1 :green_heart: | spotbugs | 3m 39s | | trunk passed |
| +1 :green_heart: | shadedclient | 18m 34s | | branch has no errors when building and testing our client artifacts. |
|||| _ Patch Compile Tests _ |
| +1 :green_heart: | mvninstall | 1m 21s | | the patch passed |
| +1 :green_heart: | compile | 1m 26s | | the patch passed with JDK Ubuntu-11.0.10+9-Ubuntu-0ubuntu1.20.04 |
| +1 :green_heart: | javac | 1m 26s | | the patch passed |
| +1 :green_heart: | compile | 1m 13s | | the patch passed with JDK Private Build-1.8.0_282-8u282-b08-0ubuntu1~20.04-b08 |
| +1 :green_heart: | javac | 1m 13s | | the patch passed |
| +1 :green_heart: | blanks | 0m 0s | | The patch has no blanks issues. |
| +1 :green_heart: | checkstyle | 1m 0s | | the patch passed |
| +1 :green_heart: | mvnsite | 1m 22s | | the patch passed |
| +1 :green_heart: | javadoc | 0m 51s | | the patch passed with JDK Ubuntu-11.0.10+9-Ubuntu-0ubuntu1.20.04 |
| +1 :green_heart: | javadoc | 1m 22s | | the patch passed with JDK Private Build-1.8.0_282-8u282-b08-0ubuntu1~20.04-b08 |
| +1 :green_heart: | spotbugs | 3m 38s | | the patch passed |
| +1 :green_heart: | shadedclient | 18m 42s | | patch has no errors when building and testing our client artifacts. |
|||| _ Other Tests _ |
| +1 :green_heart: | unit | 230m 32s | | hadoop-hdfs in the patch passed. |
| +1 :green_heart: | asflicense | 0m 44s | | The patch does not generate ASF License warnings. |
| | | | 327m 7s | | |

| Subsystem | Report/Notes |
|----------:|:-------------|
| Docker | ClientAPI=1.41 ServerAPI=1.41 base: https://ci-hadoop.apache.org/job/hadoop-multibranch/job/PR-2747/3/artifact/out/Dockerfile |
| GITHUB PR | https://github.com/apache/hadoop/pull/2747 |
| Optional Tests | dupname asflicense compile javac javadoc mvninstall mvnsite unit shadedclient spotbugs checkstyle codespell |
| uname | Linux 01da3e889f77 4.15.0-60-generic #67-Ubuntu SMP Thu Aug 22 16:55:30 UTC 2019 x86_64 x86_64 x86_64 GNU/Linux |
| Build tool | maven |
| Personality | dev-support/bin/hadoop.sh |
| git revision | trunk / 5f18053ba0061442c19be76a08b71190a1cf8225 |
| Default Java | Private Build-1.8.0_282-8u282-b08-0ubuntu1~20.04-b08 |
| Multi-JDK versions | /usr/lib/jvm/java-11-openjdk-amd64:Ubuntu-11.0.10+9-Ubuntu-0ubuntu1.20.04 /usr/lib/jvm/java-8-openjdk-amd64:Private Build-1.8.0_282-8u282-b08-0ubuntu1~20.04-b08 |
| Test Results | https://ci-hadoop.apache.org/job/hadoop-multibranch/job/PR-2747/3/testReport/ |
| Max. process+thread count | 3783 (vs. ulimit of 5500) |
| modules | C: hadoop-hdfs-project/hadoop-hdfs U: hadoop-hdfs-project/hadoop-hdfs |
| Console output | https://ci-hadoop.apache.org/job/hadoop-multibranch/job/PR-2747/3/console |
| versions | git=2.25.1 maven=3.6.3 spotbugs=4.2.2 |
| Powered by | Apache Yetus 0.14.0-SNAPSHOT https://yetus.apache.org |

This message was automatically generated.

--
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

For queries about this service, please contact Infrastructure at:
us...@infra.apache.org

Issue Time Tracking
-------------------

Worklog Id: (was: 598085)
Time Spent: 2h 20m (was: 2h 10m)

> BlockReconstructionWork should resetTargets() before
> BlockManager#validateReconstructionWork return false
> ---------------------------------------------------------------------------------------------------------
>
>                 Key: HDFS-15877
>                 URL: https://issues.apache.org/jira/browse/HDFS-15877
>             Project: Hadoop HDFS
>          Issue Type: Improvement
>            Reporter: Haiyang Hu
>            Assignee: Haiyang Hu
>            Priority: Minor
>              Labels: pull-request-available
>          Time Spent: 2h 20m
>  Remaining Estimate: 0h
>
> BlockReconstructionWork should call resetTargets() before
> BlockManager#validateReconstructionWork returns false.
> The related code is in BlockManager.java:
> {code:java}
> private boolean validateReconstructionWork(BlockReconstructionWork rw) {
>   BlockInfo block = rw.getBlock();
>   int priority = rw.getPriority();
>   ...
>   if (block.isDeleted() || !block.isCompleteOrCommitted()) {
>     neededReconstruction.remove(block, priority);
>     rw.resetTargets();
>     return false;
>   }
>
>   // do not schedule more if enough replicas is already pending
>   ...
>   if (hasEnoughEffectiveReplicas(block, numReplicas, pendingNum)) {
>     neededReconstruction.remove(block, priority);
>     rw.resetTargets();
>     blockLog.debug("BLOCK* Removing {} from neededReconstruction as" +
>         " it has enough replicas", block);
>     return false;
>   }
>
>   DatanodeStorageInfo[] targets = rw.getTargets();
>   BlockPlacementStatus placementStatus = getBlockPlacementStatus(block);
>   if ((numReplicas.liveReplicas() >= requiredRedundancy) &&
>       (!placementStatus.isPlacementPolicySatisfied())) {
>     BlockPlacementStatus newPlacementStatus =
>         getBlockPlacementStatus(block, targets);
>     if (!newPlacementStatus.isPlacementPolicySatisfied() &&
>         (newPlacementStatus.getAdditionalReplicasRequired() >=
>             placementStatus.getAdditionalReplicasRequired())) {
>       ...
>       // Here to add rw.resetTargets();
>       return false;
>     }
>
>     rw.setNotEnoughRack();
>   }
>   ...
>   return true;
> }
> {code}
>
> --
> This message was sent by Atlassian Jira
> (v8.3.4#803005)


---------------------------------------------------------------------
To unsubscribe, e-mail: hdfs-issues-unsubscr...@hadoop.apache.org
For additional commands, e-mail: hdfs-issues-h...@hadoop.apache.org
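For illustration, a minimal sketch of the change the issue proposes: the placement-policy branch of BlockManager#validateReconstructionWork quoted above, with the rw.resetTargets() call added before the early return. All identifiers come from the quoted code; the surrounding method body is elided, so this is a sketch of the suggested edit rather than the committed patch.

{code:java}
if (!newPlacementStatus.isPlacementPolicySatisfied() &&
    (newPlacementStatus.getAdditionalReplicasRequired() >=
        placementStatus.getAdditionalReplicasRequired())) {
  // Proposed addition: clear the chosen targets before abandoning this
  // reconstruction work, matching the other early-return branches above
  // (block deleted/not complete, and enough effective replicas pending).
  rw.resetTargets();
  return false;
}
{code}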