hadoop git commit: HDFS-12529. Get source for config tags from file name. Contributed by Ajay Kumar.
Repository: hadoop Updated Branches: refs/heads/trunk ac05a51bb -> 0889e5a8b HDFS-12529. Get source for config tags from file name. Contributed by Ajay Kumar. Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/0889e5a8 Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/0889e5a8 Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/0889e5a8 Branch: refs/heads/trunk Commit: 0889e5a8b7102ca1b64af6806537ad99c2018dfd Parents: ac05a51 Author: Anu EngineerAuthored: Mon Sep 25 13:55:09 2017 -0700 Committer: Anu Engineer Committed: Mon Sep 25 13:55:09 2017 -0700 -- .../org/apache/hadoop/conf/Configuration.java | 52 +++- .../apache/hadoop/conf/TestConfiguration.java | 11 - 2 files changed, 39 insertions(+), 24 deletions(-) -- http://git-wip-us.apache.org/repos/asf/hadoop/blob/0889e5a8/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java -- diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java index a339dac..2890853 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java @@ -81,6 +81,7 @@ import javax.xml.transform.stream.StreamResult; import com.google.common.base.Charsets; import org.apache.commons.collections.map.UnmodifiableMap; +import org.apache.commons.io.FilenameUtils; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.fs.CommonConfigurationKeysPublic; @@ -2811,6 +2812,7 @@ public class Configuration implements Iterable >, String confName = null; String confValue = null; String confInclude = null; + String confTag = null; boolean confFinal = false; boolean fallbackAllowed = 
false; boolean fallbackEntered = false; @@ -2825,6 +2827,7 @@ public class Configuration implements Iterable >, confName = null; confValue = null; confFinal = false; +confTag = null; confSource.clear(); // First test for short format configuration @@ -2843,9 +2846,8 @@ public class Configuration implements Iterable >, confSource.add(StringInterner.weakIntern( reader.getAttributeValue(i))); } else if ("tag".equals(propertyAttr)) { -//Read tags and put them in propertyTagsMap -readTagFromConfig(reader.getAttributeValue(i), confName, -confValue, confSource); +confTag = StringInterner +.weakIntern(reader.getAttributeValue(i)); } } break; @@ -2937,9 +2939,7 @@ public class Configuration implements Iterable >, break; case "tag": if (token.length() > 0) { - //Read tags and put them in propertyTagsMap - readTagFromConfig(token.toString(), confName, - confValue, confSource); + confTag = StringInterner.weakIntern(token.toString()); } break; case "include": @@ -2956,6 +2956,11 @@ public class Configuration implements Iterable >, break; } confSource.add(name); +//Read tags and put them in propertyTagsMap +if (confTag != null) { + readTagFromConfig(confTag, confName, confValue, confSource); +} + DeprecatedKeyInfo keyInfo = deprecations.getDeprecatedKeyMap().get(confName); if (keyInfo != null) { @@ -3001,21 +3006,24 @@ public class Configuration implements Iterable >, if (confSource.size() > 0) { for (String source : confSource) { PropertyTag tag1 = this.getPropertyTag(tagStr, -source.split("-")[0]); -if (propertyTagsMap.containsKey(tag1)) { - propertyTagsMap.get(tag1) - .setProperty(confName, confValue); -} else { - Properties props = new Properties(); - props.setProperty(confName, confValue); - propertyTagsMap.put(tag1, props); +FilenameUtils.getName(source).split("-")[0]); +if (tag1 !=
hadoop git commit: HADOOP-14901. Reuse ObjectMapper in Hadoop Common. Contributed by Hanisha Koneru.
Repository: hadoop Updated Branches: refs/heads/branch-2 09be0acae -> 17b17aed0 HADOOP-14901. ReuseObjectMapper in Hadoop Common. Contributed by Hanisha Koneru. Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/17b17aed Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/17b17aed Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/17b17aed Branch: refs/heads/branch-2 Commit: 17b17aed0f136c6a17341bf45f56e8119366781b Parents: 09be0ac Author: Anu EngineerAuthored: Mon Sep 25 13:44:51 2017 -0700 Committer: Anu Engineer Committed: Mon Sep 25 13:44:51 2017 -0700 -- .../hadoop/crypto/key/kms/KMSClientProvider.java | 7 +-- .../java/org/apache/hadoop/ipc/DecayRpcScheduler.java | 9 + .../src/main/java/org/apache/hadoop/log/Log4Json.java | 5 +++-- .../apache/hadoop/metrics2/MetricsJsonBuilder.java| 14 +- .../delegation/web/DelegationTokenAuthenticator.java | 7 +-- .../org/apache/hadoop/util/HttpExceptionUtils.java| 13 + 6 files changed, 36 insertions(+), 19 deletions(-) -- http://git-wip-us.apache.org/repos/asf/hadoop/blob/17b17aed/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/kms/KMSClientProvider.java -- diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/kms/KMSClientProvider.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/kms/KMSClientProvider.java index 026cbbf..f57472d 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/kms/KMSClientProvider.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/kms/KMSClientProvider.java @@ -43,6 +43,7 @@ import org.apache.hadoop.util.HttpExceptionUtils; import org.apache.hadoop.util.KMSUtil; import org.apache.http.client.utils.URIBuilder; import org.codehaus.jackson.map.ObjectMapper; +import org.codehaus.jackson.map.ObjectWriter; import org.slf4j.Logger; import 
org.slf4j.LoggerFactory; @@ -127,6 +128,9 @@ public class KMSClientProvider extends KeyProvider implements CryptoExtension, private final ValueQueue encKeyVersionQueue; + private static final ObjectWriter WRITER = + new ObjectMapper().writerWithDefaultPrettyPrinter(); + private class EncryptedQueueRefiller implements ValueQueue.QueueRefiller { @@ -282,8 +286,7 @@ public class KMSClientProvider extends KeyProvider implements CryptoExtension, private static void writeJson(Map map, OutputStream os) throws IOException { Writer writer = new OutputStreamWriter(os, StandardCharsets.UTF_8); -ObjectMapper jsonMapper = new ObjectMapper(); -jsonMapper.writerWithDefaultPrettyPrinter().writeValue(writer, map); +WRITER.writeValue(writer, map); } /** http://git-wip-us.apache.org/repos/asf/hadoop/blob/17b17aed/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/DecayRpcScheduler.java -- diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/DecayRpcScheduler.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/DecayRpcScheduler.java index fe9298f..79562ae 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/DecayRpcScheduler.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/DecayRpcScheduler.java @@ -50,6 +50,7 @@ import org.apache.hadoop.metrics2.util.Metrics2Util.NameValuePair; import org.apache.hadoop.metrics2.util.Metrics2Util.TopN; import org.codehaus.jackson.map.ObjectMapper; +import org.codehaus.jackson.map.ObjectWriter; import com.google.common.annotations.VisibleForTesting; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -128,6 +129,8 @@ public class DecayRpcScheduler implements RpcScheduler, public static final Logger LOG = LoggerFactory.getLogger(DecayRpcScheduler.class); + private static final ObjectWriter WRITER = new ObjectMapper().writer(); + // Track the decayed and raw (no decay) number of calls for each 
schedulable // identity from all previous decay windows: idx 0 for decayed call count and // idx 1 for the raw call count @@ -909,8 +912,7 @@ public class DecayRpcScheduler implements RpcScheduler, return "{}"; } else { try { -ObjectMapper om = new ObjectMapper(); -return om.writeValueAsString(decisions); +return WRITER.writeValueAsString(decisions); } catch (Exception e) { return "Error: " + e.getMessage(); } @@ -919,8 +921,7 @@ public class
hadoop git commit: HADOOP-14881. LoadGenerator should use Time.monotonicNow() to measure durations. Contributed by Bharat Viswanadham
Repository: hadoop Updated Branches: refs/heads/branch-2 c0dacda54 -> 09be0acae HADOOP-14881. LoadGenerator should use Time.monotonicNow() to measure durations. Contributed by Bharat Viswanadham (cherry picked from commit ac05a51bbb2a3fad4e85f9334a3408571967900a) Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/09be0aca Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/09be0aca Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/09be0aca Branch: refs/heads/branch-2 Commit: 09be0acae0d6df3e101a8832dce58fc36407cf50 Parents: c0dacda Author: Jason LoweAuthored: Mon Sep 25 15:35:44 2017 -0500 Committer: Jason Lowe Committed: Mon Sep 25 15:38:52 2017 -0500 -- .../hadoop/fs/loadGenerator/LoadGenerator.java | 20 ++-- 1 file changed, 10 insertions(+), 10 deletions(-) -- http://git-wip-us.apache.org/repos/asf/hadoop/blob/09be0aca/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/loadGenerator/LoadGenerator.java -- diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/loadGenerator/LoadGenerator.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/loadGenerator/LoadGenerator.java index 6da5182..0bb1b46 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/loadGenerator/LoadGenerator.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/loadGenerator/LoadGenerator.java @@ -277,9 +277,9 @@ public class LoadGenerator extends Configured implements Tool { * the entire file */ private void read() throws IOException { String fileName = files.get(r.nextInt(files.size())); - long startTime = Time.now(); + long startTimestamp = Time.monotonicNow(); InputStream in = fc.open(new Path(fileName)); - executionTime[OPEN] += (Time.now()-startTime); + executionTime[OPEN] += (Time.monotonicNow() - startTimestamp); totalNumOfOps[OPEN]++; while (in.read(buffer) != -1) {} in.close(); 
@@ -299,9 +299,9 @@ public class LoadGenerator extends Configured implements Tool { double fileSize = 0; while ((fileSize = r.nextGaussian()+2)<=0) {} genFile(file, (long)(fileSize*BLOCK_SIZE)); - long startTime = Time.now(); + long startTimestamp = Time.monotonicNow(); fc.delete(file, true); - executionTime[DELETE] += (Time.now()-startTime); + executionTime[DELETE] += (Time.monotonicNow() - startTimestamp); totalNumOfOps[DELETE]++; } @@ -310,9 +310,9 @@ public class LoadGenerator extends Configured implements Tool { */ private void list() throws IOException { String dirName = dirs.get(r.nextInt(dirs.size())); - long startTime = Time.now(); + long startTimestamp = Time.monotonicNow(); fc.listStatus(new Path(dirName)); - executionTime[LIST] += (Time.now()-startTime); + executionTime[LIST] += (Time.monotonicNow() - startTimestamp); totalNumOfOps[LIST]++; } @@ -320,14 +320,14 @@ public class LoadGenerator extends Configured implements Tool { * The file is filled with 'a'. */ private void genFile(Path file, long fileSize) throws IOException { - long startTime = Time.now(); + long startTimestamp = Time.monotonicNow(); FSDataOutputStream out = null; try { out = fc.create(file, EnumSet.of(CreateFlag.CREATE, CreateFlag.OVERWRITE), CreateOpts.createParent(), CreateOpts.bufferSize(4096), CreateOpts.repFac((short) 3)); -executionTime[CREATE] += (Time.now() - startTime); +executionTime[CREATE] += (Time.monotonicNow() - startTimestamp); numOfOps[CREATE]++; long i = fileSize; @@ -337,8 +337,8 @@ public class LoadGenerator extends Configured implements Tool { i -= s; } -startTime = Time.now(); -executionTime[WRITE_CLOSE] += (Time.now() - startTime); +startTimestamp = Time.monotonicNow(); +executionTime[WRITE_CLOSE] += (Time.monotonicNow() - startTimestamp); numOfOps[WRITE_CLOSE]++; } finally { IOUtils.cleanupWithLogger(LOG, out); - To unsubscribe, e-mail: common-commits-unsubscr...@hadoop.apache.org For additional commands, e-mail: common-commits-h...@hadoop.apache.org
hadoop git commit: HADOOP-14881. LoadGenerator should use Time.monotonicNow() to measure durations. Contributed by Bharat Viswanadham
Repository: hadoop Updated Branches: refs/heads/branch-3.0 149d3ad6b -> bfad0ca3b HADOOP-14881. LoadGenerator should use Time.monotonicNow() to measure durations. Contributed by Bharat Viswanadham (cherry picked from commit ac05a51bbb2a3fad4e85f9334a3408571967900a) Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/bfad0ca3 Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/bfad0ca3 Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/bfad0ca3 Branch: refs/heads/branch-3.0 Commit: bfad0ca3b0d2b9e70b505c21ad1bec93bf8b7ed2 Parents: 149d3ad Author: Jason LoweAuthored: Mon Sep 25 15:35:44 2017 -0500 Committer: Jason Lowe Committed: Mon Sep 25 15:38:24 2017 -0500 -- .../hadoop/fs/loadGenerator/LoadGenerator.java | 20 ++-- 1 file changed, 10 insertions(+), 10 deletions(-) -- http://git-wip-us.apache.org/repos/asf/hadoop/blob/bfad0ca3/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/loadGenerator/LoadGenerator.java -- diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/loadGenerator/LoadGenerator.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/loadGenerator/LoadGenerator.java index 6da5182..0bb1b46 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/loadGenerator/LoadGenerator.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/loadGenerator/LoadGenerator.java @@ -277,9 +277,9 @@ public class LoadGenerator extends Configured implements Tool { * the entire file */ private void read() throws IOException { String fileName = files.get(r.nextInt(files.size())); - long startTime = Time.now(); + long startTimestamp = Time.monotonicNow(); InputStream in = fc.open(new Path(fileName)); - executionTime[OPEN] += (Time.now()-startTime); + executionTime[OPEN] += (Time.monotonicNow() - startTimestamp); totalNumOfOps[OPEN]++; while (in.read(buffer) != -1) {} 
in.close(); @@ -299,9 +299,9 @@ public class LoadGenerator extends Configured implements Tool { double fileSize = 0; while ((fileSize = r.nextGaussian()+2)<=0) {} genFile(file, (long)(fileSize*BLOCK_SIZE)); - long startTime = Time.now(); + long startTimestamp = Time.monotonicNow(); fc.delete(file, true); - executionTime[DELETE] += (Time.now()-startTime); + executionTime[DELETE] += (Time.monotonicNow() - startTimestamp); totalNumOfOps[DELETE]++; } @@ -310,9 +310,9 @@ public class LoadGenerator extends Configured implements Tool { */ private void list() throws IOException { String dirName = dirs.get(r.nextInt(dirs.size())); - long startTime = Time.now(); + long startTimestamp = Time.monotonicNow(); fc.listStatus(new Path(dirName)); - executionTime[LIST] += (Time.now()-startTime); + executionTime[LIST] += (Time.monotonicNow() - startTimestamp); totalNumOfOps[LIST]++; } @@ -320,14 +320,14 @@ public class LoadGenerator extends Configured implements Tool { * The file is filled with 'a'. */ private void genFile(Path file, long fileSize) throws IOException { - long startTime = Time.now(); + long startTimestamp = Time.monotonicNow(); FSDataOutputStream out = null; try { out = fc.create(file, EnumSet.of(CreateFlag.CREATE, CreateFlag.OVERWRITE), CreateOpts.createParent(), CreateOpts.bufferSize(4096), CreateOpts.repFac((short) 3)); -executionTime[CREATE] += (Time.now() - startTime); +executionTime[CREATE] += (Time.monotonicNow() - startTimestamp); numOfOps[CREATE]++; long i = fileSize; @@ -337,8 +337,8 @@ public class LoadGenerator extends Configured implements Tool { i -= s; } -startTime = Time.now(); -executionTime[WRITE_CLOSE] += (Time.now() - startTime); +startTimestamp = Time.monotonicNow(); +executionTime[WRITE_CLOSE] += (Time.monotonicNow() - startTimestamp); numOfOps[WRITE_CLOSE]++; } finally { IOUtils.cleanupWithLogger(LOG, out); - To unsubscribe, e-mail: common-commits-unsubscr...@hadoop.apache.org For additional commands, e-mail: common-commits-h...@hadoop.apache.org
hadoop git commit: HADOOP-14881. LoadGenerator should use Time.monotonicNow() to measure durations. Contributed by Bharat Viswanadham
Repository: hadoop Updated Branches: refs/heads/trunk e928ee583 -> ac05a51bb HADOOP-14881. LoadGenerator should use Time.monotonicNow() to measure durations. Contributed by Bharat Viswanadham Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/ac05a51b Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/ac05a51b Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/ac05a51b Branch: refs/heads/trunk Commit: ac05a51bbb2a3fad4e85f9334a3408571967900a Parents: e928ee5 Author: Jason LoweAuthored: Mon Sep 25 15:35:44 2017 -0500 Committer: Jason Lowe Committed: Mon Sep 25 15:35:44 2017 -0500 -- .../hadoop/fs/loadGenerator/LoadGenerator.java | 20 ++-- 1 file changed, 10 insertions(+), 10 deletions(-) -- http://git-wip-us.apache.org/repos/asf/hadoop/blob/ac05a51b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/loadGenerator/LoadGenerator.java -- diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/loadGenerator/LoadGenerator.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/loadGenerator/LoadGenerator.java index 6da5182..0bb1b46 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/loadGenerator/LoadGenerator.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/loadGenerator/LoadGenerator.java @@ -277,9 +277,9 @@ public class LoadGenerator extends Configured implements Tool { * the entire file */ private void read() throws IOException { String fileName = files.get(r.nextInt(files.size())); - long startTime = Time.now(); + long startTimestamp = Time.monotonicNow(); InputStream in = fc.open(new Path(fileName)); - executionTime[OPEN] += (Time.now()-startTime); + executionTime[OPEN] += (Time.monotonicNow() - startTimestamp); totalNumOfOps[OPEN]++; while (in.read(buffer) != -1) {} in.close(); @@ -299,9 +299,9 @@ public class LoadGenerator extends Configured 
implements Tool { double fileSize = 0; while ((fileSize = r.nextGaussian()+2)<=0) {} genFile(file, (long)(fileSize*BLOCK_SIZE)); - long startTime = Time.now(); + long startTimestamp = Time.monotonicNow(); fc.delete(file, true); - executionTime[DELETE] += (Time.now()-startTime); + executionTime[DELETE] += (Time.monotonicNow() - startTimestamp); totalNumOfOps[DELETE]++; } @@ -310,9 +310,9 @@ public class LoadGenerator extends Configured implements Tool { */ private void list() throws IOException { String dirName = dirs.get(r.nextInt(dirs.size())); - long startTime = Time.now(); + long startTimestamp = Time.monotonicNow(); fc.listStatus(new Path(dirName)); - executionTime[LIST] += (Time.now()-startTime); + executionTime[LIST] += (Time.monotonicNow() - startTimestamp); totalNumOfOps[LIST]++; } @@ -320,14 +320,14 @@ public class LoadGenerator extends Configured implements Tool { * The file is filled with 'a'. */ private void genFile(Path file, long fileSize) throws IOException { - long startTime = Time.now(); + long startTimestamp = Time.monotonicNow(); FSDataOutputStream out = null; try { out = fc.create(file, EnumSet.of(CreateFlag.CREATE, CreateFlag.OVERWRITE), CreateOpts.createParent(), CreateOpts.bufferSize(4096), CreateOpts.repFac((short) 3)); -executionTime[CREATE] += (Time.now() - startTime); +executionTime[CREATE] += (Time.monotonicNow() - startTimestamp); numOfOps[CREATE]++; long i = fileSize; @@ -337,8 +337,8 @@ public class LoadGenerator extends Configured implements Tool { i -= s; } -startTime = Time.now(); -executionTime[WRITE_CLOSE] += (Time.now() - startTime); +startTimestamp = Time.monotonicNow(); +executionTime[WRITE_CLOSE] += (Time.monotonicNow() - startTimestamp); numOfOps[WRITE_CLOSE]++; } finally { IOUtils.cleanupWithLogger(LOG, out); - To unsubscribe, e-mail: common-commits-unsubscr...@hadoop.apache.org For additional commands, e-mail: common-commits-h...@hadoop.apache.org
hadoop git commit: YARN-7153. Remove duplicated code in AMRMClientAsyncImpl.java. Contributed by Sen Zhao.
Repository: hadoop Updated Branches: refs/heads/branch-2 6004fb467 -> c0dacda54 YARN-7153. Remove duplicated code in AMRMClientAsyncImpl.java. Contributed by Sen Zhao. (cherry picked from commit e928ee583c5a1367e24eab34057f8d8496891452) Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/c0dacda5 Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/c0dacda5 Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/c0dacda5 Branch: refs/heads/branch-2 Commit: c0dacda54652e32ddd0ac1bc8c1d1af404cc22bd Parents: 6004fb4 Author: Akira AjisakaAuthored: Tue Sep 26 02:03:30 2017 +0900 Committer: Akira Ajisaka Committed: Tue Sep 26 02:05:35 2017 +0900 -- .../hadoop/yarn/client/api/async/impl/AMRMClientAsyncImpl.java | 6 +- 1 file changed, 1 insertion(+), 5 deletions(-) -- http://git-wip-us.apache.org/repos/asf/hadoop/blob/c0dacda5/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/api/async/impl/AMRMClientAsyncImpl.java -- diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/api/async/impl/AMRMClientAsyncImpl.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/api/async/impl/AMRMClientAsyncImpl.java index c075be4..089884a 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/api/async/impl/AMRMClientAsyncImpl.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/api/async/impl/AMRMClientAsyncImpl.java @@ -141,11 +141,7 @@ extends AMRMClientAsync { handlerThread.interrupt(); super.serviceStop(); } - - public void setHeartbeatInterval(int interval) { -heartbeatIntervalMs.set(interval); - } - + public List> getMatchingRequests( Priority priority, String resourceName, - To unsubscribe, e-mail: common-commits-unsubscr...@hadoop.apache.org For additional 
commands, e-mail: common-commits-help@hadoop.apache.org
hadoop git commit: YARN-7153. Remove duplicated code in AMRMClientAsyncImpl.java. Contributed by Sen Zhao.
Repository: hadoop Updated Branches: refs/heads/branch-3.0 366ce11ac -> 149d3ad6b YARN-7153. Remove duplicated code in AMRMClientAsyncImpl.java. Contributed by Sen Zhao. (cherry picked from commit e928ee583c5a1367e24eab34057f8d8496891452) Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/149d3ad6 Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/149d3ad6 Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/149d3ad6 Branch: refs/heads/branch-3.0 Commit: 149d3ad6bad1980702dcd307d5de9ff60dc050b4 Parents: 366ce11 Author: Akira AjisakaAuthored: Tue Sep 26 02:03:30 2017 +0900 Committer: Akira Ajisaka Committed: Tue Sep 26 02:04:23 2017 +0900 -- .../hadoop/yarn/client/api/async/impl/AMRMClientAsyncImpl.java | 6 +- 1 file changed, 1 insertion(+), 5 deletions(-) -- http://git-wip-us.apache.org/repos/asf/hadoop/blob/149d3ad6/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/api/async/impl/AMRMClientAsyncImpl.java -- diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/api/async/impl/AMRMClientAsyncImpl.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/api/async/impl/AMRMClientAsyncImpl.java index d12b108..e44f3cd 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/api/async/impl/AMRMClientAsyncImpl.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/api/async/impl/AMRMClientAsyncImpl.java @@ -142,11 +142,7 @@ extends AMRMClientAsync { handlerThread.interrupt(); super.serviceStop(); } - - public void setHeartbeatInterval(int interval) { -heartbeatIntervalMs.set(interval); - } - + public List> getMatchingRequests( Priority priority, String resourceName, - To unsubscribe, e-mail: common-commits-unsubscr...@hadoop.apache.org For additional 
commands, e-mail: common-commits-help@hadoop.apache.org
hadoop git commit: YARN-7153. Remove duplicated code in AMRMClientAsyncImpl.java. Contributed by Sen Zhao.
Repository: hadoop Updated Branches: refs/heads/trunk 3a10367a1 -> e928ee583 YARN-7153. Remove duplicated code in AMRMClientAsyncImpl.java. Contributed by Sen Zhao. Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/e928ee58 Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/e928ee58 Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/e928ee58 Branch: refs/heads/trunk Commit: e928ee583c5a1367e24eab34057f8d8496891452 Parents: 3a10367 Author: Akira AjisakaAuthored: Tue Sep 26 02:03:30 2017 +0900 Committer: Akira Ajisaka Committed: Tue Sep 26 02:03:30 2017 +0900 -- .../hadoop/yarn/client/api/async/impl/AMRMClientAsyncImpl.java | 6 +- 1 file changed, 1 insertion(+), 5 deletions(-) -- http://git-wip-us.apache.org/repos/asf/hadoop/blob/e928ee58/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/api/async/impl/AMRMClientAsyncImpl.java -- diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/api/async/impl/AMRMClientAsyncImpl.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/api/async/impl/AMRMClientAsyncImpl.java index d12b108..e44f3cd 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/api/async/impl/AMRMClientAsyncImpl.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/api/async/impl/AMRMClientAsyncImpl.java @@ -142,11 +142,7 @@ extends AMRMClientAsync { handlerThread.interrupt(); super.serviceStop(); } - - public void setHeartbeatInterval(int interval) { -heartbeatIntervalMs.set(interval); - } - + public List> getMatchingRequests( Priority priority, String resourceName, - To unsubscribe, e-mail: common-commits-unsubscr...@hadoop.apache.org For additional commands, e-mail: common-commits-h...@hadoop.apache.org
hadoop git commit: HADOOP-14892. MetricsSystemImpl should use Time.monotonicNow for measuring durations. Contributed by Chetna Chaudhari.
Repository: hadoop Updated Branches: refs/heads/branch-2.8 44b51b004 -> ea7e65589 HADOOP-14892. MetricsSystemImpl should use Time.monotonicNow for measuring durations. Contributed by Chetna Chaudhari. (cherry picked from commit 3a10367a17b99e2c3035ec54048f94f334d93227) (cherry picked from commit 6004fb46761810ef87367c5207fafcdbb6507c39) Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/ea7e6558 Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/ea7e6558 Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/ea7e6558 Branch: refs/heads/branch-2.8 Commit: ea7e655897e1084063b2a570dc59c0fc8fcc7ad4 Parents: 44b51b0 Author: Akira AjisakaAuthored: Tue Sep 26 01:47:02 2017 +0900 Committer: Akira Ajisaka Committed: Tue Sep 26 01:48:54 2017 +0900 -- .../org/apache/hadoop/metrics2/impl/MetricsSystemImpl.java | 8 1 file changed, 4 insertions(+), 4 deletions(-) -- http://git-wip-us.apache.org/repos/asf/hadoop/blob/ea7e6558/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/impl/MetricsSystemImpl.java -- diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/impl/MetricsSystemImpl.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/impl/MetricsSystemImpl.java index e97e948..15d27aa 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/impl/MetricsSystemImpl.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/impl/MetricsSystemImpl.java @@ -415,10 +415,10 @@ public class MetricsSystemImpl extends MetricsSystem implements MetricsSource { private void snapshotMetrics(MetricsSourceAdapter sa, MetricsBufferBuilder bufferBuilder) { -long startTime = Time.now(); +long startTime = Time.monotonicNow(); bufferBuilder.add(sa.name(), sa.getMetrics(collector, true)); collector.clear(); -snapshotStat.add(Time.now() - startTime); 
+snapshotStat.add(Time.monotonicNow() - startTime); LOG.debug("Snapshotted source "+ sa.name()); } @@ -431,7 +431,7 @@ public class MetricsSystemImpl extends MetricsSystem implements MetricsSource { synchronized void publishMetrics(MetricsBuffer buffer, boolean immediate) { int dropped = 0; for (MetricsSinkAdapter sa : sinks.values()) { - long startTime = Time.now(); + long startTime = Time.monotonicNow(); boolean result; if (immediate) { result = sa.putMetricsImmediate(buffer); @@ -439,7 +439,7 @@ public class MetricsSystemImpl extends MetricsSystem implements MetricsSource { result = sa.putMetrics(buffer, logicalTime); } dropped += result ? 0 : 1; - publishStat.add(Time.now() - startTime); + publishStat.add(Time.monotonicNow() - startTime); } droppedPubAll.incr(dropped); } - To unsubscribe, e-mail: common-commits-unsubscr...@hadoop.apache.org For additional commands, e-mail: common-commits-h...@hadoop.apache.org
hadoop git commit: HADOOP-14892. MetricsSystemImpl should use Time.monotonicNow for measuring durations. Contributed by Chetna Chaudhari.
Repository: hadoop Updated Branches: refs/heads/branch-2 de5744c90 -> 6004fb467 HADOOP-14892. MetricsSystemImpl should use Time.monotonicNow for measuring durations. Contributed by Chetna Chaudhari. (cherry picked from commit 3a10367a17b99e2c3035ec54048f94f334d93227) Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/6004fb46 Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/6004fb46 Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/6004fb46 Branch: refs/heads/branch-2 Commit: 6004fb46761810ef87367c5207fafcdbb6507c39 Parents: de5744c Author: Akira AjisakaAuthored: Tue Sep 26 01:47:02 2017 +0900 Committer: Akira Ajisaka Committed: Tue Sep 26 01:48:24 2017 +0900 -- .../org/apache/hadoop/metrics2/impl/MetricsSystemImpl.java | 8 1 file changed, 4 insertions(+), 4 deletions(-) -- http://git-wip-us.apache.org/repos/asf/hadoop/blob/6004fb46/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/impl/MetricsSystemImpl.java -- diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/impl/MetricsSystemImpl.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/impl/MetricsSystemImpl.java index 1166ff9..91102c8 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/impl/MetricsSystemImpl.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/impl/MetricsSystemImpl.java @@ -415,10 +415,10 @@ public class MetricsSystemImpl extends MetricsSystem implements MetricsSource { private void snapshotMetrics(MetricsSourceAdapter sa, MetricsBufferBuilder bufferBuilder) { -long startTime = Time.now(); +long startTime = Time.monotonicNow(); bufferBuilder.add(sa.name(), sa.getMetrics(collector, true)); collector.clear(); -snapshotStat.add(Time.now() - startTime); +snapshotStat.add(Time.monotonicNow() - startTime); LOG.debug("Snapshotted source "+ sa.name()); 
} @@ -431,7 +431,7 @@ public class MetricsSystemImpl extends MetricsSystem implements MetricsSource { synchronized void publishMetrics(MetricsBuffer buffer, boolean immediate) { int dropped = 0; for (MetricsSinkAdapter sa : sinks.values()) { - long startTime = Time.now(); + long startTime = Time.monotonicNow(); boolean result; if (immediate) { result = sa.putMetricsImmediate(buffer); @@ -439,7 +439,7 @@ public class MetricsSystemImpl extends MetricsSystem implements MetricsSource { result = sa.putMetrics(buffer, logicalTime); } dropped += result ? 0 : 1; - publishStat.add(Time.now() - startTime); + publishStat.add(Time.monotonicNow() - startTime); } droppedPubAll.incr(dropped); } - To unsubscribe, e-mail: common-commits-unsubscr...@hadoop.apache.org For additional commands, e-mail: common-commits-h...@hadoop.apache.org
hadoop git commit: HADOOP-14892. MetricsSystemImpl should use Time.monotonicNow for measuring durations. Contributed by Chetna Chaudhari.
Repository: hadoop Updated Branches: refs/heads/branch-3.0 51b3a3806 -> 366ce11ac HADOOP-14892. MetricsSystemImpl should use Time.monotonicNow for measuring durations. Contributed by Chetna Chaudhari. (cherry picked from commit 3a10367a17b99e2c3035ec54048f94f334d93227) Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/366ce11a Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/366ce11a Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/366ce11a Branch: refs/heads/branch-3.0 Commit: 366ce11ac0e96ea553d257dab06ac90886d108b5 Parents: 51b3a38 Author: Akira AjisakaAuthored: Tue Sep 26 01:47:02 2017 +0900 Committer: Akira Ajisaka Committed: Tue Sep 26 01:47:59 2017 +0900 -- .../org/apache/hadoop/metrics2/impl/MetricsSystemImpl.java | 8 1 file changed, 4 insertions(+), 4 deletions(-) -- http://git-wip-us.apache.org/repos/asf/hadoop/blob/366ce11a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/impl/MetricsSystemImpl.java -- diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/impl/MetricsSystemImpl.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/impl/MetricsSystemImpl.java index 2248122..ee1672e 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/impl/MetricsSystemImpl.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/impl/MetricsSystemImpl.java @@ -415,10 +415,10 @@ public class MetricsSystemImpl extends MetricsSystem implements MetricsSource { private void snapshotMetrics(MetricsSourceAdapter sa, MetricsBufferBuilder bufferBuilder) { -long startTime = Time.now(); +long startTime = Time.monotonicNow(); bufferBuilder.add(sa.name(), sa.getMetrics(collector, true)); collector.clear(); -snapshotStat.add(Time.now() - startTime); +snapshotStat.add(Time.monotonicNow() - startTime); LOG.debug("Snapshotted source "+ 
sa.name()); } @@ -431,7 +431,7 @@ public class MetricsSystemImpl extends MetricsSystem implements MetricsSource { synchronized void publishMetrics(MetricsBuffer buffer, boolean immediate) { int dropped = 0; for (MetricsSinkAdapter sa : sinks.values()) { - long startTime = Time.now(); + long startTime = Time.monotonicNow(); boolean result; if (immediate) { result = sa.putMetricsImmediate(buffer); @@ -439,7 +439,7 @@ public class MetricsSystemImpl extends MetricsSystem implements MetricsSource { result = sa.putMetrics(buffer, logicalTime); } dropped += result ? 0 : 1; - publishStat.add(Time.now() - startTime); + publishStat.add(Time.monotonicNow() - startTime); } droppedPubAll.incr(dropped); } - To unsubscribe, e-mail: common-commits-unsubscr...@hadoop.apache.org For additional commands, e-mail: common-commits-h...@hadoop.apache.org
hadoop git commit: HADOOP-14892. MetricsSystemImpl should use Time.monotonicNow for measuring durations. Contributed by Chetna Chaudhari.
Repository: hadoop Updated Branches: refs/heads/trunk 02e2a9b11 -> 3a10367a1 HADOOP-14892. MetricsSystemImpl should use Time.monotonicNow for measuring durations. Contributed by Chetna Chaudhari. Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/3a10367a Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/3a10367a Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/3a10367a Branch: refs/heads/trunk Commit: 3a10367a17b99e2c3035ec54048f94f334d93227 Parents: 02e2a9b Author: Akira AjisakaAuthored: Tue Sep 26 01:47:02 2017 +0900 Committer: Akira Ajisaka Committed: Tue Sep 26 01:47:02 2017 +0900 -- .../org/apache/hadoop/metrics2/impl/MetricsSystemImpl.java | 8 1 file changed, 4 insertions(+), 4 deletions(-) -- http://git-wip-us.apache.org/repos/asf/hadoop/blob/3a10367a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/impl/MetricsSystemImpl.java -- diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/impl/MetricsSystemImpl.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/impl/MetricsSystemImpl.java index 2248122..ee1672e 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/impl/MetricsSystemImpl.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/impl/MetricsSystemImpl.java @@ -415,10 +415,10 @@ public class MetricsSystemImpl extends MetricsSystem implements MetricsSource { private void snapshotMetrics(MetricsSourceAdapter sa, MetricsBufferBuilder bufferBuilder) { -long startTime = Time.now(); +long startTime = Time.monotonicNow(); bufferBuilder.add(sa.name(), sa.getMetrics(collector, true)); collector.clear(); -snapshotStat.add(Time.now() - startTime); +snapshotStat.add(Time.monotonicNow() - startTime); LOG.debug("Snapshotted source "+ sa.name()); } @@ -431,7 +431,7 @@ public class MetricsSystemImpl extends MetricsSystem 
implements MetricsSource { synchronized void publishMetrics(MetricsBuffer buffer, boolean immediate) { int dropped = 0; for (MetricsSinkAdapter sa : sinks.values()) { - long startTime = Time.now(); + long startTime = Time.monotonicNow(); boolean result; if (immediate) { result = sa.putMetricsImmediate(buffer); @@ -439,7 +439,7 @@ public class MetricsSystemImpl extends MetricsSystem implements MetricsSource { result = sa.putMetrics(buffer, logicalTime); } dropped += result ? 0 : 1; - publishStat.add(Time.now() - startTime); + publishStat.add(Time.monotonicNow() - startTime); } droppedPubAll.incr(dropped); } - To unsubscribe, e-mail: common-commits-unsubscr...@hadoop.apache.org For additional commands, e-mail: common-commits-h...@hadoop.apache.org
hadoop git commit: HDFS-12304. Remove unused parameter from FsDatasetImpl#addVolume. Contributed by Chen Liang.
Repository: hadoop Updated Branches: refs/heads/branch-3.0 dca2fb5ac -> 51b3a3806 HDFS-12304. Remove unused parameter from FsDatasetImpl#addVolume. Contributed by Chen Liang. (cherry picked from commit 02e2a9b1152b0e144fcf43bec2fce26d8a6c6dbc) Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/51b3a380 Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/51b3a380 Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/51b3a380 Branch: refs/heads/branch-3.0 Commit: 51b3a3806495ff5acdfca91b5be4ad6541e68836 Parents: dca2fb5 Author: Akira AjisakaAuthored: Tue Sep 26 01:25:07 2017 +0900 Committer: Akira Ajisaka Committed: Tue Sep 26 01:26:13 2017 +0900 -- .../hdfs/server/datanode/fsdataset/impl/FsDatasetImpl.java | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) -- http://git-wip-us.apache.org/repos/asf/hadoop/blob/51b3a380/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/FsDatasetImpl.java -- diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/FsDatasetImpl.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/FsDatasetImpl.java index 16df709..41c41e6 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/FsDatasetImpl.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/FsDatasetImpl.java @@ -315,7 +315,7 @@ class FsDatasetImpl implements FsDatasetSpi { deletingBlock = new HashMap (); for (int idx = 0; idx < storage.getNumStorageDirs(); idx++) { - addVolume(dataLocations, storage.getStorageDir(idx)); + addVolume(storage.getStorageDir(idx)); } setupAsyncLazyPersistThreads(); @@ -413,8 +413,7 @@ class FsDatasetImpl implements FsDatasetSpi { } } - private void addVolume(Collection dataLocations, - 
Storage.StorageDirectory sd) throws IOException { + private void addVolume(Storage.StorageDirectory sd) throws IOException { final StorageLocation storageLocation = sd.getStorageLocation(); // If IOException raises from FsVolumeImpl() or getVolumeMap(), there is - To unsubscribe, e-mail: common-commits-unsubscr...@hadoop.apache.org For additional commands, e-mail: common-commits-h...@hadoop.apache.org
hadoop git commit: HDFS-12304. Remove unused parameter from FsDatasetImpl#addVolume. Contributed by Chen Liang.
Repository: hadoop Updated Branches: refs/heads/trunk 080747080 -> 02e2a9b11 HDFS-12304. Remove unused parameter from FsDatasetImpl#addVolume. Contributed by Chen Liang. Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/02e2a9b1 Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/02e2a9b1 Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/02e2a9b1 Branch: refs/heads/trunk Commit: 02e2a9b1152b0e144fcf43bec2fce26d8a6c6dbc Parents: 0807470 Author: Akira AjisakaAuthored: Tue Sep 26 01:25:07 2017 +0900 Committer: Akira Ajisaka Committed: Tue Sep 26 01:25:07 2017 +0900 -- .../hdfs/server/datanode/fsdataset/impl/FsDatasetImpl.java | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) -- http://git-wip-us.apache.org/repos/asf/hadoop/blob/02e2a9b1/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/FsDatasetImpl.java -- diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/FsDatasetImpl.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/FsDatasetImpl.java index 16df709..41c41e6 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/FsDatasetImpl.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/FsDatasetImpl.java @@ -315,7 +315,7 @@ class FsDatasetImpl implements FsDatasetSpi { deletingBlock = new HashMap (); for (int idx = 0; idx < storage.getNumStorageDirs(); idx++) { - addVolume(dataLocations, storage.getStorageDir(idx)); + addVolume(storage.getStorageDir(idx)); } setupAsyncLazyPersistThreads(); @@ -413,8 +413,7 @@ class FsDatasetImpl implements FsDatasetSpi { } } - private void addVolume(Collection dataLocations, - Storage.StorageDirectory sd) throws IOException { + private void 
addVolume(Storage.StorageDirectory sd) throws IOException { final StorageLocation storageLocation = sd.getStorageLocation(); // If IOException raises from FsVolumeImpl() or getVolumeMap(), there is - To unsubscribe, e-mail: common-commits-unsubscr...@hadoop.apache.org For additional commands, e-mail: common-commits-h...@hadoop.apache.org
hadoop git commit: Revert "YARN-6570. No logs were found for running application, running"
Repository: hadoop Updated Branches: refs/heads/branch-2.8 3ecc1431f -> 44b51b004 Revert "YARN-6570. No logs were found for running application, running" This reverts commit e103acc8f495135e52ec1e700dc33747e735967e. Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/44b51b00 Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/44b51b00 Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/44b51b00 Branch: refs/heads/branch-2.8 Commit: 44b51b004796a28d7bc4bfdcf0a36cfd0e2fab81 Parents: 3ecc143 Author: Jason LoweAuthored: Mon Sep 25 11:13:50 2017 -0500 Committer: Jason Lowe Committed: Mon Sep 25 11:13:50 2017 -0500 -- .../hadoop/yarn/api/records/ContainerState.java| 17 ++--- .../src/main/proto/yarn_protos.proto | 1 - .../containermanager/container/ContainerImpl.java | 2 -- .../yarn/server/nodemanager/TestEventFlow.java | 3 +-- .../server/nodemanager/TestNodeManagerResync.java | 8 .../nodemanager/TestNodeManagerShutdown.java | 2 +- .../containermanager/container/TestContainer.java | 3 --- 7 files changed, 8 insertions(+), 28 deletions(-) -- http://git-wip-us.apache.org/repos/asf/hadoop/blob/44b51b00/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/ContainerState.java -- diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/ContainerState.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/ContainerState.java index 3ced112..323d31d 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/ContainerState.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/ContainerState.java @@ -20,7 +20,6 @@ package org.apache.hadoop.yarn.api.records; import org.apache.hadoop.classification.InterfaceAudience.Public; import 
org.apache.hadoop.classification.InterfaceStability.Stable; -import org.apache.hadoop.classification.InterfaceStability.Unstable; /** * State of a Container. @@ -29,15 +28,11 @@ import org.apache.hadoop.classification.InterfaceStability.Unstable; @Stable public enum ContainerState { /** New container */ - NEW, - + NEW, + /** Running container */ - RUNNING, - + RUNNING, + /** Completed container */ - COMPLETE, - - /** Scheduled (awaiting resources) at the NM. */ - @Unstable - SCHEDULED -} + COMPLETE +} \ No newline at end of file http://git-wip-us.apache.org/repos/asf/hadoop/blob/44b51b00/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/proto/yarn_protos.proto -- diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/proto/yarn_protos.proto b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/proto/yarn_protos.proto index 8727505..8c847b2 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/proto/yarn_protos.proto +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/proto/yarn_protos.proto @@ -82,7 +82,6 @@ enum ContainerStateProto { C_NEW = 1; C_RUNNING = 2; C_COMPLETE = 3; - C_SCHEDULED = 4; } message ContainerProto { http://git-wip-us.apache.org/repos/asf/hadoop/blob/44b51b00/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/container/ContainerImpl.java -- diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/container/ContainerImpl.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/container/ContainerImpl.java index 9e72b13..7dc8ab6 100644 --- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/container/ContainerImpl.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/container/ContainerImpl.java @@ -363,11 +363,9 @@ public class ContainerImpl implements Container { public org.apache.hadoop.yarn.api.records.ContainerState getCurrentState() { switch (stateMachine.getCurrentState()) { case NEW: - return
hadoop git commit: HADOOP-14897. Loosen compatibility guidelines for native dependencies
Repository: hadoop Updated Branches: refs/heads/branch-3.0 972250923 -> dca2fb5ac HADOOP-14897. Loosen compatibility guidelines for native dependencies (cherry picked from commit 0807470808156af13e0992285cac070052d86c26) Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/dca2fb5a Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/dca2fb5a Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/dca2fb5a Branch: refs/heads/branch-3.0 Commit: dca2fb5ac53572d6573cf3b56507ca4d696d9aa0 Parents: 9722509 Author: Daniel TempletonAuthored: Mon Sep 25 14:12:25 2017 +0200 Committer: Daniel Templeton Committed: Mon Sep 25 16:15:56 2017 +0200 -- .../hadoop-common/src/site/markdown/Compatibility.md | 10 +++--- 1 file changed, 7 insertions(+), 3 deletions(-) -- http://git-wip-us.apache.org/repos/asf/hadoop/blob/dca2fb5a/hadoop-common-project/hadoop-common/src/site/markdown/Compatibility.md -- diff --git a/hadoop-common-project/hadoop-common/src/site/markdown/Compatibility.md b/hadoop-common-project/hadoop-common/src/site/markdown/Compatibility.md index 4fa8c02..47fa09a 100644 --- a/hadoop-common-project/hadoop-common/src/site/markdown/Compatibility.md +++ b/hadoop-common-project/hadoop-common/src/site/markdown/Compatibility.md @@ -191,9 +191,13 @@ dependencies is part of the Hadoop ABI. The minimum required versions of the native components on which Hadoop depends at compile time and/or runtime SHALL be considered -[Stable](./InterfaceClassification.html#Stable). Changes to the minimum -required versions MUST NOT increase between minor releases within a major -version. +[Evolving](./InterfaceClassification.html#Evolving). Changes to the minimum +required versions SHOULD NOT increase between minor releases within a major +version, though updates because of security issues, license issues, or other +reasons may occur. 
When the native components on which Hadoop depends must +be updated between minor releases within a major release, where possible the +changes SHOULD only change the minor versions of the components without +changing the major versions. ### Wire Protocols - To unsubscribe, e-mail: common-commits-unsubscr...@hadoop.apache.org For additional commands, e-mail: common-commits-h...@hadoop.apache.org
hadoop git commit: HADOOP-14897. Loosen compatibility guidelines for native dependencies
Repository: hadoop Updated Branches: refs/heads/trunk d0b2c5850 -> 080747080 HADOOP-14897. Loosen compatibility guidelines for native dependencies Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/08074708 Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/08074708 Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/08074708 Branch: refs/heads/trunk Commit: 0807470808156af13e0992285cac070052d86c26 Parents: d0b2c58 Author: Daniel TempletonAuthored: Mon Sep 25 14:12:25 2017 +0200 Committer: Daniel Templeton Committed: Mon Sep 25 14:12:25 2017 +0200 -- .../hadoop-common/src/site/markdown/Compatibility.md | 10 +++--- 1 file changed, 7 insertions(+), 3 deletions(-) -- http://git-wip-us.apache.org/repos/asf/hadoop/blob/08074708/hadoop-common-project/hadoop-common/src/site/markdown/Compatibility.md -- diff --git a/hadoop-common-project/hadoop-common/src/site/markdown/Compatibility.md b/hadoop-common-project/hadoop-common/src/site/markdown/Compatibility.md index 4fa8c02..47fa09a 100644 --- a/hadoop-common-project/hadoop-common/src/site/markdown/Compatibility.md +++ b/hadoop-common-project/hadoop-common/src/site/markdown/Compatibility.md @@ -191,9 +191,13 @@ dependencies is part of the Hadoop ABI. The minimum required versions of the native components on which Hadoop depends at compile time and/or runtime SHALL be considered -[Stable](./InterfaceClassification.html#Stable). Changes to the minimum -required versions MUST NOT increase between minor releases within a major -version. +[Evolving](./InterfaceClassification.html#Evolving). Changes to the minimum +required versions SHOULD NOT increase between minor releases within a major +version, though updates because of security issues, license issues, or other +reasons may occur. 
When the native components on which Hadoop depends must +be updated between minor releases within a major release, where possible the +changes SHOULD only change the minor versions of the components without +changing the major versions. ### Wire Protocols - To unsubscribe, e-mail: common-commits-unsubscr...@hadoop.apache.org For additional commands, e-mail: common-commits-h...@hadoop.apache.org
hadoop git commit: HDFS-12506. Ozone: ListBucket is too slow. Contributed by Weiwei Yang.
Repository: hadoop Updated Branches: refs/heads/HDFS-7240 cf1001e45 -> e01245495 HDFS-12506. Ozone: ListBucket is too slow. Contributed by Weiwei Yang. Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/e0124549 Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/e0124549 Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/e0124549 Branch: refs/heads/HDFS-7240 Commit: e01245495f71a20a5478c29c32d849d4b2720c57 Parents: cf1001e Author: Weiwei YangAuthored: Mon Sep 25 16:50:58 2017 +0800 Committer: Weiwei Yang Committed: Mon Sep 25 16:50:58 2017 +0800 -- .../org/apache/hadoop/ozone/OzoneConsts.java| 25 +- .../ozone/ksm/KSMMetadataManagerImpl.java | 49 ++-- .../org/apache/hadoop/ozone/scm/cli/SQLCLI.java | 20 ++--- .../org/apache/hadoop/utils/LevelDBStore.java | 40 -- .../org/apache/hadoop/utils/MetadataStore.java | 21 + .../org/apache/hadoop/utils/RocksDBStore.java | 34 +++- .../apache/hadoop/ozone/TestMetadataStore.java | 20 + .../hadoop/ozone/ksm/TestBucketManagerImpl.java | 4 +- .../hadoop/ozone/web/client/TestBuckets.java| 13 .../hadoop/ozone/web/client/TestVolume.java | 81 10 files changed, 219 insertions(+), 88 deletions(-) -- http://git-wip-us.apache.org/repos/asf/hadoop/blob/e0124549/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/ozone/OzoneConsts.java -- diff --git a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/ozone/OzoneConsts.java b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/ozone/OzoneConsts.java index de8061a..044fc07 100644 --- a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/ozone/OzoneConsts.java +++ b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/ozone/OzoneConsts.java @@ -98,10 +98,29 @@ public final class OzoneConsts { /** * KSM LevelDB prefixes. 
+ * + * KSM DB stores metadata as KV pairs with certain prefixes, + * prefix is used to improve the performance to get related + * metadata. + * + * KSM DB Schema: + * -- + * | KEY | VALUE | + * -- + * | $userName| VolumeList | + * -- + * | /#volumeName | VolumeInfo | + * -- + * | /#volumeName/#bucketName | BucketInfo | + * -- + * | /volumeName/bucketName/keyName | KeyInfo| + * -- + * | #deleting#/volumeName/bucketName/keyName | KeyInfo| + * -- */ - public static final String KSM_VOLUME_PREFIX = "/"; - public static final String KSM_BUCKET_PREFIX = KSM_VOLUME_PREFIX; - public static final String KSM_KEY_PREFIX = KSM_VOLUME_PREFIX; + public static final String KSM_VOLUME_PREFIX = "/#"; + public static final String KSM_BUCKET_PREFIX = "/#"; + public static final String KSM_KEY_PREFIX = "/"; public static final String KSM_USER_PREFIX = "$"; /** http://git-wip-us.apache.org/repos/asf/hadoop/blob/e0124549/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/ozone/ksm/KSMMetadataManagerImpl.java -- diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/ozone/ksm/KSMMetadataManagerImpl.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/ozone/ksm/KSMMetadataManagerImpl.java index 9413e1d..b8eaeba 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/ozone/ksm/KSMMetadataManagerImpl.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/ozone/ksm/KSMMetadataManagerImpl.java @@ -21,7 +21,10 @@ import com.google.common.base.Strings; import com.google.common.collect.Lists; import org.apache.commons.lang3.tuple.ImmutablePair; import org.apache.hadoop.hdfs.DFSUtil; -import org.apache.hadoop.ozone.ksm.helpers.*; +import org.apache.hadoop.ozone.ksm.helpers.KsmKeyInfo; +import org.apache.hadoop.ozone.ksm.helpers.KsmBucketInfo; +import org.apache.hadoop.ozone.ksm.helpers.KsmVolumeArgs; +import org.apache.hadoop.ozone.ksm.helpers.KsmKeyLocationInfo; import 
org.apache.hadoop.ozone.common.BlockGroup; import org.apache.hadoop.ozone.OzoneConfiguration;
hadoop git commit: HDFS-11563. Ozone: enforce DependencyConvergence uniqueVersions.
Repository: hadoop Updated Branches: refs/heads/HDFS-7240 97ff55eab -> cf1001e45 HDFS-11563. Ozone: enforce DependencyConvergence uniqueVersions. Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/cf1001e4 Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/cf1001e4 Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/cf1001e4 Branch: refs/heads/HDFS-7240 Commit: cf1001e450feb318ea2b439cdfb022f6d54ab15e Parents: 97ff55e Author: Tsz-Wo Nicholas SzeAuthored: Mon Sep 25 14:12:55 2017 +0800 Committer: Tsz-Wo Nicholas Sze Committed: Mon Sep 25 14:12:55 2017 +0800 -- hadoop-hdfs-project/hadoop-hdfs-client/pom.xml | 14 +- hadoop-project/pom.xml | 4 +--- 2 files changed, 2 insertions(+), 16 deletions(-) -- http://git-wip-us.apache.org/repos/asf/hadoop/blob/cf1001e4/hadoop-hdfs-project/hadoop-hdfs-client/pom.xml -- diff --git a/hadoop-hdfs-project/hadoop-hdfs-client/pom.xml b/hadoop-hdfs-project/hadoop-hdfs-client/pom.xml index 18fe57c..1944b0e 100644 --- a/hadoop-hdfs-project/hadoop-hdfs-client/pom.xml +++ b/hadoop-hdfs-project/hadoop-hdfs-client/pom.xml @@ -114,11 +114,7 @@ http://maven.apache.org/xsd/maven-4.0.0.xsd;> - org.apache.ratis - ratis-proto-shaded - - - ratis-common + ratis-server org.apache.ratis @@ -128,14 +124,6 @@ http://maven.apache.org/xsd/maven-4.0.0.xsd;> - ratis-client - org.apache.ratis - - - ratis-server - org.apache.ratis - - ratis-netty org.apache.ratis http://git-wip-us.apache.org/repos/asf/hadoop/blob/cf1001e4/hadoop-project/pom.xml -- diff --git a/hadoop-project/pom.xml b/hadoop-project/pom.xml index 9c5ee56..5cc51f1 100755 --- a/hadoop-project/pom.xml +++ b/hadoop-project/pom.xml @@ -1615,9 +1615,7 @@ - + true - To unsubscribe, e-mail: common-commits-unsubscr...@hadoop.apache.org For additional commands, e-mail: common-commits-h...@hadoop.apache.org