This is an automated email from the ASF dual-hosted git repository.
adoroszlai pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/ozone.git
The following commit(s) were added to refs/heads/master by this push:
new 0bac7ef8e4 HDDS-10405. ozone admin has hard-coded info loglevel (#6254)
0bac7ef8e4 is described below
commit 0bac7ef8e47cd4c685e38ca8d2a618baf87f8cf4
Author: Doroszlai, Attila <[email protected]>
AuthorDate: Fri Feb 23 22:00:53 2024 +0100
HDDS-10405. ozone admin has hard-coded info loglevel (#6254)
---
.../org/apache/hadoop/hdds/cli/OzoneAdmin.java | 12 ---
.../scm/cli/ReplicationManagerStartSubcommand.java | 7 +-
.../cli/ReplicationManagerStatusSubcommand.java | 9 +--
.../scm/cli/ReplicationManagerStopSubcommand.java | 9 +--
.../hdds/scm/cli/SafeModeCheckSubcommand.java | 11 +--
.../hdds/scm/cli/SafeModeExitSubcommand.java | 7 +-
.../hdds/scm/cli/SafeModeWaitSubcommand.java | 19 ++---
.../scm/cli/cert/CleanExpiredCertsSubcommand.java | 9 +--
.../hadoop/hdds/scm/cli/cert/InfoSubcommand.java | 16 +---
.../hadoop/hdds/scm/cli/cert/ListSubcommand.java | 11 +--
.../hdds/scm/cli/cert/ScmCertSubcommand.java | 21 +++---
.../hdds/scm/cli/container/CreateSubcommand.java | 7 +-
.../hdds/scm/cli/container/InfoSubcommand.java | 35 ++++-----
.../hdds/scm/cli/container/ListSubcommand.java | 7 +-
.../hdds/scm/cli/container/TestInfoSubCommand.java | 85 ++++++----------------
15 files changed, 73 insertions(+), 192 deletions(-)
diff --git
a/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/cli/OzoneAdmin.java
b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/cli/OzoneAdmin.java
index 093dd93430..cc496a28e7 100644
--- a/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/cli/OzoneAdmin.java
+++ b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/cli/OzoneAdmin.java
@@ -22,13 +22,7 @@ import java.io.IOException;
import org.apache.hadoop.hdds.conf.OzoneConfiguration;
import org.apache.hadoop.hdds.tracing.TracingUtil;
import org.apache.hadoop.security.UserGroupInformation;
-import org.apache.hadoop.util.NativeCodeLoader;
-import org.apache.log4j.ConsoleAppender;
-import org.apache.log4j.Level;
-import org.apache.log4j.LogManager;
-import org.apache.log4j.Logger;
-import org.apache.log4j.PatternLayout;
import picocli.CommandLine;
/**
@@ -75,12 +69,6 @@ public class OzoneAdmin extends GenericCli {
* @param argv - System Args Strings[]
*/
public static void main(String[] argv) {
- LogManager.resetConfiguration();
- Logger.getRootLogger().setLevel(Level.INFO);
- Logger.getRootLogger()
- .addAppender(new ConsoleAppender(new PatternLayout("%m%n")));
- Logger.getLogger(NativeCodeLoader.class).setLevel(Level.ERROR);
-
new OzoneAdmin().run(argv);
}
diff --git
a/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/ReplicationManagerStartSubcommand.java
b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/ReplicationManagerStartSubcommand.java
index ff82b82ec8..29f2f3d457 100644
---
a/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/ReplicationManagerStartSubcommand.java
+++
b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/ReplicationManagerStartSubcommand.java
@@ -19,8 +19,6 @@ package org.apache.hadoop.hdds.scm.cli;
import org.apache.hadoop.hdds.cli.HddsVersionProvider;
import org.apache.hadoop.hdds.scm.client.ScmClient;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
import picocli.CommandLine.Command;
import java.io.IOException;
@@ -35,12 +33,9 @@ import java.io.IOException;
versionProvider = HddsVersionProvider.class)
public class ReplicationManagerStartSubcommand extends ScmSubcommand {
- private static final Logger LOG =
- LoggerFactory.getLogger(ReplicationManagerStartSubcommand.class);
-
@Override
public void execute(ScmClient scmClient) throws IOException {
scmClient.startReplicationManager();
- LOG.info("Starting ReplicationManager...");
+ System.out.println("Starting ReplicationManager...");
}
}
diff --git
a/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/ReplicationManagerStatusSubcommand.java
b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/ReplicationManagerStatusSubcommand.java
index 9bc3649dd9..b2e308e142 100644
---
a/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/ReplicationManagerStatusSubcommand.java
+++
b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/ReplicationManagerStatusSubcommand.java
@@ -19,8 +19,6 @@ package org.apache.hadoop.hdds.scm.cli;
import org.apache.hadoop.hdds.cli.HddsVersionProvider;
import org.apache.hadoop.hdds.scm.client.ScmClient;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
import picocli.CommandLine.Command;
import java.io.IOException;
@@ -35,18 +33,15 @@ import java.io.IOException;
versionProvider = HddsVersionProvider.class)
public class ReplicationManagerStatusSubcommand extends ScmSubcommand {
- private static final Logger LOG =
- LoggerFactory.getLogger(ReplicationManagerStatusSubcommand.class);
-
@Override
public void execute(ScmClient scmClient) throws IOException {
boolean execReturn = scmClient.getReplicationManagerStatus();
// Output data list
if (execReturn) {
- LOG.info("ReplicationManager is Running.");
+ System.out.println("ReplicationManager is Running.");
} else {
- LOG.info("ReplicationManager is Not Running.");
+ System.out.println("ReplicationManager is Not Running.");
}
}
}
diff --git
a/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/ReplicationManagerStopSubcommand.java
b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/ReplicationManagerStopSubcommand.java
index 7d3063a763..12de13c07d 100644
---
a/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/ReplicationManagerStopSubcommand.java
+++
b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/ReplicationManagerStopSubcommand.java
@@ -19,8 +19,6 @@ package org.apache.hadoop.hdds.scm.cli;
import org.apache.hadoop.hdds.cli.HddsVersionProvider;
import org.apache.hadoop.hdds.scm.client.ScmClient;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
import picocli.CommandLine.Command;
import java.io.IOException;
@@ -35,14 +33,11 @@ import java.io.IOException;
versionProvider = HddsVersionProvider.class)
public class ReplicationManagerStopSubcommand extends ScmSubcommand {
- private static final Logger LOG =
- LoggerFactory.getLogger(ReplicationManagerStopSubcommand.class);
-
@Override
public void execute(ScmClient scmClient) throws IOException {
scmClient.stopReplicationManager();
- LOG.info("Stopping ReplicationManager...");
- LOG.info("Requested SCM to stop ReplicationManager, " +
+ System.out.println("Stopping ReplicationManager...");
+ System.out.println("Requested SCM to stop ReplicationManager, " +
"it might take sometime for the ReplicationManager to stop.");
}
}
diff --git
a/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/SafeModeCheckSubcommand.java
b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/SafeModeCheckSubcommand.java
index db2f02c5e1..747215dcac 100644
---
a/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/SafeModeCheckSubcommand.java
+++
b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/SafeModeCheckSubcommand.java
@@ -24,8 +24,6 @@ import org.apache.commons.lang3.tuple.Pair;
import org.apache.hadoop.hdds.cli.HddsVersionProvider;
import org.apache.hadoop.hdds.scm.client.ScmClient;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
import picocli.CommandLine;
import picocli.CommandLine.Command;
@@ -39,9 +37,6 @@ import picocli.CommandLine.Command;
versionProvider = HddsVersionProvider.class)
public class SafeModeCheckSubcommand extends ScmSubcommand {
- private static final Logger LOG =
- LoggerFactory.getLogger(SafeModeCheckSubcommand.class);
-
@CommandLine.Option(names = {"--verbose"},
description = "Show detailed status of rules.")
private boolean verbose;
@@ -52,17 +47,17 @@ public class SafeModeCheckSubcommand extends ScmSubcommand {
// Output data list
if (execReturn) {
- LOG.info("SCM is in safe mode.");
+ System.out.println("SCM is in safe mode.");
if (verbose) {
for (Map.Entry<String, Pair<Boolean, String>> entry :
scmClient.getSafeModeRuleStatuses().entrySet()) {
Pair<Boolean, String> value = entry.getValue();
- LOG.info("validated:{}, {}, {}",
+ System.out.printf("validated:%s, %s, %s%n",
value.getLeft(), entry.getKey(), value.getRight());
}
}
} else {
- LOG.info("SCM is out of safe mode.");
+ System.out.println("SCM is out of safe mode.");
}
}
}
diff --git
a/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/SafeModeExitSubcommand.java
b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/SafeModeExitSubcommand.java
index bcf64deb85..e4173c9767 100644
---
a/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/SafeModeExitSubcommand.java
+++
b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/SafeModeExitSubcommand.java
@@ -22,8 +22,6 @@ import java.io.IOException;
import org.apache.hadoop.hdds.cli.HddsVersionProvider;
import org.apache.hadoop.hdds.scm.client.ScmClient;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
import picocli.CommandLine.Command;
/**
@@ -36,14 +34,11 @@ import picocli.CommandLine.Command;
versionProvider = HddsVersionProvider.class)
public class SafeModeExitSubcommand extends ScmSubcommand {
- private static final Logger LOG =
- LoggerFactory.getLogger(SafeModeExitSubcommand.class);
-
@Override
public void execute(ScmClient scmClient) throws IOException {
boolean execReturn = scmClient.forceExitSafeMode();
if (execReturn) {
- LOG.info("SCM exit safe mode successfully.");
+ System.out.println("SCM exit safe mode successfully.");
}
}
}
diff --git
a/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/SafeModeWaitSubcommand.java
b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/SafeModeWaitSubcommand.java
index abaca08cfb..ad94d4fffd 100644
---
a/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/SafeModeWaitSubcommand.java
+++
b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/SafeModeWaitSubcommand.java
@@ -23,8 +23,6 @@ import java.util.concurrent.TimeoutException;
import org.apache.hadoop.hdds.cli.HddsVersionProvider;
import org.apache.hadoop.hdds.scm.client.ScmClient;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
import picocli.CommandLine.Command;
import picocli.CommandLine.Option;
import picocli.CommandLine.Mixin;
@@ -39,9 +37,6 @@ import picocli.CommandLine.Mixin;
versionProvider = HddsVersionProvider.class)
public class SafeModeWaitSubcommand implements Callable<Void> {
- private static final Logger LOG =
- LoggerFactory.getLogger(SafeModeWaitSubcommand.class);
-
@Option(description =
"Define timeout (in second) to wait until (exit code 1) "
+ "or until safemode is ended (exit code 0).", defaultValue = "30",
@@ -62,26 +57,26 @@ public class SafeModeWaitSubcommand implements Callable<Void> {
long remainingTime;
do {
if (!scmClient.inSafeMode()) {
- LOG.info("SCM is out of safe mode.");
+ System.out.println("SCM is out of safe mode.");
return null;
}
remainingTime = getRemainingTimeInSec();
if (remainingTime > 0) {
- LOG.info(
+ System.out.printf(
"SCM is in safe mode. Will retry in 1 sec. Remaining time "
- + "(sec): {}",
+ + "(sec): %s%n",
remainingTime);
Thread.sleep(1000);
} else {
- LOG.info("SCM is in safe mode. No more retries.");
+ System.out.println("SCM is in safe mode. No more retries.");
}
} while (remainingTime > 0);
} catch (InterruptedException ex) {
- LOG.info(
- "SCM is not available (yet?). Error is {}. Will retry in 1 sec. "
- + "Remaining time (sec): {}",
+ System.out.printf(
+ "SCM is not available (yet?). Error is %s. Will retry in 1 sec. "
+ + "Remaining time (sec): %s%n",
ex.getMessage(), getRemainingTimeInSec());
Thread.sleep(1000);
Thread.currentThread().interrupt();
diff --git
a/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/cert/CleanExpiredCertsSubcommand.java
b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/cert/CleanExpiredCertsSubcommand.java
index cab7a29a4e..09caf8147a 100644
---
a/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/cert/CleanExpiredCertsSubcommand.java
+++
b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/cert/CleanExpiredCertsSubcommand.java
@@ -19,8 +19,6 @@ package org.apache.hadoop.hdds.scm.cli.cert;
import org.apache.hadoop.hdds.cli.HddsVersionProvider;
import org.apache.hadoop.hdds.protocol.SCMSecurityProtocol;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
import picocli.CommandLine;
import java.io.IOException;
@@ -36,13 +34,10 @@ import java.util.List;
versionProvider = HddsVersionProvider.class)
public class CleanExpiredCertsSubcommand extends ScmCertSubcommand {
- private static final Logger LOG =
- LoggerFactory.getLogger(CleanExpiredCertsSubcommand.class);
-
@Override
protected void execute(SCMSecurityProtocol client) throws IOException {
List<String> pemEncodedCerts = client.removeExpiredCertificates();
- LOG.info("List of removed expired certificates:");
- printCertList(LOG, pemEncodedCerts);
+ System.out.println("List of removed expired certificates:");
+ printCertList(pemEncodedCerts);
}
}
diff --git
a/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/cert/InfoSubcommand.java
b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/cert/InfoSubcommand.java
index 6177c8f7ff..c708d424d9 100644
---
a/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/cert/InfoSubcommand.java
+++
b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/cert/InfoSubcommand.java
@@ -26,12 +26,8 @@ import org.apache.hadoop.hdds.cli.HddsVersionProvider;
import org.apache.hadoop.hdds.protocol.SCMSecurityProtocol;
import org.apache.hadoop.hdds.security.x509.certificate.utils.CertificateCodec;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
import picocli.CommandLine.Command;
-import picocli.CommandLine.Model.CommandSpec;
import picocli.CommandLine.Parameters;
-import picocli.CommandLine.Spec;
/**
* This is the handler that process certificate info command.
@@ -44,12 +40,6 @@ import picocli.CommandLine.Spec;
class InfoSubcommand extends ScmCertSubcommand {
- private static final Logger LOG =
- LoggerFactory.getLogger(InfoSubcommand.class);
-
- @Spec
- private CommandSpec spec;
-
@Parameters(description = "Serial id of the certificate in decimal.")
private String serialId;
@@ -61,12 +51,12 @@ class InfoSubcommand extends ScmCertSubcommand {
"Certificate can't be found");
// Print container report info.
- LOG.info("Certificate id: {}", serialId);
+ System.out.printf("Certificate id: %s%n", serialId);
try {
X509Certificate cert = CertificateCodec.getX509Certificate(certPemStr);
- LOG.info(cert.toString());
+ System.out.println(cert);
} catch (CertificateException ex) {
- LOG.error("Failed to get certificate id " + serialId);
+ System.err.println("Failed to get certificate id " + serialId);
throw new IOException("Fail to get certificate id " + serialId, ex);
}
}
diff --git
a/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/cert/ListSubcommand.java
b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/cert/ListSubcommand.java
index c2e0bd7fad..ea08983814 100644
---
a/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/cert/ListSubcommand.java
+++
b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/cert/ListSubcommand.java
@@ -36,8 +36,6 @@ import org.apache.hadoop.hdds.protocol.SCMSecurityProtocol;
import org.apache.hadoop.hdds.protocol.proto.HddsProtos;
import org.apache.hadoop.hdds.security.x509.certificate.utils.CertificateCodec;
import org.apache.hadoop.hdds.server.JsonUtils;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
import picocli.CommandLine.Command;
import picocli.CommandLine.Help.Visibility;
import picocli.CommandLine.Option;
@@ -54,9 +52,6 @@ import static java.lang.System.err;
versionProvider = HddsVersionProvider.class)
public class ListSubcommand extends ScmCertSubcommand {
- private static final Logger LOG =
- LoggerFactory.getLogger(ListSubcommand.class);
-
@Option(names = {"-s", "--start"},
description = "Certificate serial id to start the iteration",
defaultValue = "0", showDefaultValue = Visibility.ALWAYS)
@@ -114,7 +109,7 @@ public class ListSubcommand extends ScmCertSubcommand {
CertificateCodec.getX509Certificate(certPemStr);
certList.add(new Certificate(cert));
} catch (CertificateException ex) {
- LOG.error("Failed to parse certificate.");
+ err.println("Failed to parse certificate.");
}
}
System.out.println(
@@ -122,9 +117,9 @@ public class ListSubcommand extends ScmCertSubcommand {
return;
}
- LOG.info("Certificate list:(Type={}, BatchSize={}, CertCount={})",
+ System.out.printf("Certificate list:(Type=%s, BatchSize=%s, CertCount=%s)%n",
type.toUpperCase(), count, certPemList.size());
- printCertList(LOG, certPemList);
+ printCertList(certPemList);
}
private static class BigIntJsonSerializer extends JsonSerializer<BigInteger>
{
diff --git
a/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/cert/ScmCertSubcommand.java
b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/cert/ScmCertSubcommand.java
index d7ebb44e0f..354adbb5d6 100644
---
a/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/cert/ScmCertSubcommand.java
+++
b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/cert/ScmCertSubcommand.java
@@ -20,7 +20,6 @@ package org.apache.hadoop.hdds.scm.cli.cert;
import org.apache.hadoop.hdds.protocol.SCMSecurityProtocol;
import org.apache.hadoop.hdds.scm.cli.ScmOption;
import org.apache.hadoop.hdds.security.x509.certificate.utils.CertificateCodec;
-import org.slf4j.Logger;
import picocli.CommandLine;
import java.io.IOException;
@@ -37,29 +36,29 @@ public abstract class ScmCertSubcommand implements
Callable<Void> {
@CommandLine.Mixin
private ScmOption scmOption;
- private static final String OUTPUT_FORMAT = "%-17s %-30s %-30s %-110s %-110s";
+ private static final String OUTPUT_FORMAT = "%-17s %-30s %-30s %-110s %-110s%n";
- protected void printCertList(Logger log, List<String> pemEncodedCerts) {
+ protected void printCertList(List<String> pemEncodedCerts) {
if (pemEncodedCerts.isEmpty()) {
- log.info("No certificates to list");
+ System.out.println("No certificates to list");
return;
}
- log.info(String.format(OUTPUT_FORMAT, "SerialNumber", "Valid From",
- "Expiry", "Subject", "Issuer"));
+ System.out.printf(OUTPUT_FORMAT, "SerialNumber", "Valid From",
+ "Expiry", "Subject", "Issuer");
for (String certPemStr : pemEncodedCerts) {
try {
X509Certificate cert = CertificateCodec.getX509Certificate(certPemStr);
- printCert(cert, log);
+ printCert(cert);
} catch (CertificateException e) {
- log.error("Failed to parse certificate.", e);
+ System.err.println("Failed to parse certificate: " + e.getMessage());
}
}
}
- protected void printCert(X509Certificate cert, Logger log) {
- log.info(String.format(OUTPUT_FORMAT, cert.getSerialNumber(),
+ protected void printCert(X509Certificate cert) {
+ System.out.printf(OUTPUT_FORMAT, cert.getSerialNumber(),
cert.getNotBefore(), cert.getNotAfter(), cert.getSubjectDN(),
- cert.getIssuerDN()));
+ cert.getIssuerDN());
}
protected abstract void execute(SCMSecurityProtocol client)
diff --git
a/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/container/CreateSubcommand.java
b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/container/CreateSubcommand.java
index 9eedbf8589..313dc64c9f 100644
---
a/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/container/CreateSubcommand.java
+++
b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/container/CreateSubcommand.java
@@ -25,8 +25,6 @@ import org.apache.hadoop.hdds.scm.client.ScmClient;
import org.apache.hadoop.hdds.scm.container.common.helpers
.ContainerWithPipeline;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
import picocli.CommandLine.Command;
import picocli.CommandLine.Option;
@@ -40,9 +38,6 @@ import picocli.CommandLine.Option;
versionProvider = HddsVersionProvider.class)
public class CreateSubcommand extends ScmSubcommand {
- private static final Logger LOG =
- LoggerFactory.getLogger(CreateSubcommand.class);
-
@Option(description = "Owner of the new container", defaultValue = "OZONE",
names = { "-o", "--owner"})
private String owner;
@@ -50,7 +45,7 @@ public class CreateSubcommand extends ScmSubcommand {
@Override
public void execute(ScmClient scmClient) throws IOException {
ContainerWithPipeline container = scmClient.createContainer(owner);
- LOG.info("Container {} is created.",
+ System.out.printf("Container %s is created.%n",
container.getContainerInfo().getContainerID());
}
}
diff --git
a/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/container/InfoSubcommand.java
b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/container/InfoSubcommand.java
index 8ed9f520b2..0e67661bba 100644
---
a/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/container/InfoSubcommand.java
+++
b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/container/InfoSubcommand.java
@@ -45,8 +45,6 @@ import org.apache.hadoop.hdds.scm.pipeline.Pipeline;
import org.apache.hadoop.hdds.scm.pipeline.PipelineID;
import org.apache.hadoop.hdds.scm.pipeline.PipelineNotFoundException;
import org.apache.hadoop.hdds.server.JsonUtils;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
import picocli.CommandLine;
import picocli.CommandLine.Command;
import picocli.CommandLine.Model.CommandSpec;
@@ -63,9 +61,6 @@ import picocli.CommandLine.Spec;
versionProvider = HddsVersionProvider.class)
public class InfoSubcommand extends ScmSubcommand {
- private static final Logger LOG =
- LoggerFactory.getLogger(InfoSubcommand.class);
-
@Spec
private CommandSpec spec;
@@ -126,13 +121,13 @@ public class InfoSubcommand extends ScmSubcommand {
private void printHeader() {
if (json && multiContainer) {
- LOG.info("[");
+ System.out.println("[");
}
}
private void printFooter() {
if (json && multiContainer) {
- LOG.info("]");
+ System.out.println("]");
}
}
@@ -142,9 +137,9 @@ public class InfoSubcommand extends ScmSubcommand {
private void printBreak() {
if (json) {
- LOG.info(",");
+ System.out.println(",");
} else {
- LOG.info("");
+ System.out.println("");
}
}
@@ -175,47 +170,47 @@ public class InfoSubcommand extends ScmSubcommand {
new ContainerWithPipelineAndReplicas(container.getContainerInfo(),
container.getPipeline(), replicas,
container.getContainerInfo().getPipelineID());
- LOG.info(JsonUtils.toJsonStringWithDefaultPrettyPrinter(wrapper));
+ System.out.println(JsonUtils.toJsonStringWithDefaultPrettyPrinter(wrapper));
} else {
ContainerWithoutDatanodes wrapper =
new ContainerWithoutDatanodes(container.getContainerInfo(),
container.getPipeline(), replicas,
container.getContainerInfo().getPipelineID());
- LOG.info(JsonUtils.toJsonStringWithDefaultPrettyPrinter(wrapper));
+ System.out.println(JsonUtils.toJsonStringWithDefaultPrettyPrinter(wrapper));
}
} else {
// Print container report info.
- LOG.info("Container id: {}", containerID);
+ System.out.printf("Container id: %s%n", containerID);
boolean verbose = spec != null
&& spec.root().userObject() instanceof GenericParentCommand
&& ((GenericParentCommand) spec.root().userObject()).isVerbose();
if (verbose) {
- LOG.info("Pipeline Info: {}", container.getPipeline());
+ System.out.printf("Pipeline Info: %s%n", container.getPipeline());
} else {
- LOG.info("Pipeline id: {}", container.getPipeline().getId().getId());
+ System.out.printf("Pipeline id: %s%n", container.getPipeline().getId().getId());
}
- LOG.info("Write PipelineId: {}",
+ System.out.printf("Write PipelineId: %s%n",
container.getContainerInfo().getPipelineID().getId());
try {
String pipelineState = scmClient.getPipeline(
container.getContainerInfo().getPipelineID().getProtobuf())
.getPipelineState().toString();
- LOG.info("Write Pipeline State: {}", pipelineState);
+ System.out.printf("Write Pipeline State: %s%n", pipelineState);
} catch (IOException ioe) {
if (SCMHAUtils.unwrapException(
ioe) instanceof PipelineNotFoundException) {
- LOG.info("Write Pipeline State: CLOSED");
+ System.out.println("Write Pipeline State: CLOSED");
} else {
printError("Failed to retrieve pipeline info");
}
}
- LOG.info("Container State: {}", container.getContainerInfo().getState());
+ System.out.printf("Container State: %s%n", container.getContainerInfo().getState());
// Print pipeline of an existing container.
String machinesStr = container.getPipeline().getNodes().stream().map(
InfoSubcommand::buildDatanodeDetails)
.collect(Collectors.joining(",\n"));
- LOG.info("Datanodes: [{}]", machinesStr);
+ System.out.printf("Datanodes: [%s]%n", machinesStr);
// Print the replica details if available
if (replicas != null) {
@@ -223,7 +218,7 @@ public class InfoSubcommand extends ScmSubcommand {
.sorted(Comparator.comparing(ContainerReplicaInfo::getReplicaIndex))
.map(InfoSubcommand::buildReplicaDetails)
.collect(Collectors.joining(",\n"));
- LOG.info("Replicas: [{}]", replicaStr);
+ System.out.printf("Replicas: [%s]%n", replicaStr);
}
}
}
diff --git
a/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/container/ListSubcommand.java
b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/container/ListSubcommand.java
index b120fe4169..ecc43d0408 100644
---
a/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/container/ListSubcommand.java
+++
b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/container/ListSubcommand.java
@@ -36,8 +36,6 @@ import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.ObjectWriter;
import com.fasterxml.jackson.databind.SerializationFeature;
import com.fasterxml.jackson.datatype.jsr310.JavaTimeModule;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
import picocli.CommandLine.Command;
import picocli.CommandLine.Help.Visibility;
import picocli.CommandLine.Option;
@@ -52,9 +50,6 @@ import picocli.CommandLine.Option;
versionProvider = HddsVersionProvider.class)
public class ListSubcommand extends ScmSubcommand {
- private static final Logger LOG =
- LoggerFactory.getLogger(ListSubcommand.class);
-
@Option(names = {"-s", "--start"},
description = "Container id to start the iteration")
private long startId;
@@ -94,7 +89,7 @@ public class ListSubcommand extends ScmSubcommand {
private void outputContainerInfo(ContainerInfo containerInfo)
throws IOException {
// Print container report info.
- LOG.info("{}", WRITER.writeValueAsString(containerInfo));
+ System.out.println(WRITER.writeValueAsString(containerInfo));
}
@Override
diff --git
a/hadoop-hdds/tools/src/test/java/org/apache/hadoop/hdds/scm/cli/container/TestInfoSubCommand.java
b/hadoop-hdds/tools/src/test/java/org/apache/hadoop/hdds/scm/cli/container/TestInfoSubCommand.java
index d8c1addb78..efc11d550f 100644
---
a/hadoop-hdds/tools/src/test/java/org/apache/hadoop/hdds/scm/cli/container/TestInfoSubCommand.java
+++
b/hadoop-hdds/tools/src/test/java/org/apache/hadoop/hdds/scm/cli/container/TestInfoSubCommand.java
@@ -28,9 +28,6 @@ import
org.apache.hadoop.hdds.scm.container.common.helpers.ContainerWithPipeline
import org.apache.hadoop.hdds.scm.pipeline.Pipeline;
import org.apache.hadoop.hdds.scm.pipeline.PipelineID;
import org.apache.hadoop.hdds.scm.pipeline.PipelineNotFoundException;
-import org.apache.log4j.AppenderSkeleton;
-import org.apache.log4j.Logger;
-import org.apache.log4j.spi.LoggingEvent;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
@@ -44,6 +41,7 @@ import java.io.PrintStream;
import java.io.UnsupportedEncodingException;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
+import java.util.Arrays;
import java.util.List;
import java.util.UUID;
import java.util.regex.Matcher;
@@ -52,6 +50,7 @@ import java.util.stream.Collectors;
import static
org.apache.hadoop.hdds.protocol.proto.HddsProtos.LifeCycleState.CLOSED;
import static
org.apache.hadoop.hdds.protocol.proto.HddsProtos.ReplicationFactor.THREE;
+import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertThrows;
@@ -69,8 +68,6 @@ public class TestInfoSubCommand {
private ScmClient scmClient;
private InfoSubcommand cmd;
private List<DatanodeDetails> datanodes;
- private Logger logger;
- private TestAppender appender;
private final ByteArrayOutputStream outContent = new ByteArrayOutputStream();
private final ByteArrayOutputStream errContent = new ByteArrayOutputStream();
@@ -88,18 +85,12 @@ public class TestInfoSubCommand {
when(scmClient.getContainerWithPipeline(anyLong())).then(i ->
getContainerWithPipeline(i.getArgument(0)));
when(scmClient.getPipeline(any())).thenThrow(new
PipelineNotFoundException("Pipeline not found."));
- appender = new TestAppender();
- logger = Logger.getLogger(
- org.apache.hadoop.hdds.scm.cli.container.InfoSubcommand.class);
- logger.addAppender(appender);
-
System.setOut(new PrintStream(outContent, false, DEFAULT_ENCODING));
System.setErr(new PrintStream(errContent, false, DEFAULT_ENCODING));
}
@AfterEach
public void after() {
- logger.removeAppender(appender);
System.setOut(originalOut);
System.setErr(originalErr);
System.setIn(originalIn);
@@ -150,10 +141,8 @@ public class TestInfoSubCommand {
private void validateMultiOutput() throws UnsupportedEncodingException {
// Ensure we have a log line for each containerID
- List<LoggingEvent> logs = appender.getLog();
- List<LoggingEvent> replica = logs.stream()
- .filter(m -> m.getRenderedMessage()
- .matches("(?s)^Container id: (1|123|456|789).*"))
+ List<String> replica = Arrays.stream(outContent.toString(DEFAULT_ENCODING).split("\n"))
+ .filter(m -> m.matches("(?s)^Container id: (1|123|456|789).*"))
.collect(Collectors.toList());
assertEquals(4, replica.size());
@@ -191,10 +180,8 @@ public class TestInfoSubCommand {
private void validateJsonMultiOutput() throws UnsupportedEncodingException {
// Ensure we have a log line for each containerID
- List<LoggingEvent> logs = appender.getLog();
- List<LoggingEvent> replica = logs.stream()
- .filter(m -> m.getRenderedMessage()
- .matches("(?s)^.*\"containerInfo\".*"))
+ List<String> replica = Arrays.stream(outContent.toString(DEFAULT_ENCODING).split("\n"))
+ .filter(m -> m.matches("(?s)^.*\"containerInfo\".*"))
.collect(Collectors.toList());
assertEquals(4, replica.size());
@@ -213,34 +200,33 @@ public class TestInfoSubCommand {
cmd.execute(scmClient);
// Ensure we have a line for Replicas:
- List<LoggingEvent> logs = appender.getLog();
- List<LoggingEvent> replica = logs.stream()
- .filter(m -> m.getRenderedMessage().matches("(?s)^Replicas:.*"))
- .collect(Collectors.toList());
- assertEquals(1, replica.size());
+ String output = outContent.toString(DEFAULT_ENCODING);
+ Pattern pattern = Pattern.compile("Replicas: \\[.*\\]", Pattern.DOTALL);
+ Matcher matcher = pattern.matcher(output);
+ assertTrue(matcher.find());
+ String replica = matcher.group();
// Ensure each DN UUID is mentioned in the message:
for (DatanodeDetails dn : datanodes) {
- Pattern pattern = Pattern.compile(".*" + dn.getUuid().toString() + ".*",
+ Pattern uuidPattern = Pattern.compile(".*" + dn.getUuid().toString() + ".*",
Pattern.DOTALL);
- Matcher matcher = pattern.matcher(replica.get(0).getRenderedMessage());
- assertTrue(matcher.matches());
+ assertThat(replica).matches(uuidPattern);
}
// Ensure the replicaIndex output is in order
if (includeIndex) {
List<Integer> indexList = new ArrayList<>();
for (int i = 1; i < datanodes.size() + 1; i++) {
String temp = "ReplicaIndex: " + i;
- indexList.add(replica.get(0).getRenderedMessage().indexOf(temp));
+ indexList.add(replica.indexOf(temp));
}
assertEquals(datanodes.size(), indexList.size());
assertTrue(inSort(indexList));
}
// Ensure ReplicaIndex is not mentioned as it was not passed in the proto:
- Pattern pattern = Pattern.compile(".*ReplicaIndex.*",
- Pattern.DOTALL);
- Matcher matcher = pattern.matcher(replica.get(0).getRenderedMessage());
- assertEquals(includeIndex, matcher.matches());
+ assertEquals(includeIndex,
+ Pattern.compile(".*ReplicaIndex.*", Pattern.DOTALL)
+ .matcher(replica)
+ .matches());
}
@Test
@@ -253,9 +239,8 @@ public class TestInfoSubCommand {
cmd.execute(scmClient);
// Ensure we have no lines for Replicas:
- List<LoggingEvent> logs = appender.getLog();
- List<LoggingEvent> replica = logs.stream()
- .filter(m -> m.getRenderedMessage().matches("(?s)^Replicas:.*"))
+ List<String> replica = Arrays.stream(outContent.toString(DEFAULT_ENCODING).split("\n"))
+ .filter(m -> m.matches("(?s)^Replicas:.*"))
.collect(Collectors.toList());
assertEquals(0, replica.size());
@@ -274,9 +259,7 @@ public class TestInfoSubCommand {
c.parseArgs("1", "--json");
cmd.execute(scmClient);
- List<LoggingEvent> logs = appender.getLog();
- assertEquals(1, logs.size());
- String json = logs.get(0).getRenderedMessage();
+ String json = outContent.toString(DEFAULT_ENCODING);
assertFalse(json.matches("(?s).*replicas.*"));
}
@@ -310,11 +293,8 @@ public class TestInfoSubCommand {
c.parseArgs("1", "--json");
cmd.execute(scmClient);
- List<LoggingEvent> logs = appender.getLog();
- assertEquals(1, logs.size());
-
// Ensure each DN UUID is mentioned in the message after replicas:
- String json = logs.get(0).getRenderedMessage();
+ String json = outContent.toString(DEFAULT_ENCODING);
assertTrue(json.matches("(?s).*replicas.*"));
for (DatanodeDetails dn : datanodes) {
Pattern pattern = Pattern.compile(
@@ -409,25 +389,4 @@ public class TestInfoSubCommand {
return dns;
}
- private static class TestAppender extends AppenderSkeleton {
- private final List<LoggingEvent> log = new ArrayList<>();
-
- @Override
- public boolean requiresLayout() {
- return false;
- }
-
- @Override
- protected void append(final LoggingEvent loggingEvent) {
- log.add(loggingEvent);
- }
-
- @Override
- public void close() {
- }
-
- public List<LoggingEvent> getLog() {
- return new ArrayList<>(log);
- }
- }
}
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]