HDDS-379. Simplify and improve the cli arg parsing of ozone scmcli. Contributed by Elek, Marton.
Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/76bae4cc Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/76bae4cc Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/76bae4cc Branch: refs/heads/trunk Commit: 76bae4ccb1d929260038b1869be8070c2320b617 Parents: 50d2e3e Author: Anu Engineer <aengin...@apache.org> Authored: Fri Aug 31 18:11:01 2018 -0700 Committer: Anu Engineer <aengin...@apache.org> Committed: Fri Aug 31 18:11:01 2018 -0700 ---------------------------------------------------------------------- .../common/dev-support/findbugsExcludeFile.xml | 4 + .../org/apache/hadoop/hdds/cli/GenericCli.java | 82 +++ .../hadoop/hdds/cli/HddsVersionProvider.java | 35 ++ .../apache/hadoop/hdds/cli/package-info.java | 22 + hadoop-hdds/pom.xml | 5 + .../hadoop/hdds/scm/cli/OzoneBaseCLI.java | 43 -- .../hdds/scm/cli/OzoneCommandHandler.java | 87 ---- .../apache/hadoop/hdds/scm/cli/ResultCode.java | 31 -- .../org/apache/hadoop/hdds/scm/cli/SCMCLI.java | 246 +++------ .../cli/container/CloseContainerHandler.java | 85 --- .../hdds/scm/cli/container/CloseSubcommand.java | 54 ++ .../cli/container/ContainerCommandHandler.java | 128 ----- .../cli/container/CreateContainerHandler.java | 67 --- .../scm/cli/container/CreateSubcommand.java | 65 +++ .../cli/container/DeleteContainerHandler.java | 95 ---- .../scm/cli/container/DeleteSubcommand.java | 60 +++ .../scm/cli/container/InfoContainerHandler.java | 114 ---- .../hdds/scm/cli/container/InfoSubcommand.java | 94 ++++ .../scm/cli/container/ListContainerHandler.java | 117 ----- .../hdds/scm/cli/container/ListSubcommand.java | 83 +++ .../hdds/scm/cli/container/package-info.java | 3 + .../hadoop/hdds/scm/cli/package-info.java | 12 +- hadoop-ozone/common/src/main/bin/ozone | 2 +- .../org/apache/hadoop/ozone/scm/TestSCMCli.java | 518 ------------------- 24 files changed, 596 insertions(+), 1456 deletions(-) 
---------------------------------------------------------------------- http://git-wip-us.apache.org/repos/asf/hadoop/blob/76bae4cc/hadoop-hdds/common/dev-support/findbugsExcludeFile.xml ---------------------------------------------------------------------- diff --git a/hadoop-hdds/common/dev-support/findbugsExcludeFile.xml b/hadoop-hdds/common/dev-support/findbugsExcludeFile.xml index daf6fec..c7db679 100644 --- a/hadoop-hdds/common/dev-support/findbugsExcludeFile.xml +++ b/hadoop-hdds/common/dev-support/findbugsExcludeFile.xml @@ -21,4 +21,8 @@ <Match> <Package name="org.apache.hadoop.hdds.protocol.datanode.proto"/> </Match> + <Match> + <Class name="org.apache.hadoop.hdds.cli.GenericCli"></Class> + <Bug pattern="DM_EXIT" /> + </Match> </FindBugsFilter> http://git-wip-us.apache.org/repos/asf/hadoop/blob/76bae4cc/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/cli/GenericCli.java ---------------------------------------------------------------------- diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/cli/GenericCli.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/cli/GenericCli.java new file mode 100644 index 0000000..2b3e6c0 --- /dev/null +++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/cli/GenericCli.java @@ -0,0 +1,82 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with this + * work for additional information regarding copyright ownership. The ASF + * licenses this file to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * <p> + * http://www.apache.org/licenses/LICENSE-2.0 + * <p> + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. + */ +package org.apache.hadoop.hdds.cli; + +import java.util.HashMap; +import java.util.Map; +import java.util.Map.Entry; +import java.util.concurrent.Callable; + +import org.apache.hadoop.hdds.conf.OzoneConfiguration; + +import picocli.CommandLine; +import picocli.CommandLine.ExecutionException; +import picocli.CommandLine.Option; +import picocli.CommandLine.ParameterException; +import picocli.CommandLine.RunLast; + +/** + * This is a generic parent class for all the ozone related cli tools. + */ +public class GenericCli implements Callable<Void> { + + @Option(names = {"--verbose"}, + description = "More verbose output. 
+ Show the stack trace of the errors.") + private boolean verbose; + + @Option(names = {"-D", "--set"}) + private Map<String, String> configurationOverrides = new HashMap<>(); + + private final CommandLine cmd; + + public GenericCli() { + cmd = new CommandLine(this); + } + + public void run(String[] argv) { + try { + cmd.parseWithHandler(new RunLast(), argv); + } catch (ExecutionException ex) { + printError(ex.getCause()); + System.exit(-1); + } + } + + private void printError(Throwable error) { + if (verbose) { + error.printStackTrace(System.err); + } else { + System.err.println(error.getMessage().split("\n")[0]); + } + } + + @Override + public Void call() throws Exception { + throw new ParameterException(cmd, "Please choose a subcommand"); + } + + public OzoneConfiguration createOzoneConfiguration() { + OzoneConfiguration ozoneConf = new OzoneConfiguration(); + if (configurationOverrides != null) { + for (Entry<String, String> entry : configurationOverrides.entrySet()) { + ozoneConf + .set(entry.getKey(), entry.getValue()); + } + } + return ozoneConf; + } +} http://git-wip-us.apache.org/repos/asf/hadoop/blob/76bae4cc/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/cli/HddsVersionProvider.java ---------------------------------------------------------------------- diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/cli/HddsVersionProvider.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/cli/HddsVersionProvider.java new file mode 100644 index 0000000..7110839 --- /dev/null +++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/cli/HddsVersionProvider.java @@ -0,0 +1,35 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. 
The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * <p> + * http://www.apache.org/licenses/LICENSE-2.0 + * <p> + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hdds.cli; + +import org.apache.hadoop.utils.HddsVersionInfo; + +import picocli.CommandLine.IVersionProvider; + +/** + * Version provider for the CLI interface. + */ +public class HddsVersionProvider implements IVersionProvider { + @Override + public String[] getVersion() throws Exception { + String[] result = new String[] { + HddsVersionInfo.getBuildVersion() + }; + return result; + } +} http://git-wip-us.apache.org/repos/asf/hadoop/blob/76bae4cc/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/cli/package-info.java ---------------------------------------------------------------------- diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/cli/package-info.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/cli/package-info.java new file mode 100644 index 0000000..8dcc1d1 --- /dev/null +++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/cli/package-info.java @@ -0,0 +1,22 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/** + * Generic helper class to make instantiate picocli based cli tools. + */ +package org.apache.hadoop.hdds.cli; \ No newline at end of file http://git-wip-us.apache.org/repos/asf/hadoop/blob/76bae4cc/hadoop-hdds/pom.xml ---------------------------------------------------------------------- diff --git a/hadoop-hdds/pom.xml b/hadoop-hdds/pom.xml index 428fd11..d97e6d8 100644 --- a/hadoop-hdds/pom.xml +++ b/hadoop-hdds/pom.xml @@ -69,6 +69,11 @@ http://maven.apache.org/xsd/maven-4.0.0.xsd"> <type>test-jar</type> </dependency> <dependency> + <groupId>info.picocli</groupId> + <artifactId>picocli</artifactId> + <version>3.5.2</version> + </dependency> + <dependency> <groupId>com.google.protobuf</groupId> <artifactId>protobuf-java</artifactId> <scope>compile</scope> http://git-wip-us.apache.org/repos/asf/hadoop/blob/76bae4cc/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/OzoneBaseCLI.java ---------------------------------------------------------------------- diff --git a/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/OzoneBaseCLI.java b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/OzoneBaseCLI.java deleted file mode 100644 index 7828445..0000000 --- a/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/OzoneBaseCLI.java +++ /dev/null @@ -1,43 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.apache.hadoop.hdds.scm.cli; - -import org.apache.commons.cli.CommandLine; -import org.apache.commons.cli.Options; -import org.apache.commons.cli.ParseException; -import org.apache.hadoop.conf.Configured; -import org.apache.hadoop.util.Tool; - -import java.io.IOException; -import java.net.URISyntaxException; - -/** - * This class is the base CLI for scm, om and scmadm. - */ -public abstract class OzoneBaseCLI extends Configured implements Tool { - - protected abstract int dispatch(CommandLine cmd, Options opts) - throws IOException, URISyntaxException; - - protected abstract CommandLine parseArgs(String[] argv, Options opts) - throws ParseException; - - protected abstract Options getOptions(); - - protected abstract void displayHelp(); -} http://git-wip-us.apache.org/repos/asf/hadoop/blob/76bae4cc/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/OzoneCommandHandler.java ---------------------------------------------------------------------- diff --git a/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/OzoneCommandHandler.java b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/OzoneCommandHandler.java deleted file mode 100644 index 641dd0e..0000000 --- a/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/OzoneCommandHandler.java +++ /dev/null @@ -1,87 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under 
one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.apache.hadoop.hdds.scm.cli; - -import org.apache.commons.cli.CommandLine; -import org.apache.hadoop.hdds.scm.client.ScmClient; - -import java.io.IOException; -import java.io.PrintStream; - -/** - * The abstract class of all SCM CLI commands. - */ -public abstract class OzoneCommandHandler { - - private ScmClient scmClient; - private PrintStream out = System.out; - private PrintStream err = System.err; - - /** - * Constructs a handler object. - */ - public OzoneCommandHandler(ScmClient scmClient) { - this.scmClient = scmClient; - } - - protected ScmClient getScmClient() { - return scmClient; - } - - /** - * Sets customized output stream to redirect the stdout to somewhere else. - * @param out - */ - public void setOut(PrintStream out) { - this.out = out; - } - - /** - * Sets customized error stream to redirect the stderr to somewhere else. - * @param err - */ - public void setErr(PrintStream err) { - this.err = err; - } - - public void logOut(String msg, String... variable) { - this.out.println(String.format(msg, variable)); - } - - /** - * Executes the Client command. - * - * @param cmd - CommandLine. - * @throws IOException throws exception. 
- */ - public abstract void execute(CommandLine cmd) throws IOException; - - /** - * Display a help message describing the options the command takes. - * TODO : currently only prints to standard out, may want to change this. - */ - public abstract void displayHelp(); - - public PrintStream getOut() { - return out; - } - - public PrintStream getErr() { - return err; - } -} http://git-wip-us.apache.org/repos/asf/hadoop/blob/76bae4cc/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/ResultCode.java ---------------------------------------------------------------------- diff --git a/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/ResultCode.java b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/ResultCode.java deleted file mode 100644 index 27df88c..0000000 --- a/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/ResultCode.java +++ /dev/null @@ -1,31 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.apache.hadoop.hdds.scm.cli; - -/** - * The possible result code of SCM CLI. 
- */ -public final class ResultCode { - public static final int SUCCESS = 1; - - public static final int UNRECOGNIZED_CMD = 2; - - public static final int EXECUTION_ERROR = 3; - - private ResultCode() {} -} http://git-wip-us.apache.org/repos/asf/hadoop/blob/76bae4cc/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/SCMCLI.java ---------------------------------------------------------------------- diff --git a/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/SCMCLI.java b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/SCMCLI.java index f54322c..59cd0ba 100644 --- a/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/SCMCLI.java +++ b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/SCMCLI.java @@ -6,9 +6,9 @@ * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * + * <p> + * http://www.apache.org/licenses/LICENSE-2.0 + * <p> * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
@@ -17,69 +17,73 @@ */ package org.apache.hadoop.hdds.scm.cli; -import org.apache.commons.cli.BasicParser; -import org.apache.commons.cli.CommandLine; -import org.apache.commons.cli.HelpFormatter; -import org.apache.commons.cli.Option; -import org.apache.commons.cli.Options; -import org.apache.commons.cli.ParseException; +import java.io.IOException; +import java.net.InetSocketAddress; + import org.apache.hadoop.conf.StorageUnit; +import org.apache.hadoop.hdds.HddsUtils; +import org.apache.hadoop.hdds.cli.GenericCli; +import org.apache.hadoop.hdds.cli.HddsVersionProvider; +import org.apache.hadoop.hdds.conf.OzoneConfiguration; +import org.apache.hadoop.hdds.scm.ScmConfigKeys; import org.apache.hadoop.hdds.scm.XceiverClientManager; -import org.apache.hadoop.hdds.scm.cli.container.ContainerCommandHandler; -import org.apache.hadoop.hdds.scm.cli.container.CreateContainerHandler; +import org.apache.hadoop.hdds.scm.cli.container.CloseSubcommand; +import org.apache.hadoop.hdds.scm.cli.container.CreateSubcommand; +import org.apache.hadoop.hdds.scm.cli.container.DeleteSubcommand; +import org.apache.hadoop.hdds.scm.cli.container.InfoSubcommand; +import org.apache.hadoop.hdds.scm.cli.container.ListSubcommand; import org.apache.hadoop.hdds.scm.client.ContainerOperationClient; import org.apache.hadoop.hdds.scm.client.ScmClient; +import org.apache.hadoop.hdds.scm.container.common.helpers.ContainerInfo; import org.apache.hadoop.hdds.scm.protocolPB .StorageContainerLocationProtocolClientSideTranslatorPB; import org.apache.hadoop.hdds.scm.protocolPB.StorageContainerLocationProtocolPB; -import org.apache.hadoop.hdds.conf.OzoneConfiguration; import org.apache.hadoop.ipc.Client; import org.apache.hadoop.ipc.ProtobufRpcEngine; import org.apache.hadoop.ipc.RPC; import org.apache.hadoop.net.NetUtils; import org.apache.hadoop.ozone.OzoneConsts; import org.apache.hadoop.security.UserGroupInformation; -import org.apache.hadoop.util.ToolRunner; - -import java.io.IOException; -import 
java.io.PrintStream; -import java.net.InetSocketAddress; -import java.net.URISyntaxException; -import java.util.Arrays; +import org.apache.hadoop.util.NativeCodeLoader; +import org.apache.commons.lang3.StringUtils; +import static org.apache.hadoop.hdds.HddsUtils.getScmAddressForClients; import static org.apache.hadoop.hdds.scm.ScmConfigKeys - .OZONE_SCM_CONTAINER_SIZE_DEFAULT; + .OZONE_SCM_CLIENT_ADDRESS_KEY; +import static org.apache.hadoop.hdds.scm.ScmConfigKeys.OZONE_SCM_CONTAINER_SIZE; import static org.apache.hadoop.hdds.scm.ScmConfigKeys - .OZONE_SCM_CONTAINER_SIZE; -import static org.apache.hadoop.hdds.HddsUtils.getScmAddressForClients; -import static org.apache.hadoop.hdds.scm.cli.ResultCode.EXECUTION_ERROR; -import static org.apache.hadoop.hdds.scm.cli.ResultCode.SUCCESS; -import static org.apache.hadoop.hdds.scm.cli.ResultCode.UNRECOGNIZED_CMD; + .OZONE_SCM_CONTAINER_SIZE_DEFAULT; +import org.apache.log4j.ConsoleAppender; +import org.apache.log4j.Level; +import org.apache.log4j.LogManager; +import org.apache.log4j.Logger; +import org.apache.log4j.PatternLayout; +import picocli.CommandLine.Command; +import picocli.CommandLine.Option; /** * This class is the CLI of SCM. */ -public class SCMCLI extends OzoneBaseCLI { - - public static final String HELP_OP = "help"; - public static final int CMD_WIDTH = 80; - private final ScmClient scmClient; - private final PrintStream out; - private final PrintStream err; - - private final Options options; - - public SCMCLI(ScmClient scmClient) { - this(scmClient, System.out, System.err); - } - - public SCMCLI(ScmClient scmClient, PrintStream out, PrintStream err) { - this.scmClient = scmClient; - this.out = out; - this.err = err; - this.options = getOptions(); - } +/** + * Container subcommand. 
+ */ +@Command(name = "ozone scmcli", hidden = true, description = + "Developer tools to handle SCM specific " + + "operations.", + versionProvider = HddsVersionProvider.class, + subcommands = { + ListSubcommand.class, + InfoSubcommand.class, + DeleteSubcommand.class, + CreateSubcommand.class, + CloseSubcommand.class + }, + mixinStandardHelpOptions = true) +public class SCMCLI extends GenericCli { + + @Option(names = {"--scm"}, description = "The destination scm (host:port)") + private String scm = ""; /** * Main for the scm shell Command handling. @@ -88,30 +92,40 @@ public class SCMCLI extends OzoneBaseCLI { * @throws Exception */ public static void main(String[] argv) throws Exception { - OzoneConfiguration conf = new OzoneConfiguration(); - ScmClient scmClient = getScmClient(conf); - SCMCLI shell = new SCMCLI(scmClient); - conf.setQuietMode(false); - shell.setConf(conf); - int res = 0; - try { - res = ToolRunner.run(shell, argv); - } catch (Exception ex) { - System.exit(1); - } - System.exit(res); + + LogManager.resetConfiguration(); + Logger.getRootLogger().setLevel(Level.INFO); + Logger.getRootLogger() + .addAppender(new ConsoleAppender(new PatternLayout("%m%n"))); + Logger.getLogger(NativeCodeLoader.class).setLevel(Level.ERROR); + + new SCMCLI().run(argv); } - private static ScmClient getScmClient(OzoneConfiguration ozoneConf) + public ScmClient createScmClient() throws IOException { + + OzoneConfiguration ozoneConf = createOzoneConfiguration(); + if (StringUtils.isNotEmpty(scm)) { + ozoneConf.set(OZONE_SCM_CLIENT_ADDRESS_KEY, scm); + } + if (!HddsUtils.getHostNameFromConfigKeys(ozoneConf, + ScmConfigKeys.OZONE_SCM_CLIENT_ADDRESS_KEY).isPresent()) { + + throw new IllegalArgumentException( + ScmConfigKeys.OZONE_SCM_CLIENT_ADDRESS_KEY + + " should be set in ozone-site.xml or with the --scm option"); + } + long version = RPC.getProtocolVersion( StorageContainerLocationProtocolPB.class); InetSocketAddress scmAddress = getScmAddressForClients(ozoneConf); - int 
containerSizeGB = (int)ozoneConf.getStorageSize( + int containerSizeGB = (int) ozoneConf.getStorageSize( OZONE_SCM_CONTAINER_SIZE, OZONE_SCM_CONTAINER_SIZE_DEFAULT, StorageUnit.GB); - ContainerOperationClient.setContainerSizeB(containerSizeGB*OzoneConsts.GB); + ContainerOperationClient + .setContainerSizeB(containerSizeGB * OzoneConsts.GB); RPC.setProtocolEngine(ozoneConf, StorageContainerLocationProtocolPB.class, ProtobufRpcEngine.class); @@ -121,116 +135,16 @@ public class SCMCLI extends OzoneBaseCLI { scmAddress, UserGroupInformation.getCurrentUser(), ozoneConf, NetUtils.getDefaultSocketFactory(ozoneConf), Client.getRpcTimeout(ozoneConf))); - ScmClient storageClient = new ContainerOperationClient( + return new ContainerOperationClient( client, new XceiverClientManager(ozoneConf)); - return storageClient; - } - - /** - * Adds ALL the options that hdfs scm command supports. Given the hierarchy - * of commands, the options are added in a cascading manner, e.g.: - * {@link SCMCLI} asks {@link ContainerCommandHandler} to add it's options, - * which then asks it's sub command, such as - * {@link CreateContainerHandler} - * to add it's own options. - * - * We need to do this because {@link BasicParser} need to take all the options - * when paring args. - * @return ALL the options supported by this CLI. - */ - @Override - protected Options getOptions() { - Options newOptions = new Options(); - // add the options - addTopLevelOptions(newOptions); - ContainerCommandHandler.addOptions(newOptions); - // TODO : add pool, node and pipeline commands. - addHelpOption(newOptions); - return newOptions; - } - - private static void addTopLevelOptions(Options options) { - Option containerOps = - new Option(ContainerCommandHandler.CONTAINER_CMD, false, - "Container related options"); - options.addOption(containerOps); - // TODO : add pool, node and pipeline commands. 
- } - - private static void addHelpOption(Options options) { - Option helpOp = new Option(HELP_OP, false, "display help message"); - options.addOption(helpOp); } - @Override - protected void displayHelp() { - HelpFormatter helpFormatter = new HelpFormatter(); - Options topLevelOptions = new Options(); - addTopLevelOptions(topLevelOptions); - helpFormatter.printHelp(CMD_WIDTH, "hdfs scmcli <commands> [<options>]", - "where <commands> can be one of the following", - topLevelOptions, ""); - } - - @Override - public int run(String[] args) throws Exception { - CommandLine cmd = parseArgs(args, options); - if (cmd == null) { - err.println("Unrecognized options:" + Arrays.asList(args)); - displayHelp(); - return UNRECOGNIZED_CMD; - } - return dispatch(cmd, options); - } - - /** - * This function parses all command line arguments - * and returns the appropriate values. - * - * @param argv - Argv from main - * - * @return CommandLine - */ - @Override - protected CommandLine parseArgs(String[] argv, Options opts) - throws ParseException { - try { - BasicParser parser = new BasicParser(); - return parser.parse(opts, argv); - } catch (ParseException ex) { - err.println(ex.getMessage()); + public void checkContainerExists(ScmClient scmClient, long containerId) + throws IOException { + ContainerInfo container = scmClient.getContainer(containerId); + if (container == null) { + throw new IllegalArgumentException("No such container " + containerId); } - return null; } - @Override - protected int dispatch(CommandLine cmd, Options opts) - throws IOException, URISyntaxException { - OzoneCommandHandler handler = null; - try { - if (cmd.hasOption(ContainerCommandHandler.CONTAINER_CMD)) { - handler = new ContainerCommandHandler(scmClient); - } - - if (handler == null) { - if (cmd.hasOption(HELP_OP)) { - displayHelp(); - return SUCCESS; - } else { - displayHelp(); - err.println("Unrecognized command: " + Arrays.asList(cmd.getArgs())); - return UNRECOGNIZED_CMD; - } - } else { - // 
Redirect stdout and stderr if necessary. - handler.setOut(this.out); - handler.setErr(this.err); - handler.execute(cmd); - return SUCCESS; - } - } catch (IOException ioe) { - err.println("Error executing command:" + ioe); - return EXECUTION_ERROR; - } - } } http://git-wip-us.apache.org/repos/asf/hadoop/blob/76bae4cc/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/container/CloseContainerHandler.java ---------------------------------------------------------------------- diff --git a/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/container/CloseContainerHandler.java b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/container/CloseContainerHandler.java deleted file mode 100644 index e2267da..0000000 --- a/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/container/CloseContainerHandler.java +++ /dev/null @@ -1,85 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.apache.hadoop.hdds.scm.cli.container; - -import org.apache.commons.cli.CommandLine; -import org.apache.commons.cli.HelpFormatter; -import org.apache.commons.cli.Option; -import org.apache.commons.cli.Options; -import org.apache.hadoop.hdds.scm.cli.OzoneCommandHandler; -import org.apache.hadoop.hdds.scm.cli.SCMCLI; -import org.apache.hadoop.hdds.scm.client.ScmClient; - -import java.io.IOException; -import org.apache.hadoop.hdds.scm.container.common.helpers.ContainerWithPipeline; - -/** - * The handler of close container command. - */ -public class CloseContainerHandler extends OzoneCommandHandler { - - public static final String CONTAINER_CLOSE = "close"; - public static final String OPT_CONTAINER_ID = "c"; - - @Override - public void execute(CommandLine cmd) throws IOException { - if (!cmd.hasOption(CONTAINER_CLOSE)) { - throw new IOException("Expecting container close"); - } - if (!cmd.hasOption(OPT_CONTAINER_ID)) { - displayHelp(); - if (!cmd.hasOption(SCMCLI.HELP_OP)) { - throw new IOException("Expecting container id"); - } else { - return; - } - } - String containerID = cmd.getOptionValue(OPT_CONTAINER_ID); - - ContainerWithPipeline container = getScmClient(). 
- getContainerWithPipeline(Long.parseLong(containerID)); - if (container == null) { - throw new IOException("Cannot close an non-exist container " - + containerID); - } - logOut("Closing container : %s.", containerID); - getScmClient() - .closeContainer(container.getContainerInfo().getContainerID()); - logOut("Container closed."); - } - - @Override - public void displayHelp() { - Options options = new Options(); - addOptions(options); - HelpFormatter helpFormatter = new HelpFormatter(); - helpFormatter - .printHelp(SCMCLI.CMD_WIDTH, "hdfs scm -container -close <option>", - "where <option> is", options, ""); - } - - public static void addOptions(Options options) { - Option containerNameOpt = new Option(OPT_CONTAINER_ID, - true, "Specify container ID"); - options.addOption(containerNameOpt); - } - - CloseContainerHandler(ScmClient client) { - super(client); - } -} http://git-wip-us.apache.org/repos/asf/hadoop/blob/76bae4cc/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/container/CloseSubcommand.java ---------------------------------------------------------------------- diff --git a/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/container/CloseSubcommand.java b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/container/CloseSubcommand.java new file mode 100644 index 0000000..173d0ce --- /dev/null +++ b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/container/CloseSubcommand.java @@ -0,0 +1,54 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * <p> + * http://www.apache.org/licenses/LICENSE-2.0 + * <p> + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hdds.scm.cli.container; + +import java.util.concurrent.Callable; + +import org.apache.hadoop.hdds.cli.HddsVersionProvider; +import org.apache.hadoop.hdds.scm.cli.SCMCLI; +import org.apache.hadoop.hdds.scm.client.ScmClient; + +import picocli.CommandLine.Command; +import picocli.CommandLine.Parameters; +import picocli.CommandLine.ParentCommand; + +/** + * The handler of close container command. + */ +@Command( + name = "close", + description = "close container", + mixinStandardHelpOptions = true, + versionProvider = HddsVersionProvider.class) +public class CloseSubcommand implements Callable<Void> { + + @ParentCommand + private SCMCLI parent; + + @Parameters(description = "Id of the container to close") + private long containerId; + + @Override + public Void call() throws Exception { + try (ScmClient scmClient = parent.createScmClient()) { + parent.checkContainerExists(scmClient, containerId); + scmClient.closeContainer(containerId); + return null; + } + } +} http://git-wip-us.apache.org/repos/asf/hadoop/blob/76bae4cc/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/container/ContainerCommandHandler.java ---------------------------------------------------------------------- diff --git a/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/container/ContainerCommandHandler.java b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/container/ContainerCommandHandler.java deleted file mode 100644 index 428f179..0000000 --- 
a/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/container/ContainerCommandHandler.java +++ /dev/null @@ -1,128 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.apache.hadoop.hdds.scm.cli.container; - -import org.apache.commons.cli.CommandLine; -import org.apache.commons.cli.HelpFormatter; -import org.apache.commons.cli.Option; -import org.apache.commons.cli.Options; -import org.apache.hadoop.hdds.scm.cli.OzoneCommandHandler; -import org.apache.hadoop.hdds.scm.client.ScmClient; - -import java.io.IOException; -import java.util.Arrays; - -import static org.apache.hadoop.hdds.scm.cli.SCMCLI.CMD_WIDTH; -import static org.apache.hadoop.hdds.scm.cli.SCMCLI.HELP_OP; -import static org.apache.hadoop.hdds.scm.cli.container.CloseContainerHandler - .CONTAINER_CLOSE; -import static org.apache.hadoop.hdds.scm.cli.container.CreateContainerHandler - .CONTAINER_CREATE; -import static org.apache.hadoop.hdds.scm.cli.container.DeleteContainerHandler - .CONTAINER_DELETE; -import static org.apache.hadoop.hdds.scm.cli.container.InfoContainerHandler - .CONTAINER_INFO; -import static org.apache.hadoop.hdds.scm.cli.container.ListContainerHandler - .CONTAINER_LIST; - -/** - * The handler class of 
container-specific commands, e.g. addContainer. - */ -public class ContainerCommandHandler extends OzoneCommandHandler { - - public static final String CONTAINER_CMD = "container"; - - public ContainerCommandHandler(ScmClient scmClient) { - super(scmClient); - } - - @Override - public void execute(CommandLine cmd) throws IOException { - // all container commands should contain -container option - if (!cmd.hasOption(CONTAINER_CMD)) { - throw new IOException("Expecting container cmd"); - } - // check which each the sub command it is - OzoneCommandHandler handler = null; - if (cmd.hasOption(CONTAINER_CREATE)) { - handler = new CreateContainerHandler(getScmClient()); - } else if (cmd.hasOption(CONTAINER_DELETE)) { - handler = new DeleteContainerHandler(getScmClient()); - } else if (cmd.hasOption(CONTAINER_INFO)) { - handler = new InfoContainerHandler(getScmClient()); - } else if (cmd.hasOption(CONTAINER_LIST)) { - handler = new ListContainerHandler(getScmClient()); - } else if (cmd.hasOption(CONTAINER_CLOSE)) { - handler = new CloseContainerHandler(getScmClient()); - } - - // execute the sub command, throw exception if no sub command found - // unless -help option is given. 
- if (handler != null) { - handler.setOut(this.getOut()); - handler.setErr(this.getErr()); - handler.execute(cmd); - } else { - displayHelp(); - if (!cmd.hasOption(HELP_OP)) { - throw new IOException("Unrecognized command " - + Arrays.asList(cmd.getArgs())); - } - } - } - - @Override - public void displayHelp() { - Options options = new Options(); - addCommandsOption(options); - HelpFormatter helpFormatter = new HelpFormatter(); - helpFormatter.printHelp(CMD_WIDTH, - "hdfs scm -container <commands> <options>", - "where <commands> can be one of the following", options, ""); - } - - private static void addCommandsOption(Options options) { - Option createContainer = - new Option(CONTAINER_CREATE, false, "Create container"); - Option infoContainer = - new Option(CONTAINER_INFO, false, "Info container"); - Option deleteContainer = - new Option(CONTAINER_DELETE, false, "Delete container"); - Option listContainer = - new Option(CONTAINER_LIST, false, "List container"); - Option closeContainer = - new Option(CONTAINER_CLOSE, false, "Close container"); - - options.addOption(createContainer); - options.addOption(deleteContainer); - options.addOption(infoContainer); - options.addOption(listContainer); - options.addOption(closeContainer); - // Every new option should add it's option here. - } - - public static void addOptions(Options options) { - addCommandsOption(options); - // for create container options. - DeleteContainerHandler.addOptions(options); - InfoContainerHandler.addOptions(options); - ListContainerHandler.addOptions(options); - CloseContainerHandler.addOptions(options); - // Every new option should add it's option here. 
- } -} http://git-wip-us.apache.org/repos/asf/hadoop/blob/76bae4cc/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/container/CreateContainerHandler.java ---------------------------------------------------------------------- diff --git a/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/container/CreateContainerHandler.java b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/container/CreateContainerHandler.java deleted file mode 100644 index 278ee30..0000000 --- a/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/container/CreateContainerHandler.java +++ /dev/null @@ -1,67 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.apache.hadoop.hdds.scm.cli.container; - -import org.apache.commons.cli.CommandLine; -import org.apache.commons.cli.HelpFormatter; -import org.apache.commons.cli.Options; -import org.apache.hadoop.hdds.scm.cli.OzoneCommandHandler; -import org.apache.hadoop.hdds.scm.client.ScmClient; - -import java.io.IOException; - -import static org.apache.hadoop.hdds.scm.cli.SCMCLI.CMD_WIDTH; -import static org.apache.hadoop.hdds.scm.cli.SCMCLI.HELP_OP; - -/** - * This is the handler that process container creation command. 
- */ -public class CreateContainerHandler extends OzoneCommandHandler { - - public static final String CONTAINER_CREATE = "create"; - public static final String CONTAINER_OWNER = "OZONE"; - // TODO Support an optional -p <pipelineID> option to create - // container on given datanodes. - - public CreateContainerHandler(ScmClient scmClient) { - super(scmClient); - } - - @Override - public void execute(CommandLine cmd) throws IOException { - if (cmd.hasOption(HELP_OP)) { - displayHelp(); - } - - if (!cmd.hasOption(CONTAINER_CREATE)) { - throw new IOException("Expecting container create"); - } - - logOut("Creating container..."); - getScmClient().createContainer(CONTAINER_OWNER); - logOut("Container created."); - } - - @Override - public void displayHelp() { - Options options = new Options(); - HelpFormatter helpFormatter = new HelpFormatter(); - helpFormatter.printHelp(CMD_WIDTH, "hdfs scm -container -create", - null, options, null); - } -} http://git-wip-us.apache.org/repos/asf/hadoop/blob/76bae4cc/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/container/CreateSubcommand.java ---------------------------------------------------------------------- diff --git a/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/container/CreateSubcommand.java b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/container/CreateSubcommand.java new file mode 100644 index 0000000..1dda9c4 --- /dev/null +++ b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/container/CreateSubcommand.java @@ -0,0 +1,65 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * <p> + * http://www.apache.org/licenses/LICENSE-2.0 + * <p> + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hdds.scm.cli.container; + +import java.util.concurrent.Callable; + +import org.apache.hadoop.hdds.cli.HddsVersionProvider; +import org.apache.hadoop.hdds.scm.cli.SCMCLI; +import org.apache.hadoop.hdds.scm.client.ScmClient; +import org.apache.hadoop.hdds.scm.container.common.helpers + .ContainerWithPipeline; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import picocli.CommandLine.Command; +import picocli.CommandLine.Option; +import picocli.CommandLine.ParentCommand; + +/** + * This is the handler that process container creation command. 
+ */ +@Command( + name = "create", + description = "Create container", + mixinStandardHelpOptions = true, + versionProvider = HddsVersionProvider.class) +public class CreateSubcommand implements Callable<Void> { + + private static final Logger LOG = + LoggerFactory.getLogger(CreateSubcommand.class); + + @ParentCommand + private SCMCLI parent; + + @Option(description = "Owner of the new container", defaultValue = "OZONE", + required = false, names = { + "-o", "--owner"}) + + private String owner; + + @Override + public Void call() throws Exception { + try (ScmClient scmClient = parent.createScmClient()) { + ContainerWithPipeline container = scmClient.createContainer(owner); + LOG.info("Container {} is created.", + container.getContainerInfo().getContainerID()); + return null; + } + } +} http://git-wip-us.apache.org/repos/asf/hadoop/blob/76bae4cc/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/container/DeleteContainerHandler.java ---------------------------------------------------------------------- diff --git a/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/container/DeleteContainerHandler.java b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/container/DeleteContainerHandler.java deleted file mode 100644 index 1b26665..0000000 --- a/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/container/DeleteContainerHandler.java +++ /dev/null @@ -1,95 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.hadoop.hdds.scm.cli.container; - -import com.google.common.base.Preconditions; -import org.apache.commons.cli.CommandLine; -import org.apache.commons.cli.HelpFormatter; -import org.apache.commons.cli.Option; -import org.apache.commons.cli.Options; -import org.apache.hadoop.hdds.scm.cli.OzoneCommandHandler; -import org.apache.hadoop.hdds.scm.client.ScmClient; - -import java.io.IOException; -import org.apache.hadoop.hdds.scm.container.common.helpers.ContainerWithPipeline; - -import static org.apache.hadoop.hdds.scm.cli.SCMCLI.CMD_WIDTH; -import static org.apache.hadoop.hdds.scm.cli.SCMCLI.HELP_OP; - -/** - * This is the handler that process delete container command. 
- */ -public class DeleteContainerHandler extends OzoneCommandHandler { - - protected static final String CONTAINER_DELETE = "delete"; - protected static final String OPT_FORCE = "f"; - protected static final String OPT_CONTAINER_ID = "c"; - - public DeleteContainerHandler(ScmClient scmClient) { - super(scmClient); - } - - @Override - public void execute(CommandLine cmd) throws IOException { - Preconditions.checkArgument(cmd.hasOption(CONTAINER_DELETE), - "Expecting command delete"); - if (!cmd.hasOption(OPT_CONTAINER_ID)) { - displayHelp(); - if (!cmd.hasOption(HELP_OP)) { - throw new IOException("Expecting container name"); - } else { - return; - } - } - - String containerID = cmd.getOptionValue(OPT_CONTAINER_ID); - - ContainerWithPipeline container = getScmClient().getContainerWithPipeline( - Long.parseLong(containerID)); - if (container == null) { - throw new IOException("Cannot delete an non-exist container " - + containerID); - } - - logOut("Deleting container : %s.", containerID); - getScmClient() - .deleteContainer(container.getContainerInfo().getContainerID(), - container.getPipeline(), cmd.hasOption(OPT_FORCE)); - logOut("Container %s deleted.", containerID); - } - - @Override - public void displayHelp() { - Options options = new Options(); - addOptions(options); - HelpFormatter helpFormatter = new HelpFormatter(); - helpFormatter.printHelp(CMD_WIDTH, "hdfs scm -container -delete <option>", - "where <option> is", options, ""); - } - - public static void addOptions(Options options) { - Option forceOpt = new Option(OPT_FORCE, - false, - "forcibly delete a container"); - options.addOption(forceOpt); - Option containerNameOpt = new Option(OPT_CONTAINER_ID, - true, "Specify container id"); - options.addOption(containerNameOpt); - } -} http://git-wip-us.apache.org/repos/asf/hadoop/blob/76bae4cc/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/container/DeleteSubcommand.java ---------------------------------------------------------------------- 
diff --git a/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/container/DeleteSubcommand.java b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/container/DeleteSubcommand.java new file mode 100644 index 0000000..c163a3a --- /dev/null +++ b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/container/DeleteSubcommand.java @@ -0,0 +1,60 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * <p> + * http://www.apache.org/licenses/LICENSE-2.0 + * <p> + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hdds.scm.cli.container; + +import java.util.concurrent.Callable; + +import org.apache.hadoop.hdds.cli.HddsVersionProvider; +import org.apache.hadoop.hdds.scm.cli.SCMCLI; +import org.apache.hadoop.hdds.scm.client.ScmClient; + +import picocli.CommandLine.Command; +import picocli.CommandLine.Option; +import picocli.CommandLine.Parameters; +import picocli.CommandLine.ParentCommand; + +/** + * This is the handler that process delete container command. 
+ */ +@Command( + name = "delete", + description = "Delete container", + mixinStandardHelpOptions = true, + versionProvider = HddsVersionProvider.class) +public class DeleteSubcommand implements Callable<Void> { + + @Parameters(description = "Id of the container to delete") + private long containerId; + + @Option(names = {"-f", + "--force"}, description = "forcibly delete the container") + private boolean force; + + @ParentCommand + private SCMCLI parent; + + @Override + public Void call() throws Exception { + try (ScmClient scmClient = parent.createScmClient()) { + parent.checkContainerExists(scmClient, containerId); + scmClient.deleteContainer(containerId, force); + return null; + } + } +} http://git-wip-us.apache.org/repos/asf/hadoop/blob/76bae4cc/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/container/InfoContainerHandler.java ---------------------------------------------------------------------- diff --git a/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/container/InfoContainerHandler.java b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/container/InfoContainerHandler.java deleted file mode 100644 index 89215fa..0000000 --- a/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/container/InfoContainerHandler.java +++ /dev/null @@ -1,114 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * <p> - * http://www.apache.org/licenses/LICENSE-2.0 - * <p> - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.apache.hadoop.hdds.scm.cli.container; - -import com.google.common.base.Preconditions; -import org.apache.commons.cli.CommandLine; -import org.apache.commons.cli.HelpFormatter; -import org.apache.commons.cli.Option; -import org.apache.commons.cli.Options; -import org.apache.hadoop.hdds.scm.cli.OzoneCommandHandler; -import org.apache.hadoop.hdds.scm.client.ScmClient; -import org.apache.hadoop.hdds.protocol.DatanodeDetails; -import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos - .ContainerData; -import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos - .ContainerLifeCycleState; - -import java.io.IOException; -import java.util.stream.Collectors; -import org.apache.hadoop.hdds.scm.container.common.helpers.ContainerWithPipeline; - -import static org.apache.hadoop.hdds.scm.cli.SCMCLI.CMD_WIDTH; -import static org.apache.hadoop.hdds.scm.cli.SCMCLI.HELP_OP; - -/** - * This is the handler that process container info command. - */ -public class InfoContainerHandler extends OzoneCommandHandler { - - public static final String CONTAINER_INFO = "info"; - protected static final String OPT_CONTAINER_ID = "c"; - - /** - * Constructs a handler object. - * - * @param scmClient scm client. 
- */ - public InfoContainerHandler(ScmClient scmClient) { - super(scmClient); - } - - @Override - public void execute(CommandLine cmd) throws IOException { - if (!cmd.hasOption(CONTAINER_INFO)) { - throw new IOException("Expecting container info"); - } - if (!cmd.hasOption(OPT_CONTAINER_ID)) { - displayHelp(); - if (!cmd.hasOption(HELP_OP)) { - throw new IOException("Expecting container name"); - } else { - return; - } - } - String containerID = cmd.getOptionValue(OPT_CONTAINER_ID); - ContainerWithPipeline container = getScmClient(). - getContainerWithPipeline(Long.parseLong(containerID)); - Preconditions.checkNotNull(container, "Container cannot be null"); - - ContainerData containerData = getScmClient().readContainer(container - .getContainerInfo().getContainerID(), container.getPipeline()); - - // Print container report info. - logOut("Container id: %s", containerID); - String openStatus = - containerData.getState() == ContainerLifeCycleState.OPEN ? "OPEN" : - "CLOSED"; - logOut("Container State: %s", openStatus); - logOut("Container Path: %s", containerData.getContainerPath()); - - // Output meta data. - String metadataStr = containerData.getMetadataList().stream().map( - p -> p.getKey() + ":" + p.getValue()).collect(Collectors.joining(", ")); - logOut("Container Metadata: {%s}", metadataStr); - - // Print pipeline of an existing container. 
- logOut("LeaderID: %s", container.getPipeline() - .getLeader().getHostName()); - String machinesStr = container.getPipeline() - .getMachines().stream().map( - DatanodeDetails::getHostName).collect(Collectors.joining(",")); - logOut("Datanodes: [%s]", machinesStr); - } - - @Override - public void displayHelp() { - Options options = new Options(); - addOptions(options); - HelpFormatter helpFormatter = new HelpFormatter(); - helpFormatter.printHelp(CMD_WIDTH, "hdfs scm -container -info <option>", - "where <option> is", options, ""); - } - - public static void addOptions(Options options) { - Option containerIdOpt = new Option(OPT_CONTAINER_ID, - true, "Specify container id"); - options.addOption(containerIdOpt); - } -} http://git-wip-us.apache.org/repos/asf/hadoop/blob/76bae4cc/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/container/InfoSubcommand.java ---------------------------------------------------------------------- diff --git a/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/container/InfoSubcommand.java b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/container/InfoSubcommand.java new file mode 100644 index 0000000..0135df3 --- /dev/null +++ b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/container/InfoSubcommand.java @@ -0,0 +1,94 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * <p> + * http://www.apache.org/licenses/LICENSE-2.0 + * <p> + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hdds.scm.cli.container; + +import java.util.concurrent.Callable; +import java.util.stream.Collectors; + +import org.apache.hadoop.hdds.cli.HddsVersionProvider; +import org.apache.hadoop.hdds.protocol.DatanodeDetails; +import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos + .ContainerData; +import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos + .ContainerLifeCycleState; +import org.apache.hadoop.hdds.scm.cli.SCMCLI; +import org.apache.hadoop.hdds.scm.client.ScmClient; +import org.apache.hadoop.hdds.scm.container.common.helpers + .ContainerWithPipeline; + +import com.google.common.base.Preconditions; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import picocli.CommandLine.Command; +import picocli.CommandLine.Parameters; +import picocli.CommandLine.ParentCommand; + +/** + * This is the handler that process container info command. + */ +@Command( + name = "info", + description = "Show information about a specific container", + mixinStandardHelpOptions = true, + versionProvider = HddsVersionProvider.class) +public class InfoSubcommand implements Callable<Void> { + + private static final Logger LOG = + LoggerFactory.getLogger(InfoSubcommand.class); + + @ParentCommand + private SCMCLI parent; + + @Parameters(description = "Decimal id of the container.") + private long containerID; + + @Override + public Void call() throws Exception { + try (ScmClient scmClient = parent.createScmClient()) { + ContainerWithPipeline container = scmClient. 
+ getContainerWithPipeline(containerID); + Preconditions.checkNotNull(container, "Container cannot be null"); + + ContainerData containerData = scmClient.readContainer(container + .getContainerInfo().getContainerID(), container.getPipeline()); + + // Print container report info. + LOG.info("Container id: {}", containerID); + String openStatus = + containerData.getState() == ContainerLifeCycleState.OPEN ? "OPEN" : + "CLOSED"; + LOG.info("Container State: {}", openStatus); + LOG.info("Container Path: {}", containerData.getContainerPath()); + + // Output meta data. + String metadataStr = containerData.getMetadataList().stream().map( + p -> p.getKey() + ":" + p.getValue()) + .collect(Collectors.joining(", ")); + LOG.info("Container Metadata: {}", metadataStr); + + // Print pipeline of an existing container. + LOG.info("LeaderID: {}", container.getPipeline() + .getLeader().getHostName()); + String machinesStr = container.getPipeline() + .getMachines().stream().map( + DatanodeDetails::getHostName).collect(Collectors.joining(",")); + LOG.info("Datanodes: [{}]", machinesStr); + return null; + } + } +} http://git-wip-us.apache.org/repos/asf/hadoop/blob/76bae4cc/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/container/ListContainerHandler.java ---------------------------------------------------------------------- diff --git a/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/container/ListContainerHandler.java b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/container/ListContainerHandler.java deleted file mode 100644 index 3483b3e..0000000 --- a/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/container/ListContainerHandler.java +++ /dev/null @@ -1,117 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * <p> - * http://www.apache.org/licenses/LICENSE-2.0 - * <p> - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.apache.hadoop.hdds.scm.cli.container; - -import org.apache.commons.cli.CommandLine; -import org.apache.commons.cli.HelpFormatter; -import org.apache.commons.cli.Option; -import org.apache.commons.cli.Options; -import org.apache.hadoop.hdds.scm.cli.OzoneCommandHandler; -import org.apache.hadoop.hdds.scm.client.ScmClient; -import org.apache.hadoop.hdds.scm.container.common.helpers.ContainerInfo; -import org.apache.hadoop.ozone.web.utils.JsonUtils; - -import java.io.IOException; -import java.util.List; - -import static org.apache.hadoop.hdds.scm.cli.SCMCLI.CMD_WIDTH; -import static org.apache.hadoop.hdds.scm.cli.SCMCLI.HELP_OP; - -/** - * This is the handler that process container list command. - */ -public class ListContainerHandler extends OzoneCommandHandler { - - public static final String CONTAINER_LIST = "list"; - public static final String OPT_START_CONTAINER = "start"; - public static final String OPT_COUNT = "count"; - - /** - * Constructs a handler object. 
- * - * @param scmClient scm client - */ - public ListContainerHandler(ScmClient scmClient) { - super(scmClient); - } - - @Override - public void execute(CommandLine cmd) throws IOException { - if (!cmd.hasOption(CONTAINER_LIST)) { - throw new IOException("Expecting container list"); - } - if (cmd.hasOption(HELP_OP)) { - displayHelp(); - return; - } - - if (!cmd.hasOption(OPT_COUNT)) { - displayHelp(); - if (!cmd.hasOption(HELP_OP)) { - throw new IOException("Expecting container count"); - } else { - return; - } - } - - String startID = cmd.getOptionValue(OPT_START_CONTAINER); - int count = 0; - - if (cmd.hasOption(OPT_COUNT)) { - count = Integer.parseInt(cmd.getOptionValue(OPT_COUNT)); - if (count < 0) { - displayHelp(); - throw new IOException("-count should not be negative"); - } - } - - List<ContainerInfo> containerList = - getScmClient().listContainer( - Long.parseLong(startID), count); - - // Output data list - for (ContainerInfo container : containerList) { - outputContainerInfo(container); - } - } - - private void outputContainerInfo(ContainerInfo containerInfo) - throws IOException { - // Print container report info. 
- logOut("%s", JsonUtils.toJsonStringWithDefaultPrettyPrinter( - containerInfo.toJsonString())); - } - - @Override - public void displayHelp() { - Options options = new Options(); - addOptions(options); - HelpFormatter helpFormatter = new HelpFormatter(); - helpFormatter.printHelp(CMD_WIDTH, "hdfs scm -container -list <option>", - "where <option> can be the following", options, ""); - } - - public static void addOptions(Options options) { - Option startContainerOpt = new Option(OPT_START_CONTAINER, - true, "Specify start container id"); - Option countOpt = new Option(OPT_COUNT, true, - "Specify count number, required"); - options.addOption(countOpt); - options.addOption(startContainerOpt); - } -} http://git-wip-us.apache.org/repos/asf/hadoop/blob/76bae4cc/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/container/ListSubcommand.java ---------------------------------------------------------------------- diff --git a/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/container/ListSubcommand.java b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/container/ListSubcommand.java new file mode 100644 index 0000000..0f520fd --- /dev/null +++ b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/container/ListSubcommand.java @@ -0,0 +1,83 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * <p> + * http://www.apache.org/licenses/LICENSE-2.0 + * <p> + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hdds.scm.cli.container; + +import java.io.IOException; +import java.util.List; +import java.util.concurrent.Callable; + +import org.apache.hadoop.hdds.cli.HddsVersionProvider; +import org.apache.hadoop.hdds.scm.cli.SCMCLI; +import org.apache.hadoop.hdds.scm.client.ScmClient; +import org.apache.hadoop.hdds.scm.container.common.helpers.ContainerInfo; +import org.apache.hadoop.ozone.web.utils.JsonUtils; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import picocli.CommandLine.Command; +import picocli.CommandLine.Help.Visibility; +import picocli.CommandLine.Option; +import picocli.CommandLine.ParentCommand; + +/** + * This is the handler that process container list command. + */ +@Command( + name = "list", + description = "List containers", + mixinStandardHelpOptions = true, + versionProvider = HddsVersionProvider.class) +public class ListSubcommand implements Callable<Void> { + + private static final Logger LOG = + LoggerFactory.getLogger(ListSubcommand.class); + + @ParentCommand + private SCMCLI parent; + + @Option(names = {"-s", "--start"}, + description = "Container id to start the iteration", required = true) + private long startId; + + @Option(names = {"-c", "--count"}, + description = "Maximum number of containers to list", + defaultValue = "20", showDefaultValue = Visibility.ALWAYS) + private int count = 20; + + private void outputContainerInfo(ContainerInfo containerInfo) + throws IOException { + // Print container report info. 
+ LOG.info("{}", JsonUtils.toJsonStringWithDefaultPrettyPrinter( + containerInfo.toJsonString())); + } + + @Override + public Void call() throws Exception { + try (ScmClient scmClient = parent.createScmClient()) { + + List<ContainerInfo> containerList = + scmClient.listContainer(startId, count); + + // Output data list + for (ContainerInfo container : containerList) { + outputContainerInfo(container); + } + return null; + } + } +} http://git-wip-us.apache.org/repos/asf/hadoop/blob/76bae4cc/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/container/package-info.java ---------------------------------------------------------------------- diff --git a/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/container/package-info.java b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/container/package-info.java index 0630df2..ff8adbc 100644 --- a/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/container/package-info.java +++ b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/container/package-info.java @@ -16,4 +16,7 @@ * limitations under the License. */ +/** + * Contains all of the container related scm commands. 
+ */ package org.apache.hadoop.hdds.scm.cli.container; \ No newline at end of file http://git-wip-us.apache.org/repos/asf/hadoop/blob/76bae4cc/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/package-info.java ---------------------------------------------------------------------- diff --git a/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/package-info.java b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/package-info.java index 4762d55..d358b3c 100644 --- a/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/package-info.java +++ b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/package-info.java @@ -6,14 +6,18 @@ * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * + * <p> + * http://www.apache.org/licenses/LICENSE-2.0 + * <p> * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. + * <p> + * SCM related cli tools. + */ +/** + * SCM related cli tools. 
*/ - package org.apache.hadoop.hdds.scm.cli; \ No newline at end of file http://git-wip-us.apache.org/repos/asf/hadoop/blob/76bae4cc/hadoop-ozone/common/src/main/bin/ozone ---------------------------------------------------------------------- diff --git a/hadoop-ozone/common/src/main/bin/ozone b/hadoop-ozone/common/src/main/bin/ozone index 6a30433..17b47a9 100755 --- a/hadoop-ozone/common/src/main/bin/ozone +++ b/hadoop-ozone/common/src/main/bin/ozone @@ -116,7 +116,7 @@ function ozonecmd_case HADOOP_CLASSNAME=org.apache.hadoop.fs.FsShell ;; scmcli) - HADOOP_CLASSNAME=org.apache.hadoop.ozone.scm.cli.SCMCLI + HADOOP_CLASSNAME=org.apache.hadoop.hdds.scm.cli.SCMCLI ;; version) HADOOP_CLASSNAME=org.apache.hadoop.ozone.util.OzoneVersionInfo --------------------------------------------------------------------- To unsubscribe, e-mail: common-commits-unsubscr...@hadoop.apache.org For additional commands, e-mail: common-commits-h...@hadoop.apache.org