This is an automated email from the ASF dual-hosted git repository.

epugh pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/solr.git


The following commit(s) were added to refs/heads/main by this push:
     new 621165a94dd SOLR-17567: Improve Stream CLI implementation (#2872)
621165a94dd is described below

commit 621165a94dd5f72cb422abf28ca69aa38e5f5fca
Author: Eric Pugh <[email protected]>
AuthorDate: Thu Nov 21 20:30:07 2024 -0500

    SOLR-17567: Improve Stream CLI implementation (#2872)
    
    Cleanup and adding polish to the Stream CLI tool after getting more eyes on 
it.  More robust handling of comments in .expr files, better use of CLI 
options, preventing "Strings" from being used to select an Option, in favour of 
an Option object.
    
    ---------
    
    Co-authored-by: Christos Malliaridis <[email protected]>
---
 .../forbidden-apis/commons-cli.commons-cli.all.txt |  16 +++
 .../core/src/java/org/apache/solr/cli/SolrCLI.java |   2 +-
 .../src/java/org/apache/solr/cli/StreamTool.java   |  43 ++++---
 .../java/org/apache/solr/handler/CatStream.java    |   7 +-
 .../test/org/apache/solr/cli/StreamToolTest.java   |  15 ++-
 .../hdfs/snapshots/SolrOnHdfsSnapshotsTool.java    | 126 +++++++++++----------
 solr/packaging/test/test_stream.bats               |   2 +-
 .../modules/query-guide/pages/stream-tool.adoc     |  44 +++++--
 .../solr/client/solrj/io/stream/LetStream.java     |   5 -
 9 files changed, 152 insertions(+), 108 deletions(-)

diff --git a/gradle/validation/forbidden-apis/commons-cli.commons-cli.all.txt 
b/gradle/validation/forbidden-apis/commons-cli.commons-cli.all.txt
new file mode 100644
index 00000000000..469fef8238f
--- /dev/null
+++ b/gradle/validation/forbidden-apis/commons-cli.commons-cli.all.txt
@@ -0,0 +1,16 @@
+@defaultMessage Use an org.apache.commons.cli.Option instead of a String value
+org.apache.commons.cli.CommandLine#hasOption(java.lang.String)
+org.apache.commons.cli.CommandLine#getOptionValue(java.lang.String)
+org.apache.commons.cli.CommandLine#getOptionValue(java.lang.String, 
java.lang.String)
+org.apache.commons.cli.CommandLine#getParsedOptionValue(java.lang.String, 
java.lang.Object)
+org.apache.commons.cli.CommandLine#hasOption(char)
+org.apache.commons.cli.CommandLine#getOptionValue(char)
+org.apache.commons.cli.CommandLine#getOptionValue(char, java.lang.String)
+#org.apache.commons.cli.CommandLine#getOptionValue(char, Supplier<String>)
+org.apache.commons.cli.CommandLine#getOptionValues(char)
+org.apache.commons.cli.CommandLine#getOptionValues(java.lang.String)
+org.apache.commons.cli.CommandLine#getParsedOptionValue(char)
+# org.apache.commons.cli.CommandLine#getParsedOptionValue(char, Supplier<T>)
+org.apache.commons.cli.CommandLine#getParsedOptionValue(char, java.lang.Object)
+org.apache.commons.cli.CommandLine#getParsedOptionValue(java.lang.String)
+# org.apache.commons.cli.CommandLine#getParsedOptionValue(String, Supplier<T>)
diff --git a/solr/core/src/java/org/apache/solr/cli/SolrCLI.java 
b/solr/core/src/java/org/apache/solr/cli/SolrCLI.java
index 32483720d17..15e26ce49c7 100755
--- a/solr/core/src/java/org/apache/solr/cli/SolrCLI.java
+++ b/solr/core/src/java/org/apache/solr/cli/SolrCLI.java
@@ -231,7 +231,7 @@ public class SolrCLI implements CLIO {
    * CLI option.
    */
   public static String getOptionWithDeprecatedAndDefault(
-      CommandLine cli, String opt, String deprecated, String def) {
+      CommandLine cli, Option opt, Option deprecated, String def) {
     String val = cli.getOptionValue(opt);
     if (val == null) {
       val = cli.getOptionValue(deprecated);
diff --git a/solr/core/src/java/org/apache/solr/cli/StreamTool.java 
b/solr/core/src/java/org/apache/solr/cli/StreamTool.java
index 6cb6ae81dbb..39684310f0e 100644
--- a/solr/core/src/java/org/apache/solr/cli/StreamTool.java
+++ b/solr/core/src/java/org/apache/solr/cli/StreamTool.java
@@ -35,6 +35,7 @@ import java.util.Arrays;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Locale;
+import java.util.Map;
 import java.util.Set;
 import org.apache.commons.cli.CommandLine;
 import org.apache.commons.cli.Option;
@@ -77,12 +78,12 @@ public class StreamTool extends ToolBase {
   @Override
   public String getUsage() {
     // Specify that the last argument is the streaming expression
-    return "bin/solr stream [--array-delimiter <CHARACTER>] [-c <NAME>] 
[--delimiter <CHARACTER>] [-e <ENVIRONMENT>] [-f\n"
+    return "bin/solr stream [--array-delimiter <CHARACTER>] [-c <NAME>] 
[--delimiter <CHARACTER>] [--execution <ENVIRONMENT>] [--fields\n"
         + "       <FIELDS>] [-h] [--header] [-s <HOST>] [-u <credentials>] 
[-v] [-z <HOST>]  <streaming expression OR stream_file.expr>\n";
   }
 
   private static final Option EXECUTION_OPTION =
-      Option.builder("e")
+      Option.builder()
           .longOpt("execution")
           .hasArg()
           .argName("ENVIRONMENT")
@@ -100,7 +101,7 @@ public class StreamTool extends ToolBase {
           .build();
 
   private static final Option FIELDS_OPTION =
-      Option.builder("f")
+      Option.builder()
           .longOpt("fields")
           .argName("FIELDS")
           .hasArg()
@@ -229,11 +230,7 @@ public class StreamTool extends ToolBase {
         }
       }
     } finally {
-
-      if (pushBackStream != null) {
-        pushBackStream.close();
-      }
-
+      pushBackStream.close();
       solrClientCache.close();
     }
 
@@ -277,7 +274,7 @@ public class StreamTool extends ToolBase {
 
     Lang.register(streamFactory);
 
-    stream = StreamTool.constructStream(streamFactory, streamExpression);
+    stream = streamFactory.constructStream(streamExpression);
 
     pushBackStream = new PushBackStream(stream);
 
@@ -306,11 +303,11 @@ public class StreamTool extends ToolBase {
   private PushBackStream doRemoteMode(CommandLine cli, String expr) throws 
Exception {
 
     String solrUrl = CLIUtils.normalizeSolrUrl(cli);
-    if (!cli.hasOption("name")) {
+    if (!cli.hasOption(COLLECTION_OPTION)) {
       throw new IllegalStateException(
-          "You must provide --name COLLECTION with --worker solr parameter.");
+          "You must provide --name COLLECTION with --execution remote 
parameter.");
     }
-    String collection = cli.getOptionValue("name");
+    String collection = cli.getOptionValue(COLLECTION_OPTION);
 
     if (expr.toLowerCase(Locale.ROOT).contains("stdin(")) {
       throw new IllegalStateException(
@@ -371,11 +368,10 @@ public class StreamTool extends ToolBase {
       }
     }
 
-    @SuppressWarnings({"unchecked", "rawtypes"})
     @Override
     public Tuple read() throws IOException {
       String line = reader.readLine();
-      HashMap map = new HashMap();
+      Map<String, ?> map = new HashMap<>();
       Tuple tuple = new Tuple(map);
       if (line != null) {
         tuple.put("line", line);
@@ -435,11 +431,15 @@ public class StreamTool extends ToolBase {
 
     @Override
     public void setStreamContext(StreamContext context) {
-      // LocalCatStream has no Solr core to pull from the context
+      // LocalCatStream inherently has no Solr core to pull from the context
     }
 
     @Override
-    protected List<CrawlFile> validateAndSetFilepathsInSandbox(String 
commaDelimitedFilepaths) {
+    protected List<CrawlFile> validateAndSetFilepathsInSandbox() {
+      // The nature of LocalCatStream is that we are not limited to the 
sandboxed "userfiles"
+      // directory
+      // the way the CatStream does.
+
       final List<CrawlFile> crawlSeeds = new ArrayList<>();
       for (String crawlRootStr : commaDelimitedFilepaths.split(",")) {
         Path crawlRootPath = Paths.get(crawlRootStr).normalize();
@@ -483,11 +483,6 @@ public class StreamTool extends ToolBase {
     return buf.toString();
   }
 
-  private static TupleStream constructStream(
-      StreamFactory streamFactory, StreamExpression streamExpression) throws 
IOException {
-    return streamFactory.constructStream(streamExpression);
-  }
-
   static String readExpression(LineNumberReader bufferedReader, String[] args) 
throws IOException {
 
     StringBuilder exprBuff = new StringBuilder();
@@ -499,17 +494,17 @@ public class StreamTool extends ToolBase {
         break;
       }
 
-      if (line.indexOf("/*") == 0) {
+      if (line.trim().indexOf("/*") == 0) {
         comment = true;
         continue;
       }
 
-      if (line.indexOf("*/") == 0) {
+      if (line.trim().contains("*/")) {
         comment = false;
         continue;
       }
 
-      if (comment || line.startsWith("#") || line.startsWith("//")) {
+      if (comment || line.trim().startsWith("#") || 
line.trim().startsWith("//")) {
         continue;
       }
 
diff --git a/solr/core/src/java/org/apache/solr/handler/CatStream.java 
b/solr/core/src/java/org/apache/solr/handler/CatStream.java
index f2515f9b38b..fdffe29e6c0 100644
--- a/solr/core/src/java/org/apache/solr/handler/CatStream.java
+++ b/solr/core/src/java/org/apache/solr/handler/CatStream.java
@@ -49,7 +49,7 @@ import org.slf4j.LoggerFactory;
 public class CatStream extends TupleStream implements Expressible {
   private static final Logger log = 
LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
 
-  private final String commaDelimitedFilepaths;
+  protected final String commaDelimitedFilepaths;
   private final int maxLines; // -1 for no max
 
   private StreamContext context;
@@ -113,8 +113,7 @@ public class CatStream extends TupleStream implements 
Expressible {
 
   @Override
   public void open() throws IOException {
-    final List<CrawlFile> initialCrawlSeeds =
-        validateAndSetFilepathsInSandbox(this.commaDelimitedFilepaths);
+    final List<CrawlFile> initialCrawlSeeds = 
validateAndSetFilepathsInSandbox();
 
     final List<CrawlFile> filesToCrawl = new ArrayList<>();
     for (CrawlFile crawlSeed : initialCrawlSeeds) {
@@ -164,7 +163,7 @@ public class CatStream extends TupleStream implements 
Expressible {
         .withExpression(toExpression(factory).toString());
   }
 
-  protected List<CrawlFile> validateAndSetFilepathsInSandbox(String 
commaDelimitedFilepaths) {
+  protected List<CrawlFile> validateAndSetFilepathsInSandbox() {
     final List<CrawlFile> crawlSeeds = new ArrayList<>();
     for (String crawlRootStr : commaDelimitedFilepaths.split(",")) {
       Path crawlRootPath = chroot.resolve(crawlRootStr).normalize();
diff --git a/solr/core/src/test/org/apache/solr/cli/StreamToolTest.java 
b/solr/core/src/test/org/apache/solr/cli/StreamToolTest.java
index e91ab9e2d81..926e8aa91e3 100644
--- a/solr/core/src/test/org/apache/solr/cli/StreamToolTest.java
+++ b/solr/core/src/test/org/apache/solr/cli/StreamToolTest.java
@@ -100,8 +100,15 @@ public class StreamToolTest extends SolrCloudTestCase {
     buf.println("/*");
     buf.println("Multi-line comment Comment...");
     buf.println("*/");
+    buf.println("  /*");
+    buf.println("Multi-line comment Comment...");
+    buf.println("  */");
+    buf.println("/*");
+    buf.println("Multi-line comment ending with closing chars... */");
     buf.println("// Single line comment");
     buf.println("# Single line comment");
+    buf.println(" // Single line comment");
+    buf.println(" # Single line comment");
     buf.println("let(a=$1, b=$2,");
     buf.println("search($3))");
     buf.println(")");
@@ -227,7 +234,7 @@ public class StreamToolTest extends SolrCloudTestCase {
     String[] args =
         new String[] {
           "stream",
-          "-e",
+          "--execution",
           "remote",
           "--name",
           "fakeCollection",
@@ -246,7 +253,7 @@ public class StreamToolTest extends SolrCloudTestCase {
     String[] args =
         new String[] {
           "stream",
-          "-e",
+          "--execution",
           "local",
           "-v",
           "-z",
@@ -269,7 +276,7 @@ public class StreamToolTest extends SolrCloudTestCase {
     // notice that we do not pass in zkHost or solrUrl for a simple echo run 
locally.
     String[] args = {
       "stream",
-      "-e",
+      "--execution",
       "local",
       "--verbose",
       "-zk-host",
@@ -313,7 +320,7 @@ public class StreamToolTest extends SolrCloudTestCase {
     // test passing in the file
     String[] args = {
       "stream",
-      "-e",
+      "--execution",
       "remote",
       "-c",
       collectionName,
diff --git 
a/solr/modules/hdfs/src/java/org/apache/solr/hdfs/snapshots/SolrOnHdfsSnapshotsTool.java
 
b/solr/modules/hdfs/src/java/org/apache/solr/hdfs/snapshots/SolrOnHdfsSnapshotsTool.java
index c0ce936da1d..92c0f73bf5c 100644
--- 
a/solr/modules/hdfs/src/java/org/apache/solr/hdfs/snapshots/SolrOnHdfsSnapshotsTool.java
+++ 
b/solr/modules/hdfs/src/java/org/apache/solr/hdfs/snapshots/SolrOnHdfsSnapshotsTool.java
@@ -35,13 +35,14 @@ import java.util.Map;
 import java.util.Optional;
 import org.apache.commons.cli.CommandLine;
 import org.apache.commons.cli.CommandLineParser;
+import org.apache.commons.cli.DefaultParser;
 import org.apache.commons.cli.HelpFormatter;
 import org.apache.commons.cli.Option;
 import org.apache.commons.cli.Options;
 import org.apache.commons.cli.ParseException;
-import org.apache.commons.cli.PosixParser;
 import org.apache.hadoop.fs.Path;
 import org.apache.solr.cli.CLIO;
+import org.apache.solr.cli.CommonCLIOptions;
 import org.apache.solr.client.solrj.SolrServerException;
 import org.apache.solr.client.solrj.impl.CloudSolrClient;
 import org.apache.solr.client.solrj.request.CollectionAdminRequest;
@@ -61,28 +62,59 @@ import org.slf4j.LoggerFactory;
  * This class provides utility functions required for Solr on HDFS specific 
snapshots'
  * functionality.
  *
+ * <p>If HDFS remains in Solr 10, then we should migrate this to extending 
ToolBase
+ *
  * <p>For general purpose snapshot tooling see the related classes in the 
{@link
  * org.apache.solr.cli} package.
  */
 public class SolrOnHdfsSnapshotsTool implements Closeable, CLIO {
   private static final Logger log = 
LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
 
-  private static final String PREPARE_FOR_EXPORT = "prepare-snapshot-export";
-  private static final String HELP = "help";
-  private static final String COLLECTION = "c";
-  private static final String TEMP_DIR = "t";
-  private static final String DEST_DIR = "d";
-  private static final String SOLR_ZK_ENSEMBLE = "z";
-  private static final String HDFS_PATH_PREFIX = "p";
+  private static final Option PREPARE_FOR_EXPORT_OPTION =
+      Option.builder()
+          .longOpt("prepare-snapshot-export")
+          .hasArg()
+          .desc(
+              "This command will prepare copylistings for the specified snapshot."
+                  + " This command should only be used if Solr is deployed with Hadoop and collection"
+                  + " index files are stored on a shared file-system e.g. HDFS.")
+          .build();
+  private static final Option HDFS_PATH_PREFIX_OPTION =
+      Option.builder("p")
+          .hasArg()
+          .desc(
+              "This parameter specifies the HDFS URI prefix to be used "
+                  + "during snapshot export preparation. This is applicable 
only if the Solr collection index files are stored on HDFS.")
+          .build();
+
+  private static final Option TEMP_DIR_OPTION =
+      Option.builder("t")
+          .hasArg()
+          .desc(
+              "This parameter specifies the path of a temporary directory on 
local filesystem"
+                  + " during prepare-snapshot-export command.")
+          .build();
+  private static final Option COLLECTION_OPTION =
+      Option.builder("c")
+          .hasArg()
+          .desc(
+              "This parameter specifies the name of the collection to be used 
during snapshot operation")
+          .build();
+  private static final Option DEST_DIR_OPTION =
+      Option.builder("d")
+          .hasArg()
+          .desc(
+              "This parameter specifies the path on shared file-system (e.g. 
HDFS) where the snapshot related"
+                  + " information should be stored.")
+          .build();
+
   private static final List<String> OPTION_HELP_ORDER =
       Arrays.asList(
-          PREPARE_FOR_EXPORT,
-          HELP,
-          SOLR_ZK_ENSEMBLE,
-          COLLECTION,
-          DEST_DIR,
-          TEMP_DIR,
-          HDFS_PATH_PREFIX);
+          PREPARE_FOR_EXPORT_OPTION.getLongOpt(),
+          CommonCLIOptions.HELP_OPTION.getOpt(),
+          CommonCLIOptions.ZK_HOST_OPTION.getOpt(),
+          COLLECTION_OPTION.getOpt(),
+          DEST_DIR_OPTION.getOpt(),
+          TEMP_DIR_OPTION.getOpt(),
+          HDFS_PATH_PREFIX_OPTION.getOpt());
 
   private final CloudSolrClient solrClient;
 
@@ -248,43 +280,17 @@ public class SolrOnHdfsSnapshotsTool implements 
Closeable, CLIO {
   }
 
   public static void main(String[] args) throws IOException {
-    CommandLineParser parser = new PosixParser();
+    CommandLineParser parser = new DefaultParser();
     Options options = new Options();
 
-    options.addOption(
-        null,
-        PREPARE_FOR_EXPORT,
-        true,
-        "This command will prepare copylistings for the specified snapshot."
-            + " This command should only be used only if Solr is deployed with 
Hadoop and collection index files are stored on a shared"
-            + " file-system e.g. HDFS");
-
-    options.addOption(
-        null,
-        HELP,
-        false,
-        "This command will print the help message for the snapshots related 
commands.");
-    options.addOption(
-        TEMP_DIR,
-        true,
-        "This parameter specifies the path of a temporary directory on local 
filesystem"
-            + " during prepare-snapshot-export command.");
-    options.addOption(
-        DEST_DIR,
-        true,
-        "This parameter specifies the path on shared file-system (e.g. HDFS) 
where the snapshot related"
-            + " information should be stored.");
-    options.addOption(
-        COLLECTION,
-        true,
-        "This parameter specifies the name of the collection to be used during 
snapshot operation");
-    options.addOption(
-        SOLR_ZK_ENSEMBLE, true, "This parameter specifies the Solr Zookeeper 
ensemble address");
-    options.addOption(
-        HDFS_PATH_PREFIX,
-        true,
-        "This parameter specifies the HDFS URI prefix to be used"
-            + " during snapshot export preparation. This is applicable only if 
the Solr collection index files are stored on HDFS.");
+    options.addOption(PREPARE_FOR_EXPORT_OPTION);
+
+    options.addOption(CommonCLIOptions.HELP_OPTION);
+    options.addOption(TEMP_DIR_OPTION);
+    options.addOption(DEST_DIR_OPTION);
+    options.addOption(COLLECTION_OPTION);
+    options.addOption(CommonCLIOptions.ZK_HOST_OPTION);
+    options.addOption(HDFS_PATH_PREFIX_OPTION);
 
     CommandLine cmd = null;
     try {
@@ -295,15 +301,15 @@ public class SolrOnHdfsSnapshotsTool implements 
Closeable, CLIO {
       System.exit(1);
     }
 
-    if (cmd.hasOption(PREPARE_FOR_EXPORT)) {
+    if (cmd.hasOption(PREPARE_FOR_EXPORT_OPTION)) {
       try (SolrOnHdfsSnapshotsTool tool =
-          new SolrOnHdfsSnapshotsTool(requiredArg(options, cmd, 
SOLR_ZK_ENSEMBLE))) {
-        if (cmd.hasOption(PREPARE_FOR_EXPORT)) {
-          String snapshotName = cmd.getOptionValue(PREPARE_FOR_EXPORT);
-          String collectionName = requiredArg(options, cmd, COLLECTION);
-          String localFsDir = requiredArg(options, cmd, TEMP_DIR);
-          String hdfsOpDir = requiredArg(options, cmd, DEST_DIR);
-          String pathPrefix = cmd.getOptionValue(HDFS_PATH_PREFIX);
+          new SolrOnHdfsSnapshotsTool(requiredArg(options, cmd, 
CommonCLIOptions.ZK_HOST_OPTION))) {
+        if (cmd.hasOption(PREPARE_FOR_EXPORT_OPTION)) {
+          String snapshotName = cmd.getOptionValue(PREPARE_FOR_EXPORT_OPTION);
+          String collectionName = requiredArg(options, cmd, COLLECTION_OPTION);
+          String localFsDir = requiredArg(options, cmd, TEMP_DIR_OPTION);
+          String hdfsOpDir = requiredArg(options, cmd, DEST_DIR_OPTION);
+          String pathPrefix = cmd.getOptionValue(HDFS_PATH_PREFIX_OPTION);
 
           if (pathPrefix != null) {
             try {
@@ -320,7 +326,7 @@ public class SolrOnHdfsSnapshotsTool implements Closeable, 
CLIO {
           tool.prepareForExport(collectionName, snapshotName, localFsDir, 
pathPrefix, hdfsOpDir);
         }
       }
-    } else if (cmd.hasOption(HELP)) {
+    } else if (cmd.hasOption(CommonCLIOptions.HELP_OPTION)) {
       printHelp(options);
     } else {
       CLIO.out("Unknown command specified.");
@@ -328,7 +334,7 @@ public class SolrOnHdfsSnapshotsTool implements Closeable, 
CLIO {
     }
   }
 
-  private static String requiredArg(Options options, CommandLine cmd, String 
optVal) {
+  private static String requiredArg(Options options, CommandLine cmd, Option 
optVal) {
     if (!cmd.hasOption(optVal)) {
       CLIO.out("Please specify the value for option " + optVal);
       printHelp(options);
diff --git a/solr/packaging/test/test_stream.bats 
b/solr/packaging/test/test_stream.bats
index b2f5072a3d8..2043d9c0c32 100644
--- a/solr/packaging/test/test_stream.bats
+++ b/solr/packaging/test/test_stream.bats
@@ -62,7 +62,7 @@ teardown() {
   echo 'sort="price desc"' >> "${solr_stream_file}"
   echo ')' >> "${solr_stream_file}"
   
-  run solr stream -e remote --name techproducts --solr-url 
http://localhost:${SOLR_PORT} --header --credentials name:password 
${solr_stream_file}
+  run solr stream --name techproducts --solr-url http://localhost:${SOLR_PORT} 
--header --credentials name:password ${solr_stream_file}
 
   assert_output --partial 'name   price'
   assert_output --partial 'CORSAIR  XMS'
diff --git a/solr/solr-ref-guide/modules/query-guide/pages/stream-tool.adoc 
b/solr/solr-ref-guide/modules/query-guide/pages/stream-tool.adoc
index 20fe2458e42..acb5d6964e7 100644
--- a/solr/solr-ref-guide/modules/query-guide/pages/stream-tool.adoc
+++ b/solr/solr-ref-guide/modules/query-guide/pages/stream-tool.adoc
@@ -27,17 +27,17 @@ To run it, open a terminal and enter:
 
 [,console]
 ----
-$ bin/solr stream --header -c techproducts --delimiter=\| 
'search(techproducts,q="name:memory",fl="name,price")'
+$ bin/solr stream -c techproducts --header --fields price,name --delimiter=\| 
'search(techproducts,q="iPod",fl="name,price")'
 ----
 
 This will run the provided streaming expression on the `techproducts` 
collection on your local Solr and produce:  
 
 [,console]
 ----
-name|price
-CORSAIR  XMS 2GB (2 x 1GB) 184-Pin DDR SDRAM Unbuffered DDR 400 (PC 3200) Dual 
Channel Kit System Memory - Retail|185.0
-CORSAIR ValueSelect 1GB 184-Pin DDR SDRAM Unbuffered DDR 400 (PC 3200) System 
Memory - Retail|74.99
-A-DATA V-Series 1GB 184-Pin DDR SDRAM Unbuffered DDR 400 (PC 3200) System 
Memory - OEM|
+price|name
+11.5|iPod & iPod Mini USB 2.0 Cable
+19.95|Belkin Mobile Power Cord for iPod w/ Dock
+399.0|Apple 60 GB iPod with Video Playback Black
 ----
 
 TIP: Notice how we used the pipe character (|) as the delimiter?  It required 
a backslash for escaping it so it wouldn't be treated as a pipe within the 
shell script.
@@ -50,14 +50,23 @@ Assuming you have created the file `stream.expr` with the 
contents:
 ----
 # Stream a search
 
+/*
+ You can have
+ multi-line comments as well.
+*/
+
 search(
   techproducts,
   q="name:memory",
   fl="name,price",
   sort="price desc"
 )
+
+// the end
 ----
 
+Notice the various ways to format comments in the file?
+
 Then you can run it on the Solr collection `techproducts`, specifying you want 
a header row:
 
 [,console]
@@ -83,7 +92,7 @@ The `--help` (or simply `-h`) option will output information 
on its usage (i.e.,
 
 [source,plain]
 ----
-usage: bin/solr stream [--array-delimiter <CHARACTER>] [-c <NAME>] 
[--delimiter <CHARACTER>] [-e <ENVIRONMENT>] [-f
+usage: bin/solr stream [--array-delimiter <CHARACTER>] [-c <NAME>] 
[--delimiter <CHARACTER>] [--execution <ENVIRONMENT>] [--fields
        <FIELDS>] [-h] [--header] [-s <HOST>] [-u <credentials>] [-v] [-z 
<HOST>]
 
 List of options:
@@ -91,9 +100,9 @@ List of options:
  -c,--name <NAME>                   Name of the specific collection to execute 
expression on if the execution is set
                                     to 'remote'. Required for 'remote' 
execution environment.
     --delimiter <CHARACTER>         The output delimiter. Default to using 
three spaces.
- -e,--execution <ENVIRONMENT>       Execution environment is either 'local' 
(i.e CLI process) or via a 'remote' Solr
+    --execution <ENVIRONMENT>       Execution environment is either 'local' 
(i.e CLI process) or via a 'remote' Solr
                                     server. Default environment is 'remote'.
- -f,--fields <FIELDS>               The fields in the tuples to output. 
Defaults to fields in the first tuple of result
+    --fields <FIELDS>               The fields in the tuples to output. 
Defaults to fields in the first tuple of result
                                     set.
  -h,--help                          Print this message.
     --header                        Specify to include a header line.
@@ -173,4 +182,21 @@ $ bin/solr stream -c techproducts 'echo("$1")' "Hello 
World"
 Hello World
 ----
 
-This also works when using `.expr` files.
+This also works when using `.expr` files.  You can nest as many variables as 
you want.
+Here is an example of passing in both the file and the number of lines to 
process:
+
+----
+# Index CSV File
+
+update(
+  gettingstarted,
+  parseCSV(
+    cat($1, maxLines=$2)
+  )
+)
+----
+
+[,console]
+----
+$ bin/solr stream -c techproducts file.expr ./example/exampledocs/books.csv 10
+----
diff --git 
a/solr/solrj-streaming/src/java/org/apache/solr/client/solrj/io/stream/LetStream.java
 
b/solr/solrj-streaming/src/java/org/apache/solr/client/solrj/io/stream/LetStream.java
index be95e2271a3..d05942a1593 100644
--- 
a/solr/solrj-streaming/src/java/org/apache/solr/client/solrj/io/stream/LetStream.java
+++ 
b/solr/solrj-streaming/src/java/org/apache/solr/client/solrj/io/stream/LetStream.java
@@ -222,9 +222,4 @@ public class LetStream extends TupleStream implements 
Expressible {
   public int getCost() {
     return 0;
   }
-
-  @SuppressWarnings({"rawtypes"})
-  public Map getLetParams() {
-    return this.letParams;
-  }
 }

Reply via email to