This is an automated email from the ASF dual-hosted git repository.

epugh pushed a commit to branch branch_10x
in repository https://gitbox.apache.org/repos/asf/solr.git


The following commit(s) were added to refs/heads/branch_10x by this push:
     new ffe39411969 Chasing typos in source code comments (#3886)
ffe39411969 is described below

commit ffe39411969aa4df787bcafdcec4e1f0be94534c
Author: Eric Pugh <[email protected]>
AuthorDate: Sun Nov 30 18:36:33 2025 -0500

    Chasing typos in source code comments (#3886)
---
 solr/core/src/java/org/apache/solr/api/ApiBag.java                  | 2 +-
 solr/core/src/java/org/apache/solr/blockcache/BlockDirectory.java   | 2 +-
 solr/core/src/java/org/apache/solr/cloud/Overseer.java              | 2 +-
 .../src/java/org/apache/solr/handler/admin/RebalanceLeaders.java    | 2 +-
 .../src/java/org/apache/solr/handler/component/SearchHandler.java   | 4 ++--
 .../core/src/java/org/apache/solr/handler/component/StatsField.java | 2 +-
 .../src/java/org/apache/solr/handler/tagger/OffsetCorrector.java    | 4 ++--
 .../solr/rest/schema/analysis/ManagedSynonymGraphFilterFactory.java | 2 +-
 .../src/java/org/apache/solr/search/CollapsingQParserPlugin.java    | 6 +++---
 .../src/java/org/apache/solr/search/facet/FacetRangeProcessor.java  | 2 +-
 solr/core/src/java/org/apache/solr/search/facet/SlotAcc.java        | 2 +-
 .../org/apache/solr/search/neural/AbstractVectorQParserBase.java    | 2 +-
 solr/core/src/java/org/apache/solr/update/DocumentBuilder.java      | 2 +-
 solr/core/src/java/org/apache/solr/update/TransactionLog.java       | 2 +-
 .../update/processor/AddSchemaFieldsUpdateProcessorFactory.java     | 2 +-
 .../apache/solr/update/processor/AtomicUpdateDocumentMerger.java    | 2 +-
 .../src/test/org/apache/solr/blockcache/BlockDirectoryTest.java     | 2 +-
 .../solr/cloud/ChaosMonkeyNothingIsSafeWithPullReplicasTest.java    | 2 +-
 .../src/test/org/apache/solr/cloud/TestCloudPseudoReturnFields.java | 4 ++--
 solr/core/src/test/org/apache/solr/schema/DocValuesMissingTest.java | 4 ++--
 solr/core/src/test/org/apache/solr/update/SoftAutoCommitTest.java   | 4 ++--
 .../apache/solr/common/cloud/CollectionPropertiesZkStateReader.java | 2 +-
 .../src/test/org/apache/solr/client/solrj/SolrExampleTests.java     | 2 +-
 .../src/java/org/apache/solr/BaseDistributedSearchTestCase.java     | 2 +-
 .../apache/solr/cloud/AbstractChaosMonkeyNothingIsSafeTestBase.java | 2 +-
 .../solr/cloud/api/collections/AbstractIncrementalBackupTest.java   | 2 +-
 solr/test-framework/src/java/org/apache/solr/util/LogListener.java  | 2 +-
 27 files changed, 34 insertions(+), 34 deletions(-)

diff --git a/solr/core/src/java/org/apache/solr/api/ApiBag.java 
b/solr/core/src/java/org/apache/solr/api/ApiBag.java
index 1d9a6d2a80d..70a1964263a 100644
--- a/solr/core/src/java/org/apache/solr/api/ApiBag.java
+++ b/solr/core/src/java/org/apache/solr/api/ApiBag.java
@@ -163,7 +163,7 @@ public class ApiBag {
         getCommands().put(entry.getKey(), entry.getValue());
       }
 
-      // Reference to Api must be saved to to merge uncached values (i.e. 
'spec') lazily
+      // Reference to Api must be saved to merge uncached values (i.e. 'spec') 
lazily
       if (newCommandsAdded) {
         combinedApis.add(api);
       }
diff --git a/solr/core/src/java/org/apache/solr/blockcache/BlockDirectory.java 
b/solr/core/src/java/org/apache/solr/blockcache/BlockDirectory.java
index 43f702ebce9..fdd48350f7a 100644
--- a/solr/core/src/java/org/apache/solr/blockcache/BlockDirectory.java
+++ b/solr/core/src/java/org/apache/solr/blockcache/BlockDirectory.java
@@ -349,7 +349,7 @@ public class BlockDirectory extends FilterDirectory 
implements ShutdownAwareDire
   boolean useWriteCache(String name, IOContext context) {
     if (!blockCacheWriteEnabled || 
name.startsWith(IndexFileNames.PENDING_SEGMENTS)) {
       // for safety, don't bother caching pending commits.
-      // the cache does support renaming (renameCacheFile), but thats a scary 
optimization.
+      // the cache does support renaming (renameCacheFile), but that's a scary 
optimization.
       return false;
     }
     if (blockCacheFileTypes != null && !isCachableFile(name)) {
diff --git a/solr/core/src/java/org/apache/solr/cloud/Overseer.java 
b/solr/core/src/java/org/apache/solr/cloud/Overseer.java
index 10746a1fcda..f67d358b6ab 100644
--- a/solr/core/src/java/org/apache/solr/cloud/Overseer.java
+++ b/solr/core/src/java/org/apache/solr/cloud/Overseer.java
@@ -406,7 +406,7 @@ public class Overseer implements SolrCloseable {
     }
 
    // Return true whenever the exception thrown by ZkStateWriter corresponds
-    // to a invalid state or 'bad' message (in this case, we should remove 
that message from queue)
+    // to an invalid state or 'bad' message (in this case, we should remove 
that message from queue)
     private boolean isBadMessage(Exception e) {
       if (e instanceof KeeperException ke) {
         return ke.code() == KeeperException.Code.NONODE
diff --git 
a/solr/core/src/java/org/apache/solr/handler/admin/RebalanceLeaders.java 
b/solr/core/src/java/org/apache/solr/handler/admin/RebalanceLeaders.java
index f2079cfcbea..2bebe2ba3ac 100644
--- a/solr/core/src/java/org/apache/solr/handler/admin/RebalanceLeaders.java
+++ b/solr/core/src/java/org/apache/solr/handler/admin/RebalanceLeaders.java
@@ -325,7 +325,7 @@ class RebalanceLeaders {
   }
 
   // Provide some feedback to the user about what actually happened, or in 
this case where no action
-  // was necesary since this preferred replica was already the leader
+  // was necessary since this preferred replica was already the leader
   private void addAlreadyLeaderToResults(Slice slice, Replica replica) {
     SimpleOrderedMap<SimpleOrderedMap<String>> alreadyLeaders = 
results.get(ALREADY_LEADERS);
     if (alreadyLeaders == null) {
diff --git 
a/solr/core/src/java/org/apache/solr/handler/component/SearchHandler.java 
b/solr/core/src/java/org/apache/solr/handler/component/SearchHandler.java
index 0d54489696e..9c978be34b4 100644
--- a/solr/core/src/java/org/apache/solr/handler/component/SearchHandler.java
+++ b/solr/core/src/java/org/apache/solr/handler/component/SearchHandler.java
@@ -580,7 +580,7 @@ public class SearchHandler extends RequestHandlerBase
                 String reqPath = (String) req.getContext().get(PATH);
                 if (!"/select".equals(reqPath)) {
                   params.set(CommonParams.QT, reqPath);
-                } // else if path is /select, then the qt gets passed thru if 
set
+                } // else if path is /select, then the qt gets passed through 
if set
               }
               if (queryLimits.isLimitsEnabled()) {
                 if (queryLimits.adjustShardRequestLimits(sreq, shard, params, 
rb)) {
@@ -746,7 +746,7 @@ public class SearchHandler extends RequestHandlerBase
   }
 
   protected String stageToString(int stage) {
-    // This should probably be a enum, but that change should be its own 
ticket.
+    // This should probably be an enum, but that change should be its own 
ticket.
     switch (stage) {
       case STAGE_START:
         return "START";
diff --git 
a/solr/core/src/java/org/apache/solr/handler/component/StatsField.java 
b/solr/core/src/java/org/apache/solr/handler/component/StatsField.java
index 914f39f9c94..1a679760784 100644
--- a/solr/core/src/java/org/apache/solr/handler/component/StatsField.java
+++ b/solr/core/src/java/org/apache/solr/handler/component/StatsField.java
@@ -624,7 +624,7 @@ public class StatsField {
 
     // NOTE: this explanation linked to from the java-hll jdocs...
     // 
https://github.com/aggregateknowledge/postgresql-hll/blob/master/README.markdown#explanation-of-parameters-and-tuning
-    // ..if i'm understanding the regwidth chart correctly, a value of 6 
should be a enough
+    // ..if i'm understanding the regwidth chart correctly, a value of 6 
should be enough
     // to support any max cardinality given that we're always dealing with 
hashes and
     // the cardinality of the set of all long values is 2**64 == 1.9e19
     //
diff --git 
a/solr/core/src/java/org/apache/solr/handler/tagger/OffsetCorrector.java 
b/solr/core/src/java/org/apache/solr/handler/tagger/OffsetCorrector.java
index d5ad59fab97..dfa76fbf96e 100644
--- a/solr/core/src/java/org/apache/solr/handler/tagger/OffsetCorrector.java
+++ b/solr/core/src/java/org/apache/solr/handler/tagger/OffsetCorrector.java
@@ -98,7 +98,7 @@ public abstract class OffsetCorrector {
     // Find the ancestor tag enclosing offsetPair.  And bump out left offset 
along the way.
     int iTag = startTag;
     for (; !tagEnclosesOffset(iTag, rightOffset); iTag = getParentTag(iTag)) {
-      // Ensure there is nothing except whitespace thru OpenEndOff
+      // Ensure there is nothing except whitespace through OpenEndOff
       int tagOpenEndOff = getOpenEndOff(iTag);
       if (hasNonWhitespace(tagOpenEndOff, leftOffset)) return null;
       leftOffset = getOpenStartOff(iTag);
@@ -106,7 +106,7 @@ public abstract class OffsetCorrector {
     final int ancestorTag = iTag;
     // Bump out rightOffset until we get to ancestorTag.
     for (iTag = endTag; iTag != ancestorTag; iTag = getParentTag(iTag)) {
-      // Ensure there is nothing except whitespace thru CloseStartOff
+      // Ensure there is nothing except whitespace through CloseStartOff
       int tagCloseStartOff = getCloseStartOff(iTag);
       if (hasNonWhitespace(rightOffset, tagCloseStartOff)) return null;
       rightOffset = getCloseEndOff(iTag);
diff --git 
a/solr/core/src/java/org/apache/solr/rest/schema/analysis/ManagedSynonymGraphFilterFactory.java
 
b/solr/core/src/java/org/apache/solr/rest/schema/analysis/ManagedSynonymGraphFilterFactory.java
index 1d91fb8686f..6b9e6d6bedd 100644
--- 
a/solr/core/src/java/org/apache/solr/rest/schema/analysis/ManagedSynonymGraphFilterFactory.java
+++ 
b/solr/core/src/java/org/apache/solr/rest/schema/analysis/ManagedSynonymGraphFilterFactory.java
@@ -433,7 +433,7 @@ public class ManagedSynonymGraphFilterFactory extends 
BaseManagedTokenFilterFact
 
             ManagedSynonymParser parser =
                 new ManagedSynonymParser((SynonymManager) res, dedup, 
analyzer);
-            // null is safe here because there's no actual parsing done 
against a input Reader
+            // null is safe here because there's no actual parsing done 
against an input Reader
             parser.parse(null);
             return parser.build();
           }
diff --git 
a/solr/core/src/java/org/apache/solr/search/CollapsingQParserPlugin.java 
b/solr/core/src/java/org/apache/solr/search/CollapsingQParserPlugin.java
index d85932bb3ed..9f127f7d460 100644
--- a/solr/core/src/java/org/apache/solr/search/CollapsingQParserPlugin.java
+++ b/solr/core/src/java/org/apache/solr/search/CollapsingQParserPlugin.java
@@ -2183,7 +2183,7 @@ public class CollapsingQParserPlugin extends 
QParserPlugin {
 
         if (collapseFieldType instanceof StrField) {
           if (blockCollapse) {
-            // NOTE: for now we don't worry about wether this is a sortSpec of 
min/max
+            // NOTE: for now we don't worry about whether this is a sortSpec 
of min/max
             // groupHeadSelector, we use a "sort spec" based block collector 
unless/until there is
             // some (performance?) reason to specialize
             return new BlockOrdSortSpecCollector(
@@ -2212,7 +2212,7 @@ public class CollapsingQParserPlugin extends 
QParserPlugin {
         } else if (isNumericCollapsible(collapseFieldType)) {
 
           if (blockCollapse) {
-            // NOTE: for now we don't worry about wether this is a sortSpec of 
min/max
+            // NOTE: for now we don't worry about whether this is a sortSpec 
of min/max
             // groupHeadSelector, we use a "sort spec" based block collector 
unless/until there is
             // some (performance?) reason to specialize
             return new BlockIntSortSpecCollector(
@@ -3407,7 +3407,7 @@ public class CollapsingQParserPlugin extends 
QParserPlugin {
       boostedDocsIdsIter = getMergeBoost();
     }
 
-    /** True if there are any requested boosts (regardless of wether any have 
been collected) */
+    /** True if there are any requested boosts (regardless of whether any have 
been collected) */
     public boolean hasBoosts() {
       return hasBoosts;
     }
diff --git 
a/solr/core/src/java/org/apache/solr/search/facet/FacetRangeProcessor.java 
b/solr/core/src/java/org/apache/solr/search/facet/FacetRangeProcessor.java
index 08b034bc0cb..a3d7165ea46 100644
--- a/solr/core/src/java/org/apache/solr/search/facet/FacetRangeProcessor.java
+++ b/solr/core/src/java/org/apache/solr/search/facet/FacetRangeProcessor.java
@@ -49,7 +49,7 @@ import org.apache.solr.util.DateMathParser;
 
 class FacetRangeProcessor extends FacetProcessor<FacetRange> {
   // TODO: the code paths for initial faceting, vs refinement, are very 
different...
-  // TODO: ...it might make sense to have seperate classes w/a common base?
+  // TODO: ...it might make sense to have separate classes w/a common base?
   // TODO: let FacetRange.createFacetProcessor decide which one to instantiate?
 
   final SchemaField sf;
diff --git a/solr/core/src/java/org/apache/solr/search/facet/SlotAcc.java 
b/solr/core/src/java/org/apache/solr/search/facet/SlotAcc.java
index 9ea0df74159..93ea6474965 100644
--- a/solr/core/src/java/org/apache/solr/search/facet/SlotAcc.java
+++ b/solr/core/src/java/org/apache/solr/search/facet/SlotAcc.java
@@ -834,7 +834,7 @@ public abstract class SlotAcc implements Closeable {
   abstract static class CountSlotAcc extends SlotAcc implements 
ReadOnlyCountSlotAcc {
     public CountSlotAcc(FacetContext fcontext) {
       super(fcontext);
-      // assume we are the 'count' by default unless/untill our creator 
overrides this
+      // assume we are the 'count' by default unless/until our creator 
overrides this
       this.key = "count";
     }
 
diff --git 
a/solr/core/src/java/org/apache/solr/search/neural/AbstractVectorQParserBase.java
 
b/solr/core/src/java/org/apache/solr/search/neural/AbstractVectorQParserBase.java
index d7ab9c72938..065180d7e4a 100644
--- 
a/solr/core/src/java/org/apache/solr/search/neural/AbstractVectorQParserBase.java
+++ 
b/solr/core/src/java/org/apache/solr/search/neural/AbstractVectorQParserBase.java
@@ -79,7 +79,7 @@ public abstract class AbstractVectorQParserBase extends 
QParser {
   protected Query getFilterQuery() throws SolrException, SyntaxError {
 
     // Default behavior of FQ wrapping, and suitability of some local params
-    // depends on wether we are a sub-query or not
+    // depends on whether we are a sub-query or not
     final boolean isSubQuery = recurseCount != 0;
 
     // include/exclude tags for global fqs to wrap;
diff --git a/solr/core/src/java/org/apache/solr/update/DocumentBuilder.java 
b/solr/core/src/java/org/apache/solr/update/DocumentBuilder.java
index c11502672f7..61e1ae8d0b9 100644
--- a/solr/core/src/java/org/apache/solr/update/DocumentBuilder.java
+++ b/solr/core/src/java/org/apache/solr/update/DocumentBuilder.java
@@ -356,7 +356,7 @@ public class DocumentBuilder {
           destinationField,
           fieldValue,
           destinationField.getName().equals(uniqueKeyFieldName) ? false : 
forInPlaceUpdate);
-      // record the field as having a originalFieldValue
+      // record the field as having an originalFieldValue
       usedFields.add(destinationField.getName());
       used = true;
     }
diff --git a/solr/core/src/java/org/apache/solr/update/TransactionLog.java 
b/solr/core/src/java/org/apache/solr/update/TransactionLog.java
index 2169eaa695c..c81d9f26623 100644
--- a/solr/core/src/java/org/apache/solr/update/TransactionLog.java
+++ b/solr/core/src/java/org/apache/solr/update/TransactionLog.java
@@ -648,7 +648,7 @@ public class TransactionLog implements Closeable {
         try {
           Files.deleteIfExists(tlog);
         } catch (IOException e) {
-          // TODO: should this class care if a file couldnt be deleted?
+          // TODO: should this class care if a file couldn't be deleted?
           // this just emulates previous behavior, where only 
SecurityException would be handled.
         }
       }
diff --git 
a/solr/core/src/java/org/apache/solr/update/processor/AddSchemaFieldsUpdateProcessorFactory.java
 
b/solr/core/src/java/org/apache/solr/update/processor/AddSchemaFieldsUpdateProcessorFactory.java
index daea8ce83a4..50c711458c9 100644
--- 
a/solr/core/src/java/org/apache/solr/update/processor/AddSchemaFieldsUpdateProcessorFactory.java
+++ 
b/solr/core/src/java/org/apache/solr/update/processor/AddSchemaFieldsUpdateProcessorFactory.java
@@ -571,7 +571,7 @@ public class AddSchemaFieldsUpdateProcessorFactory extends 
UpdateRequestProcesso
       NEXT_TYPE_MAPPING:
       for (TypeMapping typeMapping : typeMappings) {
         for (SolrInputField field : fields) {
-          // We do a assert and a null check because even after SOLR-12710 is 
addressed
+          // We do an assert and a null check because even after SOLR-12710 is 
addressed
           // older SolrJ versions can send null values causing an NPE
           assert field.getValues() != null;
           if (field.getValues() != null) {
diff --git 
a/solr/core/src/java/org/apache/solr/update/processor/AtomicUpdateDocumentMerger.java
 
b/solr/core/src/java/org/apache/solr/update/processor/AtomicUpdateDocumentMerger.java
index 01fab007eb3..dfee7e1f8f7 100644
--- 
a/solr/core/src/java/org/apache/solr/update/processor/AtomicUpdateDocumentMerger.java
+++ 
b/solr/core/src/java/org/apache/solr/update/processor/AtomicUpdateDocumentMerger.java
@@ -268,7 +268,7 @@ public class AtomicUpdateDocumentMerger {
         // not an in-place update if there are fields that are not maps
         return Collections.emptySet();
       }
-      // else it's a atomic update map...
+      // else it's an atomic update map...
       Map<String, Object> fieldValueMap = (Map<String, Object>) fieldValue;
       for (Entry<String, Object> entry : fieldValueMap.entrySet()) {
         String op = entry.getKey();
diff --git 
a/solr/core/src/test/org/apache/solr/blockcache/BlockDirectoryTest.java 
b/solr/core/src/test/org/apache/solr/blockcache/BlockDirectoryTest.java
index 8cd07e58629..dbd69bca4d9 100644
--- a/solr/core/src/test/org/apache/solr/blockcache/BlockDirectoryTest.java
+++ b/solr/core/src/test/org/apache/solr/blockcache/BlockDirectoryTest.java
@@ -257,7 +257,7 @@ public class BlockDirectoryTest extends SolrTestCaseJ4 {
     try {
       IOUtils.rm(file);
     } catch (Throwable ignored) {
-      // TODO: should this class care if a file couldnt be deleted?
+      // TODO: should this class care if a file couldn't be deleted?
       // this just emulates previous behavior, where only SecurityException 
would be handled.
     }
   }
diff --git 
a/solr/core/src/test/org/apache/solr/cloud/ChaosMonkeyNothingIsSafeWithPullReplicasTest.java
 
b/solr/core/src/test/org/apache/solr/cloud/ChaosMonkeyNothingIsSafeWithPullReplicasTest.java
index 6800052434e..9082928a828 100644
--- 
a/solr/core/src/test/org/apache/solr/cloud/ChaosMonkeyNothingIsSafeWithPullReplicasTest.java
+++ 
b/solr/core/src/test/org/apache/solr/cloud/ChaosMonkeyNothingIsSafeWithPullReplicasTest.java
@@ -280,7 +280,7 @@ public class ChaosMonkeyNothingIsSafeWithPullReplicasTest 
extends AbstractFullDi
 
       commit();
 
-      // TODO: assert we didnt kill everyone
+      // TODO: assert we didn't kill everyone
 
       zkStateReader.updateLiveNodes();
       assertTrue(zkStateReader.getClusterState().getLiveNodes().size() > 0);
diff --git 
a/solr/core/src/test/org/apache/solr/cloud/TestCloudPseudoReturnFields.java 
b/solr/core/src/test/org/apache/solr/cloud/TestCloudPseudoReturnFields.java
index c9c2e03a62c..483c9b9ecf5 100644
--- a/solr/core/src/test/org/apache/solr/cloud/TestCloudPseudoReturnFields.java
+++ b/solr/core/src/test/org/apache/solr/cloud/TestCloudPseudoReturnFields.java
@@ -72,7 +72,7 @@ public class TestCloudPseudoReturnFields extends 
SolrCloudTestCase {
 
   @BeforeClass
   public static void createMiniSolrCloudCluster() throws Exception {
-    // replication factor will impact wether we expect a list of urls from the 
'[shard]'
+    // replication factor will impact whether we expect a list of urls from 
the '[shard]'
     // augmenter...
     repFactor = usually() ? 1 : 2;
     // ... and we definitely want to ensure forwarded requests to other shards 
work ...
@@ -218,7 +218,7 @@ public class TestCloudPseudoReturnFields extends 
SolrCloudTestCase {
     // that way we can first sanity check a single value in a multivalued 
field is returned
     // correctly as a "List" of one element, *AND* then we could be testing 
that a (single valued)
     // pseudo-field correctly overrides that actual (real) value in a 
multivalued field (ie: not
-    // returning a an List)
+    // returning a List)
     //
     // (NOTE: not doing this yet due to how it will impact most other tests, 
many of which are
     // currently @AwaitsFix status)
diff --git 
a/solr/core/src/test/org/apache/solr/schema/DocValuesMissingTest.java 
b/solr/core/src/test/org/apache/solr/schema/DocValuesMissingTest.java
index 244e162802a..dfbebaa72dc 100644
--- a/solr/core/src/test/org/apache/solr/schema/DocValuesMissingTest.java
+++ b/solr/core/src/test/org/apache/solr/schema/DocValuesMissingTest.java
@@ -436,7 +436,7 @@ public class DocValuesMissingTest extends SolrTestCaseJ4 {
   @Test
   public void testStringSort() {
 
-    // note: cant use checkSortMissingDefault because
+    // note: can't use checkSortMissingDefault because
     // nothing sorts lower than the default of ""
     for (String field : new String[] {"stringdv", "dyn_stringdv"}) {
       assertU(adoc("id", "0")); // missing
@@ -520,7 +520,7 @@ public class DocValuesMissingTest extends SolrTestCaseJ4 {
   /** bool (and dynamic bool) with default lucene sort (treats as "") */
   @Test
   public void testBoolSort() {
-    // note: cant use checkSortMissingDefault because
+    // note: can't use checkSortMissingDefault because
     // nothing sorts lower than the default of "" and
     // bool fields are, at root, string fields.
     for (String field : new String[] {"booldv", "dyn_booldv"}) {
diff --git a/solr/core/src/test/org/apache/solr/update/SoftAutoCommitTest.java 
b/solr/core/src/test/org/apache/solr/update/SoftAutoCommitTest.java
index 592fc824b1f..c9134c6c43b 100644
--- a/solr/core/src/test/org/apache/solr/update/SoftAutoCommitTest.java
+++ b/solr/core/src/test/org/apache/solr/update/SoftAutoCommitTest.java
@@ -107,7 +107,7 @@ public class SoftAutoCommitTest extends SolrTestCaseJ4 {
     softTracker.setTimeUpperBound(-1);
     hardTracker.setDocsUpperBound(hardCommitMaxDocs);
     hardTracker.setTimeUpperBound(-1);
-    // simplify whats going on by only having soft auto commits trigger new 
searchers
+    // simplify what's going on by only having soft auto commits trigger new 
searchers
     hardTracker.setOpenSearcher(false);
 
     // Note: doc id counting starts at 0, see comment at start of test 
regarding "upper bound"
@@ -199,7 +199,7 @@ public class SoftAutoCommitTest extends SolrTestCaseJ4 {
     hardTracker.setTimeUpperBound(
         commitWithinType.equals(CommitWithinType.HARD) ? -1 : 
hardCommitWaitMillis);
     hardTracker.setDocsUpperBound(-1);
-    // simplify whats going on by only having soft auto commits trigger new 
searchers
+    // simplify what's going on by only having soft auto commits trigger new 
searchers
     hardTracker.setOpenSearcher(false);
 
     // Add a single document
diff --git 
a/solr/solrj-zookeeper/src/java/org/apache/solr/common/cloud/CollectionPropertiesZkStateReader.java
 
b/solr/solrj-zookeeper/src/java/org/apache/solr/common/cloud/CollectionPropertiesZkStateReader.java
index 4242e0beee9..083f7ec0def 100644
--- 
a/solr/solrj-zookeeper/src/java/org/apache/solr/common/cloud/CollectionPropertiesZkStateReader.java
+++ 
b/solr/solrj-zookeeper/src/java/org/apache/solr/common/cloud/CollectionPropertiesZkStateReader.java
@@ -242,7 +242,7 @@ public class CollectionPropertiesZkStateReader implements 
Closeable {
               collectionPropsObservers.remove(coll);
 
               // This is the one time we know it's safe to throw this out. We 
just failed to set the
-              // watch due to an NoNodeException, so it isn't held by ZK and 
can't re-set itself due
+              // watch due to a NoNodeException, so it isn't held by ZK and 
can't re-set itself due
               // to an update.
               collectionPropsWatchers.remove(coll);
             }
diff --git 
a/solr/solrj/src/test/org/apache/solr/client/solrj/SolrExampleTests.java 
b/solr/solrj/src/test/org/apache/solr/client/solrj/SolrExampleTests.java
index 50ab432931c..0756e132585 100644
--- a/solr/solrj/src/test/org/apache/solr/client/solrj/SolrExampleTests.java
+++ b/solr/solrj/src/test/org/apache/solr/client/solrj/SolrExampleTests.java
@@ -1474,7 +1474,7 @@ public abstract class SolrExampleTests extends 
SolrExampleTestsBase {
           "{!key=pivot_key stats=s1}features,manu,cat,inStock"
         }) {
 
-      // for any of these pivot params, the assertions we check should be teh 
same
+      // for any of these pivot params, the assertions we check should be the 
same
       // (we stop asserting at the "manu" level)
 
       SolrQuery query = new SolrQuery("*:*");
diff --git 
a/solr/test-framework/src/java/org/apache/solr/BaseDistributedSearchTestCase.java
 
b/solr/test-framework/src/java/org/apache/solr/BaseDistributedSearchTestCase.java
index 2ae2398d98b..cc4ed5b8369 100644
--- 
a/solr/test-framework/src/java/org/apache/solr/BaseDistributedSearchTestCase.java
+++ 
b/solr/test-framework/src/java/org/apache/solr/BaseDistributedSearchTestCase.java
@@ -983,7 +983,7 @@ public abstract class BaseDistributedSearchTestCase extends 
SolrTestCaseJ4 {
 
   protected void compareResponses(QueryResponse a, QueryResponse b) {
     if (System.getProperty("remove.version.field") != null) {
-      // we don't care if one has a version and the other doesnt -
+      // we don't care if one has a version and the other doesn't -
       // control vs distrib
       // TODO: this should prob be done by adding an ignore on _version_ 
rather than mutating the
       // responses?
diff --git 
a/solr/test-framework/src/java/org/apache/solr/cloud/AbstractChaosMonkeyNothingIsSafeTestBase.java
 
b/solr/test-framework/src/java/org/apache/solr/cloud/AbstractChaosMonkeyNothingIsSafeTestBase.java
index a80134a4252..fba15eb5416 100644
--- 
a/solr/test-framework/src/java/org/apache/solr/cloud/AbstractChaosMonkeyNothingIsSafeTestBase.java
+++ 
b/solr/test-framework/src/java/org/apache/solr/cloud/AbstractChaosMonkeyNothingIsSafeTestBase.java
@@ -246,7 +246,7 @@ public abstract class 
AbstractChaosMonkeyNothingIsSafeTestBase
 
       commit();
 
-      // TODO: assert we didnt kill everyone
+      // TODO: assert we didn't kill everyone
 
       zkStateReader.updateLiveNodes();
       assertTrue(zkStateReader.getClusterState().getLiveNodes().size() > 0);
diff --git 
a/solr/test-framework/src/java/org/apache/solr/cloud/api/collections/AbstractIncrementalBackupTest.java
 
b/solr/test-framework/src/java/org/apache/solr/cloud/api/collections/AbstractIncrementalBackupTest.java
index 1d7276a2e22..9cf6c15e0aa 100644
--- 
a/solr/test-framework/src/java/org/apache/solr/cloud/api/collections/AbstractIncrementalBackupTest.java
+++ 
b/solr/test-framework/src/java/org/apache/solr/cloud/api/collections/AbstractIncrementalBackupTest.java
@@ -714,7 +714,7 @@ public abstract class AbstractIncrementalBackupTest extends 
SolrCloudTestCase {
     }
 
     public void verify(List<URI> newFilesCopiedOver) throws IOException {
-      // Verify zk files are reuploaded to a appropriate each time a backup is 
called
+      // Verify zk files are reuploaded appropriately each time a backup 
is called
       // TODO make a little change to zk files and make sure that backed up 
files match with zk data
       BackupId prevBackupId = new BackupId(Math.max(0, numBackup - 1));
 
diff --git a/solr/test-framework/src/java/org/apache/solr/util/LogListener.java 
b/solr/test-framework/src/java/org/apache/solr/util/LogListener.java
index 9bbd08e6e09..17039025517 100644
--- a/solr/test-framework/src/java/org/apache/solr/util/LogListener.java
+++ b/solr/test-framework/src/java/org/apache/solr/util/LogListener.java
@@ -200,7 +200,7 @@ public final class LogListener implements Closeable, 
AutoCloseable {
       config.addLogger(loggerName, loggerConfig);
     }
 
-    // Regardless of wether loggerConfig exactly matches loggerName, or is an 
ancestor, if it's
+    // Regardless of whether loggerConfig exactly matches loggerName, or is an 
ancestor, if its
     // level is (strictly) more specific
     // than our configured level, it will be impossible to listen for the 
events we want - so track
     // the original level and modify as needed...

Reply via email to