This is an automated email from the ASF dual-hosted git repository.

sanjaydutt pushed a commit to branch branch_9x
in repository https://gitbox.apache.org/repos/asf/solr.git


The following commit(s) were added to refs/heads/branch_9x by this push:
     new 11fa990502b SOLR-16503: Replace default USH apache client with Http2SolrClient (#2741)
11fa990502b is described below

commit 11fa990502b88ade11a0aa1b282541ba86e07cc0
Author: Sanjay Dutt <[email protected]>
AuthorDate: Mon Jan 13 17:02:17 2025 +0530

    SOLR-16503: Replace default USH apache client with Http2SolrClient (#2741)
    
    Co-authored-by: David Smiley <[email protected]>
    (cherry picked from commit 98906f41a5e25450cc2c26f4b162f4eb662636bf)
---
 solr/CHANGES.txt                                   |   2 +
 .../src/java/org/apache/solr/cloud/Overseer.java   |  24 +-
 .../api/collections/ReindexCollectionCmd.java      | 276 ++++++++++-----------
 .../apache/solr/filestore/DistribFileStore.java    |  96 ++++---
 .../src/java/org/apache/solr/pkg/PackageAPI.java   |  55 +++-
 .../solr/security/PKIAuthenticationPlugin.java     |  29 +--
 solr/modules/cross-dc/build.gradle                 |   1 -
 .../update/processor/MirroringUpdateProcessor.java |  13 +-
 8 files changed, 269 insertions(+), 227 deletions(-)

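A note for readers skimming the diff: every file below follows the same migration, swapping ad-hoc Apache HttpClient calls (Utils.executeGET and friends) for requests routed through the container-managed, Jetty-based client. A minimal sketch of the recurring shape, assuming a CoreContainer in scope; the helper name, base URL, and path are illustrative stand-ins, not code from the commit:

    import java.io.IOException;
    import org.apache.solr.client.solrj.SolrRequest;
    import org.apache.solr.client.solrj.SolrServerException;
    import org.apache.solr.client.solrj.request.GenericSolrRequest;
    import org.apache.solr.common.params.ModifiableSolrParams;
    import org.apache.solr.common.util.NamedList;
    import org.apache.solr.core.CoreContainer;

    // Hypothetical helper (not in the commit) showing the recurring pattern.
    class NodeRequestSketch {
      static NamedList<Object> getFromNode(CoreContainer coreContainer, String baseUrl)
          throws SolrServerException, IOException {
        // The container owns and reuses this client; callers must not close it.
        var solrClient = coreContainer.getDefaultHttpSolrClient();
        var params = new ModifiableSolrParams();
        params.add("omitHeader", "true");
        var request = new GenericSolrRequest(SolrRequest.METHOD.GET, "/node/files/example.txt", params);
        // requestWithBaseUrl targets one node without building a throwaway per-node client.
        return solrClient.requestWithBaseUrl(baseUrl, request::process).getResponse();
      }
    }
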
diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt
index 7bf6c07eb0f..83bcbf5af6e 100644
--- a/solr/CHANGES.txt
+++ b/solr/CHANGES.txt
@@ -44,6 +44,8 @@ Other Changes
 
 * GITHUB#2680: Improve reliability of NpmTasks finding needed files/commands. (Tyler Bertrand via Eric Pugh)
 
+* SOLR-16503: Most remaining usages of Apache HttpClient in Solr switched to Jetty HttpClient (HTTP 2). (Sanjay Dutt, David Smiley)
+
 ==================  9.8.0 ==================
 New Features
 ---------------------
diff --git a/solr/core/src/java/org/apache/solr/cloud/Overseer.java b/solr/core/src/java/org/apache/solr/cloud/Overseer.java
index bd91a226a5d..ab666a70bf4 100644
--- a/solr/core/src/java/org/apache/solr/cloud/Overseer.java
+++ b/solr/core/src/java/org/apache/solr/cloud/Overseer.java
@@ -36,8 +36,8 @@ import java.util.function.BiConsumer;
 import org.apache.lucene.util.Version;
 import org.apache.solr.client.solrj.SolrServerException;
 import org.apache.solr.client.solrj.cloud.SolrCloudManager;
-import org.apache.solr.client.solrj.impl.CloudLegacySolrClient;
-import org.apache.solr.client.solrj.impl.CloudSolrClient;
+import org.apache.solr.client.solrj.impl.CloudHttp2SolrClient;
+import org.apache.solr.client.solrj.impl.Http2SolrClient;
 import org.apache.solr.client.solrj.request.CollectionAdminRequest;
 import org.apache.solr.client.solrj.response.CollectionAdminResponse;
 import org.apache.solr.cloud.api.collections.CreateCollectionCmd;
@@ -856,13 +856,19 @@ public class Overseer implements SolrCloseable {
     } else {
       return;
     }
-    try (CloudSolrClient client =
-        new CloudLegacySolrClient.Builder(
-                Collections.singletonList(getZkController().getZkServerAddress()), Optional.empty())
-            .withSocketTimeout(30000, TimeUnit.MILLISECONDS)
-            .withConnectionTimeout(15000, TimeUnit.MILLISECONDS)
-            .withHttpClient(updateShardHandler.getDefaultHttpClient())
-            .build()) {
+
+    try (var solrClient =
+            new Http2SolrClient.Builder()
+                .withHttpClient(getCoreContainer().getDefaultHttpSolrClient())
+                .withIdleTimeout(30000, TimeUnit.MILLISECONDS)
+                .withConnectionTimeout(15000, TimeUnit.MILLISECONDS)
+                .build();
+        var client =
+            new CloudHttp2SolrClient.Builder(
+                    Collections.singletonList(getZkController().getZkServerAddress()),
+                    Optional.empty())
+                .withHttpClient(solrClient)
+                .build()) {
       CollectionAdminRequest.ColStatus req =
           CollectionAdminRequest.collectionStatus(CollectionAdminParams.SYSTEM_COLL)
               .setWithSegments(true)
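
A note on the Overseer hunk above: both clients sit in one try-with-resources, and Java closes resources in reverse declaration order, so the CloudHttp2SolrClient is closed before the Http2SolrClient it delegates to. A condensed sketch of that construction; zkServerAddress and coreContainer are stand-ins, and the comment about ownership is an assumption based on how the shared client is treated elsewhere in this commit:

    // Condensed shape of the hunk above; closing `client` is assumed not to
    // shut down the container's default client (withHttpClient reuses, not owns).
    try (var solrClient = new Http2SolrClient.Builder()
            .withHttpClient(coreContainer.getDefaultHttpSolrClient())
            .withIdleTimeout(30000, TimeUnit.MILLISECONDS)
            .withConnectionTimeout(15000, TimeUnit.MILLISECONDS)
            .build();
        var client = new CloudHttp2SolrClient.Builder(
                Collections.singletonList(zkServerAddress), Optional.empty())
            .withHttpClient(solrClient)
            .build()) {
      // ... issue the ColStatus request as in the hunk above
    }
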
diff --git a/solr/core/src/java/org/apache/solr/cloud/api/collections/ReindexCollectionCmd.java b/solr/core/src/java/org/apache/solr/cloud/api/collections/ReindexCollectionCmd.java
index 8448f6f2999..1fa86897b8f 100644
--- a/solr/core/src/java/org/apache/solr/cloud/api/collections/ReindexCollectionCmd.java
+++ b/solr/core/src/java/org/apache/solr/cloud/api/collections/ReindexCollectionCmd.java
@@ -34,12 +34,9 @@ import java.util.concurrent.atomic.AtomicInteger;
 import java.util.function.Function;
 import java.util.stream.Collectors;
 import java.util.stream.Stream;
-import org.apache.http.client.HttpClient;
-import org.apache.solr.client.solrj.SolrClient;
 import org.apache.solr.client.solrj.SolrResponse;
 import org.apache.solr.client.solrj.cloud.DistribStateManager;
 import org.apache.solr.client.solrj.impl.CloudSolrClient;
-import org.apache.solr.client.solrj.impl.HttpSolrClient;
 import org.apache.solr.client.solrj.request.CollectionAdminRequest;
 import org.apache.solr.client.solrj.request.QueryRequest;
 import org.apache.solr.client.solrj.response.QueryResponse;
@@ -736,62 +733,51 @@ public class ReindexCollectionCmd implements CollApiCmds.CollectionApiCommand {
       String targetCollection,
       Map<String, Object> reindexingState)
       throws Exception {
-    HttpClient client = ccc.getCoreContainer().getUpdateShardHandler().getDefaultHttpClient();
-    try (SolrClient solrClient =
-        new HttpSolrClient.Builder()
-            .withHttpClient(client)
-            .withBaseSolrUrl(daemonReplica.getBaseUrl())
-            .build()) {
-      ModifiableSolrParams q = new ModifiableSolrParams();
-      q.set(CommonParams.QT, "/stream");
-      q.set("action", "list");
-      q.set(CommonParams.DISTRIB, false);
-      QueryRequest req = new QueryRequest(q);
-      boolean isRunning;
-      int statusCheck = 0;
-      do {
-        isRunning = false;
-        statusCheck++;
-        try {
-          NamedList<Object> rsp = solrClient.request(req, daemonReplica.getCoreName());
-          Map<String, Object> rs = (Map<String, Object>) rsp.get("result-set");
-          if (rs == null || rs.isEmpty()) {
-            throw new SolrException(
-                SolrException.ErrorCode.SERVER_ERROR,
-                "Can't find daemon list: missing result-set: " + 
Utils.toJSONString(rsp));
-          }
-          List<Object> list = (List<Object>) rs.get("docs");
-          if (list == null) {
-            throw new SolrException(
-                SolrException.ErrorCode.SERVER_ERROR,
-                "Can't find daemon list: missing result-set: " + 
Utils.toJSONString(rsp));
-          }
-          if (list.isEmpty()) { // finished?
-            break;
-          }
-          for (Object o : list) {
-            Map<String, Object> map = (Map<String, Object>) o;
-            String id = (String) map.get("id");
-            if (daemonName.equals(id)) {
-              isRunning = true;
-              // fail here
-              TestInjection.injectReindexFailure();
-              break;
-            }
-          }
-        } catch (Exception e) {
+
+    boolean isRunning;
+    int statusCheck = 0;
+    do {
+      isRunning = false;
+      statusCheck++;
+      try {
+        NamedList<Object> rsp = executeDaemonAction("list", daemonName, daemonReplica);
+        Map<String, Object> rs = (Map<String, Object>) rsp.get("result-set");
+        if (rs == null || rs.isEmpty()) {
+          throw new SolrException(
+              SolrException.ErrorCode.SERVER_ERROR,
+              "Can't find daemon list: missing result-set: " + 
Utils.toJSONString(rsp));
+        }
+        List<Object> list = (List<Object>) rs.get("docs");
+        if (list == null) {
           throw new SolrException(
               SolrException.ErrorCode.SERVER_ERROR,
-              "Exception waiting for daemon " + daemonName + " at " + 
daemonReplica.getCoreUrl(),
-              e);
+              "Can't find daemon list: missing result-set: " + 
Utils.toJSONString(rsp));
         }
-        if (statusCheck % 5 == 0) {
-          reindexingState.put("processedDocs", getNumberOfDocs(targetCollection));
-          setReindexingState(sourceCollection, State.RUNNING, reindexingState);
+        if (list.isEmpty()) { // finished?
+          break;
         }
-        ccc.getSolrCloudManager().getTimeSource().sleep(2000);
-      } while (isRunning && !maybeAbort(sourceCollection));
-    }
+        for (Object o : list) {
+          Map<String, Object> map = (Map<String, Object>) o;
+          String id = (String) map.get("id");
+          if (daemonName.equals(id)) {
+            isRunning = true;
+            // fail here
+            TestInjection.injectReindexFailure();
+            break;
+          }
+        }
+      } catch (Exception e) {
+        throw new SolrException(
+            SolrException.ErrorCode.SERVER_ERROR,
+            "Exception waiting for daemon " + daemonName + " at " + 
daemonReplica.getCoreUrl(),
+            e);
+      }
+      if (statusCheck % 5 == 0) {
+        reindexingState.put("processedDocs", getNumberOfDocs(targetCollection));
+        setReindexingState(sourceCollection, State.RUNNING, reindexingState);
+      }
+      ccc.getSolrCloudManager().getTimeSource().sleep(2000);
+    } while (isRunning && !maybeAbort(sourceCollection));
   }
 
   @SuppressWarnings({"unchecked"})
@@ -799,107 +785,101 @@ public class ReindexCollectionCmd implements CollApiCmds.CollectionApiCommand {
     if (log.isDebugEnabled()) {
       log.debug("-- killing daemon {} at {}", daemonName, 
daemonReplica.getCoreUrl());
     }
-    HttpClient client = ccc.getCoreContainer().getUpdateShardHandler().getDefaultHttpClient();
-    try (SolrClient solrClient =
-        new HttpSolrClient.Builder()
-            .withHttpClient(client)
-            .withDefaultCollection(daemonReplica.getCoreName())
-            .withBaseSolrUrl(daemonReplica.getBaseUrl())
-            .build()) {
-      ModifiableSolrParams q = new ModifiableSolrParams();
-      q.set(CommonParams.QT, "/stream");
-      // we should really use 'kill' here, but then we will never
-      // know when the daemon actually finishes running - 'kill' only
-      // sets a flag that may be noticed much later
-      q.set("action", "stop");
-      q.set(CommonParams.ID, daemonName);
-      q.set(CommonParams.DISTRIB, false);
-      QueryRequest req = new QueryRequest(q);
-      NamedList<Object> rsp = solrClient.request(req);
-      // /result-set/docs/[0]/DaemonOp : Daemon:id killed on coreName
-      if (log.isDebugEnabled()) {
-        log.debug(" -- stop daemon response: {}", Utils.toJSONString(rsp));
-      }
-      Map<String, Object> rs = (Map<String, Object>) rsp.get("result-set");
-      if (rs == null || rs.isEmpty()) {
-        log.warn(
-            "Problem killing daemon {}: missing result-set: {}",
-            daemonName,
-            Utils.toJSONString(rsp));
-        return;
-      }
-      List<Object> list = (List<Object>) rs.get("docs");
-      if (list == null) {
-        log.warn(
-            "Problem killing daemon {}: missing result-set: {}",
-            daemonName,
-            Utils.toJSONString(rsp));
-        return;
-      }
-      if (list.isEmpty()) { // already finished?
-        return;
+
+    // we should really use 'kill' here, but then we will never
+    // know when the daemon actually finishes running - 'kill' only
+    // sets a flag that may be noticed much later
+    NamedList<Object> rsp = executeDaemonAction("stop", daemonName, daemonReplica);
+    // /result-set/docs/[0]/DaemonOp : Daemon:id killed on coreName
+    if (log.isDebugEnabled()) {
+      log.debug(" -- stop daemon response: {}", Utils.toJSONString(rsp));
+    }
+    Map<String, Object> rs = (Map<String, Object>) rsp.get("result-set");
+    if (rs == null || rs.isEmpty()) {
+      log.warn(
+          "Problem killing daemon {}: missing result-set: {}", daemonName, 
Utils.toJSONString(rsp));
+      return;
+    }
+    List<Object> list = (List<Object>) rs.get("docs");
+    if (list == null) {
+      log.warn(
+          "Problem killing daemon {}: missing result-set: {}", daemonName, 
Utils.toJSONString(rsp));
+      return;
+    }
+    if (list.isEmpty()) { // already finished?
+      return;
+    }
+    for (Object o : list) {
+      Map<String, Object> map = (Map<String, Object>) o;
+      String op = (String) map.get("DaemonOp");
+      if (op == null) {
+        continue;
       }
-      for (Object o : list) {
-        Map<String, Object> map = (Map<String, Object>) o;
-        String op = (String) map.get("DaemonOp");
-        if (op == null) {
-          continue;
-        }
-        if (op.contains(daemonName) && op.contains("stopped")) {
-          // now wait for the daemon to really stop
-          q.set("action", "list");
-          req = new QueryRequest(q);
-          TimeOut timeOut =
-              new TimeOut(60, TimeUnit.SECONDS, ccc.getSolrCloudManager().getTimeSource());
-          while (!timeOut.hasTimedOut()) {
-            rsp = solrClient.request(req);
-            rs = (Map<String, Object>) rsp.get("result-set");
-            if (rs == null || rs.isEmpty()) {
-              log.warn(
-                  "Problem killing daemon {}: missing result-set: {}",
-                  daemonName,
-                  Utils.toJSONString(rsp));
-              break;
-            }
-            List<Object> list2 = (List<Object>) rs.get("docs");
-            if (list2 == null) {
-              log.warn(
-                  "Problem killing daemon {}: missing result-set: {}",
-                  daemonName,
-                  Utils.toJSONString(rsp));
-              break;
-            }
-            if (list2.isEmpty()) { // already finished?
-              break;
-            }
-            Map<String, Object> status2 = null;
-            for (Object o2 : list2) {
-              Map<String, Object> map2 = (Map<String, Object>) o2;
-              if (daemonName.equals(map2.get("id"))) {
-                status2 = map2;
-                break;
-              }
-            }
-            if (status2 == null) { // finished?
-              break;
-            }
-            Number stopTime = (Number) status2.get("stopTime");
-            if (stopTime.longValue() > 0) {
+      if (op.contains(daemonName) && op.contains("stopped")) {
+        // now wait for the daemon to really stop
+        TimeOut timeOut =
+            new TimeOut(60, TimeUnit.SECONDS, ccc.getSolrCloudManager().getTimeSource());
+        while (!timeOut.hasTimedOut()) {
+          rsp = executeDaemonAction("list", daemonName, daemonReplica);
+          rs = (Map<String, Object>) rsp.get("result-set");
+          if (rs == null || rs.isEmpty()) {
+            log.warn(
+                "Problem killing daemon {}: missing result-set: {}",
+                daemonName,
+                Utils.toJSONString(rsp));
+            break;
+          }
+          List<Object> list2 = (List<Object>) rs.get("docs");
+          if (list2 == null) {
+            log.warn(
+                "Problem killing daemon {}: missing result-set: {}",
+                daemonName,
+                Utils.toJSONString(rsp));
+            break;
+          }
+          if (list2.isEmpty()) { // already finished?
+            break;
+          }
+          Map<String, Object> status2 = null;
+          for (Object o2 : list2) {
+            Map<String, Object> map2 = (Map<String, Object>) o2;
+            if (daemonName.equals(map2.get("id"))) {
+              status2 = map2;
               break;
             }
           }
-          if (timeOut.hasTimedOut()) {
-            log.warn(
-                "Problem killing daemon {}: timed out waiting for daemon to 
stop.", daemonName);
-            // proceed anyway
+          if (status2 == null) { // finished?
+            break;
+          }
+          Number stopTime = (Number) status2.get("stopTime");
+          if (stopTime.longValue() > 0) {
+            break;
           }
         }
+        if (timeOut.hasTimedOut()) {
+          log.warn("Problem killing daemon {}: timed out waiting for daemon to stop.", daemonName);
+          // proceed anyway
+        }
       }
-      // now kill it - it's already stopped, this simply removes its status
-      q.set("action", "kill");
-      req = new QueryRequest(q);
-      solrClient.request(req);
     }
+    // now kill it - it's already stopped, this simply removes its status
+    executeDaemonAction("kill", daemonName, daemonReplica);
+  }
+
+  private NamedList<Object> executeDaemonAction(
+      String action, String daemonName, Replica daemonReplica) throws Exception {
+    final var solrClient = ccc.getCoreContainer().getDefaultHttpSolrClient();
+
+    final var solrParams = new ModifiableSolrParams();
+    solrParams.set(CommonParams.QT, "/stream");
+    solrParams.set("action", action);
+    solrParams.set(CommonParams.ID, daemonName);
+    solrParams.set(CommonParams.DISTRIB, false);
+
+    final var req = new QueryRequest(solrParams);
+    final var solrResponse =
+        solrClient.requestWithBaseUrl(daemonReplica.getBaseUrl(), daemonReplica.getCoreName(), req);
+    return solrResponse.getResponse();
   }
 
   private void cleanup(
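
The three daemon call sites above ("list", "stop", "kill") now funnel through the new executeDaemonAction helper. Spelled out as a standalone call, with daemonName, daemonReplica, and ccc as stand-ins from the surrounding class, the equivalent request is:

    var q = new ModifiableSolrParams();
    q.set(CommonParams.QT, "/stream");
    q.set("action", "list");            // or "stop" / "kill"
    q.set(CommonParams.ID, daemonName); // note: the id param is now sent for "list" too (previously omitted)
    q.set(CommonParams.DISTRIB, false);
    NamedList<Object> rsp =
        ccc.getCoreContainer().getDefaultHttpSolrClient()
            .requestWithBaseUrl(daemonReplica.getBaseUrl(), daemonReplica.getCoreName(), new QueryRequest(q))
            .getResponse();
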
diff --git a/solr/core/src/java/org/apache/solr/filestore/DistribFileStore.java b/solr/core/src/java/org/apache/solr/filestore/DistribFileStore.java
index 1a462dee26e..061cd209b14 100644
--- a/solr/core/src/java/org/apache/solr/filestore/DistribFileStore.java
+++ b/solr/core/src/java/org/apache/solr/filestore/DistribFileStore.java
@@ -18,6 +18,8 @@
 package org.apache.solr.filestore;
 
 import static java.nio.charset.StandardCharsets.UTF_8;
+import static org.apache.solr.client.solrj.SolrRequest.METHOD.DELETE;
+import static org.apache.solr.client.solrj.SolrRequest.METHOD.GET;
 import static org.apache.solr.common.SolrException.ErrorCode.BAD_REQUEST;
 import static org.apache.solr.common.SolrException.ErrorCode.SERVER_ERROR;
 
@@ -45,11 +47,13 @@ import java.util.function.Consumer;
 import java.util.function.Predicate;
 import net.jcip.annotations.NotThreadSafe;
 import org.apache.commons.codec.digest.DigestUtils;
-import org.apache.http.client.HttpClient;
-import org.apache.http.client.methods.HttpDelete;
 import org.apache.lucene.util.IOUtils;
+import org.apache.solr.client.solrj.SolrServerException;
+import org.apache.solr.client.solrj.impl.InputStreamResponseParser;
+import org.apache.solr.client.solrj.request.GenericSolrRequest;
 import org.apache.solr.common.SolrException;
 import org.apache.solr.common.cloud.SolrZkClient;
+import org.apache.solr.common.params.ModifiableSolrParams;
 import org.apache.solr.common.util.Utils;
 import org.apache.solr.core.CoreContainer;
 import org.apache.solr.core.SolrPaths;
@@ -178,25 +182,33 @@ public class DistribFileStore implements FileStore {
          coreContainer.getZkController().getZkStateReader().getBaseUrlV2ForNodeName(fromNode);
      if (baseUrl == null) throw new SolrException(BAD_REQUEST, "No such node");
 
-      ByteBuffer metadata = null;
-      Map<?, ?> m = null;
+      ByteBuffer metadata;
+      Map<?, ?> m;
+
+      InputStream is = null;
+      var solrClient = coreContainer.getDefaultHttpSolrClient();
+
       try {
+        GenericSolrRequest request = new GenericSolrRequest(GET, "/node/files" + getMetaPath());
+        request.setResponseParser(new InputStreamResponseParser(null));
+        var response = solrClient.requestWithBaseUrl(baseUrl, request::process).getResponse();
+        is = (InputStream) response.get("stream");
         metadata =
-            Utils.executeGET(
-                coreContainer.getUpdateShardHandler().getDefaultHttpClient(),
-                baseUrl + "/node/files" + getMetaPath(),
-                Utils.newBytesConsumer((int) MAX_PKG_SIZE));
+            Utils.newBytesConsumer((int) MAX_PKG_SIZE).accept((InputStream) response.get("stream"));
        m = (Map<?, ?>) Utils.fromJSON(metadata.array(), metadata.arrayOffset(), metadata.limit());
-      } catch (SolrException e) {
+      } catch (SolrServerException | IOException e) {
        throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Error fetching metadata", e);
+      } finally {
+        org.apache.solr.common.util.IOUtils.closeQuietly(is);
       }
 
       try {
+        GenericSolrRequest request = new GenericSolrRequest(GET, "/node/files" + path);
+        request.setResponseParser(new InputStreamResponseParser(null));
+        var response = solrClient.requestWithBaseUrl(baseUrl, request::process).getResponse();
+        is = (InputStream) response.get("stream");
         ByteBuffer filedata =
-            Utils.executeGET(
-                coreContainer.getUpdateShardHandler().getDefaultHttpClient(),
-                baseUrl + "/node/files" + path,
-                Utils.newBytesConsumer((int) MAX_PKG_SIZE));
+            Utils.newBytesConsumer((int) MAX_PKG_SIZE).accept((InputStream) response.get("stream"));
         filedata.mark();
        String sha512 = DigestUtils.sha512Hex(new ByteBufferInputStream(filedata));
         String expected = (String) m.get("sha512");
@@ -207,10 +219,12 @@ public class DistribFileStore implements FileStore {
         filedata.reset();
         persistToFile(filedata, metadata);
         return true;
-      } catch (SolrException e) {
-        throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Error fetching data", e);
+      } catch (SolrServerException e) {
+        throw new SolrException(SERVER_ERROR, "Error fetching data", e);
       } catch (IOException ioe) {
         throw new SolrException(SERVER_ERROR, "Error persisting file", ioe);
+      } finally {
+        org.apache.solr.common.util.IOUtils.closeQuietly(is);
       }
     }
 
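The two fetch blocks above switch to InputStreamResponseParser, which hands the raw response body back under the "stream" key of the NamedList; the caller owns that stream and must close it, hence the new finally blocks. A compact sketch of that contract, where solrClient, baseUrl, the path, and MAX_BYTES are stand-ins:

    GenericSolrRequest request = new GenericSolrRequest(GET, "/node/files/example.bin");
    request.setResponseParser(new InputStreamResponseParser(null));
    InputStream is = null;
    try {
      var response = solrClient.requestWithBaseUrl(baseUrl, request::process).getResponse();
      is = (InputStream) response.get("stream");
      ByteBuffer data = Utils.newBytesConsumer(MAX_BYTES).accept(is); // buffer fully, capped at MAX_BYTES
      // ... use data ...
    } finally {
      org.apache.solr.common.util.IOUtils.closeQuietly(is); // never leak the connection
    }
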
@@ -221,17 +235,18 @@ public class DistribFileStore implements FileStore {
         try {
          String baseUrl =
              coreContainer.getZkController().getZkStateReader().getBaseUrlV2ForNodeName(liveNode);
-          String reqUrl = baseUrl + "/node/files" + path + "?meta=true&wt=javabin&omitHeader=true";
+          final var solrParams = new ModifiableSolrParams();
+          solrParams.add("meta", "true");
+          solrParams.add("omitHeader", "true");
+
+          final var request = new GenericSolrRequest(GET, "/node/files" + path, solrParams);
           boolean nodeHasBlob = false;
-          Object nl =
-              Utils.executeGET(
-                  coreContainer.getUpdateShardHandler().getDefaultHttpClient(),
-                  reqUrl,
-                  Utils.JAVABINCONSUMER);
-          if (Utils.getObjectByPath(nl, false, Arrays.asList("files", path)) != null) {
+          var solrClient = coreContainer.getDefaultHttpSolrClient();
+          var resp = solrClient.requestWithBaseUrl(baseUrl, request::process).getResponse();
+
+          if (Utils.getObjectByPath(resp, false, Arrays.asList("files", path)) != null) {
             nodeHasBlob = true;
           }
-
           if (nodeHasBlob) {
             boolean success = fetchFileFromNodeAndPersist(liveNode);
             if (success) return true;
@@ -344,16 +359,16 @@ public class DistribFileStore implements FileStore {
     int i = 0;
     int FETCHFROM_SRC = 50;
     String myNodeName = coreContainer.getZkController().getNodeName();
+    String getFrom = "";
     try {
       for (String node : nodes) {
         String baseUrl =
             coreContainer.getZkController().getZkStateReader().getBaseUrlV2ForNodeName(node);
-        String url = baseUrl + "/node/files" + info.path + "?getFrom=";
         if (i < FETCHFROM_SRC) {
          // this is to protect very large clusters from overwhelming a single node
          // the first FETCHFROM_SRC nodes will be asked to fetch from this node.
          // it's there in the memory now, so it must be served fast
-          url += myNodeName;
+          getFrom = myNodeName;
         } else {
           if (i == FETCHFROM_SRC) {
             // This is just an optimization
@@ -367,11 +382,16 @@ public class DistribFileStore implements FileStore {
          // trying to avoid the thundering herd problem when there are a very large number of nodes
          // others should try to fetch it from any node where it is available. By now,
           // almost FETCHFROM_SRC other nodes may have it
-          url += "*";
+          getFrom = "*";
         }
         try {
+          var solrClient = coreContainer.getDefaultHttpSolrClient();
+          var solrParams = new ModifiableSolrParams();
+          solrParams.set("getFrom", getFrom);
+
+          var request = new GenericSolrRequest(GET, "/node/files" + info.path, solrParams);
           // fire and forget
-          Utils.executeGET(coreContainer.getUpdateShardHandler().getDefaultHttpClient(), url, null);
+          solrClient.requestWithBaseUrl(baseUrl, request::process);
         } catch (Exception e) {
           log.info("Node: {} failed to respond for file fetch notification", 
node, e);
           // ignore the exception
@@ -484,14 +504,26 @@ public class DistribFileStore implements FileStore {
   public void delete(String path) {
     deleteLocal(path);
    List<String> nodes = FileStoreUtils.fetchAndShuffleRemoteLiveNodes(coreContainer);
-    HttpClient client = coreContainer.getUpdateShardHandler().getDefaultHttpClient();
+
+    final var solrParams = new ModifiableSolrParams();
+    solrParams.add("localDelete", "true");
+    final var solrRequest = new GenericSolrRequest(DELETE, "/cluster/files" + path, solrParams);
+
     for (String node : nodes) {
      String baseUrl =
          coreContainer.getZkController().getZkStateReader().getBaseUrlV2ForNodeName(node);
-      String url = baseUrl + "/cluster/files" + path + "?localDelete=true";
-      HttpDelete del = new HttpDelete(url);
-      // invoke delete command on all nodes asynchronously
-      coreContainer.runAsync(() -> Utils.executeHttpMethod(client, url, null, del));
+      try {
+        var solrClient = coreContainer.getDefaultHttpSolrClient();
+        // invoke delete command on all nodes asynchronously
+        solrClient.requestWithBaseUrl(baseUrl, client -> client.requestAsync(solrRequest));
+      } catch (SolrServerException | IOException e) {
+        // Note: This catch block will not handle failures from the asynchronous request,
+        // as requestAsync returns immediately and does not propagate remote exceptions.
+        throw new SolrException(
+            SolrException.ErrorCode.SERVER_ERROR,
+            "Failed to delete " + path + " on node " + node,
+            e);
+      }
     }
   }
 
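As the new comment in delete() spells out, requestAsync is fire-and-forget: the catch block can only see local submission failures, never errors from the remote node, much like the runAsync + HttpDelete code it replaces. The per-node dispatch reduces to the following, with baseUrl and solrRequest as in the hunk above and the exception message as a stand-in:

    try {
      solrClient.requestWithBaseUrl(baseUrl, client -> client.requestAsync(solrRequest));
    } catch (SolrServerException | IOException e) {
      // Local dispatch failure only; a failed delete on the remote node is never reported here.
      throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Failed to submit delete", e);
    }
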
diff --git a/solr/core/src/java/org/apache/solr/pkg/PackageAPI.java b/solr/core/src/java/org/apache/solr/pkg/PackageAPI.java
index 24339feefe8..8053502cddf 100644
--- a/solr/core/src/java/org/apache/solr/pkg/PackageAPI.java
+++ b/solr/core/src/java/org/apache/solr/pkg/PackageAPI.java
@@ -35,11 +35,15 @@ import org.apache.solr.api.Command;
 import org.apache.solr.api.EndPoint;
 import org.apache.solr.api.PayloadObj;
 import org.apache.solr.client.solrj.SolrRequest;
+import org.apache.solr.client.solrj.SolrServerException;
+import org.apache.solr.client.solrj.impl.BinaryResponseParser;
+import org.apache.solr.client.solrj.request.GenericSolrRequest;
 import org.apache.solr.client.solrj.request.beans.PackagePayload;
 import org.apache.solr.common.SolrException;
 import org.apache.solr.common.annotation.JsonProperty;
 import org.apache.solr.common.cloud.SolrZkClient;
 import org.apache.solr.common.cloud.ZooKeeperException;
+import org.apache.solr.common.params.ModifiableSolrParams;
 import org.apache.solr.common.util.CommandOperation;
 import org.apache.solr.common.util.EnvUtils;
 import org.apache.solr.common.util.ReflectMapWriter;
@@ -250,13 +254,27 @@ public class PackageAPI {
       }
       // first refresh my own
       packageLoader.notifyListeners(p);
+
+      final var solrParams = new ModifiableSolrParams();
+      solrParams.add("omitHeader", "true");
+      solrParams.add("refreshPackage", p);
+
+      final var request =
+          new GenericSolrRequest(SolrRequest.METHOD.GET, "/cluster/package", solrParams);
+      request.setResponseParser(new BinaryResponseParser());
+
      for (String liveNode : FileStoreUtils.fetchAndShuffleRemoteLiveNodes(coreContainer)) {
-        Utils.executeGET(
-            coreContainer.getUpdateShardHandler().getDefaultHttpClient(),
-            coreContainer.getZkController().zkStateReader.getBaseUrlV2ForNodeName(liveNode)
-                + "/cluster/package?wt=javabin&omitHeader=true&refreshPackage="
-                + p,
-            Utils.JAVABINCONSUMER);
+        final var baseUrl =
+            coreContainer.getZkController().zkStateReader.getBaseUrlV2ForNodeName(liveNode);
+        try {
+          var solrClient = coreContainer.getDefaultHttpSolrClient();
+          solrClient.requestWithBaseUrl(baseUrl, request::process);
+        } catch (SolrServerException | IOException e) {
+          throw new SolrException(
+              SolrException.ErrorCode.SERVER_ERROR,
+              "Failed to refresh package on node: " + liveNode,
+              e);
+        }
       }
     }
 
@@ -418,13 +436,26 @@ public class PackageAPI {
   }
 
   void notifyAllNodesToSync(int expected) {
+
+    final var solrParams = new ModifiableSolrParams();
+    solrParams.add("omitHeader", "true");
+    solrParams.add("expectedVersion", String.valueOf(expected));
+
+    final var request =
+        new GenericSolrRequest(SolrRequest.METHOD.GET, "/cluster/package", solrParams);
+    request.setResponseParser(new BinaryResponseParser());
+
     for (String liveNode : FileStoreUtils.fetchAndShuffleRemoteLiveNodes(coreContainer)) {
-      Utils.executeGET(
-          coreContainer.getUpdateShardHandler().getDefaultHttpClient(),
-          coreContainer.getZkController().zkStateReader.getBaseUrlV2ForNodeName(liveNode)
-              + "/cluster/package?wt=javabin&omitHeader=true&expectedVersion"
-              + expected,
-          Utils.JAVABINCONSUMER);
+      var baseUrl = coreContainer.getZkController().zkStateReader.getBaseUrlV2ForNodeName(liveNode);
+      try {
+        var solrClient = coreContainer.getDefaultHttpSolrClient();
+        solrClient.requestWithBaseUrl(baseUrl, request::process);
+      } catch (SolrServerException | IOException e) {
+        throw new SolrException(
+            SolrException.ErrorCode.SERVER_ERROR,
+            "Failed to notify node: " + liveNode + " to sync expected package 
version: " + expected,
+            e);
+      }
     }
   }
 
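Both PackageAPI loops previously hand-built javabin URLs; the rewrite expresses the response format through a parser instead. Incidentally, the removed notifyAllNodesToSync URL concatenated "...&expectedVersion" + expected without an '=', so the parameter was malformed; the params-based form below cannot make that mistake:

    // Params replace hand-built query strings ("expected" as in the method above).
    var solrParams = new ModifiableSolrParams();
    solrParams.add("omitHeader", "true");
    solrParams.add("expectedVersion", String.valueOf(expected));
    var request = new GenericSolrRequest(SolrRequest.METHOD.GET, "/cluster/package", solrParams);
    request.setResponseParser(new BinaryResponseParser()); // javabin, formerly wt=javabin in the URL
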
diff --git a/solr/core/src/java/org/apache/solr/security/PKIAuthenticationPlugin.java b/solr/core/src/java/org/apache/solr/security/PKIAuthenticationPlugin.java
index b1f6e6b6eed..e23c90e9b23 100644
--- a/solr/core/src/java/org/apache/solr/security/PKIAuthenticationPlugin.java
+++ b/solr/core/src/java/org/apache/solr/security/PKIAuthenticationPlugin.java
@@ -17,6 +17,7 @@
 package org.apache.solr.security;
 
 import static java.nio.charset.StandardCharsets.UTF_8;
+import static org.apache.solr.client.solrj.SolrRequest.METHOD.GET;
 
 import com.google.common.annotations.VisibleForTesting;
 import java.io.IOException;
@@ -41,16 +42,16 @@ import org.apache.http.HttpException;
 import org.apache.http.HttpHeaders;
 import org.apache.http.HttpRequest;
 import org.apache.http.HttpRequestInterceptor;
-import org.apache.http.HttpResponse;
 import org.apache.http.auth.BasicUserPrincipal;
-import org.apache.http.client.methods.HttpGet;
 import org.apache.http.protocol.HttpContext;
-import org.apache.http.util.EntityUtils;
 import org.apache.solr.client.solrj.impl.Http2SolrClient;
 import org.apache.solr.client.solrj.impl.HttpClientUtil;
 import org.apache.solr.client.solrj.impl.HttpListenerFactory;
 import org.apache.solr.client.solrj.impl.SolrHttpClientBuilder;
+import org.apache.solr.client.solrj.request.GenericSolrRequest;
+import org.apache.solr.common.params.ModifiableSolrParams;
 import org.apache.solr.common.util.ExecutorUtil;
+import org.apache.solr.common.util.NamedList;
 import org.apache.solr.common.util.StrUtils;
 import org.apache.solr.common.util.SuppressForbidden;
 import org.apache.solr.common.util.Utils;
@@ -346,23 +347,23 @@ public class PKIAuthenticationPlugin extends AuthenticationPlugin
     String url = cores.getZkController().getZkStateReader().getBaseUrlForNodeName(nodename);
     HttpEntity entity = null;
     try {
-      String uri = url + PublicKeyHandler.PATH + "?wt=json&omitHeader=true";
-      log.debug("Fetching fresh public key from: {}", uri);
-      HttpResponse rsp =
-          cores
-              .getUpdateShardHandler()
-              .getDefaultHttpClient()
-              .execute(new HttpGet(uri), HttpClientUtil.createNewHttpClientRequestContext());
-      entity = rsp.getEntity();
-      byte[] bytes = EntityUtils.toByteArray(entity);
-      Map<?, ?> m = (Map<?, ?>) Utils.fromJSON(bytes);
-      String key = (String) m.get("key");
+      final var solrParams = new ModifiableSolrParams();
+      solrParams.add("wt", "json");
+      solrParams.add("omitHeader", "true");
+
+      final var request = new GenericSolrRequest(GET, PublicKeyHandler.PATH, solrParams);
+      log.debug("Fetching fresh public key from: {}", url);
+      var solrClient = cores.getDefaultHttpSolrClient();
+      NamedList<Object> resp = solrClient.requestWithBaseUrl(url, request::process).getResponse();
+
+      String key = (String) resp.get("key");
       if (key == null) {
         log.error("No key available from {}{}", url, PublicKeyHandler.PATH);
         return null;
       } else {
         log.info("New key obtained from node={}, key={}", nodename, key);
       }
+
       PublicKey pubKey = CryptoKeys.deserializeX509PublicKey(key);
       keyCache.put(nodename, pubKey);
       return pubKey;
diff --git a/solr/modules/cross-dc/build.gradle b/solr/modules/cross-dc/build.gradle
index dbd9c00c1bf..44143285d08 100644
--- a/solr/modules/cross-dc/build.gradle
+++ b/solr/modules/cross-dc/build.gradle
@@ -36,7 +36,6 @@ dependencies {
   implementation 'org.apache.kafka:kafka-clients'
   implementation 'com.google.guava:guava'
   implementation 'io.dropwizard.metrics:metrics-core'
-  implementation 'org.apache.httpcomponents:httpclient'
   implementation 'org.apache.zookeeper:zookeeper'
   implementation 'org.apache.zookeeper:zookeeper-jute'
 
diff --git a/solr/modules/cross-dc/src/java/org/apache/solr/crossdc/update/processor/MirroringUpdateProcessor.java b/solr/modules/cross-dc/src/java/org/apache/solr/crossdc/update/processor/MirroringUpdateProcessor.java
index b5b3fbfb935..3b391f83b83 100644
--- a/solr/modules/cross-dc/src/java/org/apache/solr/crossdc/update/processor/MirroringUpdateProcessor.java
+++ b/solr/modules/cross-dc/src/java/org/apache/solr/crossdc/update/processor/MirroringUpdateProcessor.java
@@ -26,11 +26,9 @@ import java.util.IdentityHashMap;
 import java.util.List;
 import java.util.Map;
 import java.util.TreeMap;
-import org.apache.http.client.HttpClient;
 import org.apache.solr.client.solrj.SolrClient;
 import org.apache.solr.client.solrj.SolrQuery;
 import org.apache.solr.client.solrj.SolrServerException;
-import org.apache.solr.client.solrj.impl.HttpSolrClient;
 import org.apache.solr.client.solrj.request.UpdateRequest;
 import org.apache.solr.client.solrj.response.QueryResponse;
 import org.apache.solr.cloud.CloudDescriptor;
@@ -209,15 +207,7 @@ public class MirroringUpdateProcessor extends UpdateRequestProcessor {
      CloudDescriptor cloudDesc = cmd.getReq().getCore().getCoreDescriptor().getCloudDescriptor();
       String collection = cloudDesc.getCollectionName();
 
-      HttpClient httpClient =
-          cmd.getReq().getCore().getCoreContainer().getUpdateShardHandler().getDefaultHttpClient();
-
-      try (HttpSolrClient client =
-          new HttpSolrClient.Builder(
-                  cmd.getReq().getCore().getCoreContainer().getZkController().getBaseUrl())
-              .withHttpClient(httpClient)
-              .build()) {
-
+      try {
         String uniqueField = cmd.getReq().getSchema().getUniqueKeyField().getName();
 
         // TODO: implement "expand without deep paging"
@@ -233,6 +223,7 @@ public class MirroringUpdateProcessor extends UpdateRequestProcessor {
         boolean done = false;
         while (!done) {
           q.set(CursorMarkParams.CURSOR_MARK_PARAM, cursorMark);
+          var client = cmd.getReq().getCoreContainer().getDefaultHttpSolrClient();
           QueryResponse rsp = client.query(collection, q);
           String nextCursorMark = rsp.getNextCursorMark();
 
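
One last pattern worth calling out: the deleteById expansion above previously built (and closed) its own HttpSolrClient per call, which is why the try-with-resources became a plain try now that it borrows the container's shared client. Assuming getDefaultHttpSolrClient() returns the same shared instance on every call, the lookup inside the while loop could likely be hoisted out; either way, the borrowed client must not be closed by the caller:

    // Borrow the shared, container-managed client; do not close it here.
    var client = cmd.getReq().getCoreContainer().getDefaultHttpSolrClient();
    QueryResponse rsp = client.query(collection, q); // collection and q as in the hunk above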

