This is an automated email from the ASF dual-hosted git repository.
gerlowskija pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/solr.git
The following commit(s) were added to refs/heads/main by this push:
new c058ea5dac3 SOLR-15751: Add v2 COLSTATUS and "segments" APIs (#2912)
c058ea5dac3 is described below
commit c058ea5dac37fbcc54f2944ce3d6fad2bded86fc
Author: Jason Gerlowski <[email protected]>
AuthorDate: Fri Dec 27 06:59:09 2024 -0500
SOLR-15751: Add v2 COLSTATUS and "segments" APIs (#2912)
COLSTATUS functionality is offered at `GET /api/collections/collName`, and segment
functionality at `GET /api/cores/coreName/segments`.
This also converts the APIs to JAX-RS, and creates generated SolrRequest bindings for
these APIs as a result.
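As a minimal sketch of calling the new endpoints with the JDK's built-in HTTP client: the host/port ("localhost:8983") and the collection and core names ("techproducts", "techproducts_shard1_replica_n1") are placeholder assumptions, and the generated SolrRequest bindings mentioned above are not shown here.

import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;

public class V2StatusExample {
  public static void main(String[] args) throws Exception {
    HttpClient http = HttpClient.newHttpClient();

    // COLSTATUS equivalent: detailed metadata for a single collection ("coreInfo" is optional)
    HttpRequest collectionStatus = HttpRequest.newBuilder(
            URI.create("http://localhost:8983/api/collections/techproducts?coreInfo=true"))
        .GET()
        .build();
    System.out.println(http.send(collectionStatus, HttpResponse.BodyHandlers.ofString()).body());

    // "segments" equivalent: segment metadata for a single core
    HttpRequest segments = HttpRequest.newBuilder(
            URI.create("http://localhost:8983/api/cores/techproducts_shard1_replica_n1/segments"))
        .GET()
        .build();
    System.out.println(http.send(segments, HttpResponse.BodyHandlers.ofString()).body());
  }
}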
---
solr/CHANGES.txt | 4 +-
.../client/api/endpoint/CollectionStatusApi.java | 75 ++++
.../solr/client/api/endpoint/SegmentsApi.java | 68 ++++
.../client/api/model/CollectionStatusResponse.java | 147 +++++++
.../client/api/model/GetSegmentDataResponse.java | 191 +++++++++
.../src/java/org/apache/solr/api/V2HttpCall.java | 2 +
.../org/apache/solr/handler/admin/ColStatus.java | 39 +-
.../solr/handler/admin/CollectionsHandler.java | 11 +-
.../handler/admin/SegmentsInfoRequestHandler.java | 448 ++-------------------
.../solr/handler/admin/api/CollectionStatus.java | 97 +++++
.../handler/admin/api/CollectionStatusAPI.java | 65 ---
.../GetSegmentData.java} | 407 ++++++++++---------
.../org/apache/solr/handler/api/V2ApiUtils.java | 3 +
.../solr/jersey/CatchAllExceptionMapper.java | 1 +
.../solr/jersey/MediaTypeOverridingFilter.java | 6 +-
.../apache/solr/cloud/CollectionsAPISolrJTest.java | 200 ++++++++-
.../solr/handler/admin/IndexSizeEstimatorTest.java | 55 +--
.../solr/handler/admin/TestApiFramework.java | 1 -
.../admin/api/V2CollectionAPIMappingTest.java | 14 -
.../pages/collection-management.adoc | 7 +-
20 files changed, 1068 insertions(+), 773 deletions(-)
diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt
index a844220f2fc..a806d567b39 100644
--- a/solr/CHANGES.txt
+++ b/solr/CHANGES.txt
@@ -150,7 +150,9 @@ New Features
Improvements
---------------------
-(No changes)
+* SOLR-15751: The v2 API now has parity with the v1 "COLSTATUS" and "segments" APIs, which can be used to fetch detailed information about
+  specific collections or cores. Collection information can be fetched by a call to `GET /api/collections/collectionName`, and core
+  information with a call to `GET /api/cores/coreName/segments`. (Jason Gerlowski)
Optimizations
---------------------
diff --git a/solr/api/src/java/org/apache/solr/client/api/endpoint/CollectionStatusApi.java b/solr/api/src/java/org/apache/solr/client/api/endpoint/CollectionStatusApi.java
new file mode 100644
index 00000000000..d07982cab76
--- /dev/null
+++ b/solr/api/src/java/org/apache/solr/client/api/endpoint/CollectionStatusApi.java
@@ -0,0 +1,75 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.solr.client.api.endpoint;
+
+import io.swagger.v3.oas.annotations.Operation;
+import io.swagger.v3.oas.annotations.Parameter;
+import jakarta.ws.rs.GET;
+import jakarta.ws.rs.Path;
+import jakarta.ws.rs.PathParam;
+import jakarta.ws.rs.QueryParam;
+import org.apache.solr.client.api.model.CollectionStatusResponse;
+
+/**
+ * V2 API definition for fetching collection metadata
+ *
+ * <p>This API (GET /v2/collections/collectionName) is analogous to the v1
+ * /admin/collections?action=COLSTATUS command.
+ */
+@Path("/collections/{collectionName}")
+public interface CollectionStatusApi {
+
+ // TODO Query parameters currently match those offered by the v1
+ // /admin/collections?action=COLSTATUS. Should param names be updated/clarified?
+ @GET
+ @Operation(
+ summary = "Fetches metadata about the specified collection",
+ tags = {"collections"})
+ CollectionStatusResponse getCollectionStatus(
+ @Parameter(description = "The name of the collection to return metadata for", required = true)
+ @PathParam("collectionName")
+ String collectionName,
+ @Parameter(description = SegmentsApi.CORE_INFO_PARAM_DESC) @QueryParam("coreInfo")
+ Boolean coreInfo,
+ @Parameter(
+ description =
+ "Boolean flag to include metadata and statistics about the segments used by each shard leader. Implicitly set to true by 'fieldInfo' and 'sizeInfo'")
+ @QueryParam("segments")
+ Boolean segments,
+ @Parameter(
+ description =
+ SegmentsApi.FIELD_INFO_PARAM_DESC
+ + " Implicitly sets the 'segments' flag to 'true'")
+ @QueryParam("fieldInfo")
+ Boolean fieldInfo,
+ @Parameter(description = SegmentsApi.RAW_SIZE_PARAM_DESC) @QueryParam("rawSize")
+ Boolean rawSize,
+ @Parameter(description = SegmentsApi.RAW_SIZE_SUMMARY_DESC) @QueryParam("rawSizeSummary")
+ Boolean rawSizeSummary,
+ @Parameter(description = SegmentsApi.RAW_SIZE_DETAILS_DESC) @QueryParam("rawSizeDetails")
+ Boolean rawSizeDetails,
+ @Parameter(description = SegmentsApi.RAW_SIZE_SAMPLING_PERCENT_DESC)
+ @QueryParam("rawSizeSamplingPercent")
+ Float rawSizeSamplingPercent,
+ @Parameter(
+ description =
+ SegmentsApi.SIZE_INFO_PARAM_DESC
+ + ". Implicitly sets the 'segments' flag to 'true'")
+ @QueryParam("sizeInfo")
+ Boolean sizeInfo)
+ throws Exception;
+}
diff --git a/solr/api/src/java/org/apache/solr/client/api/endpoint/SegmentsApi.java b/solr/api/src/java/org/apache/solr/client/api/endpoint/SegmentsApi.java
new file mode 100644
index 00000000000..1f6f089642e
--- /dev/null
+++ b/solr/api/src/java/org/apache/solr/client/api/endpoint/SegmentsApi.java
@@ -0,0 +1,68 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.solr.client.api.endpoint;
+
+import io.swagger.v3.oas.annotations.Operation;
+import io.swagger.v3.oas.annotations.Parameter;
+import jakarta.ws.rs.GET;
+import jakarta.ws.rs.Path;
+import jakarta.ws.rs.QueryParam;
+import org.apache.solr.client.api.model.GetSegmentDataResponse;
+import org.apache.solr.client.api.util.CoreApiParameters;
+
+/**
+ * V2 API definition for fetching metadata about a core's segments
+ *
+ * <p>This API (GET /v2/cores/coreName/segments) is analogous to the v1
+ * /solr/coreName/admin/segments API
+ */
+@Path("/cores/{coreName}/segments")
+public interface SegmentsApi {
+
+ String CORE_INFO_PARAM_DESC =
+ "Boolean flag to include metadata (e.g. index and data directories, IndexWriter configuration, etc.) about each shard leader's core";
+ String FIELD_INFO_PARAM_DESC =
+ "Boolean flag to include statistics about the indexed fields present on each shard leader.";
+ String RAW_SIZE_PARAM_DESC =
+ "Boolean flag to include simple estimates of the disk size taken up by each field (e.g. \"id\", \"_version_\") and by each index data structure (e.g. 'storedFields', 'docValues_numeric').";
+ String RAW_SIZE_SUMMARY_DESC =
+ "Boolean flag to include more involved estimates of the disk size taken up by index data structures, on a per-field basis (e.g. how much data does the \"id\" field contribute to 'storedField' index files). More detail than 'rawSize', less detail than 'rawSizeDetails'.";
+ String RAW_SIZE_DETAILS_DESC =
+ "Boolean flag to include detailed statistics about the disk size taken up by various fields and data structures. More detail than 'rawSize' and 'rawSizeSummary'.";
+ String RAW_SIZE_SAMPLING_PERCENT_DESC =
+ "Percentage (between 0 and 100) of data to read when estimating index size and statistics. Defaults to 5.0 (i.e. 5%).";
+ String SIZE_INFO_PARAM_DESC =
+ "Boolean flag to include information about the largest index files for each Lucene segment.";
+
+ @GET
+ @CoreApiParameters
+ @Operation(
+ summary = "Fetches metadata about the segments in use by the specified core",
+ tags = {"segments"})
+ GetSegmentDataResponse getSegmentData(
+ @Parameter(description = CORE_INFO_PARAM_DESC) @QueryParam("coreInfo") Boolean coreInfo,
+ @Parameter(description = FIELD_INFO_PARAM_DESC) @QueryParam("fieldInfo") Boolean fieldInfo,
+ @Parameter(description = RAW_SIZE_PARAM_DESC) @QueryParam("rawSize") Boolean rawSize,
+ @Parameter(description = RAW_SIZE_SUMMARY_DESC) @QueryParam("rawSizeSummary")
+ Boolean rawSizeSummary,
+ @Parameter(description = RAW_SIZE_DETAILS_DESC) @QueryParam("rawSizeDetails")
+ Boolean rawSizeDetails,
+ @Parameter(description = RAW_SIZE_SAMPLING_PERCENT_DESC) @QueryParam("rawSizeSamplingPercent")
+ Float rawSizeSamplingPercent,
+ @Parameter(description = SIZE_INFO_PARAM_DESC) @QueryParam("sizeInfo") Boolean sizeInfo)
+ throws Exception;
+}
diff --git a/solr/api/src/java/org/apache/solr/client/api/model/CollectionStatusResponse.java b/solr/api/src/java/org/apache/solr/client/api/model/CollectionStatusResponse.java
new file mode 100644
index 00000000000..82109edb915
--- /dev/null
+++ b/solr/api/src/java/org/apache/solr/client/api/model/CollectionStatusResponse.java
@@ -0,0 +1,147 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.solr.client.api.model;
+
+import com.fasterxml.jackson.annotation.JsonAnyGetter;
+import com.fasterxml.jackson.annotation.JsonAnySetter;
+import com.fasterxml.jackson.annotation.JsonFormat;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Date;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+/**
+ * Response of the CollectionStatusApi.getCollectionStatus() API
+ *
+ * <p>Note that the corresponding v1 API has a slightly different response format. Users should not
+ * attempt to convert a v1 response into this type.
+ */
+public class CollectionStatusResponse extends SolrJerseyResponse {
+
+ @JsonProperty public String name;
+ @JsonProperty public Integer znodeVersion;
+
+ // TODO - consider 'Instant' once SOLR-17608 is finished
+ @JsonProperty
+ @JsonFormat(shape = JsonFormat.Shape.NUMBER)
+ public Date creationTimeMillis;
+
+ @JsonProperty public CollectionMetadata properties;
+ @JsonProperty public Integer activeShards;
+ @JsonProperty public Integer inactiveShards;
+ @JsonProperty public List<String> schemaNonCompliant;
+
+ @JsonProperty public Map<String, ShardMetadata> shards;
+
+ // Always present in response
+ public static class CollectionMetadata {
+ @JsonProperty public String configName;
+ @JsonProperty public Integer nrtReplicas;
+ @JsonProperty public Integer pullReplicas;
+ @JsonProperty public Integer tlogReplicas;
+ @JsonProperty public Map<String, String> router;
+ @JsonProperty public Integer replicationFactor;
+
+ private Map<String, Object> unknownFields = new HashMap<>();
+
+ @JsonAnyGetter
+ public Map<String, Object> unknownProperties() {
+ return unknownFields;
+ }
+
+ @JsonAnySetter
+ public void setUnknownProperty(String field, Object value) {
+ unknownFields.put(field, value);
+ }
+ }
+
+ // Always present in response
+ public static class ShardMetadata {
+ @JsonProperty public String state; // TODO Make this an enum?
+ @JsonProperty public String range;
+ @JsonProperty public ReplicaSummary replicas;
+ @JsonProperty public LeaderSummary leader;
+ }
+
+ // Always present in response
+ public static class ReplicaSummary {
+ @JsonProperty public Integer total;
+ @JsonProperty public Integer active;
+ @JsonProperty public Integer down;
+ @JsonProperty public Integer recovering;
+
+ @JsonProperty("recovery_failed")
+ public Integer recoveryFailed;
+ }
+
+ // Always present in response unless otherwise specified
+ public static class LeaderSummary {
+ @JsonProperty public String coreNode;
+ @JsonProperty public String core;
+ @JsonProperty public Boolean leader;
+
+ @JsonProperty("node_name")
+ public String nodeName;
+
+ @JsonProperty("base_url")
+ public String baseUrl;
+
+ @JsonProperty public String state; // TODO Make this an enum?
+ @JsonProperty public String type; // TODO Make this an enum?
+
+ @JsonProperty("force_set_state")
+ public Boolean forceSetState;
+
+ // Present with coreInfo=true || sizeInfo=true unless otherwise specified
+ @JsonProperty public SegmentInfo segInfos;
+
+ private Map<String, Object> unknownFields = new HashMap<>();
+
+ @JsonAnyGetter
+ public Map<String, Object> unknownProperties() {
+ return unknownFields;
+ }
+
+ @JsonAnySetter
+ public void setUnknownProperty(String field, Object value) {
+ unknownFields.put(field, value);
+ }
+ }
+
+ // Present with segments=true || coreInfo=true || sizeInfo=true || fieldInfo=true unless otherwise
+ // specified
+
+ /**
+ * Same properties as {@link GetSegmentDataResponse}, but uses a different class to avoid
+ * inheriting "responseHeader", etc.
+ */
+ public static class SegmentInfo {
+ @JsonProperty public GetSegmentDataResponse.SegmentSummary info;
+
+ @JsonProperty public Map<String, Object> runningMerges;
+
+ // Present with segments=true || sizeInfo=true || fieldInfo=true
+ @JsonProperty public Map<String, GetSegmentDataResponse.SingleSegmentData> segments;
+
+ // Present with rawSize=true
+ @JsonProperty public GetSegmentDataResponse.RawSize rawSize;
+
+ // Present only with fieldInfo=true
+ @JsonProperty public List<String> fieldInfoLegend;
+ }
+}
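As a rough sketch of how a client might bind the JSON produced by this API onto the model above, the snippet below uses a plain Jackson ObjectMapper; the JSON literal is illustrative only, and Solr's own SolrJacksonMapper (used elsewhere in this commit) may configure its mapper differently.

import com.fasterxml.jackson.databind.ObjectMapper;
import org.apache.solr.client.api.model.CollectionStatusResponse;

public class ParseCollectionStatusExample {
  public static void main(String[] args) throws Exception {
    // Illustrative JSON shaped like the fields declared on CollectionStatusResponse
    String json =
        "{\"name\":\"techproducts\",\"znodeVersion\":4,\"activeShards\":2,\"inactiveShards\":0,"
            + "\"properties\":{\"configName\":\"_default\",\"nrtReplicas\":1}}";

    ObjectMapper mapper = new ObjectMapper();
    CollectionStatusResponse status = mapper.readValue(json, CollectionStatusResponse.class);
    System.out.println(status.name + " has " + status.activeShards + " active shard(s)");
  }
}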
diff --git a/solr/api/src/java/org/apache/solr/client/api/model/GetSegmentDataResponse.java b/solr/api/src/java/org/apache/solr/client/api/model/GetSegmentDataResponse.java
new file mode 100644
index 00000000000..b5e3714bfd3
--- /dev/null
+++ b/solr/api/src/java/org/apache/solr/client/api/model/GetSegmentDataResponse.java
@@ -0,0 +1,191 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.solr.client.api.model;
+
+import com.fasterxml.jackson.annotation.JsonAnyGetter;
+import com.fasterxml.jackson.annotation.JsonAnySetter;
+import com.fasterxml.jackson.annotation.JsonFormat;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Date;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+/**
+ * Response for {@link org.apache.solr.client.api.endpoint.SegmentsApi#getSegmentData(Boolean,
+ * Boolean, Boolean, Boolean, Boolean, Float, Boolean)} API
+ */
+public class GetSegmentDataResponse extends SolrJerseyResponse {
+ @JsonProperty public SegmentSummary info;
+
+ @JsonProperty public Map<String, Object> runningMerges;
+
+ @JsonProperty public Map<String, SingleSegmentData> segments;
+
+ // Present only with fieldInfo=true
+ @JsonProperty public List<String> fieldInfoLegend;
+
+ // Present with rawSize=true
+ @JsonProperty public RawSize rawSize;
+
+ // Always present in response
+ public static class SegmentSummary {
+ @JsonProperty public String minSegmentLuceneVersion;
+ @JsonProperty public String commitLuceneVersion;
+ @JsonProperty public Integer numSegments;
+ @JsonProperty public String segmentsFileName;
+ @JsonProperty public Integer totalMaxDoc;
+ // Typically keys are 'commitCommandVer' and 'commitTimeMSec'
+ @JsonProperty public Map<String, String> userData;
+
+ // Present for coreInfo=true only
+ @JsonProperty public CoreSummary core;
+ }
+
+ // Always present in response, provided that the specified core has segments
+ public static class SingleSegmentData {
+ @JsonProperty public String name;
+ @JsonProperty public Integer delCount;
+ @JsonProperty public Integer softDelCount;
+ @JsonProperty public Boolean hasFieldUpdates;
+ @JsonProperty public Long sizeInBytes;
+ @JsonProperty public Integer size;
+
+ // TODO - consider 'Instant' once SOLR-17608 is finished
+ @JsonProperty
+ @JsonFormat(shape = JsonFormat.Shape.STRING, pattern = "YYYY-MM-DD'T'hh:mm:ss.S'Z'")
+ public Date age;
+
+ @JsonProperty public String source;
+ @JsonProperty public String version;
+ @JsonProperty public Integer createdVersionMajor;
+ @JsonProperty public String minVersion;
+ @JsonProperty public SegmentDiagnosticInfo diagnostics;
+ @JsonProperty public Map<String, String> attributes;
+ // Only present when index-sorting is in use
+ @JsonProperty public String sort;
+ @JsonProperty public Boolean mergeCandidate;
+
+ // Present only when fieldInfo=true
+ @JsonProperty public Map<String, SegmentSingleFieldInfo> fields;
+
+ // Present only when sizeInfo=true
+ @JsonProperty("largestFiles")
+ public Map<String, String> largestFilesByName;
+ }
+
+ // Always present in response, provided that the specified core has segments
+ public static class SegmentSingleFieldInfo {
+ @JsonProperty public String flags;
+ @JsonProperty public Integer docCount;
+ @JsonProperty public Long termCount;
+ @JsonProperty public Long sumDocFreq;
+ @JsonProperty public Long sumTotalTermFreq;
+ @JsonProperty public String schemaType;
+ @JsonProperty public Map<String, String> nonCompliant;
+ }
+
+ // Always present in response
+ public static class SegmentDiagnosticInfo {
+ @JsonProperty("os.version")
+ public String osVersion;
+
+ @JsonProperty("lucene.version")
+ public String luceneVersion;
+
+ @JsonProperty public String source;
+
+ // TODO - consider 'Instant' once SOLR-17608 is finished
+ @JsonProperty
+ @JsonFormat(shape = JsonFormat.Shape.NUMBER)
+ public Date timestamp;
+
+ @JsonProperty("java.runtime.version")
+ public String javaRuntimeVersion;
+
+ @JsonProperty public String os;
+
+ @JsonProperty("java.vendor")
+ public String javaVendor;
+
+ @JsonProperty("os.arch")
+ public String osArchitecture;
+
+ private Map<String, Object> additionalDiagnostics = new HashMap<>();
+
+ @JsonAnyGetter
+ public Map<String, Object> getAdditionalDiagnostics() {
+ return additionalDiagnostics;
+ }
+
+ @JsonAnySetter
+ public void getAdditionalDiagnostics(String field, Object value) {
+ additionalDiagnostics.put(field, value);
+ }
+ }
+
+ // Present with coreInfo=true unless otherwise specified
+ public static class CoreSummary {
+ @JsonProperty public String startTime;
+ @JsonProperty public String dataDir;
+ @JsonProperty public String indexDir;
+ @JsonProperty public Double sizeInGB;
+ @JsonProperty public IndexWriterConfigSummary indexWriterConfig;
+ }
+
+ // Present with coreInfo=true unless otherwise specified
+
+ /** A serializable representation of Lucene's "LiveIndexWriterConfig" */
+ public static class IndexWriterConfigSummary {
+ @JsonProperty public String analyzer;
+ @JsonProperty public Double ramBufferSizeMB;
+ @JsonProperty public Integer maxBufferedDocs;
+ @JsonProperty public String mergedSegmentWarmer;
+ @JsonProperty public String delPolicy;
+ @JsonProperty public String commit;
+ @JsonProperty public String openMode;
+ @JsonProperty public String similarity;
+ @JsonProperty public String mergeScheduler;
+ @JsonProperty public String codec;
+ @JsonProperty public String infoStream;
+ @JsonProperty public String mergePolicy;
+ @JsonProperty public Boolean readerPooling;
+ @JsonProperty public Integer perThreadHardLimitMB;
+ @JsonProperty public Boolean useCompoundFile;
+ @JsonProperty public Boolean commitOnClose;
+ @JsonProperty public String indexSort;
+ @JsonProperty public Boolean checkPendingFlushOnUpdate;
+ @JsonProperty public String softDeletesField;
+ @JsonProperty public Long maxFullFlushMergeWaitMillis;
+ @JsonProperty public String leafSorter;
+ @JsonProperty public String eventListener;
+ @JsonProperty public String parentField;
+ @JsonProperty public String writer;
+ }
+
+ // Present with rawSize=true unless otherwise specified
+ public static class RawSize {
+ @JsonProperty public Map<String, String> fieldsBySize;
+ @JsonProperty public Map<String, String> typesBySize;
+
+ // Present with rawSizeDetails=true
+ @JsonProperty public Object details;
+
+ // Present with rawSizeSummary=true
+ @JsonProperty public Map<String, Object> summary;
+ }
+}
diff --git a/solr/core/src/java/org/apache/solr/api/V2HttpCall.java b/solr/core/src/java/org/apache/solr/api/V2HttpCall.java
index d156710a675..1ecb290fa0e 100644
--- a/solr/core/src/java/org/apache/solr/api/V2HttpCall.java
+++ b/solr/core/src/java/org/apache/solr/api/V2HttpCall.java
@@ -188,6 +188,8 @@ public class V2HttpCall extends HttpSolrCall {
Thread.currentThread().setContextClassLoader(core.getResourceLoader().getClassLoader());
this.path = path = path.substring(prefix.length() + pathSegments.get(1).length() + 2);
+ // Core-level API, so populate "collection" template val
+ parts.put(COLLECTION_PROP, origCorename);
Api apiInfo = getApiInfo(core.getRequestHandlers(), path, req.getMethod(), fullPath, parts);
if (isCompositeApi && apiInfo instanceof CompositeApi) {
((CompositeApi) this.api).add(apiInfo);
diff --git a/solr/core/src/java/org/apache/solr/handler/admin/ColStatus.java b/solr/core/src/java/org/apache/solr/handler/admin/ColStatus.java
index e7fe44e0fe6..234682e8473 100644
--- a/solr/core/src/java/org/apache/solr/handler/admin/ColStatus.java
+++ b/solr/core/src/java/org/apache/solr/handler/admin/ColStatus.java
@@ -25,6 +25,7 @@ import java.util.Map;
import java.util.Set;
import java.util.TreeMap;
import java.util.TreeSet;
+import org.apache.solr.client.api.model.GetSegmentDataResponse;
import org.apache.solr.client.solrj.SolrClient;
import org.apache.solr.client.solrj.SolrServerException;
import org.apache.solr.client.solrj.io.SolrClientCache;
@@ -41,6 +42,8 @@ import org.apache.solr.common.params.CommonParams;
import org.apache.solr.common.params.ModifiableSolrParams;
import org.apache.solr.common.util.NamedList;
import org.apache.solr.common.util.SimpleOrderedMap;
+import org.apache.solr.common.util.Utils;
+import org.apache.solr.jersey.SolrJacksonMapper;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -80,12 +83,16 @@ public class ColStatus {
collections = Collections.singleton(col);
}
boolean withFieldInfo = props.getBool(FIELD_INFO_PROP, false);
- boolean withSegments = props.getBool(SEGMENTS_PROP, false);
boolean withCoreInfo = props.getBool(CORE_INFO_PROP, false);
boolean withSizeInfo = props.getBool(SIZE_INFO_PROP, false);
boolean withRawSizeInfo = props.getBool(RAW_SIZE_PROP, false);
boolean withRawSizeSummary = props.getBool(RAW_SIZE_SUMMARY_PROP, false);
boolean withRawSizeDetails = props.getBool(RAW_SIZE_DETAILS_PROP, false);
+ // FieldInfo and SizeInfo imply segments=true, since they add to the data reported about each
+ // segment
+ boolean withSegments = props.getBool(SEGMENTS_PROP, false);
+ withSegments |= withFieldInfo || withSizeInfo;
+
Object samplingPercentVal = props.get(RAW_SIZE_SAMPLING_PERCENT_PROP);
Float samplingPercent =
samplingPercentVal != null ? Float.parseFloat(String.valueOf(samplingPercentVal)) : null;
@@ -94,6 +101,7 @@ public class ColStatus {
}
boolean getSegments = false;
if (withFieldInfo
+ || withSegments
|| withSizeInfo
|| withCoreInfo
|| withRawSizeInfo
@@ -196,32 +204,35 @@ public class ColStatus {
}
QueryRequest req = new QueryRequest(params);
NamedList<Object> rsp = client.request(req);
- rsp.remove("responseHeader");
- leaderMap.add("segInfos", rsp);
- NamedList<?> segs = (NamedList<?>) rsp.get("segments");
+ final var segmentResponse =
+ SolrJacksonMapper.getObjectMapper().convertValue(rsp, GetSegmentDataResponse.class);
+ segmentResponse.responseHeader = null;
+
+ final var segs = segmentResponse.segments;
if (segs != null) {
- for (Map.Entry<String, ?> entry : segs) {
- NamedList<Object> fields =
- (NamedList<Object>) ((NamedList<Object>) entry.getValue()).get("fields");
- if (fields != null) {
- for (Map.Entry<String, Object> fEntry : fields) {
- Object nc = ((NamedList<Object>) fEntry.getValue()).get("nonCompliant");
- if (nc != null) {
+ for (Map.Entry<String, GetSegmentDataResponse.SingleSegmentData> entry : segs.entrySet()) {
+ final var fieldInfoByName = entry.getValue().fields;
+ if (fieldInfoByName != null) {
+ for (Map.Entry<String, GetSegmentDataResponse.SegmentSingleFieldInfo> fEntry :
+ fieldInfoByName.entrySet()) {
+ if (fEntry.getValue().nonCompliant != null) {
nonCompliant.add(fEntry.getKey());
}
}
}
if (!withFieldInfo) {
- ((NamedList<Object>) entry.getValue()).remove("fields");
+ entry.getValue().fields = null;
}
}
}
if (!withSegments) {
- rsp.remove("segments");
+ segmentResponse.segments = null;
}
if (!withFieldInfo) {
- rsp.remove("fieldInfoLegend");
+ segmentResponse.fieldInfoLegend = null;
}
+ leaderMap.add("segInfos", Utils.reflectToMap(segmentResponse));
} catch (SolrServerException | IOException e) {
log.warn("Error getting details of replica segments from {}", url, e);
}
diff --git a/solr/core/src/java/org/apache/solr/handler/admin/CollectionsHandler.java b/solr/core/src/java/org/apache/solr/handler/admin/CollectionsHandler.java
index df6ba086d06..aefc1033d5e 100644
--- a/solr/core/src/java/org/apache/solr/handler/admin/CollectionsHandler.java
+++ b/solr/core/src/java/org/apache/solr/handler/admin/CollectionsHandler.java
@@ -174,7 +174,7 @@ import org.apache.solr.handler.admin.api.BalanceReplicas;
import org.apache.solr.handler.admin.api.BalanceShardUnique;
import org.apache.solr.handler.admin.api.ClusterProperty;
import org.apache.solr.handler.admin.api.CollectionProperty;
-import org.apache.solr.handler.admin.api.CollectionStatusAPI;
+import org.apache.solr.handler.admin.api.CollectionStatus;
import org.apache.solr.handler.admin.api.CreateAlias;
import org.apache.solr.handler.admin.api.CreateCollection;
import org.apache.solr.handler.admin.api.CreateCollectionBackup;
@@ -539,11 +539,8 @@ public class CollectionsHandler extends RequestHandlerBase implements Permission
ColStatus.RAW_SIZE_SAMPLING_PERCENT_PROP,
ColStatus.SIZE_INFO_PROP);
- new ColStatus(
- h.coreContainer.getSolrClientCache(),
- h.coreContainer.getZkController().getZkStateReader().getClusterState(),
- new ZkNodeProps(props))
- .getColStatus(rsp.getValues());
+ CollectionStatus.populateColStatusData(
+ h.coreContainer, new ZkNodeProps(props), rsp.getValues());
return null;
}),
DELETE_OP(
@@ -1360,6 +1357,7 @@ public class CollectionsHandler extends RequestHandlerBase implements Permission
CreateReplica.class,
AddReplicaProperty.class,
BalanceShardUnique.class,
+ CollectionStatus.class,
CreateAlias.class,
CreateCollection.class,
CreateCollectionBackup.class,
@@ -1399,7 +1397,6 @@ public class CollectionsHandler extends RequestHandlerBase implements Permission
apis.addAll(AnnotatedApi.getApis(new ModifyCollectionAPI(this)));
apis.addAll(AnnotatedApi.getApis(new MoveReplicaAPI(this)));
apis.addAll(AnnotatedApi.getApis(new RebalanceLeadersAPI(this)));
- apis.addAll(AnnotatedApi.getApis(new CollectionStatusAPI(this)));
return apis;
}
diff --git a/solr/core/src/java/org/apache/solr/handler/admin/SegmentsInfoRequestHandler.java b/solr/core/src/java/org/apache/solr/handler/admin/SegmentsInfoRequestHandler.java
index 93cdf071a1c..fd1378b6597 100644
--- a/solr/core/src/java/org/apache/solr/handler/admin/SegmentsInfoRequestHandler.java
+++ b/solr/core/src/java/org/apache/solr/handler/admin/SegmentsInfoRequestHandler.java
@@ -16,57 +16,20 @@
*/
package org.apache.solr.handler.admin;
-import static org.apache.lucene.index.IndexOptions.DOCS;
-import static org.apache.lucene.index.IndexOptions.DOCS_AND_FREQS;
-import static org.apache.lucene.index.IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS;
-import static org.apache.solr.common.params.CommonParams.NAME;
-
-import java.io.IOException;
-import java.lang.invoke.MethodHandles;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Date;
+import java.util.Collection;
import java.util.List;
-import java.util.stream.Collectors;
-import org.apache.lucene.index.DocValuesType;
-import org.apache.lucene.index.FieldInfo;
-import org.apache.lucene.index.FieldInfos;
-import org.apache.lucene.index.FilterLeafReader;
-import org.apache.lucene.index.IndexOptions;
-import org.apache.lucene.index.IndexWriter;
-import org.apache.lucene.index.LeafMetaData;
-import org.apache.lucene.index.LeafReader;
-import org.apache.lucene.index.LeafReaderContext;
-import org.apache.lucene.index.MergePolicy;
-import org.apache.lucene.index.MergePolicy.MergeSpecification;
-import org.apache.lucene.index.MergePolicy.OneMerge;
-import org.apache.lucene.index.MergeTrigger;
-import org.apache.lucene.index.SegmentCommitInfo;
-import org.apache.lucene.index.SegmentInfos;
-import org.apache.lucene.index.SegmentReader;
-import org.apache.lucene.index.Terms;
-import org.apache.lucene.store.Directory;
-import org.apache.lucene.util.RamUsageEstimator;
-import org.apache.lucene.util.Version;
-import org.apache.solr.common.luke.FieldFlag;
-import org.apache.solr.common.util.Pair;
-import org.apache.solr.common.util.SimpleOrderedMap;
-import org.apache.solr.core.SolrCore;
+import org.apache.solr.api.JerseyResource;
+import org.apache.solr.client.api.model.SolrJerseyResponse;
+import org.apache.solr.common.params.SolrParams;
import org.apache.solr.handler.RequestHandlerBase;
+import org.apache.solr.handler.admin.api.GetSegmentData;
+import org.apache.solr.handler.api.V2ApiUtils;
import org.apache.solr.request.SolrQueryRequest;
import org.apache.solr.response.SolrQueryResponse;
-import org.apache.solr.schema.IndexSchema;
-import org.apache.solr.schema.SchemaField;
-import org.apache.solr.search.SolrIndexSearcher;
import org.apache.solr.security.AuthorizationContext;
-import org.apache.solr.update.SolrIndexWriter;
-import org.apache.solr.util.RefCounted;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
/** This handler exposes information about last commit generation segments */
public class SegmentsInfoRequestHandler extends RequestHandlerBase {
- private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
public static final String FIELD_INFO_PARAM = "fieldInfo";
public static final String CORE_INFO_PARAM = "coreInfo";
@@ -76,385 +39,22 @@ public class SegmentsInfoRequestHandler extends RequestHandlerBase {
public static final String RAW_SIZE_DETAILS_PARAM = "rawSizeDetails";
public static final String RAW_SIZE_SAMPLING_PERCENT_PARAM = "rawSizeSamplingPercent";
- private static final List<String> FI_LEGEND;
-
- static {
- FI_LEGEND =
- Arrays.asList(
- FieldFlag.INDEXED.toString(),
- FieldFlag.DOC_VALUES.toString(),
- "xxx - DocValues type",
- FieldFlag.TERM_VECTOR_STORED.toString(),
- FieldFlag.OMIT_NORMS.toString(),
- FieldFlag.OMIT_TF.toString(),
- FieldFlag.OMIT_POSITIONS.toString(),
- FieldFlag.STORE_OFFSETS_WITH_POSITIONS.toString(),
- "p - field has payloads",
- "s - field uses soft deletes",
- ":x:x:x - point data dim : index dim : num bytes");
- }
-
@Override
public void handleRequestBody(SolrQueryRequest req, SolrQueryResponse rsp) throws Exception {
- getSegmentsInfo(req, rsp);
- rsp.setHttpCaching(false);
- }
-
- private static final double GB = 1024.0 * 1024.0 * 1024.0;
-
- private void getSegmentsInfo(SolrQueryRequest req, SolrQueryResponse rsp) throws Exception {
- boolean withFieldInfo = req.getParams().getBool(FIELD_INFO_PARAM, false);
- boolean withCoreInfo = req.getParams().getBool(CORE_INFO_PARAM, false);
- boolean withSizeInfo = req.getParams().getBool(SIZE_INFO_PARAM, false);
- boolean withRawSizeInfo = req.getParams().getBool(RAW_SIZE_PARAM, false);
- boolean withRawSizeSummary = req.getParams().getBool(RAW_SIZE_SUMMARY_PARAM, false);
- boolean withRawSizeDetails = req.getParams().getBool(RAW_SIZE_DETAILS_PARAM, false);
- if (withRawSizeSummary || withRawSizeDetails) {
- withRawSizeInfo = true;
- }
- SolrIndexSearcher searcher = req.getSearcher();
-
- SegmentInfos infos = SegmentInfos.readLatestCommit(searcher.getIndexReader().directory());
-
- SimpleOrderedMap<Object> segmentInfos = new SimpleOrderedMap<>();
-
- SolrCore core = req.getCore();
- SimpleOrderedMap<Object> infosInfo = new SimpleOrderedMap<>();
- Version minVersion = infos.getMinSegmentLuceneVersion();
- if (minVersion != null) {
- infosInfo.add("minSegmentLuceneVersion", minVersion.toString());
- }
- Version commitVersion = infos.getCommitLuceneVersion();
- if (commitVersion != null) {
- infosInfo.add("commitLuceneVersion", commitVersion.toString());
- }
- infosInfo.add("numSegments", infos.size());
- infosInfo.add("segmentsFileName", infos.getSegmentsFileName());
- infosInfo.add("totalMaxDoc", infos.totalMaxDoc());
- infosInfo.add("userData", infos.userData);
- if (withCoreInfo) {
- SimpleOrderedMap<Object> coreInfo = new SimpleOrderedMap<>();
- infosInfo.add("core", coreInfo);
- coreInfo.add(
- "startTime", core.getStartTimeStamp().getTime() + "(" + core.getStartTimeStamp() + ")");
- coreInfo.add("dataDir", core.getDataDir());
- coreInfo.add("indexDir", core.getIndexDir());
- coreInfo.add("sizeInGB", (double) core.getIndexSize() / GB);
-
- RefCounted<IndexWriter> iwRef = core.getSolrCoreState().getIndexWriter(core);
- if (iwRef != null) {
- try {
- IndexWriter iw = iwRef.get();
- String iwConfigStr = iw.getConfig().toString();
- SimpleOrderedMap<Object> iwConfig = new SimpleOrderedMap<>();
- // meh ...
- String[] lines = iwConfigStr.split("\\n");
- for (String line : lines) {
- String[] parts = line.split("=");
- if (parts.length < 2) {
- continue;
- }
- iwConfig.add(parts[0], parts[1]);
- }
- coreInfo.add("indexWriterConfig", iwConfig);
- } finally {
- iwRef.decref();
- }
- }
- }
- SimpleOrderedMap<Object> segmentInfo;
- List<SegmentCommitInfo> sortable = new ArrayList<>(infos.asList());
- // Order by the number of live docs. The display is logarithmic so it is a little jumbled
- // visually
- sortable.sort(
- (s1, s2) -> (s2.info.maxDoc() - s2.getDelCount()) - (s1.info.maxDoc() - s1.getDelCount()));
-
- List<String> mergeCandidates = new ArrayList<>();
- SimpleOrderedMap<Object> runningMerges = getMergeInformation(req, infos, mergeCandidates);
- List<LeafReaderContext> leafContexts = searcher.getIndexReader().leaves();
- IndexSchema schema = req.getSchema();
- for (SegmentCommitInfo segmentCommitInfo : sortable) {
- segmentInfo =
- getSegmentInfo(segmentCommitInfo, withSizeInfo, withFieldInfo, leafContexts, schema);
- if (mergeCandidates.contains(segmentCommitInfo.info.name)) {
- segmentInfo.add("mergeCandidate", true);
- }
- segmentInfos.add((String) segmentInfo.get(NAME), segmentInfo);
- }
-
- rsp.add("info", infosInfo);
- if (runningMerges.size() > 0) {
- rsp.add("runningMerges", runningMerges);
- }
- if (withFieldInfo) {
- rsp.add("fieldInfoLegend", FI_LEGEND);
- }
- rsp.add("segments", segmentInfos);
- if (withRawSizeInfo) {
- IndexSizeEstimator estimator =
- new IndexSizeEstimator(
- searcher.getRawReader(), 20, 100, withRawSizeSummary, withRawSizeDetails);
- Object samplingPercentVal = req.getParams().get(RAW_SIZE_SAMPLING_PERCENT_PARAM);
- if (samplingPercentVal != null) {
- estimator.setSamplingPercent(Float.parseFloat(String.valueOf(samplingPercentVal)));
- }
- IndexSizeEstimator.Estimate estimate = estimator.estimate();
- SimpleOrderedMap<Object> estimateMap = new SimpleOrderedMap<>();
- // make the units more user-friendly
- estimateMap.add(IndexSizeEstimator.FIELDS_BY_SIZE, estimate.getHumanReadableFieldsBySize());
- estimateMap.add(IndexSizeEstimator.TYPES_BY_SIZE, estimate.getHumanReadableTypesBySize());
- if (estimate.getSummary() != null) {
- estimateMap.add(IndexSizeEstimator.SUMMARY, estimate.getSummary());
- }
- if (estimate.getDetails() != null) {
- estimateMap.add(IndexSizeEstimator.DETAILS, estimate.getDetails());
- }
- rsp.add("rawSize", estimateMap);
- }
- }
-
- private SimpleOrderedMap<Object> getSegmentInfo(
- SegmentCommitInfo segmentCommitInfo,
- boolean withSizeInfo,
- boolean withFieldInfos,
- List<LeafReaderContext> leafContexts,
- IndexSchema schema)
- throws IOException {
- SimpleOrderedMap<Object> segmentInfoMap = new SimpleOrderedMap<>();
-
- segmentInfoMap.add(NAME, segmentCommitInfo.info.name);
- segmentInfoMap.add("delCount", segmentCommitInfo.getDelCount());
- segmentInfoMap.add("softDelCount", segmentCommitInfo.getSoftDelCount());
- segmentInfoMap.add("hasFieldUpdates", segmentCommitInfo.hasFieldUpdates());
- segmentInfoMap.add("sizeInBytes", segmentCommitInfo.sizeInBytes());
- segmentInfoMap.add("size", segmentCommitInfo.info.maxDoc());
- Long timestamp = Long.parseLong(segmentCommitInfo.info.getDiagnostics().get("timestamp"));
- segmentInfoMap.add("age", new Date(timestamp));
- segmentInfoMap.add("source", segmentCommitInfo.info.getDiagnostics().get("source"));
- segmentInfoMap.add("version", segmentCommitInfo.info.getVersion().toString());
- // don't open a new SegmentReader - try to find the right one from the leaf contexts
- SegmentReader seg = null;
- for (LeafReaderContext lrc : leafContexts) {
- LeafReader leafReader = lrc.reader();
- leafReader = FilterLeafReader.unwrap(leafReader);
- if (leafReader instanceof SegmentReader sr) {
- if (sr.getSegmentInfo().info.equals(segmentCommitInfo.info)) {
- seg = sr;
- break;
- }
- }
- }
- if (seg != null) {
- LeafMetaData metaData = seg.getMetaData();
- if (metaData != null) {
- segmentInfoMap.add("createdVersionMajor", metaData.getCreatedVersionMajor());
- segmentInfoMap.add("minVersion", metaData.getMinVersion().toString());
- if (metaData.getSort() != null) {
- segmentInfoMap.add("sort", metaData.getSort().toString());
- }
- }
- }
- if (!segmentCommitInfo.info.getDiagnostics().isEmpty()) {
- segmentInfoMap.add("diagnostics", segmentCommitInfo.info.getDiagnostics());
- }
- if (!segmentCommitInfo.info.getAttributes().isEmpty()) {
- segmentInfoMap.add("attributes", segmentCommitInfo.info.getAttributes());
- }
- if (withSizeInfo) {
- Directory dir = segmentCommitInfo.info.dir;
- List<Pair<String, Long>> files =
- segmentCommitInfo.files().stream()
- .map(
- f -> {
- long size = -1;
- try {
- size = dir.fileLength(f);
- } catch (IOException e) {
- }
- return new Pair<String, Long>(f, size);
- })
- .sorted(
- (p1, p2) -> {
- if (p1.second() > p2.second()) {
- return -1;
- } else if (p1.second() < p2.second()) {
- return 1;
- } else {
- return 0;
- }
- })
- .collect(Collectors.toList());
- if (!files.isEmpty()) {
- SimpleOrderedMap<Object> topFiles = new SimpleOrderedMap<>();
- for (int i = 0; i < Math.min(files.size(), 5); i++) {
- Pair<String, Long> p = files.get(i);
- topFiles.add(p.first(), RamUsageEstimator.humanReadableUnits(p.second()));
- }
- segmentInfoMap.add("largestFiles", topFiles);
- }
- }
- if (withFieldInfos) {
- if (seg == null) {
- log.debug(
- "Skipping segment info - not available as a SegmentReader: {}", segmentCommitInfo);
- } else {
- FieldInfos fis = seg.getFieldInfos();
- SimpleOrderedMap<Object> fields = new SimpleOrderedMap<>();
- for (FieldInfo fi : fis) {
- fields.add(fi.name, getFieldInfo(seg, fi, schema));
- }
- segmentInfoMap.add("fields", fields);
- }
- }
-
- return segmentInfoMap;
- }
+ final SolrParams params = req.getParams();
+ final GetSegmentData segmentDataApi = new GetSegmentData(req.getCore(), req, rsp);
+ final SolrJerseyResponse response =
+ segmentDataApi.getSegmentData(
+ params.getBool(CORE_INFO_PARAM),
+ params.getBool(FIELD_INFO_PARAM),
+ params.getBool(RAW_SIZE_PARAM),
+ params.getBool(RAW_SIZE_SUMMARY_PARAM),
+ params.getBool(RAW_SIZE_DETAILS_PARAM),
+ params.getFloat(RAW_SIZE_SAMPLING_PERCENT_PARAM),
+ params.getBool(SIZE_INFO_PARAM));
+ V2ApiUtils.squashIntoSolrResponseWithoutHeader(rsp, response);
- private SimpleOrderedMap<Object> getFieldInfo(
- SegmentReader reader, FieldInfo fi, IndexSchema schema) {
- SimpleOrderedMap<Object> fieldFlags = new SimpleOrderedMap<>();
- StringBuilder flags = new StringBuilder();
- IndexOptions opts = fi.getIndexOptions();
- flags.append((opts != IndexOptions.NONE) ? FieldFlag.INDEXED.getAbbreviation() : '-');
- DocValuesType dvt = fi.getDocValuesType();
- if (dvt != DocValuesType.NONE) {
- flags.append(FieldFlag.DOC_VALUES.getAbbreviation());
- switch (dvt) {
- case NUMERIC:
- flags.append("num");
- break;
- case BINARY:
- flags.append("bin");
- break;
- case SORTED:
- flags.append("srt");
- break;
- case SORTED_NUMERIC:
- flags.append("srn");
- break;
- case SORTED_SET:
- flags.append("srs");
- break;
- default:
- flags.append("???"); // should not happen
- }
- } else {
- flags.append("----");
- }
- flags.append((fi.hasVectors()) ? FieldFlag.TERM_VECTOR_STORED.getAbbreviation() : '-');
- flags.append((fi.omitsNorms()) ? FieldFlag.OMIT_NORMS.getAbbreviation() : '-');
-
- flags.append((DOCS == opts) ? FieldFlag.OMIT_TF.getAbbreviation() : '-');
-
- flags.append((DOCS_AND_FREQS == opts) ? FieldFlag.OMIT_POSITIONS.getAbbreviation() : '-');
-
- flags.append(
- (DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS == opts)
- ? FieldFlag.STORE_OFFSETS_WITH_POSITIONS.getAbbreviation()
- : '-');
-
- flags.append((fi.hasPayloads() ? "p" : "-"));
- flags.append((fi.isSoftDeletesField() ? "s" : "-"));
- if (fi.getPointDimensionCount() > 0 || fi.getPointIndexDimensionCount() > 0) {
- flags.append(":");
- flags.append(fi.getPointDimensionCount()).append(':');
- flags.append(fi.getPointIndexDimensionCount()).append(':');
- flags.append(fi.getPointNumBytes());
- }
-
- fieldFlags.add("flags", flags.toString());
- try {
- Terms terms = reader.terms(fi.name);
- if (terms != null) {
- fieldFlags.add("docCount", terms.getDocCount());
- fieldFlags.add("termCount", terms.size());
- fieldFlags.add("sumDocFreq", terms.getSumDocFreq());
- fieldFlags.add("sumTotalTermFreq", terms.getSumTotalTermFreq());
- }
- } catch (Exception e) {
- log.debug("Exception retrieving term stats for field {}", fi.name, e);
- }
-
- // probably too much detail?
- // Map<String, String> attributes = fi.attributes();
- // if (!attributes.isEmpty()) {
- // fieldFlags.add("attributes", attributes);
- // }
-
- // check compliance of the index with the current schema
- SchemaField sf = schema.getFieldOrNull(fi.name);
- boolean hasPoints = fi.getPointDimensionCount() > 0 || fi.getPointIndexDimensionCount() > 0;
-
- if (sf != null) {
- fieldFlags.add("schemaType", sf.getType().getTypeName());
- SimpleOrderedMap<Object> nonCompliant = new SimpleOrderedMap<>();
- if (sf.hasDocValues()
- && fi.getDocValuesType() == DocValuesType.NONE
- && fi.getIndexOptions() != IndexOptions.NONE) {
- nonCompliant.add(
- "docValues", "schema=" + sf.getType().getUninversionType(sf) + ", segment=false");
- }
- if (!sf.hasDocValues() && fi.getDocValuesType() != DocValuesType.NONE) {
- nonCompliant.add("docValues", "schema=false, segment=" + fi.getDocValuesType().toString());
- }
- if (!sf.isPolyField()) { // difficult to find all sub-fields in a general way
- if (sf.indexed() != ((fi.getIndexOptions() != IndexOptions.NONE) || hasPoints)) {
- nonCompliant.add(
- "indexed", "schema=" + sf.indexed() + ", segment=" + fi.getIndexOptions());
- }
- }
- if (!hasPoints && (sf.omitNorms() != fi.omitsNorms())) {
- nonCompliant.add("omitNorms", "schema=" + sf.omitNorms() + ", segment=" + fi.omitsNorms());
- }
- if (sf.storeTermVector() != fi.hasVectors()) {
- nonCompliant.add(
- "termVectors", "schema=" + sf.storeTermVector() + ", segment=" + fi.hasVectors());
- }
- if (sf.storeOffsetsWithPositions()
- != (fi.getIndexOptions() == IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS)) {
- nonCompliant.add(
- "storeOffsetsWithPositions",
- "schema=" + sf.storeOffsetsWithPositions() + ", segment=" + fi.getIndexOptions());
- }
-
- if (nonCompliant.size() > 0) {
- nonCompliant.add("schemaField", sf.toString());
- fieldFlags.add("nonCompliant", nonCompliant);
- }
- } else {
- fieldFlags.add("schemaType", "(UNKNOWN)");
- }
- return fieldFlags;
- }
-
- // returns a map of currently running merges, and populates a list of candidate segments for merge
- private SimpleOrderedMap<Object> getMergeInformation(
- SolrQueryRequest req, SegmentInfos infos, List<String> mergeCandidates) throws IOException {
- SimpleOrderedMap<Object> result = new SimpleOrderedMap<>();
- RefCounted<IndexWriter> refCounted =
- req.getCore().getSolrCoreState().getIndexWriter(req.getCore());
- try {
- IndexWriter indexWriter = refCounted.get();
- if (indexWriter instanceof SolrIndexWriter) {
- result.addAll(((SolrIndexWriter) indexWriter).getRunningMerges());
- }
- // get chosen merge policy
- MergePolicy mp = indexWriter.getConfig().getMergePolicy();
- // Find merges
- MergeSpecification findMerges = mp.findMerges(MergeTrigger.EXPLICIT, infos, indexWriter);
- if (findMerges != null && findMerges.merges != null && findMerges.merges.size() > 0) {
- for (OneMerge merge : findMerges.merges) {
- // TODO: add merge grouping
- for (SegmentCommitInfo mergeSegmentInfo : merge.segments) {
- mergeCandidates.add(mergeSegmentInfo.info.name);
- }
- }
- }
-
- return result;
- } finally {
- refCounted.decref();
- }
+ rsp.setHttpCaching(false);
}
@Override
@@ -471,4 +71,14 @@ public class SegmentsInfoRequestHandler extends RequestHandlerBase {
public Name getPermissionName(AuthorizationContext request) {
return Name.METRICS_READ_PERM;
}
+
+ @Override
+ public Boolean registerV2() {
+ return Boolean.TRUE;
+ }
+
+ @Override
+ public Collection<Class<? extends JerseyResource>> getJerseyResources() {
+ return List.of(GetSegmentData.class);
+ }
}
diff --git a/solr/core/src/java/org/apache/solr/handler/admin/api/CollectionStatus.java b/solr/core/src/java/org/apache/solr/handler/admin/api/CollectionStatus.java
new file mode 100644
index 00000000000..f80b6363071
--- /dev/null
+++ b/solr/core/src/java/org/apache/solr/handler/admin/api/CollectionStatus.java
@@ -0,0 +1,97 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.solr.handler.admin.api;
+
+import static org.apache.solr.common.cloud.ZkStateReader.COLLECTION_PROP;
+
+import jakarta.inject.Inject;
+import org.apache.solr.client.api.endpoint.CollectionStatusApi;
+import org.apache.solr.client.api.model.CollectionStatusResponse;
+import org.apache.solr.common.cloud.ZkNodeProps;
+import org.apache.solr.common.params.ModifiableSolrParams;
+import org.apache.solr.common.util.NamedList;
+import org.apache.solr.core.CoreContainer;
+import org.apache.solr.handler.admin.ColStatus;
+import org.apache.solr.jersey.PermissionName;
+import org.apache.solr.jersey.SolrJacksonMapper;
+import org.apache.solr.request.SolrQueryRequest;
+import org.apache.solr.response.SolrQueryResponse;
+import org.apache.solr.security.PermissionNameProvider;
+
+/** V2 API implementation for {@link CollectionStatusApi}. */
+public class CollectionStatus extends AdminAPIBase implements CollectionStatusApi {
+
+ @Inject
+ public CollectionStatus(
+ CoreContainer coreContainer,
+ SolrQueryRequest solrQueryRequest,
+ SolrQueryResponse solrQueryResponse) {
+ super(coreContainer, solrQueryRequest, solrQueryResponse);
+ }
+
+ @Override
+ @PermissionName(PermissionNameProvider.Name.COLL_READ_PERM)
+ public CollectionStatusResponse getCollectionStatus(
+ String collectionName,
+ Boolean coreInfo,
+ Boolean segments,
+ Boolean fieldInfo,
+ Boolean rawSize,
+ Boolean rawSizeSummary,
+ Boolean rawSizeDetails,
+ Float rawSizeSamplingPercent,
+ Boolean sizeInfo)
+ throws Exception {
+ recordCollectionForLogAndTracing(collectionName, solrQueryRequest);
+
+ final var params = new ModifiableSolrParams();
+ params.set(COLLECTION_PROP, collectionName);
+ params.setNonNull(ColStatus.CORE_INFO_PROP, coreInfo);
+ params.setNonNull(ColStatus.SEGMENTS_PROP, segments);
+ params.setNonNull(ColStatus.FIELD_INFO_PROP, fieldInfo);
+ params.setNonNull(ColStatus.RAW_SIZE_PROP, rawSize);
+ params.setNonNull(ColStatus.RAW_SIZE_SUMMARY_PROP, rawSizeSummary);
+ params.setNonNull(ColStatus.RAW_SIZE_DETAILS_PROP, rawSizeDetails);
+ params.setNonNull(ColStatus.RAW_SIZE_SAMPLING_PERCENT_PROP, rawSizeSamplingPercent);
+ params.setNonNull(ColStatus.SIZE_INFO_PROP, sizeInfo);
+
+ final var nlResponse = new NamedList<>();
+ populateColStatusData(coreContainer, new ZkNodeProps(params), nlResponse);
+
+ // The v2 API does not support requesting the status of multiple collections simultaneously as its
+ // v1 counterpart does, and its response looks slightly different as a result. Primarily, the
+ // v2 response eschews a level of nesting necessitated by the multi-collection nature of v1.
+ // These tweaks are made below before returning.
+ final var colStatusResponse =
+ SolrJacksonMapper.getObjectMapper()
+ .convertValue(nlResponse.get(collectionName), CollectionStatusResponse.class);
+ colStatusResponse.name = collectionName;
+ return colStatusResponse;
+ }
+
+ // TODO Modify ColStatus to produce a CollectionStatusResponse instead of a NL
+ public static void populateColStatusData(
+ CoreContainer coreContainer, ZkNodeProps params, NamedList<Object> colStatusSink) {
+ final var colStatusAssembler =
+ new ColStatus(
+ coreContainer.getSolrClientCache(),
+ coreContainer.getZkController().getZkStateReader().getClusterState(),
+ params);
+ colStatusAssembler.getColStatus(colStatusSink);
+ }
+}
diff --git a/solr/core/src/java/org/apache/solr/handler/admin/api/CollectionStatusAPI.java b/solr/core/src/java/org/apache/solr/handler/admin/api/CollectionStatusAPI.java
deleted file mode 100644
index 4b7eabe7226..00000000000
--- a/solr/core/src/java/org/apache/solr/handler/admin/api/CollectionStatusAPI.java
+++ /dev/null
@@ -1,65 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.solr.handler.admin.api;
-
-import static org.apache.solr.client.solrj.SolrRequest.METHOD.GET;
-import static org.apache.solr.common.params.CommonParams.ACTION;
-import static org.apache.solr.common.params.CoreAdminParams.COLLECTION;
-import static org.apache.solr.handler.ClusterAPI.wrapParams;
-import static org.apache.solr.security.PermissionNameProvider.Name.COLL_READ_PERM;
-
-import java.lang.invoke.MethodHandles;
-import org.apache.solr.api.EndPoint;
-import org.apache.solr.common.cloud.ZkStateReader;
-import org.apache.solr.common.params.CollectionParams;
-import org.apache.solr.handler.admin.CollectionsHandler;
-import org.apache.solr.request.SolrQueryRequest;
-import org.apache.solr.response.SolrQueryResponse;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-/**
- * V2 API for displaying basic information about a single collection.
- *
- * <p>This API (GET /v2/collections/collectionName) is analogous to the v1
- * /admin/collections?action=CLUSTERSTATUS&collection=collectionName command.
- */
-public class CollectionStatusAPI {
- private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
-
- private final CollectionsHandler collectionsHandler;
-
- public CollectionStatusAPI(CollectionsHandler collectionsHandler) {
- this.collectionsHandler = collectionsHandler;
- }
-
- @EndPoint(
- path = {"/c/{collection}", "/collections/{collection}"},
- method = GET,
- permission = COLL_READ_PERM)
- public void getCollectionStatus(SolrQueryRequest req, SolrQueryResponse rsp) throws Exception {
- req =
- wrapParams(
- req, // 'req' can have a 'shard' param
- ACTION,
- CollectionParams.CollectionAction.CLUSTERSTATUS.toString(),
- COLLECTION,
- req.getPathTemplateValues().get(ZkStateReader.COLLECTION_PROP));
- collectionsHandler.handleRequestBody(req, rsp);
- }
-}
diff --git a/solr/core/src/java/org/apache/solr/handler/admin/SegmentsInfoRequestHandler.java b/solr/core/src/java/org/apache/solr/handler/admin/api/GetSegmentData.java
similarity index 53%
copy from solr/core/src/java/org/apache/solr/handler/admin/SegmentsInfoRequestHandler.java
copy to solr/core/src/java/org/apache/solr/handler/admin/api/GetSegmentData.java
index 93cdf071a1c..ceec55ea33f 100644
--- a/solr/core/src/java/org/apache/solr/handler/admin/SegmentsInfoRequestHandler.java
+++ b/solr/core/src/java/org/apache/solr/handler/admin/api/GetSegmentData.java
@@ -14,19 +14,21 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package org.apache.solr.handler.admin;
+package org.apache.solr.handler.admin.api;
import static org.apache.lucene.index.IndexOptions.DOCS;
import static org.apache.lucene.index.IndexOptions.DOCS_AND_FREQS;
import static org.apache.lucene.index.IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS;
-import static org.apache.solr.common.params.CommonParams.NAME;
+import jakarta.inject.Inject;
import java.io.IOException;
import java.lang.invoke.MethodHandles;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Date;
+import java.util.HashMap;
import java.util.List;
+import java.util.Map;
import java.util.stream.Collectors;
import org.apache.lucene.index.DocValuesType;
import org.apache.lucene.index.FieldInfo;
@@ -37,9 +39,8 @@ import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.LeafMetaData;
import org.apache.lucene.index.LeafReader;
import org.apache.lucene.index.LeafReaderContext;
+import org.apache.lucene.index.LiveIndexWriterConfig;
import org.apache.lucene.index.MergePolicy;
-import org.apache.lucene.index.MergePolicy.MergeSpecification;
-import org.apache.lucene.index.MergePolicy.OneMerge;
import org.apache.lucene.index.MergeTrigger;
import org.apache.lucene.index.SegmentCommitInfo;
import org.apache.lucene.index.SegmentInfos;
@@ -48,121 +49,121 @@ import org.apache.lucene.index.Terms;
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.RamUsageEstimator;
import org.apache.lucene.util.Version;
+import org.apache.solr.api.JerseyResource;
+import org.apache.solr.client.api.endpoint.SegmentsApi;
+import org.apache.solr.client.api.model.GetSegmentDataResponse;
import org.apache.solr.common.luke.FieldFlag;
import org.apache.solr.common.util.Pair;
-import org.apache.solr.common.util.SimpleOrderedMap;
import org.apache.solr.core.SolrCore;
-import org.apache.solr.handler.RequestHandlerBase;
+import org.apache.solr.handler.admin.IndexSizeEstimator;
+import org.apache.solr.jersey.PermissionName;
+import org.apache.solr.jersey.SolrJacksonMapper;
import org.apache.solr.request.SolrQueryRequest;
import org.apache.solr.response.SolrQueryResponse;
import org.apache.solr.schema.IndexSchema;
import org.apache.solr.schema.SchemaField;
import org.apache.solr.search.SolrIndexSearcher;
-import org.apache.solr.security.AuthorizationContext;
+import org.apache.solr.security.PermissionNameProvider;
import org.apache.solr.update.SolrIndexWriter;
import org.apache.solr.util.RefCounted;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
-/** This handler exposes information about last commit generation segments */
-public class SegmentsInfoRequestHandler extends RequestHandlerBase {
+/**
+ * V2 API implementation for {@link SegmentsApi}
+ *
+ * <p>Equivalent to the v1 /solr/coreName/admin/segments endpoint.
+ */
+public class GetSegmentData extends JerseyResource implements SegmentsApi {
+
private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+ private static final double GB = 1024.0 * 1024.0 * 1024.0;
- public static final String FIELD_INFO_PARAM = "fieldInfo";
- public static final String CORE_INFO_PARAM = "coreInfo";
- public static final String SIZE_INFO_PARAM = "sizeInfo";
- public static final String RAW_SIZE_PARAM = "rawSize";
- public static final String RAW_SIZE_SUMMARY_PARAM = "rawSizeSummary";
- public static final String RAW_SIZE_DETAILS_PARAM = "rawSizeDetails";
- public static final String RAW_SIZE_SAMPLING_PERCENT_PARAM = "rawSizeSamplingPercent";
-
- private static final List<String> FI_LEGEND;
-
- static {
- FI_LEGEND =
- Arrays.asList(
- FieldFlag.INDEXED.toString(),
- FieldFlag.DOC_VALUES.toString(),
- "xxx - DocValues type",
- FieldFlag.TERM_VECTOR_STORED.toString(),
- FieldFlag.OMIT_NORMS.toString(),
- FieldFlag.OMIT_TF.toString(),
- FieldFlag.OMIT_POSITIONS.toString(),
- FieldFlag.STORE_OFFSETS_WITH_POSITIONS.toString(),
- "p - field has payloads",
- "s - field uses soft deletes",
- ":x:x:x - point data dim : index dim : num bytes");
+ private static final List<String> FI_LEGEND =
+ Arrays.asList(
+ FieldFlag.INDEXED.toString(),
+ FieldFlag.DOC_VALUES.toString(),
+ "xxx - DocValues type",
+ FieldFlag.TERM_VECTOR_STORED.toString(),
+ FieldFlag.OMIT_NORMS.toString(),
+ FieldFlag.OMIT_TF.toString(),
+ FieldFlag.OMIT_POSITIONS.toString(),
+ FieldFlag.STORE_OFFSETS_WITH_POSITIONS.toString(),
+ "p - field has payloads",
+ "s - field uses soft deletes",
+ ":x:x:x - point data dim : index dim : num bytes");
+
+ protected final SolrCore solrCore;
+ protected final SolrQueryRequest solrQueryRequest;
+ protected final SolrQueryResponse solrQueryResponse;
+
+ @Inject
+ public GetSegmentData(SolrCore solrCore, SolrQueryRequest req, SolrQueryResponse rsp) {
+ this.solrCore = solrCore;
+ this.solrQueryRequest = req;
+ this.solrQueryResponse = rsp;
}
@Override
- public void handleRequestBody(SolrQueryRequest req, SolrQueryResponse rsp) throws Exception {
- getSegmentsInfo(req, rsp);
- rsp.setHttpCaching(false);
- }
-
- private static final double GB = 1024.0 * 1024.0 * 1024.0;
-
- private void getSegmentsInfo(SolrQueryRequest req, SolrQueryResponse rsp) throws Exception {
- boolean withFieldInfo = req.getParams().getBool(FIELD_INFO_PARAM, false);
- boolean withCoreInfo = req.getParams().getBool(CORE_INFO_PARAM, false);
- boolean withSizeInfo = req.getParams().getBool(SIZE_INFO_PARAM, false);
- boolean withRawSizeInfo = req.getParams().getBool(RAW_SIZE_PARAM, false);
- boolean withRawSizeSummary = req.getParams().getBool(RAW_SIZE_SUMMARY_PARAM, false);
- boolean withRawSizeDetails = req.getParams().getBool(RAW_SIZE_DETAILS_PARAM, false);
+ @PermissionName(PermissionNameProvider.Name.METRICS_READ_PERM)
+ public GetSegmentDataResponse getSegmentData(
+ Boolean coreInfo,
+ Boolean fieldInfo,
+ Boolean rawSize,
+ Boolean rawSizeSummary,
+ Boolean rawSizeDetails,
+ Float rawSizeSamplingPercent,
+ Boolean sizeInfo)
+ throws Exception {
+ boolean withFieldInfo = Boolean.TRUE.equals(fieldInfo);
+ boolean withCoreInfo = Boolean.TRUE.equals(coreInfo);
+ boolean withSizeInfo = Boolean.TRUE.equals(sizeInfo);
+ boolean withRawSizeInfo = Boolean.TRUE.equals(rawSize);
+ boolean withRawSizeSummary = Boolean.TRUE.equals(rawSizeSummary);
+ boolean withRawSizeDetails = Boolean.TRUE.equals(rawSizeDetails);
if (withRawSizeSummary || withRawSizeDetails) {
withRawSizeInfo = true;
}
- SolrIndexSearcher searcher = req.getSearcher();
+ SolrIndexSearcher searcher = solrQueryRequest.getSearcher();
+ SolrCore core = solrQueryRequest.getCore();
- SegmentInfos infos = SegmentInfos.readLatestCommit(searcher.getIndexReader().directory());
+ final var response = new GetSegmentDataResponse();
- SimpleOrderedMap<Object> segmentInfos = new SimpleOrderedMap<>();
-
- SolrCore core = req.getCore();
- SimpleOrderedMap<Object> infosInfo = new SimpleOrderedMap<>();
+ SegmentInfos infos = SegmentInfos.readLatestCommit(searcher.getIndexReader().directory());
+ response.info = new GetSegmentDataResponse.SegmentSummary();
Version minVersion = infos.getMinSegmentLuceneVersion();
if (minVersion != null) {
- infosInfo.add("minSegmentLuceneVersion", minVersion.toString());
+ response.info.minSegmentLuceneVersion = minVersion.toString();
}
Version commitVersion = infos.getCommitLuceneVersion();
if (commitVersion != null) {
- infosInfo.add("commitLuceneVersion", commitVersion.toString());
+ response.info.commitLuceneVersion = commitVersion.toString();
}
- infosInfo.add("numSegments", infos.size());
- infosInfo.add("segmentsFileName", infos.getSegmentsFileName());
- infosInfo.add("totalMaxDoc", infos.totalMaxDoc());
- infosInfo.add("userData", infos.userData);
+ response.info.numSegments = infos.size();
+ response.info.segmentsFileName = infos.getSegmentsFileName();
+ response.info.totalMaxDoc = infos.totalMaxDoc();
+ response.info.userData = infos.userData;
+
if (withCoreInfo) {
- SimpleOrderedMap<Object> coreInfo = new SimpleOrderedMap<>();
- infosInfo.add("core", coreInfo);
- coreInfo.add(
- "startTime", core.getStartTimeStamp().getTime() + "(" +
core.getStartTimeStamp() + ")");
- coreInfo.add("dataDir", core.getDataDir());
- coreInfo.add("indexDir", core.getIndexDir());
- coreInfo.add("sizeInGB", (double) core.getIndexSize() / GB);
+ final var coreSummary = new GetSegmentDataResponse.CoreSummary();
+ response.info.core = coreSummary;
+ coreSummary.startTime =
+ core.getStartTimeStamp().getTime() + "(" + core.getStartTimeStamp() + ")";
+ coreSummary.dataDir = core.getDataDir();
+ coreSummary.indexDir = core.getIndexDir();
+ coreSummary.sizeInGB = (double) core.getIndexSize() / GB;
RefCounted<IndexWriter> iwRef = core.getSolrCoreState().getIndexWriter(core);
if (iwRef != null) {
try {
IndexWriter iw = iwRef.get();
- String iwConfigStr = iw.getConfig().toString();
- SimpleOrderedMap<Object> iwConfig = new SimpleOrderedMap<>();
- // meh ...
- String[] lines = iwConfigStr.split("\\n");
- for (String line : lines) {
- String[] parts = line.split("=");
- if (parts.length < 2) {
- continue;
- }
- iwConfig.add(parts[0], parts[1]);
- }
- coreInfo.add("indexWriterConfig", iwConfig);
+ coreSummary.indexWriterConfig = convertIndexWriterConfigToResponse(iw.getConfig());
} finally {
iwRef.decref();
}
}
}
- SimpleOrderedMap<Object> segmentInfo;
+
List<SegmentCommitInfo> sortable = new ArrayList<>(infos.asList());
// Order by the number of live docs. The display is logarithmic so it is a little jumbled
// visually
@@ -170,68 +171,137 @@ public class SegmentsInfoRequestHandler extends RequestHandlerBase {
(s1, s2) -> (s2.info.maxDoc() - s2.getDelCount()) - (s1.info.maxDoc() - s1.getDelCount()));
List<String> mergeCandidates = new ArrayList<>();
- SimpleOrderedMap<Object> runningMerges = getMergeInformation(req, infos, mergeCandidates);
+ final var runningMerges = getMergeInformation(solrQueryRequest, infos, mergeCandidates);
List<LeafReaderContext> leafContexts = searcher.getIndexReader().leaves();
- IndexSchema schema = req.getSchema();
+ IndexSchema schema = solrQueryRequest.getSchema();
+ response.segments = new HashMap<>();
for (SegmentCommitInfo segmentCommitInfo : sortable) {
- segmentInfo =
+ final var singleSegmentData =
getSegmentInfo(segmentCommitInfo, withSizeInfo, withFieldInfo, leafContexts, schema);
if (mergeCandidates.contains(segmentCommitInfo.info.name)) {
- segmentInfo.add("mergeCandidate", true);
+ singleSegmentData.mergeCandidate = true;
}
- segmentInfos.add((String) segmentInfo.get(NAME), segmentInfo);
+ response.segments.put(singleSegmentData.name, singleSegmentData);
}
- rsp.add("info", infosInfo);
if (runningMerges.size() > 0) {
- rsp.add("runningMerges", runningMerges);
+ response.runningMerges = runningMerges;
}
if (withFieldInfo) {
- rsp.add("fieldInfoLegend", FI_LEGEND);
+ response.fieldInfoLegend = FI_LEGEND;
}
- rsp.add("segments", segmentInfos);
+
if (withRawSizeInfo) {
IndexSizeEstimator estimator =
new IndexSizeEstimator(
searcher.getRawReader(), 20, 100, withRawSizeSummary, withRawSizeDetails);
- Object samplingPercentVal = req.getParams().get(RAW_SIZE_SAMPLING_PERCENT_PARAM);
- if (samplingPercentVal != null) {
- estimator.setSamplingPercent(Float.parseFloat(String.valueOf(samplingPercentVal)));
+ if (rawSizeSamplingPercent != null) {
+ estimator.setSamplingPercent(rawSizeSamplingPercent);
}
IndexSizeEstimator.Estimate estimate = estimator.estimate();
- SimpleOrderedMap<Object> estimateMap = new SimpleOrderedMap<>();
+ final var rawSizeResponse = new GetSegmentDataResponse.RawSize();
// make the units more user-friendly
- estimateMap.add(IndexSizeEstimator.FIELDS_BY_SIZE, estimate.getHumanReadableFieldsBySize());
- estimateMap.add(IndexSizeEstimator.TYPES_BY_SIZE, estimate.getHumanReadableTypesBySize());
+ rawSizeResponse.fieldsBySize = estimate.getHumanReadableFieldsBySize();
+ rawSizeResponse.typesBySize = estimate.getHumanReadableTypesBySize();
if (estimate.getSummary() != null) {
- estimateMap.add(IndexSizeEstimator.SUMMARY, estimate.getSummary());
+ rawSizeResponse.summary = estimate.getSummary();
}
if (estimate.getDetails() != null) {
- estimateMap.add(IndexSizeEstimator.DETAILS, estimate.getDetails());
+ rawSizeResponse.details = estimate.getDetails();
}
- rsp.add("rawSize", estimateMap);
+ response.rawSize = rawSizeResponse;
}
+
+ return response;
}
- private SimpleOrderedMap<Object> getSegmentInfo(
+ /**
+ * Converts Lucene's IndexWriter configuration object into a response type fit for serialization
+ *
+ * <p>Based on {@link LiveIndexWriterConfig#toString()} for legacy reasons.
+ *
+ * @param iwConfig the Lucene configuration object to convert
+ */
+ private GetSegmentDataResponse.IndexWriterConfigSummary convertIndexWriterConfigToResponse(
+ LiveIndexWriterConfig iwConfig) {
+ final var iwConfigResponse = new GetSegmentDataResponse.IndexWriterConfigSummary();
+ iwConfigResponse.analyzer =
+ iwConfig.getAnalyzer() != null ? iwConfig.getAnalyzer().getClass().getName() : "null";
+ iwConfigResponse.ramBufferSizeMB = iwConfig.getRAMBufferSizeMB();
+ iwConfigResponse.maxBufferedDocs = iwConfig.getMaxBufferedDocs();
+ iwConfigResponse.mergedSegmentWarmer = String.valueOf(iwConfig.getMergedSegmentWarmer());
+ iwConfigResponse.delPolicy = iwConfig.getIndexDeletionPolicy().getClass().getName();
+ iwConfigResponse.commit = String.valueOf(iwConfig.getIndexCommit());
+ iwConfigResponse.openMode = String.valueOf(iwConfig.getOpenMode());
+ iwConfigResponse.similarity = iwConfig.getSimilarity().getClass().getName();
+ iwConfigResponse.mergeScheduler = String.valueOf(iwConfig.getMergeScheduler());
+ iwConfigResponse.codec = String.valueOf(iwConfig.getCodec());
+ iwConfigResponse.infoStream = iwConfig.getInfoStream().getClass().getName();
+ iwConfigResponse.mergePolicy = String.valueOf(iwConfig.getMergePolicy());
+ iwConfigResponse.readerPooling = iwConfig.getReaderPooling();
+ iwConfigResponse.perThreadHardLimitMB = iwConfig.getRAMPerThreadHardLimitMB();
+ iwConfigResponse.useCompoundFile = iwConfig.getUseCompoundFile();
+ iwConfigResponse.commitOnClose = iwConfig.getCommitOnClose();
+ iwConfigResponse.indexSort = String.valueOf(iwConfig.getIndexSort());
+ iwConfigResponse.checkPendingFlushOnUpdate = iwConfig.isCheckPendingFlushOnUpdate();
+ iwConfigResponse.softDeletesField = iwConfig.getSoftDeletesField();
+ iwConfigResponse.maxFullFlushMergeWaitMillis = iwConfig.getMaxFullFlushMergeWaitMillis();
+ iwConfigResponse.leafSorter = String.valueOf(iwConfig.getLeafSorter());
+ iwConfigResponse.eventListener = String.valueOf(iwConfig.getIndexWriterEventListener());
+ iwConfigResponse.parentField = iwConfig.getParentField();
+ return iwConfigResponse;
+ }
+
+ // returns a map of currently running merges, and populates a list of candidate segments for merge
+ private Map<String, Object> getMergeInformation(
+ SolrQueryRequest req, SegmentInfos infos, List<String> mergeCandidates) throws IOException {
+ final var result = new HashMap<String, Object>();
+ RefCounted<IndexWriter> refCounted =
+ req.getCore().getSolrCoreState().getIndexWriter(req.getCore());
+ try {
+ IndexWriter indexWriter = refCounted.get();
+ if (indexWriter instanceof SolrIndexWriter) {
+ result.putAll(((SolrIndexWriter) indexWriter).getRunningMerges());
+ }
+ // get chosen merge policy
+ MergePolicy mp = indexWriter.getConfig().getMergePolicy();
+ // Find merges
+ MergePolicy.MergeSpecification findMerges =
+ mp.findMerges(MergeTrigger.EXPLICIT, infos, indexWriter);
+ if (findMerges != null && findMerges.merges != null && findMerges.merges.size() > 0) {
+ for (MergePolicy.OneMerge merge : findMerges.merges) {
+ // TODO: add merge grouping
+ for (SegmentCommitInfo mergeSegmentInfo : merge.segments) {
+ mergeCandidates.add(mergeSegmentInfo.info.name);
+ }
+ }
+ }
+
+ return result;
+ } finally {
+ refCounted.decref();
+ }
+ }
+
+ private GetSegmentDataResponse.SingleSegmentData getSegmentInfo(
SegmentCommitInfo segmentCommitInfo,
boolean withSizeInfo,
boolean withFieldInfos,
List<LeafReaderContext> leafContexts,
IndexSchema schema)
throws IOException {
- SimpleOrderedMap<Object> segmentInfoMap = new SimpleOrderedMap<>();
-
- segmentInfoMap.add(NAME, segmentCommitInfo.info.name);
- segmentInfoMap.add("delCount", segmentCommitInfo.getDelCount());
- segmentInfoMap.add("softDelCount", segmentCommitInfo.getSoftDelCount());
- segmentInfoMap.add("hasFieldUpdates", segmentCommitInfo.hasFieldUpdates());
- segmentInfoMap.add("sizeInBytes", segmentCommitInfo.sizeInBytes());
- segmentInfoMap.add("size", segmentCommitInfo.info.maxDoc());
+ final var segmentInfo = new GetSegmentDataResponse.SingleSegmentData();
+ segmentInfo.name = segmentCommitInfo.info.name;
+ segmentInfo.delCount = segmentCommitInfo.getDelCount();
+ segmentInfo.softDelCount = segmentCommitInfo.getSoftDelCount();
+ segmentInfo.hasFieldUpdates = segmentCommitInfo.hasFieldUpdates();
+ segmentInfo.sizeInBytes = segmentCommitInfo.sizeInBytes();
+ segmentInfo.size = segmentCommitInfo.info.maxDoc();
Long timestamp = Long.parseLong(segmentCommitInfo.info.getDiagnostics().get("timestamp"));
- segmentInfoMap.add("age", new Date(timestamp));
- segmentInfoMap.add("source",
segmentCommitInfo.info.getDiagnostics().get("source"));
- segmentInfoMap.add("version",
segmentCommitInfo.info.getVersion().toString());
+ segmentInfo.age = new Date(timestamp);
+ segmentInfo.source = segmentCommitInfo.info.getDiagnostics().get("source");
+ segmentInfo.version = segmentCommitInfo.info.getVersion().toString();
+
// don't open a new SegmentReader - try to find the right one from the leaf contexts
SegmentReader seg = null;
for (LeafReaderContext lrc : leafContexts) {
@@ -247,18 +317,23 @@ public class SegmentsInfoRequestHandler extends RequestHandlerBase {
if (seg != null) {
LeafMetaData metaData = seg.getMetaData();
if (metaData != null) {
- segmentInfoMap.add("createdVersionMajor",
metaData.getCreatedVersionMajor());
- segmentInfoMap.add("minVersion", metaData.getMinVersion().toString());
+ segmentInfo.createdVersionMajor = metaData.getCreatedVersionMajor();
+ segmentInfo.minVersion = metaData.getMinVersion().toString();
if (metaData.getSort() != null) {
- segmentInfoMap.add("sort", metaData.getSort().toString());
+ segmentInfo.sort = metaData.getSort().toString();
}
}
}
+
if (!segmentCommitInfo.info.getDiagnostics().isEmpty()) {
- segmentInfoMap.add("diagnostics",
segmentCommitInfo.info.getDiagnostics());
+ segmentInfo.diagnostics =
+ SolrJacksonMapper.getObjectMapper()
+ .convertValue(
+ segmentCommitInfo.info.getDiagnostics(),
+ GetSegmentDataResponse.SegmentDiagnosticInfo.class);
}
if (!segmentCommitInfo.info.getAttributes().isEmpty()) {
- segmentInfoMap.add("attributes", segmentCommitInfo.info.getAttributes());
+ segmentInfo.attributes = segmentCommitInfo.info.getAttributes();
}
if (withSizeInfo) {
Directory dir = segmentCommitInfo.info.dir;
@@ -285,34 +360,35 @@ public class SegmentsInfoRequestHandler extends RequestHandlerBase {
})
.collect(Collectors.toList());
if (!files.isEmpty()) {
- SimpleOrderedMap<Object> topFiles = new SimpleOrderedMap<>();
+ final var topFiles = new HashMap<String, String>();
for (int i = 0; i < Math.min(files.size(), 5); i++) {
Pair<String, Long> p = files.get(i);
- topFiles.add(p.first(), RamUsageEstimator.humanReadableUnits(p.second()));
+ topFiles.put(p.first(), RamUsageEstimator.humanReadableUnits(p.second()));
}
- segmentInfoMap.add("largestFiles", topFiles);
+ segmentInfo.largestFilesByName = topFiles;
}
}
+
if (withFieldInfos) {
if (seg == null) {
log.debug(
"Skipping segment info - not available as a SegmentReader: {}",
segmentCommitInfo);
} else {
FieldInfos fis = seg.getFieldInfos();
- SimpleOrderedMap<Object> fields = new SimpleOrderedMap<>();
+ final var fields = new HashMap<String, GetSegmentDataResponse.SegmentSingleFieldInfo>();
for (FieldInfo fi : fis) {
- fields.add(fi.name, getFieldInfo(seg, fi, schema));
+ fields.put(fi.name, getFieldInfo(seg, fi, schema));
}
- segmentInfoMap.add("fields", fields);
+ segmentInfo.fields = fields;
}
}
- return segmentInfoMap;
+ return segmentInfo;
}
- private SimpleOrderedMap<Object> getFieldInfo(
+ private GetSegmentDataResponse.SegmentSingleFieldInfo getFieldInfo(
SegmentReader reader, FieldInfo fi, IndexSchema schema) {
- SimpleOrderedMap<Object> fieldFlags = new SimpleOrderedMap<>();
+ final var responseFieldInfo = new GetSegmentDataResponse.SegmentSingleFieldInfo();
StringBuilder flags = new StringBuilder();
IndexOptions opts = fi.getIndexOptions();
flags.append((opts != IndexOptions.NONE) ? FieldFlag.INDEXED.getAbbreviation() : '-');
@@ -362,113 +438,64 @@ public class SegmentsInfoRequestHandler extends RequestHandlerBase {
flags.append(fi.getPointNumBytes());
}
- fieldFlags.add("flags", flags.toString());
+ responseFieldInfo.flags = flags.toString();
+
try {
Terms terms = reader.terms(fi.name);
if (terms != null) {
- fieldFlags.add("docCount", terms.getDocCount());
- fieldFlags.add("termCount", terms.size());
- fieldFlags.add("sumDocFreq", terms.getSumDocFreq());
- fieldFlags.add("sumTotalTermFreq", terms.getSumTotalTermFreq());
+ responseFieldInfo.docCount = terms.getDocCount();
+ responseFieldInfo.termCount = terms.size();
+ responseFieldInfo.sumDocFreq = terms.getSumDocFreq();
+ responseFieldInfo.sumTotalTermFreq = terms.getSumTotalTermFreq();
}
} catch (Exception e) {
log.debug("Exception retrieving term stats for field {}", fi.name, e);
}
- // probably too much detail?
- // Map<String, String> attributes = fi.attributes();
- // if (!attributes.isEmpty()) {
- // fieldFlags.add("attributes", attributes);
- // }
-
// check compliance of the index with the current schema
SchemaField sf = schema.getFieldOrNull(fi.name);
boolean hasPoints = fi.getPointDimensionCount() > 0 || fi.getPointIndexDimensionCount() > 0;
if (sf != null) {
- fieldFlags.add("schemaType", sf.getType().getTypeName());
- SimpleOrderedMap<Object> nonCompliant = new SimpleOrderedMap<>();
+ responseFieldInfo.schemaType = sf.getType().getTypeName();
+ final var nonCompliant = new HashMap<String, String>();
if (sf.hasDocValues()
&& fi.getDocValuesType() == DocValuesType.NONE
&& fi.getIndexOptions() != IndexOptions.NONE) {
- nonCompliant.add(
+ nonCompliant.put(
"docValues", "schema=" + sf.getType().getUninversionType(sf) + ",
segment=false");
}
if (!sf.hasDocValues() && fi.getDocValuesType() != DocValuesType.NONE) {
- nonCompliant.add("docValues", "schema=false, segment=" +
fi.getDocValuesType().toString());
+ nonCompliant.put("docValues", "schema=false, segment=" +
fi.getDocValuesType().toString());
}
if (!sf.isPolyField()) { // difficult to find all sub-fields in a general way
if (sf.indexed() != ((fi.getIndexOptions() != IndexOptions.NONE) || hasPoints)) {
- nonCompliant.add(
+ nonCompliant.put(
"indexed", "schema=" + sf.indexed() + ", segment=" +
fi.getIndexOptions());
}
}
if (!hasPoints && (sf.omitNorms() != fi.omitsNorms())) {
- nonCompliant.add("omitNorms", "schema=" + sf.omitNorms() + ",
segment=" + fi.omitsNorms());
+ nonCompliant.put("omitNorms", "schema=" + sf.omitNorms() + ",
segment=" + fi.omitsNorms());
}
if (sf.storeTermVector() != fi.hasVectors()) {
- nonCompliant.add(
+ nonCompliant.put(
"termVectors", "schema=" + sf.storeTermVector() + ", segment=" +
fi.hasVectors());
}
if (sf.storeOffsetsWithPositions()
!= (fi.getIndexOptions() == IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS)) {
- nonCompliant.add(
+ nonCompliant.put(
"storeOffsetsWithPositions",
"schema=" + sf.storeOffsetsWithPositions() + ", segment=" +
fi.getIndexOptions());
}
if (nonCompliant.size() > 0) {
- nonCompliant.add("schemaField", sf.toString());
- fieldFlags.add("nonCompliant", nonCompliant);
+ nonCompliant.put("schemaField", sf.toString());
+ responseFieldInfo.nonCompliant = nonCompliant;
}
} else {
- fieldFlags.add("schemaType", "(UNKNOWN)");
- }
- return fieldFlags;
- }
-
- // returns a map of currently running merges, and populates a list of candidate segments for merge
- private SimpleOrderedMap<Object> getMergeInformation(
- SolrQueryRequest req, SegmentInfos infos, List<String> mergeCandidates) throws IOException {
- SimpleOrderedMap<Object> result = new SimpleOrderedMap<>();
- RefCounted<IndexWriter> refCounted =
- req.getCore().getSolrCoreState().getIndexWriter(req.getCore());
- try {
- IndexWriter indexWriter = refCounted.get();
- if (indexWriter instanceof SolrIndexWriter) {
- result.addAll(((SolrIndexWriter) indexWriter).getRunningMerges());
- }
- // get chosen merge policy
- MergePolicy mp = indexWriter.getConfig().getMergePolicy();
- // Find merges
- MergeSpecification findMerges = mp.findMerges(MergeTrigger.EXPLICIT, infos, indexWriter);
- if (findMerges != null && findMerges.merges != null && findMerges.merges.size() > 0) {
- for (OneMerge merge : findMerges.merges) {
- // TODO: add merge grouping
- for (SegmentCommitInfo mergeSegmentInfo : merge.segments) {
- mergeCandidates.add(mergeSegmentInfo.info.name);
- }
- }
- }
-
- return result;
- } finally {
- refCounted.decref();
+ responseFieldInfo.schemaType = "(UNKNOWN)";
}
- }
-
- @Override
- public String getDescription() {
- return "Lucene segments info.";
- }
- @Override
- public Category getCategory() {
- return Category.ADMIN;
- }
-
- @Override
- public Name getPermissionName(AuthorizationContext request) {
- return Name.METRICS_READ_PERM;
+ return responseFieldInfo;
}
}
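
For orientation, here is a minimal SolrJ sketch (not part of this patch) of how the segment data produced by GetSegmentData might be fetched over the v1 path the class mirrors, /solr/<coreName>/admin/segments. The base URL, core name, parameter choices, and the use of GenericSolrRequest are illustrative assumptions rather than code from this commit.

----
import java.util.Map;
import org.apache.solr.client.solrj.SolrClient;
import org.apache.solr.client.solrj.SolrRequest;
import org.apache.solr.client.solrj.impl.Http2SolrClient;
import org.apache.solr.client.solrj.request.GenericSolrRequest;
import org.apache.solr.common.params.MapSolrParams;
import org.apache.solr.common.util.NamedList;

public class SegmentsSketch {
  public static void main(String[] args) throws Exception {
    // Assumed base URL and core name - adjust for your installation.
    try (SolrClient client =
        new Http2SolrClient.Builder("http://localhost:8983/solr/myCore").build()) {
      // Ask for the optional extras handled above: core info, per-field info, and size info.
      var params =
          new MapSolrParams(Map.of("coreInfo", "true", "fieldInfo", "true", "sizeInfo", "true"));
      var req = new GenericSolrRequest(SolrRequest.METHOD.GET, "/admin/segments", params);
      NamedList<Object> rsp = client.request(req);
      // The "segments" section mirrors what the v1 handler returned.
      System.out.println(rsp.get("segments"));
    }
  }
}
----
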
diff --git a/solr/core/src/java/org/apache/solr/handler/api/V2ApiUtils.java b/solr/core/src/java/org/apache/solr/handler/api/V2ApiUtils.java
index 9a96b34afc0..22f492abf92 100644
--- a/solr/core/src/java/org/apache/solr/handler/api/V2ApiUtils.java
+++ b/solr/core/src/java/org/apache/solr/handler/api/V2ApiUtils.java
@@ -91,6 +91,9 @@ public class V2ApiUtils {
}
public static String getMediaTypeFromWtParam(SolrParams params, String defaultMediaType) {
+ if (params == null) {
+ return defaultMediaType;
+ }
final String wtParam = params.get(WT);
if (StrUtils.isBlank(wtParam)) return defaultMediaType;
diff --git a/solr/core/src/java/org/apache/solr/jersey/CatchAllExceptionMapper.java b/solr/core/src/java/org/apache/solr/jersey/CatchAllExceptionMapper.java
index 1110880529c..3760bfc4590 100644
--- a/solr/core/src/java/org/apache/solr/jersey/CatchAllExceptionMapper.java
+++ b/solr/core/src/java/org/apache/solr/jersey/CatchAllExceptionMapper.java
@@ -63,6 +63,7 @@ public class CatchAllExceptionMapper implements ExceptionMapper<Exception> {
// success/failure for AuditLogging, and other logic.
final SolrQueryResponse solrQueryResponse =
(SolrQueryResponse) containerRequestContext.getProperty(SOLR_QUERY_RESPONSE);
+
final SolrQueryRequest solrQueryRequest =
(SolrQueryRequest) containerRequestContext.getProperty(SOLR_QUERY_REQUEST);
if (exception instanceof WebApplicationException wae) {
diff --git a/solr/core/src/java/org/apache/solr/jersey/MediaTypeOverridingFilter.java b/solr/core/src/java/org/apache/solr/jersey/MediaTypeOverridingFilter.java
index e5a7f7150cc..44c08bff03e 100644
--- a/solr/core/src/java/org/apache/solr/jersey/MediaTypeOverridingFilter.java
+++ b/solr/core/src/java/org/apache/solr/jersey/MediaTypeOverridingFilter.java
@@ -63,9 +63,9 @@ public class MediaTypeOverridingFilter implements ContainerResponseFilter {
final SolrQueryRequest solrQueryRequest =
(SolrQueryRequest) requestContext.getProperty(SOLR_QUERY_REQUEST);
- final String mediaType =
- V2ApiUtils.getMediaTypeFromWtParam(
- solrQueryRequest.getParams(), MediaType.APPLICATION_JSON);
+ // TODO Is it valid for SQRequest to be null?
+ final var params = (solrQueryRequest != null) ? solrQueryRequest.getParams() : null;
+ final String mediaType = V2ApiUtils.getMediaTypeFromWtParam(params, MediaType.APPLICATION_JSON);
if (mediaType != null) {
responseContext.getHeaders().putSingle(CONTENT_TYPE, mediaType);
}
diff --git a/solr/core/src/test/org/apache/solr/cloud/CollectionsAPISolrJTest.java b/solr/core/src/test/org/apache/solr/cloud/CollectionsAPISolrJTest.java
index 0fe1b755d62..5239deeaeac 100644
--- a/solr/core/src/test/org/apache/solr/cloud/CollectionsAPISolrJTest.java
+++ b/solr/core/src/test/org/apache/solr/cloud/CollectionsAPISolrJTest.java
@@ -21,6 +21,9 @@ import static org.apache.solr.common.cloud.ZkStateReader.NRT_REPLICAS;
import static org.apache.solr.common.cloud.ZkStateReader.NUM_SHARDS_PROP;
import static org.apache.solr.common.params.CollectionAdminParams.COLLECTION;
import static org.apache.solr.common.params.CollectionAdminParams.DEFAULTS;
+import static org.hamcrest.Matchers.emptyString;
+import static org.hamcrest.Matchers.is;
+import static org.hamcrest.Matchers.not;
import java.io.IOException;
import java.lang.invoke.MethodHandles;
@@ -29,6 +32,7 @@ import java.nio.file.Path;
import java.nio.file.Paths;
import java.time.Instant;
import java.util.ArrayList;
+import java.util.Arrays;
import java.util.Collections;
import java.util.Date;
import java.util.List;
@@ -39,11 +43,14 @@ import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicReference;
import java.util.stream.Collectors;
import org.apache.lucene.tests.util.TestUtil;
+import org.apache.lucene.util.Version;
import org.apache.solr.client.solrj.SolrClient;
import org.apache.solr.client.solrj.SolrRequest;
+import org.apache.solr.client.solrj.SolrResponse;
import org.apache.solr.client.solrj.SolrServerException;
import org.apache.solr.client.solrj.impl.CloudSolrClient;
import org.apache.solr.client.solrj.request.CollectionAdminRequest;
+import org.apache.solr.client.solrj.request.CollectionsApi;
import org.apache.solr.client.solrj.request.CoreAdminRequest;
import org.apache.solr.client.solrj.request.CoreStatus;
import org.apache.solr.client.solrj.request.V2Request;
@@ -569,14 +576,7 @@ public class CollectionsAPISolrJTest extends SolrCloudTestCase {
fail("Timed out waiting for cluster property value");
}
- @Test
- public void testColStatus() throws Exception {
- String collectionName = getSaferTestName();
- CollectionAdminRequest.createCollection(collectionName, "conf2", 2, 2)
- .process(cluster.getSolrClient());
-
- cluster.waitForActiveCollection(collectionName, 2, 4);
-
+ private void indexSomeDocs(String collectionName) throws SolrServerException, IOException {
SolrClient client = cluster.getSolrClient();
byte[] binData = collectionName.getBytes(StandardCharsets.UTF_8);
// index some docs
@@ -602,13 +602,97 @@ public class CollectionsAPISolrJTest extends SolrCloudTestCase {
client.add(collectionName, doc);
}
client.commit(collectionName);
+ }
+
+ private void assertRspPathNull(SolrResponse rsp, String... pathSegments) {
+ assertNull(Utils.getObjectByPath(rsp.getResponse(), false, Arrays.asList(pathSegments)));
+ }
+
+ private void assertRspPathNotNull(SolrResponse rsp, String... pathSegments) {
+ assertNotNull(Utils.getObjectByPath(rsp.getResponse(), false, Arrays.asList(pathSegments)));
+ }
+ @Test
+ @SuppressWarnings("unchecked")
+ public void testColStatus() throws Exception {
+ String collectionName = getSaferTestName();
+ CollectionAdminRequest.createCollection(collectionName, "conf2", 2, 2)
+ .process(cluster.getSolrClient());
+
+ cluster.waitForActiveCollection(collectionName, 2, 4);
+ indexSomeDocs(collectionName);
+
+ // Returns basic info if no additional flags are set
CollectionAdminRequest.ColStatus req =
CollectionAdminRequest.collectionStatus(collectionName);
+ CollectionAdminResponse rsp = req.process(cluster.getSolrClient());
+ assertEquals(0, rsp.getStatus());
+ assertNotNull(rsp.getResponse().get(collectionName));
+ assertNotNull(rsp.getResponse().findRecursive(collectionName, "properties"));
+ final var collPropMap =
+ (Map<String, Object>) rsp.getResponse().findRecursive(collectionName, "properties");
+ assertEquals("conf2", collPropMap.get("configName"));
+ assertEquals(2L, collPropMap.get("nrtReplicas"));
+ assertEquals("0", collPropMap.get("tlogReplicas"));
+ assertEquals("0", collPropMap.get("pullReplicas"));
+ assertEquals(
+ 2, ((NamedList<Object>) rsp.getResponse().findRecursive(collectionName, "shards")).size());
+ assertNotNull(rsp.getResponse().findRecursive(collectionName, "shards", "shard1", "leader"));
+ // Ensure more advanced info is not returned
+ assertNull(
+ rsp.getResponse().findRecursive(collectionName, "shards", "shard1", "leader", "segInfos"));
+
+ // Returns segment metadata iff requested
+ req = CollectionAdminRequest.collectionStatus(collectionName);
+ req.setWithSegments(true);
+ rsp = req.process(cluster.getSolrClient());
+ assertEquals(0, rsp.getStatus());
+ assertNotNull(rsp.getResponse().get(collectionName));
+ assertRspPathNotNull(
+ rsp, collectionName, "shards", "shard1", "leader", "segInfos", "segments", "_0");
+ // Ensure field, size, etc. information isn't returned if only segment data was requested
+ assertRspPathNull(
+ rsp, collectionName, "shards", "shard1", "leader", "segInfos", "segments", "_0", "fields");
+ assertRspPathNull(
+ rsp,
+ collectionName,
+ "shards",
+ "shard1",
+ "leader",
+ "segInfos",
+ "segments",
+ "_0",
+ "largestFiles");
+
+ // Returns segment metadata and file-size info iff requested
+ // (Note that 'sizeInfo=true' should implicitly enable segments=true)
+ req = CollectionAdminRequest.collectionStatus(collectionName);
+ req.setWithSizeInfo(true);
+ rsp = req.process(cluster.getSolrClient());
+ assertEquals(0, rsp.getStatus());
+ assertRspPathNotNull(rsp, collectionName);
+ assertRspPathNotNull(
+ rsp, collectionName, "shards", "shard1", "leader", "segInfos",
"segments", "_0");
+ assertRspPathNotNull(
+ rsp,
+ collectionName,
+ "shards",
+ "shard1",
+ "leader",
+ "segInfos",
+ "segments",
+ "_0",
+ "largestFiles");
+ // Ensure field, etc. information isn't returned if only segment+size data was requested
+ assertRspPathNull(
+ rsp, collectionName, "shards", "shard1", "leader", "segInfos", "segments", "_0", "fields");
+
+ // Set all flags and ensure everything is returned as expected
+ req = CollectionAdminRequest.collectionStatus(collectionName);
+ req.setWithSegments(true);
req.setWithFieldInfo(true);
req.setWithCoreInfo(true);
- req.setWithSegments(true);
req.setWithSizeInfo(true);
- CollectionAdminResponse rsp = req.process(cluster.getSolrClient());
+ rsp = req.process(cluster.getSolrClient());
assertEquals(0, rsp.getStatus());
@SuppressWarnings({"unchecked"})
List<Object> nonCompliant =
@@ -616,14 +700,22 @@ public class CollectionsAPISolrJTest extends SolrCloudTestCase {
assertEquals(nonCompliant.toString(), 1, nonCompliant.size());
assertTrue(nonCompliant.toString(), nonCompliant.contains("(NONE)"));
@SuppressWarnings({"unchecked"})
- NamedList<Object> segInfos =
- (NamedList<Object>)
- rsp.getResponse()
- .findRecursive(collectionName, "shards", "shard1", "leader",
"segInfos");
- assertNotNull(Utils.toJSONString(rsp), segInfos.findRecursive("info",
"core", "startTime"));
- assertNotNull(Utils.toJSONString(rsp), segInfos.get("fieldInfoLegend"));
+ final var segInfos =
+ (Map<String, Object>)
+ Utils.getObjectByPath(
+ rsp.getResponse(),
+ false,
+ List.of(collectionName, "shards", "shard1", "leader",
"segInfos"));
assertNotNull(
- Utils.toJSONString(rsp), segInfos.findRecursive("segments", "_0",
"fields", "id", "flags"));
+ Utils.toJSONString(rsp),
+ Utils.getObjectByPath(segInfos, false, List.of("info", "core",
"startTime")));
+ assertNotNull(
+ Utils.toJSONString(rsp),
+ Utils.getObjectByPath(segInfos, false, List.of("fieldInfoLegend")));
+ assertNotNull(
+ Utils.toJSONString(rsp),
+ Utils.getObjectByPath(segInfos, false, List.of("segments", "_0",
"fields", "id", "flags")));
+
// test for replicas not active - SOLR-13882
DocCollection coll = cluster.getSolrClient().getClusterState().getCollection(collectionName);
Replica firstReplica = coll.getSlice("shard1").getReplicas().iterator().next();
@@ -637,7 +729,10 @@ public class CollectionsAPISolrJTest extends SolrCloudTestCase {
assertEquals(0, rsp.getStatus());
Number down =
(Number)
- rsp.getResponse().findRecursive(collectionName, "shards",
"shard1", "replicas", "down");
+ Utils.getObjectByPath(
+ rsp.getResponse(),
+ false,
+ List.of(collectionName, "shards", "shard1", "replicas",
"down"));
assertTrue(
"should be some down replicas, but there were none in shard1:" + rsp,
down.intValue() > 0);
@@ -652,10 +747,8 @@ public class CollectionsAPISolrJTest extends SolrCloudTestCase {
req = CollectionAdminRequest.collectionStatus(implicitColl);
rsp = req.process(cluster.getSolrClient());
assertNotNull(rsp.getResponse().get(implicitColl));
- assertNotNull(
- rsp.toString(), rsp.getResponse().findRecursive(implicitColl, "shards", "shardA"));
- assertNotNull(
- rsp.toString(), rsp.getResponse().findRecursive(implicitColl, "shards", "shardB"));
+ assertRspPathNotNull(rsp, implicitColl, "shards", "shardA");
+ assertRspPathNotNull(rsp, implicitColl, "shards", "shardB");
}
@Test
@@ -697,6 +790,69 @@ public class CollectionsAPISolrJTest extends SolrCloudTestCase {
assertNotNull(rsp.getResponse().get(collectionNames[0]));
}
+ /**
+ * Unit test for the v2 API: GET /api/collections/$collName
+ *
+ * <p>Uses the OAS-generated SolrRequest/SolrResponse API binding.
+ */
+ @Test
+ public void testV2BasicCollectionStatus() throws Exception {
+ final String simpleCollName = "simpleCollection";
+ CollectionAdminRequest.createCollection(simpleCollName, "conf2", 2, 1, 1,
1)
+ .process(cluster.getSolrClient());
+ cluster.waitForActiveCollection(simpleCollName, 2, 6);
+ indexSomeDocs(simpleCollName);
+
+ final var simpleResponse =
+ new CollectionsApi.GetCollectionStatus(simpleCollName)
+ .process(cluster.getSolrClient())
+ .getParsed();
+ assertEquals(simpleCollName, simpleResponse.name);
+ assertEquals(2, simpleResponse.shards.size());
+ assertEquals(Integer.valueOf(2), simpleResponse.activeShards);
+ assertEquals(Integer.valueOf(0), simpleResponse.inactiveShards);
+ assertEquals(Integer.valueOf(1), simpleResponse.properties.nrtReplicas);
+ assertEquals(Integer.valueOf(1), simpleResponse.properties.replicationFactor);
+ assertEquals(Integer.valueOf(1), simpleResponse.properties.pullReplicas);
+ assertEquals(Integer.valueOf(1), simpleResponse.properties.tlogReplicas);
+ assertNotNull(simpleResponse.shards.get("shard1").leader);
+ assertNull(simpleResponse.shards.get("shard1").leader.segInfos);
+
+ // Ensure segment data present when request sets 'segments=true' flag
+ final var segmentDataRequest = new CollectionsApi.GetCollectionStatus(simpleCollName);
+ segmentDataRequest.setSegments(true);
+ final var segmentDataResponse = segmentDataRequest.process(cluster.getSolrClient()).getParsed();
+ var segmentData = segmentDataResponse.shards.get("shard1").leader.segInfos;
+ assertNotNull(segmentData);
+ assertTrue(segmentData.info.numSegments > 0); // Expect at least one segment
+ assertEquals(segmentData.info.numSegments.intValue(), segmentData.segments.size());
+ assertEquals(Version.LATEST.toString(), segmentData.info.commitLuceneVersion);
+ // Ensure field, size, etc. data not provided
+ assertNull(segmentData.segments.get("_0").fields);
+ assertNull(segmentData.segments.get("_0").largestFilesByName);
+
+ // Ensure file-size data present when request sets sizeInfo flag
+ final var segmentFileSizeRequest = new CollectionsApi.GetCollectionStatus(simpleCollName);
+ segmentFileSizeRequest.setSizeInfo(true);
+ final var segmentFileSizeResponse =
+ segmentFileSizeRequest.process(cluster.getSolrClient()).getParsed();
+ segmentData = segmentFileSizeResponse.shards.get("shard1").leader.segInfos;
+ assertNotNull(segmentData);
+ final var largeFileList = segmentData.segments.get("_0").largestFilesByName;
+ assertNotNull(largeFileList);
+ // Hard to assert what the largest index files should be, but:
+ // - there should be at least 1 entry and...
+ // - all keys/values should be non-empty
+ assertTrue(largeFileList.size() > 0);
+ largeFileList.forEach(
+ (fileName, size) -> {
+ assertThat(fileName, is(not(emptyString())));
+ assertThat(size, is(not(emptyString())));
+ });
+ // Ensure field, etc. data not provided
+ assertNull(segmentData.segments.get("_0").fields);
+ }
+
private static final int NUM_DOCS = 10;
@Test
diff --git a/solr/core/src/test/org/apache/solr/handler/admin/IndexSizeEstimatorTest.java b/solr/core/src/test/org/apache/solr/handler/admin/IndexSizeEstimatorTest.java
index 5523ad23c15..54aa6394902 100644
--- a/solr/core/src/test/org/apache/solr/handler/admin/IndexSizeEstimatorTest.java
+++ b/solr/core/src/test/org/apache/solr/handler/admin/IndexSizeEstimatorTest.java
@@ -19,6 +19,7 @@ package org.apache.solr.handler.admin;
import java.lang.invoke.MethodHandles;
import java.util.Arrays;
import java.util.HashSet;
+import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.TimeUnit;
@@ -32,6 +33,7 @@ import org.apache.lucene.index.StoredFieldVisitor;
import org.apache.lucene.tests.util.TestUtil;
import org.apache.lucene.util.Bits;
import org.apache.lucene.util.RamUsageEstimator;
+import org.apache.solr.client.api.model.CollectionStatusResponse;
import org.apache.solr.client.solrj.impl.CloudSolrClient;
import org.apache.solr.client.solrj.request.CollectionAdminRequest;
import org.apache.solr.client.solrj.request.UpdateRequest;
@@ -39,10 +41,11 @@ import org.apache.solr.client.solrj.response.CollectionAdminResponse;
import org.apache.solr.client.solrj.response.QueryResponse;
import org.apache.solr.cloud.SolrCloudTestCase;
import org.apache.solr.common.SolrInputDocument;
-import org.apache.solr.common.util.NamedList;
import org.apache.solr.common.util.TimeSource;
+import org.apache.solr.common.util.Utils;
import org.apache.solr.core.SolrCore;
import org.apache.solr.embedded.JettySolrRunner;
+import org.apache.solr.jersey.SolrJacksonMapper;
import org.apache.solr.search.SolrIndexSearcher;
import org.apache.solr.util.RefCounted;
import org.apache.solr.util.TimeOut;
@@ -177,56 +180,42 @@ public class IndexSizeEstimatorTest extends SolrCloudTestCase {
assertEquals(0, sampledRsp.getStatus());
for (int i : Arrays.asList(1, 2)) {
@SuppressWarnings({"unchecked"})
- NamedList<Object> segInfos =
- (NamedList<Object>)
- rsp.getResponse()
- .findRecursive(collection, "shards", "shard" + i, "leader",
"segInfos");
- @SuppressWarnings({"unchecked"})
- NamedList<Object> rawSize = (NamedList<Object>) segInfos.get("rawSize");
+ final var segInfosRaw =
+ Utils.getObjectByPath(
+ rsp.getResponse(),
+ false,
+ List.of(collection, "shards", "shard" + i, "leader",
"segInfos"));
+ final var segInfos =
+ SolrJacksonMapper.getObjectMapper()
+ .convertValue(segInfosRaw, CollectionStatusResponse.SegmentInfo.class);
+
+ final var rawSize = segInfos.rawSize;
assertNotNull("rawSize missing", rawSize);
- @SuppressWarnings({"unchecked"})
- Map<String, Object> rawSizeMap = rawSize.asMap(10);
- @SuppressWarnings({"unchecked"})
- Map<String, Object> fieldsBySize =
- (Map<String, Object>) rawSizeMap.get(IndexSizeEstimator.FIELDS_BY_SIZE);
+ Map<String, String> fieldsBySize = rawSize.fieldsBySize;
assertNotNull("fieldsBySize missing", fieldsBySize);
assertEquals(fieldsBySize.toString(), fields.size(), fieldsBySize.size());
fields.forEach(field -> assertNotNull("missing field " + field, fieldsBySize.get(field)));
- @SuppressWarnings({"unchecked"})
- Map<String, Object> typesBySize =
- (Map<String, Object>) rawSizeMap.get(IndexSizeEstimator.TYPES_BY_SIZE);
+ Map<String, String> typesBySize = rawSize.typesBySize;
assertNotNull("typesBySize missing", typesBySize);
assertTrue("expected at least 8 types: " + typesBySize,
typesBySize.size() >= 8);
- @SuppressWarnings({"unchecked"})
- Map<String, Object> summary =
- (Map<String, Object>) rawSizeMap.get(IndexSizeEstimator.SUMMARY);
+ Map<String, Object> summary = rawSize.summary;
assertNotNull("summary missing", summary);
assertEquals(summary.toString(), fields.size(), summary.size());
fields.forEach(field -> assertNotNull("missing field " + field,
summary.get(field)));
@SuppressWarnings({"unchecked"})
- Map<String, Object> details =
- (Map<String, Object>) rawSizeMap.get(IndexSizeEstimator.DETAILS);
+ Map<String, Object> details = (Map<String, Object>) rawSize.details;
assertNotNull("details missing", summary);
assertEquals(details.keySet().toString(), 6, details.size());
// compare with sampled
- @SuppressWarnings({"unchecked"})
- NamedList<Object> sampledRawSize =
- (NamedList<Object>)
- rsp.getResponse()
- .findRecursive(
- collection, "shards", "shard" + i, "leader", "segInfos",
"rawSize");
+ final var sampledRawSize = rawSize;
assertNotNull("sampled rawSize missing", sampledRawSize);
- @SuppressWarnings({"unchecked"})
- Map<String, Object> sampledRawSizeMap = rawSize.asMap(10);
- @SuppressWarnings({"unchecked"})
- Map<String, Object> sampledFieldsBySize =
- (Map<String, Object>) sampledRawSizeMap.get(IndexSizeEstimator.FIELDS_BY_SIZE);
+ Map<String, String> sampledFieldsBySize = sampledRawSize.fieldsBySize;
assertNotNull("sampled fieldsBySize missing", sampledFieldsBySize);
fieldsBySize.forEach(
(k, v) -> {
- double size = fromHumanReadableUnits((String) v);
- double sampledSize = fromHumanReadableUnits((String) sampledFieldsBySize.get(k));
+ double size = fromHumanReadableUnits(v);
+ double sampledSize = fromHumanReadableUnits(sampledFieldsBySize.get(k));
double delta = size * 0.5;
assertEquals("sampled size of " + k + " is wildly off", size,
sampledSize, delta);
});
diff --git a/solr/core/src/test/org/apache/solr/handler/admin/TestApiFramework.java b/solr/core/src/test/org/apache/solr/handler/admin/TestApiFramework.java
index 265968b23c8..8600ed8236b 100644
--- a/solr/core/src/test/org/apache/solr/handler/admin/TestApiFramework.java
+++ b/solr/core/src/test/org/apache/solr/handler/admin/TestApiFramework.java
@@ -114,7 +114,6 @@ public class TestApiFramework extends SolrTestCaseJ4 {
methodNames.add(rsp.getValues()._getStr("/spec[1]/methods[0]", null));
methodNames.add(rsp.getValues()._getStr("/spec[2]/methods[0]", null));
assertTrue(methodNames.contains("POST"));
- assertTrue(methodNames.contains("GET"));
methodNames = new HashSet<>();
diff --git a/solr/core/src/test/org/apache/solr/handler/admin/api/V2CollectionAPIMappingTest.java b/solr/core/src/test/org/apache/solr/handler/admin/api/V2CollectionAPIMappingTest.java
index 98d000773c7..1a61b6516fd 100644
--- a/solr/core/src/test/org/apache/solr/handler/admin/api/V2CollectionAPIMappingTest.java
+++ b/solr/core/src/test/org/apache/solr/handler/admin/api/V2CollectionAPIMappingTest.java
@@ -21,9 +21,7 @@ import static org.apache.solr.common.params.CollectionAdminParams.COLLECTION;
import static org.apache.solr.common.params.CollectionAdminParams.COLL_CONF;
import static org.apache.solr.common.params.CommonAdminParams.ASYNC;
import static org.apache.solr.common.params.CommonParams.ACTION;
-import static org.apache.solr.common.params.CoreAdminParams.SHARD;
-import java.util.Map;
import org.apache.solr.common.cloud.ZkStateReader;
import org.apache.solr.common.params.CollectionParams;
import org.apache.solr.common.params.SolrParams;
@@ -56,7 +54,6 @@ public class V2CollectionAPIMappingTest extends V2ApiMappingTest<CollectionsHand
apiBag.registerObject(new ModifyCollectionAPI(collectionsHandler));
apiBag.registerObject(new MoveReplicaAPI(collectionsHandler));
apiBag.registerObject(new RebalanceLeadersAPI(collectionsHandler));
- apiBag.registerObject(new CollectionStatusAPI(collectionsHandler));
}
@Override
@@ -69,17 +66,6 @@ public class V2CollectionAPIMappingTest extends V2ApiMappingTest<CollectionsHand
return false;
}
- @Test
- public void testGetCollectionStatus() throws Exception {
- final SolrParams v1Params =
- captureConvertedV1Params(
- "/collections/collName", "GET", Map.of(SHARD, new String[]
{"shard2"}));
-
- assertEquals(CollectionParams.CollectionAction.CLUSTERSTATUS.toString(), v1Params.get(ACTION));
- assertEquals("collName", v1Params.get(COLLECTION));
- assertEquals("shard2", v1Params.get(SHARD));
- }
-
@Test
public void testModifyCollectionAllProperties() throws Exception {
final SolrParams v1Params =
diff --git a/solr/solr-ref-guide/modules/deployment-guide/pages/collection-management.adoc b/solr/solr-ref-guide/modules/deployment-guide/pages/collection-management.adoc
index f4811158ef7..6f89932a3a6 100644
--- a/solr/solr-ref-guide/modules/deployment-guide/pages/collection-management.adoc
+++ b/solr/solr-ref-guide/modules/deployment-guide/pages/collection-management.adoc
@@ -1050,11 +1050,9 @@ http://localhost:8983/solr/admin/collections?action=COLSTATUS&collection=techpro
V2 API::
+
====
-The closest V2 API is this one, but doesn't support all the features of the V1 equivalent.
-
[source,bash]
----
-curl -X GET http://localhost:8983/api/collections/techproducts_v2
+curl -X GET "http://localhost:8983/api/collections/techproducts_v2?coreInfo=true&segments=true&fieldInfo=true&sizeInfo=true"
----
====
======
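
For SolrJ users, a minimal sketch (not part of this commit) contrasting the two request forms shown above: the v1 COLSTATUS action, where the collection is a query parameter, versus the v2 path-based request. The ZooKeeper address and collection name are assumptions; the request and response types are the ones exercised in CollectionsAPISolrJTest earlier in this patch.

----
import java.util.List;
import java.util.Optional;
import org.apache.solr.client.solrj.SolrClient;
import org.apache.solr.client.solrj.impl.CloudSolrClient;
import org.apache.solr.client.solrj.request.CollectionAdminRequest;
import org.apache.solr.client.solrj.request.CollectionsApi;
import org.apache.solr.client.solrj.response.CollectionAdminResponse;

public class ColStatusSketch {
  public static void main(String[] args) throws Exception {
    // Assumed ZooKeeper address and collection name - adjust for your cluster.
    try (SolrClient client =
        new CloudSolrClient.Builder(List.of("localhost:2181"), Optional.empty()).build()) {
      String collection = "techproducts_v2";

      // v1: the collection is passed as a query parameter of the COLSTATUS action.
      CollectionAdminRequest.ColStatus v1Req = CollectionAdminRequest.collectionStatus(collection);
      v1Req.setWithSegments(true);
      v1Req.setWithSizeInfo(true);
      CollectionAdminResponse v1Rsp = v1Req.process(client);
      System.out.println(v1Rsp.getResponse().get(collection));

      // v2: the collection is part of the path (GET /api/collections/{collection}),
      // using the generated binding exercised in CollectionsAPISolrJTest.
      var v2Req = new CollectionsApi.GetCollectionStatus(collection);
      v2Req.setSegments(true);
      v2Req.setSizeInfo(true);
      var v2Rsp = v2Req.process(client).getParsed();
      System.out.println(v2Rsp.shards.keySet());
    }
  }
}
----
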
@@ -1072,7 +1070,8 @@ Such incompatibilities may result from incompatible schema changes or after migr
|===
+
Collection name.
-If missing then it means all collections.
+Provided as a query-parameter in v1 requests, and as a path-parameter in v2.
+If missing then information is returned about all collections (supported by v1 requests only).
`coreInfo`::
+