This is an automated email from the ASF dual-hosted git repository.
epugh pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/solr.git
The following commit(s) were added to refs/heads/main by this push:
new 242c1fc4e8a SOLR-17852: Migrate schema designer to filestoreapi part
deux (#3031)
242c1fc4e8a is described below
commit 242c1fc4e8a1402fd31e7c5c837ec381ec741a89
Author: Eric Pugh <[email protected]>
AuthorDate: Sun Aug 10 17:04:44 2025 -0400
SOLR-17852: Migrate schema designer to filestoreapi part deux (#3031)
Now using the FileStore API for persisting working documents used in the
Schema Designer.
---
solr/CHANGES.txt | 2 +
.../apache/solr/filestore/DistribFileStore.java | 16 ++---
.../java/org/apache/solr/filestore/FileStore.java | 21 ++++--
.../solr/handler/designer/SchemaDesignerAPI.java | 23 +++---
.../designer/SchemaDesignerConfigSetHelper.java | 84 ++++++++++++----------
.../handler/designer/SchemaDesignerConstants.java | 1 -
.../handler/designer/TestSchemaDesignerAPI.java | 4 --
.../TestSchemaDesignerConfigSetHelper.java | 27 ++++---
.../java/org/apache/solr/common/util/Utils.java | 6 ++
9 files changed, 105 insertions(+), 79 deletions(-)
diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt
index bd258d80b44..251e4ad5f12 100644
--- a/solr/CHANGES.txt
+++ b/solr/CHANGES.txt
@@ -49,6 +49,8 @@ Improvements
* SOLR-17739: Uploading a config set with forbidden file types will produce
an Exception. Previously it was just ignoring those files and logging a WARN.
(Abhishek Umarjikar via Eric Pugh)
+* SOLR-17852: Migrate Schema Designer to use FileStore API instead of
BlobHandler for persisting working data. (Eric Pugh)
+
Optimizations
---------------------
* SOLR-17568: The CLI bin/solr export tool now contacts the appropriate nodes
directly for data instead of proxying through one.
diff --git a/solr/core/src/java/org/apache/solr/filestore/DistribFileStore.java
b/solr/core/src/java/org/apache/solr/filestore/DistribFileStore.java
index 99b5b90e14c..a64ba7a0341 100644
--- a/solr/core/src/java/org/apache/solr/filestore/DistribFileStore.java
+++ b/solr/core/src/java/org/apache/solr/filestore/DistribFileStore.java
@@ -344,18 +344,19 @@ public class DistribFileStore implements FileStore {
private void distribute(FileInfo info) {
try {
String dirName = info.path.substring(0, info.path.lastIndexOf('/'));
+
coreContainer
.getZkController()
.getZkClient()
- .makePath(ZK_PACKAGESTORE + dirName, false, true);
- coreContainer
- .getZkController()
- .getZkClient()
- .create(
+ .makePath(
ZK_PACKAGESTORE + info.path,
info.getDetails().getMetaData().sha512.getBytes(UTF_8),
CreateMode.PERSISTENT,
- true);
+ null,
+ false,
+ true,
+ 0);
+
} catch (Exception e) {
throw new SolrException(SERVER_ERROR, "Unable to create an entry in ZK",
e);
}
@@ -452,7 +453,7 @@ public class DistribFileStore implements FileStore {
}
@Override
- public void get(String path, Consumer<FileEntry> consumer, boolean
fetchmissing)
+ public void get(String path, Consumer<FileEntry> consumer, boolean
fetchMissing)
throws IOException {
Path file = getRealPath(path);
String simpleName = file.getFileName().toString();
@@ -573,7 +574,6 @@ public class DistribFileStore implements FileStore {
@SuppressWarnings({"rawtypes"})
List myFiles = list(path, s -> true);
for (Object f : l) {
- // TODO: https://issues.apache.org/jira/browse/SOLR-15426
// l should be a List<String> and myFiles should be a
List<FileDetails>, so contains
// should always return false!
if (!myFiles.contains(f)) {
diff --git a/solr/core/src/java/org/apache/solr/filestore/FileStore.java
b/solr/core/src/java/org/apache/solr/filestore/FileStore.java
index acdf979496d..6a268b18bae 100644
--- a/solr/core/src/java/org/apache/solr/filestore/FileStore.java
+++ b/solr/core/src/java/org/apache/solr/filestore/FileStore.java
@@ -29,7 +29,11 @@ import java.util.function.Predicate;
import org.apache.solr.filestore.FileStoreAPI.MetaData;
import org.apache.zookeeper.server.ByteBufferInputStream;
-/** The interface to be implemented by any package store provider *
@lucene.experimental */
+/**
+ * The interface to be implemented by any package store provider
+ *
+ * @lucene.experimental
+ */
public interface FileStore {
/**
@@ -38,8 +42,13 @@ public interface FileStore {
*/
void put(FileEntry fileEntry) throws IOException;
- /** read file content from a given path */
- void get(String path, Consumer<FileEntry> filecontent, boolean getMissing)
throws IOException;
+ /**
+ * Read file content from a given path.
+ *
+ * <p>TODO: Is fetchMissing actually used? I don't see it being used, but
the IDE doesn't flag it
+ * as unused!
+ */
+ void get(String path, Consumer<FileEntry> consumer, boolean fetchMissing)
throws IOException;
/** Fetch a resource from another node internal API */
boolean fetch(String path, String from);
@@ -59,7 +68,7 @@ public interface FileStore {
Map<String, byte[]> getKeys() throws IOException;
/**
- * Refresh the files in a path. May be this node does not have all files
+ * Refresh the files in a path. Maybe this node does not have all files?
*
* @param path the path to be refreshed.
*/
@@ -71,12 +80,12 @@ public interface FileStore {
/** Delete file from local file system */
void deleteLocal(String path);
- public class FileEntry {
+ class FileEntry {
final ByteBuffer buf;
final MetaData meta;
final String path;
- FileEntry(ByteBuffer buf, MetaData meta, String path) {
+ public FileEntry(ByteBuffer buf, MetaData meta, String path) {
this.buf = buf;
this.meta = meta;
this.path = path;
diff --git
a/solr/core/src/java/org/apache/solr/handler/designer/SchemaDesignerAPI.java
b/solr/core/src/java/org/apache/solr/handler/designer/SchemaDesignerAPI.java
index 29b93142ec8..d5a1a8b8d11 100644
--- a/solr/core/src/java/org/apache/solr/handler/designer/SchemaDesignerAPI.java
+++ b/solr/core/src/java/org/apache/solr/handler/designer/SchemaDesignerAPI.java
@@ -86,8 +86,7 @@ import org.slf4j.LoggerFactory;
/** All V2 APIs have a prefix of /api/schema-designer/ */
public class SchemaDesignerAPI implements SchemaDesignerConstants {
- private static final Set<String> excludeConfigSetNames =
- new HashSet<>(Arrays.asList(DEFAULT_CONFIGSET_NAME, BLOB_STORE_ID));
+ private static final Set<String> excludeConfigSetNames =
Set.of(DEFAULT_CONFIGSET_NAME);
private static final Logger log =
LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
@@ -172,7 +171,7 @@ public class SchemaDesignerAPI implements
SchemaDesignerConstants {
// don't fail if loading sample docs fails
try {
- responseMap.put("numDocs",
configSetHelper.getStoredSampleDocs(configSet).size());
+ responseMap.put("numDocs",
configSetHelper.retrieveSampleDocs(configSet).size());
} catch (Exception exc) {
log.warn("Failed to load sample docs from blob store for {}", configSet,
exc);
}
@@ -282,7 +281,7 @@ public class SchemaDesignerAPI implements
SchemaDesignerConstants {
ManagedIndexSchema schema = loadLatestSchema(mutableId);
Map<Object, Throwable> errorsDuringIndexing = null;
SolrException solrExc = null;
- List<SolrInputDocument> docs =
configSetHelper.getStoredSampleDocs(configSet);
+ List<SolrInputDocument> docs =
configSetHelper.retrieveSampleDocs(configSet);
String[] analysisErrorHolder = new String[1];
if (!docs.isEmpty()) {
String idField = schema.getUniqueKeyField().getName();
@@ -318,7 +317,7 @@ public class SchemaDesignerAPI implements
SchemaDesignerConstants {
final String idField = getRequiredParam(UNIQUE_KEY_FIELD_PARAM, req);
String docId = req.getParams().get(DOC_ID_PARAM);
- final List<SolrInputDocument> docs =
configSetHelper.getStoredSampleDocs(configSet);
+ final List<SolrInputDocument> docs =
configSetHelper.retrieveSampleDocs(configSet);
String textValue = null;
if (StrUtils.isNullOrEmpty(docId)) {
// no doc ID from client ... find the first doc with a non-empty string
value for fieldName
@@ -429,7 +428,7 @@ public class SchemaDesignerAPI implements
SchemaDesignerConstants {
ManagedIndexSchema schema = loadLatestSchema(mutableId);
Map<String, Object> response =
- buildResponse(configSet, schema, null,
configSetHelper.getStoredSampleDocs(configSet));
+ buildResponse(configSet, schema, null,
configSetHelper.retrieveSampleDocs(configSet));
response.put(action, objectName);
rsp.getValues().addAll(response);
}
@@ -468,7 +467,7 @@ public class SchemaDesignerAPI implements
SchemaDesignerConstants {
// re-index the docs if no error to this point
final ManagedIndexSchema schema = loadLatestSchema(mutableId);
- List<SolrInputDocument> docs =
configSetHelper.getStoredSampleDocs(configSet);
+ List<SolrInputDocument> docs =
configSetHelper.retrieveSampleDocs(configSet);
Map<Object, Throwable> errorsDuringIndexing = null;
String[] analysisErrorHolder = new String[1];
if (solrExc == null && !docs.isEmpty()) {
@@ -571,7 +570,7 @@ public class SchemaDesignerAPI implements
SchemaDesignerConstants {
int rf = req.getParams().getInt("replicationFactor", 1);
configSetHelper.createCollection(newCollection, configSet, numShards,
rf);
if (req.getParams().getBool(INDEX_TO_COLLECTION_PARAM, false)) {
- List<SolrInputDocument> docs =
configSetHelper.getStoredSampleDocs(configSet);
+ List<SolrInputDocument> docs =
configSetHelper.retrieveSampleDocs(configSet);
if (!docs.isEmpty()) {
ManagedIndexSchema schema = loadLatestSchema(mutableId);
errorsDuringIndexing =
@@ -773,7 +772,7 @@ public class SchemaDesignerAPI implements
SchemaDesignerConstants {
mutableId,
version,
currentVersion);
- List<SolrInputDocument> docs =
configSetHelper.getStoredSampleDocs(configSet);
+ List<SolrInputDocument> docs =
configSetHelper.retrieveSampleDocs(configSet);
ManagedIndexSchema schema = loadLatestSchema(mutableId);
errorsDuringIndexing =
indexSampleDocsWithRebuildOnAnalysisError(
@@ -829,7 +828,7 @@ public class SchemaDesignerAPI implements
SchemaDesignerConstants {
if (!docs.isEmpty()) {
// user posted in some docs, if there are already docs stored in the
blob store, then add
// these to the existing set
- List<SolrInputDocument> stored =
configSetHelper.getStoredSampleDocs(configSet);
+ List<SolrInputDocument> stored =
configSetHelper.retrieveSampleDocs(configSet);
if (!stored.isEmpty()) {
// keep the docs in the request as newest
ManagedIndexSchema latestSchema =
loadLatestSchema(getMutableId(configSet));
@@ -838,14 +837,14 @@ public class SchemaDesignerAPI implements
SchemaDesignerConstants {
latestSchema.getUniqueKeyField().getName(), stored,
MAX_SAMPLE_DOCS);
}
- // store in the blob store so that we always have access to these docs
+ // store in the Filestore so that we always have access to these docs
configSetHelper.storeSampleDocs(configSet, docs);
}
}
if (docs == null || docs.isEmpty()) {
// no sample docs in the request ... find in blob store (or fail if no
docs previously stored)
- docs = configSetHelper.getStoredSampleDocs(configSet);
+ docs = configSetHelper.retrieveSampleDocs(configSet);
// no docs? but if this schema has already been published, it's OK, we
can skip the docs part
if (docs.isEmpty() && !configExists(configSet)) {
diff --git
a/solr/core/src/java/org/apache/solr/handler/designer/SchemaDesignerConfigSetHelper.java
b/solr/core/src/java/org/apache/solr/handler/designer/SchemaDesignerConfigSetHelper.java
index 296f3966b9a..3def33a6e8c 100644
---
a/solr/core/src/java/org/apache/solr/handler/designer/SchemaDesignerConfigSetHelper.java
+++
b/solr/core/src/java/org/apache/solr/handler/designer/SchemaDesignerConfigSetHelper.java
@@ -27,9 +27,11 @@ import static
org.apache.solr.schema.IndexSchema.ROOT_FIELD_NAME;
import static
org.apache.solr.schema.ManagedIndexSchemaFactory.DEFAULT_MANAGED_SCHEMA_RESOURCE_NAME;
import java.io.ByteArrayOutputStream;
+import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStream;
import java.lang.invoke.MethodHandles;
+import java.nio.ByteBuffer;
import java.nio.charset.StandardCharsets;
import java.nio.file.FileVisitResult;
import java.nio.file.Files;
@@ -49,6 +51,7 @@ import java.util.Optional;
import java.util.Set;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
+import java.util.concurrent.atomic.AtomicReference;
import java.util.stream.Collectors;
import java.util.zip.ZipEntry;
import java.util.zip.ZipOutputStream;
@@ -59,8 +62,6 @@ import org.apache.solr.client.solrj.SolrRequest;
import org.apache.solr.client.solrj.SolrResponse;
import org.apache.solr.client.solrj.SolrServerException;
import org.apache.solr.client.solrj.impl.CloudSolrClient;
-import org.apache.solr.client.solrj.impl.InputStreamResponseParser;
-import org.apache.solr.client.solrj.impl.JavaBinResponseParser;
import org.apache.solr.client.solrj.impl.JsonMapResponseParser;
import org.apache.solr.client.solrj.request.CollectionAdminRequest;
import org.apache.solr.client.solrj.request.GenericSolrRequest;
@@ -78,13 +79,16 @@ import org.apache.solr.common.cloud.SolrZkClient;
import org.apache.solr.common.cloud.ZkMaintenanceUtils;
import org.apache.solr.common.cloud.ZkStateReader;
import org.apache.solr.common.params.ModifiableSolrParams;
-import org.apache.solr.common.util.IOUtils;
import org.apache.solr.common.util.NamedList;
import org.apache.solr.common.util.SimpleOrderedMap;
import org.apache.solr.common.util.Utils;
import org.apache.solr.core.CoreContainer;
import org.apache.solr.core.SolrConfig;
import org.apache.solr.core.SolrResourceLoader;
+import org.apache.solr.filestore.ClusterFileStore;
+import org.apache.solr.filestore.DistribFileStore;
+import org.apache.solr.filestore.FileStore;
+import org.apache.solr.filestore.FileStoreAPI;
import org.apache.solr.handler.admin.CollectionsHandler;
import org.apache.solr.schema.CopyField;
import org.apache.solr.schema.FieldType;
@@ -446,7 +450,7 @@ class SchemaDesignerConfigSetHelper implements
SchemaDesignerConstants {
protected void validateMultiValuedChange(String configSet, SchemaField
field, Boolean multiValued)
throws IOException {
- List<SolrInputDocument> docs = getStoredSampleDocs(configSet);
+ List<SolrInputDocument> docs = retrieveSampleDocs(configSet);
if (!docs.isEmpty()) {
boolean isMV = schemaSuggester.isMultiValued(field.getName(), docs);
if (isMV && !multiValued) {
@@ -466,44 +470,58 @@ class SchemaDesignerConfigSetHelper implements
SchemaDesignerConstants {
SolrException.ErrorCode.BAD_REQUEST,
"Cannot change type of the _version_ field; it must be a plong.");
}
- List<SolrInputDocument> docs = getStoredSampleDocs(configSet);
+ List<SolrInputDocument> docs = retrieveSampleDocs(configSet);
if (!docs.isEmpty()) {
schemaSuggester.validateTypeChange(field, toType, docs);
}
}
+ String getSampleDocsPathFromConfigSet(String configSet) {
+ return "schemadesigner" + "/" + configSet + "_sampledocs.javabin";
+ }
+
void deleteStoredSampleDocs(String configSet) {
- try {
- cloudClient().deleteByQuery(BLOB_STORE_ID, "id:" + configSet +
"_sample/*", 10);
- } catch (IOException | SolrServerException | SolrException exc) {
- final String excStr = exc.toString();
- log.warn("Failed to delete sample docs from blob store for {} due to:
{}", configSet, excStr);
- }
+ String path = getSampleDocsPathFromConfigSet(configSet);
+ // why do I have to do this in two stages?
+ DistribFileStore.deleteZKFileEntry(cc.getZkController().getZkClient(),
path);
+ cc.getFileStore().delete(path);
}
@SuppressWarnings("unchecked")
- List<SolrInputDocument> getStoredSampleDocs(final String configSet) throws
IOException {
- var request = new GenericSolrRequest(SolrRequest.METHOD.GET, "/blob/" +
configSet + "_sample");
- request.setRequiresCollection(true);
- request.setResponseParser(new InputStreamResponseParser("filestream"));
- InputStream inputStream = null;
+ List<SolrInputDocument> retrieveSampleDocs(final String configSet) throws
IOException {
+ AtomicReference<List<SolrInputDocument>> docs = new
AtomicReference<>(List.of());
+ String path = getSampleDocsPathFromConfigSet(configSet);
+
try {
- var resp = request.process(cloudClient(), BLOB_STORE_ID).getResponse();
- inputStream = (InputStream) resp.get("stream");
- var bytes = inputStream.readAllBytes();
- if (bytes.length > 0) {
- return (List<SolrInputDocument>) Utils.fromJavabin(bytes);
- } else return Collections.emptyList();
- } catch (SolrServerException e) {
- throw new IOException("Failed to lookup stored docs for " + configSet +
" due to: " + e);
- } finally {
- IOUtils.closeQuietly(inputStream);
+ cc.getFileStore()
+ .get(
+ path,
+ entry -> {
+ try (InputStream is = entry.getInputStream()) {
+ docs.set((List<SolrInputDocument>) Utils.fromJavabin(is));
+ } catch (IOException e) {
+ log.error("Error reading file content at path {}", path, e);
+ }
+ },
+ true);
+ } catch (FileNotFoundException e) {
+ log.info("File at path {} not found.", path);
}
+
+ return docs.get();
}
void storeSampleDocs(final String configSet, List<SolrInputDocument> docs)
throws IOException {
docs.forEach(d -> d.removeField(VERSION_FIELD)); // remove _version_ field
before storing ...
- postDataToBlobStore(cloudClient(), configSet + "_sample", readAllBytes(()
-> toJavabin(docs)));
+ storeSampleDocs(configSet, readAllBytes(() -> toJavabin(docs)));
+ }
+
+ protected void storeSampleDocs(String configSet, byte[] bytes) throws
IOException {
+ String path = getSampleDocsPathFromConfigSet(configSet);
+
+ FileStoreAPI.MetaData meta = ClusterFileStore._createJsonMetaData(bytes,
null);
+
+ cc.getFileStore().put(new FileStore.FileEntry(ByteBuffer.wrap(bytes),
meta, path));
}
/** Gets the stream, reads all the bytes, closes the stream. */
@@ -513,18 +531,6 @@ class SchemaDesignerConfigSetHelper implements
SchemaDesignerConstants {
}
}
- protected void postDataToBlobStore(CloudSolrClient cloudClient, String
blobName, byte[] bytes)
- throws IOException {
- var request = new GenericSolrRequest(SolrRequest.METHOD.POST, "/blob/" +
blobName);
- request.withContent(bytes, JavaBinResponseParser.JAVABIN_CONTENT_TYPE);
- request.setRequiresCollection(true);
- try {
- request.process(cloudClient, BLOB_STORE_ID);
- } catch (SolrServerException e) {
- throw new SolrException(ErrorCode.SERVER_ERROR, e);
- }
- }
-
private String getBaseUrl(final String collection) {
String baseUrl = null;
try {
diff --git
a/solr/core/src/java/org/apache/solr/handler/designer/SchemaDesignerConstants.java
b/solr/core/src/java/org/apache/solr/handler/designer/SchemaDesignerConstants.java
index b6afdc66152..0ad93d90d27 100644
---
a/solr/core/src/java/org/apache/solr/handler/designer/SchemaDesignerConstants.java
+++
b/solr/core/src/java/org/apache/solr/handler/designer/SchemaDesignerConstants.java
@@ -40,7 +40,6 @@ public interface SchemaDesignerConstants {
String DESIGNER_KEY = "_designer.";
String LANGUAGES_PARAM = "languages";
String CONFIGOVERLAY_JSON = "configoverlay.json";
- String BLOB_STORE_ID = ".system";
String UPDATE_ERROR = "updateError";
String ANALYSIS_ERROR = "analysisError";
String ERROR_DETAILS = "errorDetails";
diff --git
a/solr/core/src/test/org/apache/solr/handler/designer/TestSchemaDesignerAPI.java
b/solr/core/src/test/org/apache/solr/handler/designer/TestSchemaDesignerAPI.java
index eea602082da..29779e8732f 100644
---
a/solr/core/src/test/org/apache/solr/handler/designer/TestSchemaDesignerAPI.java
+++
b/solr/core/src/test/org/apache/solr/handler/designer/TestSchemaDesignerAPI.java
@@ -35,7 +35,6 @@ import java.util.Map;
import java.util.Optional;
import java.util.stream.Stream;
import org.apache.solr.client.solrj.SolrQuery;
-import org.apache.solr.client.solrj.request.CollectionAdminRequest;
import org.apache.solr.client.solrj.response.QueryResponse;
import org.apache.solr.cloud.SolrCloudTestCase;
import org.apache.solr.common.SolrDocumentList;
@@ -72,9 +71,6 @@ public class TestSchemaDesignerAPI extends SolrCloudTestCase
implements SchemaDe
configureCluster(1)
.addConfig(DEFAULT_CONFIGSET_NAME, ExternalPaths.DEFAULT_CONFIGSET)
.configure();
- // SchemaDesignerAPI depends on the blob store ".system" collection
existing.
- CollectionAdminRequest.createCollection(BLOB_STORE_ID, 1,
1).process(cluster.getSolrClient());
- cluster.waitForActiveCollection(BLOB_STORE_ID, 1, 1);
}
@AfterClass
diff --git
a/solr/core/src/test/org/apache/solr/handler/designer/TestSchemaDesignerConfigSetHelper.java
b/solr/core/src/test/org/apache/solr/handler/designer/TestSchemaDesignerConfigSetHelper.java
index 9961afa6d07..851fe64f892 100644
---
a/solr/core/src/test/org/apache/solr/handler/designer/TestSchemaDesignerConfigSetHelper.java
+++
b/solr/core/src/test/org/apache/solr/handler/designer/TestSchemaDesignerConfigSetHelper.java
@@ -31,13 +31,13 @@ import java.util.List;
import java.util.Map;
import java.util.zip.ZipEntry;
import java.util.zip.ZipInputStream;
-import org.apache.solr.client.solrj.request.CollectionAdminRequest;
import org.apache.solr.cloud.SolrCloudTestCase;
import org.apache.solr.common.SolrInputDocument;
import org.apache.solr.common.util.SimpleOrderedMap;
import org.apache.solr.common.util.Utils;
import org.apache.solr.core.CoreContainer;
import org.apache.solr.core.SolrConfig;
+import org.apache.solr.filestore.FileStore;
import org.apache.solr.schema.FieldType;
import org.apache.solr.schema.ManagedIndexSchema;
import org.apache.solr.schema.SchemaField;
@@ -59,9 +59,6 @@ public class TestSchemaDesignerConfigSetHelper extends
SolrCloudTestCase
configureCluster(1)
.addConfig(DEFAULT_CONFIGSET_NAME, ExternalPaths.DEFAULT_CONFIGSET)
.configure();
- // SchemaDesignerConfigSetHelper depends on the blob store
- CollectionAdminRequest.createCollection(BLOB_STORE_ID, 1,
1).process(cluster.getSolrClient());
- cluster.waitForActiveCollection(BLOB_STORE_ID, 1, 1);
}
@AfterClass
@@ -253,14 +250,26 @@ public class TestSchemaDesignerConfigSetHelper extends
SolrCloudTestCase
doc.setField("pages", 809);
doc.setField("published_year", 1989);
- helper.postDataToBlobStore(
- cluster.getSolrClient(),
- configSet + "_sample",
- SchemaDesignerConfigSetHelper.readAllBytes(() ->
toJavabin(List.of(doc))));
+ helper.storeSampleDocs(
+ configSet, SchemaDesignerConfigSetHelper.readAllBytes(() ->
toJavabin(List.of(doc))));
- List<SolrInputDocument> docs = helper.getStoredSampleDocs(configSet);
+ List<SolrInputDocument> docs = helper.retrieveSampleDocs(configSet);
assertTrue(docs != null && docs.size() == 1);
assertEquals("1", docs.get(0).getFieldValue("id"));
+
+ helper.deleteStoredSampleDocs(configSet);
+
+ String path = helper.getSampleDocsPathFromConfigSet(configSet);
+ FileStore.FileType type = cc.getFileStore().getType(path, true);
+ assertEquals(FileStore.FileType.NOFILE, type);
+ }
+
+ @Test
+ public void testRetrieveNonExistentDocsReturnsEmptyDocList() throws
Exception {
+ String configSet = "testRetrieveNonExistentDocsReturnsEmptyDocList";
+ List<SolrInputDocument> docs = helper.retrieveSampleDocs(configSet);
+ assertNotNull(docs);
+ assertTrue(docs.isEmpty());
}
@Test
diff --git a/solr/solrj/src/java/org/apache/solr/common/util/Utils.java
b/solr/solrj/src/java/org/apache/solr/common/util/Utils.java
index 31f15190073..ee7823eb6d6 100644
--- a/solr/solrj/src/java/org/apache/solr/common/util/Utils.java
+++ b/solr/solrj/src/java/org/apache/solr/common/util/Utils.java
@@ -216,6 +216,12 @@ public class Utils {
}
}
+ public static Object fromJavabin(InputStream is) throws IOException {
+ try (JavaBinCodec jbc = new JavaBinCodec()) {
+ return jbc.unmarshal(is);
+ }
+ }
+
public static Collection<?> getDeepCopy(Collection<?> c, int maxDepth,
boolean mutable) {
return getDeepCopy(c, maxDepth, mutable, false);
}