This is an automated email from the ASF dual-hosted git repository.
mkataria pushed a commit to branch trunk
in repository https://gitbox.apache.org/repos/asf/jackrabbit-oak.git
The following commit(s) were added to refs/heads/trunk by this push:
new b03171acb6 OAK-11714: Add jmx to expose inferenceConfig (#2290)
b03171acb6 is described below
commit b03171acb632f8fbe6414215d54c1030a3a43db6
Author: Mohit Kataria <[email protected]>
AuthorDate: Sat May 10 12:00:04 2025 +0530
OAK-11714: Add jmx to expose inferenceConfig (#2290)
* OAK-11714: Add jmx to expose inferenceConfig
* OAK-11714: added more tests
---
.../jackrabbit/oak/api/jmx/InferenceMBean.java | 40 +++
.../index/elastic/ElasticIndexProviderService.java | 13 +
.../elastic/query/inference/EnricherStatus.java | 26 ++
.../elastic/query/inference/InferenceConfig.java | 59 +++-
.../query/inference/InferenceHeaderPayload.java | 8 +-
.../query/inference/InferenceIndexConfig.java | 27 +-
.../query/inference/InferenceMBeanImpl.java | 49 +++
.../query/inference/InferenceModelConfig.java | 30 +-
.../elastic/query/inference/InferencePayload.java | 21 ++
.../InferenceConfigSerializationTest.java | 365 +++++++++++++++++++++
.../query/inference/InferenceConfigTest.java | 311 ++++++++++++++++--
11 files changed, 887 insertions(+), 62 deletions(-)
diff --git a/oak-api/src/main/java/org/apache/jackrabbit/oak/api/jmx/InferenceMBean.java b/oak-api/src/main/java/org/apache/jackrabbit/oak/api/jmx/InferenceMBean.java
new file mode 100644
index 0000000000..2690b6b64d
--- /dev/null
+++ b/oak-api/src/main/java/org/apache/jackrabbit/oak/api/jmx/InferenceMBean.java
@@ -0,0 +1,40 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.jackrabbit.oak.api.jmx;
+
+import org.osgi.annotation.versioning.ProviderType;
+
+/**
+ * An MBean that provides the inference configuration.
+ */
+@ProviderType
+public interface InferenceMBean {
+
+ String TYPE = "Inference";
+
+ /**
+ * Get the inference configuration as a Json string.
+ */
+ String getConfigJson();
+
+ /**
+ * Get the inference configuration node state as a Json string.
+ */
+ String getConfigNodeStateJson();
+}
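For reference, a minimal local-JMX sketch (not part of this patch) of how the two attributes could be read once the bean is registered: it queries by the "type=Inference" key taken from InferenceMBean.TYPE and leaves the domain as a wildcard, since the exact ObjectName depends on the whiteboard that performs the registration (see the service change below). Attribute names follow the usual JMX getter convention (getConfigJson() -> "ConfigJson").

import java.lang.management.ManagementFactory;
import java.util.Set;
import javax.management.MBeanServer;
import javax.management.ObjectName;

public class InferenceMBeanReader {
    public static void main(String[] args) throws Exception {
        MBeanServer server = ManagementFactory.getPlatformMBeanServer();
        // Wildcard domain: match any registered MBean whose type key is "Inference".
        Set<ObjectName> names = server.queryNames(new ObjectName("*:type=Inference,*"), null);
        for (ObjectName name : names) {
            System.out.println(name);
            System.out.println(server.getAttribute(name, "ConfigJson"));
            System.out.println(server.getAttribute(name, "ConfigNodeStateJson"));
        }
    }
}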
diff --git a/oak-search-elastic/src/main/java/org/apache/jackrabbit/oak/plugins/index/elastic/ElasticIndexProviderService.java b/oak-search-elastic/src/main/java/org/apache/jackrabbit/oak/plugins/index/elastic/ElasticIndexProviderService.java
index 43565495dd..a53af22e10 100644
--- a/oak-search-elastic/src/main/java/org/apache/jackrabbit/oak/plugins/index/elastic/ElasticIndexProviderService.java
+++ b/oak-search-elastic/src/main/java/org/apache/jackrabbit/oak/plugins/index/elastic/ElasticIndexProviderService.java
@@ -16,6 +16,7 @@
*/
package org.apache.jackrabbit.oak.plugins.index.elastic;
+import org.apache.jackrabbit.oak.api.jmx.InferenceMBean;
import org.apache.jackrabbit.oak.commons.IOUtils;
import org.apache.jackrabbit.oak.osgi.OsgiWhiteboard;
import org.apache.jackrabbit.oak.plugins.index.AsyncIndexInfoService;
@@ -25,6 +26,7 @@ import org.apache.jackrabbit.oak.plugins.index.elastic.index.ElasticIndexEditorP
import org.apache.jackrabbit.oak.plugins.index.elastic.query.ElasticIndexProvider;
import org.apache.jackrabbit.oak.plugins.index.elastic.query.inference.InferenceConfig;
import org.apache.jackrabbit.oak.plugins.index.elastic.query.inference.InferenceConstants;
+import org.apache.jackrabbit.oak.plugins.index.elastic.query.inference.InferenceMBeanImpl;
import org.apache.jackrabbit.oak.plugins.index.fulltext.PreExtractedTextProvider;
import org.apache.jackrabbit.oak.plugins.index.search.ExtractedTextCache;
import org.apache.jackrabbit.oak.query.QueryEngineSettings;
@@ -209,6 +211,13 @@ public class ElasticIndexProviderService {
ElasticIndexMBean.TYPE,
"Elastic Index statistics"));
+ InferenceMBeanImpl inferenceMBean = new InferenceMBeanImpl();
+ oakRegs.add(registerMBean(whiteboard,
+ InferenceMBean.class,
+ inferenceMBean,
+ InferenceMBean.TYPE,
+ "Inference"));
+
LOG.info("Registering Index and Editor providers with connection {}",
elasticConnection);
registerIndexProvider(bundleContext);
@@ -284,4 +293,8 @@ public class ElasticIndexProviderService {
.withApiKeys(apiKeyId, apiSecretId)
.build();
}
+
+ public InferenceConfig getInferenceConfig() {
+ return InferenceConfig.getInstance();
+ }
}
diff --git a/oak-search-elastic/src/main/java/org/apache/jackrabbit/oak/plugins/index/elastic/query/inference/EnricherStatus.java b/oak-search-elastic/src/main/java/org/apache/jackrabbit/oak/plugins/index/elastic/query/inference/EnricherStatus.java
index 678b3daaa7..21d2c312f7 100644
--- a/oak-search-elastic/src/main/java/org/apache/jackrabbit/oak/plugins/index/elastic/query/inference/EnricherStatus.java
+++ b/oak-search-elastic/src/main/java/org/apache/jackrabbit/oak/plugins/index/elastic/query/inference/EnricherStatus.java
@@ -18,9 +18,11 @@
*/
package org.apache.jackrabbit.oak.plugins.index.elastic.query.inference;
+import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.apache.jackrabbit.oak.commons.PathUtils;
+import org.apache.jackrabbit.oak.commons.json.JsopBuilder;
import org.apache.jackrabbit.oak.spi.state.NodeState;
import org.apache.jackrabbit.oak.spi.state.NodeStore;
import org.slf4j.Logger;
@@ -85,4 +87,28 @@ public class EnricherStatus {
return enricherStatusJsonMapping;
}
+ @Override
+ public String toString() {
+ JsopBuilder builder = new JsopBuilder().object();
+ // Add the mapping data
+ builder.key(InferenceConstants.ENRICHER_STATUS_MAPPING).value(enricherStatusJsonMapping);
+
+ // Add enricher status data
+ builder.key(InferenceConstants.ENRICHER_STATUS_DATA).object();
+ for (Map.Entry<String, Object> entry : enricherStatusData.entrySet()) {
+ builder.key(entry.getKey());
+ if (entry.getValue() instanceof String) {
+ builder.value((String) entry.getValue());
+ } else {
+ try {
+ builder.encodedValue(MAPPER.writeValueAsString(entry.getValue()));
+ } catch (JsonProcessingException e) {
+ LOG.warn("Failed to serialize value for key {}: {}", entry.getKey(), e.getMessage());
+ builder.value(entry.getValue().toString());
+ }
+ }
+ }
+ builder.endObject().endObject();
+ return JsopBuilder.prettyPrint(builder.toString());
+ }
}
\ No newline at end of file
diff --git a/oak-search-elastic/src/main/java/org/apache/jackrabbit/oak/plugins/index/elastic/query/inference/InferenceConfig.java b/oak-search-elastic/src/main/java/org/apache/jackrabbit/oak/plugins/index/elastic/query/inference/InferenceConfig.java
index 34730b7904..167dea0f67 100644
--- a/oak-search-elastic/src/main/java/org/apache/jackrabbit/oak/plugins/index/elastic/query/inference/InferenceConfig.java
+++ b/oak-search-elastic/src/main/java/org/apache/jackrabbit/oak/plugins/index/elastic/query/inference/InferenceConfig.java
@@ -18,7 +18,10 @@
*/
package org.apache.jackrabbit.oak.plugins.index.elastic.query.inference;
+import com.fasterxml.jackson.core.JsonProcessingException;
import org.apache.jackrabbit.oak.commons.PathUtils;
+import org.apache.jackrabbit.oak.commons.json.JsopBuilder;
+import org.apache.jackrabbit.oak.json.JsonUtils;
import org.apache.jackrabbit.oak.plugins.index.IndexName;
import org.apache.jackrabbit.oak.spi.state.NodeState;
import org.apache.jackrabbit.oak.spi.state.NodeStore;
@@ -85,7 +88,7 @@ public class InferenceConfig {
reInitialize(nodeStore, inferenceConfigPath, isInferenceEnabled, true);
}
- public static void reInitialize(){
+ public static void reInitialize() {
reInitialize(INSTANCE.nodeStore, INSTANCE.inferenceConfigPath, INSTANCE.isInferenceEnabled, true);
}
@@ -101,7 +104,7 @@ public class InferenceConfig {
}
}
- private static void reInitialize(NodeStore nodeStore, String inferenceConfigPath, boolean isInferenceEnabled, boolean updateActiveInferenceConfig){
+ private static void reInitialize(NodeStore nodeStore, String inferenceConfigPath, boolean isInferenceEnabled, boolean updateActiveInferenceConfig) {
lock.writeLock().lock();
try {
if (updateActiveInferenceConfig) {
@@ -156,11 +159,11 @@ public class InferenceConfig {
InferenceIndexConfig inferenceIndexConfig;
IndexName indexNameObject;
Function<String, InferenceIndexConfig> getInferenceIndexConfig = (iName) ->
- getIndexConfigs().getOrDefault(iName, InferenceIndexConfig.NOOP);
+ getIndexConfigs().getOrDefault(iName, InferenceIndexConfig.NOOP);
if (!InferenceIndexConfig.NOOP.equals(inferenceIndexConfig = getInferenceIndexConfig.apply(indexName))) {
LOG.debug("InferenceIndexConfig for indexName: {} is: {}", indexName, inferenceIndexConfig);
} else if ((indexNameObject = IndexName.parse(indexName)) != null && indexNameObject.isLegal()
- && indexNameObject.getBaseName() != null
+ && indexNameObject.getBaseName() != null
) {
LOG.debug("InferenceIndexConfig is using baseIndexName {} and is: {}", indexNameObject.getBaseName(), inferenceIndexConfig);
inferenceIndexConfig = getInferenceIndexConfig.apply(indexNameObject.getBaseName());
@@ -175,7 +178,7 @@ public class InferenceConfig {
public @NotNull InferenceModelConfig getInferenceModelConfig(String inferenceIndexName, String inferenceModelConfigName) {
lock.readLock().lock();
try {
- if (inferenceModelConfigName == null){
+ if (inferenceModelConfigName == null) {
return InferenceModelConfig.NOOP;
} else if (inferenceModelConfigName.isEmpty()) {
return getInferenceIndexConfig(inferenceIndexName).getDefaultEnabledModel();
@@ -188,7 +191,7 @@ public class InferenceConfig {
}
- public Map<String, Object> getEnricherStatus(){
+ public Map<String, Object> getEnricherStatus() {
lock.readLock().lock();
try {
return INSTANCE.enricherStatus.getEnricherStatus();
@@ -197,7 +200,7 @@ public class InferenceConfig {
}
}
- public String getEnricherStatusMapping(){
+ public String getEnricherStatusMapping() {
lock.readLock().lock();
try {
return INSTANCE.enricherStatus.getEnricherStatusJsonMapping();
@@ -206,11 +209,32 @@ public class InferenceConfig {
}
}
+ public String getInferenceConfigNodeState() {
+ if (nodeStore != null) {
+ NodeState ns = nodeStore.getRoot();
+ for (String elem : PathUtils.elements(inferenceConfigPath)) {
+ ns = ns.getChildNode(elem);
+ }
+ if (!ns.exists()) {
+ LOG.warn("InferenceConfig: NodeState does not exist for path:
" + inferenceConfigPath);
+ return "{}";
+ }
+ try {
+ return JsonUtils.nodeStateToJson(ns, 5);
+ } catch (JsonProcessingException e) {
+ throw new RuntimeException(e);
+ }
+ } else {
+ LOG.warn("InferenceConfig: NodeStore is null");
+ return "{}";
+ }
+ }
+
private @NotNull Map<String, InferenceIndexConfig> getIndexConfigs() {
lock.readLock().lock();
try {
return isEnabled() ?
- Collections.unmodifiableMap(indexConfigs) : Map.of();
+ Collections.unmodifiableMap(indexConfigs) : Map.of();
} finally {
lock.readLock().unlock();
}
@@ -241,4 +265,23 @@ public class InferenceConfig {
return UUID.randomUUID().toString();
}
+ @Override
+ public String toString() {
+ JsopBuilder builder = new JsopBuilder().object().
+ key("type").value(TYPE).
+ key("enabled").value(enabled).
+ key("inferenceConfigPath").value(inferenceConfigPath).
+ key("currentInferenceConfig").value(currentInferenceConfig).
+ key("activeInferenceConfig").value(activeInferenceConfig).
+ key("isInferenceEnabled").value(isInferenceEnabled).
+ key("indexConfigs").object();
+ // Serialize each index config
+ for (Map.Entry<String, InferenceIndexConfig> e : indexConfigs.entrySet()) {
+ builder.key(e.getKey()).encodedValue(e.getValue().toString());
+ }
+ builder.endObject();
+ // Serialize enricherStatus
+ builder.key(":enrich").encodedValue(enricherStatus.toString()).endObject();
+ return JsopBuilder.prettyPrint(builder.toString());
+ }
}
\ No newline at end of file
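As an aside, the new toString() implementations above all follow the same JsopBuilder pattern; a small standalone sketch of that pattern (illustrative only, the key names and the embedded JSON are made up):

import org.apache.jackrabbit.oak.commons.json.JsopBuilder;

public class JsopToStringSketch {
    public static void main(String[] args) {
        // value() quotes plain strings/booleans; encodedValue() splices in a
        // string that is already valid JSON, as done for nested configs above.
        JsopBuilder builder = new JsopBuilder().object()
                .key("type").value("inferenceConfig")
                .key("enabled").value(true)
                .key("indexConfigs").object()
                .key("myIndex").encodedValue("{\"enabled\":true}")
                .endObject()
                .endObject();
        System.out.println(JsopBuilder.prettyPrint(builder.toString()));
    }
}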
diff --git a/oak-search-elastic/src/main/java/org/apache/jackrabbit/oak/plugins/index/elastic/query/inference/InferenceHeaderPayload.java b/oak-search-elastic/src/main/java/org/apache/jackrabbit/oak/plugins/index/elastic/query/inference/InferenceHeaderPayload.java
index 53c387e20d..36e5e8c618 100644
--- a/oak-search-elastic/src/main/java/org/apache/jackrabbit/oak/plugins/index/elastic/query/inference/InferenceHeaderPayload.java
+++ b/oak-search-elastic/src/main/java/org/apache/jackrabbit/oak/plugins/index/elastic/query/inference/InferenceHeaderPayload.java
@@ -18,6 +18,7 @@
*/
package org.apache.jackrabbit.oak.plugins.index.elastic.query.inference;
+import org.apache.jackrabbit.oak.commons.json.JsopBuilder;
import org.apache.jackrabbit.oak.json.JsonUtils;
import org.apache.jackrabbit.oak.plugins.index.elastic.util.EnvironmentVariableProcessorUtil;
import org.apache.jackrabbit.oak.spi.state.NodeState;
@@ -64,7 +65,12 @@ public class InferenceHeaderPayload {
@Override
public String toString() {
- return inferenceHeaderPayloadMap.toString();
+ JsopBuilder builder = new JsopBuilder().object();
+ for (Map.Entry<String, String> entry : inferenceHeaderPayloadMap.entrySet()) {
+ builder.key(entry.getKey()).value(entry.getValue());
+ }
+ builder.endObject();
+ return JsopBuilder.prettyPrint(builder.toString());
}
}
\ No newline at end of file
diff --git a/oak-search-elastic/src/main/java/org/apache/jackrabbit/oak/plugins/index/elastic/query/inference/InferenceIndexConfig.java b/oak-search-elastic/src/main/java/org/apache/jackrabbit/oak/plugins/index/elastic/query/inference/InferenceIndexConfig.java
index a7243655ed..5a2b78bb9d 100644
--- a/oak-search-elastic/src/main/java/org/apache/jackrabbit/oak/plugins/index/elastic/query/inference/InferenceIndexConfig.java
+++ b/oak-search-elastic/src/main/java/org/apache/jackrabbit/oak/plugins/index/elastic/query/inference/InferenceIndexConfig.java
@@ -18,6 +18,7 @@
*/
package org.apache.jackrabbit.oak.plugins.index.elastic.query.inference;
+import org.apache.jackrabbit.oak.commons.json.JsopBuilder;
import org.apache.jackrabbit.oak.json.JsonUtils;
import org.apache.jackrabbit.oak.spi.state.NodeState;
import org.slf4j.Logger;
@@ -79,7 +80,7 @@ public class InferenceIndexConfig {
this.enricherConfig = getOptionalValue(nodeState, InferenceConstants.ENRICHER_CONFIG, DISABLED_ENRICHER_CONFIG);
inferenceModelConfigs = Map.of();
LOG.warn("inference index config for indexName: {} is not valid. Node: {}",
- indexName, nodeState);
+ indexName, nodeState);
}
}
@@ -108,18 +109,26 @@ public class InferenceIndexConfig {
*/
public InferenceModelConfig getDefaultEnabledModel() {
return inferenceModelConfigs.values().stream()
- .filter(InferenceModelConfig::isDefault)
- .filter(InferenceModelConfig::isEnabled)
- .findFirst()
- .orElse(InferenceModelConfig.NOOP);
+ .filter(InferenceModelConfig::isDefault)
+ .filter(InferenceModelConfig::isEnabled)
+ .findFirst()
+ .orElse(InferenceModelConfig.NOOP);
}
@Override
public String toString() {
- return TYPE + "{" +
- ENRICHER_CONFIG + "='" + enricherConfig + '\'' +
- ", " + InferenceModelConfig.TYPE + "=" + inferenceModelConfigs
+
- '}';
+ JsopBuilder builder = new JsopBuilder().object().
+ key("type").value(TYPE).
+ key(ENRICHER_CONFIG).value(enricherConfig).
+ key(InferenceConstants.ENABLED).value(isEnabled).
+ key("inferenceModelConfigs").object();
+
+ // Serialize each model config
+ for (Map.Entry<String, InferenceModelConfig> e : inferenceModelConfigs.entrySet()) {
+ builder.key(e.getKey()).encodedValue(e.getValue().toString());
+ }
+ builder.endObject().endObject();
+ return JsopBuilder.prettyPrint(builder.toString());
}
}
\ No newline at end of file
diff --git a/oak-search-elastic/src/main/java/org/apache/jackrabbit/oak/plugins/index/elastic/query/inference/InferenceMBeanImpl.java b/oak-search-elastic/src/main/java/org/apache/jackrabbit/oak/plugins/index/elastic/query/inference/InferenceMBeanImpl.java
new file mode 100644
index 0000000000..bfd9f5f6fc
--- /dev/null
+++ b/oak-search-elastic/src/main/java/org/apache/jackrabbit/oak/plugins/index/elastic/query/inference/InferenceMBeanImpl.java
@@ -0,0 +1,49 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.jackrabbit.oak.plugins.index.elastic.query.inference;
+
+import com.fasterxml.jackson.core.JsonProcessingException;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import org.apache.jackrabbit.oak.api.jmx.InferenceMBean;
+import org.apache.jackrabbit.oak.commons.jmx.AnnotatedStandardMBean;
+import org.apache.jackrabbit.oak.plugins.index.elastic.ElasticIndexProviderService;
+import org.jetbrains.annotations.NotNull;
+
+import java.util.Objects;
+
+/**
+ * An MBean that provides the inference configuration.
+ */
+public class InferenceMBeanImpl extends AnnotatedStandardMBean implements InferenceMBean {
+ private static final ObjectMapper MAPPER = new ObjectMapper();
+
+ public InferenceMBeanImpl() {
+ super(InferenceMBean.class);
+ }
+
+ @Override
+ public String getConfigJson() {
+ return InferenceConfig.getInstance().toString();
+ }
+
+ @Override
+ public String getConfigNodeStateJson() {
+ return InferenceConfig.getInstance().getInferenceConfigNodeState();
+ }
+}
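A quick way to sanity-check this bean outside of JMX is to call it directly and parse the results with Jackson, much like the new tests below do; a hedged sketch, assuming InferenceConfig has already been initialized (e.g. via InferenceConfig.reInitialize(...)) and that the class lives in, or imports, the same package:

import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;

public class InferenceMBeanSmokeCheck {
    public static void main(String[] args) throws Exception {
        InferenceMBeanImpl bean = new InferenceMBeanImpl();
        ObjectMapper mapper = new ObjectMapper();
        // Both attributes are expected to be well-formed JSON documents.
        JsonNode config = mapper.readTree(bean.getConfigJson());
        JsonNode nodeState = mapper.readTree(bean.getConfigNodeStateJson());
        System.out.println("config type: " + config.path("type").asText());
        System.out.println("node state empty: " + nodeState.isEmpty());
    }
}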
diff --git a/oak-search-elastic/src/main/java/org/apache/jackrabbit/oak/plugins/index/elastic/query/inference/InferenceModelConfig.java b/oak-search-elastic/src/main/java/org/apache/jackrabbit/oak/plugins/index/elastic/query/inference/InferenceModelConfig.java
index b4f79d0a0f..04a825acd8 100644
--- a/oak-search-elastic/src/main/java/org/apache/jackrabbit/oak/plugins/index/elastic/query/inference/InferenceModelConfig.java
+++ b/oak-search-elastic/src/main/java/org/apache/jackrabbit/oak/plugins/index/elastic/query/inference/InferenceModelConfig.java
@@ -18,6 +18,7 @@
*/
package org.apache.jackrabbit.oak.plugins.index.elastic.query.inference;
+import org.apache.jackrabbit.oak.commons.json.JsopBuilder;
import org.apache.jackrabbit.oak.plugins.index.elastic.util.EnvironmentVariableProcessorUtil;
import org.apache.jackrabbit.oak.spi.query.fulltext.VectorQueryConfig;
import org.apache.jackrabbit.oak.spi.state.NodeState;
@@ -102,7 +103,7 @@ public class InferenceModelConfig {
this.isDefault = getOptionalValue(nodeState, IS_DEFAULT, false);
this.model = getOptionalValue(nodeState, MODEL, "");
this.embeddingServiceUrl = EnvironmentVariableProcessorUtil.processEnvironmentVariable(
- InferenceConstants.INFERENCE_ENVIRONMENT_VARIABLE_PREFIX, getOptionalValue(nodeState, EMBEDDING_SERVICE_URL, ""), DEFAULT_ENVIRONMENT_VARIABLE_VALUE);
+ InferenceConstants.INFERENCE_ENVIRONMENT_VARIABLE_PREFIX, getOptionalValue(nodeState, EMBEDDING_SERVICE_URL, ""), DEFAULT_ENVIRONMENT_VARIABLE_VALUE);
this.similarityThreshold = getOptionalValue(nodeState, SIMILARITY_THRESHOLD, DEFAULT_SIMILARITY_THRESHOLD);
this.minTerms = getOptionalValue(nodeState, MIN_TERMS, DEFAULT_MIN_TERMS);
this.timeout = getOptionalValue(nodeState, TIMEOUT, DEFAULT_TIMEOUT_MILLIS);
@@ -112,18 +113,21 @@ public class InferenceModelConfig {
@Override
public String toString() {
- return TYPE + "{" +
- MODEL + "='" + model + '\'' +
- ", " + EMBEDDING_SERVICE_URL + "='" + embeddingServiceUrl +
'\'' +
- ", " + SIMILARITY_THRESHOLD + similarityThreshold +
- ", " + MIN_TERMS + "=" + minTerms +
- ", " + IS_DEFAULT + "=" + isDefault +
- ", " + ENABLED + "=" + enabled +
- ", " + HEADER + "=" + header +
- ", " + INFERENCE_PAYLOAD + "=" + payload +
- ", " + TIMEOUT + "=" + timeout +
- ", " + NUM_CANDIDATES + "=" + numCandidates +
- "}";
+ JsopBuilder builder = new JsopBuilder().object().
+ key("type").value(TYPE).
+ key(MODEL).value(model).
+ key(EMBEDDING_SERVICE_URL).value(embeddingServiceUrl).
+ key(SIMILARITY_THRESHOLD).encodedValue("" + similarityThreshold).
+ key(MIN_TERMS).value(minTerms).
+ key(IS_DEFAULT).value(isDefault).
+ key(ENABLED).value(enabled).
+ key(HEADER).encodedValue(header.toString()).
+ key(INFERENCE_PAYLOAD).encodedValue(payload.toString()).
+ key(TIMEOUT).value(timeout).
+ key(NUM_CANDIDATES).value(numCandidates).
+ key(CACHE_SIZE).value(cacheSize);
+ builder.endObject();
+ return JsopBuilder.prettyPrint(builder.toString());
}
public String getInferenceModelConfigName() {
diff --git a/oak-search-elastic/src/main/java/org/apache/jackrabbit/oak/plugins/index/elastic/query/inference/InferencePayload.java b/oak-search-elastic/src/main/java/org/apache/jackrabbit/oak/plugins/index/elastic/query/inference/InferencePayload.java
index ac230014a9..93a6065581 100644
--- a/oak-search-elastic/src/main/java/org/apache/jackrabbit/oak/plugins/index/elastic/query/inference/InferencePayload.java
+++ b/oak-search-elastic/src/main/java/org/apache/jackrabbit/oak/plugins/index/elastic/query/inference/InferencePayload.java
@@ -20,6 +20,7 @@ package org.apache.jackrabbit.oak.plugins.index.elastic.query.inference;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
+import org.apache.jackrabbit.oak.commons.json.JsopBuilder;
import org.apache.jackrabbit.oak.json.JsonUtils;
import org.apache.jackrabbit.oak.plugins.index.elastic.util.EnvironmentVariableProcessorUtil;
import org.apache.jackrabbit.oak.spi.state.NodeState;
@@ -58,6 +59,7 @@ public class InferencePayload {
//replace current keys with swapped
inferencePayloadMap.putAll(swappedEnvVarsMap);
}
+
/*
* Get the inference payload as a json string
*
@@ -76,4 +78,23 @@ public class InferencePayload {
}
}
+ @Override
+ public String toString() {
+ JsopBuilder builder = new JsopBuilder().object();
+ for (Map.Entry<String, Object> entry : inferencePayloadMap.entrySet()) {
+ builder.key(entry.getKey());
+ if (entry.getValue() instanceof String) {
+ builder.value((String) entry.getValue());
+ } else {
+ try {
+ builder.encodedValue(objectMapper.writeValueAsString(entry.getValue()));
+ } catch (JsonProcessingException e) {
+ LOG.warn("Failed to serialize value for key {}: {}", entry.getKey(), e.getMessage());
+ builder.value(entry.getValue().toString());
+ }
+ }
+ }
+ builder.endObject();
+ return JsopBuilder.prettyPrint(builder.toString());
+ }
}
\ No newline at end of file
diff --git a/oak-search-elastic/src/test/java/org/apache/jackrabbit/oak/plugins/index/elastic/query/inference/InferenceConfigSerializationTest.java b/oak-search-elastic/src/test/java/org/apache/jackrabbit/oak/plugins/index/elastic/query/inference/InferenceConfigSerializationTest.java
new file mode 100644
index 0000000000..503b685ef1
--- /dev/null
+++ b/oak-search-elastic/src/test/java/org/apache/jackrabbit/oak/plugins/index/elastic/query/inference/InferenceConfigSerializationTest.java
@@ -0,0 +1,365 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.jackrabbit.oak.plugins.index.elastic.query.inference;
+
+import com.fasterxml.jackson.databind.JsonNode;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import org.apache.jackrabbit.oak.plugins.memory.EmptyNodeState;
+import org.apache.jackrabbit.oak.plugins.memory.MemoryNodeBuilder;
+import org.apache.jackrabbit.oak.plugins.memory.MemoryNodeStore;
+import org.apache.jackrabbit.oak.spi.commit.CommitInfo;
+import org.apache.jackrabbit.oak.spi.commit.EmptyHook;
+import org.apache.jackrabbit.oak.spi.state.NodeBuilder;
+import org.apache.jackrabbit.oak.spi.state.NodeStore;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+
+/**
+ * Tests for the toString() methods of the inference-related classes which use JsopBuilder
+ */
+public class InferenceConfigSerializationTest {
+
+ private static final ObjectMapper MAPPER = new ObjectMapper();
+ private static final String DEFAULT_CONFIG_PATH = InferenceConstants.DEFAULT_OAK_INDEX_INFERENCE_CONFIG_PATH;
+ private static final String ENRICHER_CONFIG = "{\"enricher\":{\"config\":{\"vectorSpaces\":{\"semantic\":{\"pipeline\":{\"steps\":[{\"inputFields\":{\"description\":\"STRING\",\"title\":\"STRING\"},\"chunkingConfig\":{\"enabled\":true},\"name\":\"sentence-embeddings\",\"model\":\"text-embedding-ada-002\",\"optional\":true,\"type\":\"embeddings\"}]},\"default\":false}},\"version\":\"0.0.1\"}}}";
+ private static final String DEFAULT_ENRICHER_STATUS_MAPPING = "{\"properties\":{\"processingTimeMs\":{\"type\":\"date\"},\"latestError\":{\"type\":\"keyword\",\"index\":false},\"errorCount\":{\"type\":\"short\"},\"status\":{\"type\":\"keyword\"}}}";
+ private static final String DEFAULT_ENRICHER_STATUS_DATA = "{\"processingTimeMs\":0,\"latestError\":\"\",\"errorCount\":0,\"status\":\"PENDING\"}";
+
+ private NodeBuilder rootBuilder;
+ private NodeStore nodeStore;
+
+ @Before
+ public void setup() {
+ // Initialize memory node store
+ rootBuilder = new MemoryNodeBuilder(EmptyNodeState.EMPTY_NODE);
+ nodeStore = new MemoryNodeStore(rootBuilder.getNodeState());
+ }
+
+ @After
+ public void tearDown() {
+ rootBuilder = null;
+ nodeStore = null;
+ }
+
+ /**
+ * Test for InferenceConfig.toString()
+ */
+ @Test
+ public void testInferenceConfigToString() throws Exception {
+ // Setup: Create a basic inference config
+ NodeBuilder inferenceConfigBuilder = createNodePath(rootBuilder, DEFAULT_CONFIG_PATH);
+ inferenceConfigBuilder.setProperty(InferenceConstants.TYPE, InferenceConfig.TYPE);
+ inferenceConfigBuilder.setProperty(InferenceConstants.ENABLED, true);
+
+ // Add index config
+ String indexName = "testIndex";
+ NodeBuilder indexConfigBuilder = inferenceConfigBuilder.child(indexName);
+ indexConfigBuilder.setProperty(InferenceConstants.TYPE, InferenceIndexConfig.TYPE);
+ indexConfigBuilder.setProperty(InferenceConstants.ENABLED, true);
+ indexConfigBuilder.setProperty(InferenceConstants.ENRICHER_CONFIG, ENRICHER_CONFIG);
+
+ // Commit the changes
+ nodeStore.merge(rootBuilder, EmptyHook.INSTANCE, CommitInfo.EMPTY);
+
+ // Initialize the inference config
+ InferenceConfig.reInitialize(nodeStore, DEFAULT_CONFIG_PATH, true);
+ InferenceConfig inferenceConfig = InferenceConfig.getInstance();
+
+ // Get the toString representation
+ String json = inferenceConfig.toString();
+
+ // Verify it's valid JSON
+ JsonNode node = MAPPER.readTree(json);
+
+ // Verify the structure
+ assertTrue("JSON should contain 'type' key", node.has("type"));
+ assertEquals("Type should be inferenceConfig", InferenceConfig.TYPE,
node.get("type").asText());
+ assertTrue("JSON should contain 'enabled' key", node.has("enabled"));
+ assertTrue("enabled should be true", node.get("enabled").asBoolean());
+ assertTrue("JSON should contain 'indexConfigs' key",
node.has("indexConfigs"));
+ assertTrue("indexConfigs should be an object",
node.get("indexConfigs").isObject());
+ assertTrue("indexConfigs should contain testIndex",
node.get("indexConfigs").has(indexName));
+ }
+
+ /**
+ * Test for InferenceIndexConfig.toString()
+ */
+ @Test
+ public void testInferenceIndexConfigToString() throws Exception {
+ // Create a simple index config
+ NodeBuilder indexConfigBuilder = rootBuilder.child("testIndex");
+ indexConfigBuilder.setProperty(InferenceConstants.TYPE, InferenceIndexConfig.TYPE);
+ indexConfigBuilder.setProperty(InferenceConstants.ENABLED, true);
+ indexConfigBuilder.setProperty(InferenceConstants.ENRICHER_CONFIG, ENRICHER_CONFIG);
+
+ // Create the index config object
+ InferenceIndexConfig indexConfig = new InferenceIndexConfig("testIndex", indexConfigBuilder.getNodeState());
+
+ // Get the toString representation
+ String json = indexConfig.toString();
+
+ // Verify it's valid JSON
+ JsonNode node = MAPPER.readTree(json);
+
+ // Verify the structure
+ assertTrue("JSON should contain 'type' key", node.has("type"));
+ assertEquals("Type should be inferenceIndexConfig",
InferenceIndexConfig.TYPE, node.get("type").asText());
+ assertTrue("JSON should contain 'enricherConfig' key",
node.has(InferenceIndexConfig.ENRICHER_CONFIG));
+ assertEquals("Enricher config should match", ENRICHER_CONFIG,
node.get(InferenceIndexConfig.ENRICHER_CONFIG).asText());
+ assertTrue("JSON should contain 'enabled' key",
node.has(InferenceConstants.ENABLED));
+ assertTrue("enabled should be true",
node.get(InferenceConstants.ENABLED).asBoolean());
+ assertTrue("JSON should contain 'inferenceModelConfigs' key",
node.has("inferenceModelConfigs"));
+ assertTrue("inferenceModelConfigs should be an object",
node.get("inferenceModelConfigs").isObject());
+ }
+
+ /**
+ * Test for InferenceModelConfig.toString()
+ */
+ @Test
+ public void testInferenceModelConfigToString() throws Exception {
+ // Create a model config with header and payload
+ NodeBuilder modelConfigBuilder = rootBuilder.child("testModel");
+ modelConfigBuilder.setProperty(InferenceConstants.TYPE, InferenceModelConfig.TYPE);
+ modelConfigBuilder.setProperty(InferenceConstants.ENABLED, true);
+ modelConfigBuilder.setProperty(InferenceModelConfig.IS_DEFAULT, true);
+ modelConfigBuilder.setProperty(InferenceModelConfig.MODEL, "test-model");
+ modelConfigBuilder.setProperty(InferenceModelConfig.EMBEDDING_SERVICE_URL, "http://test-service");
+ modelConfigBuilder.setProperty(InferenceModelConfig.SIMILARITY_THRESHOLD, 0.85);
+ modelConfigBuilder.setProperty(InferenceModelConfig.MIN_TERMS, 3);
+ modelConfigBuilder.setProperty(InferenceModelConfig.TIMEOUT, 10000);
+ modelConfigBuilder.setProperty(InferenceModelConfig.NUM_CANDIDATES, 50);
+ modelConfigBuilder.setProperty(InferenceModelConfig.CACHE_SIZE, 200);
+
+ // Create header node
+ NodeBuilder headerBuilder = modelConfigBuilder.child(InferenceModelConfig.HEADER);
+ headerBuilder.setProperty("Authorization", "Bearer test-token");
+ headerBuilder.setProperty("Content-Type", "application/json");
+
+ // Create payload node
+ NodeBuilder payloadBuilder = modelConfigBuilder.child(InferenceModelConfig.INFERENCE_PAYLOAD);
+ payloadBuilder.setProperty("model", "text-embedding-ada-002");
+ payloadBuilder.setProperty("dimensions", 1536);
+
+ // Create the model config object
+ InferenceModelConfig modelConfig = new InferenceModelConfig("testModel", modelConfigBuilder.getNodeState());
+
+ // Get the toString representation
+ String json = modelConfig.toString();
+
+ // Verify it's valid JSON
+ JsonNode node = MAPPER.readTree(json);
+
+ // Verify structure
+ assertTrue("JSON should contain 'TYPE' key", node.has("type"));
+ assertEquals("Type should match", InferenceModelConfig.TYPE,
node.get("type").asText());
+ assertTrue("JSON should contain 'model' key",
node.has(InferenceModelConfig.MODEL));
+ assertEquals("Model should match", "test-model",
node.get(InferenceModelConfig.MODEL).asText());
+ assertTrue("JSON should contain 'embeddingServiceUrl' key",
node.has(InferenceModelConfig.EMBEDDING_SERVICE_URL));
+ assertEquals("Service URL should match", "http://test-service",
node.get(InferenceModelConfig.EMBEDDING_SERVICE_URL).asText());
+ assertTrue("JSON should contain 'similarityThreshold' key",
node.has(InferenceModelConfig.SIMILARITY_THRESHOLD));
+ assertEquals("Similarity threshold should match", 0.85,
node.get(InferenceModelConfig.SIMILARITY_THRESHOLD).asDouble(), 0.001);
+ assertTrue("JSON should contain 'minTerms' key",
node.has(InferenceModelConfig.MIN_TERMS));
+ assertEquals("Min terms should match", 3,
node.get(InferenceModelConfig.MIN_TERMS).asInt());
+ assertTrue("JSON should contain 'isDefault' key",
node.has(InferenceModelConfig.IS_DEFAULT));
+ assertTrue("isDefault should be true",
node.get(InferenceModelConfig.IS_DEFAULT).asBoolean());
+ assertTrue("JSON should contain 'enabled' key",
node.has(InferenceModelConfig.ENABLED));
+ assertTrue("enabled should be true",
node.get(InferenceModelConfig.ENABLED).asBoolean());
+ assertTrue("JSON should contain 'header' key",
node.has(InferenceModelConfig.HEADER));
+ assertTrue("JSON should contain 'inferencePayload' key",
node.has(InferenceModelConfig.INFERENCE_PAYLOAD));
+ assertTrue("JSON should contain 'timeout' key",
node.has(InferenceModelConfig.TIMEOUT));
+ assertEquals("Timeout should match", 10000,
node.get(InferenceModelConfig.TIMEOUT).asInt());
+ assertTrue("JSON should contain 'numCandidates' key",
node.has(InferenceModelConfig.NUM_CANDIDATES));
+ assertEquals("Num candidates should match", 50,
node.get(InferenceModelConfig.NUM_CANDIDATES).asInt());
+ assertTrue("JSON should contain 'cacheSize' key",
node.has(InferenceModelConfig.CACHE_SIZE));
+ assertEquals("Cache size should match", 200,
node.get(InferenceModelConfig.CACHE_SIZE).asInt());
+ }
+
+ /**
+ * Test for InferenceHeaderPayload.toString()
+ */
+ @Test
+ public void testInferenceHeaderPayloadToString() throws Exception {
+ // Create a header payload
+ NodeBuilder headerBuilder = rootBuilder.child("header");
+ headerBuilder.setProperty("Authorization", "Bearer test-token");
+ headerBuilder.setProperty("Content-Type", "application/json");
+
+ // Create the header payload object
+ InferenceHeaderPayload headerPayload = new InferenceHeaderPayload(headerBuilder.getNodeState());
+
+ // Get the toString representation
+ String json = headerPayload.toString();
+
+ // Verify it's valid JSON
+ JsonNode node = MAPPER.readTree(json);
+
+ // Verify structure
+ assertTrue("JSON should contain Authorization",
node.has("Authorization"));
+ assertEquals("Authorization should match", "Bearer test-token",
node.get("Authorization").asText());
+ assertTrue("JSON should contain Content-Type",
node.has("Content-Type"));
+ assertEquals("Content-Type should match", "application/json",
node.get("Content-Type").asText());
+ }
+
+ /**
+ * Test for InferencePayload.toString()
+ */
+ @Test
+ public void testInferencePayloadToString() throws Exception {
+ // Create a payload
+ NodeBuilder payloadBuilder = rootBuilder.child("payload");
+ payloadBuilder.setProperty("model", "text-embedding-ada-002");
+ payloadBuilder.setProperty("dimensions", 1536);
+
+ // Create the payload object
+ InferencePayload payload = new InferencePayload("testModel",
payloadBuilder.getNodeState());
+
+ // Get the toString representation
+ String json = payload.toString();
+
+ // Verify it's valid JSON
+ JsonNode node = MAPPER.readTree(json);
+
+ // Verify structure
+ assertTrue("JSON should contain model", node.has("model"));
+ assertEquals("Model should match", "text-embedding-ada-002",
node.get("model").asText());
+ assertTrue("JSON should contain dimensions", node.has("dimensions"));
+ assertEquals("Dimensions should match", 1536,
node.get("dimensions").asInt());
+ }
+
+ /**
+ * Test for EnricherStatus.toString()
+ */
+ @Test
+ public void testEnricherStatusToString() throws Exception {
+ // Setup: Create a node structure with enricher status data
+ NodeBuilder inferenceConfigBuilder = createNodePath(rootBuilder, DEFAULT_CONFIG_PATH);
+ NodeBuilder enrichNode = inferenceConfigBuilder.child(InferenceConstants.ENRICH_NODE);
+ enrichNode.setProperty(InferenceConstants.ENRICHER_STATUS_MAPPING, DEFAULT_ENRICHER_STATUS_MAPPING);
+ enrichNode.setProperty(InferenceConstants.ENRICHER_STATUS_DATA, DEFAULT_ENRICHER_STATUS_DATA);
+
+ // Commit the changes
+ nodeStore.merge(rootBuilder, EmptyHook.INSTANCE, CommitInfo.EMPTY);
+
+ // Create the enricher status object
+ EnricherStatus status = new EnricherStatus(nodeStore, DEFAULT_CONFIG_PATH);
+
+ // Get the toString representation
+ String json = status.toString();
+
+ // Verify it's valid JSON
+ JsonNode node = MAPPER.readTree(json);
+
+ // Verify structure
+ assertTrue("JSON should contain enricherStatusMapping",
node.has(InferenceConstants.ENRICHER_STATUS_MAPPING));
+ JsonNode mappingNode =
MAPPER.readTree(node.get(InferenceConstants.ENRICHER_STATUS_MAPPING).asText());
+ assertTrue("Mapping should contain properties",
mappingNode.has("properties"));
+
+ assertTrue("JSON should contain enricherStatusData",
node.has("enricherStatusData"));
+ JsonNode statusData = node.get("enricherStatusData");
+ assertTrue("Status data should contain processingTimeMs",
statusData.has("processingTimeMs"));
+ assertEquals("Processing time should be 0", 0,
statusData.get("processingTimeMs").asInt());
+ assertTrue("Status data should contain status",
statusData.has("status"));
+ assertEquals("Status should be PENDING", "PENDING",
statusData.get("status").asText());
+ assertTrue("Status data should contain errorCount",
statusData.has("errorCount"));
+ assertEquals("Error count should be 0", 0,
statusData.get("errorCount").asInt());
+ assertTrue("Status data should contain latestError",
statusData.has("latestError"));
+ assertEquals("Latest error should be empty", "",
statusData.get("latestError").asText());
+ }
+
+ /**
+ * More comprehensive test for InferenceConfig.toString() to verify all fields
+ */
+ @Test
+ public void testComprehensiveInferenceConfigToString() throws Exception {
+ // Setup: Create a basic inference config
+ NodeBuilder inferenceConfigBuilder = createNodePath(rootBuilder, DEFAULT_CONFIG_PATH);
+ inferenceConfigBuilder.setProperty(InferenceConstants.TYPE, InferenceConfig.TYPE);
+ inferenceConfigBuilder.setProperty(InferenceConstants.ENABLED, true);
+
+ // Add index config
+ String indexName = "testIndex";
+ NodeBuilder indexConfigBuilder = inferenceConfigBuilder.child(indexName);
+ indexConfigBuilder.setProperty(InferenceConstants.TYPE, InferenceIndexConfig.TYPE);
+ indexConfigBuilder.setProperty(InferenceConstants.ENABLED, true);
+ indexConfigBuilder.setProperty(InferenceConstants.ENRICHER_CONFIG, ENRICHER_CONFIG);
+
+ // Add enricher status
+ NodeBuilder enrichNode = inferenceConfigBuilder.child(InferenceConstants.ENRICH_NODE);
+ enrichNode.setProperty(InferenceConstants.ENRICHER_STATUS_MAPPING, DEFAULT_ENRICHER_STATUS_MAPPING);
+ enrichNode.setProperty(InferenceConstants.ENRICHER_STATUS_DATA, DEFAULT_ENRICHER_STATUS_DATA);
+
+ // Commit the changes
+ nodeStore.merge(rootBuilder, EmptyHook.INSTANCE, CommitInfo.EMPTY);
+
+ // Initialize the inference config
+ InferenceConfig.reInitialize(nodeStore, DEFAULT_CONFIG_PATH, true);
+ InferenceConfig inferenceConfig = InferenceConfig.getInstance();
+
+ // Get the toString representation
+ String json = inferenceConfig.toString();
+
+ // Verify it's valid JSON
+ JsonNode node = MAPPER.readTree(json);
+
+ // Verify the structure includes all fields from the toString method
+ assertTrue("JSON should contain 'type' key", node.has("type"));
+ assertEquals("Type should be inferenceConfig", InferenceConfig.TYPE,
node.get("type").asText());
+
+ assertTrue("JSON should contain 'enabled' key", node.has("enabled"));
+ assertTrue("enabled should be true", node.get("enabled").asBoolean());
+
+ assertTrue("JSON should contain 'inferenceConfigPath' key",
node.has("inferenceConfigPath"));
+ assertEquals("inferenceConfigPath should match", DEFAULT_CONFIG_PATH,
node.get("inferenceConfigPath").asText());
+
+ assertTrue("JSON should contain 'currentInferenceConfig' key",
node.has("currentInferenceConfig"));
+ assertTrue("currentInferenceConfig should not be empty",
!node.get("currentInferenceConfig").asText().isEmpty());
+
+ assertTrue("JSON should contain 'activeInferenceConfig' key",
node.has("activeInferenceConfig"));
+ assertTrue("activeInferenceConfig should not be empty",
!node.get("activeInferenceConfig").asText().isEmpty());
+
+ assertTrue("JSON should contain 'isInferenceEnabled' key",
node.has("isInferenceEnabled"));
+ assertTrue("isInferenceEnabled should be true",
node.get("isInferenceEnabled").asBoolean());
+
+ assertTrue("JSON should contain 'indexConfigs' key",
node.has("indexConfigs"));
+ assertTrue("indexConfigs should be an object",
node.get("indexConfigs").isObject());
+ assertTrue("indexConfigs should contain testIndex",
node.get("indexConfigs").has(indexName));
+
+ assertTrue("JSON should contain ':enrich' key", node.has(":enrich"));
+ JsonNode enrichNode2 = node.get(":enrich");
+ assertTrue("enrichNode should contain 'enricherStatusMapping'",
enrichNode2.has(InferenceConstants.ENRICHER_STATUS_MAPPING));
+ assertTrue("enrichNode should contain 'enricherStatusData'",
enrichNode2.has(InferenceConstants.ENRICHER_STATUS_DATA));
+ }
+
+ /**
+ * Helper method to create node paths
+ */
+ private NodeBuilder createNodePath(NodeBuilder rootBuilder, String path) {
+ NodeBuilder currentBuilder = rootBuilder;
+ for (String element : path.split("/")) {
+ if (!element.isEmpty()) {
+ currentBuilder = currentBuilder.child(element);
+ }
+ }
+ return currentBuilder;
+ }
+}
\ No newline at end of file
diff --git a/oak-search-elastic/src/test/java/org/apache/jackrabbit/oak/plugins/index/elastic/query/inference/InferenceConfigTest.java b/oak-search-elastic/src/test/java/org/apache/jackrabbit/oak/plugins/index/elastic/query/inference/InferenceConfigTest.java
index 790426c10a..0af97b2428 100644
--- a/oak-search-elastic/src/test/java/org/apache/jackrabbit/oak/plugins/index/elastic/query/inference/InferenceConfigTest.java
+++ b/oak-search-elastic/src/test/java/org/apache/jackrabbit/oak/plugins/index/elastic/query/inference/InferenceConfigTest.java
@@ -19,6 +19,8 @@
package org.apache.jackrabbit.oak.plugins.index.elastic.query.inference;
import com.fasterxml.jackson.core.JsonProcessingException;
+import com.fasterxml.jackson.databind.JsonNode;
+import com.fasterxml.jackson.databind.ObjectMapper;
import joptsimple.internal.Strings;
import org.apache.jackrabbit.oak.api.CommitFailedException;
import org.apache.jackrabbit.oak.commons.PathUtils;
@@ -36,6 +38,7 @@ import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
+import java.io.IOException;
import java.util.Map;
import static org.junit.Assert.assertEquals;
@@ -87,7 +90,7 @@ public class InferenceConfigTest {
}
/**
- * Test 1: Basic test - Disabled InferenceConfig
+ * Basic test - Disabled InferenceConfig
* Verifies that when inference config is created but disabled, the InferenceConfig object reflects this state
*/
@Test
@@ -109,7 +112,7 @@ public class InferenceConfigTest {
}
/**
- * Test 2: Enabled InferenceConfig but no index configs
+ * Enabled InferenceConfig but no index configs
* Verifies that when an empty inference config is enabled, the InferenceConfig object reflects this state
*/
@Test
@@ -131,7 +134,7 @@ public class InferenceConfigTest {
}
/**
- * Test 3: Basic InferenceIndexConfig creation
+ * Basic InferenceIndexConfig creation
* Tests the creation of a simple InferenceIndexConfig within InferenceConfig
*/
@Test
@@ -167,7 +170,7 @@ public class InferenceConfigTest {
}
/**
- * Test 4: Disabled InferenceIndexConfig
+ * Disabled InferenceIndexConfig
* Tests that a disabled InferenceIndexConfig is properly handled
*/
@Test
@@ -201,7 +204,7 @@ public class InferenceConfigTest {
}
/**
- * Test 5: Invalid InferenceIndexConfig (missing type)
+ * Invalid InferenceIndexConfig (missing type)
* Tests that an invalid InferenceIndexConfig (missing type) is properly handled
*/
@Test
@@ -234,7 +237,7 @@ public class InferenceConfigTest {
}
/**
- * Test 6: Basic InferenceModelConfig
+ * Basic InferenceModelConfig
* Tests the creation of an InferenceModelConfig within an InferenceIndexConfig
*/
@Test
@@ -287,7 +290,7 @@ public class InferenceConfigTest {
}
/**
- * Test 7: Multiple InferenceModelConfigs with one default
+ * Multiple InferenceModelConfigs with one default
* Tests multiple InferenceModelConfigs within an InferenceIndexConfig, with one marked as default
*/
@Test
@@ -353,7 +356,7 @@ public class InferenceConfigTest {
}
/**
- * Test 8: Test EnricherStatus JSON Mapping
+ * Test EnricherStatus JSON Mapping
* Tests that the EnricherStatus JSON mapping is properly stored and retrieved
*/
@Test
@@ -384,7 +387,7 @@ public class InferenceConfigTest {
}
/**
- * Test 9: Test Complete Integration with EnricherStatus
+ * Test Complete Integration with EnricherStatus
* Tests the complete integration of InferenceConfig, InferenceIndexConfig, InferenceModelConfig, and EnricherStatus
*/
@Test
@@ -470,18 +473,7 @@ public class InferenceConfigTest {
}
/**
- * Utility method to verify enricher status fields
- */
- private void verifyEnricherStatusFields(Map<String, Object> status, String expectedStatus,
- int expectedProcessingTime, String expectedError, int expectedErrorCount) {
- assertEquals("Status should match", expectedStatus, status.get("status"));
- assertEquals("Processing time should match", expectedProcessingTime, status.get("processingTimeMs"));
- assertEquals("Latest error should match", expectedError, status.get("latestError"));
- assertEquals("Error count should match", expectedErrorCount, status.get("errorCount"));
- }
-
- /**
- * Test 10: Test EnricherStatus
+ * Test EnricherStatus
* Tests that the EnricherStatus is properly loaded from the inference config
*/
@Test
@@ -519,7 +511,29 @@ public class InferenceConfigTest {
}
/**
- * Test 11: Test EnricherStatus Refresh
+ * Utility method to verify enricher status fields
+ */
+ private void verifyEnricherStatusFields(Map<String, Object> status, String expectedStatus,
+ int expectedProcessingTime, String expectedError, int expectedErrorCount) {
+ assertEquals("Status should match", expectedStatus, status.get("status"));
+ assertEquals("Processing time should match", expectedProcessingTime, status.get("processingTimeMs"));
+ assertEquals("Latest error should match", expectedError, status.get("latestError"));
+ assertEquals("Error count should match", expectedErrorCount, status.get("errorCount"));
+ }
+
+ /**
+ * Utility method to create a path of nodes
+ */
+ private NodeBuilder createNodePath(NodeBuilder rootBuilder, String path) {
+ NodeBuilder builder = rootBuilder;
+ for (String elem : PathUtils.elements(path)) {
+ builder = builder.child(elem);
+ }
+ return builder;
+ }
+
+ /**
+ * Test EnricherStatus Refresh
* Tests that the EnricherStatus is properly refreshed when the inference config is updated
*/
@Test
@@ -571,7 +585,7 @@ public class InferenceConfigTest {
}
/**
- * Test 12: Test EnricherStatus with Error Information
+ * Test EnricherStatus with Error Information
* Tests that the EnricherStatus properly handles error information
*/
@Test
@@ -603,7 +617,7 @@ public class InferenceConfigTest {
}
/**
- * Test 13: Test Complete Configuration with Multiple Indexes and Models including EnricherStatus
+ * Test Complete Configuration with Multiple Indexes and Models including EnricherStatus
* Tests a complex configuration with multiple indexes, models, and enricher status
*/
@Test
@@ -738,13 +752,248 @@ public class InferenceConfigTest {
}
/**
- * Utility method to create a path of nodes
+ * Test getInferenceConfigNodeState
+ * Comprehensively tests the getInferenceConfigNodeState method's functionality
+ * including normal operation, handling of non-existent paths, and complex JSON structures
*/
- private NodeBuilder createNodePath(NodeBuilder rootBuilder, String path) {
- NodeBuilder builder = rootBuilder;
- for (String elem : PathUtils.elements(path)) {
- builder = builder.child(elem);
- }
- return builder;
+ @Test
+ public void testGetInferenceConfigNodeState() throws CommitFailedException, IOException {
+ // Part 1: Test with a complete configuration (happy path)
+ // ----------------------------------------------------------
+ // Create enabled inference config with complete configuration
+ NodeBuilder inferenceConfigBuilder = createNodePath(rootBuilder, DEFAULT_CONFIG_PATH);
+ inferenceConfigBuilder.setProperty(InferenceConstants.TYPE, InferenceConfig.TYPE);
+ inferenceConfigBuilder.setProperty(InferenceConstants.ENABLED, true);
+
+ // Add custom property to verify in JSON output
+ inferenceConfigBuilder.setProperty("customProperty", "customValue");
+
+ // Add enricher status node
+ NodeBuilder enrichBuilder = inferenceConfigBuilder.child(InferenceConstants.ENRICH_NODE);
+ enrichBuilder.setProperty(InferenceConstants.ENRICHER_STATUS_MAPPING, defaultEnricherStatusMapping);
+ enrichBuilder.setProperty(InferenceConstants.ENRICHER_STATUS_DATA, defaultEnricherStatusData);
+
+ // Add index config
+ String indexName = "testJsonIndex";
+ NodeBuilder indexConfigBuilder = inferenceConfigBuilder.child(indexName);
+ indexConfigBuilder.setProperty(InferenceConstants.TYPE, InferenceIndexConfig.TYPE);
+ indexConfigBuilder.setProperty(InferenceConstants.ENABLED, true);
+ indexConfigBuilder.setProperty(InferenceConstants.ENRICHER_CONFIG, ENRICHER_CONFIG);
+
+ // Add model config
+ String modelName = "testJsonModel";
+ NodeBuilder modelConfigBuilder = indexConfigBuilder.child(modelName);
+ modelConfigBuilder.setProperty(InferenceConstants.TYPE, InferenceModelConfig.TYPE);
+ modelConfigBuilder.setProperty(InferenceConstants.ENABLED, true);
+ modelConfigBuilder.setProperty(InferenceModelConfig.IS_DEFAULT, true);
+ modelConfigBuilder.setProperty(InferenceModelConfig.MODEL, "json-model");
+ modelConfigBuilder.setProperty(InferenceModelConfig.EMBEDDING_SERVICE_URL, "http://localhost:8080/test");
+ modelConfigBuilder.setProperty(InferenceModelConfig.SIMILARITY_THRESHOLD, 0.8);
+ modelConfigBuilder.setProperty(InferenceModelConfig.MIN_TERMS, 3L);
+
+ // Add complex structure with various data types and special characters
+ NodeBuilder complexBuilder = inferenceConfigBuilder.child("complexNode");
+ complexBuilder.setProperty("string", "simple string value");
+ complexBuilder.setProperty("boolean", true);
+ complexBuilder.setProperty("number", 12345);
+ complexBuilder.setProperty("special", "test\"with\\quotes\nand\tnewlines");
+ complexBuilder.setProperty("unicode", "测试unicode字符");
+
+ // Add a child node to test nesting
+ NodeBuilder childBuilder = complexBuilder.child("childNode");
+ childBuilder.setProperty("childProp", "child value");
+
+ // Commit the changes
+ nodeStore.merge(rootBuilder, EmptyHook.INSTANCE, CommitInfo.EMPTY);
+
+ // Initialize InferenceConfig object
+ InferenceConfig.reInitialize(nodeStore, DEFAULT_CONFIG_PATH, true);
+ InferenceConfig inferenceConfig = InferenceConfig.getInstance();
+
+ // Get the node state as JSON
+ String nodeStateJson = inferenceConfig.getInferenceConfigNodeState();
+
+ // Parse the JSON
+ ObjectMapper mapper = new ObjectMapper();
+ JsonNode rootNode = mapper.readTree(nodeStateJson);
+
+ // Validate the complete config JSON
+ assertNotNull("Node state JSON should not be null", nodeStateJson);
+ assertFalse("Node state JSON should not be empty",
nodeStateJson.isEmpty());
+ assertFalse("Node state JSON should not be {}", rootNode.isEmpty());
+
+ // Verify it contains expected elements using JsonNode structure
+ assertEquals("JSON should contain the correct type",
+ InferenceConfig.TYPE, rootNode.path("type").asText());
+
+ assertTrue("JSON should have enabled set to true",
+ rootNode.path("enabled").asBoolean());
+
+ assertEquals("JSON should contain the custom property",
+ "customValue", rootNode.path("customProperty").asText());
+
+ // Verify index node exists
+ JsonNode indexNode = rootNode.path(indexName);
+ assertTrue("JSON should contain the index node", indexNode.isObject());
+
+ // Verify model node exists within the index node
+ JsonNode modelNode = indexNode.path(modelName);
+ assertTrue("JSON should contain the model node", modelNode.isObject());
+
+ // Verify the model properties
+ assertEquals("Model should have correct type",
+ InferenceModelConfig.TYPE, modelNode.path("type").asText());
+ assertTrue("Model should be enabled",
+ modelNode.path("enabled").asBoolean());
+ assertTrue("Model should be default",
+ modelNode.path(InferenceModelConfig.IS_DEFAULT).asBoolean());
+ assertEquals("Model should have correct name",
+ "json-model", modelNode.path(InferenceModelConfig.MODEL).asText());
+
+ // Verify the enrich node exists
+ JsonNode enrichNode = rootNode.path(InferenceConstants.ENRICH_NODE);
+ assertTrue("JSON should contain the enrich node",
enrichNode.isObject());
+
+ // Verify enrich status properties
+ assertTrue("Enrich node should contain status mapping",
+ enrichNode.has(InferenceConstants.ENRICHER_STATUS_MAPPING));
+ assertTrue("Enrich node should contain status data",
+ enrichNode.has(InferenceConstants.ENRICHER_STATUS_DATA));
+
+ // Verify complex node structure
+ JsonNode complexNode = rootNode.path("complexNode");
+ assertTrue("JSON should contain complex node", complexNode.isObject());
+
+ // Verify basic properties with different types
+ assertEquals("String property should match", "simple string value",
complexNode.path("string").asText());
+ assertTrue("Boolean property should be true",
complexNode.path("boolean").asBoolean());
+ assertEquals("Number property should match", 12345,
complexNode.path("number").asInt());
+
+ // Verify special characters handling
+ String specialValue = complexNode.path("special").asText();
+ assertTrue("Special characters should be preserved",
+ specialValue.contains("test") &&
+ specialValue.contains("with") &&
+ specialValue.contains("quotes"));
+
+ // Verify unicode characters
+ assertEquals("Unicode characters should be preserved",
+ "测试unicode字符", complexNode.path("unicode").asText());
+
+ // Verify nested child node
+ JsonNode childNode = complexNode.path("childNode");
+ assertTrue("Child node should exist", childNode.isObject());
+ assertEquals("Child property should match", "child value",
childNode.path("childProp").asText());
+
+ // Part 2: Test with a non-existent path
+ // -------------------------------------
+ String nonExistentPath = "/oak:index/nonExistentConfig";
+ InferenceConfig.reInitialize(nodeStore, nonExistentPath, true);
+ inferenceConfig = InferenceConfig.getInstance();
+
+ // Get JSON for non-existent path
+ String nonExistentJson = inferenceConfig.getInferenceConfigNodeState();
+ JsonNode nonExistentNode = mapper.readTree(nonExistentJson);
+
+ // Should return empty JSON object
+ assertTrue("Should return empty JSON object for non-existent path",
nonExistentNode.isEmpty());
+
+ // Part 3: Test with disabled inference
+ // -----------------------------------
+ // Create config but disable it
+ NodeBuilder disabledConfigBuilder = createNodePath(rootBuilder, DEFAULT_CONFIG_PATH);
+ disabledConfigBuilder.setProperty(InferenceConstants.TYPE, InferenceConfig.TYPE);
+ disabledConfigBuilder.setProperty(InferenceConstants.ENABLED, false);
+ nodeStore.merge(rootBuilder, EmptyHook.INSTANCE, CommitInfo.EMPTY);
+
+ // Initialize with disabled config
+ InferenceConfig.reInitialize(nodeStore, DEFAULT_CONFIG_PATH, true);
+ inferenceConfig = InferenceConfig.getInstance();
+
+ // Get JSON for disabled inference
+ String disabledJson = inferenceConfig.getInferenceConfigNodeState();
+ JsonNode disabledNode = mapper.readTree(disabledJson);
+
+ // Should contain basic structure but with enabled=false
+ assertFalse("Disabled config should not be empty",
disabledNode.isEmpty());
+ assertEquals("Disabled config should have type", InferenceConfig.TYPE,
disabledNode.path("type").asText());
+ assertFalse("Disabled config should have enabled=false",
disabledNode.path("enabled").asBoolean());
+
+ // Reset to the default path for other tests
+ inferenceConfigBuilder.setProperty(InferenceConstants.ENABLED, true);
+ nodeStore.merge(rootBuilder, EmptyHook.INSTANCE, CommitInfo.EMPTY);
+ InferenceConfig.reInitialize(nodeStore, DEFAULT_CONFIG_PATH, true);
+ }
+
+ /**
+ * Test getInferenceModelConfig
+ * Tests all paths of the getInferenceModelConfig method
+ */
+ @Test
+ public void testGetInferenceModelConfig() throws CommitFailedException {
+ // Create enabled inference config with an index config containing a model config
+ NodeBuilder inferenceConfigBuilder = createNodePath(rootBuilder, DEFAULT_CONFIG_PATH);
+ inferenceConfigBuilder.setProperty(InferenceConstants.TYPE, InferenceConfig.TYPE);
+ inferenceConfigBuilder.setProperty(InferenceConstants.ENABLED, true);
+
+ // Add index config
+ String indexName = "testModelLookupIndex";
+ NodeBuilder indexConfigBuilder = inferenceConfigBuilder.child(indexName);
+ indexConfigBuilder.setProperty(InferenceConstants.TYPE, InferenceIndexConfig.TYPE);
+ indexConfigBuilder.setProperty(InferenceConstants.ENABLED, true);
+ indexConfigBuilder.setProperty(InferenceConstants.ENRICHER_CONFIG, ENRICHER_CONFIG);
+
+ // Add default model config
+ String defaultModelName = "defaultModel";
+ NodeBuilder defaultModelConfigBuilder = indexConfigBuilder.child(defaultModelName);
+ defaultModelConfigBuilder.setProperty(InferenceConstants.TYPE, InferenceModelConfig.TYPE);
+ defaultModelConfigBuilder.setProperty(InferenceConstants.ENABLED, true);
+ defaultModelConfigBuilder.setProperty(InferenceModelConfig.IS_DEFAULT, true);
+ defaultModelConfigBuilder.setProperty(InferenceModelConfig.MODEL, "default-embedding-model");
+ defaultModelConfigBuilder.setProperty(InferenceModelConfig.EMBEDDING_SERVICE_URL, "http://localhost:8080/default-embeddings");
+ defaultModelConfigBuilder.setProperty(InferenceModelConfig.SIMILARITY_THRESHOLD, 0.8);
+ defaultModelConfigBuilder.setProperty(InferenceModelConfig.MIN_TERMS, 3L);
+
+ // Add non-default model config
+ String nonDefaultModelName = "nonDefaultModel";
+ NodeBuilder nonDefaultModelConfigBuilder = indexConfigBuilder.child(nonDefaultModelName);
+ nonDefaultModelConfigBuilder.setProperty(InferenceConstants.TYPE, InferenceModelConfig.TYPE);
+ nonDefaultModelConfigBuilder.setProperty(InferenceConstants.ENABLED, true);
+ nonDefaultModelConfigBuilder.setProperty(InferenceModelConfig.IS_DEFAULT, false);
+ nonDefaultModelConfigBuilder.setProperty(InferenceModelConfig.MODEL, "non-default-embedding-model");
+ nonDefaultModelConfigBuilder.setProperty(InferenceModelConfig.EMBEDDING_SERVICE_URL, "http://localhost:8080/non-default-embeddings");
+ nonDefaultModelConfigBuilder.setProperty(InferenceModelConfig.SIMILARITY_THRESHOLD, 0.7);
+ nonDefaultModelConfigBuilder.setProperty(InferenceModelConfig.MIN_TERMS, 2L);
+
+ // Commit the changes
+ nodeStore.merge(rootBuilder, EmptyHook.INSTANCE, CommitInfo.EMPTY);
+
+ // Create InferenceConfig object
+ InferenceConfig.reInitialize(nodeStore, DEFAULT_CONFIG_PATH, true);
+ InferenceConfig inferenceConfig = InferenceConfig.getInstance();
+
+ // Test case 1: null model name should return NOOP
+ InferenceModelConfig resultForNullModelName = inferenceConfig.getInferenceModelConfig(indexName, null);
+ assertEquals("Null model name should return NOOP model config", InferenceModelConfig.NOOP, resultForNullModelName);
+
+ // Test case 2: empty model name should return default model
+ InferenceModelConfig resultForEmptyModelName = inferenceConfig.getInferenceModelConfig(indexName, "");
+ assertNotEquals("Empty model name should return default model", InferenceModelConfig.NOOP, resultForEmptyModelName);
+ assertEquals("Empty model name should return default model", defaultModelName, resultForEmptyModelName.getInferenceModelConfigName());
+ assertTrue("Empty model name should return default model that is marked as default", resultForEmptyModelName.isDefault());
+
+ // Test case 3: specific model name should return that model
+ InferenceModelConfig resultForSpecificModelName = inferenceConfig.getInferenceModelConfig(indexName, nonDefaultModelName);
+ assertNotEquals("Specific model name should return that model", InferenceModelConfig.NOOP, resultForSpecificModelName);
+ assertEquals("Specific model name should return that model", nonDefaultModelName, resultForSpecificModelName.getInferenceModelConfigName());
+ assertFalse("Specific model name should return that model with the correct default flag", resultForSpecificModelName.isDefault());
+
+ // Test case 4: non-existent model name should return NOOP
+ InferenceModelConfig resultForNonExistentModelName = inferenceConfig.getInferenceModelConfig(indexName, "nonExistentModel");
+ assertEquals("Non-existent model name should return NOOP", InferenceModelConfig.NOOP, resultForNonExistentModelName);
+
+ // Test case 5: non-existent index name should return NOOP
+ InferenceModelConfig resultForNonExistentIndexName = inferenceConfig.getInferenceModelConfig("nonExistentIndex", defaultModelName);
+ assertEquals("Non-existent index name should return NOOP", InferenceModelConfig.NOOP, resultForNonExistentIndexName);
}
}
\ No newline at end of file