This is an automated email from the ASF dual-hosted git repository.

mkataria pushed a commit to branch OAK-11692
in repository https://gitbox.apache.org/repos/asf/jackrabbit-oak.git

commit 80e078efec11f0e09585f14a37976053d48b4568
Author: Mohit Kataria <[email protected]>
AuthorDate: Mon May 5 14:24:11 2025 +0530

    OAK-11692: Add inference config in oak.
---
 oak-search-elastic/pom.xml                         |  12 +
 .../elastic/query/inference/InferenceConfig.java   | 226 +++++++++
 .../query/inference/InferenceConstants.java        |  37 ++
 .../query/inference/InferenceHeaderPayload.java    |  70 +++
 .../query/inference/InferenceIndexConfig.java      | 125 +++++
 .../query/inference/InferenceModelConfig.java      | 176 +++++++
 .../elastic/query/inference/InferencePayload.java  |  79 +++
 .../elastic/query/inference/VectorDocument.java    |  56 +++
 .../util/EnvironmentVariableProcessorUtil.java     |  54 +++
 .../query/inference/InferenceConfigTest.java       | 534 +++++++++++++++++++++
 10 files changed, 1369 insertions(+)
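
 For orientation, the classes added below are consumed roughly as follows. This is a
 minimal sketch based on the code in this commit, not an excerpt from it; the index
 name "testIndex" is a placeholder:

     // Point the singleton at the repository and the config path (the default is
     // InferenceConstants.DEFAULT_OAK_INDEX_INFERENCE_CONFIG_PATH = "/oak:index/:inferenceConfig").
     InferenceConfig.reInitialize(nodeStore, "/oak:index/:inferenceConfig", true);

     // Resolve the per-index configuration and a model config; an empty model name
     // selects the default enabled model for that index.
     InferenceConfig config = InferenceConfig.getInstance();
     InferenceIndexConfig indexConfig = config.getInferenceIndexConfig("testIndex");
     InferenceModelConfig modelConfig = config.getInferenceModelConfig("testIndex", "");

 All three lookups fall back to the corresponding NOOP instances when inference is
 disabled or no valid configuration exists for the index.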

diff --git a/oak-search-elastic/pom.xml b/oak-search-elastic/pom.xml
index 854e162057..4f951259c6 100644
--- a/oak-search-elastic/pom.xml
+++ b/oak-search-elastic/pom.xml
@@ -85,6 +85,18 @@
           </excludes>
         </configuration>
       </plugin>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-surefire-plugin</artifactId>
+        <version>3.2.5</version>
+        <configuration>
+          <environmentVariables>
+            <Authorization>Bearer test-token</Authorization>
+            <inferenceServiceUrl>http://localhost:8080/embeddings</inferenceServiceUrl>
+            <EMBEDDING_MODEL>embedding-test-model</EMBEDDING_MODEL>
+          </environmentVariables>
+        </configuration>
+      </plugin>
     </plugins>
   </build>
 
diff --git a/oak-search-elastic/src/main/java/org/apache/jackrabbit/oak/plugins/index/elastic/query/inference/InferenceConfig.java b/oak-search-elastic/src/main/java/org/apache/jackrabbit/oak/plugins/index/elastic/query/inference/InferenceConfig.java
new file mode 100644
index 0000000000..51e25ad9e2
--- /dev/null
+++ b/oak-search-elastic/src/main/java/org/apache/jackrabbit/oak/plugins/index/elastic/query/inference/InferenceConfig.java
@@ -0,0 +1,226 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.jackrabbit.oak.plugins.index.elastic.query.inference;
+
+import org.apache.jackrabbit.oak.commons.PathUtils;
+import org.apache.jackrabbit.oak.plugins.index.IndexName;
+import org.apache.jackrabbit.oak.query.QueryEngineSettings;
+import org.apache.jackrabbit.oak.spi.query.QueryLimits;
+import org.apache.jackrabbit.oak.spi.state.NodeState;
+import org.apache.jackrabbit.oak.spi.state.NodeStore;
+import org.jetbrains.annotations.NotNull;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.nio.file.Path;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.UUID;
+import java.util.concurrent.locks.ReadWriteLock;
+import java.util.concurrent.locks.ReentrantReadWriteLock;
+import java.util.function.Function;
+
+import static org.apache.jackrabbit.oak.plugins.index.search.util.ConfigUtil.getOptionalValue;
+
+/**
+ * Data model class representing the inference configuration stored under /oak:index/:inferenceConfig (default path).
+ */
+public class InferenceConfig {
+    private static final Logger LOG = LoggerFactory.getLogger(InferenceConfig.class.getName());
+    private static final ReadWriteLock lock = new ReentrantReadWriteLock(true);
+    private static final InferenceConfig INSTANCE = new InferenceConfig();
+    public static final String TYPE = "inferenceConfig";
+    /**
+     * Semantic search is enabled if this flag is true
+     */
+    private boolean enabled;
+    /**
+     * Map of index names to their respective inference configurations
+     */
+    private Map<String, InferenceIndexConfig> indexConfigs;
+    private NodeStore nodeStore;
+    private String inferenceConfigPath;
+    private String currentInferenceConfig;
+    private volatile String activeInferenceConfig;
+    private boolean isInferenceEnabled;
+
+    public boolean isInferenceEnabled() {
+        return isInferenceEnabled;
+    }
+
+    /**
+     * Initializes the singleton with inference disabled and empty configuration defaults.
+     */
+    private InferenceConfig() {
+        lock.writeLock().lock();
+        try {
+            enabled = false;
+            indexConfigs = Map.of();
+            activeInferenceConfig = getNewInferenceConfigId();
+            currentInferenceConfig = activeInferenceConfig;
+            isInferenceEnabled = false;
+        } finally {
+            lock.writeLock().unlock();
+        }
+    }
+
+    public static void reInitialize(NodeStore nodeStore, String inferenceConfigPath, boolean isInferenceEnabled) {
+        reInitialize(nodeStore, inferenceConfigPath, isInferenceEnabled, true);
+    }
+
+    public static void reInitialize() {
+        reInitialize(INSTANCE.nodeStore, INSTANCE.inferenceConfigPath, INSTANCE.isInferenceEnabled, true);
+    }
+
+    public static InferenceConfig getInstance() {
+        boolean needsReInit;
+        lock.readLock().lock();
+        try {
+            needsReInit = INSTANCE.activeInferenceConfig != null
+                    && !INSTANCE.activeInferenceConfig.equals(INSTANCE.currentInferenceConfig);
+        } finally {
+            lock.readLock().unlock();
+        }
+        // reInitialize takes the write lock, so it must not be called while still holding the read lock
+        // (ReentrantReadWriteLock does not support upgrading a read lock to a write lock).
+        if (needsReInit) {
+            reInitialize(INSTANCE.nodeStore, INSTANCE.inferenceConfigPath, INSTANCE.isInferenceEnabled, false);
+        }
+        return INSTANCE;
+    }
+
+    private static void reInitialize(NodeStore nodeStore, String inferenceConfigPath, boolean isInferenceEnabled, boolean updateActiveInferenceConfig) {
+        lock.writeLock().lock();
+        try {
+            if (updateActiveInferenceConfig) {
+                INSTANCE.activeInferenceConfig = getNewInferenceConfigId();
+            }
+            INSTANCE.currentInferenceConfig = INSTANCE.activeInferenceConfig;
+            INSTANCE.nodeStore = nodeStore;
+            INSTANCE.inferenceConfigPath = inferenceConfigPath;
+            INSTANCE.isInferenceEnabled = isInferenceEnabled;
+
+            if (!isValidInferenceConfig(nodeStore, inferenceConfigPath)) {
+                INSTANCE.enabled = false;
+                INSTANCE.indexConfigs = Map.of();
+            } else {
+                NodeState nodeState = nodeStore.getRoot();
+                for (String elem : PathUtils.elements(inferenceConfigPath)) {
+                    nodeState = nodeState.getChildNode(elem);
+                }
+                // Whether inference is enabled.
+                INSTANCE.enabled = getOptionalValue(nodeState, InferenceConstants.ENABLED, false);
+                Map<String, InferenceIndexConfig> tempIndexConfigs = new HashMap<>();
+                // Read index configurations
+                for (String indexName : nodeState.getChildNodeNames()) {
+                    tempIndexConfigs.put(indexName, new InferenceIndexConfig(indexName, nodeState.getChildNode(indexName)));
+                }
+                INSTANCE.indexConfigs = Collections.unmodifiableMap(tempIndexConfigs);
+                // TODO check whether we are also logging sensitive info here.
+                LOG.info("Loaded inference configuration: {}", INSTANCE);
+            }
+        } finally {
+            lock.writeLock().unlock();
+        }
+    }
+
+    public boolean isEnabled() {
+        lock.readLock().lock();
+        try {
+            return enabled;
+        } finally {
+            lock.readLock().unlock();
+        }
+    }
+
+    public @NotNull InferenceIndexConfig getInferenceIndexConfig(String indexName) {
+        lock.readLock().lock();
+        try {
+            if (!isEnabled()) {
+                return InferenceIndexConfig.NOOP;
+            } else {
+                InferenceIndexConfig inferenceIndexConfig;
+                IndexName indexNameObject;
+                Function<String, InferenceIndexConfig> getInferenceIndexConfig = (iName) ->
+                        getIndexConfigs().getOrDefault(iName, InferenceIndexConfig.NOOP);
+                if (!InferenceIndexConfig.NOOP.equals(inferenceIndexConfig = getInferenceIndexConfig.apply(indexName))) {
+                    LOG.debug("InferenceIndexConfig for indexName: {} is: {}", indexName, inferenceIndexConfig);
+                } else if ((indexNameObject = IndexName.parse(indexName)) != null && indexNameObject.isLegal()
+                        && indexNameObject.getBaseName() != null) {
+                    inferenceIndexConfig = getInferenceIndexConfig.apply(indexNameObject.getBaseName());
+                    LOG.debug("InferenceIndexConfig is using baseIndexName {} and is: {}", indexNameObject.getBaseName(), inferenceIndexConfig);
+                }
+                return inferenceIndexConfig.isEnabled() ? inferenceIndexConfig : InferenceIndexConfig.NOOP;
+            }
+        } finally {
+            lock.readLock().unlock();
+        }
+    }
+
+    public @NotNull InferenceModelConfig getInferenceModelConfig(String inferenceIndexName, String inferenceModelConfigName) {
+        lock.readLock().lock();
+        try {
+            if (inferenceModelConfigName == null) {
+                return InferenceModelConfig.NOOP;
+            } else if (inferenceModelConfigName.isEmpty()) {
+                return getInferenceIndexConfig(inferenceIndexName).getDefaultEnabledModel();
+            } else {
+                return getInferenceIndexConfig(inferenceIndexName).getInferenceModelConfigs().getOrDefault(inferenceModelConfigName, InferenceModelConfig.NOOP);
+            }
+        } finally {
+            lock.readLock().unlock();
+        }
+    }
+
+    private @NotNull Map<String, InferenceIndexConfig> getIndexConfigs() {
+        lock.readLock().lock();
+        try {
+            return isEnabled() ?
+                    Collections.unmodifiableMap(indexConfigs) : Map.of();
+        } finally {
+            lock.readLock().unlock();
+        }
+    }
+
+    private static boolean isValidInferenceConfig(NodeStore nodeStore, String inferenceConfigPath) {
+        if (nodeStore == null) {
+            LOG.warn("InferenceConfig: NodeStore is null");
+            return false;
+        }
+        NodeState nodeState = nodeStore.getRoot();
+        if (inferenceConfigPath == null || inferenceConfigPath.isEmpty()) {
+            LOG.warn("InferenceConfig: Inference config path is null or empty");
+            return false;
+        }
+        for (String elem : PathUtils.elements(inferenceConfigPath)) {
+            nodeState = nodeState.getChildNode(elem);
+            if (!nodeState.exists()) {
+                LOG.warn("InferenceConfig: NodeState does not exist for path: {}", inferenceConfigPath);
+                return false;
+            }
+        }
+        return getOptionalValue(nodeState, InferenceConstants.ENABLED, false);
+    }
+
+    private static String getNewInferenceConfigId() {
+        return UUID.randomUUID().toString();
+    }
+
+} 
\ No newline at end of file
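
The node structure InferenceConfig reads has the following shape, mirroring what
InferenceConfigTest builds further down in this commit; the index and model names
are illustrative:

    /oak:index/:inferenceConfig      {type=inferenceConfig, enabled}
        testIndex                    {type=inferenceIndexConfig, enabled, enricherConfig=<JSON>}
            testModel                {type=InferenceModelConfig.TYPE, enabled, isDefault, model,
                                      embeddingServiceUrl, similarityThreshold, minTerms, ...}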
diff --git a/oak-search-elastic/src/main/java/org/apache/jackrabbit/oak/plugins/index/elastic/query/inference/InferenceConstants.java b/oak-search-elastic/src/main/java/org/apache/jackrabbit/oak/plugins/index/elastic/query/inference/InferenceConstants.java
new file mode 100644
index 0000000000..ff96ddb05d
--- /dev/null
+++ b/oak-search-elastic/src/main/java/org/apache/jackrabbit/oak/plugins/index/elastic/query/inference/InferenceConstants.java
@@ -0,0 +1,37 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.jackrabbit.oak.plugins.index.elastic.query.inference;
+
+public interface InferenceConstants {
+    String ENABLED = "enabled";
+    String ENRICHER_CONFIG = "enricherConfig";
+    String TYPE = "type";
+    String DEFAULT_OAK_INDEX_INFERENCE_CONFIG_PATH = "/oak:index/:inferenceConfig";
+    String VECTOR_SPACES = ":vectorSpaces";
+    String VECTOR = "vector";
+    String ENRICH_NODE = ":enrich";
+    String ENRICH_STATUS = "status";
+    String ENRICH_STATUS_PENDING = "PENDING";
+    String ENRICH_STATUS_COMPLETED = "COMPLETED";
+    String ENRICH_STATUS_INFERENCE_DISABLED = "inferenceDisabled";
+    String DEFAULT_ENVIRONMENT_VARIABLE_PREFIX = "$";
+    String INFERENCE_ENVIRONMENT_VARIABLE_PREFIX = System.getProperty("org.apache.jackrabbit.oak.plugins.index.elastic.query.inference", DEFAULT_ENVIRONMENT_VARIABLE_PREFIX);
+    String DEFAULT_ENVIRONMENT_VARIABLE_VALUE = "";
+
+}
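
The placeholder prefix defaults to "$" and can be overridden through the system
property read above. A hypothetical override, which has to be in place before
InferenceConstants is initialized (for example via the corresponding -D JVM flag):

    // Hypothetical: use "env:" instead of "$" as the environment-variable placeholder prefix.
    System.setProperty("org.apache.jackrabbit.oak.plugins.index.elastic.query.inference", "env:");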
diff --git a/oak-search-elastic/src/main/java/org/apache/jackrabbit/oak/plugins/index/elastic/query/inference/InferenceHeaderPayload.java b/oak-search-elastic/src/main/java/org/apache/jackrabbit/oak/plugins/index/elastic/query/inference/InferenceHeaderPayload.java
new file mode 100644
index 0000000000..53c387e20d
--- /dev/null
+++ b/oak-search-elastic/src/main/java/org/apache/jackrabbit/oak/plugins/index/elastic/query/inference/InferenceHeaderPayload.java
@@ -0,0 +1,70 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.jackrabbit.oak.plugins.index.elastic.query.inference;
+
+import org.apache.jackrabbit.oak.json.JsonUtils;
+import org.apache.jackrabbit.oak.plugins.index.elastic.util.EnvironmentVariableProcessorUtil;
+import org.apache.jackrabbit.oak.spi.state.NodeState;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.util.HashMap;
+import java.util.Map;
+
+/**
+ * Configuration for the inference request headers; header values prefixed with the
+ * environment variable prefix are resolved from environment variables.
+ */
+public class InferenceHeaderPayload {
+    private static final Logger LOG = LoggerFactory.getLogger(InferenceHeaderPayload.class);
+
+    public static final InferenceHeaderPayload NOOP = new InferenceHeaderPayload();
+    private final Map<String, String> inferenceHeaderPayloadMap;
+
+    public InferenceHeaderPayload() {
+        this.inferenceHeaderPayloadMap = Map.of();
+    }
+
+    public InferenceHeaderPayload(NodeState nodeState) {
+        inferenceHeaderPayloadMap = JsonUtils.convertNodeStateToMap(nodeState, 0, false)
+            .entrySet().stream().filter(entry -> entry.getValue() instanceof String)
+            .filter(entry -> !entry.getKey().equals("jcr:primaryType"))
+            .collect(HashMap::new, (map, entry) -> {
+                    String value = EnvironmentVariableProcessorUtil.processEnvironmentVariable(
+                        InferenceConstants.INFERENCE_ENVIRONMENT_VARIABLE_PREFIX, (String) entry.getValue(), InferenceConstants.DEFAULT_ENVIRONMENT_VARIABLE_VALUE);
+                    map.put(entry.getKey(), value);
+                },
+                HashMap::putAll);
+    }
+
+    /**
+     * @return the inference request headers, with environment variable placeholders resolved
+     */
+    public Map<String, String> getInferenceHeaderPayload() {
+        return inferenceHeaderPayloadMap;
+    }
+
+    @Override
+    public String toString() {
+        return inferenceHeaderPayloadMap.toString();
+    }
+
+} 
\ No newline at end of file
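
Header values that start with the placeholder prefix are resolved from the
environment. A sketch, assuming a header child node built elsewhere (headerNodeState
is a placeholder) and the Authorization variable from the surefire setup above:

    // Given a header property  Authorization = "$Authorization"  and the environment
    // variable  Authorization = "Bearer test-token", the resulting map is
    // {Authorization=Bearer test-token}; unset variables fall back to "" (the
    // DEFAULT_ENVIRONMENT_VARIABLE_VALUE).
    Map<String, String> headers = new InferenceHeaderPayload(headerNodeState).getInferenceHeaderPayload();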
diff --git a/oak-search-elastic/src/main/java/org/apache/jackrabbit/oak/plugins/index/elastic/query/inference/InferenceIndexConfig.java b/oak-search-elastic/src/main/java/org/apache/jackrabbit/oak/plugins/index/elastic/query/inference/InferenceIndexConfig.java
new file mode 100644
index 0000000000..a7243655ed
--- /dev/null
+++ b/oak-search-elastic/src/main/java/org/apache/jackrabbit/oak/plugins/index/elastic/query/inference/InferenceIndexConfig.java
@@ -0,0 +1,125 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.jackrabbit.oak.plugins.index.elastic.query.inference;
+
+import org.apache.jackrabbit.oak.json.JsonUtils;
+import org.apache.jackrabbit.oak.spi.state.NodeState;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.Map;
+
+import static org.apache.jackrabbit.oak.plugins.index.search.util.ConfigUtil.getOptionalValue;
+
+/**
+ * Configuration class for Inference Index settings.
+ * Represents the configuration structure for inference-enabled indexes.
+ */
+public class InferenceIndexConfig {
+    private static final Logger LOG = LoggerFactory.getLogger(InferenceIndexConfig.class.getName());
+    public static final InferenceIndexConfig NOOP = new InferenceIndexConfig();
+    public static final String TYPE = "inferenceIndexConfig";
+    public static final String ENRICHER_CONFIG = "enricherConfig";
+    public static final String DISABLED_ENRICHER_CONFIG = "";
+
+    /**
+     * The enricher configuration as JSON string.
+     */
+    private final String enricherConfig;
+    /**
+     * Indicates whether the inference index is enabled or not.
+     */
+    private boolean isEnabled;
+    /**
+     * Map of inference model configurations keyed by their names.
+     */
+    private final Map<String, InferenceModelConfig> inferenceModelConfigs;
+
+    private InferenceIndexConfig() {
+        this.enricherConfig = DISABLED_ENRICHER_CONFIG;
+        this.isEnabled = false;
+        this.inferenceModelConfigs = Map.of();
+    }
+
+    public InferenceIndexConfig(String indexName, NodeState nodeState) {
+        boolean isValidType = getOptionalValue(nodeState, InferenceConstants.TYPE, "").equals(InferenceIndexConfig.TYPE);
+        this.isEnabled = getOptionalValue(nodeState, InferenceConstants.ENABLED, false);
+        String enricherString = getOptionalValue(nodeState, InferenceConstants.ENRICHER_CONFIG, "");
+        boolean isValidEnricherConfig = JsonUtils.isValidJson(enricherString, false);
+
+        if (isValidType && isEnabled && isValidEnricherConfig) {
+            this.enricherConfig = enricherString;
+            Map<String, InferenceModelConfig> tempInferenceModelConfigs = new HashMap<>();
+            // Iterate through child nodes to find inference model configs
+            for (String childName : nodeState.getChildNodeNames()) {
+                NodeState childNode = nodeState.getChildNode(childName);
+                tempInferenceModelConfigs.put(childName, new InferenceModelConfig(childName, childNode));
+            }
+            inferenceModelConfigs = Collections.unmodifiableMap(tempInferenceModelConfigs);
+        } else {
+            this.isEnabled = false;
+            this.enricherConfig = getOptionalValue(nodeState, InferenceConstants.ENRICHER_CONFIG, DISABLED_ENRICHER_CONFIG);
+            inferenceModelConfigs = Map.of();
+            LOG.warn("inference index config for indexName: {} is not valid. Node: {}",
+                    indexName, nodeState);
+        }
+    }
+
+    /**
+     * @return The enricher configuration JSON string
+     */
+    public String getEnricherConfig() {
+        return enricherConfig;
+    }
+
+    public boolean isEnabled() {
+        return isEnabled;
+    }
+
+    /**
+     * @return Map of inference model configurations keyed by their names
+     */
+    public Map<String, InferenceModelConfig> getInferenceModelConfigs() {
+        return inferenceModelConfigs;
+    }
+
+    /**
+     * Gets the enabled default inference model configuration if one exists.
+     *
+     * @return the default InferenceModelConfig, or InferenceModelConfig.NOOP if no enabled config is marked as default
+     */
+    public InferenceModelConfig getDefaultEnabledModel() {
+        return inferenceModelConfigs.values().stream()
+                .filter(InferenceModelConfig::isDefault)
+                .filter(InferenceModelConfig::isEnabled)
+                .findFirst()
+                .orElse(InferenceModelConfig.NOOP);
+    }
+
+    @Override
+    public String toString() {
+        return TYPE + "{" +
+                ENRICHER_CONFIG + "='" + enricherConfig + '\'' +
+                ", " + InferenceModelConfig.TYPE + "=" + inferenceModelConfigs 
+
+                '}';
+    }
+
+}
\ No newline at end of file
diff --git a/oak-search-elastic/src/main/java/org/apache/jackrabbit/oak/plugins/index/elastic/query/inference/InferenceModelConfig.java b/oak-search-elastic/src/main/java/org/apache/jackrabbit/oak/plugins/index/elastic/query/inference/InferenceModelConfig.java
new file mode 100644
index 0000000000..e308cc71c8
--- /dev/null
+++ b/oak-search-elastic/src/main/java/org/apache/jackrabbit/oak/plugins/index/elastic/query/inference/InferenceModelConfig.java
@@ -0,0 +1,176 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.jackrabbit.oak.plugins.index.elastic.query.inference;
+
+import org.apache.jackrabbit.oak.plugins.index.elastic.util.EnvironmentVariableProcessorUtil;
+import org.apache.jackrabbit.oak.spi.query.fulltext.InferenceQueryConfig;
+import org.apache.jackrabbit.oak.spi.state.NodeState;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import static org.apache.jackrabbit.oak.plugins.index.search.util.ConfigUtil.getOptionalValue;
+
+/**
+ * Configuration class for Inference Model settings.
+ * Currently only hybrid search is implemented
+ */
+public class InferenceModelConfig {
+    private static final Logger LOG = LoggerFactory.getLogger(InferenceModelConfig.class);
+    private static final String DEFAULT_ENVIRONMENT_VARIABLE_VALUE = "";
+
+    public static final InferenceModelConfig NOOP = new InferenceModelConfig();
+    public static final String MODEL = "model";
+    public static final String EMBEDDING_SERVICE_URL = "embeddingServiceUrl";
+    public static final String SIMILARITY_THRESHOLD = "similarityThreshold";
+    public static final String INFERENCE_PAYLOAD = "inferencePayload";
+    // InferenceQueryConfig uses the same type value, so it is referenced from
+    // InferenceQueryConfig to keep the two in sync.
+    public static final String TYPE = InferenceQueryConfig.TYPE;
+    public static final String MIN_TERMS = "minTerms";
+    public static final String IS_DEFAULT = "isDefault";
+    public static final String ENABLED = "enabled";
+    public static final String HEADER = "header";
+    public static final String TIMEOUT = "timeout";
+    public static final String NUM_CANDIDATES = "numCandidates";
+    public static final String CACHE_SIZE = "cacheSize";
+    private static final double DEFAULT_SIMILARITY_THRESHOLD = 0.8;
+    private static final long DEFAULT_MIN_TERMS = 2;
+    private static final long DEFAULT_TIMEOUT_MILLIS = 5000L;
+    private static final int DEFAULT_NUM_CANDIDATES = 100;
+    private static final int DEFAULT_CACHE_SIZE = 100;
+
+
+    private final String model;
+    private final String embeddingServiceUrl;
+    private final boolean isDefault;
+    private final boolean enabled;
+    private final InferenceHeaderPayload header;
+    private final InferencePayload payload;
+    private final String inferenceModelConfigName;
+    private final double similarityThreshold;
+    private final long minTerms;
+    //The number of candidates to be returned by the query. Default is 100.
+    private final int numCandidates;
+    //The timeout for the query in milliseconds. Default is 5000.
+    private final long timeout;
+    private final int cacheSize;
+
+
+    public InferenceModelConfig() {
+        this.isDefault = false;
+        this.enabled = false;
+
+        this.model = "";
+        this.embeddingServiceUrl = "";
+        this.header = InferenceHeaderPayload.NOOP;
+        this.payload = InferencePayload.NOOP;
+        this.inferenceModelConfigName = "";
+        this.similarityThreshold = 0.0;
+        this.minTerms = 0L;
+        this.numCandidates = 0;
+        this.timeout = 0;
+        this.cacheSize = DEFAULT_CACHE_SIZE;
+    }
+
+    public InferenceModelConfig(String inferenceModelConfigName, NodeState nodeState) {
+        this.inferenceModelConfigName = inferenceModelConfigName;
+        this.enabled = getOptionalValue(nodeState, InferenceConstants.ENABLED, false);
+        boolean isValidType = getOptionalValue(nodeState, InferenceConstants.TYPE, "").equals(InferenceModelConfig.TYPE);
+        if (this.enabled && isValidType) {
+            this.header = new InferenceHeaderPayload(nodeState.getChildNode(HEADER));
+            this.payload = new InferencePayload(inferenceModelConfigName, nodeState.getChildNode(INFERENCE_PAYLOAD));
+        } else {
+            this.header = InferenceHeaderPayload.NOOP;
+            this.payload = InferencePayload.NOOP;
+        }
+        this.isDefault = getOptionalValue(nodeState, IS_DEFAULT, false);
+        this.model = getOptionalValue(nodeState, MODEL, "");
+        this.embeddingServiceUrl = EnvironmentVariableProcessorUtil.processEnvironmentVariable(
+                InferenceConstants.INFERENCE_ENVIRONMENT_VARIABLE_PREFIX, getOptionalValue(nodeState, EMBEDDING_SERVICE_URL, ""), DEFAULT_ENVIRONMENT_VARIABLE_VALUE);
+        this.similarityThreshold = getOptionalValue(nodeState, SIMILARITY_THRESHOLD, DEFAULT_SIMILARITY_THRESHOLD);
+        this.minTerms = getOptionalValue(nodeState, MIN_TERMS, DEFAULT_MIN_TERMS);
+        this.timeout = getOptionalValue(nodeState, TIMEOUT, DEFAULT_TIMEOUT_MILLIS);
+        this.numCandidates = getOptionalValue(nodeState, NUM_CANDIDATES, DEFAULT_NUM_CANDIDATES);
+        this.cacheSize = getOptionalValue(nodeState, CACHE_SIZE, DEFAULT_CACHE_SIZE);
+    }
+
+    @Override
+    public String toString() {
+        return TYPE + "{" +
+                MODEL + "='" + model + '\'' +
+                ", " + EMBEDDING_SERVICE_URL + "='" + embeddingServiceUrl + 
'\'' +
+                ", " + SIMILARITY_THRESHOLD + similarityThreshold +
+                ", " + MIN_TERMS + "=" + minTerms +
+                ", " + IS_DEFAULT + "=" + isDefault +
+                ", " + ENABLED + "=" + enabled +
+                ", " + HEADER + "=" + header +
+                ", " + INFERENCE_PAYLOAD + "=" + payload +
+                ", " + TIMEOUT + "=" + timeout +
+                ", " + NUM_CANDIDATES + "=" + numCandidates +
+                "}";
+    }
+
+    public String getInferenceModelConfigName() {
+        return inferenceModelConfigName;
+    }
+
+    public String getModel() {
+        return model;
+    }
+
+    public String getEmbeddingServiceUrl() {
+        return embeddingServiceUrl;
+    }
+
+    public boolean isDefault() {
+        return isDefault;
+    }
+
+    public boolean isEnabled() {
+        return enabled;
+    }
+
+    public InferenceHeaderPayload getHeader() {
+        return header;
+    }
+
+    public InferencePayload getPayload() {
+        return payload;
+    }
+
+    public double getSimilarityThreshold() {
+        return similarityThreshold;
+    }
+
+    public long getMinTerms() {
+        return minTerms;
+    }
+
+    public int getNumCandidates() {
+        return numCandidates;
+    }
+
+    public long getTimeoutMillis() {
+        return timeout;
+    }
+
+    public int getCacheSize() {
+        return cacheSize;
+    }
+}
\ No newline at end of file
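
A caller that obtained a resolved model config would typically consume it along these
lines; this is a sketch, with modelConfig assumed to come from
InferenceConfig.getInstance().getInferenceModelConfig(indexName, ""):

    if (modelConfig.isEnabled()) {
        String url = modelConfig.getEmbeddingServiceUrl();      // environment placeholder already resolved
        Map<String, String> headers = modelConfig.getHeader().getInferenceHeaderPayload();
        String body = modelConfig.getPayload().getInferencePayload("user query text");
        long timeoutMillis = modelConfig.getTimeoutMillis();    // defaults to 5000 ms
        int candidates = modelConfig.getNumCandidates();        // defaults to 100
        // ... call the embedding service with url/headers/body and use the returned vector for the query
    }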
diff --git a/oak-search-elastic/src/main/java/org/apache/jackrabbit/oak/plugins/index/elastic/query/inference/InferencePayload.java b/oak-search-elastic/src/main/java/org/apache/jackrabbit/oak/plugins/index/elastic/query/inference/InferencePayload.java
new file mode 100644
index 0000000000..ac230014a9
--- /dev/null
+++ b/oak-search-elastic/src/main/java/org/apache/jackrabbit/oak/plugins/index/elastic/query/inference/InferencePayload.java
@@ -0,0 +1,79 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.jackrabbit.oak.plugins.index.elastic.query.inference;
+
+import com.fasterxml.jackson.core.JsonProcessingException;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import org.apache.jackrabbit.oak.json.JsonUtils;
+import org.apache.jackrabbit.oak.plugins.index.elastic.util.EnvironmentVariableProcessorUtil;
+import org.apache.jackrabbit.oak.spi.state.NodeState;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+/**
+ * Configuration for the inference request payload. The OpenAI API convention is followed:
+ * the query text is sent as a list under the "input" key.
+ */
+public class InferencePayload {
+    public static final InferencePayload NOOP = new InferencePayload();
+    private static final Logger LOG = LoggerFactory.getLogger(InferencePayload.class);
+    private static final ObjectMapper objectMapper = new ObjectMapper();
+    private final Map<String, Object> inferencePayloadMap;
+    private final String inputTextKey = "input";
+
+    public InferencePayload() {
+        inferencePayloadMap = Map.of();
+    }
+
+    public InferencePayload(String inferenceModelName, NodeState nodeState) {
+        inferencePayloadMap = JsonUtils.convertNodeStateToMap(nodeState, 0, false);
+        inferencePayloadMap.remove("jcr:primaryType");
+        Map<String, String> swappedEnvVarsMap = inferencePayloadMap.entrySet().stream()
+            .filter(entry -> entry.getValue() instanceof String)
+            .collect(HashMap::new, (map, entry) -> {
+                    String value = EnvironmentVariableProcessorUtil.processEnvironmentVariable(
+                        InferenceConstants.INFERENCE_ENVIRONMENT_VARIABLE_PREFIX, (String) entry.getValue(), InferenceConstants.DEFAULT_ENVIRONMENT_VARIABLE_VALUE);
+                    map.put(entry.getKey(), value);
+                },
+                HashMap::putAll);
+        // replace the original values with the environment-resolved ones
+        inferencePayloadMap.putAll(swappedEnvVarsMap);
+    }
+
+    /**
+     * Builds the inference payload as a JSON string for the given query text.
+     *
+     * @param text the query text to embed
+     * @return the payload as pretty-printed JSON, with the text set under the "input" key
+     */
+    public String getInferencePayload(String text) {
+
+        // This creates a shallow copy - only the map structure is cloned but values are still references
+        Map<String, Object> inferencePayloadMapCopy = new HashMap<>(inferencePayloadMap);
+        inferencePayloadMapCopy.put(inputTextKey, List.of(text));
+        try {
+            return objectMapper.writerWithDefaultPrettyPrinter().writeValueAsString(inferencePayloadMapCopy);
+        } catch (JsonProcessingException e) {
+            throw new RuntimeException(e);
+        }
+    }
+
+} 
\ No newline at end of file
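
Assuming the payload node defines a model property set to the $EMBEDDING_MODEL
placeholder (as the surefire setup above suggests), getInferencePayload("what is
jackrabbit oak") would produce JSON along these lines (key order may differ; the
formatting comes from Jackson's default pretty printer):

    {
      "model" : "embedding-test-model",
      "input" : [ "what is jackrabbit oak" ]
    }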
diff --git a/oak-search-elastic/src/main/java/org/apache/jackrabbit/oak/plugins/index/elastic/query/inference/VectorDocument.java b/oak-search-elastic/src/main/java/org/apache/jackrabbit/oak/plugins/index/elastic/query/inference/VectorDocument.java
new file mode 100644
index 0000000000..2021bda439
--- /dev/null
+++ b/oak-search-elastic/src/main/java/org/apache/jackrabbit/oak/plugins/index/elastic/query/inference/VectorDocument.java
@@ -0,0 +1,56 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.jackrabbit.oak.plugins.index.elastic.query.inference;
+
+import java.util.List;
+import java.util.Map;
+
+public class VectorDocument {
+
+    public static final String ID = "id";
+    public static final String VECTOR = "vector";
+    public static final String METADATA = "metadata";
+
+    public final String id;
+
+    public final List<Float> vector;
+
+    public final Map<String, Object> metadata;
+
+    public VectorDocument() {
+        this.id = null;
+        this.vector = null;
+        this.metadata = null;
+    }
+
+    public VectorDocument(String id, List<Float> vector, Map<String, Object> metadata) {
+        this.id = id;
+        this.vector = vector;
+        this.metadata = metadata;
+    }
+
+    @Override
+    public String toString() {
+        return "VectorDocument{" +
+                "id='" + id + '\'' +
+                ", vector=" + vector +
+                ", metadata=" + metadata +
+                '}';
+    }
+}
\ No newline at end of file
diff --git a/oak-search-elastic/src/main/java/org/apache/jackrabbit/oak/plugins/index/elastic/util/EnvironmentVariableProcessorUtil.java b/oak-search-elastic/src/main/java/org/apache/jackrabbit/oak/plugins/index/elastic/util/EnvironmentVariableProcessorUtil.java
new file mode 100644
index 0000000000..a44d5ef6cc
--- /dev/null
+++ b/oak-search-elastic/src/main/java/org/apache/jackrabbit/oak/plugins/index/elastic/util/EnvironmentVariableProcessorUtil.java
@@ -0,0 +1,54 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.jackrabbit.oak.plugins.index.elastic.util;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+public class EnvironmentVariableProcessorUtil {
+    private static final Logger LOG = LoggerFactory.getLogger(EnvironmentVariableProcessorUtil.class);
+
+    public static String getEnvironmentVariable(String variableName) {
+        String value = System.getenv(variableName);
+        if (value == null) {
+            throw new IllegalArgumentException("Environment variable " + variableName + " not found");
+        }
+        return value;
+    }
+
+    public static String processEnvironmentVariable(String envVariablePrefix, String variableNameIncludingPrefix, String defaultValue) {
+        String value = null;
+        if (variableNameIncludingPrefix.startsWith(envVariablePrefix)) {
+            String variableName = variableNameIncludingPrefix.substring(envVariablePrefix.length());
+            value = System.getenv(variableName);
+        } else {
+            value = variableNameIncludingPrefix;
+        }
+        if (value == null) {
+            LOG.warn("EnvironmentVariable {} used in inference header is not set", variableNameIncludingPrefix);
+            return defaultValue;
+        }
+        return value;
+    }
+
+    public static String processEnvironmentVariable(String envVariablePrefix, String variableNameIncludingPrefix) {
+        return processEnvironmentVariable(envVariablePrefix, variableNameIncludingPrefix, null);
+    }
+
+}
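
The prefix-based resolution works as follows; the values mirror the surefire
configuration above, and a value without the prefix is passed through unchanged:

    // "$Authorization" -> value of the Authorization environment variable
    // ("Bearer test-token" in the test setup); "" if the variable is not set.
    String token = EnvironmentVariableProcessorUtil.processEnvironmentVariable("$", "$Authorization", "");

    // A plain value is returned as-is because it does not start with the prefix.
    String literal = EnvironmentVariableProcessorUtil.processEnvironmentVariable("$", "literal-value", "");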
diff --git a/oak-search-elastic/src/test/java/org/apache/jackrabbit/oak/plugins/index/elastic/query/inference/InferenceConfigTest.java b/oak-search-elastic/src/test/java/org/apache/jackrabbit/oak/plugins/index/elastic/query/inference/InferenceConfigTest.java
new file mode 100644
index 0000000000..bd3902215f
--- /dev/null
+++ b/oak-search-elastic/src/test/java/org/apache/jackrabbit/oak/plugins/index/elastic/query/inference/InferenceConfigTest.java
@@ -0,0 +1,534 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.jackrabbit.oak.plugins.index.elastic.query.inference;
+
+import com.fasterxml.jackson.core.JsonProcessingException;
+import joptsimple.internal.Strings;
+import org.apache.jackrabbit.oak.api.CommitFailedException;
+import org.apache.jackrabbit.oak.commons.PathUtils;
+import org.apache.jackrabbit.oak.plugins.index.elastic.util.EnvironmentVariableProcessorUtil;
+import org.apache.jackrabbit.oak.plugins.memory.EmptyNodeState;
+import org.apache.jackrabbit.oak.plugins.memory.MemoryNodeBuilder;
+import org.apache.jackrabbit.oak.plugins.memory.MemoryNodeStore;
+import org.apache.jackrabbit.oak.spi.commit.CommitInfo;
+import org.apache.jackrabbit.oak.spi.commit.EmptyHook;
+import org.apache.jackrabbit.oak.spi.state.NodeBuilder;
+import org.apache.jackrabbit.oak.spi.state.NodeStore;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNotEquals;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertNull;
+import static org.junit.Assert.assertTrue;
+
+public class InferenceConfigTest {
+
+    private static final Logger LOG = LoggerFactory.getLogger(InferenceConfigTest.class);
+    private static final String DEFAULT_CONFIG_PATH = InferenceConstants.DEFAULT_OAK_INDEX_INFERENCE_CONFIG_PATH;
+    private static final String ENRICHER_CONFIG = "{\"enricher\":{\"config\":{\"vectorSpaces\":{\"semantic\":{\"pipeline\":{\"steps\":[{\"inputFields\":{\"description\":\"STRING\",\"title\":\"STRING\"},\"chunkingConfig\":{\"enabled\":true},\"name\":\"sentence-embeddings\",\"model\":\"text-embedding-ada-002\",\"optional\":true,\"type\":\"embeddings\"}]},\"default\":false}},\"version\":\"0.0.1\"}}}";
+
+    private NodeBuilder rootBuilder;
+    private NodeStore nodeStore;
+
+    private final String AUTH_ENV_VARIABLE = "$Authorization";
+    private final String INFERENCE_SERVICE_URL_ENV_VARIABLE = "$inferenceServiceUrl";
+    private final String INFERENCE_PAYLOAD_MODEL = "$EMBEDDING_MODEL";
+
+    private boolean isAuthEnvVarDefined;
+    private boolean isInferenceUrlEnvVarDefined;
+    private boolean isInferencePayloadModelDefined;
+
+    @Before
+    public void setup() {
+        // Initialize memory node store
+        rootBuilder = new MemoryNodeBuilder(EmptyNodeState.EMPTY_NODE);
+        nodeStore = new MemoryNodeStore(rootBuilder.getNodeState());
+
+        isAuthEnvVarDefined = !EnvironmentVariableProcessorUtil.processEnvironmentVariable(
+                InferenceConstants.INFERENCE_ENVIRONMENT_VARIABLE_PREFIX, AUTH_ENV_VARIABLE, "").equals(Strings.EMPTY);
+        isInferenceUrlEnvVarDefined = !EnvironmentVariableProcessorUtil.processEnvironmentVariable(
+                InferenceConstants.INFERENCE_ENVIRONMENT_VARIABLE_PREFIX, INFERENCE_SERVICE_URL_ENV_VARIABLE, "").equals(Strings.EMPTY);
+        isInferencePayloadModelDefined = !EnvironmentVariableProcessorUtil.processEnvironmentVariable(
+                InferenceConstants.INFERENCE_ENVIRONMENT_VARIABLE_PREFIX, INFERENCE_PAYLOAD_MODEL, "").equals(Strings.EMPTY);
+    }
+
+    @After
+    public void tearDown() {
+        rootBuilder = null;
+        nodeStore = null;
+    }
+
+    /**
+     * Test 1: Basic test - Disabled InferenceConfig
+     * Verifies that when inference config is created but disabled, the InferenceConfig object reflects this state
+     */
+    @Test
+    public void testDisabledInferenceConfig() throws CommitFailedException {
+        // Create disabled inference config node structure
+        NodeBuilder inferenceConfigBuilder = createNodePath(rootBuilder, DEFAULT_CONFIG_PATH);
+        inferenceConfigBuilder.setProperty(InferenceConstants.TYPE, InferenceConfig.TYPE);
+        inferenceConfigBuilder.setProperty(InferenceConstants.ENABLED, false);
+
+        // Commit the changes
+        nodeStore.merge(rootBuilder, EmptyHook.INSTANCE, CommitInfo.EMPTY);
+
+        // Create InferenceConfig object using the nodeStore
+        InferenceConfig.reInitialize(nodeStore, DEFAULT_CONFIG_PATH, true);
+        InferenceConfig inferenceConfig = InferenceConfig.getInstance();
+
+        // Verify the state
+        assertFalse("InferenceConfig should be disabled", inferenceConfig.isEnabled());
+    }
+
+    /**
+     * Test 2: Enabled InferenceConfig but no index configs
+     * Verifies that when an empty inference config is enabled, the InferenceConfig object reflects this state
+     */
+    @Test
+    public void testEnabledEmptyInferenceConfig() throws CommitFailedException {
+        // Create enabled inference config node structure
+        NodeBuilder inferenceConfigBuilder = createNodePath(rootBuilder, DEFAULT_CONFIG_PATH);
+        inferenceConfigBuilder.setProperty(InferenceConstants.TYPE, InferenceConfig.TYPE);
+        inferenceConfigBuilder.setProperty(InferenceConstants.ENABLED, true);
+
+        // Commit the changes
+        nodeStore.merge(rootBuilder, EmptyHook.INSTANCE, CommitInfo.EMPTY);
+
+        // Create InferenceConfig object using the nodeStore
+        InferenceConfig.reInitialize(nodeStore, DEFAULT_CONFIG_PATH, true);
+        InferenceConfig inferenceConfig = InferenceConfig.getInstance();
+
+        // Verify the state
+        assertTrue("InferenceConfig should be enabled", inferenceConfig.isEnabled());
+    }
+
+    /**
+     * Test 3: Basic InferenceIndexConfig creation
+     * Tests the creation of a simple InferenceIndexConfig within InferenceConfig
+     */
+    @Test
+    public void testBasicInferenceIndexConfig() throws CommitFailedException {
+        // Create enabled inference config with one index config
+        NodeBuilder inferenceConfigBuilder = createNodePath(rootBuilder, DEFAULT_CONFIG_PATH);
+        inferenceConfigBuilder.setProperty(InferenceConstants.TYPE, InferenceConfig.TYPE);
+        inferenceConfigBuilder.setProperty(InferenceConstants.ENABLED, true);
+
+        // Add index config
+        String indexName = "testIndex";
+        NodeBuilder indexConfigBuilder = inferenceConfigBuilder.child(indexName);
+        indexConfigBuilder.setProperty(InferenceConstants.TYPE, InferenceIndexConfig.TYPE);
+        indexConfigBuilder.setProperty(InferenceConstants.ENABLED, true);
+        indexConfigBuilder.setProperty(InferenceConstants.ENRICHER_CONFIG, ENRICHER_CONFIG);
+
+        // Commit the changes
+        nodeStore.merge(rootBuilder, EmptyHook.INSTANCE, CommitInfo.EMPTY);
+
+        // Create InferenceConfig object
+        InferenceConfig.reInitialize(nodeStore, DEFAULT_CONFIG_PATH, true);
+        InferenceConfig inferenceConfig = InferenceConfig.getInstance();
+
+        // Verify the state
+        assertTrue("InferenceConfig should be enabled", inferenceConfig.isEnabled());
+//        assertEquals("Should have one index config", 1, inferenceConfig.getIndexConfigs().size());
+        assertTrue("Should contain the index config", inferenceConfig.getInferenceIndexConfig(indexName).isEnabled());
+
+        InferenceIndexConfig indexConfig = inferenceConfig.getInferenceIndexConfig(indexName);
+        assertTrue("Index config should be enabled", indexConfig.isEnabled());
+        assertEquals("Enricher config should match", ENRICHER_CONFIG, indexConfig.getEnricherConfig());
+        assertTrue("Model configs should be empty", indexConfig.getInferenceModelConfigs().isEmpty());
+    }
+
+    /**
+     * Test 4: Disabled InferenceIndexConfig
+     * Tests that a disabled InferenceIndexConfig is properly handled
+     */
+    @Test
+    public void testDisabledInferenceIndexConfig() throws CommitFailedException, JsonProcessingException {
+        // Create enabled inference config with one disabled index config
+        NodeBuilder inferenceConfigBuilder = createNodePath(rootBuilder, DEFAULT_CONFIG_PATH);
+        inferenceConfigBuilder.setProperty(InferenceConstants.TYPE, InferenceConfig.TYPE);
+        inferenceConfigBuilder.setProperty(InferenceConstants.ENABLED, true);
+
+        // Add disabled index config
+        String indexName = "testIndex";
+        NodeBuilder indexConfigBuilder = inferenceConfigBuilder.child(indexName);
+        indexConfigBuilder.setProperty(InferenceConstants.TYPE, InferenceIndexConfig.TYPE);
+        indexConfigBuilder.setProperty(InferenceConstants.ENABLED, false);
+        indexConfigBuilder.setProperty(InferenceConstants.ENRICHER_CONFIG, ENRICHER_CONFIG);
+
+        // Commit the changes
+        nodeStore.merge(rootBuilder, EmptyHook.INSTANCE, CommitInfo.EMPTY);
+
+        // Create InferenceConfig object
+        InferenceConfig.reInitialize(nodeStore, DEFAULT_CONFIG_PATH, true);
+        InferenceConfig inferenceConfig = InferenceConfig.getInstance();
+
+        // Verify the state
+        assertTrue("InferenceConfig should be enabled", inferenceConfig.isEnabled());
+
+        InferenceIndexConfig indexConfig = inferenceConfig.getInferenceIndexConfig(indexName);
+        assertFalse("Index config should be disabled", indexConfig.isEnabled());
+        // When indexConfig is disabled, we should get the NOOP instance
+        assertEquals("Should get NOOP instance", InferenceIndexConfig.NOOP, indexConfig);
+    }
+
+    /**
+     * Test 5: Invalid InferenceIndexConfig (missing type)
+     * Tests that an invalid InferenceIndexConfig (missing type) is properly handled
+     */
+    @Test
+    public void testInvalidInferenceIndexConfig() throws CommitFailedException {
+        // Create enabled inference config with one invalid index config (missing type)
+        NodeBuilder inferenceConfigBuilder = createNodePath(rootBuilder, DEFAULT_CONFIG_PATH);
+        inferenceConfigBuilder.setProperty(InferenceConstants.TYPE, InferenceConfig.TYPE);
+        inferenceConfigBuilder.setProperty(InferenceConstants.ENABLED, true);
+
+        // Add invalid index config (missing type)
+        String indexName = "testIndex";
+        NodeBuilder indexConfigBuilder = inferenceConfigBuilder.child(indexName);
+        // Intentionally not setting the TYPE property
+        indexConfigBuilder.setProperty(InferenceConstants.ENABLED, true);
+        indexConfigBuilder.setProperty(InferenceConstants.ENRICHER_CONFIG, ENRICHER_CONFIG);
+
+        // Commit the changes
+        nodeStore.merge(rootBuilder, EmptyHook.INSTANCE, CommitInfo.EMPTY);
+
+        // Create InferenceConfig object
+        InferenceConfig.reInitialize(nodeStore, DEFAULT_CONFIG_PATH, true);
+        InferenceConfig inferenceConfig = InferenceConfig.getInstance();
+
+        // Verify the state
+        assertTrue("InferenceConfig should be enabled", inferenceConfig.isEnabled());
+
+        InferenceIndexConfig indexConfig = inferenceConfig.getInferenceIndexConfig(indexName);
+        assertFalse("Invalid index config should be treated as disabled", indexConfig.isEnabled());
+        assertEquals("Should get NOOP instance", InferenceIndexConfig.NOOP, indexConfig);
+    }
+
+    /**
+     * Test 6: Basic InferenceModelConfig
+     * Tests the creation of an InferenceModelConfig within an InferenceIndexConfig
+     */
+    @Test
+    public void testBasicInferenceModelConfig() throws CommitFailedException {
+        // Create enabled inference config with an index config containing a model config
+        NodeBuilder inferenceConfigBuilder = createNodePath(rootBuilder, DEFAULT_CONFIG_PATH);
+        inferenceConfigBuilder.setProperty(InferenceConstants.TYPE, InferenceConfig.TYPE);
+        inferenceConfigBuilder.setProperty(InferenceConstants.ENABLED, true);
+
+        // Add index config
+        String indexName = "testIndex";
+        NodeBuilder indexConfigBuilder = inferenceConfigBuilder.child(indexName);
+        indexConfigBuilder.setProperty(InferenceConstants.TYPE, InferenceIndexConfig.TYPE);
+        indexConfigBuilder.setProperty(InferenceConstants.ENABLED, true);
+        indexConfigBuilder.setProperty(InferenceConstants.ENRICHER_CONFIG, ENRICHER_CONFIG);
+
+        // Add model config
+        String modelName = "testModel";
+        NodeBuilder modelConfigBuilder = indexConfigBuilder.child(modelName);
+        modelConfigBuilder.setProperty(InferenceConstants.TYPE, InferenceModelConfig.TYPE);
+        modelConfigBuilder.setProperty(InferenceConstants.ENABLED, true);
+        modelConfigBuilder.setProperty(InferenceModelConfig.IS_DEFAULT, true);
+        modelConfigBuilder.setProperty(InferenceModelConfig.MODEL, "test-embedding-model");
+        modelConfigBuilder.setProperty(InferenceModelConfig.EMBEDDING_SERVICE_URL, "http://localhost:8080/embeddings");
+        modelConfigBuilder.setProperty(InferenceModelConfig.SIMILARITY_THRESHOLD, 0.8);
+        modelConfigBuilder.setProperty(InferenceModelConfig.MIN_TERMS, 3L);
+
+        // Commit the changes
+        nodeStore.merge(rootBuilder, EmptyHook.INSTANCE, CommitInfo.EMPTY);
+
+        // Create InferenceConfig object
+        InferenceConfig.reInitialize(nodeStore, DEFAULT_CONFIG_PATH, true);
+        InferenceConfig inferenceConfig = InferenceConfig.getInstance();
+
+        // Verify the state
+        assertTrue("InferenceConfig should be enabled", inferenceConfig.isEnabled());
+
+        InferenceIndexConfig indexConfig = inferenceConfig.getInferenceIndexConfig(indexName);
+        assertTrue("Index config should be enabled", indexConfig.isEnabled());
+        assertEquals("Should have one model config", 1, indexConfig.getInferenceModelConfigs().size());
+
+        InferenceModelConfig modelConfig = indexConfig.getInferenceModelConfigs().get(modelName);
+        assertNotNull("Model config should exist", modelConfig);
+        assertTrue("Model config should be enabled", modelConfig.isEnabled());
+        assertTrue("Model config should be default", modelConfig.isDefault());
+        assertEquals("Model name should match", "test-embedding-model", modelConfig.getModel());
+        assertEquals("Embedding service URL should match", "http://localhost:8080/embeddings", modelConfig.getEmbeddingServiceUrl());
+        assertEquals("Similarity threshold should match", 0.8, modelConfig.getSimilarityThreshold(), 0.001);
+        assertEquals("Min terms should match", 3L, modelConfig.getMinTerms());
+    }
+
+    /**
+     * Test 7: Multiple InferenceModelConfigs with one default
+     * Tests multiple InferenceModelConfigs within an InferenceIndexConfig, with one marked as default
+     */
+    @Test
+    public void testMultipleInferenceModelConfigs() throws 
CommitFailedException {
+        // Create enabled inference config with an index config containing 
multiple model configs
+        NodeBuilder inferenceConfigBuilder = createNodePath(rootBuilder, 
DEFAULT_CONFIG_PATH);
+        inferenceConfigBuilder.setProperty(InferenceConstants.TYPE, 
InferenceConfig.TYPE);
+        inferenceConfigBuilder.setProperty(InferenceConstants.ENABLED, true);
+
+        // Add index config
+        String indexName = "testIndex";
+        NodeBuilder indexConfigBuilder = 
inferenceConfigBuilder.child(indexName);
+        indexConfigBuilder.setProperty(InferenceConstants.TYPE, 
InferenceIndexConfig.TYPE);
+        indexConfigBuilder.setProperty(InferenceConstants.ENABLED, true);
+        indexConfigBuilder.setProperty(InferenceConstants.ENRICHER_CONFIG, 
ENRICHER_CONFIG);
+
+        // Add default model config
+        String defaultModelName = "defaultModel";
+        NodeBuilder defaultModelConfigBuilder = indexConfigBuilder.child(defaultModelName);
+        defaultModelConfigBuilder.setProperty(InferenceConstants.TYPE, InferenceModelConfig.TYPE);
+        defaultModelConfigBuilder.setProperty(InferenceConstants.ENABLED, true);
+        defaultModelConfigBuilder.setProperty(InferenceModelConfig.IS_DEFAULT, true);
+        defaultModelConfigBuilder.setProperty(InferenceModelConfig.MODEL, "default-embedding-model");
+        defaultModelConfigBuilder.setProperty(InferenceModelConfig.EMBEDDING_SERVICE_URL, "http://localhost:8080/default-embeddings");
+        defaultModelConfigBuilder.setProperty(InferenceModelConfig.SIMILARITY_THRESHOLD, 0.8);
+        defaultModelConfigBuilder.setProperty(InferenceModelConfig.MIN_TERMS, 3L);
+
+        // Add non-default model config
+        String nonDefaultModelName = "nonDefaultModel";
+        NodeBuilder nonDefaultModelConfigBuilder = indexConfigBuilder.child(nonDefaultModelName);
+        nonDefaultModelConfigBuilder.setProperty(InferenceConstants.TYPE, InferenceModelConfig.TYPE);
+        nonDefaultModelConfigBuilder.setProperty(InferenceConstants.ENABLED, true);
+        nonDefaultModelConfigBuilder.setProperty(InferenceModelConfig.IS_DEFAULT, false);
+        nonDefaultModelConfigBuilder.setProperty(InferenceModelConfig.MODEL, "non-default-embedding-model");
+        nonDefaultModelConfigBuilder.setProperty(InferenceModelConfig.EMBEDDING_SERVICE_URL, "http://localhost:8080/non-default-embeddings");
+        nonDefaultModelConfigBuilder.setProperty(InferenceModelConfig.SIMILARITY_THRESHOLD, 0.7);
+        nonDefaultModelConfigBuilder.setProperty(InferenceModelConfig.MIN_TERMS, 2L);
+
+        // Commit the changes
+        nodeStore.merge(rootBuilder, EmptyHook.INSTANCE, CommitInfo.EMPTY);
+
+        // Create InferenceConfig object
+        InferenceConfig.reInitialize(nodeStore, DEFAULT_CONFIG_PATH, true);
+        InferenceConfig inferenceConfig = InferenceConfig.getInstance();
+
+        // Verify the state
+        assertTrue("InferenceConfig should be enabled", 
inferenceConfig.isEnabled());
+
+        InferenceIndexConfig indexConfig = 
inferenceConfig.getInferenceIndexConfig(indexName);
+        assertTrue("Index config should be enabled", indexConfig.isEnabled());
+        assertEquals("Should have two model configs", 2, 
indexConfig.getInferenceModelConfigs().size());
+
+        // Verify default model config
+        InferenceModelConfig defaultModel = 
indexConfig.getDefaultEnabledModel();
+        assertNotNull("Default model should exist", defaultModel);
+        assertEquals("Default model name should match", defaultModelName, 
defaultModel.getInferenceModelConfigName());
+        assertTrue("Default model should be marked as default", 
defaultModel.isDefault());
+
+        // Verify non-default model config
+        InferenceModelConfig nonDefaultModel = 
indexConfig.getInferenceModelConfigs().get(nonDefaultModelName);
+        assertNotNull("Non-default model should exist", nonDefaultModel);
+        assertFalse("Non-default model should not be marked as default", 
nonDefaultModel.isDefault());
+    }
+
+    /**
+     * Test 8: Complete configuration with multiple indexes and models
+     * Tests a complex configuration with multiple indexes and models
+     */
+    @Test
+    public void testCompleteConfiguration() throws CommitFailedException {
+
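+        // Precondition: the auth token, inference service URL and payload model environment variables must be set for this run (supplied via pom.xml during the test); otherwise the test fails fast here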
+        assertTrue(isAuthEnvVarDefined && isInferenceUrlEnvVarDefined && isInferencePayloadModelDefined);
+        // Create enabled inference config with multiple index configs
+        NodeBuilder inferenceConfigBuilder = createNodePath(rootBuilder, DEFAULT_CONFIG_PATH);
+        inferenceConfigBuilder.setProperty(InferenceConstants.TYPE, InferenceConfig.TYPE);
+        inferenceConfigBuilder.setProperty(InferenceConstants.ENABLED, true);
+
+        // First index config
+        String indexName1 = "testIndex1";
+        NodeBuilder indexConfigBuilder1 = inferenceConfigBuilder.child(indexName1);
+        indexConfigBuilder1.setProperty(InferenceConstants.TYPE, InferenceIndexConfig.TYPE);
+        indexConfigBuilder1.setProperty(InferenceConstants.ENABLED, true);
+        indexConfigBuilder1.setProperty(InferenceConstants.ENRICHER_CONFIG, ENRICHER_CONFIG);
+
+        // Add model config to first index
+        String modelName1 = "testModel1";
+        NodeBuilder modelConfigBuilder1 = indexConfigBuilder1.child(modelName1);
+        modelConfigBuilder1.setProperty(InferenceConstants.TYPE, InferenceModelConfig.TYPE);
+        modelConfigBuilder1.setProperty(InferenceConstants.ENABLED, true);
+        modelConfigBuilder1.setProperty(InferenceModelConfig.IS_DEFAULT, true);
+        modelConfigBuilder1.setProperty(InferenceModelConfig.MODEL, "model1");
+        modelConfigBuilder1.setProperty(InferenceModelConfig.EMBEDDING_SERVICE_URL, INFERENCE_SERVICE_URL_ENV_VARIABLE);
+        modelConfigBuilder1.setProperty(InferenceModelConfig.SIMILARITY_THRESHOLD, 0.8);
+        modelConfigBuilder1.setProperty(InferenceModelConfig.MIN_TERMS, 3L);
+
+        // Add header and payload for model1
+        NodeBuilder headerBuilder1 = modelConfigBuilder1.child(InferenceModelConfig.HEADER);
+        headerBuilder1.setProperty("Content-Type", "application/json");
+        headerBuilder1.setProperty("Authorization", AUTH_ENV_VARIABLE);
+        headerBuilder1.setProperty("jcr:primaryType", "nt:unstructured");
+
+        NodeBuilder payloadBuilder1 = modelConfigBuilder1.child(InferenceModelConfig.INFERENCE_PAYLOAD);
+        payloadBuilder1.setProperty("model", INFERENCE_PAYLOAD_MODEL);
+        payloadBuilder1.setProperty("jcr:primaryType", "nt:unstructured");
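+        // jcr:primaryType is JCR bookkeeping only; the assertions below expect it to be stripped from the generated header and payload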
+
+        // Second index config
+        String indexName2 = "testIndex2";
+        NodeBuilder indexConfigBuilder2 = inferenceConfigBuilder.child(indexName2);
+        indexConfigBuilder2.setProperty(InferenceConstants.TYPE, InferenceIndexConfig.TYPE);
+        indexConfigBuilder2.setProperty(InferenceConstants.ENABLED, true);
+        indexConfigBuilder2.setProperty(InferenceConstants.ENRICHER_CONFIG, ENRICHER_CONFIG);
+
+        // Add model config to second index
+        String modelName2 = "testModel2";
+        NodeBuilder modelConfigBuilder2 = indexConfigBuilder2.child(modelName2);
+        modelConfigBuilder2.setProperty(InferenceConstants.TYPE, InferenceModelConfig.TYPE);
+        modelConfigBuilder2.setProperty(InferenceConstants.ENABLED, true);
+        modelConfigBuilder2.setProperty(InferenceModelConfig.IS_DEFAULT, true);
+        modelConfigBuilder2.setProperty(InferenceModelConfig.MODEL, "model2");
+        modelConfigBuilder2.setProperty(InferenceModelConfig.EMBEDDING_SERVICE_URL, INFERENCE_SERVICE_URL_ENV_VARIABLE);
+        modelConfigBuilder2.setProperty(InferenceModelConfig.SIMILARITY_THRESHOLD, 0.7);
+        modelConfigBuilder2.setProperty(InferenceModelConfig.MIN_TERMS, 2L);
+        modelConfigBuilder2.setProperty(InferenceModelConfig.TIMEOUT, 10000L);
+        modelConfigBuilder2.setProperty(InferenceModelConfig.NUM_CANDIDATES, 200);
+        modelConfigBuilder2.setProperty(InferenceModelConfig.CACHE_SIZE, 200);
+
+        // Add header and payload for model2
+        NodeBuilder headerBuilder2 = modelConfigBuilder2.child(InferenceModelConfig.HEADER);
+        headerBuilder2.setProperty("Content-Type", "application/json");
+        headerBuilder2.setProperty("Authorization", AUTH_ENV_VARIABLE);
+        headerBuilder2.setProperty("jcr:primaryType", "nt:unstructured");
+
+        NodeBuilder payloadBuilder2 = modelConfigBuilder2.child(InferenceModelConfig.INFERENCE_PAYLOAD);
+        payloadBuilder2.setProperty("model", "text-embedding-3-large");
+        payloadBuilder2.setProperty("dimensions", 1024);
+        payloadBuilder2.setProperty("jcr:primaryType", "nt:unstructured");
+
+        // Commit the changes
+        nodeStore.merge(rootBuilder, EmptyHook.INSTANCE, CommitInfo.EMPTY);
+
+        // Create InferenceConfig object
+        InferenceConfig.reInitialize(nodeStore, DEFAULT_CONFIG_PATH, true);
+        InferenceConfig inferenceConfig = InferenceConfig.getInstance();
+
+        // Verify the state
+        assertTrue("InferenceConfig should be enabled", 
inferenceConfig.isEnabled());
+
+        // Test first index config
+        InferenceIndexConfig indexConfig1 = inferenceConfig.getInferenceIndexConfig(indexName1);
+        assertTrue("Index config 1 should be enabled", indexConfig1.isEnabled());
+        assertEquals("Index config 1 should have one model config", 1, indexConfig1.getInferenceModelConfigs().size());
+
+        // Test model config in first index
+        InferenceModelConfig modelConfig1 = indexConfig1.getInferenceModelConfigs().get(modelName1);
+        assertNotNull("Model config 1 should exist", modelConfig1);
+        assertTrue("Model config 1 should be enabled", modelConfig1.isEnabled());
+        assertTrue("Model config 1 should be default", modelConfig1.isDefault());
+        assertEquals("Model 1 name should match", "model1", modelConfig1.getModel());
+        assertEquals("Model 1 similarity threshold should match", 0.8, modelConfig1.getSimilarityThreshold(), 0.001);
+        assertFalse("Payload should not have jcr:primaryType property",
+            modelConfig1.getPayload().getInferencePayload("input text").contains("jcr:primaryType"));
+        assertFalse("Header Payload should not have jcr:primaryType property", modelConfig1.getHeader().getInferenceHeaderPayload().containsKey("jcr:primaryType"));
+        assertFalse("Model 1 payload model should not contain " + INFERENCE_PAYLOAD_MODEL, modelConfig1.getPayload().getInferencePayload("input-text").contains(INFERENCE_PAYLOAD_MODEL));
+
+        // Test second index config
+        InferenceIndexConfig indexConfig2 = inferenceConfig.getInferenceIndexConfig(indexName2);
+        assertTrue("Index config 2 should be enabled", indexConfig2.isEnabled());
+        assertEquals("Index config 2 should have one model config", 1, indexConfig2.getInferenceModelConfigs().size());
+
+        // Test model config in second index
+        InferenceModelConfig modelConfig2 = indexConfig2.getInferenceModelConfigs().get(modelName2);
+        assertNotNull("Model config 2 should exist", modelConfig2);
+        assertTrue("Model config 2 should be enabled", modelConfig2.isEnabled());
+        assertTrue("Model config 2 should be default", modelConfig2.isDefault());
+        assertEquals("Model 2 name should match", "model2", modelConfig2.getModel());
+        assertEquals("Model 2 similarity threshold should match", 0.7, modelConfig2.getSimilarityThreshold(), 0.001);
+        assertEquals("Model 2 timeout should match", 10000L, modelConfig2.getTimeoutMillis());
+        assertEquals("Model 2 num candidates should match", 200, modelConfig2.getNumCandidates());
+        assertEquals("Model 2 cache size should match", 200, modelConfig2.getCacheSize());
+        assertNotEquals("Model 2 embedding service URL should not match", INFERENCE_SERVICE_URL_ENV_VARIABLE, modelConfig2.getEmbeddingServiceUrl());
+        assertNotEquals("Model 2 embedding service URL should not match empty string", "", modelConfig2.getEmbeddingServiceUrl());
+        // the resolved URL is picked up from the environment configured in pom.xml during the test
+        assertEquals("Model 2 embedding service URL should match", "http://localhost:8080/embeddings", modelConfig2.getEmbeddingServiceUrl());
+        assertFalse("Payload should not have jcr:primaryType property",
+            modelConfig2.getPayload().getInferencePayload("input text").contains("jcr:primaryType"));
+        assertFalse("Header Payload should not have jcr:primaryType property", modelConfig2.getHeader().getInferenceHeaderPayload().containsKey("jcr:primaryType"));
+
+    }
+
+    /**
+     * Test 9: Test refreshConfig method
+     * Tests that the refreshConfig method properly updates the configuration
+     */
+    @Test
+    public void testRefreshConfig() throws CommitFailedException {
+        // Create initial enabled inference config with one index
+        NodeBuilder inferenceConfigBuilder = createNodePath(rootBuilder, DEFAULT_CONFIG_PATH);
+        inferenceConfigBuilder.setProperty(InferenceConstants.TYPE, InferenceConfig.TYPE);
+        inferenceConfigBuilder.setProperty(InferenceConstants.ENABLED, true);
+
+        // Add index config
+        String indexName = "testIndex";
+        NodeBuilder indexConfigBuilder = inferenceConfigBuilder.child(indexName);
+        indexConfigBuilder.setProperty(InferenceConstants.TYPE, InferenceIndexConfig.TYPE);
+        indexConfigBuilder.setProperty(InferenceConstants.ENABLED, true);
+        indexConfigBuilder.setProperty(InferenceConstants.ENRICHER_CONFIG, ENRICHER_CONFIG);
+
+        // Commit the changes
+        nodeStore.merge(rootBuilder, EmptyHook.INSTANCE, CommitInfo.EMPTY);
+
+        // Create InferenceConfig object
+        InferenceConfig.reInitialize(nodeStore, DEFAULT_CONFIG_PATH, true);
+        InferenceConfig inferenceConfig = InferenceConfig.getInstance();
+
+        // Verify initial state
+        assertTrue("InferenceConfig should be enabled", 
inferenceConfig.isEnabled());
+
+        // Modify the configuration in the nodeStore
+        NodeBuilder updatedRootBuilder = nodeStore.getRoot().builder();
+        NodeBuilder updatedConfigBuilder = createNodePath(updatedRootBuilder, DEFAULT_CONFIG_PATH);
+
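+        // Changes committed below should not be visible to the existing InferenceConfig instance until reInitialize() is called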
+        // Add a new index config
+        String newIndexName = "newIndex";
+        NodeBuilder newIndexConfigBuilder = updatedConfigBuilder.child(newIndexName);
+        newIndexConfigBuilder.setProperty(InferenceConstants.TYPE, InferenceIndexConfig.TYPE);
+        newIndexConfigBuilder.setProperty(InferenceConstants.ENABLED, true);
+        newIndexConfigBuilder.setProperty(InferenceConstants.ENRICHER_CONFIG, ENRICHER_CONFIG);
+
+        // Commit the changes
+        nodeStore.merge(updatedRootBuilder, EmptyHook.INSTANCE, CommitInfo.EMPTY);
+        assertFalse("Should not have new index config", inferenceConfig.getInferenceIndexConfig(newIndexName).isEnabled());
+
+        // Refresh the InferenceConfig
+        InferenceConfig.reInitialize();
+        // Verify updated state
+        assertTrue("InferenceConfig should be enabled", 
inferenceConfig.isEnabled());
+        assertTrue("Should contain the new index config", 
inferenceConfig.getInferenceIndexConfig(newIndexName).isEnabled());
+    }
+
+    /**
+     * Utility method to create a path of nodes
+     */
+    private NodeBuilder createNodePath(NodeBuilder rootBuilder, String path) {
+        NodeBuilder builder = rootBuilder;
+        for (String elem : PathUtils.elements(path)) {
+            builder = builder.child(elem);
+        }
+        return builder;
+    }
+} 
\ No newline at end of file
