This is an automated email from the ASF dual-hosted git repository.

martinzink pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/nifi-minifi-cpp.git

commit e2de4ca723be926301c9be0c18f55bfe7e70bbb9
Author: Gabor Gyimesi <[email protected]>
AuthorDate: Tue Mar 10 13:11:04 2026 +0100

    MINIFICPP-2686 Move Prometheus tests to modular docker tests
    
    This closes #2092
    
    Signed-off-by: Martin Zink <[email protected]>
---
 .../containers/minifi_container.py                 |   8 +-
 .../minifi_test_framework/steps/checking_steps.py  |   2 -
 .../steps/configuration_steps.py                   |  18 ++-
 .../src/minifi_test_framework/steps/core_steps.py  |  10 --
 docker/RunBehaveTests.sh                           |   3 +-
 docker/requirements.txt                            |   1 -
 docker/test/integration/cluster/ContainerStore.py  |  24 ----
 .../test/integration/cluster/DockerTestCluster.py  |  17 ---
 .../cluster/checkers/PrometheusChecker.py          | 136 ------------------
 .../cluster/containers/MinifiContainer.py          |  17 ---
 .../cluster/containers/PrometheusContainer.py      | 100 -------------
 .../features/MiNiFi_integration_test_driver.py     |  15 --
 docker/test/integration/features/steps/steps.py    |  37 -----
 .../prometheus/tests/features/environment.py       |  43 ++++++
 .../prometheus/tests}/features/prometheus.feature  |   8 +-
 .../tests/features/resources/prometheus_checker.py | 156 +++++++++++++++++++++
 .../tests/features/steps/prometheus_container.py   |  92 ++++++++++++
 .../prometheus/tests/features/steps/steps.py       |  73 ++++++++++
 18 files changed, 390 insertions(+), 370 deletions(-)

diff --git 
a/behave_framework/src/minifi_test_framework/containers/minifi_container.py 
b/behave_framework/src/minifi_test_framework/containers/minifi_container.py
index 8d2b5f992..4fc936a9b 100644
--- a/behave_framework/src/minifi_test_framework/containers/minifi_container.py
+++ b/behave_framework/src/minifi_test_framework/containers/minifi_container.py
@@ -14,7 +14,6 @@
 #  See the License for the specific language governing permissions and
 #  limitations under the License.
 #
-
 import logging
 import os
 from pathlib import Path
@@ -42,6 +41,8 @@ class MinifiContainer(Container):
         self.files.append(File("/tmp/resources/root_ca.crt", 
crypto.dump_certificate(type=crypto.FILETYPE_PEM, 
cert=test_context.root_ca_cert)))
         self.files.append(File("/tmp/resources/minifi_client.crt", 
crypto.dump_certificate(type=crypto.FILETYPE_PEM, cert=minifi_client_cert)))
         self.files.append(File("/tmp/resources/minifi_client.key", 
crypto.dump_privatekey(type=crypto.FILETYPE_PEM, pkey=minifi_client_key)))
+        self.files.append(File("/tmp/resources/minifi_merged_cert.crt",
+                               
crypto.dump_certificate(type=crypto.FILETYPE_PEM, cert=minifi_client_cert) + 
crypto.dump_privatekey(type=crypto.FILETYPE_PEM, pkey=minifi_client_key)))
 
         clientuser_cert, clientuser_key = make_client_cert("clientuser", 
ca_cert=test_context.root_ca_cert, ca_key=test_context.root_ca_key)
         self.files.append(File("/tmp/resources/clientuser.crt", 
crypto.dump_certificate(type=crypto.FILETYPE_PEM, cert=clientuser_cert)))
@@ -97,6 +98,11 @@ class MinifiContainer(Container):
     def enable_openssl_fips_mode(self):
         self.properties["nifi.openssl.fips.support.enable"] = "true"
 
+    def enable_log_metrics_publisher(self):
+        self.properties["nifi.metrics.publisher.LogMetricsPublisher.metrics"] 
= "RepositoryMetrics"
+        
self.properties["nifi.metrics.publisher.LogMetricsPublisher.logging.interval"] 
= "1s"
+        self.properties["nifi.metrics.publisher.class"] = "LogMetricsPublisher"
+
     def fetch_flow_config_from_flow_url(self):
         self.properties["nifi.c2.flow.url"] = 
f"http://minifi-c2-server-{self.scenario_id}:10090/c2/config?class=minifi-test-class";
 
diff --git a/behave_framework/src/minifi_test_framework/steps/checking_steps.py 
b/behave_framework/src/minifi_test_framework/steps/checking_steps.py
index 2538953d7..7c546d968 100644
--- a/behave_framework/src/minifi_test_framework/steps/checking_steps.py
+++ b/behave_framework/src/minifi_test_framework/steps/checking_steps.py
@@ -14,8 +14,6 @@
 #  See the License for the specific language governing permissions and
 #  limitations under the License.
 #
-
-
 import time
 import re
 
diff --git 
a/behave_framework/src/minifi_test_framework/steps/configuration_steps.py 
b/behave_framework/src/minifi_test_framework/steps/configuration_steps.py
index a9794936f..ee697a32c 100644
--- a/behave_framework/src/minifi_test_framework/steps/configuration_steps.py
+++ b/behave_framework/src/minifi_test_framework/steps/configuration_steps.py
@@ -15,7 +15,7 @@
 #  limitations under the License.
 #
 
-from behave import step
+from behave import step, given
 
 from minifi_test_framework.core.minifi_test_context import MinifiTestContext
 
@@ -26,12 +26,20 @@ def step_impl(context: MinifiTestContext, config_key: str, 
config_value: str):
 
 
 @step("log metrics publisher is enabled in MiNiFi")
-def step_impl(context):
-    
context.get_or_create_default_minifi_container().set_property("nifi.metrics.publisher.LogMetricsPublisher.metrics",
 "RepositoryMetrics")
-    
context.get_or_create_default_minifi_container().set_property("nifi.metrics.publisher.LogMetricsPublisher.logging.interval",
 "1s")
-    
context.get_or_create_default_minifi_container().set_property("nifi.metrics.publisher.class",
 "LogMetricsPublisher")
+def step_impl(context: MinifiTestContext):
+    
context.get_or_create_default_minifi_container().enable_log_metrics_publisher()
 
 
 @step('log property "{log_property_key}" is set to "{log_property_value}"')
 def step_impl(context: MinifiTestContext, log_property_key: str, 
log_property_value: str):
     
context.get_or_create_default_minifi_container().set_log_property(log_property_key,
 log_property_value)
+
+
+@given("OpenSSL FIPS mode is enabled in MiNiFi")
+def step_impl(context: MinifiTestContext):
+    context.get_or_create_default_minifi_container().enable_openssl_fips_mode()
+
+
+@given("flow configuration path is set up in flow url property")
+def step_impl(context: MinifiTestContext):
+    
context.get_or_create_default_minifi_container().fetch_flow_config_from_flow_url()
diff --git a/behave_framework/src/minifi_test_framework/steps/core_steps.py 
b/behave_framework/src/minifi_test_framework/steps/core_steps.py
index b332b7e46..9401056f5 100644
--- a/behave_framework/src/minifi_test_framework/steps/core_steps.py
+++ b/behave_framework/src/minifi_test_framework/steps/core_steps.py
@@ -136,11 +136,6 @@ def step_impl(context: MinifiTestContext, container_name: 
str):
     context.get_or_create_minifi_container(container_name).kill()
 
 
-@given("OpenSSL FIPS mode is enabled in MiNiFi")
-def step_impl(context: MinifiTestContext):
-    context.get_or_create_default_minifi_container().enable_openssl_fips_mode()
-
-
 @step("the http proxy server is set up")
 def step_impl(context: MinifiTestContext):
     context.containers["http-proxy"] = HttpProxy(context)
@@ -151,11 +146,6 @@ def step_impl(context: MinifiTestContext):
     context.containers["nifi"] = NifiContainer(context)
 
 
-@given("flow configuration path is set up in flow url property")
-def step_impl(context: MinifiTestContext):
-    
context.get_or_create_default_minifi_container().fetch_flow_config_from_flow_url()
-
-
 @step("{duration} later")
 def step_impl(context: MinifiTestContext, duration: str):
     time.sleep(humanfriendly.parse_timespan(duration))
diff --git a/docker/RunBehaveTests.sh b/docker/RunBehaveTests.sh
index f982e3c6a..e4f6cda9f 100755
--- a/docker/RunBehaveTests.sh
+++ b/docker/RunBehaveTests.sh
@@ -208,4 +208,5 @@ exec \
     "${docker_dir}/../extensions/grafana-loki/tests/features" \
     "${docker_dir}/../extensions/lua/tests/features/" \
     "${docker_dir}/../extensions/civetweb/tests/features/" \
-    "${docker_dir}/../extensions/mqtt/tests/features/"
+    "${docker_dir}/../extensions/mqtt/tests/features/" \
+    "${docker_dir}/../extensions/prometheus/tests/features/"
diff --git a/docker/requirements.txt b/docker/requirements.txt
index 0092b5e25..a96d59bd6 100644
--- a/docker/requirements.txt
+++ b/docker/requirements.txt
@@ -7,6 +7,5 @@ m2crypto==0.46.2
 watchdog==6.0.0
 pyopenssl==25.0.0
 azure-storage-blob==12.24.1
-prometheus-api-client==0.5.5
 humanfriendly==10.0
 requests<2.29  # https://github.com/docker/docker-py/issues/3113
diff --git a/docker/test/integration/cluster/ContainerStore.py 
b/docker/test/integration/cluster/ContainerStore.py
index 4b1b69a29..acf1dd82a 100644
--- a/docker/test/integration/cluster/ContainerStore.py
+++ b/docker/test/integration/cluster/ContainerStore.py
@@ -24,7 +24,6 @@ from .containers.PostgreSQLServerContainer import 
PostgreSQLServerContainer
 from .containers.SyslogUdpClientContainer import SyslogUdpClientContainer
 from .containers.SyslogTcpClientContainer import SyslogTcpClientContainer
 from .containers.MinifiAsPodInKubernetesCluster import 
MinifiAsPodInKubernetesCluster
-from .containers.PrometheusContainer import PrometheusContainer
 from .FeatureContext import FeatureContext
 
 
@@ -145,23 +144,6 @@ class ContainerStore:
                                                   network=self.network,
                                                   image_store=self.image_store,
                                                   command=command))
-        elif engine == "prometheus":
-            return self.containers.setdefault(container_name,
-                                              
PrometheusContainer(feature_context=feature_context,
-                                                                  
name=container_name,
-                                                                  
vols=self.vols,
-                                                                  
network=self.network,
-                                                                  
image_store=self.image_store,
-                                                                  
command=command))
-        elif engine == "prometheus-ssl":
-            return self.containers.setdefault(container_name,
-                                              
PrometheusContainer(feature_context=feature_context,
-                                                                  
name=container_name,
-                                                                  
vols=self.vols,
-                                                                  
network=self.network,
-                                                                  
image_store=self.image_store,
-                                                                  
command=command,
-                                                                  ssl=True))
         else:
             raise Exception('invalid flow engine: \'%s\'' % engine)
 
@@ -206,12 +188,6 @@ class ContainerStore:
     def set_ssl_context_properties_in_minifi(self):
         self.minifi_options.set_ssl_context_properties = True
 
-    def enable_prometheus_in_minifi(self):
-        self.minifi_options.enable_prometheus = True
-
-    def enable_prometheus_with_ssl_in_minifi(self):
-        self.minifi_options.enable_prometheus_with_ssl = True
-
     def enable_sql_in_minifi(self):
         self.minifi_options.enable_sql = True
 
diff --git a/docker/test/integration/cluster/DockerTestCluster.py 
b/docker/test/integration/cluster/DockerTestCluster.py
index 006aa97c5..9f95afacf 100644
--- a/docker/test/integration/cluster/DockerTestCluster.py
+++ b/docker/test/integration/cluster/DockerTestCluster.py
@@ -21,7 +21,6 @@ from .ContainerStore import ContainerStore
 from .DockerCommunicator import DockerCommunicator
 from .checkers.AzureChecker import AzureChecker
 from .checkers.PostgresChecker import PostgresChecker
-from .checkers.PrometheusChecker import PrometheusChecker
 from .checkers.ModbusChecker import ModbusChecker
 from utils import get_peak_memory_usage, get_minifi_pid, get_memory_usage
 
@@ -34,7 +33,6 @@ class DockerTestCluster:
         self.container_store = 
ContainerStore(self.container_communicator.create_docker_network(feature_id), 
context.image_store, context.kubernetes_proxy, feature_id=feature_id)
         self.azure_checker = AzureChecker(self.container_communicator)
         self.postgres_checker = PostgresChecker(self.container_communicator)
-        self.prometheus_checker = PrometheusChecker()
         self.modbus_checker = ModbusChecker(self.container_communicator)
 
     def cleanup(self):
@@ -79,12 +77,6 @@ class DockerTestCluster:
     def set_ssl_context_properties_in_minifi(self):
         self.container_store.set_ssl_context_properties_in_minifi()
 
-    def enable_prometheus_in_minifi(self):
-        self.container_store.enable_prometheus_in_minifi()
-
-    def enable_prometheus_with_ssl_in_minifi(self):
-        self.container_store.enable_prometheus_with_ssl_in_minifi()
-
     def enable_openssl_fips_mode_in_minifi(self):
         self.container_store.enable_openssl_fips_mode_in_minifi()
 
@@ -196,15 +188,6 @@ class DockerTestCluster:
     def segfault_happened(self):
         return self.segfault
 
-    def wait_for_metric_class_on_prometheus(self, metric_class, 
timeout_seconds):
-        return 
self.prometheus_checker.wait_for_metric_class_on_prometheus(metric_class, 
timeout_seconds)
-
-    def wait_for_processor_metric_on_prometheus(self, metric_class, 
timeout_seconds, processor_name):
-        return 
self.prometheus_checker.wait_for_processor_metric_on_prometheus(metric_class, 
timeout_seconds, processor_name)
-
-    def verify_all_metric_types_are_defined_once(self):
-        return 
self.prometheus_checker.verify_all_metric_types_are_defined_once()
-
     def check_minifi_log_matches_regex(self, regex, timeout_seconds=60, 
count=1):
         for container_name in 
self.container_store.get_container_names("minifi-cpp"):
             line_found = self.wait_for_app_logs_regex(container_name, regex, 
timeout_seconds, count)
diff --git a/docker/test/integration/cluster/checkers/PrometheusChecker.py 
b/docker/test/integration/cluster/checkers/PrometheusChecker.py
deleted file mode 100644
index d73859f58..000000000
--- a/docker/test/integration/cluster/checkers/PrometheusChecker.py
+++ /dev/null
@@ -1,136 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-import requests
-from prometheus_api_client import PrometheusConnect
-from utils import wait_for
-
-
-class PrometheusChecker:
-    def __init__(self):
-        self.prometheus_client = 
PrometheusConnect(url="http://localhost:9090";, disable_ssl=True)
-
-    def wait_for_metric_class_on_prometheus(self, metric_class, 
timeout_seconds):
-        return wait_for(lambda: self.verify_metric_class(metric_class), 
timeout_seconds)
-
-    def wait_for_processor_metric_on_prometheus(self, metric_class, 
timeout_seconds, processor_name):
-        return wait_for(lambda: self.verify_processor_metric(metric_class, 
processor_name), timeout_seconds)
-
-    def verify_processor_metric(self, metric_class, processor_name):
-        if metric_class == "GetFileMetrics":
-            return self.verify_getfile_metrics(metric_class, processor_name)
-        else:
-            return self.verify_general_processor_metrics(metric_class, 
processor_name)
-
-    def verify_metric_class(self, metric_class):
-        if metric_class == "RepositoryMetrics":
-            return self.verify_repository_metrics()
-        elif metric_class == "QueueMetrics":
-            return self.verify_queue_metrics()
-        elif metric_class == "FlowInformation":
-            return self.verify_flow_information_metrics()
-        elif metric_class == "DeviceInfoNode":
-            return self.verify_device_info_node_metrics()
-        elif metric_class == "AgentStatus":
-            return self.verify_agent_status_metrics()
-        else:
-            raise Exception("Metric class '%s' verification is not 
implemented" % metric_class)
-
-    def verify_repository_metrics(self):
-        label_list = [{'repository_name': 'provenance'}, {'repository_name': 
'flowfile'}, {'repository_name': 'content'}]
-        # Only flowfile and content repositories are using rocksdb by default, 
so rocksdb specific metrics are only present there
-        return all((self.verify_metrics_exist(['minifi_is_running', 
'minifi_is_full', 'minifi_repository_size_bytes', 
'minifi_max_repository_size_bytes', 'minifi_repository_entry_count'], 
'RepositoryMetrics', labels) for labels in label_list)) and \
-            
all((self.verify_metric_larger_than_zero('minifi_repository_size_bytes', 
'RepositoryMetrics', labels) for labels in label_list[1:3])) and \
-            
all((self.verify_metrics_exist(['minifi_rocksdb_table_readers_size_bytes', 
'minifi_rocksdb_all_memory_tables_size_bytes'], 'RepositoryMetrics', labels) 
for labels in label_list[1:3]))
-
-    def verify_queue_metrics(self):
-        return self.verify_metrics_exist(['minifi_queue_data_size', 
'minifi_queue_data_size_max', 'minifi_queue_size', 'minifi_queue_size_max'], 
'QueueMetrics')
-
-    def verify_general_processor_metrics(self, metric_class, processor_name):
-        labels = {'processor_name': processor_name}
-        return 
self.verify_metrics_exist(['minifi_average_onTrigger_runtime_milliseconds', 
'minifi_last_onTrigger_runtime_milliseconds',
-                                          
'minifi_average_session_commit_runtime_milliseconds', 
'minifi_last_session_commit_runtime_milliseconds',
-                                          'minifi_incoming_flow_files', 
'minifi_incoming_bytes', 'minifi_bytes_read', 'minifi_bytes_written'], 
metric_class, labels) and \
-            
self.verify_metrics_larger_than_zero(['minifi_onTrigger_invocations', 
'minifi_transferred_flow_files', 'minifi_transferred_to_success',
-                                                  'minifi_transferred_bytes', 
'minifi_processing_nanos'],
-                                                 metric_class, labels)
-
-    def verify_getfile_metrics(self, metric_class, processor_name):
-        labels = {'processor_name': processor_name}
-        return self.verify_general_processor_metrics(metric_class, 
processor_name) and \
-            self.verify_metrics_exist(['minifi_input_bytes', 
'minifi_accepted_files'], metric_class, labels)
-
-    def verify_flow_information_metrics(self):
-        return self.verify_metrics_exist(['minifi_queue_data_size', 
'minifi_queue_data_size_max', 'minifi_queue_size', 'minifi_queue_size_max',
-                                          'minifi_bytes_read', 
'minifi_bytes_written', 'minifi_flow_files_in', 'minifi_flow_files_out', 
'minifi_bytes_in', 'minifi_bytes_out',
-                                          'minifi_invocations', 
'minifi_processing_nanos'], 'FlowInformation') and \
-            self.verify_metric_exists('minifi_is_running', 'FlowInformation', 
{'component_name': 'FlowController'})
-
-    def verify_device_info_node_metrics(self):
-        return self.verify_metrics_exist(['minifi_physical_mem', 
'minifi_memory_usage', 'minifi_cpu_utilization', 'minifi_cpu_load_average'], 
'DeviceInfoNode')
-
-    def verify_agent_status_metrics(self):
-        label_list = [{'repository_name': 'flowfile'}, {'repository_name': 
'content'}]
-        # Only flowfile and content repositories are using rocksdb by default, 
so rocksdb specific metrics are only present there
-        for labels in label_list:
-            if not (self.verify_metric_exists('minifi_is_running', 
'AgentStatus', labels)
-                    and self.verify_metric_exists('minifi_is_full', 
'AgentStatus', labels)
-                    and 
self.verify_metric_exists('minifi_max_repository_size_bytes', 'AgentStatus', 
labels)
-                    and 
self.verify_metric_larger_than_zero('minifi_repository_size_bytes', 
'AgentStatus', labels)
-                    and 
self.verify_metric_exists('minifi_repository_entry_count', 'AgentStatus', 
labels)
-                    and 
self.verify_metric_exists('minifi_rocksdb_table_readers_size_bytes', 
'AgentStatus', labels)
-                    and 
self.verify_metric_exists('minifi_rocksdb_all_memory_tables_size_bytes', 
'AgentStatus', labels)):
-                return False
-
-        # provenance repository is NoOpRepository by default which has zero 
size
-        if not (self.verify_metric_exists('minifi_is_running', 'AgentStatus', 
{'repository_name': 'provenance'})
-                and self.verify_metric_exists('minifi_is_full', 'AgentStatus', 
{'repository_name': 'provenance'})
-                and 
self.verify_metric_exists('minifi_max_repository_size_bytes', 'AgentStatus', 
{'repository_name': 'provenance'})
-                and self.verify_metric_exists('minifi_repository_size_bytes', 
'AgentStatus', {'repository_name': 'provenance'})
-                and self.verify_metric_exists('minifi_repository_entry_count', 
'AgentStatus', {'repository_name': 'provenance'})):
-            return False
-        return self.verify_metric_exists('minifi_uptime_milliseconds', 
'AgentStatus') and \
-            self.verify_metric_exists('minifi_agent_memory_usage_bytes', 
'AgentStatus') and \
-            self.verify_metric_exists('minifi_agent_cpu_utilization', 
'AgentStatus')
-
-    def verify_metric_exists(self, metric_name, metric_class, labels={}):
-        labels['metric_class'] = metric_class
-        labels['agent_identifier'] = "Agent1"
-        return 
len(self.prometheus_client.get_current_metric_value(metric_name=metric_name, 
label_config=labels)) > 0
-
-    def verify_metrics_exist(self, metric_names, metric_class, labels={}):
-        return all((self.verify_metric_exists(metric_name, metric_class, 
labels) for metric_name in metric_names))
-
-    def verify_metric_larger_than_zero(self, metric_name, metric_class, 
labels={}):
-        labels['metric_class'] = metric_class
-        result = 
self.prometheus_client.get_current_metric_value(metric_name=metric_name, 
label_config=labels)
-        return len(result) > 0 and int(result[0]['value'][1]) > 0
-
-    def verify_metrics_larger_than_zero(self, metric_names, metric_class, 
labels={}):
-        return all((self.verify_metric_larger_than_zero(metric_name, 
metric_class, labels) for metric_name in metric_names))
-
-    def verify_all_metric_types_are_defined_once(self):
-        response = requests.get("http://127.0.0.1:9936/metrics";)
-        if response.status_code < 200 or response.status_code >= 300:
-            return False
-
-        metric_types = set()
-        for line in response.text.split("\n"):
-            if line.startswith("# TYPE"):
-                metric_type = line.split(" ")[2]
-                if metric_type in metric_types:
-                    return False
-                metric_types.add(metric_type)
-
-        return True
diff --git a/docker/test/integration/cluster/containers/MinifiContainer.py 
b/docker/test/integration/cluster/containers/MinifiContainer.py
index fcb73c0a4..e3675ac81 100644
--- a/docker/test/integration/cluster/containers/MinifiContainer.py
+++ b/docker/test/integration/cluster/containers/MinifiContainer.py
@@ -28,8 +28,6 @@ from minifi.flow_serialization.Minifi_flow_json_serializer 
import Minifi_flow_js
 class MinifiOptions:
     def __init__(self):
         self.enable_provenance = False
-        self.enable_prometheus = False
-        self.enable_prometheus_with_ssl = False
         self.enable_sql = False
         self.use_nifi_python_processors_with_system_python_packages_installed 
= False
         self.use_nifi_python_processors_with_virtualenv = False
@@ -140,16 +138,6 @@ class MinifiContainer(FlowContainer):
                 
f.write("nifi.provenance.repository.class.name=NoOpRepository\n")
 
             metrics_publisher_classes = []
-            if self.options.enable_prometheus or 
self.options.enable_prometheus_with_ssl:
-                f.write("nifi.metrics.publisher.agent.identifier=Agent1\n")
-                
f.write("nifi.metrics.publisher.PrometheusMetricsPublisher.port=9936\n")
-                
f.write("nifi.metrics.publisher.PrometheusMetricsPublisher.metrics=RepositoryMetrics,QueueMetrics,PutFileMetrics,processorMetrics/Get.*,FlowInformation,DeviceInfoNode,AgentStatus\n")
-                metrics_publisher_classes.append("PrometheusMetricsPublisher")
-
-            if self.options.enable_prometheus_with_ssl:
-                
f.write("nifi.metrics.publisher.PrometheusMetricsPublisher.certificate=/tmp/resources/minifi_merged_cert.crt\n")
-                
f.write("nifi.metrics.publisher.PrometheusMetricsPublisher.ca.certificate=/tmp/resources/root_ca.crt\n")
-
             if self.options.enable_log_metrics_publisher:
                 
f.write("nifi.metrics.publisher.LogMetricsPublisher.metrics=RepositoryMetrics\n")
                 
f.write("nifi.metrics.publisher.LogMetricsPublisher.logging.interval=1s\n")
@@ -202,16 +190,11 @@ class MinifiContainer(FlowContainer):
         else:
             image = 'apacheminificpp:' + MinifiContainer.MINIFI_TAG_PREFIX + 
MinifiContainer.MINIFI_VERSION
 
-        ports = {}
-        if self.options.enable_prometheus or 
self.options.enable_prometheus_with_ssl:
-            ports = {'9936/tcp': 9936}
-
         self.client.containers.run(
             image,
             detach=True,
             name=self.name,
             network=self.network.name,
             entrypoint=self.command,
-            ports=ports,
             volumes=self.vols)
         logging.info('Added container \'%s\'', self.name)
diff --git a/docker/test/integration/cluster/containers/PrometheusContainer.py 
b/docker/test/integration/cluster/containers/PrometheusContainer.py
deleted file mode 100644
index bc1da33cb..000000000
--- a/docker/test/integration/cluster/containers/PrometheusContainer.py
+++ /dev/null
@@ -1,100 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-import logging
-import os
-import tempfile
-import docker.types
-
-from .Container import Container
-from OpenSSL import crypto
-from ssl_utils.SSL_cert_utils import make_cert_without_extended_usage
-
-
-class PrometheusContainer(Container):
-    def __init__(self, feature_context, name, vols, network, image_store, 
command=None, ssl=False):
-        engine = "prometheus-ssl" if ssl else "prometheus"
-        super().__init__(feature_context, name, engine, vols, network, 
image_store, command)
-        self.ssl = ssl
-        extra_ssl_settings = ""
-        if ssl:
-            prometheus_cert, prometheus_key = 
make_cert_without_extended_usage(f"prometheus-{feature_context.id}", 
feature_context.root_ca_cert, feature_context.root_ca_key)
-
-            self.root_ca_file = tempfile.NamedTemporaryFile(delete=False)
-            
self.root_ca_file.write(crypto.dump_certificate(type=crypto.FILETYPE_PEM, 
cert=feature_context.root_ca_cert))
-            self.root_ca_file.close()
-            os.chmod(self.root_ca_file.name, 0o644)
-
-            self.prometheus_cert_file = 
tempfile.NamedTemporaryFile(delete=False)
-            
self.prometheus_cert_file.write(crypto.dump_certificate(type=crypto.FILETYPE_PEM,
 cert=prometheus_cert))
-            self.prometheus_cert_file.close()
-            os.chmod(self.prometheus_cert_file.name, 0o644)
-
-            self.prometheus_key_file = 
tempfile.NamedTemporaryFile(delete=False)
-            
self.prometheus_key_file.write(crypto.dump_privatekey(type=crypto.FILETYPE_PEM, 
pkey=prometheus_key))
-            self.prometheus_key_file.close()
-            os.chmod(self.prometheus_key_file.name, 0o644)
-
-            extra_ssl_settings = """
-    scheme: https
-    tls_config:
-      ca_file: /etc/prometheus/certs/root-ca.pem
-"""
-
-        prometheus_yml_content = """
-global:
-  scrape_interval: 2s
-  evaluation_interval: 15s
-scrape_configs:
-  - job_name: "minifi"
-    static_configs:
-      - targets: ["minifi-cpp-flow-{feature_id}:9936"]
-{extra_ssl_settings}
-""".format(feature_id=self.feature_context.id, 
extra_ssl_settings=extra_ssl_settings)
-
-        self.yaml_file = tempfile.NamedTemporaryFile(delete=False)
-        self.yaml_file.write(prometheus_yml_content.encode())
-        self.yaml_file.close()
-        os.chmod(self.yaml_file.name, 0o644)
-
-    def get_startup_finished_log_entry(self):
-        return "Server is ready to receive web requests."
-
-    def deploy(self):
-        if not self.set_deployed():
-            return
-
-        logging.info('Creating and running Prometheus docker container...')
-
-        mounts = [docker.types.Mount(
-            type='bind',
-            source=self.yaml_file.name,
-            target='/etc/prometheus/prometheus.yml'
-        )]
-
-        if self.ssl:
-            mounts.append(docker.types.Mount(
-                type='bind',
-                source=self.root_ca_file.name,
-                target='/etc/prometheus/certs/root-ca.pem'
-            ))
-
-        self.client.containers.run(
-            image="prom/prometheus:v2.35.0",
-            detach=True,
-            name=self.name,
-            network=self.network.name,
-            ports={'9090/tcp': 9090},
-            mounts=mounts,
-            entrypoint=self.command)
diff --git a/docker/test/integration/features/MiNiFi_integration_test_driver.py 
b/docker/test/integration/features/MiNiFi_integration_test_driver.py
index a85c48bb7..e6bb48a60 100644
--- a/docker/test/integration/features/MiNiFi_integration_test_driver.py
+++ b/docker/test/integration/features/MiNiFi_integration_test_driver.py
@@ -303,15 +303,6 @@ class MiNiFi_integration_test:
     def check_azure_blob_and_snapshot_count(self, blob_and_snapshot_count, 
timeout_seconds):
         assert 
self.cluster.check_azure_blob_and_snapshot_count(blob_and_snapshot_count, 
timeout_seconds) or self.cluster.log_app_output()
 
-    def check_metric_class_on_prometheus(self, metric_class, timeout_seconds):
-        assert self.cluster.wait_for_metric_class_on_prometheus(metric_class, 
timeout_seconds) or self.cluster.log_app_output()
-
-    def check_processor_metric_on_prometheus(self, metric_class, 
timeout_seconds, processor_name):
-        assert 
self.cluster.wait_for_processor_metric_on_prometheus(metric_class, 
timeout_seconds, processor_name) or self.cluster.log_app_output()
-
-    def check_all_prometheus_metric_types_are_defined_once(self):
-        assert self.cluster.verify_all_metric_types_are_defined_once() or 
self.cluster.log_app_output()
-
     def check_if_peak_memory_usage_exceeded(self, minimum_peak_memory_usage: 
int, timeout_seconds: int) -> None:
         assert 
self.cluster.wait_for_peak_memory_usage_to_exceed(minimum_peak_memory_usage, 
timeout_seconds) or self.cluster.log_app_output()
 
@@ -339,12 +330,6 @@ class MiNiFi_integration_test:
     def set_ssl_context_properties_in_minifi(self):
         self.cluster.set_ssl_context_properties_in_minifi()
 
-    def enable_prometheus_in_minifi(self):
-        self.cluster.enable_prometheus_in_minifi()
-
-    def enable_prometheus_with_ssl_in_minifi(self):
-        self.cluster.enable_prometheus_with_ssl_in_minifi()
-
     def enable_sql_in_minifi(self):
         self.cluster.enable_sql_in_minifi()
 
diff --git a/docker/test/integration/features/steps/steps.py 
b/docker/test/integration/features/steps/steps.py
index 521679e6f..94e0117bb 100644
--- a/docker/test/integration/features/steps/steps.py
+++ b/docker/test/integration/features/steps/steps.py
@@ -370,21 +370,11 @@ def step_impl(context):
     context.test.enable_c2_in_minifi()
 
 
-@given("Prometheus is enabled in MiNiFi")
-def step_impl(context):
-    context.test.enable_prometheus_in_minifi()
-
-
 @given("log metrics publisher is enabled in MiNiFi")
 def step_impl(context):
     context.test.enable_log_metrics_publisher_in_minifi()
 
 
-@given("Prometheus with SSL is enabled in MiNiFi")
-def step_impl(context):
-    context.test.enable_prometheus_with_ssl_in_minifi()
-
-
 @given("OpenSSL FIPS mode is enabled in MiNiFi")
 def step_impl(context):
     context.test.enable_openssl_fips_mode_in_minifi()
@@ -773,33 +763,6 @@ def step_impl(context, minifi_container_name, log_pattern, 
duration):
     context.test.check_container_log_matches_regex(minifi_container_name, 
log_pattern, humanfriendly.parse_timespan(duration), count=1)
 
 
-# Prometheus
-@given("a Prometheus server is set up")
-def step_impl(context):
-    context.test.acquire_container(context=context, name="prometheus", 
engine="prometheus")
-
-
-@given("a Prometheus server is set up with SSL")
-def step_impl(context):
-    context.test.acquire_container(context=context, name="prometheus", 
engine="prometheus-ssl")
-
-
-@then("\"{metric_class}\" are published to the Prometheus server in less than 
{timeout_seconds:d} seconds")
-@then("\"{metric_class}\" is published to the Prometheus server in less than 
{timeout_seconds:d} seconds")
-def step_impl(context, metric_class, timeout_seconds):
-    context.test.check_metric_class_on_prometheus(metric_class, 
timeout_seconds)
-
-
-@then("\"{metric_class}\" processor metric is published to the Prometheus 
server in less than {timeout_seconds:d} seconds for \"{processor_name}\" 
processor")
-def step_impl(context, metric_class, timeout_seconds, processor_name):
-    context.test.check_processor_metric_on_prometheus(metric_class, 
timeout_seconds, processor_name)
-
-
-@then("all Prometheus metric types are only defined once")
-def step_impl(context):
-    context.test.check_all_prometheus_metric_types_are_defined_once()
-
-
 @given("SSL properties are set in MiNiFi")
 def step_impl(context):
     context.test.set_ssl_context_properties_in_minifi()
diff --git a/extensions/prometheus/tests/features/environment.py 
b/extensions/prometheus/tests/features/environment.py
new file mode 100644
index 000000000..6b7a9bbc0
--- /dev/null
+++ b/extensions/prometheus/tests/features/environment.py
@@ -0,0 +1,43 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from pathlib import Path
+from minifi_test_framework.containers.docker_image_builder import 
DockerImageBuilder
+from minifi_test_framework.core.hooks import common_before_scenario
+from minifi_test_framework.core.hooks import common_after_scenario
+
+
+def before_all(context):
+    check_log_lines_path = Path(__file__).resolve().parent / "resources" / 
"prometheus_checker.py"
+    check_log_lines_content = None
+    with open(check_log_lines_path, "rb") as f:
+        check_log_lines_content = f.read()
+    dockerfile = """
+        FROM python:3.13-slim-bookworm
+        RUN pip install requests prometheus-api-client==0.7.0
+        COPY prometheus_checker.py /scripts/prometheus_checker.py"""
+    prometheus_helper_builder = DockerImageBuilder(
+        image_tag="minifi-prometheus-helper:latest",
+        dockerfile_content=dockerfile,
+        files_on_context={"prometheus_checker.py": check_log_lines_content}
+    )
+    prometheus_helper_builder.build()
+
+
+def before_scenario(context, scenario):
+    common_before_scenario(context, scenario)
+
+
+def after_scenario(context, scenario):
+    common_after_scenario(context, scenario)
diff --git a/docker/test/integration/features/prometheus.feature 
b/extensions/prometheus/tests/features/prometheus.feature
similarity index 96%
rename from docker/test/integration/features/prometheus.feature
rename to extensions/prometheus/tests/features/prometheus.feature
index f336990b5..0457db5f5 100644
--- a/docker/test/integration/features/prometheus.feature
+++ b/extensions/prometheus/tests/features/prometheus.feature
@@ -16,14 +16,13 @@
 @ENABLE_PROMETHEUS
 Feature: MiNiFi can publish metrics to Prometheus server
 
-  Background:
-    Given the content of "/tmp/output" is monitored
-
   Scenario: Published metrics are scraped by Prometheus server
     Given a GetFile processor with the name "GetFile1" and the "Input 
Directory" property set to "/tmp/input"
     And a file with the content "test" is present in "/tmp/input"
     And a PutFile processor with the "Directory" property set to "/tmp/output"
+    And PutFile is EVENT_DRIVEN
     And the "success" relationship of the GetFile1 processor is connected to 
the PutFile
+    And PutFile's success relationship is auto-terminated
     And Prometheus is enabled in MiNiFi
     And a Prometheus server is set up
     When all instances start up
@@ -40,7 +39,9 @@ Feature: MiNiFi can publish metrics to Prometheus server
     Given a GetFile processor with the name "GetFile1" and the "Input 
Directory" property set to "/tmp/input"
     And a file with the content "test" is present in "/tmp/input"
     And a PutFile processor with the "Directory" property set to "/tmp/output"
+    And PutFile is EVENT_DRIVEN
     And the "success" relationship of the GetFile1 processor is connected to 
the PutFile
+    And PutFile's success relationship is auto-terminated
     And Prometheus with SSL is enabled in MiNiFi
     And a Prometheus server is set up with SSL
     When all instances start up
@@ -57,7 +58,6 @@ Feature: MiNiFi can publish metrics to Prometheus server
     And a GetFile processor with the name "GetFile2" and the "Input Directory" 
property set to "/tmp/input"
     And the "Keep Source File" property of the GetFile1 processor is set to 
"true"
     And the "Keep Source File" property of the GetFile2 processor is set to 
"true"
-    And "GetFile2" processor is a start node
     And a file with the content "test" is present in "/tmp/input"
     And a PutFile processor with the "Directory" property set to "/tmp/output"
     And the "success" relationship of the GetFile1 processor is connected to 
the PutFile
diff --git 
a/extensions/prometheus/tests/features/resources/prometheus_checker.py 
b/extensions/prometheus/tests/features/resources/prometheus_checker.py
new file mode 100644
index 000000000..e37662890
--- /dev/null
+++ b/extensions/prometheus/tests/features/resources/prometheus_checker.py
@@ -0,0 +1,156 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+import argparse
+import sys
+import requests
+from prometheus_api_client import PrometheusConnect
+
+
+class PrometheusChecker:
+    def __init__(self, prometheus_host: str):
+        self.prometheus_client = 
PrometheusConnect(url=f"http://{prometheus_host}:9090", disable_ssl=True)
+
+    def verify_processor_metric(self, metric_class: str, processor_name: str) 
-> bool:
+        if metric_class == "GetFileMetrics":
+            return self._verify_getfile_metrics(metric_class, processor_name)
+        else:
+            return self._verify_general_processor_metrics(metric_class, 
processor_name)
+
+    def verify_metric_class(self, metric_class: str) -> bool:
+        if metric_class == "RepositoryMetrics":
+            return self._verify_repository_metrics()
+        elif metric_class == "QueueMetrics":
+            return self._verify_queue_metrics()
+        elif metric_class == "FlowInformation":
+            return self._verify_flow_information_metrics()
+        elif metric_class == "DeviceInfoNode":
+            return self._verify_device_info_node_metrics()
+        elif metric_class == "AgentStatus":
+            return self._verify_agent_status_metrics()
+        else:
+            raise Exception("Metric class '%s' verification is not 
implemented" % metric_class)
+
+    def _verify_repository_metrics(self) -> bool:
+        label_list = [{'repository_name': 'provenance'}, {'repository_name': 
'flowfile'}, {'repository_name': 'content'}]
+        # Only flowfile and content repositories are using rocksdb by default, 
so rocksdb specific metrics are only present there
+        return all((self._verify_metrics_exist(['minifi_is_running', 
'minifi_is_full', 'minifi_repository_size_bytes', 
'minifi_max_repository_size_bytes', 'minifi_repository_entry_count'], 
'RepositoryMetrics', labels) for labels in label_list)) and \
+            
all((self._verify_metric_larger_than_zero('minifi_repository_size_bytes', 
'RepositoryMetrics', labels) for labels in label_list[1:3])) and \
+            
all((self._verify_metrics_exist(['minifi_rocksdb_table_readers_size_bytes', 
'minifi_rocksdb_all_memory_tables_size_bytes'], 'RepositoryMetrics', labels) 
for labels in label_list[1:3]))
+
+    def _verify_queue_metrics(self) -> bool:
+        return self._verify_metrics_exist(['minifi_queue_data_size', 
'minifi_queue_data_size_max', 'minifi_queue_size', 'minifi_queue_size_max'], 
'QueueMetrics')
+
+    def _verify_general_processor_metrics(self, metric_class: str, 
processor_name: str) -> bool:
+        labels = {'processor_name': processor_name}
+        return 
self._verify_metrics_exist(['minifi_average_onTrigger_runtime_milliseconds', 
'minifi_last_onTrigger_runtime_milliseconds',
+                                           
'minifi_average_session_commit_runtime_milliseconds', 
'minifi_last_session_commit_runtime_milliseconds',
+                                           'minifi_incoming_flow_files', 
'minifi_incoming_bytes', 'minifi_bytes_read', 'minifi_bytes_written'], 
metric_class, labels) and \
+            
self._verify_metrics_larger_than_zero(['minifi_onTrigger_invocations', 
'minifi_transferred_flow_files', 'minifi_transferred_to_success',
+                                                   'minifi_transferred_bytes', 
'minifi_processing_nanos'],
+                                                  metric_class, labels)
+
+    def _verify_getfile_metrics(self, metric_class: str, processor_name: str) 
-> bool:
+        labels = {'processor_name': processor_name}
+        return self._verify_general_processor_metrics(metric_class, 
processor_name) and \
+            self._verify_metrics_exist(['minifi_input_bytes', 
'minifi_accepted_files'], metric_class, labels)
+
+    def _verify_flow_information_metrics(self) -> bool:
+        return self._verify_metrics_exist(['minifi_queue_data_size', 
'minifi_queue_data_size_max', 'minifi_queue_size', 'minifi_queue_size_max',
+                                           'minifi_bytes_read', 
'minifi_bytes_written', 'minifi_flow_files_in', 'minifi_flow_files_out', 
'minifi_bytes_in', 'minifi_bytes_out',
+                                           'minifi_invocations', 
'minifi_processing_nanos'], 'FlowInformation') and \
+            self._verify_metric_exists('minifi_is_running', 'FlowInformation', 
{'component_name': 'FlowController'})
+
+    def _verify_device_info_node_metrics(self) -> bool:
+        return self._verify_metrics_exist(['minifi_physical_mem', 
'minifi_memory_usage', 'minifi_cpu_utilization', 'minifi_cpu_load_average'], 
'DeviceInfoNode')
+
+    def _verify_agent_status_metrics(self) -> bool:
+        label_list = [{'repository_name': 'flowfile'}, {'repository_name': 
'content'}]
+        # Only flowfile and content repositories are using rocksdb by default, 
so rocksdb specific metrics are only present there
+        for labels in label_list:
+            if not (self._verify_metric_exists('minifi_is_running', 
'AgentStatus', labels)
+                    and self._verify_metric_exists('minifi_is_full', 
'AgentStatus', labels)
+                    and 
self._verify_metric_exists('minifi_max_repository_size_bytes', 'AgentStatus', 
labels)
+                    and 
self._verify_metric_larger_than_zero('minifi_repository_size_bytes', 
'AgentStatus', labels)
+                    and 
self._verify_metric_exists('minifi_repository_entry_count', 'AgentStatus', 
labels)
+                    and 
self._verify_metric_exists('minifi_rocksdb_table_readers_size_bytes', 
'AgentStatus', labels)
+                    and 
self._verify_metric_exists('minifi_rocksdb_all_memory_tables_size_bytes', 
'AgentStatus', labels)):
+                return False
+
+        # provenance repository is NoOpRepository by default which has zero 
size
+        if not (self._verify_metric_exists('minifi_is_running', 'AgentStatus', 
{'repository_name': 'provenance'})
+                and self._verify_metric_exists('minifi_is_full', 
'AgentStatus', {'repository_name': 'provenance'})
+                and 
self._verify_metric_exists('minifi_max_repository_size_bytes', 'AgentStatus', 
{'repository_name': 'provenance'})
+                and self._verify_metric_exists('minifi_repository_size_bytes', 
'AgentStatus', {'repository_name': 'provenance'})
+                and 
self._verify_metric_exists('minifi_repository_entry_count', 'AgentStatus', 
{'repository_name': 'provenance'})):
+            return False
+        return self._verify_metric_exists('minifi_uptime_milliseconds', 
'AgentStatus') and \
+            self._verify_metric_exists('minifi_agent_memory_usage_bytes', 
'AgentStatus') and \
+            self._verify_metric_exists('minifi_agent_cpu_utilization', 
'AgentStatus')
+
+    def _verify_metric_exists(self, metric_name: str, metric_class: str, 
labels: dict = {}) -> bool:
+        labels['metric_class'] = metric_class
+        labels['agent_identifier'] = "Agent1"
+        return 
len(self.prometheus_client.get_current_metric_value(metric_name=metric_name, 
label_config=labels)) > 0
+
+    def _verify_metrics_exist(self, metric_names: list, metric_class: str, 
labels: dict = {}) -> bool:
+        return all((self._verify_metric_exists(metric_name, metric_class, 
labels) for metric_name in metric_names))
+
+    def _verify_metric_larger_than_zero(self, metric_name: str, metric_class: 
str, labels: dict = {}) -> bool:
+        labels['metric_class'] = metric_class
+        result = 
self.prometheus_client.get_current_metric_value(metric_name=metric_name, 
label_config=labels)
+        return len(result) > 0 and int(result[0]['value'][1]) > 0
+
+    def _verify_metrics_larger_than_zero(self, metric_names: list, 
metric_class: str, labels: dict = {}) -> bool:
+        return all((self._verify_metric_larger_than_zero(metric_name, 
metric_class, labels) for metric_name in metric_names))
+
+
+def verify_all_metric_types_are_defined_once(prometheus_target: str) -> bool:
+    response = requests.get(f"http://{prometheus_target}:9936/metrics")
+    if response.status_code < 200 or response.status_code >= 300:
+        return False
+
+    metric_types = set()
+    for line in response.text.split("\n"):
+        if line.startswith("# TYPE"):
+            metric_type = line.split(" ")[2]
+            if metric_type in metric_types:
+                return False
+            metric_types.add(metric_type)
+
+    return True
+
+
+if __name__ == "__main__":
+    parser = argparse.ArgumentParser(description='Prometheus Checker')
+    parser.add_argument('--verify-metrics-defined-once', type=str, 
required=False, help='Prometheus target to verify all defined metrics are 
unique')
+    parser.add_argument('--prometheus-host', type=str, required=False, 
help='Prometheus server host')
+    parser.add_argument('--metric-class', type=str, required=False, 
help='Metric class to verify')
+    parser.add_argument('--processor-name', type=str, required=False, 
help='Processor name to verify')
+
+    args = parser.parse_args()
+
+    checker = PrometheusChecker(args.prometheus_host)
+    if args.verify_metrics_defined_once:
+        if not 
verify_all_metric_types_are_defined_once(args.verify_metrics_defined_once):
+            sys.exit(1)
+    elif args.processor_name and args.metric_class:
+        if not checker.verify_processor_metric(args.metric_class, 
args.processor_name):
+            sys.exit(1)
+    elif args.metric_class:
+        if not checker.verify_metric_class(args.metric_class):
+            sys.exit(1)
+    else:
+        print("Either --metric-class or --verify-metrics-defined-once must be 
provided")
+        sys.exit(1)
diff --git a/extensions/prometheus/tests/features/steps/prometheus_container.py 
b/extensions/prometheus/tests/features/steps/prometheus_container.py
new file mode 100644
index 000000000..620014f45
--- /dev/null
+++ b/extensions/prometheus/tests/features/steps/prometheus_container.py
@@ -0,0 +1,92 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import logging
+from OpenSSL import crypto
+from minifi_test_framework.containers.container import Container
+from minifi_test_framework.core.helpers import wait_for_condition
+from minifi_test_framework.core.minifi_test_context import MinifiTestContext
+from minifi_test_framework.core.ssl_utils import 
make_cert_without_extended_usage
+from minifi_test_framework.containers.file import File
+
+
+class PrometheusContainer(Container):
+    def __init__(self, test_context: MinifiTestContext, ssl: bool = False):
+        super().__init__("prom/prometheus:v3.9.1", 
f"prometheus-{test_context.scenario_id}", test_context.network)
+        self.scenario_id = test_context.scenario_id
+        extra_ssl_settings = ""
+        if ssl:
+            prometheus_cert, prometheus_key = 
make_cert_without_extended_usage(self.container_name, 
test_context.root_ca_cert, test_context.root_ca_key)
+
+            root_ca_content = 
crypto.dump_certificate(type=crypto.FILETYPE_PEM, 
cert=test_context.root_ca_cert)
+            self.files.append(File("/etc/prometheus/certs/root-ca.pem", 
root_ca_content, permissions=0o644))
+
+            prometheus_cert_content = 
crypto.dump_certificate(type=crypto.FILETYPE_PEM, cert=prometheus_cert)
+            self.files.append(File("/etc/prometheus/certs/prometheus.crt", 
prometheus_cert_content, permissions=0o644))
+
+            prometheus_key_content = 
crypto.dump_privatekey(type=crypto.FILETYPE_PEM, pkey=prometheus_key)
+            self.files.append(File("/etc/prometheus/certs/prometheus.key", 
prometheus_key_content, permissions=0o644))
+
+            extra_ssl_settings = """
+    scheme: https
+    tls_config:
+      ca_file: /etc/prometheus/certs/root-ca.pem
+"""
+        prometheus_yml_content = """
+global:
+  scrape_interval: 2s
+  evaluation_interval: 15s
+scrape_configs:
+  - job_name: "minifi"
+    static_configs:
+      - targets: ["minifi-primary-{scenario_id}:9936"]
+{extra_ssl_settings}
+""".format(scenario_id=test_context.scenario_id, 
extra_ssl_settings=extra_ssl_settings)
+
+        self.files.append(File("/etc/prometheus/prometheus.yml", 
prometheus_yml_content, permissions=0o666))
+
+    def deploy(self):
+        super().deploy()
+        finished_str = "Server is ready to receive web requests."
+        return wait_for_condition(
+            condition=lambda: finished_str in self.get_logs(),
+            timeout_seconds=60,
+            bail_condition=lambda: self.exited,
+            context=None
+        )
+
+    def check_metric_class_on_prometheus(self, metric_class: str) -> bool:
+        try:
+            self.client.containers.run("minifi-prometheus-helper:latest", 
["python", "/scripts/prometheus_checker.py", "--prometheus-host", 
self.container_name, "--metric-class", metric_class], remove=True, 
network=self.network.name)
+            return True
+        except Exception:
+            logging.error(f"Failed to check metric class {metric_class} on 
Prometheus")
+            return False
+
+    def check_processor_metric_on_prometheus(self, metric_class: str, 
processor_name: str) -> bool:
+        try:
+            self.client.containers.run("minifi-prometheus-helper:latest", 
["python", "/scripts/prometheus_checker.py", "--prometheus-host", 
self.container_name, "--metric-class", metric_class, "--processor-name", 
processor_name], remove=True, network=self.network.name)
+            return True
+        except Exception:
+            logging.error(f"Failed to check processor metric class 
{metric_class} for processor {processor_name} on Prometheus")
+            return False
+
+    def check_all_metric_types_defined_once(self) -> bool:
+        try:
+            self.client.containers.run("minifi-prometheus-helper:latest", 
["python", "/scripts/prometheus_checker.py", "--verify-metrics-defined-once", 
f"minifi-primary-{self.scenario_id}"], remove=True, network=self.network.name)
+            return True
+        except Exception:
+            logging.error("Failed check that all metric types are defined once 
on Prometheus")
+            return False
diff --git a/extensions/prometheus/tests/features/steps/steps.py 
b/extensions/prometheus/tests/features/steps/steps.py
new file mode 100644
index 000000000..a10a4a26b
--- /dev/null
+++ b/extensions/prometheus/tests/features/steps/steps.py
@@ -0,0 +1,73 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from behave import step, then, given
+
+from minifi_test_framework.steps import checking_steps        # noqa: F401
+from minifi_test_framework.steps import configuration_steps   # noqa: F401
+from minifi_test_framework.steps import core_steps            # noqa: F401
+from minifi_test_framework.steps import flow_building_steps   # noqa: F401
+from minifi_test_framework.core.helpers import wait_for_condition
+from minifi_test_framework.core.minifi_test_context import MinifiTestContext
+from prometheus_container import PrometheusContainer
+
+
+@step('a Prometheus server is set up')
+def step_impl(context: MinifiTestContext):
+    context.containers["prometheus"] = PrometheusContainer(context)
+
+
+@step("a Prometheus server is set up with SSL")
+def step_impl(context: MinifiTestContext):
+    context.containers["prometheus"] = PrometheusContainer(context, ssl=True)
+
+
+@then("\"{metric_class}\" are published to the Prometheus server in less than 
{timeout_seconds:d} seconds")
+@then("\"{metric_class}\" is published to the Prometheus server in less than 
{timeout_seconds:d} seconds")
+def step_impl(context: MinifiTestContext, metric_class: str, timeout_seconds: 
int):
+    assert wait_for_condition(
+        condition=lambda: 
context.containers["prometheus"].check_metric_class_on_prometheus(metric_class),
+        timeout_seconds=timeout_seconds, bail_condition=lambda: 
context.containers["prometheus"].exited, context=context)
+
+
+@then("\"{metric_class}\" processor metric is published to the Prometheus 
server in less than {timeout_seconds:d} seconds for \"{processor_name}\" 
processor")
+def step_impl(context: MinifiTestContext, metric_class: str, timeout_seconds: 
int, processor_name: str):
+    assert wait_for_condition(
+        condition=lambda: 
context.containers["prometheus"].check_processor_metric_on_prometheus(metric_class,
 processor_name),
+        timeout_seconds=timeout_seconds, bail_condition=lambda: 
context.containers["prometheus"].exited, context=context)
+
+
+@then("all Prometheus metric types are only defined once")
+def step_impl(context: MinifiTestContext):
+    assert 
context.containers["prometheus"].check_all_metric_types_defined_once()
+
+
+def _enable_prometheus(context: MinifiTestContext):
+    
context.get_or_create_default_minifi_container().set_property("nifi.metrics.publisher.agent.identifier",
 "Agent1")
+    
context.get_or_create_default_minifi_container().set_property("nifi.metrics.publisher.PrometheusMetricsPublisher.port",
 "9936")
+    
context.get_or_create_default_minifi_container().set_property("nifi.metrics.publisher.PrometheusMetricsPublisher.metrics",
+                                                                  
"RepositoryMetrics,QueueMetrics,PutFileMetrics,processorMetrics/Get.*,FlowInformation,DeviceInfoNode,AgentStatus")
+    
context.get_or_create_default_minifi_container().set_property("nifi.metrics.publisher.class",
 "PrometheusMetricsPublisher")
+
+
+@given("Prometheus is enabled in MiNiFi")
+def step_impl(context: MinifiTestContext):
+    _enable_prometheus(context)
+
+
+@given("Prometheus with SSL is enabled in MiNiFi")
+def step_impl(context: MinifiTestContext):
+    _enable_prometheus(context)
+    
context.get_or_create_default_minifi_container().set_property("nifi.metrics.publisher.PrometheusMetricsPublisher.certificate",
 "/tmp/resources/minifi_merged_cert.crt")
+    
context.get_or_create_default_minifi_container().set_property("nifi.metrics.publisher.PrometheusMetricsPublisher.ca.certificate",
 "/tmp/resources/root_ca.crt")

Reply via email to