This is an automated email from the ASF dual-hosted git repository.
bneradt pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/trafficserver.git
The following commit(s) were added to refs/heads/master by this push:
new 132e01c4ae stats_over_http: Add Prometheus format option (#12302)
132e01c4ae is described below
commit 132e01c4aea486c209fa5ea8e39cb34627442304
Author: Brian Neradt <[email protected]>
AuthorDate: Tue Jun 24 15:30:51 2025 -0500
stats_over_http: Add Prometheus format option (#12302)
This adds the Prometheus format as an optional output to the
stats_over_http plugin. This also adds /json, /csv, and /prometheus
path suffix support in case using the Accept header is problematic.
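For illustration only (not part of the commit), a minimal Python sketch of the two request styles described above; the host, port, and default /_stats path are assumptions:

    from urllib.request import Request, urlopen

    base = "http://127.0.0.1:8080/_stats"  # assumed ATS host/port and default stats path

    # Negotiate the format with the Accept header that Prometheus scrapers send.
    req = Request(base, headers={"Accept": "text/plain; version=0.0.4"})
    print(urlopen(req).read().decode("utf-8"))

    # Or force the format with a path suffix, which overrides any Accept header.
    print(urlopen(base + "/prometheus").read().decode("utf-8"))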
---
doc/admin-guide/plugins/stats_over_http.en.rst | 35 ++-
plugins/stats_over_http/stats_over_http.cc | 278 +++++++++++++++------
tests/Pipfile | 3 +
.../gold/stats_over_http_0_stdout.gold | 4 -
...stderr.gold => stats_over_http_csv_stderr.gold} | 4 +-
...tderr.gold => stats_over_http_json_stderr.gold} | 2 +-
...gold => stats_over_http_prometheus_stderr.gold} | 4 +-
.../stats_over_http/prometheus_stats_ingester.py | 117 +++++++++
.../stats_over_http/stats_over_http.test.py | 101 +++++++-
9 files changed, 454 insertions(+), 94 deletions(-)
diff --git a/doc/admin-guide/plugins/stats_over_http.en.rst b/doc/admin-guide/plugins/stats_over_http.en.rst
index e4981bd374..93d8242234 100644
--- a/doc/admin-guide/plugins/stats_over_http.en.rst
+++ b/doc/admin-guide/plugins/stats_over_http.en.rst
@@ -105,9 +105,36 @@ if you wish to have it in CSV format you can do so by passing an ``Accept`` head
.. option:: Accept: text/csv
-In either case the ``Content-Type`` header returned by stats_over_http.so will reflect
-the content that has been returned, either ``text/json`` or ``text/csv``.
+Prometheus formatted output is also supported via the ``Accept`` header:
-.. option:: Accept-encoding: gzip, br
+.. option:: Accept: text/plain; version=0.0.4
+
+Alternatively, the output format can be specified as a suffix to the configured
+path in the HTTP request target. The supported suffixes are ``/json``,
+``/csv``, and ``/prometheus``. For example, if the path is set to ``/_stats``
+(the default), you can access the stats in CSV format by using the URL::
+
+ http://host:port/_stats/csv
+
+The Prometheus format can be requested by using the URL::
+
+ http://host:port/_stats/prometheus
+
+The JSON format is the default, but you can also access it explicitly by using the URL::
-Stats over http also accepts returning data in gzip or br compressed format
+ http://host:port/_stats/json
+
+Note that using a path suffix overrides any ``Accept`` header. Thus if you
+specify a path suffix, the plugin will return the data in that format regardless of
+the ``Accept`` header.
+
+In either case the ``Content-Type`` header returned by ``stats_over_http.so`` will
+reflect the content that has been returned: ``text/json``, ``text/csv``, or
+``text/plain; version=0.0.4; charset=utf-8`` for JSON, CSV, and Prometheus
+formats respectively.
+
+Stats over HTTP can also return data in gzip or br compressed format, per the
+``Accept-encoding`` header. If the header is present, the plugin will return the
+data in the specified encoding, for example:
+
+.. option:: Accept-encoding: gzip, br
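For orientation, the Prometheus output is the standard plain-text exposition format, one metric per line, with record names sanitized for Prometheus (illustrative sample, not captured from a real run):

    proxy_process_http_delete_requests 0
    current_time_epoch_ms <milliseconds since epoch>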
diff --git a/plugins/stats_over_http/stats_over_http.cc b/plugins/stats_over_http/stats_over_http.cc
index 07d83b3e07..cc20925202 100644
--- a/plugins/stats_over_http/stats_over_http.cc
+++ b/plugins/stats_over_http/stats_over_http.cc
@@ -24,31 +24,29 @@
/* stats.c: expose traffic server stats over http
*/
-#include <stdio.h>
-#include <stdlib.h>
-#include <stdbool.h>
-#include <ctype.h>
-#include <limits.h>
-#include <ts/ts.h>
-#include <string.h>
-#include <inttypes.h>
+#include <arpa/inet.h>
+#include <cctype>
+#include <chrono>
+#include <cinttypes>
+#include <cstdio>
+#include <cstdlib>
+#include <cstring>
+#include <ctime>
+#include <fstream>
#include <getopt.h>
+#include <netinet/in.h>
+#include <string>
+#include <string_view>
#include <sys/stat.h>
-#include <time.h>
+#include <ts/ts.h>
#include <unistd.h>
-#include <netinet/in.h>
-#include <arpa/inet.h>
#include <zlib.h>
-#include <fstream>
-#include <chrono>
#include <ts/remap.h>
-
-#include "swoc/swoc_ip.h"
-
+#include "swoc/TextView.h"
+#include "tscore/ink_config.h"
#include <tsutil/ts_ip.h>
-#include "tscore/ink_config.h"
#if HAVE_BROTLI_ENCODE_H
#include <brotli/encode.h>
#endif
@@ -105,8 +103,8 @@ struct config_holder_t {
config_t *config;
};
-enum output_format { JSON_OUTPUT, CSV_OUTPUT };
-enum encoding_format { NONE, DEFLATE, GZIP, BR };
+enum class output_format_t { JSON_OUTPUT, CSV_OUTPUT, PROMETHEUS_OUTPUT };
+enum class encoding_format_t { NONE, DEFLATE, GZIP, BR };
int configReloadRequests = 0;
int configReloads = 0;
@@ -141,11 +139,11 @@ struct stats_state {
TSIOBuffer resp_buffer;
TSIOBufferReader resp_reader;
- int output_bytes;
- int body_written;
- output_format output;
- encoding_format encoding;
- z_stream zstrm;
+ int output_bytes;
+ int body_written;
+ output_format_t output_format;
+ encoding_format_t encoding;
+ z_stream zstrm;
#if HAVE_BROTLI_ENCODE_H
b_stream bstrm;
#endif
@@ -160,7 +158,7 @@ nstr(const char *s)
}
#if HAVE_BROTLI_ENCODE_H
-encoding_format
+encoding_format_t
init_br(stats_state *my_state)
{
my_state->bstrm.br = nullptr;
@@ -168,7 +166,7 @@ init_br(stats_state *my_state)
my_state->bstrm.br = BrotliEncoderCreateInstance(nullptr, nullptr, nullptr);
if (!my_state->bstrm.br) {
Dbg(dbg_ctl, "Brotli Encoder Instance Failed");
- return NONE;
+ return encoding_format_t::NONE;
}
BrotliEncoderSetParameter(my_state->bstrm.br, BROTLI_PARAM_QUALITY, BROTLI_COMPRESSION_LEVEL);
BrotliEncoderSetParameter(my_state->bstrm.br, BROTLI_PARAM_LGWIN, BROTLI_LGW);
@@ -178,7 +176,7 @@ init_br(stats_state *my_state)
my_state->bstrm.next_out = nullptr;
my_state->bstrm.avail_out = 0;
my_state->bstrm.total_out = 0;
- return BR;
+ return encoding_format_t::BR;
}
#endif
@@ -191,7 +189,7 @@ ms_since_epoch()
}
} // namespace
-encoding_format
+encoding_format_t
init_gzip(stats_state *my_state, int mode)
{
my_state->zstrm.next_in = Z_NULL;
@@ -207,16 +205,16 @@ init_gzip(stats_state *my_state, int mode)
int err = deflateInit2(&my_state->zstrm, ZLIB_COMPRESSION_LEVEL, Z_DEFLATED, mode, ZLIB_MEMLEVEL, Z_DEFAULT_STRATEGY);
if (err != Z_OK) {
Dbg(dbg_ctl, "gzip initialization failed");
- return NONE;
+ return encoding_format_t::NONE;
} else {
Dbg(dbg_ctl, "gzip initialized successfully");
if (mode == GZIP_MODE) {
- return GZIP;
+ return encoding_format_t::GZIP;
} else if (mode == DEFLATE_MODE) {
- return DEFLATE;
+ return encoding_format_t::DEFLATE;
}
}
- return NONE;
+ return encoding_format_t::NONE;
}
static void
@@ -270,37 +268,55 @@ static const char RESP_HEADER_CSV_DEFLATE[] =
"HTTP/1.0 200 OK\r\nContent-Type: text/csv\r\nContent-Encoding:
deflate\r\nCache-Control: no-cache\r\n\r\n";
static const char RESP_HEADER_CSV_BR[] =
"HTTP/1.0 200 OK\r\nContent-Type: text/csv\r\nContent-Encoding:
br\r\nCache-Control: no-cache\r\n\r\n";
+static const char RESP_HEADER_PROMETHEUS[] =
+ "HTTP/1.0 200 OK\r\nContent-Type: text/plain; version=0.0.4;
charset=utf-8\r\nCache-Control: no-cache\r\n\r\n";
+static const char RESP_HEADER_PROMETHEUS_GZIP[] = "HTTP/1.0 200
OK\r\nContent-Type: text/plain; version=0.0.4; "
+
"charset=utf-8\r\nContent-Encoding: gzip\r\nCache-Control: no-cache\r\n\r\n";
+static const char RESP_HEADER_PROMETHEUS_DEFLATE[] =
+ "HTTP/1.0 200 OK\r\nContent-Type: text/plain; version=0.0.4;
charset=utf-8\r\nContent-Encoding: deflate\r\nCache-Control: "
+ "no-cache\r\n\r\n";
+static const char RESP_HEADER_PROMETHEUS_BR[] = "HTTP/1.0 200
OK\r\nContent-Type: text/plain; version=0.0.4; "
+
"charset=utf-8\r\nContent-Encoding: br\r\nCache-Control: no-cache\r\n\r\n";
static int
stats_add_resp_header(stats_state *my_state)
{
- switch (my_state->output) {
- case JSON_OUTPUT:
- if (my_state->encoding == GZIP) {
+ switch (my_state->output_format) {
+ case output_format_t::JSON_OUTPUT:
+ if (my_state->encoding == encoding_format_t::GZIP) {
return stats_add_data_to_resp_buffer(RESP_HEADER_JSON_GZIP, my_state);
- } else if (my_state->encoding == DEFLATE) {
+ } else if (my_state->encoding == encoding_format_t::DEFLATE) {
return stats_add_data_to_resp_buffer(RESP_HEADER_JSON_DEFLATE, my_state);
- } else if (my_state->encoding == BR) {
+ } else if (my_state->encoding == encoding_format_t::BR) {
return stats_add_data_to_resp_buffer(RESP_HEADER_JSON_BR, my_state);
} else {
return stats_add_data_to_resp_buffer(RESP_HEADER_JSON, my_state);
}
break;
- case CSV_OUTPUT:
- if (my_state->encoding == GZIP) {
+ case output_format_t::CSV_OUTPUT:
+ if (my_state->encoding == encoding_format_t::GZIP) {
return stats_add_data_to_resp_buffer(RESP_HEADER_CSV_GZIP, my_state);
- } else if (my_state->encoding == DEFLATE) {
+ } else if (my_state->encoding == encoding_format_t::DEFLATE) {
return stats_add_data_to_resp_buffer(RESP_HEADER_CSV_DEFLATE, my_state);
- } else if (my_state->encoding == BR) {
+ } else if (my_state->encoding == encoding_format_t::BR) {
return stats_add_data_to_resp_buffer(RESP_HEADER_CSV_BR, my_state);
} else {
return stats_add_data_to_resp_buffer(RESP_HEADER_CSV, my_state);
}
break;
- default:
- TSError("stats_add_resp_header: Unknown output format");
+ case output_format_t::PROMETHEUS_OUTPUT:
+ if (my_state->encoding == encoding_format_t::GZIP) {
+ return stats_add_data_to_resp_buffer(RESP_HEADER_PROMETHEUS_GZIP, my_state);
+ } else if (my_state->encoding == encoding_format_t::DEFLATE) {
+ return stats_add_data_to_resp_buffer(RESP_HEADER_PROMETHEUS_DEFLATE, my_state);
+ } else if (my_state->encoding == encoding_format_t::BR) {
+ return stats_add_data_to_resp_buffer(RESP_HEADER_PROMETHEUS_BR, my_state);
+ } else {
+ return stats_add_data_to_resp_buffer(RESP_HEADER_PROMETHEUS, my_state);
+ }
break;
}
+ // Not reached.
return stats_add_data_to_resp_buffer(RESP_HEADER_JSON, my_state);
}
@@ -326,6 +342,10 @@ stats_process_read(TSCont contp, TSEvent event, stats_state *my_state)
}
#define APPEND(a) my_state->output_bytes += stats_add_data_to_resp_buffer(a, my_state)
+
+//-----------------------------------------------------------------------------
+// JSON Formatters
+//-----------------------------------------------------------------------------
#define APPEND_STAT_JSON(a, fmt, v) \
do { \
char b[256]; \
@@ -346,6 +366,9 @@ stats_process_read(TSCont contp, TSEvent event, stats_state *my_state)
} \
} while (0)
+//-----------------------------------------------------------------------------
+// CSV Formatters
+//-----------------------------------------------------------------------------
#define APPEND_STAT_CSV(a, fmt, v) \
do { \
char b[256]; \
@@ -360,6 +383,18 @@ stats_process_read(TSCont contp, TSEvent event, stats_state *my_state)
} \
} while (0)
+//-----------------------------------------------------------------------------
+// Prometheus Formatters
+//-----------------------------------------------------------------------------
+// Note that Prometheus only supports numeric types.
+#define APPEND_STAT_PROMETHEUS_NUMERIC(a, fmt, v) \
+ do { \
+ char b[256]; \
+ if (snprintf(b, sizeof(b), "%s " fmt "\n", a, v) < (int)sizeof(b)) { \
+ APPEND(b); \
+ } \
+ } while (0)
+
// This wraps uint64_t values to the int64_t range to fit into a Java long. Java 8 has an unsigned long which
// can interoperate with a full uint64_t, but it's unlikely that much of the ecosystem supports that yet.
static uint64_t
@@ -421,6 +456,49 @@ csv_out_stat(TSRecordType /* rec_type ATS_UNUSED */, void *edata, int /* registe
}
}
+/** Replace characters offensive to Prometheus with '_'.
+ * Prometheus is particular about metric names.
+ * @param[in] name The metric name to sanitize.
+ * @return A sanitized metric name.
+ */
+static std::string
+sanitize_metric_name_for_prometheus(std::string_view name)
+{
+ std::string sanitized_name(name);
+ // Convert certain characters that Prometheus doesn't like to '_'.
+ for (auto &c : sanitized_name) {
+ if (c == '.' || c == '+' || c == '-') {
+ c = '_';
+ }
+ }
+ return sanitized_name;
+}
+
+static void
+prometheus_out_stat(TSRecordType /* rec_type ATS_UNUSED */, void *edata, int /* registered ATS_UNUSED */, const char *name,
+ TSRecordDataType data_type, TSRecordData *datum)
+{
+ stats_state *my_state = static_cast<stats_state *>(edata);
+ std::string sanitized_name = sanitize_metric_name_for_prometheus(name);
+ switch (data_type) {
+ case TS_RECORDDATATYPE_COUNTER:
+ APPEND_STAT_PROMETHEUS_NUMERIC(sanitized_name.c_str(), "%" PRIu64, wrap_unsigned_counter(datum->rec_counter));
+ break;
+ case TS_RECORDDATATYPE_INT:
+ APPEND_STAT_PROMETHEUS_NUMERIC(sanitized_name.c_str(), "%" PRIu64, wrap_unsigned_counter(datum->rec_int));
+ break;
+ case TS_RECORDDATATYPE_FLOAT:
+ APPEND_STAT_PROMETHEUS_NUMERIC(sanitized_name.c_str(), "%f", datum->rec_float);
+ break;
+ case TS_RECORDDATATYPE_STRING:
+ Dbg(dbg_ctl, "Prometheus does not support string values, skipping: %s", sanitized_name.c_str());
+ break;
+ default:
+ Dbg(dbg_ctl, "unknown type for %s: %d", sanitized_name.c_str(), data_type);
+ break;
+ }
+}
+
static void
json_out_stats(stats_state *my_state)
{
@@ -511,29 +589,37 @@ csv_out_stats(stats_state *my_state)
APPEND_STAT_CSV("version", "%s", version);
}
+static void
+prometheus_out_stats(stats_state *my_state)
+{
+ TSRecordDump((TSRecordType)(TS_RECORDTYPE_PLUGIN | TS_RECORDTYPE_NODE | TS_RECORDTYPE_PROCESS), prometheus_out_stat, my_state);
+ APPEND_STAT_PROMETHEUS_NUMERIC("current_time_epoch_ms", "%" PRIu64, ms_since_epoch());
+ // No version printed, since string stats are not supported by Prometheus.
+}
+
static void
stats_process_write(TSCont contp, TSEvent event, stats_state *my_state)
{
if (event == TS_EVENT_VCONN_WRITE_READY) {
if (my_state->body_written == 0) {
my_state->body_written = 1;
- switch (my_state->output) {
- case JSON_OUTPUT:
+ switch (my_state->output_format) {
+ case output_format_t::JSON_OUTPUT:
json_out_stats(my_state);
break;
- case CSV_OUTPUT:
+ case output_format_t::CSV_OUTPUT:
csv_out_stats(my_state);
break;
- default:
- TSError("stats_process_write: Unknown output type\n");
+ case output_format_t::PROMETHEUS_OUTPUT:
+ prometheus_out_stats(my_state);
break;
}
- if ((my_state->encoding == GZIP) || (my_state->encoding == DEFLATE)) {
+ if ((my_state->encoding == encoding_format_t::GZIP) || (my_state->encoding == encoding_format_t::DEFLATE)) {
gzip_out_stats(my_state);
}
#if HAVE_BROTLI_ENCODE_H
- else if (my_state->encoding == BR) {
+ else if (my_state->encoding == encoding_format_t::BR) {
br_out_stats(my_state);
}
#endif
@@ -569,15 +655,19 @@ stats_dostuff(TSCont contp, TSEvent event, void *edata)
static int
stats_origin(TSCont contp, TSEvent /* event ATS_UNUSED */, void *edata)
{
- TSCont icontp;
- stats_state *my_state;
- config_t *config;
- TSHttpTxn txnp = (TSHttpTxn)edata;
- TSMBuffer reqp;
- TSMLoc hdr_loc = nullptr, url_loc = nullptr, accept_field = nullptr, accept_encoding_field = nullptr;
- TSEvent reenable = TS_EVENT_HTTP_CONTINUE;
- int path_len = 0;
- const char *path = nullptr;
+ TSCont icontp;
+ stats_state *my_state;
+ config_t *config;
+ TSHttpTxn txnp = (TSHttpTxn)edata;
+ TSMBuffer reqp;
+ TSMLoc hdr_loc = nullptr, url_loc = nullptr, accept_field = nullptr, accept_encoding_field = nullptr;
+ TSEvent reenable = TS_EVENT_HTTP_CONTINUE;
+ int path_len = 0;
+ const char *path = nullptr;
+ swoc::TextView request_path;
+ swoc::TextView request_path_suffix;
+ output_format_t format_per_path = output_format_t::JSON_OUTPUT;
+ bool path_had_explicit_format = false;
Dbg(dbg_ctl, "in the read stuff");
config = get_config(contp);
@@ -593,12 +683,37 @@ stats_origin(TSCont contp, TSEvent /* event ATS_UNUSED */, void *edata)
path = TSUrlPathGet(reqp, url_loc, &path_len);
Dbg(dbg_ctl, "Path: %.*s", path_len, path);
- if (!(path_len != 0 && path_len == int(config->stats_path.length()) &&
- !memcmp(path, config->stats_path.c_str(), config->stats_path.length()))) {
- Dbg(dbg_ctl, "not this plugins path, saw: %.*s, looking for: %s", path_len, path, config->stats_path.c_str());
+ if (path_len == 0) {
+ Dbg(dbg_ctl, "Empty path");
+ goto notforme;
+ }
+
+ request_path = swoc::TextView{path, static_cast<size_t>(path_len)};
+ if (!request_path.starts_with(config->stats_path)) {
+ Dbg(dbg_ctl, "Not the configured path for stats: %.*s, expected: %s",
path_len, path, config->stats_path.c_str());
goto notforme;
}
+ if (request_path == config->stats_path) {
+ Dbg(dbg_ctl, "Exact match for stats path: %s", config->stats_path.c_str());
+ format_per_path = output_format_t::JSON_OUTPUT;
+ path_had_explicit_format = false;
+ } else {
+ request_path_suffix = request_path.remove_prefix(config->stats_path.length());
+ if (request_path_suffix == "/json") {
+ format_per_path = output_format_t::JSON_OUTPUT;
+ } else if (request_path_suffix == "/csv") {
+ format_per_path = output_format_t::CSV_OUTPUT;
+ } else if (request_path_suffix == "/prometheus") {
+ format_per_path = output_format_t::PROMETHEUS_OUTPUT;
+ } else {
+ Dbg(dbg_ctl, "Unknown suffix for stats path: %.*s",
static_cast<int>(request_path_suffix.length()),
+ request_path_suffix.data());
+ goto notforme;
+ }
+ path_had_explicit_format = true;
+ }
+
if (auto addr = TSHttpTxnClientAddrGet(txnp); !is_ipmap_allowed(config, addr)) {
Dbg(dbg_ctl, "not right ip");
TSHttpTxnStatusSet(txnp, TS_HTTP_STATUS_FORBIDDEN);
@@ -615,24 +730,35 @@ stats_origin(TSCont contp, TSEvent /* event ATS_UNUSED */, void *edata)
memset(my_state, 0, sizeof(*my_state));
icontp = TSContCreate(stats_dostuff, TSMutexCreate());
- accept_field = TSMimeHdrFieldFind(reqp, hdr_loc, TS_MIME_FIELD_ACCEPT, TS_MIME_LEN_ACCEPT);
- my_state->output = JSON_OUTPUT; // default to json output
- // accept header exists, use it to determine response type
- if (accept_field != TS_NULL_MLOC) {
- int len = -1;
- const char *str = TSMimeHdrFieldValueStringGet(reqp, hdr_loc, accept_field, -1, &len);
-
- // Parse the Accept header, default to JSON output unless its another supported format
- if (!strncasecmp(str, "text/csv", len)) {
- my_state->output = CSV_OUTPUT;
- } else {
- my_state->output = JSON_OUTPUT;
+ if (path_had_explicit_format) {
+ Dbg(dbg_ctl, "Path had explicit format, ignoring any Accept header: %s",
request_path_suffix.data());
+ my_state->output_format = format_per_path;
+ } else {
+ // Check for an Accept header to determine response type.
+ accept_field = TSMimeHdrFieldFind(reqp, hdr_loc, TS_MIME_FIELD_ACCEPT, TS_MIME_LEN_ACCEPT);
+ my_state->output_format = output_format_t::JSON_OUTPUT; // default to json output
+ // accept header exists, use it to determine response type
+ if (accept_field != TS_NULL_MLOC) {
+ int len = -1;
+ const char *str = TSMimeHdrFieldValueStringGet(reqp, hdr_loc, accept_field, -1, &len);
+
+ // Parse the Accept header, default to JSON output unless its another supported format
+ if (!strncasecmp(str, "text/csv", len)) {
+ Dbg(dbg_ctl, "Saw text/csv in accept header, sending CSV output.");
+ my_state->output_format = output_format_t::CSV_OUTPUT;
+ } else if (!strncasecmp(str, "text/plain; version=0.0.4", len)) {
+ Dbg(dbg_ctl, "Saw text/plain; version=0.0.4 in accept header, sending
Prometheus output.");
+ my_state->output_format = output_format_t::PROMETHEUS_OUTPUT;
+ } else {
+ Dbg(dbg_ctl, "Saw %.*s in accept header, defaulting to JSON output.",
len, str);
+ my_state->output_format = output_format_t::JSON_OUTPUT;
+ }
}
}
// Check for Accept Encoding and init
accept_encoding_field = TSMimeHdrFieldFind(reqp, hdr_loc, TS_MIME_FIELD_ACCEPT_ENCODING, TS_MIME_LEN_ACCEPT_ENCODING);
- my_state->encoding = NONE;
+ my_state->encoding = encoding_format_t::NONE;
if (accept_encoding_field != TS_NULL_MLOC) {
int len = -1;
const char *str = TSMimeHdrFieldValueStringGet(reqp, hdr_loc, accept_encoding_field, -1, &len);
@@ -650,7 +776,7 @@ stats_origin(TSCont contp, TSEvent /* event ATS_UNUSED */, void *edata)
}
#endif
else {
- my_state->encoding = NONE;
+ my_state->encoding = encoding_format_t::NONE;
}
}
Dbg(dbg_ctl, "Finished AE check");
diff --git a/tests/Pipfile b/tests/Pipfile
index 68a4841e36..c2cf352c3f 100644
--- a/tests/Pipfile
+++ b/tests/Pipfile
@@ -56,5 +56,8 @@ grpcio-tools = "*"
pyOpenSSL = "*"
eventlet = "*"
+# To test stats_over_http prometheus exporter.
+prometheus_client = "*"
+
[requires]
python_version = "3"
diff --git a/tests/gold_tests/pluginTest/stats_over_http/gold/stats_over_http_0_stdout.gold b/tests/gold_tests/pluginTest/stats_over_http/gold/stats_over_http_0_stdout.gold
deleted file mode 100644
index a9315fe22f..0000000000
--- a/tests/gold_tests/pluginTest/stats_over_http/gold/stats_over_http_0_stdout.gold
+++ /dev/null
@@ -1,4 +0,0 @@
-{ "global": {``
-``
- }
-}
diff --git a/tests/gold_tests/pluginTest/stats_over_http/gold/stats_over_http_0_stderr.gold b/tests/gold_tests/pluginTest/stats_over_http/gold/stats_over_http_csv_stderr.gold
similarity index 70%
copy from tests/gold_tests/pluginTest/stats_over_http/gold/stats_over_http_0_stderr.gold
copy to tests/gold_tests/pluginTest/stats_over_http/gold/stats_over_http_csv_stderr.gold
index 23cfc22ce1..45d326e398 100644
--- a/tests/gold_tests/pluginTest/stats_over_http/gold/stats_over_http_0_stderr.gold
+++ b/tests/gold_tests/pluginTest/stats_over_http/gold/stats_over_http_csv_stderr.gold
@@ -1,8 +1,8 @@
``
-> GET /_stats HTTP/1.1
+> GET /_stats``HTTP/1.1
``
< HTTP/1.1 200 OK
-< Content-Type: text/json
+< Content-Type: text/csv
< Cache-Control: no-cache
< Date:``
< Age:``
diff --git a/tests/gold_tests/pluginTest/stats_over_http/gold/stats_over_http_0_stderr.gold b/tests/gold_tests/pluginTest/stats_over_http/gold/stats_over_http_json_stderr.gold
similarity index 85%
copy from tests/gold_tests/pluginTest/stats_over_http/gold/stats_over_http_0_stderr.gold
copy to tests/gold_tests/pluginTest/stats_over_http/gold/stats_over_http_json_stderr.gold
index 23cfc22ce1..123f2229b6 100644
--- a/tests/gold_tests/pluginTest/stats_over_http/gold/stats_over_http_0_stderr.gold
+++ b/tests/gold_tests/pluginTest/stats_over_http/gold/stats_over_http_json_stderr.gold
@@ -1,5 +1,5 @@
``
-> GET /_stats HTTP/1.1
+> GET /_stats``HTTP/1.1
``
< HTTP/1.1 200 OK
< Content-Type: text/json
diff --git a/tests/gold_tests/pluginTest/stats_over_http/gold/stats_over_http_0_stderr.gold b/tests/gold_tests/pluginTest/stats_over_http/gold/stats_over_http_prometheus_stderr.gold
similarity index 59%
rename from tests/gold_tests/pluginTest/stats_over_http/gold/stats_over_http_0_stderr.gold
rename to tests/gold_tests/pluginTest/stats_over_http/gold/stats_over_http_prometheus_stderr.gold
index 23cfc22ce1..9caf215489 100644
--- a/tests/gold_tests/pluginTest/stats_over_http/gold/stats_over_http_0_stderr.gold
+++ b/tests/gold_tests/pluginTest/stats_over_http/gold/stats_over_http_prometheus_stderr.gold
@@ -1,8 +1,8 @@
``
-> GET /_stats HTTP/1.1
+> GET /_stats``HTTP/1.1
``
< HTTP/1.1 200 OK
-< Content-Type: text/json
+< Content-Type: text/plain; version=0.0.4; charset=utf-8
< Cache-Control: no-cache
< Date:``
< Age:``
diff --git a/tests/gold_tests/pluginTest/stats_over_http/prometheus_stats_ingester.py b/tests/gold_tests/pluginTest/stats_over_http/prometheus_stats_ingester.py
new file mode 100644
index 0000000000..16b3701ecc
--- /dev/null
+++ b/tests/gold_tests/pluginTest/stats_over_http/prometheus_stats_ingester.py
@@ -0,0 +1,117 @@
+'''Parse ATS Prometheus stats with Prometheus to verify correct formatting.'''
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import argparse
+import sys
+from urllib.request import urlopen
+from prometheus_client.parser import text_string_to_metric_families
+
+
+def parse_args() -> argparse.Namespace:
+ """
+ Parse command line arguments for the Prometheus metrics ingester.
+
+ :return: Parsed arguments with the 'url' attribute.
+ """
+ parser = argparse.ArgumentParser(description=__doc__)
+ parser.add_argument("url", help="URL to fetch metrics from")
+ return parser.parse_args()
+
+
+def query_ats(url: str) -> str:
+ """
+ Fetch Prometheus metrics from the specified URL.
+
+ :param url: URL to fetch metrics from.
+ :return: Response text containing the metrics.
+ """
+ try:
+ with urlopen(url) as response:
+ return response.read().decode('utf-8')
+ except Exception as e:
+ raise RuntimeError(f"Failed to fetch metrics from {url}: {e}")
+
+
+def parse_ats_metrics(text: str) -> list:
+ """
+ Parse Prometheus metrics from a text string.
+
+ :param text: The ATS output containing Prometheus metrics.
+ :return: List of parsed metric families.
+ """
+ try:
+ families = text_string_to_metric_families(text)
+ except Exception as e:
+ raise RuntimeError(f"Failed to parse metrics: {e}")
+
+ if not families:
+ raise RuntimeError("No metrics found in the provided text")
+ return families
+
+
+def print_metrics(families: list) -> None:
+ """
+ Print parsed metric families in Prometheus format.
+
+ :param families: List of parsed metric families.
+ """
+ try:
+ for family in families:
+ print(f"# HELP {family.name} {family.documentation}")
+ print(f"# TYPE {family.name} {family.type}")
+ for sample in family.samples:
+ name, labels, value = sample.name, sample.labels, sample.value
+ if labels:
+ label_str = ",".join(f'{k}="{v}"' for k, v in labels.items())
+ print(f"{name}{{{label_str}}} {value}")
+ else:
+ print(f"{name} {value}")
+ except Exception as e:
+ raise RuntimeError(f"Failed to print metrics: {e}")
+
+
+def main() -> int:
+ """
+ Fetch and parse Prometheus metrics from a given URL.
+
+ :return: Exit code, 0 on success, non-zero on failure.
+ """
+ args = parse_args()
+
+ try:
+ ats_output = query_ats(args.url)
+ except RuntimeError as e:
+ print(f"Error fetching URL {args.url}: {e}", file=sys.stderr)
+ return 1
+
+ try:
+ families = parse_ats_metrics(ats_output)
+ except RuntimeError as e:
+ print(f"Error parsing ATS metrics: {e}", file=sys.stderr)
+ return 1
+
+ # Parsing issues may not arise until we try to print the metrics.
+ try:
+ print_metrics(families)
+ except RuntimeError as e:
+ print(f"Error parsing the metrics when printing them: {e}",
file=sys.stderr)
+ return 1
+ return 0
+
+
+if __name__ == "__main__":
+ sys.exit(main())
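The test file diffed next drives this script via sys.executable; run standalone it would be invoked roughly as follows (the port is whatever the test ATS instance listens on, shown here as a placeholder):

    python3 prometheus_stats_ingester.py http://127.0.0.1:<port>/_stats/prometheus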
diff --git a/tests/gold_tests/pluginTest/stats_over_http/stats_over_http.test.py b/tests/gold_tests/pluginTest/stats_over_http/stats_over_http.test.py
index c330573a39..5ccfbe0d46 100644
--- a/tests/gold_tests/pluginTest/stats_over_http/stats_over_http.test.py
+++ b/tests/gold_tests/pluginTest/stats_over_http/stats_over_http.test.py
@@ -17,6 +17,7 @@
# limitations under the License.
from enum import Enum
+import sys
Test.Summary = 'Exercise stats-over-http plugin'
Test.SkipUnless(Condition.PluginExists('stats_over_http.so'))
@@ -62,18 +63,108 @@ class StatsOverHttpPluginTest:
assert (self.state == self.State.RUNNING)
tr.StillRunningAfter = self.ts
- def __testCase0(self):
- tr = Test.AddTestRun()
+ def __testCaseNoAccept(self):
+ tr = Test.AddTestRun('Fetch stats over HTTP in JSON format: no Accept and default path')
self.__checkProcessBefore(tr)
tr.MakeCurlCommand(f"-vs --http1.1 http://127.0.0.1:{self.ts.Variables.port}/_stats")
tr.Processes.Default.ReturnCode = 0
- tr.Processes.Default.Streams.stdout = "gold/stats_over_http_0_stdout.gold"
- tr.Processes.Default.Streams.stderr = "gold/stats_over_http_0_stderr.gold"
+ tr.Processes.Default.Streams.stdout += Testers.ContainsExpression('{ "global": {', 'Output should have the JSON header.')
+ tr.Processes.Default.Streams.stdout += Testers.ContainsExpression(
+ '"proxy.process.http.delete_requests": "0",', 'Output should be JSON formatted.')
+ tr.Processes.Default.Streams.stderr = "gold/stats_over_http_json_stderr.gold"
tr.Processes.Default.TimeOut = 3
self.__checkProcessAfter(tr)
+ def __testCaseAcceptCSV(self):
+ tr = Test.AddTestRun('Fetch stats over HTTP in CSV format')
+ self.__checkProcessBefore(tr)
+ tr.MakeCurlCommand(f"-vs -H'Accept: text/csv' --http1.1
http://127.0.0.1:{self.ts.Variables.port}/_stats")
+ tr.Processes.Default.ReturnCode = 0
+ tr.Processes.Default.Streams.stdout += Testers.ContainsExpression(
+ 'proxy.process.http.delete_requests,0', 'Output should be CSV
formatted.')
+ tr.Processes.Default.Streams.stderr =
"gold/stats_over_http_csv_stderr.gold"
+ tr.Processes.Default.TimeOut = 3
+ self.__checkProcessAfter(tr)
+
+ def __testCaseAcceptPrometheus(self):
+ tr = Test.AddTestRun('Fetch stats over HTTP in Prometheus format')
+ self.__checkProcessBefore(tr)
+ tr.MakeCurlCommand(f"-vs -H'Accept: text/plain; version=0.0.4'
--http1.1 http://127.0.0.1:{self.ts.Variables.port}/_stats")
+ tr.Processes.Default.ReturnCode = 0
+ tr.Processes.Default.Streams.stdout += Testers.ContainsExpression(
+ 'proxy_process_http_delete_requests 0', 'Output should be
Prometheus formatted.')
+ tr.Processes.Default.Streams.stderr =
"gold/stats_over_http_prometheus_stderr.gold"
+ tr.Processes.Default.TimeOut = 3
+ self.__checkProcessAfter(tr)
+
+ def __testCasePathJSON(self):
+ tr = Test.AddTestRun('Fetch stats over HTTP in JSON format via /_stats/json')
+ self.__checkProcessBefore(tr)
+ tr.MakeCurlCommand(f"-vs --http1.1 http://127.0.0.1:{self.ts.Variables.port}/_stats/json")
+ tr.Processes.Default.ReturnCode = 0
+ tr.Processes.Default.Streams.stdout += Testers.ContainsExpression('{ "global": {', 'JSON header expected.')
+ tr.Processes.Default.Streams.stdout += Testers.ContainsExpression(
+ '"proxy.process.http.delete_requests": "0",', 'JSON field expected.')
+ tr.Processes.Default.Streams.stderr = "gold/stats_over_http_json_stderr.gold"
+ tr.Processes.Default.TimeOut = 3
+ self.__checkProcessAfter(tr)
+
+ def __testCasePathCSV(self):
+ tr = Test.AddTestRun('Fetch stats over HTTP in CSV format via /_stats/csv')
+ self.__checkProcessBefore(tr)
+ tr.MakeCurlCommand(f"-vs --http1.1 http://127.0.0.1:{self.ts.Variables.port}/_stats/csv")
+ tr.Processes.Default.ReturnCode = 0
+ tr.Processes.Default.Streams.stdout += Testers.ContainsExpression(
+ 'proxy.process.http.delete_requests,0', 'CSV output expected.')
+ tr.Processes.Default.Streams.stderr = "gold/stats_over_http_csv_stderr.gold"
+ tr.Processes.Default.TimeOut = 3
+ self.__checkProcessAfter(tr)
+
+ def __testCasePathPrometheus(self):
+ tr = Test.AddTestRun('Fetch stats over HTTP in Prometheus format via /_stats/prometheus')
+ self.__checkProcessBefore(tr)
+ tr.MakeCurlCommand(f"-vs --http1.1 http://127.0.0.1:{self.ts.Variables.port}/_stats/prometheus")
+ tr.Processes.Default.ReturnCode = 0
+ tr.Processes.Default.Streams.stdout += Testers.ContainsExpression(
+ 'proxy_process_http_delete_requests 0', 'Prometheus output expected.')
+ tr.Processes.Default.Streams.stderr = "gold/stats_over_http_prometheus_stderr.gold"
+ tr.Processes.Default.TimeOut = 3
+ self.__checkProcessAfter(tr)
+
+ def __testCaseAcceptIgnoredIfPathExplicit(self):
+ tr = Test.AddTestRun('Fetch stats over HTTP in Prometheus format with Accept csv header')
+ self.__checkProcessBefore(tr)
+ tr.MakeCurlCommand(f"-vs -H'Accept: text/csv' --http1.1 http://127.0.0.1:{self.ts.Variables.port}/_stats/prometheus")
+ tr.Processes.Default.ReturnCode = 0
+ tr.Processes.Default.Streams.stdout += Testers.ContainsExpression(
+ 'proxy_process_http_delete_requests 0', 'Prometheus output expected.')
+ tr.Processes.Default.Streams.stderr = "gold/stats_over_http_prometheus_stderr.gold"
+ tr.Processes.Default.TimeOut = 3
+ self.__checkProcessAfter(tr)
+
+ def __queryAndParsePrometheusMetrics(self):
+ """
+ Query the ATS stats over HTTP in Prometheus format and parse the output.
+ """
+ tr = Test.AddTestRun('Query and parse Prometheus metrics')
+ ingester = 'prometheus_stats_ingester.py'
+ tr.Setup.CopyAs(ingester)
+ self.__checkProcessBefore(tr)
+ p = tr.Processes.Default
+ p.Command = f'{sys.executable} {ingester} http://127.0.0.1:{self.ts.Variables.port}/_stats/prometheus'
+ p.ReturnCode = 0
+ p.Streams.stdout += Testers.ContainsExpression(
+ 'proxy_process_http_delete_requests 0', 'Verify the successful parsing of Prometheus metrics.')
+
def run(self):
- self.__testCase0()
+ self.__testCaseNoAccept()
+ self.__testCaseAcceptCSV()
+ self.__testCaseAcceptPrometheus()
+ self.__testCasePathJSON()
+ self.__testCasePathCSV()
+ self.__testCasePathPrometheus()
+ self.__testCaseAcceptIgnoredIfPathExplicit()
+ self.__queryAndParsePrometheusMetrics()
StatsOverHttpPluginTest().run()