This is an automated email from the ASF dual-hosted git repository.
zhaoqingran pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hertzbeat.git
The following commit(s) were added to refs/heads/master by this push:
new 47caa5bdf8 feat: support otlp http binary protobuf format log data and
update doc (#3986)
47caa5bdf8 is described below
commit 47caa5bdf8ea78415a6881f2c0e5925fec31c10d
Author: Yang Chen <[email protected]>
AuthorDate: Fri Jan 23 18:34:36 2026 +0800
feat: support otlp http binary protobuf format log data and update doc
(#3986)
Signed-off-by: Yang Chen <[email protected]>
Co-authored-by: Copilot <[email protected]>
Co-authored-by: Logic <[email protected]>
---
.../log/storage/GreptimeLogStorageE2eTest.java | 2 +-
.../log/controller/LogIngestionController.java | 57 ++------
.../log/controller/LogQueryController.java | 161 +++++++++++----------
.../log/controller/OtlpLogController.java | 160 ++++++++++++++++++++
.../hertzbeat/log/notice/LogSseFilterCriteria.java | 26 +++-
.../hertzbeat/log/service/LogProtocolAdapter.java | 2 +-
.../log/service/impl/OtlpLogProtocolAdapter.java | 46 ++++--
.../log/controller/LogIngestionControllerTest.java | 68 +++------
.../log/controller/LogQueryControllerTest.java | 39 +++--
.../log/controller/OtlpLogControllerTest.java | 131 +++++++++++++++++
.../log/notice/LogSseFilterCriteriaTest.java | 47 +++++-
.../service/impl/OtlpLogProtocolAdapterTest.java | 69 +++++++++
.../store/history/tsdb/HistoryDataReader.java | 34 ++++-
.../tsdb/greptime/GreptimeDbDataStorage.java | 20 ++-
.../tsdb/greptime/GreptimeDbDataStorageTest.java | 50 +++----
home/docs/help/log_integration.md | 24 +--
.../current/help/log_integration.md | 20 +--
home/static/img/docs/help/log_integration_cn.png | Bin 277357 -> 191427 bytes
home/static/img/docs/help/log_integration_en.png | Bin 298468 -> 203907 bytes
.../log/log-manage/log-manage.component.html | 14 +-
.../routes/log/log-manage/log-manage.component.ts | 10 +-
.../log/log-stream/log-stream.component.html | 10 ++
.../routes/log/log-stream/log-stream.component.ts | 6 +
web-app/src/app/service/log.service.ts | 14 +-
.../src/assets/doc/log-integration/otlp.en-US.md | 14 +-
.../src/assets/doc/log-integration/otlp.zh-CN.md | 14 +-
web-app/src/assets/i18n/en-US.json | 2 +
web-app/src/assets/i18n/ja-JP.json | 2 +
web-app/src/assets/i18n/pt-BR.json | 2 +
web-app/src/assets/i18n/zh-CN.json | 2 +
web-app/src/assets/i18n/zh-TW.json | 2 +
31 files changed, 730 insertions(+), 318 deletions(-)
diff --git
a/hertzbeat-e2e/hertzbeat-log-e2e/src/test/java/org/apache/hertzbeat/log/storage/GreptimeLogStorageE2eTest.java
b/hertzbeat-e2e/hertzbeat-log-e2e/src/test/java/org/apache/hertzbeat/log/storage/GreptimeLogStorageE2eTest.java
index 6a85ffa107..23dc0c02db 100644
---
a/hertzbeat-e2e/hertzbeat-log-e2e/src/test/java/org/apache/hertzbeat/log/storage/GreptimeLogStorageE2eTest.java
+++
b/hertzbeat-e2e/hertzbeat-log-e2e/src/test/java/org/apache/hertzbeat/log/storage/GreptimeLogStorageE2eTest.java
@@ -161,6 +161,6 @@ public class GreptimeLogStorageE2eTest {
long startTime = endTime - Duration.ofMinutes(5).toMillis(); // Look
back 5 minutes
return greptimeDbDataStorage.queryLogsByMultipleConditions(
- startTime, endTime, null, null, null, null);
+ startTime, endTime, null, null, null, null, null);
}
}
diff --git
a/hertzbeat-log/src/main/java/org/apache/hertzbeat/log/controller/LogIngestionController.java
b/hertzbeat-log/src/main/java/org/apache/hertzbeat/log/controller/LogIngestionController.java
index 2536986ae8..e32510e4d6 100644
---
a/hertzbeat-log/src/main/java/org/apache/hertzbeat/log/controller/LogIngestionController.java
+++
b/hertzbeat-log/src/main/java/org/apache/hertzbeat/log/controller/LogIngestionController.java
@@ -17,6 +17,7 @@
package org.apache.hertzbeat.log.controller;
+import io.swagger.v3.oas.annotations.Operation;
import io.swagger.v3.oas.annotations.tags.Tag;
import lombok.extern.slf4j.Slf4j;
import java.util.List;
@@ -25,8 +26,8 @@ import org.apache.hertzbeat.common.constants.CommonConstants;
import org.apache.hertzbeat.common.entity.dto.Message;
import org.apache.hertzbeat.log.service.LogProtocolAdapter;
import org.springframework.http.HttpStatus;
+import org.springframework.http.MediaType;
import org.springframework.http.ResponseEntity;
-import org.springframework.util.StringUtils;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestBody;
@@ -34,15 +35,16 @@ import
org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
/**
- * Log Ingestion Controller
+ * Generic Log Ingestion Controller
+ * Provides a fallback endpoint for log protocols that don't have dedicated
controllers.
+ * For OTLP protocol, use OtlpLogController instead.
*/
@Tag(name = "Log Ingestion Controller")
@RestController
-@RequestMapping(path = "/api/logs", produces = "application/json")
+@RequestMapping(path = "/api/logs", produces =
MediaType.APPLICATION_JSON_VALUE)
@Slf4j
public class LogIngestionController {
- private static final String DEFAULT_PROTOCOL = "otlp";
private final List<LogProtocolAdapter> protocolAdapters;
public LogIngestionController(List<LogProtocolAdapter> protocolAdapters) {
@@ -51,26 +53,23 @@ public class LogIngestionController {
/**
* Receive log payload pushed from external system specifying the log
protocol.
- * Examples:
- * - POST /api/logs/ingest/otlp (content body is OTLP JSON)
*
- * @param protocol log protocol identifier
- * @param content raw request body
+ * @param protocol log protocol identifier (e.g., "vector", "loki")
+ * @param content raw request body
*/
- @PostMapping("/ingest/{protocol}")
- public ResponseEntity<Message<Void>>
ingestExternLog(@PathVariable("protocol") String protocol,
- @RequestBody String
content) {
- log.debug("Receive extern log from protocol: {}, content length: {}",
protocol, content == null ? 0 : content.length());
- if (!StringUtils.hasText(protocol)) {
- protocol = DEFAULT_PROTOCOL; // Default to OTLP if no protocol
specified
- }
+ @Operation(summary = "Ingest logs by protocol name")
+ @PostMapping(value = "/ingest/{protocol}", consumes =
MediaType.APPLICATION_JSON_VALUE)
+ public ResponseEntity<Message<Void>> ingestLog(@PathVariable("protocol")
String protocol,
+ @RequestBody String
content) {
+ log.debug("Receive log from protocol: {}, content length: {}",
protocol, content == null ? 0 : content.length());
+
for (LogProtocolAdapter adapter : protocolAdapters) {
if (adapter.supportProtocol().equalsIgnoreCase(protocol)) {
try {
adapter.ingest(content);
return ResponseEntity.ok(Message.success("Add extern log
success"));
} catch (Exception e) {
- log.error("Add extern log failed: {}", e.getMessage(), e);
+ log.error("Add log failed: {}", e.getMessage(), e);
return ResponseEntity.status(HttpStatus.BAD_REQUEST)
.body(Message.fail(CommonConstants.FAIL_CODE, "Add
extern log failed: " + e.getMessage()));
}
@@ -80,30 +79,4 @@ public class LogIngestionController {
return ResponseEntity.status(HttpStatus.BAD_REQUEST)
.body(Message.fail(CommonConstants.FAIL_CODE, "Not support the
" + protocol + " protocol log"));
}
-
- /**
- * Receive default log payload (when protocol is not specified).
- * It will look for a service whose supportProtocol() returns "otlp".
- */
- @PostMapping("/ingest")
- public ResponseEntity<Message<Void>> ingestDefaultExternLog(@RequestBody
String content) {
- log.info("Receive default extern log content, length: {}", content ==
null ? 0 : content.length());
- LogProtocolAdapter adapter = protocolAdapters.stream()
- .filter(item ->
DEFAULT_PROTOCOL.equalsIgnoreCase(item.supportProtocol()))
- .findFirst()
- .orElse(null);
- if (adapter != null) {
- try {
- adapter.ingest(content);
- return ResponseEntity.ok(Message.success("Add extern log
success"));
- } catch (Exception e) {
- log.error("Add extern log failed: {}", e.getMessage(), e);
- return ResponseEntity.status(HttpStatus.BAD_REQUEST)
- .body(Message.fail(CommonConstants.FAIL_CODE, "Add
extern log failed: " + e.getMessage()));
- }
- }
- log.error("Not support default extern log protocol");
- return ResponseEntity.status(HttpStatus.BAD_REQUEST)
- .body(Message.fail(CommonConstants.FAIL_CODE, "Not support the
default protocol log"));
- }
}
diff --git
a/hertzbeat-log/src/main/java/org/apache/hertzbeat/log/controller/LogQueryController.java
b/hertzbeat-log/src/main/java/org/apache/hertzbeat/log/controller/LogQueryController.java
index 2de4de5ab2..5d0bed1f45 100644
---
a/hertzbeat-log/src/main/java/org/apache/hertzbeat/log/controller/LogQueryController.java
+++
b/hertzbeat-log/src/main/java/org/apache/hertzbeat/log/controller/LogQueryController.java
@@ -61,50 +61,54 @@ public class LogQueryController {
}
@GetMapping("/list")
- @Operation(summary = "Query logs by time range with optional filters",
- description = "Query logs by [start,end] in ms and optional
filters with pagination. Returns paginated log entries sorted by timestamp in
descending order.")
+ @Operation(summary = "Query logs by time range with optional filters",
+ description = "Query logs by [start,end] in ms and optional
filters with pagination. Returns paginated log entries sorted by timestamp in
descending order.")
public ResponseEntity<Message<Page<LogEntry>>> list(
- @Parameter(description = "Start timestamp in milliseconds (Unix
timestamp)", example = "1640995200000")
+ @Parameter(description = "Start timestamp in milliseconds (Unix
timestamp)", example = "1640995200000")
@RequestParam(value = "start", required = false) Long start,
- @Parameter(description = "End timestamp in milliseconds (Unix
timestamp)", example = "1641081600000")
+ @Parameter(description = "End timestamp in milliseconds (Unix
timestamp)", example = "1641081600000")
@RequestParam(value = "end", required = false) Long end,
- @Parameter(description = "Trace ID for distributed tracing",
example = "1234567890abcdef")
+ @Parameter(description = "Trace ID for distributed tracing",
example = "1234567890abcdef")
@RequestParam(value = "traceId", required = false) String traceId,
- @Parameter(description = "Span ID for distributed tracing",
example = "abcdef1234567890")
+ @Parameter(description = "Span ID for distributed tracing",
example = "abcdef1234567890")
@RequestParam(value = "spanId", required = false) String spanId,
- @Parameter(description = "Log severity number (1-24 according to
OpenTelemetry standard)", example = "9")
+ @Parameter(description = "Log severity number (1-24 according to
OpenTelemetry standard)", example = "9")
@RequestParam(value = "severityNumber", required = false) Integer
severityNumber,
- @Parameter(description = "Log severity text (TRACE, DEBUG, INFO,
WARN, ERROR, FATAL)", example = "INFO")
+ @Parameter(description = "Log severity text (TRACE, DEBUG, INFO,
WARN, ERROR, FATAL)", example = "INFO")
@RequestParam(value = "severityText", required = false) String
severityText,
- @Parameter(description = "Page index starting from 0", example =
"0")
+ @Parameter(description = "Log content search keyword", example =
"error")
+ @RequestParam(value = "search", required = false) String search,
+ @Parameter(description = "Page index starting from 0", example =
"0")
@RequestParam(value = "pageIndex", required = false, defaultValue
= "0") Integer pageIndex,
- @Parameter(description = "Number of items per page", example =
"20")
+ @Parameter(description = "Number of items per page", example =
"20")
@RequestParam(value = "pageSize", required = false, defaultValue =
"20") Integer pageSize) {
- Page<LogEntry> result = getPagedLogs(start, end, traceId, spanId,
severityNumber, severityText, pageIndex, pageSize);
+ Page<LogEntry> result = getPagedLogs(start, end, traceId, spanId,
severityNumber, severityText, search, pageIndex, pageSize);
return ResponseEntity.ok(Message.success(result));
}
@GetMapping("/stats/overview")
- @Operation(summary = "Log overview statistics",
- description = "Overall counts and basic statistics with
filters. Provides counts by severity levels according to OpenTelemetry
standard.")
+ @Operation(summary = "Log overview statistics",
+ description = "Overall counts and basic statistics with filters.
Provides counts by severity levels according to OpenTelemetry standard.")
public ResponseEntity<Message<Map<String, Object>>> overviewStats(
- @Parameter(description = "Start timestamp in milliseconds (Unix
timestamp)", example = "1640995200000")
+ @Parameter(description = "Start timestamp in milliseconds (Unix
timestamp)", example = "1640995200000")
@RequestParam(value = "start", required = false) Long start,
- @Parameter(description = "End timestamp in milliseconds (Unix
timestamp)", example = "1641081600000")
+ @Parameter(description = "End timestamp in milliseconds (Unix
timestamp)", example = "1641081600000")
@RequestParam(value = "end", required = false) Long end,
- @Parameter(description = "Trace ID for distributed tracing",
example = "1234567890abcdef")
+ @Parameter(description = "Trace ID for distributed tracing",
example = "1234567890abcdef")
@RequestParam(value = "traceId", required = false) String traceId,
- @Parameter(description = "Span ID for distributed tracing",
example = "abcdef1234567890")
+ @Parameter(description = "Span ID for distributed tracing",
example = "abcdef1234567890")
@RequestParam(value = "spanId", required = false) String spanId,
- @Parameter(description = "Log severity number (1-24 according to
OpenTelemetry standard)", example = "9")
+ @Parameter(description = "Log severity number (1-24 according to
OpenTelemetry standard)", example = "9")
@RequestParam(value = "severityNumber", required = false) Integer
severityNumber,
- @Parameter(description = "Log severity text (TRACE, DEBUG, INFO,
WARN, ERROR, FATAL)", example = "INFO")
- @RequestParam(value = "severityText", required = false) String
severityText) {
- List<LogEntry> logs = getFilteredLogs(start, end, traceId, spanId,
severityNumber, severityText);
-
+ @Parameter(description = "Log severity text (TRACE, DEBUG, INFO,
WARN, ERROR, FATAL)", example = "INFO")
+ @RequestParam(value = "severityText", required = false) String
severityText,
+ @Parameter(description = "Log content search keyword", example =
"error")
+ @RequestParam(value = "search", required = false) String search) {
+ List<LogEntry> logs = getFilteredLogs(start, end, traceId, spanId,
severityNumber, severityText, search);
+
Map<String, Object> overview = new HashMap<>();
overview.put("totalCount", logs.size());
-
+
// Count by severity levels according to OpenTelemetry standard
// TRACE: 1-4, DEBUG: 5-8, INFO: 9-12, WARN: 13-16, ERROR: 17-20,
FATAL: 21-24
long fatalCount = logs.stream().filter(log -> log.getSeverityNumber()
!= null && log.getSeverityNumber() >= 21 && log.getSeverityNumber() <=
24).count();
@@ -113,114 +117,117 @@ public class LogQueryController {
long infoCount = logs.stream().filter(log -> log.getSeverityNumber()
!= null && log.getSeverityNumber() >= 9 && log.getSeverityNumber() <=
12).count();
long debugCount = logs.stream().filter(log -> log.getSeverityNumber()
!= null && log.getSeverityNumber() >= 5 && log.getSeverityNumber() <=
8).count();
long traceCount = logs.stream().filter(log -> log.getSeverityNumber()
!= null && log.getSeverityNumber() >= 1 && log.getSeverityNumber() <=
4).count();
-
+
overview.put("fatalCount", fatalCount);
overview.put("errorCount", errorCount);
overview.put("warnCount", warnCount);
overview.put("infoCount", infoCount);
overview.put("debugCount", debugCount);
overview.put("traceCount", traceCount);
-
+
return ResponseEntity.ok(Message.success(overview));
}
@GetMapping("/stats/trace-coverage")
- @Operation(summary = "Trace coverage statistics",
- description = "Statistics about trace information availability.
Shows how many logs have trace IDs, span IDs, or both for distributed tracing
analysis.")
+ @Operation(summary = "Trace coverage statistics",
+ description = "Statistics about trace information availability.
Shows how many logs have trace IDs, span IDs, or both for distributed tracing
analysis.")
public ResponseEntity<Message<Map<String, Object>>> traceCoverageStats(
- @Parameter(description = "Start timestamp in milliseconds (Unix
timestamp)", example = "1640995200000")
+ @Parameter(description = "Start timestamp in milliseconds (Unix
timestamp)", example = "1640995200000")
@RequestParam(value = "start", required = false) Long start,
- @Parameter(description = "End timestamp in milliseconds (Unix
timestamp)", example = "1641081600000")
+ @Parameter(description = "End timestamp in milliseconds (Unix
timestamp)", example = "1641081600000")
@RequestParam(value = "end", required = false) Long end,
- @Parameter(description = "Trace ID for distributed tracing",
example = "1234567890abcdef")
+ @Parameter(description = "Trace ID for distributed tracing",
example = "1234567890abcdef")
@RequestParam(value = "traceId", required = false) String traceId,
- @Parameter(description = "Span ID for distributed tracing",
example = "abcdef1234567890")
+ @Parameter(description = "Span ID for distributed tracing",
example = "abcdef1234567890")
@RequestParam(value = "spanId", required = false) String spanId,
- @Parameter(description = "Log severity number (1-24 according to
OpenTelemetry standard)", example = "9")
+ @Parameter(description = "Log severity number (1-24 according to
OpenTelemetry standard)", example = "9")
@RequestParam(value = "severityNumber", required = false) Integer
severityNumber,
- @Parameter(description = "Log severity text (TRACE, DEBUG, INFO,
WARN, ERROR, FATAL)", example = "INFO")
- @RequestParam(value = "severityText", required = false) String
severityText) {
- List<LogEntry> logs = getFilteredLogs(start, end, traceId, spanId,
severityNumber, severityText);
-
+ @Parameter(description = "Log severity text (TRACE, DEBUG, INFO,
WARN, ERROR, FATAL)", example = "INFO")
+ @RequestParam(value = "severityText", required = false) String
severityText,
+ @Parameter(description = "Log content search keyword", example =
"error")
+ @RequestParam(value = "search", required = false) String search) {
+ List<LogEntry> logs = getFilteredLogs(start, end, traceId, spanId,
severityNumber, severityText, search);
+
Map<String, Object> result = new HashMap<>();
-
+
// Trace coverage statistics
long withTraceId = logs.stream().filter(log -> log.getTraceId() !=
null && !log.getTraceId().isEmpty()).count();
long withSpanId = logs.stream().filter(log -> log.getSpanId() != null
&& !log.getSpanId().isEmpty()).count();
- long withBothTraceAndSpan = logs.stream().filter(log ->
- log.getTraceId() != null && !log.getTraceId().isEmpty()
- && log.getSpanId() != null &&
!log.getSpanId().isEmpty()).count();
+ long withBothTraceAndSpan = logs.stream().filter(log ->
+ log.getTraceId() != null && !log.getTraceId().isEmpty()
+ && log.getSpanId() != null &&
!log.getSpanId().isEmpty()).count();
long withoutTrace = logs.size() - withTraceId;
-
+
Map<String, Long> traceCoverage = new HashMap<>();
traceCoverage.put("withTrace", withTraceId);
traceCoverage.put("withoutTrace", withoutTrace);
traceCoverage.put("withSpan", withSpanId);
traceCoverage.put("withBothTraceAndSpan", withBothTraceAndSpan);
-
+
result.put("traceCoverage", traceCoverage);
return ResponseEntity.ok(Message.success(result));
}
@GetMapping("/stats/trend")
- @Operation(summary = "Log trend over time",
- description = "Count logs by hour intervals with filters.
Groups logs by hour and provides time-series data for trend analysis.")
+ @Operation(summary = "Log trend over time",
+ description = "Count logs by hour intervals with filters. Groups
logs by hour and provides time-series data for trend analysis.")
public ResponseEntity<Message<Map<String, Object>>> trendStats(
- @Parameter(description = "Start timestamp in milliseconds (Unix
timestamp)", example = "1640995200000")
+ @Parameter(description = "Start timestamp in milliseconds (Unix
timestamp)", example = "1640995200000")
@RequestParam(value = "start", required = false) Long start,
- @Parameter(description = "End timestamp in milliseconds (Unix
timestamp)", example = "1641081600000")
+ @Parameter(description = "End timestamp in milliseconds (Unix
timestamp)", example = "1641081600000")
@RequestParam(value = "end", required = false) Long end,
- @Parameter(description = "Trace ID for distributed tracing",
example = "1234567890abcdef")
+ @Parameter(description = "Trace ID for distributed tracing",
example = "1234567890abcdef")
@RequestParam(value = "traceId", required = false) String traceId,
- @Parameter(description = "Span ID for distributed tracing",
example = "abcdef1234567890")
+ @Parameter(description = "Span ID for distributed tracing",
example = "abcdef1234567890")
@RequestParam(value = "spanId", required = false) String spanId,
- @Parameter(description = "Log severity number (1-24 according to
OpenTelemetry standard)", example = "9")
+ @Parameter(description = "Log severity number (1-24 according to
OpenTelemetry standard)", example = "9")
@RequestParam(value = "severityNumber", required = false) Integer
severityNumber,
- @Parameter(description = "Log severity text (TRACE, DEBUG, INFO,
WARN, ERROR, FATAL)", example = "INFO")
- @RequestParam(value = "severityText", required = false) String
severityText) {
- List<LogEntry> logs = getFilteredLogs(start, end, traceId, spanId,
severityNumber, severityText);
-
+ @Parameter(description = "Log severity text (TRACE, DEBUG, INFO,
WARN, ERROR, FATAL)", example = "INFO")
+ @RequestParam(value = "severityText", required = false) String
severityText,
+ @Parameter(description = "Log content search keyword", example =
"error")
+ @RequestParam(value = "search", required = false) String search) {
+ List<LogEntry> logs = getFilteredLogs(start, end, traceId, spanId,
severityNumber, severityText, search);
+
// Group by hour
Map<String, Long> hourlyStats = logs.stream()
- .filter(log -> log.getTimeUnixNano() != null)
- .collect(Collectors.groupingBy(
- log -> {
- long timestampMs = log.getTimeUnixNano() / 1_000_000L;
- LocalDateTime dateTime = LocalDateTime.ofInstant(
- Instant.ofEpochMilli(timestampMs),
- ZoneId.systemDefault());
- return
dateTime.format(DateTimeFormatter.ofPattern("yyyy-MM-dd HH:00"));
- },
- Collectors.counting()));
-
+ .filter(log -> log.getTimeUnixNano() != null)
+ .collect(Collectors.groupingBy(
+ log -> {
+ long timestampMs = log.getTimeUnixNano() /
1_000_000L;
+ LocalDateTime dateTime = LocalDateTime.ofInstant(
+ Instant.ofEpochMilli(timestampMs),
+ ZoneId.systemDefault());
+ return
dateTime.format(DateTimeFormatter.ofPattern("yyyy-MM-dd HH:00"));
+ },
+ Collectors.counting()));
+
Map<String, Object> result = new HashMap<>();
result.put("hourlyStats", hourlyStats);
return ResponseEntity.ok(Message.success(result));
}
- private List<LogEntry> getFilteredLogs(Long start, Long end, String
traceId, String spanId,
- Integer severityNumber, String
severityText) {
+ private List<LogEntry> getFilteredLogs(Long start, Long end, String
traceId, String spanId,
+ Integer severityNumber, String
severityText, String search) {
// Use the new multi-condition query method
- return historyDataReader.queryLogsByMultipleConditions(start, end,
traceId, spanId, severityNumber, severityText);
+ return historyDataReader.queryLogsByMultipleConditions(start, end,
traceId, spanId, severityNumber, severityText, search);
}
- private Page<LogEntry> getPagedLogs(Long start, Long end, String traceId,
String spanId,
- Integer severityNumber, String
severityText, Integer pageIndex, Integer pageSize) {
+ private Page<LogEntry> getPagedLogs(Long start, Long end, String traceId,
String spanId,
+ Integer severityNumber, String
severityText, String search,
+ Integer pageIndex, Integer pageSize) {
// Calculate pagination parameters
int offset = pageIndex * pageSize;
-
+
// Get total count and paginated data
- long totalElements =
historyDataReader.countLogsByMultipleConditions(start, end, traceId, spanId,
severityNumber, severityText);
+ long totalElements =
historyDataReader.countLogsByMultipleConditions(start, end, traceId, spanId,
severityNumber, severityText, search);
List<LogEntry> pagedLogs =
historyDataReader.queryLogsByMultipleConditionsWithPagination(
- start, end, traceId, spanId, severityNumber, severityText, offset,
pageSize);
-
+ start, end, traceId, spanId, severityNumber, severityText,
search, offset, pageSize);
+
// Create PageRequest (sorted by timestamp descending)
Sort sort = Sort.by(Sort.Direction.DESC, "timeUnixNano");
PageRequest pageRequest = PageRequest.of(pageIndex, pageSize, sort);
-
+
// Return Spring Data Page object
return new PageImpl<>(pagedLogs, pageRequest, totalElements);
}
-}
-
-
+}
\ No newline at end of file
diff --git
a/hertzbeat-log/src/main/java/org/apache/hertzbeat/log/controller/OtlpLogController.java
b/hertzbeat-log/src/main/java/org/apache/hertzbeat/log/controller/OtlpLogController.java
new file mode 100644
index 0000000000..3d01075081
--- /dev/null
+++
b/hertzbeat-log/src/main/java/org/apache/hertzbeat/log/controller/OtlpLogController.java
@@ -0,0 +1,160 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hertzbeat.log.controller;
+
+import com.fasterxml.jackson.core.io.JsonStringEncoder;
+import com.google.protobuf.InvalidProtocolBufferException;
+import com.google.protobuf.util.JsonFormat;
+import com.google.rpc.Status;
+import io.opentelemetry.proto.collector.logs.v1.ExportLogsServiceResponse;
+import io.swagger.v3.oas.annotations.Operation;
+import io.swagger.v3.oas.annotations.tags.Tag;
+import lombok.extern.slf4j.Slf4j;
+import org.apache.hertzbeat.log.service.impl.OtlpLogProtocolAdapter;
+import org.springframework.http.HttpStatus;
+import org.springframework.http.MediaType;
+import org.springframework.http.ResponseEntity;
+import org.springframework.web.bind.annotation.PostMapping;
+import org.springframework.web.bind.annotation.RequestBody;
+import org.springframework.web.bind.annotation.RequestMapping;
+import org.springframework.web.bind.annotation.RestController;
+
+/**
+ * OTLP Log Ingestion Controller
+ * Implements OTLP/HTTP specification for log ingestion.
+ * Supports both binary-encoded Protobuf (application/x-protobuf) and
JSON-encoded Protobuf (application/json).
+ *
+ * @see <a href="https://opentelemetry.io/docs/specs/otlp/#otlphttp">OTLP/HTTP
Specification</a>
+ */
+@Tag(name = "OTLP Log Controller")
+@RestController
+@RequestMapping(path = "/api/logs/otlp")
+@Slf4j
+public class OtlpLogController {
+
+ private static final String CONTENT_TYPE_PROTOBUF =
"application/x-protobuf";
+
+ private static final ExportLogsServiceResponse EMPTY_RESPONSE =
ExportLogsServiceResponse.newBuilder().build();
+
+ private final OtlpLogProtocolAdapter otlpLogProtocolAdapter;
+
+ public OtlpLogController(OtlpLogProtocolAdapter otlpLogProtocolAdapter) {
+ this.otlpLogProtocolAdapter = otlpLogProtocolAdapter;
+ }
+
+ /**
+ * OTLP/HTTP standard endpoint for logs with JSON-encoded Protobuf payload.
+ * Content-Type: application/json
+ *
+ * Response follows OTLP specification:
+ * - Success: HTTP 200 with ExportLogsServiceResponse (JSON encoded)
+ * - Failure: HTTP 400 with google.rpc.Status (JSON encoded)
+ *
+ * @param content JSON-encoded ExportLogsServiceRequest
+ * @return ExportLogsServiceResponse on success, Status on failure
+ */
+ @Operation(summary = "Ingest OTLP logs (JSON format)")
+ @PostMapping(value = "/v1/logs", consumes =
MediaType.APPLICATION_JSON_VALUE, produces = MediaType.APPLICATION_JSON_VALUE)
+ public ResponseEntity<String> ingestJsonLogs(@RequestBody String content) {
+ log.debug("Receive OTLP JSON logs, content length: {}", content ==
null ? 0 : content.length());
+ try {
+ otlpLogProtocolAdapter.ingest(content);
+ return ResponseEntity.ok(toJsonResponse(EMPTY_RESPONSE));
+ } catch (IllegalArgumentException e) {
+ return ResponseEntity.status(HttpStatus.BAD_REQUEST)
+ .body(toJsonErrorResponse(e.getMessage()));
+ } catch (Exception e) {
+ // Server-side errors - unexpected failure
+ log.error("Unexpected error ingesting OTLP JSON logs: {}",
e.getMessage(), e);
+ return ResponseEntity.status(HttpStatus.INTERNAL_SERVER_ERROR)
+ .body(toJsonErrorResponse(e.getMessage()));
+ }
+ }
+
+ /**
+ * OTLP/HTTP standard endpoint for logs with binary-encoded Protobuf
payload.
+ * Content-Type: application/x-protobuf
+ *
+ * Response follows OTLP specification:
+ * - Success: HTTP 200 with ExportLogsServiceResponse (binary encoded)
+ * - Failure: HTTP 400 with google.rpc.Status (binary encoded)
+ *
+ * @param content binary-encoded ExportLogsServiceRequest
+ * @return ExportLogsServiceResponse on success, Status on failure
+ */
+ @Operation(summary = "Ingest OTLP logs (binary Protobuf format)")
+ @PostMapping(value = "/v1/logs", consumes = CONTENT_TYPE_PROTOBUF,
produces = CONTENT_TYPE_PROTOBUF)
+ public ResponseEntity<byte[]> ingestBinaryLogs(@RequestBody byte[]
content) {
+ log.debug("Receive OTLP binary logs, content length: {}", content ==
null ? 0 : content.length);
+ try {
+ otlpLogProtocolAdapter.ingestBinary(content);
+ return ResponseEntity.ok(EMPTY_RESPONSE.toByteArray());
+ } catch (IllegalArgumentException e) {
+ // Client-side validation errors - malformed request
+ return ResponseEntity.status(HttpStatus.BAD_REQUEST)
+ .body(createBinaryErrorResponse(e.getMessage()));
+ } catch (Exception e) {
+ // Server-side errors - unexpected failure
+ log.error("Unexpected error ingesting OTLP binary logs: {}",
e.getMessage(), e);
+ return ResponseEntity.status(HttpStatus.INTERNAL_SERVER_ERROR)
+ .body(createBinaryErrorResponse(e.getMessage()));
+ }
+ }
+
+ private String toJsonResponse(ExportLogsServiceResponse response) {
+ try {
+ return JsonFormat.printer().print(response);
+ } catch (InvalidProtocolBufferException e) {
+ log.error("Failed to convert ExportLogsServiceResponse to JSON:
{}", e.getMessage(), e);
+ return "{}";
+ }
+ }
+
+ private String toJsonErrorResponse(String message) {
+ Status status = Status.newBuilder()
+ .setMessage(message != null ? message : "Unknown error")
+ .build();
+ try {
+ return JsonFormat.printer().print(status);
+ } catch (InvalidProtocolBufferException e) {
+ return "{\"message\":\"" + escapeJson(message) + "\"}";
+ }
+ }
+
+ /**
+ * Escapes a string value for safe inclusion in JSON.
+ *
+ * @param message the string to escape
+ * @return the escaped string, or empty string if message is null
+ */
+ private String escapeJson(String message) {
+ if (message == null) {
+ return "";
+ }
+
+ char[] escaped =
JsonStringEncoder.getInstance().quoteAsString(message);
+ return new String(escaped);
+ }
+
+ private byte[] createBinaryErrorResponse(String message) {
+ return Status.newBuilder()
+ .setMessage(message != null ? message : "Unknown error")
+ .build()
+ .toByteArray();
+ }
+}
diff --git
a/hertzbeat-log/src/main/java/org/apache/hertzbeat/log/notice/LogSseFilterCriteria.java
b/hertzbeat-log/src/main/java/org/apache/hertzbeat/log/notice/LogSseFilterCriteria.java
index 42ee0ba155..b42f68aa8a 100644
---
a/hertzbeat-log/src/main/java/org/apache/hertzbeat/log/notice/LogSseFilterCriteria.java
+++
b/hertzbeat-log/src/main/java/org/apache/hertzbeat/log/notice/LogSseFilterCriteria.java
@@ -49,7 +49,13 @@ public class LogSseFilterCriteria {
*/
@Schema(description = "The severity text (also known as log level).",
example = "INFO", accessMode = READ_WRITE)
private String severityText;
-
+
+ /**
+ * Log content text filtering (case-insensitive contains match).
+ */
+ @Schema(description = "Log content text filtering", example = "error
occurred", accessMode = READ_WRITE)
+ private String logContent;
+
/**
* A unique identifier for a trace.
* All spans from the same trace share the same trace_id.
@@ -77,18 +83,30 @@ public class LogSseFilterCriteria {
if (StringUtils.hasText(severityText) &&
!severityText.equalsIgnoreCase(log.getSeverityText())) {
return false;
}
-
+
// Check severity number match (if both are present)
if (severityNumber != null && log.getSeverityNumber() != null
&& !severityNumber.equals(log.getSeverityNumber())) {
return false;
}
-
+
+ // Check log content match
+ if (StringUtils.hasText(logContent)) {
+ Object body = log.getBody();
+ if (body == null) {
+ return false;
+ }
+ String bodyStr = body.toString();
+ if (!StringUtils.hasText(bodyStr) ||
!bodyStr.toLowerCase().contains(logContent.toLowerCase())) {
+ return false;
+ }
+ }
+
// Check trace ID match
if (StringUtils.hasText(traceId) &&
!traceId.equalsIgnoreCase(log.getTraceId())) {
return false;
}
-
+
// Check span ID match
if (StringUtils.hasText(spanId) &&
!spanId.equalsIgnoreCase(log.getSpanId())) {
return false;
diff --git
a/hertzbeat-log/src/main/java/org/apache/hertzbeat/log/service/LogProtocolAdapter.java
b/hertzbeat-log/src/main/java/org/apache/hertzbeat/log/service/LogProtocolAdapter.java
index c12987e2e4..a33e861c00 100644
---
a/hertzbeat-log/src/main/java/org/apache/hertzbeat/log/service/LogProtocolAdapter.java
+++
b/hertzbeat-log/src/main/java/org/apache/hertzbeat/log/service/LogProtocolAdapter.java
@@ -28,7 +28,7 @@ package org.apache.hertzbeat.log.service;
public interface LogProtocolAdapter {
/**
- * Ingest raw log payload pushed from external system.
+ * Ingest log payload pushed from external system.
*
* @param content raw request body string
*/
diff --git
a/hertzbeat-log/src/main/java/org/apache/hertzbeat/log/service/impl/OtlpLogProtocolAdapter.java
b/hertzbeat-log/src/main/java/org/apache/hertzbeat/log/service/impl/OtlpLogProtocolAdapter.java
index 53980f6614..46978af182 100644
---
a/hertzbeat-log/src/main/java/org/apache/hertzbeat/log/service/impl/OtlpLogProtocolAdapter.java
+++
b/hertzbeat-log/src/main/java/org/apache/hertzbeat/log/service/impl/OtlpLogProtocolAdapter.java
@@ -39,7 +39,10 @@ import java.util.List;
import java.util.Map;
/**
- * Adapter for OpenTelemetry OTLP/HTTP JSON log ingestion.
+ * Adapter for OpenTelemetry OTLP/HTTP log ingestion.
+ * Supports both JSON-encoded and binary-encoded Protobuf formats.
+ *
+ * @see <a href="https://opentelemetry.io/docs/specs/otlp/#otlphttp">OTLP/HTTP
Specification</a>
*/
@Slf4j
@Service
@@ -58,26 +61,47 @@ public class OtlpLogProtocolAdapter implements
LogProtocolAdapter {
@Override
public void ingest(String content) {
if (content == null || content.isEmpty()) {
- log.warn("Received empty OTLP log payload - skip processing.");
+ log.warn("Received empty OTLP JSON log payload - skip
processing.");
return;
}
ExportLogsServiceRequest.Builder builder =
ExportLogsServiceRequest.newBuilder();
try {
JsonFormat.parser().ignoringUnknownFields().merge(content,
builder);
ExportLogsServiceRequest request = builder.build();
-
- // Extract LogEntry instances from the request
- List<LogEntry> logEntries = extractLogEntries(request);
- log.debug("Successfully extracted {} log entries from OTLP payload
{}", logEntries.size(), content);
- commonDataQueue.sendLogEntryToStorageBatch(logEntries);
- commonDataQueue.sendLogEntryToAlertBatch(logEntries);
- logEntries.forEach(logSseManager::broadcast);
+ processLogsRequest(request, "JSON");
+ } catch (InvalidProtocolBufferException e) {
+ log.error("Failed to parse OTLP JSON log payload: {}",
e.getMessage());
+ throw new IllegalArgumentException("Invalid OTLP JSON log
content", e);
+ }
+ }
+
+ /**
+ * Ingest binary-encoded Protobuf log payload (OTLP-specific).
+ *
+ * @param content binary-encoded ExportLogsServiceRequest
+ */
+ public void ingestBinary(byte[] content) {
+ if (content == null || content.length == 0) {
+ log.warn("Received empty OTLP binary log payload - skip
processing.");
+ return;
+ }
+ try {
+ ExportLogsServiceRequest request =
ExportLogsServiceRequest.parseFrom(content);
+ processLogsRequest(request, "binary");
} catch (InvalidProtocolBufferException e) {
- log.error("Failed to parse OTLP log payload: {}", e.getMessage());
- throw new IllegalArgumentException("Invalid OTLP log content", e);
+ log.error("Failed to parse OTLP binary log payload: {}",
e.getMessage());
+ throw new IllegalArgumentException("Invalid OTLP binary log
content", e);
}
}
+ private void processLogsRequest(ExportLogsServiceRequest request, String
format) {
+ List<LogEntry> logEntries = extractLogEntries(request);
+ log.debug("Successfully extracted {} log entries from OTLP {}
payload", logEntries.size(), format);
+ commonDataQueue.sendLogEntryToStorageBatch(logEntries);
+ commonDataQueue.sendLogEntryToAlertBatch(logEntries);
+ logEntries.forEach(logSseManager::broadcast);
+ }
+
/**
* Extract LogEntry instances from ExportLogsServiceRequest.
*
diff --git
a/hertzbeat-log/src/test/java/org/apache/hertzbeat/log/controller/LogIngestionControllerTest.java
b/hertzbeat-log/src/test/java/org/apache/hertzbeat/log/controller/LogIngestionControllerTest.java
index 1e41c23b79..cb6eb3678f 100644
---
a/hertzbeat-log/src/test/java/org/apache/hertzbeat/log/controller/LogIngestionControllerTest.java
+++
b/hertzbeat-log/src/test/java/org/apache/hertzbeat/log/controller/LogIngestionControllerTest.java
@@ -18,24 +18,22 @@
package org.apache.hertzbeat.log.controller;
import static org.mockito.ArgumentMatchers.anyString;
+import static org.mockito.Mockito.doNothing;
+import static org.mockito.Mockito.doThrow;
import static org.mockito.Mockito.when;
import static
org.springframework.test.web.servlet.result.MockMvcResultMatchers.jsonPath;
import static
org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;
import static
org.springframework.test.web.servlet.result.MockMvcResultHandlers.print;
import java.util.Arrays;
-import java.util.HashMap;
import java.util.List;
import org.apache.hertzbeat.common.constants.CommonConstants;
-import org.apache.hertzbeat.common.entity.log.LogEntry;
-import org.apache.hertzbeat.common.util.JsonUtil;
import org.apache.hertzbeat.log.service.LogProtocolAdapter;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.mockito.Mock;
-import org.mockito.Mockito;
import org.mockito.junit.jupiter.MockitoExtension;
import org.springframework.http.MediaType;
import org.springframework.test.web.servlet.MockMvc;
@@ -51,34 +49,29 @@ class LogIngestionControllerTest {
private MockMvc mockMvc;
@Mock
- private LogProtocolAdapter otlpAdapter;
+ private LogProtocolAdapter vectorAdapter;
private LogIngestionController logIngestionController;
@BeforeEach
void setUp() {
- List<LogProtocolAdapter> adapters = Arrays.asList(otlpAdapter);
+ List<LogProtocolAdapter> adapters = Arrays.asList(vectorAdapter);
this.logIngestionController = new LogIngestionController(adapters);
this.mockMvc =
MockMvcBuilders.standaloneSetup(logIngestionController).build();
}
@Test
- void testIngestExternLogWithOtlpProtocol() throws Exception {
- LogEntry logEntry = LogEntry.builder()
- .timeUnixNano(1734005477630L)
- .severityNumber(1)
- .severityText("INFO")
- .body("Test log message")
- .attributes(new HashMap<>())
- .build();
+ void testIngestLogWithKnownProtocol() throws Exception {
+ String logContent = "{\"message\":\"Test log message\"}";
- when(otlpAdapter.supportProtocol()).thenReturn("otlp");
+ when(vectorAdapter.supportProtocol()).thenReturn("vector");
+ doNothing().when(vectorAdapter).ingest(anyString());
mockMvc.perform(
MockMvcRequestBuilders
- .post("/api/logs/ingest/otlp")
+ .post("/api/logs/ingest/vector")
.contentType(MediaType.APPLICATION_JSON)
- .content(JsonUtil.toJson(logEntry))
+ .content(logContent)
)
.andDo(print())
.andExpect(status().isOk())
@@ -88,16 +81,16 @@ class LogIngestionControllerTest {
}
@Test
- void testIngestExternLogWithUnsupportedProtocol() throws Exception {
- String unsupportedLogContent = "{\"message\":\"Unsupported protocol
log\"}";
+ void testIngestLogWithUnsupportedProtocol() throws Exception {
+ String logContent = "{\"message\":\"Unsupported protocol log\"}";
- when(otlpAdapter.supportProtocol()).thenReturn("otlp");
+ when(vectorAdapter.supportProtocol()).thenReturn("vector");
mockMvc.perform(
MockMvcRequestBuilders
.post("/api/logs/ingest/unsupported")
.contentType(MediaType.APPLICATION_JSON)
- .content(unsupportedLogContent)
+ .content(logContent)
)
.andExpect(status().isBadRequest())
.andExpect(jsonPath("$.code").value((int)
CommonConstants.FAIL_CODE))
@@ -105,38 +98,15 @@ class LogIngestionControllerTest {
}
@Test
- void testIngestDefaultExternLog() throws Exception {
- LogEntry logEntry = LogEntry.builder()
- .timeUnixNano(1734005477630L)
- .severityNumber(2)
- .severityText("WARN")
- .body("Default protocol log message")
- .attributes(new HashMap<>())
- .build();
-
- when(otlpAdapter.supportProtocol()).thenReturn("otlp");
-
- mockMvc.perform(
- MockMvcRequestBuilders
- .post("/api/logs/ingest")
- .contentType(MediaType.APPLICATION_JSON)
- .content(JsonUtil.toJson(logEntry))
- )
- .andExpect(status().isOk())
- .andExpect(jsonPath("$.code").value((int)
CommonConstants.SUCCESS_CODE))
- .andExpect(jsonPath("$.msg").value("Add extern log success"));
- }
-
- @Test
- void testIngestDefaultExternLogWithAdapterException() throws Exception {
- String logContent = "{\"message\":\"Default log message that will
cause exception\"}";
+ void testIngestLogWithAdapterException() throws Exception {
+ String logContent = "{\"message\":\"Log message that will cause
exception\"}";
- when(otlpAdapter.supportProtocol()).thenReturn("otlp");
- Mockito.doThrow(new IllegalArgumentException("Invalid log
format")).when(otlpAdapter).ingest(anyString());
+ when(vectorAdapter.supportProtocol()).thenReturn("vector");
+ doThrow(new IllegalArgumentException("Invalid log
format")).when(vectorAdapter).ingest(anyString());
mockMvc.perform(
MockMvcRequestBuilders
- .post("/api/logs/ingest")
+ .post("/api/logs/ingest/vector")
.contentType(MediaType.APPLICATION_JSON)
.content(logContent)
)
diff --git
a/hertzbeat-log/src/test/java/org/apache/hertzbeat/log/controller/LogQueryControllerTest.java
b/hertzbeat-log/src/test/java/org/apache/hertzbeat/log/controller/LogQueryControllerTest.java
index 96173c2a24..8e3da88e40 100644
---
a/hertzbeat-log/src/test/java/org/apache/hertzbeat/log/controller/LogQueryControllerTest.java
+++
b/hertzbeat-log/src/test/java/org/apache/hertzbeat/log/controller/LogQueryControllerTest.java
@@ -17,11 +17,10 @@
package org.apache.hertzbeat.log.controller;
+import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.anyInt;
import static org.mockito.ArgumentMatchers.anyLong;
-import static org.mockito.ArgumentMatchers.anyString;
import static org.mockito.ArgumentMatchers.eq;
-import static org.mockito.ArgumentMatchers.isNull;
import static org.mockito.Mockito.when;
import static
org.springframework.test.web.servlet.result.MockMvcResultMatchers.jsonPath;
import static
org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;
@@ -87,10 +86,10 @@ class LogQueryControllerTest {
List<LogEntry> mockLogs = Arrays.asList(logEntry1, logEntry2);
- when(historyDataReader.countLogsByMultipleConditions(anyLong(),
anyLong(), anyString(),
- anyString(), anyInt(), anyString())).thenReturn(2L);
-
when(historyDataReader.queryLogsByMultipleConditionsWithPagination(anyLong(),
anyLong(),
- anyString(), anyString(), anyInt(), anyString(), anyInt(),
anyInt()))
+ when(historyDataReader.countLogsByMultipleConditions(anyLong(),
anyLong(), any(),
+ any(), any(), any(), any())).thenReturn(2L);
+
when(historyDataReader.queryLogsByMultipleConditionsWithPagination(anyLong(),
anyLong(),
+ any(), any(), any(), any(), any(), anyInt(), anyInt()))
.thenReturn(mockLogs);
mockMvc.perform(
@@ -127,10 +126,10 @@ class LogQueryControllerTest {
.build()
);
- when(historyDataReader.countLogsByMultipleConditions(isNull(),
isNull(), isNull(),
- isNull(), isNull(), isNull())).thenReturn(1L);
-
when(historyDataReader.queryLogsByMultipleConditionsWithPagination(isNull(),
isNull(),
- isNull(), isNull(), isNull(), isNull(), eq(0), eq(20)))
+ when(historyDataReader.countLogsByMultipleConditions(any(), any(),
any(),
+ any(), any(), any(), any())).thenReturn(1L);
+
when(historyDataReader.queryLogsByMultipleConditionsWithPagination(any(), any(),
+ any(), any(), any(), any(), any(), eq(0), eq(20)))
.thenReturn(mockLogs);
mockMvc.perform(
@@ -163,8 +162,8 @@ class LogQueryControllerTest {
LogEntry.builder().severityNumber(21).build()
);
- when(historyDataReader.queryLogsByMultipleConditions(isNull(),
isNull(), isNull(),
- isNull(), isNull(), isNull())).thenReturn(mockLogs);
+ when(historyDataReader.queryLogsByMultipleConditions(any(), any(),
any(),
+ any(), any(), any(), any())).thenReturn(mockLogs);
mockMvc.perform(
MockMvcRequestBuilders
@@ -188,8 +187,8 @@ class LogQueryControllerTest {
LogEntry.builder().severityNumber(17).build()
);
-
when(historyDataReader.queryLogsByMultipleConditions(eq(1734005477000L),
eq(1734005478000L),
- isNull(), isNull(), isNull(), isNull())).thenReturn(mockLogs);
+
when(historyDataReader.queryLogsByMultipleConditions(eq(1734005477000L),
eq(1734005478000L),
+ any(), any(), any(), any(), any())).thenReturn(mockLogs);
mockMvc.perform(
MockMvcRequestBuilders
@@ -217,8 +216,8 @@ class LogQueryControllerTest {
LogEntry.builder().build() // null values
);
- when(historyDataReader.queryLogsByMultipleConditions(isNull(),
isNull(), isNull(),
- isNull(), isNull(), isNull())).thenReturn(mockLogs);
+ when(historyDataReader.queryLogsByMultipleConditions(any(), any(),
any(),
+ any(), any(), any(), any())).thenReturn(mockLogs);
mockMvc.perform(
MockMvcRequestBuilders
@@ -244,8 +243,8 @@ class LogQueryControllerTest {
LogEntry.builder().timeUnixNano(1734009077630000000L).build()
);
- when(historyDataReader.queryLogsByMultipleConditions(isNull(),
isNull(), isNull(),
- isNull(), isNull(), isNull())).thenReturn(mockLogs);
+ when(historyDataReader.queryLogsByMultipleConditions(any(), any(),
any(),
+ any(), any(), any(), any())).thenReturn(mockLogs);
mockMvc.perform(
MockMvcRequestBuilders
@@ -263,8 +262,8 @@ class LogQueryControllerTest {
LogEntry.builder().timeUnixNano(null).build() // This should
be filtered out
);
- when(historyDataReader.queryLogsByMultipleConditions(isNull(),
isNull(), isNull(),
- isNull(), isNull(), isNull())).thenReturn(mockLogs);
+ when(historyDataReader.queryLogsByMultipleConditions(any(), any(),
any(),
+ any(), any(), any(), any())).thenReturn(mockLogs);
mockMvc.perform(
MockMvcRequestBuilders
diff --git
a/hertzbeat-log/src/test/java/org/apache/hertzbeat/log/controller/OtlpLogControllerTest.java
b/hertzbeat-log/src/test/java/org/apache/hertzbeat/log/controller/OtlpLogControllerTest.java
new file mode 100644
index 0000000000..d2e67bcb8e
--- /dev/null
+++
b/hertzbeat-log/src/test/java/org/apache/hertzbeat/log/controller/OtlpLogControllerTest.java
@@ -0,0 +1,131 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hertzbeat.log.controller;
+
+import static org.mockito.ArgumentMatchers.any;
+import static org.mockito.ArgumentMatchers.anyString;
+import static org.mockito.Mockito.doNothing;
+import static org.mockito.Mockito.doThrow;
+import static
org.springframework.test.web.servlet.result.MockMvcResultMatchers.content;
+import static
org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;
+import static
org.springframework.test.web.servlet.result.MockMvcResultHandlers.print;
+
+import org.apache.hertzbeat.log.service.impl.OtlpLogProtocolAdapter;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.extension.ExtendWith;
+import org.mockito.Mock;
+import org.mockito.junit.jupiter.MockitoExtension;
+import org.springframework.http.MediaType;
+import org.springframework.test.web.servlet.MockMvc;
+import org.springframework.test.web.servlet.request.MockMvcRequestBuilders;
+import org.springframework.test.web.servlet.setup.MockMvcBuilders;
+
+/**
+ * Unit test for {@link OtlpLogController}
+ */
+@ExtendWith(MockitoExtension.class)
+class OtlpLogControllerTest {
+
+ private static final String CONTENT_TYPE_PROTOBUF =
"application/x-protobuf";
+
+ private MockMvc mockMvc;
+
+ @Mock
+ private OtlpLogProtocolAdapter otlpLogProtocolAdapter;
+
+ private OtlpLogController otlpLogController;
+
+ @BeforeEach
+ void setUp() {
+ this.otlpLogController = new OtlpLogController(otlpLogProtocolAdapter);
+ this.mockMvc =
MockMvcBuilders.standaloneSetup(otlpLogController).build();
+ }
+
+ @Test
+ void testIngestJsonLogsSuccess() throws Exception {
+ String jsonContent = "{\"resourceLogs\":[]}";
+
+ doNothing().when(otlpLogProtocolAdapter).ingest(anyString());
+
+ mockMvc.perform(
+ MockMvcRequestBuilders
+ .post("/api/logs/otlp/v1/logs")
+ .contentType(MediaType.APPLICATION_JSON)
+ .content(jsonContent)
+ )
+ .andDo(print())
+ .andExpect(status().isOk())
+ .andExpect(content().contentType(MediaType.APPLICATION_JSON))
+ .andReturn();
+ }
+
+ @Test
+ void testIngestJsonLogsFailure() throws Exception {
+ String jsonContent = "{\"invalid\":\"content\"}";
+
+ doThrow(new IllegalArgumentException("Invalid OTLP JSON log content"))
+ .when(otlpLogProtocolAdapter).ingest(anyString());
+
+ mockMvc.perform(
+ MockMvcRequestBuilders
+ .post("/api/logs/otlp/v1/logs")
+ .contentType(MediaType.APPLICATION_JSON)
+ .content(jsonContent)
+ )
+ .andExpect(status().isBadRequest())
+ .andExpect(content().contentType(MediaType.APPLICATION_JSON))
+ .andReturn();
+ }
+
+ @Test
+ void testIngestBinaryLogsSuccess() throws Exception {
+ byte[] binaryContent = new byte[]{0x0a, 0x0b, 0x0c};
+
+
doNothing().when(otlpLogProtocolAdapter).ingestBinary(any(byte[].class));
+
+ mockMvc.perform(
+ MockMvcRequestBuilders
+ .post("/api/logs/otlp/v1/logs")
+ .contentType(CONTENT_TYPE_PROTOBUF)
+ .content(binaryContent)
+ )
+ .andDo(print())
+ .andExpect(status().isOk())
+ .andExpect(content().contentType(CONTENT_TYPE_PROTOBUF))
+ .andReturn();
+ }
+
+ @Test
+ void testIngestBinaryLogsFailure() throws Exception {
+ byte[] binaryContent = new byte[]{0x0a, 0x0b, 0x0c};
+
+ doThrow(new IllegalArgumentException("Invalid OTLP binary log
content"))
+ .when(otlpLogProtocolAdapter).ingestBinary(any(byte[].class));
+
+ mockMvc.perform(
+ MockMvcRequestBuilders
+ .post("/api/logs/otlp/v1/logs")
+ .contentType(CONTENT_TYPE_PROTOBUF)
+ .content(binaryContent)
+ )
+ .andExpect(status().isBadRequest())
+ .andExpect(content().contentType(CONTENT_TYPE_PROTOBUF))
+ .andReturn();
+ }
+}
diff --git
a/hertzbeat-log/src/test/java/org/apache/hertzbeat/log/notice/LogSseFilterCriteriaTest.java
b/hertzbeat-log/src/test/java/org/apache/hertzbeat/log/notice/LogSseFilterCriteriaTest.java
index 698990f972..b06284d9ba 100644
---
a/hertzbeat-log/src/test/java/org/apache/hertzbeat/log/notice/LogSseFilterCriteriaTest.java
+++
b/hertzbeat-log/src/test/java/org/apache/hertzbeat/log/notice/LogSseFilterCriteriaTest.java
@@ -105,16 +105,44 @@ class LogSseFilterCriteriaTest {
// Test Span ID filter - match
filterCriteria.setSpanId("1234567890abcdef");
assertTrue(filterCriteria.matches(testLogEntry));
-
+
// Test Span ID filter - no match
filterCriteria.setSpanId("abcdef1234567890");
assertFalse(filterCriteria.matches(testLogEntry));
-
+
// Test Span ID filter - case insensitive
filterCriteria.setSpanId("1234567890ABCDEF");
assertTrue(filterCriteria.matches(testLogEntry));
}
+ @Test
+ void testMatchesWithLogContentFilter() {
+ // Test log content filter - match
+ filterCriteria.setLogContent("Test log");
+ assertTrue(filterCriteria.matches(testLogEntry));
+
+ // Test log content filter - no match
+ filterCriteria.setLogContent("Error message");
+ assertFalse(filterCriteria.matches(testLogEntry));
+
+ // Test log content filter - case insensitive
+ filterCriteria.setLogContent("test log");
+ assertTrue(filterCriteria.matches(testLogEntry));
+
+ // Test log content filter - partial match
+ filterCriteria.setLogContent("message");
+ assertTrue(filterCriteria.matches(testLogEntry));
+
+ // Test log content filter with null body
+ LogEntry nullBodyLog = LogEntry.builder()
+ .severityNumber(9)
+ .severityText("INFO")
+ .body(null)
+ .build();
+ filterCriteria.setLogContent("test");
+ assertFalse(filterCriteria.matches(nullBodyLog));
+ }
+
@Test
void testMatchesWithMultipleFilters() {
// Test multiple filter combinations - all match
@@ -163,14 +191,15 @@ class LogSseFilterCriteriaTest {
void testConstructorWithAllParameters() {
// Test constructor with all parameters
LogSseFilterCriteria criteria = new LogSseFilterCriteria(
- 9, "INFO", "1234567890abcdef1234567890abcdef",
"1234567890abcdef"
+ 9, "INFO", null, "1234567890abcdef1234567890abcdef",
"1234567890abcdef"
);
-
+
assertEquals(9, criteria.getSeverityNumber());
assertEquals("INFO", criteria.getSeverityText());
+ assertEquals(null, criteria.getLogContent());
assertEquals("1234567890abcdef1234567890abcdef",
criteria.getTraceId());
assertEquals("1234567890abcdef", criteria.getSpanId());
-
+
// Test matching
assertTrue(criteria.matches(testLogEntry));
}
@@ -179,17 +208,19 @@ class LogSseFilterCriteriaTest {
void testNoArgsConstructorAndSetters() {
// Test no-args constructor and setter methods
LogSseFilterCriteria criteria = new LogSseFilterCriteria();
-
+
criteria.setSeverityNumber(9);
criteria.setSeverityText("INFO");
+ criteria.setLogContent("Test log");
criteria.setTraceId("1234567890abcdef1234567890abcdef");
criteria.setSpanId("1234567890abcdef");
-
+
assertEquals(9, criteria.getSeverityNumber());
assertEquals("INFO", criteria.getSeverityText());
+ assertEquals("Test log", criteria.getLogContent());
assertEquals("1234567890abcdef1234567890abcdef",
criteria.getTraceId());
assertEquals("1234567890abcdef", criteria.getSpanId());
-
+
// Test matching
assertTrue(criteria.matches(testLogEntry));
}
diff --git
a/hertzbeat-log/src/test/java/org/apache/hertzbeat/log/service/impl/OtlpLogProtocolAdapterTest.java
b/hertzbeat-log/src/test/java/org/apache/hertzbeat/log/service/impl/OtlpLogProtocolAdapterTest.java
index fa136db240..0a570155bf 100644
---
a/hertzbeat-log/src/test/java/org/apache/hertzbeat/log/service/impl/OtlpLogProtocolAdapterTest.java
+++
b/hertzbeat-log/src/test/java/org/apache/hertzbeat/log/service/impl/OtlpLogProtocolAdapterTest.java
@@ -80,6 +80,47 @@ class OtlpLogProtocolAdapterTest {
verifyNoInteractions(commonDataQueue, logSseManager);
}
+ @Test
+ void testIngestBinaryWithNullContent() {
+ adapter.ingestBinary(null);
+ verifyNoInteractions(commonDataQueue, logSseManager);
+ }
+
+ @Test
+ void testIngestBinaryWithEmptyContent() {
+ adapter.ingestBinary(new byte[0]);
+ verifyNoInteractions(commonDataQueue, logSseManager);
+ }
+
+ @Test
+ void testIngestBinaryWithValidOtlpLogData() throws Exception {
+ byte[] binaryPayload = createValidOtlpLogBinaryPayload();
+
+ adapter.ingestBinary(binaryPayload);
+
+ ArgumentCaptor<List<LogEntry>> listCaptor =
ArgumentCaptor.forClass(List.class);
+ verify(commonDataQueue,
times(1)).sendLogEntryToStorageBatch(listCaptor.capture());
+ verify(commonDataQueue, times(1)).sendLogEntryToAlertBatch(anyList());
+ verify(logSseManager, times(1)).broadcast(any(LogEntry.class));
+
+ List<LogEntry> capturedList = listCaptor.getValue();
+ assertNotNull(capturedList);
+ assertEquals(1, capturedList.size());
+
+ LogEntry capturedEntry = capturedList.get(0);
+ assertEquals("binary-test-service",
capturedEntry.getResource().get("service_name"));
+ assertEquals("binary log message", capturedEntry.getBody());
+ assertEquals("INFO", capturedEntry.getSeverityText());
+ }
+
+ @Test
+ void testIngestBinaryWithInvalidContent() {
+ byte[] invalidBinary = "not a valid protobuf".getBytes();
+
+ assertThrows(IllegalArgumentException.class, () ->
adapter.ingestBinary(invalidBinary));
+ verifyNoInteractions(commonDataQueue, logSseManager);
+ }
+
@Test
void testIngestWithValidOtlpLogData() throws Exception {
String otlpPayload = createValidOtlpLogPayload();
@@ -315,4 +356,32 @@ class OtlpLogProtocolAdapterTest {
ExportLogsServiceRequest request =
ExportLogsServiceRequest.newBuilder().build();
return JsonFormat.printer().print(request);
}
+
+ private byte[] createValidOtlpLogBinaryPayload() {
+ ExportLogsServiceRequest request =
ExportLogsServiceRequest.newBuilder()
+ .addResourceLogs(ResourceLogs.newBuilder()
+ .setResource(Resource.newBuilder()
+ .addAttributes(KeyValue.newBuilder()
+ .setKey("service.name")
+
.setValue(AnyValue.newBuilder().setStringValue("binary-test-service").build())
+ .build())
+ .build())
+ .addScopeLogs(ScopeLogs.newBuilder()
+ .setScope(InstrumentationScope.newBuilder()
+ .setName("binary-test-scope")
+ .setVersion("1.0.0")
+ .build())
+ .addLogRecords(LogRecord.newBuilder()
+ .setTimeUnixNano(System.currentTimeMillis() *
1_000_000)
+ .setObservedTimeUnixNano(System.currentTimeMillis() *
1_000_000)
+ .setSeverityNumberValue(9)
+ .setSeverityText("INFO")
+ .setBody(AnyValue.newBuilder().setStringValue("binary
log message").build())
+ .build())
+ .build())
+ .build())
+ .build();
+
+ return request.toByteArray();
+ }
}
diff --git
a/hertzbeat-warehouse/src/main/java/org/apache/hertzbeat/warehouse/store/history/tsdb/HistoryDataReader.java
b/hertzbeat-warehouse/src/main/java/org/apache/hertzbeat/warehouse/store/history/tsdb/HistoryDataReader.java
index 89fe0f3adb..567e27177e 100644
---
a/hertzbeat-warehouse/src/main/java/org/apache/hertzbeat/warehouse/store/history/tsdb/HistoryDataReader.java
+++
b/hertzbeat-warehouse/src/main/java/org/apache/hertzbeat/warehouse/store/history/tsdb/HistoryDataReader.java
@@ -65,45 +65,67 @@ public interface HistoryDataReader {
* @param spanId span ID filter
* @param severityNumber severity number filter
* @param severityText severity text filter
+ * @param searchContent search content in log body
* @return filtered log entries
*/
default List<LogEntry> queryLogsByMultipleConditions(Long startTime, Long
endTime, String traceId,
String spanId,
Integer severityNumber,
- String severityText) {
+ String severityText,
String searchContent) {
throw new UnsupportedOperationException("query logs by multiple
conditions is not supported");
}
/**
- * Query logs with multiple filter conditions and pagination
+ * Query logs with multiple filter conditions and pagination (Legacy)
+ */
+ default List<LogEntry> queryLogsByMultipleConditionsWithPagination(Long
startTime, Long endTime, String traceId,
+ String
spanId, Integer severityNumber,
+ String
severityText, Integer offset, Integer limit) {
+ return queryLogsByMultipleConditionsWithPagination(startTime, endTime,
traceId, spanId, severityNumber, severityText, null, offset, limit);
+ }
+
+ /**
+ * Query logs with multiple filter conditions and pagination including
search content
* @param startTime start time in milliseconds
* @param endTime end time in milliseconds
* @param traceId trace ID filter
* @param spanId span ID filter
* @param severityNumber severity number filter
* @param severityText severity text filter
+ * @param searchContent search content in log body
* @param offset pagination offset
* @param limit pagination limit
* @return filtered log entries with pagination
*/
default List<LogEntry> queryLogsByMultipleConditionsWithPagination(Long
startTime, Long endTime, String traceId,
String
spanId, Integer severityNumber,
- String
severityText, Integer offset, Integer limit) {
+ String
severityText, String searchContent,
+ Integer
offset, Integer limit) {
throw new UnsupportedOperationException("query logs by multiple
conditions with pagination is not supported");
}
/**
- * Count logs with multiple filter conditions
+ * Count logs with multiple filter conditions (Legacy)
+ */
+ default long countLogsByMultipleConditions(Long startTime, Long endTime,
String traceId,
+ String spanId, Integer
severityNumber,
+ String severityText) {
+ return countLogsByMultipleConditions(startTime, endTime, traceId,
spanId, severityNumber, severityText, null);
+ }
+
+ /**
+ * Count logs with multiple filter conditions including search content
* @param startTime start time in milliseconds
* @param endTime end time in milliseconds
* @param traceId trace ID filter
* @param spanId span ID filter
* @param severityNumber severity number filter
* @param severityText severity text filter
+ * @param searchContent search content in log body
* @return count of matching log entries
*/
default long countLogsByMultipleConditions(Long startTime, Long endTime,
String traceId,
String spanId, Integer
severityNumber,
- String severityText) {
+ String severityText, String
searchContent) {
throw new UnsupportedOperationException("count logs by multiple
conditions is not supported");
}
-}
+}
\ No newline at end of file
diff --git
a/hertzbeat-warehouse/src/main/java/org/apache/hertzbeat/warehouse/store/history/tsdb/greptime/GreptimeDbDataStorage.java
b/hertzbeat-warehouse/src/main/java/org/apache/hertzbeat/warehouse/store/history/tsdb/greptime/GreptimeDbDataStorage.java
index ada81ec742..8443fd255a 100644
---
a/hertzbeat-warehouse/src/main/java/org/apache/hertzbeat/warehouse/store/history/tsdb/greptime/GreptimeDbDataStorage.java
+++
b/hertzbeat-warehouse/src/main/java/org/apache/hertzbeat/warehouse/store/history/tsdb/greptime/GreptimeDbDataStorage.java
@@ -541,10 +541,10 @@ public class GreptimeDbDataStorage extends
AbstractHistoryDataStorage {
@Override
public List<LogEntry> queryLogsByMultipleConditions(Long startTime, Long
endTime, String traceId,
String spanId, Integer
severityNumber,
- String severityText) {
+ String severityText,
String searchContent) {
try {
StringBuilder sql = new StringBuilder("SELECT * FROM
").append(LOG_TABLE_NAME);
- buildWhereConditions(sql, startTime, endTime, traceId, spanId,
severityNumber, severityText);
+ buildWhereConditions(sql, startTime, endTime, traceId, spanId,
severityNumber, severityText, searchContent);
sql.append(" ORDER BY time_unix_nano DESC");
List<Map<String, Object>> rows =
greptimeSqlQueryExecutor.execute(sql.toString());
@@ -558,10 +558,11 @@ public class GreptimeDbDataStorage extends
AbstractHistoryDataStorage {
@Override
public List<LogEntry> queryLogsByMultipleConditionsWithPagination(Long
startTime, Long endTime, String traceId,
String
spanId, Integer severityNumber,
- String
severityText, Integer offset, Integer limit) {
+ String
severityText, String searchContent,
+ Integer
offset, Integer limit) {
try {
StringBuilder sql = new StringBuilder("SELECT * FROM
").append(LOG_TABLE_NAME);
- buildWhereConditions(sql, startTime, endTime, traceId, spanId,
severityNumber, severityText);
+ buildWhereConditions(sql, startTime, endTime, traceId, spanId,
severityNumber, severityText, searchContent);
sql.append(" ORDER BY time_unix_nano DESC");
// Add pagination
@@ -583,10 +584,10 @@ public class GreptimeDbDataStorage extends
AbstractHistoryDataStorage {
@Override
public long countLogsByMultipleConditions(Long startTime, Long endTime,
String traceId,
String spanId, Integer
severityNumber,
- String severityText) {
+ String severityText, String
searchContent) {
try {
StringBuilder sql = new StringBuilder("SELECT COUNT(*) as count
FROM ").append(LOG_TABLE_NAME);
- buildWhereConditions(sql, startTime, endTime, traceId, spanId,
severityNumber, severityText);
+ buildWhereConditions(sql, startTime, endTime, traceId, spanId,
severityNumber, severityText, searchContent);
List<Map<String, Object>> rows =
greptimeSqlQueryExecutor.execute(sql.toString());
if (rows != null && !rows.isEmpty()) {
@@ -623,7 +624,7 @@ public class GreptimeDbDataStorage extends
AbstractHistoryDataStorage {
* @param severityNumber severity number
*/
private void buildWhereConditions(StringBuilder sql, Long startTime, Long
endTime, String traceId,
- String spanId, Integer severityNumber,
String severityText) {
+ String spanId, Integer severityNumber,
String severityText, String searchContent) {
List<String> conditions = new ArrayList<>();
// Time range condition
@@ -651,6 +652,11 @@ public class GreptimeDbDataStorage extends
AbstractHistoryDataStorage {
conditions.add("severity_text = '" + safeString(severityText) +
"'");
}
+ // Search content condition - search in body field
+ if (StringUtils.hasText(searchContent)) {
+ conditions.add("body LIKE '%" + safeString(searchContent) + "%'");
+ }
+
// Add WHERE clause if there are conditions
if (!conditions.isEmpty()) {
sql.append(" WHERE ").append(String.join(" AND ", conditions));
diff --git
a/hertzbeat-warehouse/src/test/java/org/apache/hertzbeat/warehouse/store/history/tsdb/greptime/GreptimeDbDataStorageTest.java
b/hertzbeat-warehouse/src/test/java/org/apache/hertzbeat/warehouse/store/history/tsdb/greptime/GreptimeDbDataStorageTest.java
index 45a1234c11..c28130c493 100644
---
a/hertzbeat-warehouse/src/test/java/org/apache/hertzbeat/warehouse/store/history/tsdb/greptime/GreptimeDbDataStorageTest.java
+++
b/hertzbeat-warehouse/src/test/java/org/apache/hertzbeat/warehouse/store/history/tsdb/greptime/GreptimeDbDataStorageTest.java
@@ -82,7 +82,7 @@ class GreptimeDbDataStorageTest {
@Mock
private GreptimeDB greptimeDb;
-
+
private GreptimeDbDataStorage greptimeDbDataStorage;
@BeforeEach
@@ -120,14 +120,14 @@ class GreptimeDbDataStorageTest {
void testSaveData() {
try (MockedStatic<GreptimeDB> mockedStatic =
mockStatic(GreptimeDB.class)) {
mockedStatic.when(() ->
GreptimeDB.create(any())).thenReturn(greptimeDb);
-
+
// Mock the write result
@SuppressWarnings("unchecked")
Result<WriteOk, Err> mockResult = mock(Result.class);
when(mockResult.isOk()).thenReturn(true);
CompletableFuture<Result<WriteOk, Err>> mockFuture =
CompletableFuture.completedFuture(mockResult);
when(greptimeDb.write(any(Table.class))).thenReturn(mockFuture);
-
+
greptimeDbDataStorage = new
GreptimeDbDataStorage(greptimeProperties, restTemplate,
greptimeSqlQueryExecutor);
// Test with valid metrics data
@@ -153,7 +153,7 @@ class GreptimeDbDataStorageTest {
@Test
void testGetHistoryMetricData() {
greptimeDbDataStorage = new GreptimeDbDataStorage(greptimeProperties,
restTemplate, greptimeSqlQueryExecutor);
-
+
PromQlQueryContent content = createMockPromQlQueryContent();
ResponseEntity<PromQlQueryContent> responseEntity = new
ResponseEntity<>(content, HttpStatus.OK);
@@ -172,14 +172,14 @@ class GreptimeDbDataStorageTest {
void testSaveLogData() {
try (MockedStatic<GreptimeDB> mockedStatic =
mockStatic(GreptimeDB.class)) {
mockedStatic.when(() ->
GreptimeDB.create(any())).thenReturn(greptimeDb);
-
+
// Mock the write result
@SuppressWarnings("unchecked")
Result<WriteOk, Err> mockResult = mock(Result.class);
when(mockResult.isOk()).thenReturn(true);
CompletableFuture<Result<WriteOk, Err>> mockFuture =
CompletableFuture.completedFuture(mockResult);
when(greptimeDb.write(any(Table.class))).thenReturn(mockFuture);
-
+
greptimeDbDataStorage = new
GreptimeDbDataStorage(greptimeProperties, restTemplate,
greptimeSqlQueryExecutor);
LogEntry logEntry = createMockLogEntry();
@@ -196,10 +196,10 @@ class GreptimeDbDataStorageTest {
greptimeDbDataStorage = new
GreptimeDbDataStorage(greptimeProperties, restTemplate,
greptimeSqlQueryExecutor);
List<Map<String, Object>> mockLogRows = createMockLogRows();
when(greptimeSqlQueryExecutor.execute(anyString())).thenReturn(mockLogRows);
-
+
// Test basic query
List<LogEntry> result =
greptimeDbDataStorage.queryLogsByMultipleConditions(
- System.currentTimeMillis() - 3600000,
System.currentTimeMillis(), "trace123", "span456", 1, "INFO"
+ System.currentTimeMillis() - 3600000,
System.currentTimeMillis(), "trace123", "span456", 1, "INFO", "content"
);
assertNotNull(result);
assertEquals(1, result.size());
@@ -209,34 +209,34 @@ class GreptimeDbDataStorageTest {
List<Map<String, Object>> mockCountResult =
List.of(Map.of("count", 5L));
when(greptimeSqlQueryExecutor.execute(anyString())).thenReturn(mockCountResult);
long count = greptimeDbDataStorage.countLogsByMultipleConditions(
- System.currentTimeMillis() - 3600000,
System.currentTimeMillis(), null, null, null, null
+ System.currentTimeMillis() - 3600000,
System.currentTimeMillis(), null, null, null, null, null
);
assertEquals(5L, count);
-
+
// Test count query with executor error
when(greptimeSqlQueryExecutor.execute(anyString())).thenThrow(new
RuntimeException("Database error"));
- long errorCount =
greptimeDbDataStorage.countLogsByMultipleConditions(System.currentTimeMillis()
- 3600000, System.currentTimeMillis(), null, null, null, null);
+ long errorCount =
greptimeDbDataStorage.countLogsByMultipleConditions(System.currentTimeMillis()
- 3600000, System.currentTimeMillis(), null, null, null, null, null);
assertEquals(0L, errorCount);
}
}
-
+
@Test
void testQueryLogsWithPagination() {
try (MockedStatic<GreptimeDB> mockedStatic =
mockStatic(GreptimeDB.class)) {
mockedStatic.when(() ->
GreptimeDB.create(any())).thenReturn(greptimeDb);
greptimeDbDataStorage = new
GreptimeDbDataStorage(greptimeProperties, restTemplate,
greptimeSqlQueryExecutor);
when(greptimeSqlQueryExecutor.execute(anyString())).thenReturn(createMockLogRows());
-
+
ArgumentCaptor<String> sqlCaptor =
ArgumentCaptor.forClass(String.class);
greptimeDbDataStorage.queryLogsByMultipleConditionsWithPagination(
System.currentTimeMillis() - 3600000,
System.currentTimeMillis(),
- null, null, null, null, 1, 10
+ null, null, null, null, null, 1, 10
);
verify(greptimeSqlQueryExecutor).execute(sqlCaptor.capture());
String capturedSql = sqlCaptor.getValue();
-
+
assertTrue(capturedSql.toLowerCase().contains("limit 10"));
assertTrue(capturedSql.toLowerCase().contains("offset 1"));
}
@@ -299,7 +299,7 @@ class GreptimeDbDataStorageTest {
lenient().when(mockMetricsData.getCode()).thenReturn(CollectRep.Code.SUCCESS);
lenient().when(mockMetricsData.getMetrics()).thenReturn("cpu");
lenient().when(mockMetricsData.getId()).thenReturn(1L);
-
+
if (!hasValues) {
lenient().when(mockMetricsData.getValues()).thenReturn(Collections.emptyList());
return mockMetricsData;
@@ -311,36 +311,36 @@ class GreptimeDbDataStorageTest {
lenient().when(mockField1.getName()).thenReturn("usage");
lenient().when(mockField1.getLabel()).thenReturn(false);
lenient().when(mockField1.getType()).thenReturn((int)
CommonConstants.TYPE_NUMBER);
-
+
CollectRep.Field mockField2 = mock(CollectRep.Field.class);
lenient().when(mockField2.getName()).thenReturn("instance");
lenient().when(mockField2.getLabel()).thenReturn(true);
lenient().when(mockField2.getType()).thenReturn((int)
CommonConstants.TYPE_STRING);
-
+
lenient().when(mockMetricsData.getFields()).thenReturn(List.of(mockField1,
mockField2));
-
+
// Create ValueRow mock
CollectRep.ValueRow mockValueRow = mock(CollectRep.ValueRow.class);
lenient().when(mockValueRow.getColumnsList()).thenReturn(List.of("server1",
"85.5"));
-
+
lenient().when(mockMetricsData.getValues()).thenReturn(List.of(mockValueRow));
// Mock RowWrapper for readRow()
RowWrapper mockRowWrapper = mock(RowWrapper.class);
lenient().when(mockRowWrapper.hasNextRow()).thenReturn(true, false);
lenient().when(mockRowWrapper.nextRow()).thenReturn(mockRowWrapper);
-
+
// Mock cell stream
ArrowCell mockCell1 = mock(ArrowCell.class);
lenient().when(mockCell1.getValue()).thenReturn("85.5");
lenient().when(mockCell1.getMetadataAsBoolean(any())).thenReturn(false);
lenient().when(mockCell1.getMetadataAsByte(any())).thenReturn(CommonConstants.TYPE_NUMBER);
-
+
ArrowCell mockCell2 = mock(ArrowCell.class);
lenient().when(mockCell2.getValue()).thenReturn("server1");
lenient().when(mockCell2.getMetadataAsBoolean(any())).thenReturn(true);
lenient().when(mockCell2.getMetadataAsByte(any())).thenReturn(CommonConstants.TYPE_STRING);
-
+
lenient().when(mockRowWrapper.cellStream()).thenReturn(java.util.stream.Stream.of(mockCell1,
mockCell2));
lenient().when(mockMetricsData.readRow()).thenReturn(mockRowWrapper);
@@ -384,7 +384,7 @@ class GreptimeDbDataStorageTest {
row.put("body", "\"Test log message\"");
row.put("trace_id", "trace123");
row.put("span_id", "span456");
-
+
return List.of(row);
}
-}
\ No newline at end of file
+}
diff --git a/home/docs/help/log_integration.md
b/home/docs/help/log_integration.md
index ac1e9b9884..6bdd2892f3 100644
--- a/home/docs/help/log_integration.md
+++ b/home/docs/help/log_integration.md
@@ -22,7 +22,7 @@ The log integration feature is currently in Beta
(experimental) stage. There may
HertzBeat currently supports data integration from the following third-party
log platforms:
-- **OTLP**: Support standard OpenTelemetry Log Protocol (OTLP) HTTP/JSON
format, can directly receive log data from OpenTelemetry Collector and various
applications that support OTLP.
+- **OTLP**: Support standard OpenTelemetry Log Protocol (OTLP) HTTP format,
can directly receive log data from OpenTelemetry Collector and various
applications that support OTLP.
- **More Protocol Support**: HertzBeat is actively expanding its log
integration support, including Filebeat, Vector, Loki, etc. If you can't find
the integration you need temporarily, the active community can also help you
add it.
You can view specific integration methods and configuration examples through
HertzBeat's "Log Integration" interface.
@@ -31,32 +31,24 @@ You can view specific integration methods and configuration
examples through Her
## OpenTelemetry OTLP Protocol Integration
-### API Endpoints
+### API Endpoint
-HertzBeat provides the following interfaces for receiving OTLP log data:
-
-**Protocol-specific Interface**:
-
-```text
-POST /api/logs/ingest/otlp
-```
-
-**Default Interface** (automatically uses OTLP protocol):
+HertzBeat provides the following interface for receiving OTLP log data:
```text
-POST /api/logs/ingest
+POST /api/logs/otlp/v1/logs
```
### Request Configuration
#### Request Headers
-- `Content-Type`: `application/json`
+- `Content-Type`: `application/json` or `application/x-protobuf`
- `Authorization`: `Bearer {token}`
#### Request Body Format
-Support standard OTLP JSON format log data:
+Supports standard OTLP JSON-Protobuf format or Binary Protobuf format log data:
```json
{
@@ -120,7 +112,7 @@ Add HertzBeat as a log export target in the OpenTelemetry
Collector configuratio
```yaml
exporters:
otlphttp:
- logs_endpoint: http://{hertzbeat_host}:1157/api/logs/ingest/otlp
+ logs_endpoint: http://{hertzbeat_host}:1157/api/logs/otlp/v1/logs
compression: none
encoding: json
headers:
@@ -172,7 +164,7 @@ Application and environment information can be set through
`resource.attributes`
#### Log Format Error
-- **OTLP Format**: Ensure standard OTLP JSON format is sent
+- **OTLP Format**: Ensure standard OTLP JSON-Protobuf or Binary Protobuf
format is sent
- **Timestamp Format**: Check if timestamp format is Unix timestamp with
nanosecond precision
- **Log Level**: Verify severityNumber value range (1-24)
- **Data Type**: Ensure data types of each field comply with OTLP specification
diff --git
a/home/i18n/zh-cn/docusaurus-plugin-content-docs/current/help/log_integration.md
b/home/i18n/zh-cn/docusaurus-plugin-content-docs/current/help/log_integration.md
index 8428b03583..bb28f58648 100644
---
a/home/i18n/zh-cn/docusaurus-plugin-content-docs/current/help/log_integration.md
+++
b/home/i18n/zh-cn/docusaurus-plugin-content-docs/current/help/log_integration.md
@@ -22,7 +22,7 @@ keywords: [开源监控, 日志集成, 日志管理, 多源日志]
HertzBeat 当前已支持以下协议进行日志数据接入:
-- **OTLP**:支持标准的 OpenTelemetry 日志协议 (OTLP) HTTP/JSON 格式,可直接接收来自 OpenTelemetry
Collector 和各种支持 OTLP 的应用程序的日志数据。
+- **OTLP**:支持标准的 OpenTelemetry 日志协议 (OTLP) HTTP 格式,可直接接收来自 OpenTelemetry
Collector 和各种支持 OTLP 的应用程序的日志数据。
- **更多协议支持**:HertzBeat 正在积极扩展其日志集成支持,包括 Filebeat、Vector、Loki
等。如果暂时没有找到你需要的集成,活跃的社区也可以协助你添加。
你可以通过 HertzBeat 的"日志集成"界面查看具体的接入方式和配置示例。
@@ -35,28 +35,20 @@ HertzBeat 当前已支持以下协议进行日志数据接入:
HertzBeat 提供以下接口用于接收 OTLP 日志数据:
-**指定协议接口**:
-
-```text
-POST /api/logs/ingest/otlp
-```
-
-**默认接口**(自动使用OTLP协议):
-
```text
-POST /api/logs/ingest
+POST /api/logs/otlp/v1/logs
```
### 请求配置
#### 请求头
-- `Content-Type`: `application/json`
+- `Content-Type`: `application/json` 或 `application/x-protobuf`
- `Authorization`: `Bearer {token}`
#### 请求体格式
-支持标准的 OTLP JSON 格式日志数据:
+支持标准的 OTLP JSON-Protobuf 格式日志数据或者 Binary Protobuf 格式日志数据:
```json
{
@@ -120,7 +112,7 @@ POST /api/logs/ingest
```yaml
exporters:
otlphttp:
- logs_endpoint: http://{hertzbeat_host}:1157/api/logs/ingest/otlp
+ logs_endpoint: http://{hertzbeat_host}:1157/api/logs/otlp/v1/logs
compression: none
encoding: json
headers:
@@ -172,7 +164,7 @@ service:
#### 日志格式错误
-- **OTLP格式**:确保发送的是标准OTLP JSON格式
+- **OTLP格式**:确保发送的是标准 OTLP JSON-Protobuf 或 Binary Protobuf 格式
- **时间戳格式**:检查时间戳格式是否为纳秒精度的Unix时间戳
- **日志级别**:验证 severityNumber 值范围(1-24)
- **数据类型**:确保各字段的数据类型符合OTLP规范
diff --git a/home/static/img/docs/help/log_integration_cn.png
b/home/static/img/docs/help/log_integration_cn.png
index bf152a91c6..b7da008489 100644
Binary files a/home/static/img/docs/help/log_integration_cn.png and
b/home/static/img/docs/help/log_integration_cn.png differ
diff --git a/home/static/img/docs/help/log_integration_en.png
b/home/static/img/docs/help/log_integration_en.png
index a8a449a751..e3f6729a83 100644
Binary files a/home/static/img/docs/help/log_integration_en.png and
b/home/static/img/docs/help/log_integration_en.png differ
diff --git a/web-app/src/app/routes/log/log-manage/log-manage.component.html
b/web-app/src/app/routes/log/log-manage/log-manage.component.html
index cb95696ac9..3ee454cd9d 100644
--- a/web-app/src/app/routes/log/log-manage/log-manage.component.html
+++ b/web-app/src/app/routes/log/log-manage/log-manage.component.html
@@ -5,9 +5,9 @@
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
-
+
http://www.apache.org/licenses/LICENSE-2.0
-
+
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@@ -23,9 +23,7 @@
<nz-divider></nz-divider>
-<!-- Combined Filter and Statistics Card -->
<nz-card [nzTitle]="'log.manage.title' | i18n" class="manager-card">
- <!-- Filters Section -->
<div class="filters-container">
<nz-space nzSize="middle" nzWrap>
<nz-range-picker *nzSpaceItem [(ngModel)]="timeRange" nzShowTime
nzFormat="yyyy-MM-dd HH:mm:ss"></nz-range-picker>
@@ -56,6 +54,7 @@
<nz-auto-option nzValue="ERROR">ERROR</nz-auto-option>
<nz-auto-option nzValue="FATAL">FATAL</nz-auto-option>
</nz-autocomplete>
+ <input *nzSpaceItem nz-input [placeholder]="'log.manage.search' | i18n"
[(ngModel)]="searchContent" style="width: 200px" />
<button *nzSpaceItem nz-button nzType="primary" (click)="query()">
<i nz-icon nzType="search"></i> {{ 'log.manage.search' | i18n }}
</button>
@@ -101,9 +100,7 @@
</ng-template>
</div>
- <!-- Statistics Section (Collapsible) -->
<div *ngIf="showStatistics">
- <!-- Statistics Overview -->
<div nz-row [nzGutter]="16" style="margin-bottom: 16px">
<div nz-col nzXs="12" nzSm="4">
<nz-card>
@@ -179,7 +176,6 @@
</div>
</div>
- <!-- Charts Section -->
<div nz-row [nzGutter]="16">
<div nz-col nzXs="24" nzMd="8">
<nz-card [nzTitle]="'log.manage.chart.severity-distribution' | i18n"
[nzSize]="'small'">
@@ -200,7 +196,6 @@
</div>
</nz-card>
-<!-- Log Table -->
<nz-table
#fixedTable
[nzData]="data"
@@ -319,7 +314,6 @@
</tbody>
</nz-table>
-<!-- Log Details Modal -->
<nz-modal
[(nzVisible)]="isModalVisible"
[nzTitle]="'log.manage.log-entry-details' | i18n"
@@ -329,7 +323,6 @@
>
<ng-container *nzModalContent>
<div *ngIf="selectedLogEntry" class="log-details-modal">
- <!-- Basic Info Section -->
<nz-card [nzTitle]="'log.manage.basic-information' | i18n"
[nzSize]="'small'" class="basic-info-card">
<div class="basic-info">
<div class="info-row">
@@ -353,7 +346,6 @@
</div>
</nz-card>
- <!-- JSON Details Section -->
<nz-card [nzTitle]="'log.manage.complete-json-data' | i18n"
[nzSize]="'small'">
<div class="json-content">
<button
diff --git a/web-app/src/app/routes/log/log-manage/log-manage.component.ts
b/web-app/src/app/routes/log/log-manage/log-manage.component.ts
index 1d448931d1..496161769b 100644
--- a/web-app/src/app/routes/log/log-manage/log-manage.component.ts
+++ b/web-app/src/app/routes/log/log-manage/log-manage.component.ts
@@ -92,6 +92,7 @@ export class LogManageComponent implements OnInit {
severityText?: string;
traceId: string = '';
spanId: string = '';
+ searchContent: string = '';
// table with pagination
loading = false;
@@ -302,6 +303,7 @@ export class LogManageComponent implements OnInit {
this.spanId,
this.severityNumber,
this.severityText,
+ this.searchContent,
this.pageIndex - 1,
this.pageSize
);
@@ -339,8 +341,9 @@ export class LogManageComponent implements OnInit {
const spanId = this.spanId || undefined;
const severity = this.severityNumber || undefined;
const severityText = this.severityText || undefined;
+ const search = this.searchContent || undefined;
- this.logSvc.overviewStats(start, end, traceId, spanId, severity,
severityText).subscribe({
+ this.logSvc.overviewStats(start, end, traceId, spanId, severity,
severityText, search).subscribe({
next: message => {
if (message.code === 0) {
this.overviewStats = message.data || {};
@@ -349,7 +352,7 @@ export class LogManageComponent implements OnInit {
}
});
- this.logSvc.traceCoverageStats(start, end, traceId, spanId, severity,
severityText).subscribe({
+ this.logSvc.traceCoverageStats(start, end, traceId, spanId, severity,
severityText, search).subscribe({
next: message => {
if (message.code === 0) {
this.refreshTraceCoverageChart(message.data || {});
@@ -357,7 +360,7 @@ export class LogManageComponent implements OnInit {
}
});
- this.logSvc.trendStats(start, end, traceId, spanId, severity,
severityText).subscribe({
+ this.logSvc.trendStats(start, end, traceId, spanId, severity,
severityText, search).subscribe({
next: message => {
if (message.code === 0) {
this.refreshTrendChart(message.data?.hourlyStats || {});
@@ -372,6 +375,7 @@ export class LogManageComponent implements OnInit {
this.traceId = '';
this.spanId = '';
this.severityText = '';
+ this.searchContent = '';
this.pageIndex = 1;
this.query();
}
diff --git a/web-app/src/app/routes/log/log-stream/log-stream.component.html
b/web-app/src/app/routes/log/log-stream/log-stream.component.html
index 3a41193045..4efa55c9ba 100644
--- a/web-app/src/app/routes/log/log-stream/log-stream.component.html
+++ b/web-app/src/app/routes/log/log-stream/log-stream.component.html
@@ -126,6 +126,16 @@
</nz-autocomplete>
</div>
+ <div class="filter-item">
+ <label class="filter-label">{{ 'log.stream.content' | i18n }}</label>
+ <nz-input-group [nzPrefix]="contentTemplate" class="filter-input">
+ <input type="text" nz-input [(ngModel)]="filterLogContent"
[placeholder]="'log.stream.content-placeholder' | i18n" />
+ </nz-input-group>
+ <ng-template #contentTemplate>
+ <i nz-icon nzType="search"></i>
+ </ng-template>
+ </div>
+
<div class="filter-item">
<label class="filter-label">{{ 'log.stream.trace-id' | i18n
}}</label>
<nz-input-group [nzPrefix]="traceIdTemplate" class="filter-input">
diff --git a/web-app/src/app/routes/log/log-stream/log-stream.component.ts
b/web-app/src/app/routes/log/log-stream/log-stream.component.ts
index ce1709c366..e8d7630ddb 100644
--- a/web-app/src/app/routes/log/log-stream/log-stream.component.ts
+++ b/web-app/src/app/routes/log/log-stream/log-stream.component.ts
@@ -96,6 +96,7 @@ export class LogStreamComponent implements OnInit, OnDestroy,
AfterViewInit {
// Filter properties
filterSeverityNumber: string = '';
filterSeverityText: string = '';
+ filterLogContent: string = '';
filterTraceId: string = '';
filterSpanId: string = '';
@@ -220,6 +221,10 @@ export class LogStreamComponent implements OnInit,
OnDestroy, AfterViewInit {
params.append('severityText', this.filterSeverityText);
}
+ if (this.filterLogContent && this.filterLogContent.trim()) {
+ params.append('logContent', this.filterLogContent);
+ }
+
if (this.filterTraceId && this.filterTraceId.trim()) {
params.append('traceId', this.filterTraceId);
}
@@ -376,6 +381,7 @@ export class LogStreamComponent implements OnInit,
OnDestroy, AfterViewInit {
onClearFilters(): void {
this.filterSeverityNumber = '';
this.filterSeverityText = '';
+ this.filterLogContent = '';
this.filterTraceId = '';
this.filterSpanId = '';
this.resetState();
diff --git a/web-app/src/app/service/log.service.ts
b/web-app/src/app/service/log.service.ts
index 2a4a0a19aa..0f5b70ea7d 100644
--- a/web-app/src/app/service/log.service.ts
+++ b/web-app/src/app/service/log.service.ts
@@ -43,6 +43,7 @@ export class LogService {
spanId?: string,
severityNumber?: number,
severityText?: string,
+ search?: string,
pageIndex: number = 0,
pageSize: number = 20
): Observable<Message<Page<LogEntry>>> {
@@ -53,6 +54,7 @@ export class LogService {
if (spanId) params = params.set('spanId', spanId);
if (severityNumber != null) params = params.set('severityNumber',
severityNumber);
if (severityText) params = params.set('severityText', severityText);
+ if (search) params = params.set('search', search);
params = params.set('pageIndex', pageIndex);
params = params.set('pageSize', pageSize);
return this.http.get<Message<any>>(logs_list_uri, { params });
@@ -64,7 +66,8 @@ export class LogService {
traceId?: string,
spanId?: string,
severityNumber?: number,
- severityText?: string
+ severityText?: string,
+ search?: string
): Observable<Message<any>> {
let params = new HttpParams();
if (start != null) params = params.set('start', start);
@@ -73,6 +76,7 @@ export class LogService {
if (spanId) params = params.set('spanId', spanId);
if (severityNumber != null) params = params.set('severityNumber',
severityNumber);
if (severityText) params = params.set('severityText', severityText);
+ if (search) params = params.set('search', search);
return this.http.get<Message<any>>(logs_stats_overview_uri, { params });
}
@@ -82,7 +86,8 @@ export class LogService {
traceId?: string,
spanId?: string,
severityNumber?: number,
- severityText?: string
+ severityText?: string,
+ search?: string
): Observable<Message<any>> {
let params = new HttpParams();
if (start != null) params = params.set('start', start);
@@ -91,6 +96,7 @@ export class LogService {
if (spanId) params = params.set('spanId', spanId);
if (severityNumber != null) params = params.set('severityNumber',
severityNumber);
if (severityText) params = params.set('severityText', severityText);
+ if (search) params = params.set('search', search);
return this.http.get<Message<any>>(logs_stats_trend_uri, { params });
}
@@ -100,7 +106,8 @@ export class LogService {
traceId?: string,
spanId?: string,
severityNumber?: number,
- severityText?: string
+ severityText?: string,
+ search?: string
): Observable<Message<any>> {
let params = new HttpParams();
if (start != null) params = params.set('start', start);
@@ -109,6 +116,7 @@ export class LogService {
if (spanId) params = params.set('spanId', spanId);
if (severityNumber != null) params = params.set('severityNumber',
severityNumber);
if (severityText) params = params.set('severityText', severityText);
+ if (search) params = params.set('search', search);
return this.http.get<Message<any>>(logs_stats_trace_coverage_uri, { params
});
}
diff --git a/web-app/src/assets/doc/log-integration/otlp.en-US.md
b/web-app/src/assets/doc/log-integration/otlp.en-US.md
index f6c370de0a..0f9f4ee32c 100644
--- a/web-app/src/assets/doc/log-integration/otlp.en-US.md
+++ b/web-app/src/assets/doc/log-integration/otlp.en-US.md
@@ -2,20 +2,16 @@
### API Endpoint
-`POST /api/logs/ingest/otlp`
-
-Or use the default endpoint (automatically uses OTLP protocol):
-
-`POST /api/logs/ingest`
+`POST /api/logs/otlp/v1/logs`
### Request Headers
-- `Content-Type`: `application/json`
+- `Content-Type`: `application/json` or `application/x-protobuf`
- `Authorization`: `Bearer {token}`
### Request Body
-Supports standard OTLP JSON format log data:
+Supports standard OTLP JSON-Protobuf format or Binary Protobuf format log data:
```json
{
@@ -75,7 +71,7 @@ Supports standard OTLP JSON format log data:
```yaml
exporters:
otlphttp:
- logs_endpoint: http://{hertzbeat_host}:1157/api/logs/ingest/otlp
+ logs_endpoint: http://{hertzbeat_host}:1157/api/logs/otlp/v1/logs
compression: none
encoding: json
headers:
@@ -100,10 +96,8 @@ service:
#### Log Sending Failures
- Ensure HertzBeat service address is accessible from external systems
- Check if Token is correctly configured
-- Verify request header Content-Type is set to application/json
#### Log Format Errors
-- Ensure sending standard OTLP JSON format
- Check timestamp format is nanosecond precision
- Verify severityNumber value range (1-24)
diff --git a/web-app/src/assets/doc/log-integration/otlp.zh-CN.md
b/web-app/src/assets/doc/log-integration/otlp.zh-CN.md
index 6427f95ac8..232cfec5cc 100644
--- a/web-app/src/assets/doc/log-integration/otlp.zh-CN.md
+++ b/web-app/src/assets/doc/log-integration/otlp.zh-CN.md
@@ -2,20 +2,16 @@
### 接口端点
-`POST /api/logs/ingest/otlp`
-
-或使用默认接口(自动使用OTLP协议):
-
-`POST /api/logs/ingest`
+`POST /api/logs/otlp/v1/logs`
### 请求头
-- `Content-Type`: `application/json`
+- `Content-Type`: `application/json` 或 `application/x-protobuf`
- `Authorization`: `Bearer {token}`
### 请求体
-支持标准的 OTLP JSON 格式日志数据:
+支持标准的 OTLP JSON-Protobuf 格式日志数据或者 Binary Protobuf 格式日志数据:
```json
{
@@ -75,7 +71,7 @@
```yaml
exporters:
otlphttp:
- logs_endpoint: http://{hertzbeat_host}:1157/api/logs/ingest/otlp
+ logs_endpoint: http://{hertzbeat_host}:1157/api/logs/otlp/v1/logs
compression: none
encoding: json
headers:
@@ -100,10 +96,8 @@ service:
#### 日志发送失败
- 确保HertzBeat服务地址可以被外部系统访问
- 检查Token是否正确配置
-- 验证请求头Content-Type设置为application/json
#### 日志格式错误
-- 确保发送的是标准OTLP JSON格式
- 检查时间戳格式是否为纳秒精度
- 验证severityNumber值范围(1-24)
diff --git a/web-app/src/assets/i18n/en-US.json
b/web-app/src/assets/i18n/en-US.json
index 36c3c1fd11..013c6c9b72 100644
--- a/web-app/src/assets/i18n/en-US.json
+++ b/web-app/src/assets/i18n/en-US.json
@@ -695,6 +695,8 @@
"log.stream.severity-number-placeholder": "Enter Severity Number",
"log.stream.severity-text": "Severity:",
"log.stream.severity-text-placeholder": "Enter Severity",
+ "log.stream.content": "Log Content",
+ "log.stream.content-placeholder": "Enter Log Content",
"log.stream.show-filters": "Show Filters",
"log.stream.span": "Span:",
"log.stream.span-id": "Span ID:",
diff --git a/web-app/src/assets/i18n/ja-JP.json
b/web-app/src/assets/i18n/ja-JP.json
index 2c1d697e1d..99e4206259 100644
--- a/web-app/src/assets/i18n/ja-JP.json
+++ b/web-app/src/assets/i18n/ja-JP.json
@@ -657,6 +657,8 @@
"log.stream.severity-number-placeholder": "重大度番号を入力",
"log.stream.severity-text": "重大度:",
"log.stream.severity-text-placeholder": "重大度を入力",
+ "log.stream.content": "ログ内容",
+ "log.stream.content-placeholder": "ログ内容を入力してください",
"log.stream.show-filters": "フィルターを表示",
"log.stream.span": "スパン:",
"log.stream.span-id": "Span ID:",
diff --git a/web-app/src/assets/i18n/pt-BR.json
b/web-app/src/assets/i18n/pt-BR.json
index ac2bd8f590..ea86ea3072 100644
--- a/web-app/src/assets/i18n/pt-BR.json
+++ b/web-app/src/assets/i18n/pt-BR.json
@@ -427,6 +427,8 @@
"log.stream.severity-number-placeholder": "Digite o número de severidade",
"log.stream.severity-text": "Severidade:",
"log.stream.severity-text-placeholder": "Digite a severidade",
+ "log.stream.content": "Conteúdo do Log",
+ "log.stream.content-placeholder": "Digite o conteúdo do log",
"log.stream.show-filters": "Mostrar filtros",
"log.stream.span": "Span:",
"log.stream.span-id": "Span ID:",
diff --git a/web-app/src/assets/i18n/zh-CN.json
b/web-app/src/assets/i18n/zh-CN.json
index 5347b004f6..d6ed897a01 100644
--- a/web-app/src/assets/i18n/zh-CN.json
+++ b/web-app/src/assets/i18n/zh-CN.json
@@ -698,6 +698,8 @@
"log.stream.severity-number-placeholder": "输入日志级别编号",
"log.stream.severity-text": "日志级别:",
"log.stream.severity-text-placeholder": "输入日志级别",
+ "log.stream.content": "日志内容",
+ "log.stream.content-placeholder": "输入日志内容",
"log.stream.show-filters": "显示过滤器",
"log.stream.span": "跨度:",
"log.stream.span-id": "Span ID:",
diff --git a/web-app/src/assets/i18n/zh-TW.json
b/web-app/src/assets/i18n/zh-TW.json
index b5f277c382..28470ff6d5 100644
--- a/web-app/src/assets/i18n/zh-TW.json
+++ b/web-app/src/assets/i18n/zh-TW.json
@@ -661,6 +661,8 @@
"log.stream.severity-number-placeholder": "輸入日誌等級編號",
"log.stream.severity-text": "日誌等級:",
"log.stream.severity-text-placeholder": "輸入日誌等級",
+ "log.stream.content": "日誌內容",
+ "log.stream.content-placeholder": "輸入日誌內容",
"log.stream.show-filters": "顯示過濾器",
"log.stream.span": "跨度:",
"log.stream.span-id": "Span ID:",
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]