This is an automated email from the ASF dual-hosted git repository.

spacewander pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/apisix.git


The following commit(s) were added to refs/heads/master by this push:
     new f8eac5e  chore(kafka-logger): add brokers info to log and update docs (#4908)
f8eac5e is described below

commit f8eac5e725edd7323ee7b1f7be05b3fbc080fe70
Author: okaybase <75366457+okayb...@users.noreply.github.com>
AuthorDate: Mon Aug 30 13:06:04 2021 +0800

    chore(kafka-logger): add brokers info to log and update docs (#4908)
---
 apisix/plugins/kafka-logger.lua        |  3 ++-
 docs/en/latest/plugins/kafka-logger.md |  6 ++---
 t/plugin/kafka-logger.t                | 48 ++++++++++++++++++++++++++++++++++
 3 files changed, 52 insertions(+), 5 deletions(-)

diff --git a/apisix/plugins/kafka-logger.lua b/apisix/plugins/kafka-logger.lua
index 526c5cb..b680bd4 100644
--- a/apisix/plugins/kafka-logger.lua
+++ b/apisix/plugins/kafka-logger.lua
@@ -163,7 +163,8 @@ local function send_kafka_data(conf, log_message, prod)
                                       prod, conf.kafka_topic, log_message))
 
     if not ok then
-        return false, "failed to send data to Kafka topic: " .. err
+        return false, "failed to send data to Kafka topic: " .. err ..
+                ", brokers: " .. core.json.encode(conf.broker_list)
     end
 
     return true
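
For context, here is a minimal standalone sketch (not part of the commit) of the message this change produces. It uses lua-cjson directly, on the assumption that it serializes the broker map the same way core.json.encode does inside the plugin; the error string is a hypothetical value:

```lua
-- Sketch only: reproduce the enriched error string added by this commit.
-- Assumes lua-cjson encodes the broker map like core.json.encode does.
local cjson = require("cjson")

local conf = { broker_list = { ["127.0.0.127"] = 9092 } }
local err  = "not found topic"  -- hypothetical error from the Kafka client

local msg = "failed to send data to Kafka topic: " .. err ..
        ", brokers: " .. cjson.encode(conf.broker_list)

print(msg)
-- failed to send data to Kafka topic: not found topic, brokers: {"127.0.0.127":9092}
```
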
diff --git a/docs/en/latest/plugins/kafka-logger.md b/docs/en/latest/plugins/kafka-logger.md
index 9f54c5d..f19a693 100644
--- a/docs/en/latest/plugins/kafka-logger.md
+++ b/docs/en/latest/plugins/kafka-logger.md
@@ -34,9 +34,7 @@ title: kafka-logger
 
 `kafka-logger` is a plugin which works as a Kafka client driver for the ngx_lua nginx module.
 
-This will provide the ability to send Log data requests as JSON objects to external Kafka clusters.
-
-This plugin provides the ability to push Log data as a batch to you're external Kafka topics. In case if you did not receive the log data don't worry give it some time it will automatically send the logs after the timer function expires in our Batch Processor.
+This plugin provides the ability to push request log data as JSON objects to your external Kafka clusters. If you do not see the log data right away, don't worry: it will be sent automatically once the timer in our Batch Processor expires.
 
 For more info on Batch-Processor in Apache APISIX, please refer to
 [Batch-Processor](../batch-processor.md)
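
The batching behavior described above comes from the shared Batch Processor. As a rough illustration only (field names follow the batch-processor docs; the values are hypothetical), a kafka-logger configuration, seen by the plugin as a Lua table, might look like this:

```lua
-- Illustration only: a kafka-logger conf table after schema validation.
-- batch_max_size and inactive_timeout are Batch Processor settings: logs are
-- flushed when the batch is full or when the inactivity timer expires.
local conf = {
    broker_list      = { ["127.0.0.1"] = 9092 }, -- host -> port map of Kafka brokers
    kafka_topic      = "test2",                  -- topic the log entries are pushed to
    key              = "key1",                   -- partition key
    batch_max_size   = 1000,                     -- flush after this many buffered entries
    inactive_timeout = 5,                        -- or after this many idle seconds
}
```
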
@@ -169,7 +167,7 @@ curl http://127.0.0.1:9080/apisix/admin/routes/5 -H 'X-API-KEY: edd1c9f034335f13
 
 ## Test Plugin
 
-*success:
+success:
 
 ```shell
 $ curl -i http://127.0.0.1:9080/hello
diff --git a/t/plugin/kafka-logger.t b/t/plugin/kafka-logger.t
index 34f61b7..51876d3 100644
--- a/t/plugin/kafka-logger.t
+++ b/t/plugin/kafka-logger.t
@@ -1066,3 +1066,51 @@ property "broker_list" validation failed: failed to validate 127.0.0.1 (matching
 property "broker_list" validation failed: failed to validate 127.0.0.1 (matching ".*"): expected 65536 to be smaller than 65535
 --- no_error_log
 [error]
+
+
+
+=== TEST 25: kafka brokers info in log
+--- config
+    location /t {
+        content_by_lua_block {
+            local t = require("lib.test_admin").test
+            local code, body = t('/apisix/admin/routes/1',
+                 ngx.HTTP_PUT,
+                 [[{
+                        "plugins": {
+                             "kafka-logger": {
+                                    "broker_list" :
+                                      {
+                                        "127.0.0.127":9092
+                                      },
+                                    "kafka_topic" : "test2",
+                                    "producer_type": "sync",
+                                    "key" : "key1",
+                                    "batch_max_size": 1,
+                                    "cluster_name": 10
+                             }
+                        },
+                        "upstream": {
+                            "nodes": {
+                                "127.0.0.1:1980": 1
+                            },
+                            "type": "roundrobin"
+                        },
+                        "uri": "/hello"
+                }]]
+            )
+            if code >= 300 then
+                ngx.status = code
+            end
+            ngx.say(body)
+            local http = require "resty.http"
+            local httpc = http.new()
+            local uri = "http://127.0.0.1:" .. ngx.var.server_port .. "/hello"
+            local res, err = httpc:request_uri(uri, {method = "GET"})
+        }
+    }
+--- request
+GET /t
+--- error_log_like eval
+qr/create new kafka producer instance, brokers: \[\{"port":9092,"host":"127.0.0.127"}]/
+qr/failed to send data to Kafka topic: .*, brokers: \{"127.0.0.127":9092}/
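
The two expected log lines show the same broker in two shapes: the first is the array of host/port tables that the plugin presumably derives from the configured map when it creates the Kafka producer, while the second is the raw broker_list map appended to the error message by the change above. A rough sketch of that difference, not the plugin's exact code:

```lua
-- Rough sketch: why the expected log lines encode the brokers differently.
-- Assumes lua-cjson serializes tables the same way core.json.encode does.
local cjson = require("cjson")

local conf = { broker_list = { ["127.0.0.127"] = 9092 } }

-- Shape logged when the producer is created: an array of {host, port} tables.
local broker_array = {}
for host, port in pairs(conf.broker_list) do
    table.insert(broker_array, { host = host, port = port })
end

print(cjson.encode(broker_array))      -- e.g. [{"port":9092,"host":"127.0.0.127"}] (key order may vary)
print(cjson.encode(conf.broker_list))  -- {"127.0.0.127":9092}
```
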
