From 315394f20575ae4fb659ce2ccade92e4464f7c20 Mon Sep 17 00:00:00 2001 From: "zhuo.chen" Date: Fri, 25 Jun 2021 17:22:17 +0800 Subject: [PATCH 1/3] feat(kafka-logger): support for specified the log formats via admin API. --- apisix/plugins/http-logger.lua | 50 +++----------------------- apisix/plugins/kafka-logger.lua | 27 ++++++++++++-- apisix/utils/log-util.lua | 48 +++++++++++++++++++++++++ docs/zh/latest/plugins/kafka-logger.md | 29 ++++++++++++++- t/plugin/kafka-logger.t | 40 +++++++++++++++++++++ 5 files changed, 144 insertions(+), 50 deletions(-) diff --git a/apisix/plugins/http-logger.lua b/apisix/plugins/http-logger.lua index 0ca37e7090bd..7e2c083bd369 100644 --- a/apisix/plugins/http-logger.lua +++ b/apisix/plugins/http-logger.lua @@ -26,15 +26,11 @@ local ngx = ngx local tostring = tostring local pairs = pairs local ipairs = ipairs -local str_byte = string.byte local timer_at = ngx.timer.at local plugin_name = "http-logger" local stale_timer_running = false local buffers = {} -local lru_log_format = core.lrucache.new({ - ttl = 300, count = 512 -}) local schema = { type = "object", @@ -59,14 +55,7 @@ local schema = { local metadata_schema = { type = "object", properties = { - log_format = { - type = "object", - default = { - ["host"] = "$host", - ["@timestamp"] = "$time_iso8601", - ["client_ip"] = "$remote_addr", - }, - }, + log_format = log_util.metadata_schema_log_format, }, additionalProperties = false, } @@ -157,24 +146,6 @@ local function send_http_data(conf, log_message) end -local function gen_log_format(metadata) - local log_format = {} - if metadata == nil then - return log_format - end - - for k, var_name in pairs(metadata.value.log_format) do - if var_name:byte(1, 1) == str_byte("$") then - log_format[k] = {true, var_name:sub(2)} - else - log_format[k] = {false, var_name} - end - end - core.log.info("log_format: ", core.json.delay_encode(log_format)) - return log_format -end - - -- remove stale objects from the memory after timer expires local 
function remove_stale_objects(premature) if premature then @@ -198,23 +169,10 @@ function _M.log(conf, ctx) core.log.info("metadata: ", core.json.delay_encode(metadata)) local entry - local log_format = lru_log_format(metadata or "", nil, gen_log_format, - metadata) - if core.table.nkeys(log_format) > 0 then - entry = core.table.new(0, core.table.nkeys(log_format)) - for k, var_attr in pairs(log_format) do - if var_attr[1] then - entry[k] = ctx.var[var_attr[2]] - else - entry[k] = var_attr[2] - end - end - local matched_route = ctx.matched_route and ctx.matched_route.value - if matched_route then - entry.service_id = matched_route.service_id - entry.route_id = matched_route.id - end + if metadata and metadata.value.log_format + and core.table.nkeys(metadata.value.log_format) > 0 then + entry = log_util.get_custom_format_log(metadata.value.log_format) else entry = log_util.get_full_log(ngx, conf) end diff --git a/apisix/plugins/kafka-logger.lua b/apisix/plugins/kafka-logger.lua index 7cc46a9888e6..7aed04cd79bc 100644 --- a/apisix/plugins/kafka-logger.lua +++ b/apisix/plugins/kafka-logger.lua @@ -18,6 +18,8 @@ local core = require("apisix.core") local log_util = require("apisix.utils.log-util") local producer = require ("resty.kafka.producer") local batch_processor = require("apisix.utils.batch-processor") +local plugin = require("apisix.plugin") + local math = math local pairs = pairs local type = type @@ -63,15 +65,27 @@ local schema = { required = {"broker_list", "kafka_topic"} } +local metadata_schema = { + type = "object", + properties = { + log_format = log_util.metadata_schema_log_format, + }, + additionalProperties = false, +} + local _M = { version = 0.1, priority = 403, name = plugin_name, schema = schema, + metadata_schema = metadata_schema, } -function _M.check_schema(conf) +function _M.check_schema(conf, schema_type) + if schema_type == core.schema.TYPE_METADATA then + return core.schema.check(metadata_schema, conf) + end return core.schema.check(schema, 
conf) end @@ -152,8 +166,15 @@ function _M.log(conf, ctx) -- core.log.info("origin entry: ", entry) else - entry = log_util.get_full_log(ngx, conf) - core.log.info("full log entry: ", core.json.delay_encode(entry)) + local metadata = plugin.plugin_metadata(plugin_name) + core.log.info("metadata: ", core.json.delay_encode(metadata)) + if metadata and metadata.value.log_format + and core.table.nkeys(metadata.value.log_format) > 0 then + entry = log_util.get_custom_format_log(metadata.value.log_format) + else + entry = log_util.get_full_log(ngx, conf) + core.log.info("full log entry: ", core.json.delay_encode(entry)) + end end if not stale_timer_running then diff --git a/apisix/utils/log-util.lua b/apisix/utils/log-util.lua index 79fa170074df..5db92d08bc93 100644 --- a/apisix/utils/log-util.lua +++ b/apisix/utils/log-util.lua @@ -17,10 +17,58 @@ local core = require("apisix.core") local ngx = ngx local pairs = pairs +local str_byte = string.byte local req_get_body_data = ngx.req.get_body_data +local lru_log_format = core.lrucache.new({ + ttl = 300, count = 512 +}) + local _M = {} +_M.metadata_schema_log_format = { + type = "object", + default = { + ["host"] = "$host", + ["@timestamp"] = "$time_iso8601", + ["client_ip"] = "$remote_addr", + }, +} + + +local function gen_log_format(format) + local log_format = {} + for k, var_name in pairs(format) do + if var_name:byte(1, 1) == str_byte("$") then + log_format[k] = {true, var_name:sub(2)} + else + log_format[k] = {false, var_name} + end + end + core.log.info("log_format: ", core.json.delay_encode(log_format)) + return log_format +end + +local function get_custom_format_log(format) + local ctx = ngx.ctx.api_ctx + local log_format = lru_log_format(format or "", nil, gen_log_format, format) + local entry = core.table.new(0, core.table.nkeys(log_format)) + for k, var_attr in pairs(log_format) do + if var_attr[1] then + entry[k] = ctx.var[var_attr[2]] + else + entry[k] = var_attr[2] + end + end + + local matched_route = 
ctx.matched_route and ctx.matched_route.value + if matched_route then + entry.service_id = matched_route.service_id + entry.route_id = matched_route.id + end + return entry +end +_M.get_custom_format_log = get_custom_format_log local function get_full_log(ngx, conf) local ctx = ngx.ctx.api_ctx diff --git a/docs/zh/latest/plugins/kafka-logger.md b/docs/zh/latest/plugins/kafka-logger.md index 58e6f44b2cd3..cd50e3301b54 100644 --- a/docs/zh/latest/plugins/kafka-logger.md +++ b/docs/zh/latest/plugins/kafka-logger.md @@ -138,7 +138,7 @@ title: kafka-logger 1. 为特定路由启用 kafka-logger 插件。 ```shell -curl http://127.0.0.1:9080/apisix/admin/routes/5 -H 'X-API-KEY: edd1c9f034335f136f87ad84b625c8f1' -X PUT -d ' +curl http://127.0.0.1:9080/apisix/admin/routes/1 -H 'X-API-KEY: edd1c9f034335f136f87ad84b625c8f1' -X PUT -d ' { "plugins": { "kafka-logger": { @@ -171,6 +171,33 @@ HTTP/1.1 200 OK hello, world ``` +## 插件元数据设置 + +| 名称 | 类型 | 必选项 | 默认值 | 有效值 | 描述 | +| ---------------- | ------- | ------ | ------------- | ------- | ------------------------------------------------ | +| log_format | object | 可选 | | | 以 Hash 对象方式声明日志格式。对 value 部分,仅支持字符串。如果是以`$`开头,则表明是要获取 [Nginx 内置变量](http://nginx.org/en/docs/varindex.html)。特别的,该设置是全局生效的,意味着指定 log_format 后,将对所有绑定 kafka-logger 的 Route 或 Service 生效。 | + +### 设置日志格式示例 + +```shell +curl http://127.0.0.1:9080/apisix/admin/plugin_metadata/kafka-logger -H 'X-API-KEY: edd1c9f034335f136f87ad84b625c8f1' -X PUT -d ' +{ + "log_format": { + "host": "$host", + "@timestamp": "$time_iso8601", + "client_ip": "$remote_addr" + } +}' +``` + +在日志收集处,将得到类似下面的日志: + +```shell +{"host":"localhost","@timestamp":"2020-09-23T19:05:05-04:00","client_ip":"127.0.0.1","route_id":"1"} +{"host":"localhost","@timestamp":"2020-09-23T19:05:05-04:00","client_ip":"127.0.0.1","route_id":"1"} +``` + + ## 禁用插件 当您要禁用`kafka-logger`插件时,这很简单,您可以在插件配置中删除相应的 json 配置,无需重新启动服务,它将立即生效: diff --git a/t/plugin/kafka-logger.t b/t/plugin/kafka-logger.t index 46a520c84bc5..b0254970884d 100644 --- 
a/t/plugin/kafka-logger.t +++ b/t/plugin/kafka-logger.t @@ -722,3 +722,43 @@ GET /t [qr/partition_id: 1/, qr/partition_id: 0/, qr/partition_id: 2/] + + + +=== TEST 20: add plugin metadata +--- config + location /t { + content_by_lua_block { + local t = require("lib.test_admin").test + local code, body = t('/apisix/admin/plugin_metadata/kafka-logger', + ngx.HTTP_PUT, + [[{ + "log_format": { + "host": "$host", + "@timestamp": "$time_iso8601", + "client_ip": "$remote_addr" + } + }]], + [[{ + "node": { + "value": { + "log_format": { + "host": "$host", + "@timestamp": "$time_iso8601", + "client_ip": "$remote_addr" + } + } + }, + "action": "set" + }]] + ) + ngx.status = code + ngx.say(body) + } + } +--- request +GET /t +--- response_body +passed +--- no_error_log +[error] \ No newline at end of file From 2c5845aec2fe4eea5e6c253d78fb83a46e1a021f Mon Sep 17 00:00:00 2001 From: "zhuo.chen" Date: Mon, 28 Jun 2021 15:47:05 +0800 Subject: [PATCH 2/3] add kafka-logger log-format test case --- apisix/plugins/http-logger.lua | 3 +- apisix/plugins/kafka-logger.lua | 3 +- docs/zh/latest/plugins/http-logger.md | 2 +- docs/zh/latest/plugins/kafka-logger.md | 3 +- t/plugin/kafka-logger-log-format.t | 123 +++++++++++++++++++++++++ t/plugin/kafka-logger.t | 40 -------- 6 files changed, 128 insertions(+), 46 deletions(-) create mode 100644 t/plugin/kafka-logger-log-format.t diff --git a/apisix/plugins/http-logger.lua b/apisix/plugins/http-logger.lua index 7e2c083bd369..3a5222e7e15a 100644 --- a/apisix/plugins/http-logger.lua +++ b/apisix/plugins/http-logger.lua @@ -24,7 +24,6 @@ local plugin = require("apisix.plugin") local ngx = ngx local tostring = tostring -local pairs = pairs local ipairs = ipairs local timer_at = ngx.timer.at @@ -170,7 +169,7 @@ function _M.log(conf, ctx) local entry - if metadata and metadata.value.log_format + if metadata and metadata.value.log_format and core.table.nkeys(metadata.value.log_format) > 0 then entry = 
log_util.get_custom_format_log(metadata.value.log_format) else diff --git a/apisix/plugins/kafka-logger.lua b/apisix/plugins/kafka-logger.lua index 7aed04cd79bc..a80949adb80c 100644 --- a/apisix/plugins/kafka-logger.lua +++ b/apisix/plugins/kafka-logger.lua @@ -168,9 +168,10 @@ function _M.log(conf, ctx) else local metadata = plugin.plugin_metadata(plugin_name) core.log.info("metadata: ", core.json.delay_encode(metadata)) - if metadata and metadata.value.log_format + if metadata and metadata.value.log_format and core.table.nkeys(metadata.value.log_format) > 0 then entry = log_util.get_custom_format_log(metadata.value.log_format) + core.log.info("custom log format entry: ", core.json.delay_encode(entry)) else entry = log_util.get_full_log(ngx, conf) core.log.info("full log entry: ", core.json.delay_encode(entry)) diff --git a/docs/zh/latest/plugins/http-logger.md b/docs/zh/latest/plugins/http-logger.md index 811c9d0eb640..43994acec338 100644 --- a/docs/zh/latest/plugins/http-logger.md +++ b/docs/zh/latest/plugins/http-logger.md @@ -89,7 +89,7 @@ hello, world | 名称 | 类型 | 必选项 | 默认值 | 有效值 | 描述 | | ---------------- | ------- | ------ | ------------- | ------- | ------------------------------------------------ | -| log_format | object | 可选 | {"host": "$host", "@timestamp": "$time_iso8601", "client_ip": "$remote_addr"} | | 以 JSON 对象方式声明日志格式。对 value 部分,仅支持字符串。如果是以 `$` 开头,则表明是要获取 __APISIX__ 变量或 [Nginx 内置变量](http://nginx.org/en/docs/varindex.html)。特别的,该设置是全局生效的,意味着指定 log_format 后,将对所有绑定 http-logger 的 Route 或 Service 生效。 | +| log_format | object | 可选 | {"host": "$host", "@timestamp": "$time_iso8601", "client_ip": "$remote_addr"} | | 以 JSON 对象方式声明日志格式。对 value 部分,仅支持字符串。如果是以 `$` 开头,则表明是要获取 __APISIX__ 变量或 [Nginx 内置变量](http://nginx.org/en/docs/varindex.html)。特别的,**该设置是全局生效的**,意味着指定 log_format 后,将对所有绑定 http-logger 的 Route 或 Service 生效。 | **APISIX 变量** diff --git a/docs/zh/latest/plugins/kafka-logger.md b/docs/zh/latest/plugins/kafka-logger.md index cd50e3301b54..8cfa88d5415e 
100644 --- a/docs/zh/latest/plugins/kafka-logger.md +++ b/docs/zh/latest/plugins/kafka-logger.md @@ -175,7 +175,7 @@ hello, world | 名称 | 类型 | 必选项 | 默认值 | 有效值 | 描述 | | ---------------- | ------- | ------ | ------------- | ------- | ------------------------------------------------ | -| log_format | object | 可选 | | | 以 Hash 对象方式声明日志格式。对 value 部分,仅支持字符串。如果是以`$`开头,则表明是要获取 [Nginx 内置变量](http://nginx.org/en/docs/varindex.html)。特别的,该设置是全局生效的,意味着指定 log_format 后,将对所有绑定 kafka-logger 的 Route 或 Service 生效。 | +| log_format | object | 可选 | | | 以 Hash 对象方式声明日志格式。对 value 部分,仅支持字符串。如果是以`$`开头,则表明是要获取 [Nginx 内置变量](http://nginx.org/en/docs/varindex.html)。特别的,**该设置是全局生效的**,意味着指定 log_format 后,将对所有绑定 kafka-logger 的 Route 或 Service 生效。 | ### 设置日志格式示例 @@ -197,7 +197,6 @@ curl http://127.0.0.1:9080/apisix/admin/plugin_metadata/kafka-logger -H 'X-API-K {"host":"localhost","@timestamp":"2020-09-23T19:05:05-04:00","client_ip":"127.0.0.1","route_id":"1"} ``` - ## 禁用插件 当您要禁用`kafka-logger`插件时,这很简单,您可以在插件配置中删除相应的 json 配置,无需重新启动服务,它将立即生效: diff --git a/t/plugin/kafka-logger-log-format.t b/t/plugin/kafka-logger-log-format.t new file mode 100644 index 000000000000..37f4249365de --- /dev/null +++ b/t/plugin/kafka-logger-log-format.t @@ -0,0 +1,123 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +use t::APISIX 'no_plan'; + +log_level('info'); +repeat_each(1); +no_long_string(); +no_root_location(); + +run_tests; + +__DATA__ + +=== TEST 1: add plugin metadata +--- config + location /t { + content_by_lua_block { + local t = require("lib.test_admin").test + local code, body = t('/apisix/admin/plugin_metadata/kafka-logger', + ngx.HTTP_PUT, + [[{ + "log_format": { + "host": "$host", + "@timestamp": "$time_iso8601", + "client_ip": "$remote_addr" + } + }]], + [[{ + "node": { + "value": { + "log_format": { + "host": "$host", + "@timestamp": "$time_iso8601", + "client_ip": "$remote_addr" + } + } + }, + "action": "set" + }]] + ) + + ngx.status = code + ngx.say(body) + } + } +--- request +GET /t +--- response_body +passed +--- no_error_log +[error] + + + +=== TEST 2: set route(id: 1), batch_max_size=1 +--- config + location /t { + content_by_lua_block { + local t = require("lib.test_admin").test + local code, body = t('/apisix/admin/routes/1', + ngx.HTTP_PUT, + [[{ + "plugins": { + "kafka-logger": { + "broker_list" : + { + "127.0.0.1":9092 + }, + "kafka_topic" : "test2", + "key" : "key1", + "timeout" : 1, + "batch_max_size": 1 + } + }, + "upstream": { + "nodes": { + "127.0.0.1:1980": 1 + }, + "type": "roundrobin" + }, + "uri": "/hello" + }]] + ) + + if code >= 300 then + ngx.status = code + end + ngx.say(body) + } + } +--- request +GET /t +--- response_body +passed +--- no_error_log +[error] + + + +=== TEST 3: hit route and report kafka logger +--- request +GET /hello +--- response_body +hello world +--- wait: 0.5 +--- no_error_log +[error] +--- error_log eval +qr/send data to kafka: \{.*"host":"localhost"/ diff --git a/t/plugin/kafka-logger.t b/t/plugin/kafka-logger.t index b0254970884d..46a520c84bc5 100644 --- a/t/plugin/kafka-logger.t +++ b/t/plugin/kafka-logger.t @@ -722,43 +722,3 @@ GET /t [qr/partition_id: 1/, qr/partition_id: 0/, qr/partition_id: 2/] - - 
- -=== TEST 20: add plugin metadata ---- config - location /t { - content_by_lua_block { - local t = require("lib.test_admin").test - local code, body = t('/apisix/admin/plugin_metadata/kafka-logger', - ngx.HTTP_PUT, - [[{ - "log_format": { - "host": "$host", - "@timestamp": "$time_iso8601", - "client_ip": "$remote_addr" - } - }]], - [[{ - "node": { - "value": { - "log_format": { - "host": "$host", - "@timestamp": "$time_iso8601", - "client_ip": "$remote_addr" - } - } - }, - "action": "set" - }]] - ) - ngx.status = code - ngx.say(body) - } - } ---- request -GET /t ---- response_body -passed ---- no_error_log -[error] \ No newline at end of file From 02ede96bbe3aace4de22f9f5ba7977e06e55b9dc Mon Sep 17 00:00:00 2001 From: "zhuo.chen" Date: Tue, 29 Jun 2021 11:59:59 +0800 Subject: [PATCH 3/3] add english docs --- apisix/plugins/http-logger.lua | 5 ++-- apisix/plugins/kafka-logger.lua | 5 ++-- apisix/utils/log-util.lua | 4 +-- docs/en/latest/plugins/http-logger.md | 2 +- docs/en/latest/plugins/kafka-logger.md | 38 ++++++++++++++++++++++++++ docs/zh/latest/plugins/kafka-logger.md | 12 +++++++- 6 files changed, 57 insertions(+), 9 deletions(-) diff --git a/apisix/plugins/http-logger.lua b/apisix/plugins/http-logger.lua index 3a5222e7e15a..a28ada2a59f3 100644 --- a/apisix/plugins/http-logger.lua +++ b/apisix/plugins/http-logger.lua @@ -170,8 +170,9 @@ function _M.log(conf, ctx) local entry if metadata and metadata.value.log_format - and core.table.nkeys(metadata.value.log_format) > 0 then - entry = log_util.get_custom_format_log(metadata.value.log_format) + and core.table.nkeys(metadata.value.log_format) > 0 + then + entry = log_util.get_custom_format_log(ctx, metadata.value.log_format) else entry = log_util.get_full_log(ngx, conf) end diff --git a/apisix/plugins/kafka-logger.lua b/apisix/plugins/kafka-logger.lua index a80949adb80c..9f3fe2d0d57c 100644 --- a/apisix/plugins/kafka-logger.lua +++ b/apisix/plugins/kafka-logger.lua @@ -169,8 +169,9 @@ function _M.log(conf, 
ctx) local metadata = plugin.plugin_metadata(plugin_name) core.log.info("metadata: ", core.json.delay_encode(metadata)) if metadata and metadata.value.log_format - and core.table.nkeys(metadata.value.log_format) > 0 then - entry = log_util.get_custom_format_log(metadata.value.log_format) + and core.table.nkeys(metadata.value.log_format) > 0 + then + entry = log_util.get_custom_format_log(ctx, metadata.value.log_format) core.log.info("custom log format entry: ", core.json.delay_encode(entry)) else entry = log_util.get_full_log(ngx, conf) diff --git a/apisix/utils/log-util.lua b/apisix/utils/log-util.lua index 5db92d08bc93..361d9b264c21 100644 --- a/apisix/utils/log-util.lua +++ b/apisix/utils/log-util.lua @@ -48,9 +48,7 @@ local function gen_log_format(format) return log_format end -local function get_custom_format_log(format) - local ctx = ngx.ctx.api_ctx - +local function get_custom_format_log(ctx, format) local log_format = lru_log_format(format or "", nil, gen_log_format, format) local entry = core.table.new(0, core.table.nkeys(log_format)) for k, var_attr in pairs(log_format) do diff --git a/docs/en/latest/plugins/http-logger.md b/docs/en/latest/plugins/http-logger.md index 104452c9b4e1..8b9dec26810b 100644 --- a/docs/en/latest/plugins/http-logger.md +++ b/docs/en/latest/plugins/http-logger.md @@ -91,7 +91,7 @@ hello, world | ---------------- | ------- | ----------- | ------------- | ------- | ---------------------------------------------------------------------------------------- | | log_format | object | optional | {"host": "$host", "@timestamp": "$time_iso8601", "client_ip": "$remote_addr"} | | Log format declared as JSON object. Only string is supported in the `value` part. If the value starts with `$`, it means to get `APISIX` variables or [Nginx variable](http://nginx.org/en/docs/varindex.html). | - Note that the metadata configuration is applied in global scope, which means it will take effect on all Route or Service which use http-logger plugin. 
+ Note that **the metadata configuration is applied in global scope**, which means it will take effect on all Route or Service which use http-logger plugin. **APISIX Variables** diff --git a/docs/en/latest/plugins/kafka-logger.md b/docs/en/latest/plugins/kafka-logger.md index 31694e55bc5d..b2ad5343cf41 100644 --- a/docs/en/latest/plugins/kafka-logger.md +++ b/docs/en/latest/plugins/kafka-logger.md @@ -176,6 +176,44 @@ HTTP/1.1 200 OK hello, world ``` +## Metadata + +| Name | Type | Requirement | Default | Valid | Description | +| ---------------- | ------- | ----------- | ------------- | ------- | ---------------------------------------------------------------------------------------- | +| log_format | object | optional | {"host": "$host", "@timestamp": "$time_iso8601", "client_ip": "$remote_addr"} | | Log format declared as JSON object. Only string is supported in the `value` part. If the value starts with `$`, it means to get `APISIX` variables or [Nginx variable](http://nginx.org/en/docs/varindex.html). | + + Note that **the metadata configuration is applied in global scope**, which means it will take effect on all Route or Service which use kafka-logger plugin. 
+ +**APISIX Variables** + +| Variable Name | Description | Usage Example | +|------------------|-------------------------|----------------| +| route_id | id of `route` | $route_id | +| route_name | name of `route` | $route_name | +| service_id | id of `service` | $service_id | +| service_name | name of `service` | $service_name | +| consumer_name | username of `consumer` | $consumer_name | + +### Example + +```shell +curl http://127.0.0.1:9080/apisix/admin/plugin_metadata/kafka-logger -H 'X-API-KEY: edd1c9f034335f136f87ad84b625c8f1' -X PUT -d ' +{ + "log_format": { + "host": "$host", + "@timestamp": "$time_iso8601", + "client_ip": "$remote_addr" + } +}' +``` + +It is expected to see some logs like that: + +```shell +{"host":"localhost","@timestamp":"2020-09-23T19:05:05-04:00","client_ip":"127.0.0.1","route_id":"1"} +{"host":"localhost","@timestamp":"2020-09-23T19:05:05-04:00","client_ip":"127.0.0.1","route_id":"1"} +``` + ## Disable Plugin Remove the corresponding json configuration in the plugin configuration to disable the `kafka-logger`. 
diff --git a/docs/zh/latest/plugins/kafka-logger.md b/docs/zh/latest/plugins/kafka-logger.md index 8cfa88d5415e..34721385b27a 100644 --- a/docs/zh/latest/plugins/kafka-logger.md +++ b/docs/zh/latest/plugins/kafka-logger.md @@ -175,7 +175,17 @@ hello, world | 名称 | 类型 | 必选项 | 默认值 | 有效值 | 描述 | | ---------------- | ------- | ------ | ------------- | ------- | ------------------------------------------------ | -| log_format | object | 可选 | | | 以 Hash 对象方式声明日志格式。对 value 部分,仅支持字符串。如果是以`$`开头,则表明是要获取 [Nginx 内置变量](http://nginx.org/en/docs/varindex.html)。特别的,**该设置是全局生效的**,意味着指定 log_format 后,将对所有绑定 kafka-logger 的 Route 或 Service 生效。 | +| log_format | object | 可选 | {"host": "$host", "@timestamp": "$time_iso8601", "client_ip": "$remote_addr"} | | 以 JSON 对象方式声明日志格式。对 value 部分,仅支持字符串。如果是以 `$` 开头,则表明是要获取 __APISIX__ 变量或 [Nginx 内置变量](http://nginx.org/en/docs/varindex.html)。特别的,**该设置是全局生效的**,意味着指定 log_format 后,将对所有绑定 kafka-logger 的 Route 或 Service 生效。 | + +**APISIX 变量** + +| 变量名 | 描述 | 使用示例 | +|------------------|-------------------------|----------------| +| route_id | `route` 的 id | $route_id | +| route_name | `route` 的 name | $route_name | +| service_id | `service` 的 id | $service_id | +| service_name | `service` 的 name | $service_name | +| consumer_name | `consumer` 的 username | $consumer_name | ### 设置日志格式示例