Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

feat: add custom log format to each logger #8643

Merged
merged 1 commit on Jan 10, 2023
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
14 changes: 1 addition & 13 deletions apisix/plugins/clickhouse-logger.lua
Original file line number Diff line number Diff line change
Expand Up @@ -20,10 +20,8 @@ local log_util = require("apisix.utils.log-util")
local core = require("apisix.core")
local http = require("resty.http")
local url = require("net.url")
local plugin = require("apisix.plugin")
local math_random = math.random

local ngx = ngx
local tostring = tostring

local plugin_name = "clickhouse-logger"
Expand Down Expand Up @@ -148,17 +146,7 @@ end


function _M.log(conf, ctx)
local metadata = plugin.plugin_metadata(plugin_name)
core.log.info("metadata: ", core.json.delay_encode(metadata))
local entry

if metadata and metadata.value.log_format
and core.table.nkeys(metadata.value.log_format) > 0
then
entry = log_util.get_custom_format_log(ctx, metadata.value.log_format)
else
entry = log_util.get_full_log(ngx, conf)
end
local entry = log_util.get_log_entry(plugin_name, conf, ctx)

if batch_processor_manager:add_entry(conf, entry) then
return
Expand Down
15 changes: 1 addition & 14 deletions apisix/plugins/elasticsearch-logger.lua
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,6 @@ local core = require("apisix.core")
local http = require("resty.http")
local log_util = require("apisix.utils.log-util")
local bp_manager_mod = require("apisix.utils.batch-processor-manager")
local plugin = require("apisix.plugin")

local ngx = ngx
local str_format = core.string.format
Expand Down Expand Up @@ -98,19 +97,7 @@ end


local function get_logger_entry(conf, ctx)
local entry
local metadata = plugin.plugin_metadata(plugin_name)
core.log.info("metadata: ", core.json.delay_encode(metadata))
if metadata and metadata.value.log_format
and core.table.nkeys(metadata.value.log_format) > 0
then
entry = log_util.get_custom_format_log(ctx, metadata.value.log_format)
core.log.info("custom log format entry: ", core.json.delay_encode(entry))
else
entry = log_util.get_full_log(ngx, conf)
core.log.info("full log entry: ", core.json.delay_encode(entry))
end

local entry = log_util.get_log_entry(plugin_name, conf, ctx)
return core.json.encode({
create = {
_index = conf.field.index,
Expand Down
13 changes: 1 addition & 12 deletions apisix/plugins/file-logger.lua
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,6 @@
--
local log_util = require("apisix.utils.log-util")
local core = require("apisix.core")
local plugin = require("apisix.plugin")
local ngx = ngx
local io_open = io.open
local is_apisix_or, process = pcall(require, "resty.apisix.process")
Expand Down Expand Up @@ -149,17 +148,7 @@ function _M.body_filter(conf, ctx)
end

function _M.log(conf, ctx)
local metadata = plugin.plugin_metadata(plugin_name)
local entry

if metadata and metadata.value.log_format
and core.table.nkeys(metadata.value.log_format) > 0
then
entry = log_util.get_custom_format_log(ctx, metadata.value.log_format)
else
entry = log_util.get_full_log(ngx, conf)
end

local entry = log_util.get_log_entry(plugin_name, conf, ctx)
write_file_data(conf, entry)
end

Expand Down
72 changes: 45 additions & 27 deletions apisix/plugins/google-cloud-logging.lua
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,6 @@
--

local core = require("apisix.core")
local ngx = ngx
local tostring = tostring
local http = require("resty.http")
local log_util = require("apisix.utils.log-util")
Expand Down Expand Up @@ -95,6 +94,13 @@ local schema = {
encrypt_fields = {"auth_config.private_key"},
}

local metadata_schema = {
type = "object",
properties = {
log_format = log_util.metadata_schema_log_format,
},
}


local function send_to_google(oauth, entries)
local http_new = http.new()
Expand Down Expand Up @@ -163,32 +169,39 @@ end


local function get_logger_entry(conf, ctx, oauth)
local entry = log_util.get_full_log(ngx, conf)
local google_entry = {
httpRequest = {
requestMethod = entry.request.method,
requestUrl = entry.request.url,
requestSize = entry.request.size,
status = entry.response.status,
responseSize = entry.response.size,
userAgent = entry.request.headers and entry.request.headers["user-agent"],
remoteIp = entry.client_ip,
serverIp = entry.upstream,
latency = tostring(core.string.format("%0.3f", entry.latency / 1000)) .. "s"
},
jsonPayload = {
route_id = entry.route_id,
service_id = entry.service_id,
},
labels = {
source = "apache-apisix-google-cloud-logging"
},
timestamp = log_util.get_rfc3339_zulu_timestamp(),
resource = conf.resource,
insertId = ctx.var.request_id,
logName = core.string.format("projects/%s/logs/%s", oauth.project_id,
conf.log_id)
local entry, customized = log_util.get_log_entry(plugin_name, conf, ctx)
local google_entry
if not customized then
google_entry = {
httpRequest = {
requestMethod = entry.request.method,
requestUrl = entry.request.url,
requestSize = entry.request.size,
status = entry.response.status,
responseSize = entry.response.size,
userAgent = entry.request.headers and entry.request.headers["user-agent"],
remoteIp = entry.client_ip,
serverIp = entry.upstream,
latency = tostring(core.string.format("%0.3f", entry.latency / 1000)) .. "s"
},
jsonPayload = {
route_id = entry.route_id,
service_id = entry.service_id,
},
}
else
google_entry = {
jsonPayload = entry,
}
end

google_entry.labels = {
source = "apache-apisix-google-cloud-logging"
}
google_entry.timestamp = log_util.get_rfc3339_zulu_timestamp()
google_entry.resource = conf.resource
google_entry.insertId = ctx.var.request_id
google_entry.logName = core.string.format("projects/%s/logs/%s", oauth.project_id, conf.log_id)

return google_entry
end
Expand All @@ -198,11 +211,16 @@ local _M = {
version = 0.1,
priority = 407,
name = plugin_name,
metadata_schema = metadata_schema,
schema = batch_processor_manager:wrap_schema(schema),
}


function _M.check_schema(conf)
function _M.check_schema(conf, schema_type)
if schema_type == core.schema.TYPE_METADATA then
return core.schema.check(metadata_schema, conf)
end

return core.schema.check(schema, conf)
end

Expand Down
15 changes: 1 addition & 14 deletions apisix/plugins/http-logger.lua
Original file line number Diff line number Diff line change
Expand Up @@ -20,9 +20,7 @@ local log_util = require("apisix.utils.log-util")
local core = require("apisix.core")
local http = require("resty.http")
local url = require("net.url")
local plugin = require("apisix.plugin")

local ngx = ngx
local tostring = tostring
local ipairs = ipairs

Expand Down Expand Up @@ -156,18 +154,7 @@ end


function _M.log(conf, ctx)
local metadata = plugin.plugin_metadata(plugin_name)
core.log.info("metadata: ", core.json.delay_encode(metadata))

local entry

if metadata and metadata.value.log_format
and core.table.nkeys(metadata.value.log_format) > 0
then
entry = log_util.get_custom_format_log(ctx, metadata.value.log_format)
else
entry = log_util.get_full_log(ngx, conf)
end
local entry = log_util.get_log_entry(plugin_name, conf, ctx)

if not entry.route_id then
entry.route_id = "no-matched"
Expand Down
14 changes: 1 addition & 13 deletions apisix/plugins/kafka-logger.lua
Original file line number Diff line number Diff line change
Expand Up @@ -18,14 +18,12 @@ local core = require("apisix.core")
local log_util = require("apisix.utils.log-util")
local producer = require ("resty.kafka.producer")
local bp_manager_mod = require("apisix.utils.batch-processor-manager")
local plugin = require("apisix.plugin")

local math = math
local pairs = pairs
local type = type
local plugin_name = "kafka-logger"
local batch_processor_manager = bp_manager_mod.new("kafka logger")
local ngx = ngx

local lrucache = core.lrucache.new({
type = "plugin",
Expand Down Expand Up @@ -220,17 +218,7 @@ function _M.log(conf, ctx)
-- core.log.info("origin entry: ", entry)

else
local metadata = plugin.plugin_metadata(plugin_name)
core.log.info("metadata: ", core.json.delay_encode(metadata))
if metadata and metadata.value.log_format
and core.table.nkeys(metadata.value.log_format) > 0
then
entry = log_util.get_custom_format_log(ctx, metadata.value.log_format)
core.log.info("custom log format entry: ", core.json.delay_encode(entry))
else
entry = log_util.get_full_log(ngx, conf)
core.log.info("full log entry: ", core.json.delay_encode(entry))
end
entry = log_util.get_log_entry(plugin_name, conf, ctx)
end

if batch_processor_manager:add_entry(conf, entry) then
Expand Down
15 changes: 3 additions & 12 deletions apisix/plugins/loggly.lua
Original file line number Diff line number Diff line change
Expand Up @@ -175,24 +175,15 @@ end


local function generate_log_message(conf, ctx)
local metadata = plugin.plugin_metadata(plugin_name)
local entry

if metadata and metadata.value.log_format
and core.table.nkeys(metadata.value.log_format) > 0
then
entry = log_util.get_custom_format_log(ctx, metadata.value.log_format)
else
entry = log_util.get_full_log(ngx, conf)
end

local entry = log_util.get_log_entry(plugin_name, conf, ctx)
local json_str, err = core.json.encode(entry)
if not json_str then
core.log.error('error occurred while encoding the data: ', err)
return nil
end

if metadata.value.protocol ~= "syslog" then
local metadata = plugin.plugin_metadata(plugin_name)
if metadata and metadata.value.protocol ~= "syslog" then
return json_str
end

Expand Down
14 changes: 1 addition & 13 deletions apisix/plugins/rocketmq-logger.lua
Original file line number Diff line number Diff line change
Expand Up @@ -19,12 +19,10 @@ local log_util = require("apisix.utils.log-util")
local producer = require ("resty.rocketmq.producer")
local acl_rpchook = require("resty.rocketmq.acl_rpchook")
local bp_manager_mod = require("apisix.utils.batch-processor-manager")
local plugin = require("apisix.plugin")

local type = type
local plugin_name = "rocketmq-logger"
local batch_processor_manager = bp_manager_mod.new("rocketmq logger")
local ngx = ngx

local lrucache = core.lrucache.new({
type = "plugin",
Expand Down Expand Up @@ -140,17 +138,7 @@ function _M.log(conf, ctx)
if conf.meta_format == "origin" then
entry = log_util.get_req_original(ctx, conf)
else
local metadata = plugin.plugin_metadata(plugin_name)
core.log.info("metadata: ", core.json.delay_encode(metadata))
if metadata and metadata.value.log_format
and core.table.nkeys(metadata.value.log_format) > 0
then
entry = log_util.get_custom_format_log(ctx, metadata.value.log_format)
core.log.info("custom log format entry: ", core.json.delay_encode(entry))
else
entry = log_util.get_full_log(ngx, conf)
core.log.info("full log entry: ", core.json.delay_encode(entry))
end
entry = log_util.get_log_entry(plugin_name, conf, ctx)
end

if batch_processor_manager:add_entry(conf, entry) then
Expand Down
14 changes: 1 addition & 13 deletions apisix/plugins/skywalking-logger.lua
Original file line number Diff line number Diff line change
Expand Up @@ -20,7 +20,6 @@ local log_util = require("apisix.utils.log-util")
local core = require("apisix.core")
local http = require("resty.http")
local url = require("net.url")
local plugin = require("apisix.plugin")

local base64 = require("ngx.base64")
local ngx_re = require("ngx.re")
Expand Down Expand Up @@ -115,18 +114,7 @@ end


function _M.log(conf, ctx)
local metadata = plugin.plugin_metadata(plugin_name)
core.log.info("metadata: ", core.json.delay_encode(metadata))

local log_body
if metadata and metadata.value.log_format
and core.table.nkeys(metadata.value.log_format) > 0
then
log_body = log_util.get_custom_format_log(ctx, metadata.value.log_format)
else
log_body = log_util.get_full_log(ngx, conf)
end

local log_body = log_util.get_log_entry(plugin_name, conf, ctx)
local trace_context
local sw_header = ngx.req.get_headers()["sw8"]
if sw_header then
Expand Down
13 changes: 1 addition & 12 deletions apisix/plugins/sls-logger.lua
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,6 @@
local core = require("apisix.core")
local log_util = require("apisix.utils.log-util")
local bp_manager_mod = require("apisix.utils.batch-processor-manager")
local plugin = require("apisix.plugin")


local plugin_name = "sls-logger"
Expand Down Expand Up @@ -131,17 +130,7 @@ end

-- log phase in APISIX
function _M.log(conf, ctx)
local metadata = plugin.plugin_metadata(plugin_name)
local entry

if metadata and metadata.value.log_format
and core.table.nkeys(metadata.value.log_format) > 0
then
entry = log_util.get_custom_format_log(ctx, metadata.value.log_format)
else
entry = log_util.get_full_log(ngx, conf)
end

local entry = log_util.get_log_entry(plugin_name, conf, ctx)
local json_str, err = core.json.encode(entry)
if not json_str then
core.log.error('error occurred while encoding the data: ', err)
Expand Down
Loading