feat(plugins): Datadog for metrics collection #5372

Merged: 17 commits, Nov 9, 2021
178 changes: 178 additions & 0 deletions apisix/plugins/datadog.lua
@@ -0,0 +1,178 @@
--
-- Licensed to the Apache Software Foundation (ASF) under one or more
-- contributor license agreements. See the NOTICE file distributed with
-- this work for additional information regarding copyright ownership.
-- The ASF licenses this file to You under the Apache License, Version 2.0
-- (the "License"); you may not use this file except in compliance with
-- the License. You may obtain a copy of the License at
--
-- http://www.apache.org/licenses/LICENSE-2.0
--
-- Unless required by applicable law or agreed to in writing, software
-- distributed under the License is distributed on an "AS IS" BASIS,
-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-- See the License for the specific language governing permissions and
-- limitations under the License.

local core = require("apisix.core")
local plugin = require("apisix.plugin")
local send_statsd = require("apisix.plugins.udp-logger").send_udp_data
local fetch_info = require("apisix.plugins.prometheus.exporter").parse_info_from_ctx
local format = string.format
local concat = table.concat
local tostring = tostring
local ngx = ngx


local plugin_name = "datadog"

local schema = {
    type = "object",
    properties = {}
}

local metadata_schema = {
    type = "object",
    properties = {
        host = {type = "string"},
        port = {type = "integer", minimum = 0},
        namespace = {type = "string", default = "apisix.dev"},
        sample_rate = {type = "number", default = 1, minimum = 0, maximum = 1},
        tags = {
type = "array",
items = {type = "string"},
default = {"source:apisix"}
}
},
required = {"host", "port"}
}
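-- Illustrative only, not part of this PR's diff: metadata accepted by the
-- schema above could look like
--   {host = "127.0.0.1", port = 8125, namespace = "apisix.dev",
--    sample_rate = 1, tags = {"source:apisix"}}
-- assuming a DogStatsD agent listening on the conventional UDP port 8125; it
-- would typically be set through the Admin API at
-- /apisix/admin/plugin_metadata/datadog.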

local _M = {
    version = 0.1,
    priority = 495,
    name = plugin_name,
    schema = schema,
    metadata_schema = metadata_schema,
}

function _M.check_schema(conf, schema_type)
    if schema_type == core.schema.TYPE_METADATA then
        return core.schema.check(metadata_schema, conf)
    end
    return core.schema.check(schema, conf)
end

-- Build the DogStatsD metric suffix: an optional sample-rate part ("|@<rate>")
-- followed by an optional tag part ("|#tag1,tag2,...").
local function generate_tag(sample_rate, tag_arr, route_id, service_id,
                            consumer_name, balancer_ip, http_status)
    local rate = ""
    if sample_rate and sample_rate ~= 1 then
        rate = "|@" .. tostring(sample_rate)
    end

    local tags = {}
    if tag_arr and #tag_arr > 0 then
        tags[#tags + 1] = concat(tag_arr, ",")
    end
    if route_id ~= "" then
        tags[#tags + 1] = "route_id:" .. route_id
    end
    if service_id ~= "" then
        tags[#tags + 1] = "service_id:" .. service_id
    end
    if consumer_name ~= "" then
        tags[#tags + 1] = "consumer_name:" .. consumer_name
    end
    if balancer_ip ~= "" then
        tags[#tags + 1] = "balancer_ip:" .. balancer_ip
    end
    if http_status then
        tags[#tags + 1] = "http_status:" .. http_status
    end

    -- tags must be comma separated, otherwise DogStatsD reads them as one tag
    if #tags > 0 then
        return rate .. "|#" .. concat(tags, ",")
    end

    return rate
end
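-- Illustrative only, not part of this PR's diff: with sample_rate = 0.5,
-- tag_arr = {"source:apisix"}, route_id = "1", http_status = 200 and the
-- remaining arguments empty, generate_tag returns
-- "|@0.5|#source:apisix,route_id:1,http_status:200".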

function _M.log(conf, ctx)
    local metadata = plugin.plugin_metadata(plugin_name)
    if not metadata then
core.log.error("received nil metadata")
end

    local udp_conf = {
        host = metadata.value.host,
        port = metadata.value.port
    }

    local route_id, service_id, consumer_name, balancer_ip = fetch_info(conf, ctx)
    local prefix = metadata.value.namespace

    if prefix ~= "" then
        prefix = prefix .. "."
    end

    local suffix = generate_tag(metadata.value.sample_rate, metadata.value.tags,
                                route_id, service_id, consumer_name,
                                balancer_ip, ctx.var.status)

    -- request counter
    local ok, err = send_statsd(udp_conf,
        format("%s:%s|%s%s", prefix .. "request.counter", 1, "c", suffix))
    if not ok then
        core.log.error("failed to send request_count metric to DogStatsD. err: " .. err)
    end

    -- request latency histogram
    local latency = (ngx.now() - ngx.req.start_time()) * 1000
    local ok, err = send_statsd(udp_conf,
        format("%s:%s|%s%s", prefix .. "request.latency", latency, "h", suffix))
    if not ok then
        core.log.error("failed to send request latency metric to DogStatsD. err: " .. err)
    end

    -- upstream latency
    local apisix_latency = latency
    if ctx.var.upstream_response_time then
        local upstream_latency = ctx.var.upstream_response_time * 1000
        local ok, err = send_statsd(udp_conf,
            format("%s:%s|%s%s", prefix .. "upstream.latency", upstream_latency, "h", suffix))
        if not ok then
            core.log.error("failed to send upstream latency metric to DogStatsD. err: " .. err)
        end
        apisix_latency = apisix_latency - upstream_latency
        if apisix_latency < 0 then
            apisix_latency = 0
        end
    end

    -- apisix_latency
    local ok, err = send_statsd(udp_conf,
        format("%s:%s|%s%s", prefix .. "apisix.latency", apisix_latency, "h", suffix))
    if not ok then
        core.log.error("failed to send apisix latency metric to DogStatsD. err: " .. err)
    end

    -- request body size timer
    local ok, err = send_statsd(udp_conf,
        format("%s:%s|%s%s", prefix .. "ingress.size", ctx.var.request_length, "ms", suffix))
    if not ok then
        core.log.error("failed to send request body size metric to DogStatsD. err: " .. err)
    end

    -- response body size timer
    local ok, err = send_statsd(udp_conf,
        format("%s:%s|%s%s", prefix .. "egress.size", ctx.var.bytes_sent, "ms", suffix))
    if not ok then
        core.log.error("failed to send response body size metric to DogStatsD. err: " .. err)
    end
end

return _M
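
For reference, a minimal standalone sketch (an illustration under assumed metadata values, not part of this PR) of the DogStatsD lines the log phase above assembles and hands to send_udp_data, taking namespace "apisix.dev", sample_rate 0.5 and the default tags {"source:apisix"}:

-- sketch only: mirrors the format("%s:%s|%s%s", ...) calls in _M.log
local format = string.format

local prefix = "apisix.dev."                                      -- namespace plus trailing dot
local suffix = "|@0.5|#source:apisix,route_id:1,http_status:200"  -- result of generate_tag

-- counter: apisix.dev.request.counter:1|c|@0.5|#source:apisix,route_id:1,http_status:200
print(format("%s:%s|%s%s", prefix .. "request.counter", 1, "c", suffix))

-- histogram: apisix.dev.request.latency:3.7|h|@0.5|#source:apisix,route_id:1,http_status:200
print(format("%s:%s|%s%s", prefix .. "request.latency", 3.7, "h", suffix))

Each such line is shipped to the DogStatsD agent as a UDP datagram via udp-logger's send_udp_data.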
12 changes: 8 additions & 4 deletions apisix/plugins/prometheus/exporter.lua
@@ -110,10 +110,7 @@ function _M.init()

end


function _M.log(conf, ctx)
local vars = ctx.var

function _M.parse_info_from_ctx(conf, ctx)
local route_id = ""
local balancer_ip = ctx.balancer_ip or ""
local service_id = ""
@@ -131,6 +128,13 @@ function _M.log(conf, ctx)
end
end
end
return route_id, service_id, consumer_name, balancer_ip
end

function _M.log(conf, ctx)
local vars = ctx.var

local route_id, service_id, consumer_name, balancer_ip = _M.parse_info_from_ctx(conf, ctx)

local matched_uri = ""
local matched_host = ""
8 changes: 5 additions & 3 deletions apisix/plugins/udp-logger.lua
@@ -53,11 +53,13 @@ function _M.check_schema(conf)
return core.schema.check(schema, conf)
end

local function send_udp_data(conf, log_message)
function _M.send_udp_data(conf, log_message)
local err_msg
local res = true
local sock = udp()
sock:settimeout(conf.timeout * 1000)
if conf.timeout then
    sock:settimeout(conf.timeout * 1000)
end

core.log.info("sending a batch logs to ", conf.host, ":", conf.port)

@@ -130,7 +132,7 @@ function _M.log(conf, ctx)
return false, 'error occurred while encoding the data: ' .. err
end

return send_udp_data(conf, data)
return _M.send_udp_data(conf, data)
end

local config = {
1 change: 1 addition & 0 deletions conf/config-default.yaml
@@ -341,6 +341,7 @@ plugins: # plugin list (sorted by priority)
#- dubbo-proxy # priority: 507
- grpc-transcode # priority: 506
- prometheus # priority: 500
- datadog # priority: 495
- echo # priority: 412
- http-logger # priority: 410
- sls-logger # priority: 406
2 changes: 1 addition & 1 deletion t/admin/plugins.t
@@ -40,7 +40,7 @@ __DATA__
--- request
GET /apisix/admin/plugins/list
--- response_body_like eval
qr/\["real-ip","client-control","ext-plugin-pre-req","zipkin","request-id","fault-injection","serverless-pre-function","batch-requests","cors","ip-restriction","ua-restriction","referer-restriction","uri-blocker","request-validation","openid-connect","authz-casbin","wolf-rbac","ldap-auth","hmac-auth","basic-auth","jwt-auth","key-auth","consumer-restriction","authz-keycloak","proxy-mirror","proxy-cache","proxy-rewrite","api-breaker","limit-conn","limit-count","limit-req","gzip","server-info","traffic-split","redirect","response-rewrite","grpc-transcode","prometheus","echo","http-logger","sls-logger","tcp-logger","kafka-logger","syslog","udp-logger","example-plugin","serverless-post-function","ext-plugin-post-req"\]/
qr/\["real-ip","client-control","ext-plugin-pre-req","zipkin","request-id","fault-injection","serverless-pre-function","batch-requests","cors","ip-restriction","ua-restriction","referer-restriction","uri-blocker","request-validation","openid-connect","authz-casbin","wolf-rbac","ldap-auth","hmac-auth","basic-auth","jwt-auth","key-auth","consumer-restriction","authz-keycloak","proxy-mirror","proxy-cache","proxy-rewrite","api-breaker","limit-conn","limit-count","limit-req","gzip","server-info","traffic-split","redirect","response-rewrite","grpc-transcode","prometheus","datadog","echo","http-logger","sls-logger","tcp-logger","kafka-logger","syslog","udp-logger","example-plugin","serverless-post-function","ext-plugin-post-req"\]/
--- no_error_log
[error]
