diff --git a/.github/workflows/build_and_test.yml b/.github/workflows/build_and_test.yml index 37573fd111e1..a205b1f671a0 100644 --- a/.github/workflows/build_and_test.yml +++ b/.github/workflows/build_and_test.yml @@ -434,3 +434,4 @@ jobs: eval $(perl -I $HOME/perl5/lib/perl5/ -Mlocal::lib) .ci/run_tests.sh + diff --git a/CHANGELOG.md b/CHANGELOG.md index 08e4bdcdde5c..70ce0b16754d 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -199,6 +199,10 @@ ignore the `domains` field. - Plugins with colliding priorities have now deterministic sorting based on their name [#8957](https://github.com/Kong/kong/pull/8957) +- **Statsd**: + - The metric name that is related to the service has been renamed by adding a `service.` prefix. e.g. `kong.service..request.count` [#9046](https://github.com/Kong/kong/pull/9046) + - The metric `kong..request.status.` and `kong..user..request.status.` has been renamed to `kong.service..status.` and `kong.service..user..status.` [#9046](https://github.com/Kong/kong/pull/9046) + - The metric `*.status..total` from metrics `status_count` and `status_count_per_user` has been removed [#9046](https://github.com/Kong/kong/pull/9046) ### Deprecations @@ -348,6 +352,8 @@ [#8900](https://github.com/Kong/kong/pull/8900) - Sync all plugin versions to the Kong version [#8772](https://github.com/Kong/kong/pull/8772) +- **Statsd**: :fireworks: **Newly open-sourced plugin capabilities**: All capabilities of [Statsd Advanced](https://docs.konghq.com/hub/kong-inc/statsd-advanced/) are now bundled in [Statsd](https://docs.konghq.com/hub/kong-inc/statsd). 
+ [#9046](https://github.com/Kong/kong/pull/9046) #### Configuration diff --git a/kong-3.0.0-0.rockspec b/kong-3.0.0-0.rockspec index bf0a4f0106ac..eb97cb890b47 100644 --- a/kong-3.0.0-0.rockspec +++ b/kong-3.0.0-0.rockspec @@ -401,7 +401,9 @@ build = { ["kong.plugins.datadog.schema"] = "kong/plugins/datadog/schema.lua", ["kong.plugins.datadog.statsd_logger"] = "kong/plugins/datadog/statsd_logger.lua", + ["kong.plugins.statsd.constants"] = "kong/plugins/statsd/constants.lua", ["kong.plugins.statsd.handler"] = "kong/plugins/statsd/handler.lua", + ["kong.plugins.statsd.log"] = "kong/plugins/statsd/log.lua", ["kong.plugins.statsd.schema"] = "kong/plugins/statsd/schema.lua", ["kong.plugins.statsd.statsd_logger"] = "kong/plugins/statsd/statsd_logger.lua", diff --git a/kong/plugins/statsd/constants.lua b/kong/plugins/statsd/constants.lua new file mode 100644 index 000000000000..c722c99c537a --- /dev/null +++ b/kong/plugins/statsd/constants.lua @@ -0,0 +1,9 @@ +-- Common constants +local constants = { + -- Lua style pattern, used in schema validation + REGEX_STATUS_CODE_RANGE = [[^[0-9]+-[0-9]+$]], + -- PCRE pattern, used in log_handler.lua + REGEX_SPLIT_STATUS_CODES_BY_DASH = [[(\d\d\d)-(\d\d\d)]], +} + +return constants diff --git a/kong/plugins/statsd/handler.lua b/kong/plugins/statsd/handler.lua index 0295ed2f9c64..27ff23274bf9 100644 --- a/kong/plugins/statsd/handler.lua +++ b/kong/plugins/statsd/handler.lua @@ -1,132 +1,7 @@ -local statsd_logger = require "kong.plugins.statsd.statsd_logger" +local log = require "kong.plugins.statsd.log" local kong_meta = require "kong.meta" -local kong = kong -local ngx = ngx -local timer_at = ngx.timer.at -local pairs = pairs -local gsub = string.gsub -local fmt = string.format - - -local get_consumer_id = { - consumer_id = function(consumer) - return consumer and gsub(consumer.id, "-", "_") - end, - custom_id = function(consumer) - return consumer and consumer.custom_id - end, - username = function(consumer) - return consumer and 
consumer.username - end -} - - -local metrics = { - status_count = function (service_name, message, metric_config, logger) - local response_status = message.response and message.response.status or 0 - local format = fmt("%s.request.status", service_name, - response_status) - - logger:send_statsd(fmt("%s.%s", format, response_status), - 1, logger.stat_types.counter, metric_config.sample_rate) - - logger:send_statsd(fmt("%s.%s", format, "total"), 1, - logger.stat_types.counter, metric_config.sample_rate) - end, - unique_users = function (service_name, message, metric_config, logger) - local get_consumer_id = get_consumer_id[metric_config.consumer_identifier] - local consumer_id = get_consumer_id(message.consumer) - - if consumer_id then - local stat = fmt("%s.user.uniques", service_name) - - logger:send_statsd(stat, consumer_id, logger.stat_types.set) - end - end, - request_per_user = function (service_name, message, metric_config, logger) - local get_consumer_id = get_consumer_id[metric_config.consumer_identifier] - local consumer_id = get_consumer_id(message.consumer) - - if consumer_id then - local stat = fmt("%s.user.%s.request.count", service_name, consumer_id) - - logger:send_statsd(stat, 1, logger.stat_types.counter, - metric_config.sample_rate) - end - end, - status_count_per_user = function (service_name, message, metric_config, logger) - local get_consumer_id = get_consumer_id[metric_config.consumer_identifier] - local consumer_id = get_consumer_id(message.consumer) - - if consumer_id then - local format = fmt("%s.user.%s.request.status", service_name, consumer_id) - - logger:send_statsd(fmt("%s.%s", format, message.response.status), - 1, logger.stat_types.counter, - metric_config.sample_rate) - - logger:send_statsd(fmt("%s.%s", format, "total"), - 1, logger.stat_types.counter, - metric_config.sample_rate) - end - end, -} - - -local function log(premature, conf, message) - if premature then - return - end - - local name = gsub(message.service.name ~= 
ngx.null and - message.service.name or message.service.host, - "%.", "_") - - local stat_name = { - request_size = name .. ".request.size", - response_size = name .. ".response.size", - latency = name .. ".latency", - upstream_latency = name .. ".upstream_latency", - kong_latency = name .. ".kong_latency", - request_count = name .. ".request.count", - } - local stat_value = { - request_size = message.request and message.request.size, - response_size = message.response and message.response.size, - latency = message.latencies.request, - upstream_latency = message.latencies.proxy, - kong_latency = message.latencies.kong, - request_count = 1, - } - - local logger, err = statsd_logger:new(conf) - if err then - kong.log.err("failed to create Statsd logger: ", err) - return - end - - for _, metric_config in pairs(conf.metrics) do - local metric = metrics[metric_config.name] - - if metric then - metric(name, message, metric_config, logger) - - else - local stat_name = stat_name[metric_config.name] - local stat_value = stat_value[metric_config.name] - - logger:send_statsd(stat_name, stat_value, - logger.stat_types[metric_config.stat_type], - metric_config.sample_rate) - end - end - - logger:close_socket() -end - - local StatsdHandler = { PRIORITY = 11, VERSION = kong_meta.version, @@ -134,16 +9,7 @@ local StatsdHandler = { function StatsdHandler:log(conf) - if not ngx.ctx.service then - return - end - - local message = kong.log.serialize() - - local ok, err = timer_at(0, log, conf, message) - if not ok then - kong.log.err("failed to create timer: ", err) - end + log.execute(conf) end diff --git a/kong/plugins/statsd/log.lua b/kong/plugins/statsd/log.lua new file mode 100644 index 000000000000..9c3eefa69183 --- /dev/null +++ b/kong/plugins/statsd/log.lua @@ -0,0 +1,357 @@ +local constants = require "kong.plugins.statsd.constants" +local statsd_logger = require "kong.plugins.statsd.statsd_logger" +local ws = require "kong.workspaces" + +local ngx = ngx +local kong = kong 
+local ngx_timer_at = ngx.timer.at +local ngx_time = ngx.time +local re_gsub = ngx.re.gsub +local pairs = pairs +local string_format = string.format +local match = ngx.re.match +local ipairs = ipairs +local tonumber = tonumber +local knode = (kong and kong.node) and kong.node or require "kong.pdk.node".new() +local null = ngx.null + +local START_RANGE_IDX = 1 +local END_RANGE_IDX = 2 + +local result_cache = setmetatable({}, { __mode = "k" }) +local range_cache = setmetatable({}, { __mode = "k" }) + +local _M = {} + + +local function get_cache_value(cache, cache_key) + local cache_value = cache[cache_key] + if not cache_value then + cache_value = {} + cache[cache_key] = cache_value + end + return cache_value +end + +local function extract_range(status_code_list, range) + local start_code, end_code + local ranges = get_cache_value(range_cache, status_code_list) + + -- If range isn't in the cache, extract and put it in + if not ranges[range] then + local range_result, err = match(range, constants.REGEX_SPLIT_STATUS_CODES_BY_DASH, "oj") + + if err then + kong.log.error(err) + return + end + ranges[range] = { range_result[START_RANGE_IDX], range_result[END_RANGE_IDX] } + end + + start_code = ranges[range][START_RANGE_IDX] + end_code = ranges[range][END_RANGE_IDX] + + return start_code, end_code +end + +-- Returns true if a given status code is within status code ranges +local function is_in_range(status_code_list, status_code) + -- If there is no configuration then pass all response codes + if not status_code_list then + return true + end + + local result_list = get_cache_value(result_cache, status_code_list) + local result = result_list[status_code] + + -- If result is found in a cache then return results instantly + if result ~= nil then + return result + end + + for _, range in ipairs(status_code_list) do + -- Get status code range splitting by "-" character + local start_code, end_code = extract_range(status_code_list, range) + + -- Checks if there is both interval 
numbers + if start_code and end_code then + -- If HTTP response code is in the range return true + if status_code >= tonumber(start_code) and status_code <= tonumber(end_code) then + -- Storing results in a cache + result_list[status_code] = true + return true + end + end + end + + -- Return false if there are no match for a given status code ranges and store it in cache + result_list[status_code] = false + return false +end + + +local worker_id +local hostname = re_gsub(knode.get_hostname(), [[\.]], "_", "oj") + +-- downsample timestamp +local shdict_metrics_last_sent = 0 +local SHDICT_METRICS_SEND_THRESHOLD = 60 + + +local get_consumer_id = { + consumer_id = function(consumer) + return consumer and consumer.id + end, + custom_id = function(consumer) + return consumer and consumer.custom_id + end, + username = function(consumer) + return consumer and consumer.username + end +} + +local get_service_id = { + service_id = function(service) + return service and service.id + end, + service_name = function(service) + return service and service.name + end, + service_host = function(service) + return service and service.host + end, + service_name_or_host = function(service) + return service and (service.name ~= null and + service.name or service.host) + end +} + +local get_workspace_id = { + workspace_id = function() + return ws.get_workspace_id() + end, + workspace_name = function() + local workspace = ws.get_workspace() + return workspace.name + end +} + +local metrics = { + unique_users = function (scope_name, message, metric_config, logger, conf) + local get_consumer_id = get_consumer_id[metric_config.consumer_identifier or conf.consumer_identifier_default] + local consumer_id = get_consumer_id(message.consumer) + + if consumer_id then + local stat = string_format("%s.user.uniques", scope_name) + logger:send_statsd(stat, consumer_id, logger.stat_types.set) + end + end, + request_per_user = function (scope_name, message, metric_config, logger, conf) + local 
get_consumer_id = get_consumer_id[metric_config.consumer_identifier or conf.consumer_identifier_default] + local consumer_id = get_consumer_id(message.consumer) + + if consumer_id then + local stat = string_format("%s.user.%s.request.count", scope_name, consumer_id) + logger:send_statsd(stat, 1, logger.stat_types.counter, + metric_config.sample_rate) + end + end, + status_count = function (scope_name, message, metric_config, logger, conf) + logger:send_statsd(string_format("%s.status.%s", scope_name, message.response.status), + 1, logger.stat_types.counter, metric_config.sample_rate) + end, + status_count_per_user = function (scope_name, message, metric_config, logger, conf) + local get_consumer_id = get_consumer_id[metric_config.consumer_identifier or conf.consumer_identifier_default] + local consumer_id = get_consumer_id(message.consumer) + + if consumer_id then + logger:send_statsd(string_format("%s.user.%s.status.%s", scope_name, + consumer_id, message.response.status), + 1, logger.stat_types.counter, + metric_config.sample_rate) + end + end, + status_count_per_workspace = function (scope_name, message, metric_config, logger, conf) + local get_workspace_id = get_workspace_id[metric_config.workspace_identifier or conf.workspace_identifier_default] + local workspace_id = get_workspace_id() + + if workspace_id then + logger:send_statsd(string_format("%s.workspace.%s.status.%s", scope_name, + workspace_id, message.response.status), + 1, logger.stat_types.counter, + metric_config.sample_rate) + end + end, + status_count_per_user_per_route = function (_, message, metric_config, logger, conf) + local get_consumer_id = get_consumer_id[metric_config.consumer_identifier or conf.consumer_identifier_default] + local consumer_id = get_consumer_id(message.consumer) + if not consumer_id then + return + end + + local route = message.route + + if route.id then + logger:send_statsd(string_format("route.%s.user.%s.status.%s", route.id, + consumer_id, message.response.status), + 
1, logger.stat_types.counter, + metric_config.sample_rate) + end + end, +} + +-- add shdict metrics +if ngx.config.ngx_lua_version >= 10011 then + metrics.shdict_usage = function (_, message, metric_config, logger) + -- we don't need this for every request, send every 1 minute + -- also only one worker needs to send this because it's shared + if worker_id ~= 0 then + return + end + + local now = ngx_time() + if shdict_metrics_last_sent + SHDICT_METRICS_SEND_THRESHOLD < now then + shdict_metrics_last_sent = now + for shdict_name, shdict in pairs(ngx.shared) do + logger:send_statsd(string_format("node.%s.shdict.%s.free_space", + hostname, shdict_name), + shdict:free_space(), logger.stat_types.gauge, + metric_config.sample_rate) + logger:send_statsd(string_format("node.%s.shdict.%s.capacity", + hostname, shdict_name), + shdict:capacity(), logger.stat_types.gauge, + metric_config.sample_rate) + end + end + end +end + +local function get_scope_name(conf, message, service_identifier) + local api = message.api + local service = message.service + local scope_name + + if service then + scope_name = "service." + -- don't fail on ce schema where service_identifier is not defined + if not service_identifier then + service_identifier = "service_name_or_host" + end + + local service_name = get_service_id[service_identifier](service) + if not service_name or service_name == null then + scope_name = scope_name .. "unnamed" + else + scope_name = scope_name .. re_gsub(service_name, [[\.]], "_", "oj") + end + elseif api then + scope_name = "api." + + if not api or api == null then + scope_name = scope_name .. "unnamed" + else + scope_name = scope_name .. 
re_gsub(api.name, [[\.]], "_", "oj") + end + else + -- TODO: this follows the pattern used by + -- https://github.com/Kong/kong/pull/2702 (which prevents an error from + -- being thrown and avoids confusing reports as per our metrics keys), but + -- as it stands, hides traffic from monitoring tools when the plugin is + -- configured globally. In fact, this basically disables this plugin when + -- it is configured to run globally, or per-consumer without an + -- API/Route/Service. + + -- Changes in statsd-advanced: we still log these requests, but into a namespace of + -- "global.unmatched". + -- And we don't send upstream_latency and metrics with consumer or route + scope_name = "global.unmatched" + end + + return scope_name +end + +local function log(premature, conf, message) + if premature then + return + end + + local stat_name = { + request_size = "request.size", + response_size = "response.size", + latency = "latency", + upstream_latency = "upstream_latency", + kong_latency = "kong_latency", + request_count = "request.count", + } + local stat_value = { + request_size = message.request.size, + response_size = message.response.size, + latency = message.latencies.request, + upstream_latency = message.latencies.proxy, + kong_latency = message.latencies.kong, + request_count = 1, + } + + local logger, err = statsd_logger:new(conf) + if err then + kong.log.err("failed to create Statsd logger: ", err) + return + end + + for _, metric_config in pairs(conf.metrics) do + local metric_config_name = metric_config.name + local metric = metrics[metric_config_name] + + local name = get_scope_name(conf, message, metric_config.service_identifier or conf.service_identifier_default) + + if metric then + metric(name, message, metric_config, logger, conf) + + else + local stat_name = stat_name[metric_config_name] + local stat_value = stat_value[metric_config_name] + + if stat_value ~= nil and stat_value ~= -1 then + logger:send_statsd(name .. "." .. 
stat_name, stat_value, + logger.stat_types[metric_config.stat_type], + metric_config.sample_rate) + end + end + end + + logger:close_socket() +end + + + +function _M.execute(conf) + if not is_in_range(conf.allow_status_codes, ngx.status) then + return + end + + kong.log.debug("Status code is within given status code ranges") + + if not worker_id then + worker_id = ngx.worker.id() + end + + conf._prefix = conf.prefix + + if conf.hostname_in_prefix then + conf._prefix = conf._prefix .. ".node." .. hostname + end + + local message = kong.log.serialize({ngx = ngx, kong = kong, }) + message.cache_metrics = ngx.ctx.cache_metrics + + local ok, err = ngx_timer_at(0, log, conf, message) + if not ok then + kong.log.err("failed to create timer: ", err) + end + +end + +-- only for test +_M.is_in_range = is_in_range + +return _M diff --git a/kong/plugins/statsd/schema.lua b/kong/plugins/statsd/schema.lua index 9ef94520c97e..09070e9dace4 100644 --- a/kong/plugins/statsd/schema.lua +++ b/kong/plugins/statsd/schema.lua @@ -1,10 +1,13 @@ local typedefs = require "kong.db.schema.typedefs" +local constants = require "kong.plugins.statsd.constants" local METRIC_NAMES = { "kong_latency", "latency", "request_count", "request_per_user", "request_size", "response_size", "status_count", "status_count_per_user", "unique_users", "upstream_latency", + "status_count_per_workspace", "status_count_per_user_per_route", + "shdict_usage", } @@ -17,58 +20,120 @@ local CONSUMER_IDENTIFIERS = { "consumer_id", "custom_id", "username", } +local SERVICE_IDENTIFIERS = { + "service_id", "service_name", "service_host", "service_name_or_host", +} + +local WORKSPACE_IDENTIFIERS = { + "workspace_id", "workspace_name", +} + local DEFAULT_METRICS = { { - name = "request_count", - stat_type = "counter", - sample_rate = 1, + name = "request_count", + stat_type = "counter", + sample_rate = 1, + service_identifier = nil, }, { - name = "latency", - stat_type = "timer", + name = "latency", + stat_type = "timer", + 
service_identifier = nil, }, { - name = "request_size", - stat_type = "timer", + name = "request_size", + stat_type = "timer", + service_identifier = nil, }, { - name = "status_count", - stat_type = "counter", - sample_rate = 1, + name = "status_count", + stat_type = "counter", + sample_rate = 1, + service_identifier = nil, }, { - name = "response_size", - stat_type = "timer" + name = "response_size", + stat_type = "timer", + service_identifier = nil, }, { name = "unique_users", stat_type = "set", - consumer_identifier = "custom_id", + consumer_identifier = nil, + service_identifier = nil, }, { - name = "request_per_user", - stat_type = "counter", - sample_rate = 1, - consumer_identifier = "custom_id", + name = "request_per_user", + stat_type = "counter", + sample_rate = 1, + consumer_identifier = nil, + service_identifier = nil, }, { - name = "upstream_latency", - stat_type = "timer", + name = "upstream_latency", + stat_type = "timer", + service_identifier = nil, }, { - name = "kong_latency", - stat_type = "timer", + name = "kong_latency", + stat_type = "timer", + service_identifier = nil, }, { name = "status_count_per_user", stat_type = "counter", sample_rate = 1, - consumer_identifier = "custom_id", + consumer_identifier = nil, + service_identifier = nil, + }, + { + name = "status_count_per_workspace", + stat_type = "counter", + sample_rate = 1, + workspace_identifier = nil, + }, + { + name = "status_count_per_user_per_route", + stat_type = "counter", + sample_rate = 1, + consumer_identifier = nil, + service_identifier = nil, + }, + { + name = "shdict_usage", + stat_type = "gauge", + sample_rate = 1, + service_identifier = nil, }, } +local MUST_TYPE = {} + +local MUST_IDENTIFIER = {} + +for _, metric in ipairs(DEFAULT_METRICS) do + local typ = metric.stat_type + if typ == "counter" or typ == "set" or typ == "gauge" then + if not MUST_TYPE[typ] then + MUST_TYPE[typ] = { metric.name } + else + MUST_TYPE[typ][#MUST_TYPE[typ]+1] = metric.name + end + end + + for _, 
id in ipairs({ "service", "consumer", "workspace"}) do + if metric[id .. "_identifier"] then + if not MUST_IDENTIFIER[id] then + MUST_IDENTIFIER[id] = { metric.name } + else + MUST_IDENTIFIER[id][#MUST_IDENTIFIER[id]+1] = metric.name + end + end + end +end + return { name = "statsd", fields = { @@ -89,39 +154,52 @@ return { { stat_type = { type = "string", required = true, one_of = STAT_TYPES }, }, { sample_rate = { type = "number", gt = 0 }, }, { consumer_identifier = { type = "string", one_of = CONSUMER_IDENTIFIERS }, }, + { service_identifier = { type = "string", one_of = SERVICE_IDENTIFIERS }, }, + { workspace_identifier = { type = "string", one_of = WORKSPACE_IDENTIFIERS }, }, }, entity_checks = { { conditional = { - if_field = "name", - if_match = { eq = "unique_users" }, - then_field = "stat_type", - then_match = { eq = "set" }, + if_field = "name", + if_match = { one_of = MUST_TYPE["set"] }, + then_field = "stat_type", + then_match = { eq = "set" }, }, }, - { conditional = { - if_field = "stat_type", - if_match = { one_of = { "counter", "gauge" }, }, - then_field = "sample_rate", - then_match = { required = true }, + if_field = "name", + if_match = { one_of = MUST_TYPE["counter"] }, + then_field = "stat_type", + then_match = { eq = "counter" }, }, }, - { conditional = { - if_field = "name", - if_match = { one_of = { "status_count_per_user", "request_per_user", "unique_users" }, }, - then_field = "consumer_identifier", - then_match = { required = true }, + if_field = "name", + if_match = { one_of = MUST_TYPE["gauge"] }, + then_field = "stat_type", + then_match = { eq = "gauge" }, }, }, - { conditional = { - if_field = "name", - if_match = { one_of = { "status_count", "status_count_per_user", "request_per_user" }, }, - then_field = "stat_type", - then_match = { eq = "counter" }, + if_field = "stat_type", + if_match = { one_of = { "counter", "gauge" }, }, + then_field = "sample_rate", + then_match = { required = true }, }, }, }, }, + }, }, + { 
allow_status_codes = { + type = "array", + elements = { + type = "string", + match = constants.REGEX_STATUS_CODE_RANGE, }, - }, + }, }, + -- combine udp packet up to this value, don't combine if it's 0 + -- 65,507 bytes (65,535 − 8 byte UDP header − 20 byte IP header) -- Wikipedia + { udp_packet_size = { type = "number", between = {0, 65507}, default = 0 }, }, + { use_tcp = { type = "boolean", default = false }, }, + { hostname_in_prefix = { type = "boolean", default = false }, }, + { consumer_identifier_default = { type = "string", required = true, default = "custom_id", one_of = CONSUMER_IDENTIFIERS }, }, + { service_identifier_default = { type = "string", required = true, default = "service_name_or_host", one_of = SERVICE_IDENTIFIERS }, }, + { workspace_identifier_default = { type = "string", required = true, default = "workspace_id", one_of = WORKSPACE_IDENTIFIERS }, }, }, }, }, diff --git a/kong/plugins/statsd/statsd_logger.lua b/kong/plugins/statsd/statsd_logger.lua index 496d867a2cbb..2a32e32269ae 100644 --- a/kong/plugins/statsd/statsd_logger.lua +++ b/kong/plugins/statsd/statsd_logger.lua @@ -1,9 +1,17 @@ -local kong = kong -local udp = ngx.socket.udp -local setmetatable = setmetatable -local tostring = tostring -local fmt = string.format - +local ngx_socket_udp = ngx.socket.udp +local ngx_socket_tcp = ngx.socket.tcp +local ngx_log = ngx.log +local NGX_ERR = ngx.ERR +local NGX_WARN = ngx.WARN +local NGX_DEBUG = ngx.DEBUG +local setmetatable = setmetatable +local tostring = tostring +local fmt = string.format +local table_concat = table.concat +local new_tab = require "table.new" +local clear_tab = require "table.clear" + +local DEFAULT_METRICS_COUNT = 11 local stat_types = { gauge = "g", @@ -30,44 +38,113 @@ statsd_mt.__index = statsd_mt function statsd_mt:new(conf) - local sock = udp() - local _, err = sock:setpeername(conf.host, conf.port) + local sock, err, _ + if conf.use_tcp then + sock = ngx_socket_tcp() + sock:settimeout(1000) + _, err = 
sock:connect(conf.host, conf.port) + else + sock = ngx_socket_udp() + _, err = sock:setpeername(conf.host, conf.port) + end + if err then return nil, fmt("failed to connect to %s:%s: %s", conf.host, - tostring(conf.port), err) + tostring(conf.port), err) end local statsd = { host = conf.host, port = conf.port, - prefix = conf.prefix, + prefix = conf._prefix, socket = sock, stat_types = stat_types, + udp_packet_size = conf.udp_packet_size, + use_tcp = conf.use_tcp, + udp_buffer = new_tab(DEFAULT_METRICS_COUNT, 0), + udp_buffer_cnt = 0, + udp_buffer_size = 0, } return setmetatable(statsd, statsd_mt) end function statsd_mt:close_socket() - local ok, err = self.socket:close() - if not ok then - kong.log.err("failed to close connection from ", self.host, ":", - tostring(self.port), ": ", err) - return + if self.use_tcp then + self.socket:setkeepalive() + else + -- send the buffered msg + if self.udp_packet_size > 0 and self.udp_buffer_size > 0 then + local message = table_concat(self.udp_buffer, "\n") + ngx_log(NGX_DEBUG, "[statsd] sending last data to statsd server: ", message) + local ok, err = self.socket:send(message) + if not ok then + ngx_log(NGX_ERR, fmt("[statsd] failed to send last data to %s:%s: %s", self.host, + tostring(self.port), err)) + end + end + + local ok, err = self.socket:close() + if not ok then + ngx_log(NGX_ERR, fmt("[statsd] failed to close connection from %s:%s: %s", self.host, + tostring(self.port), err)) + return + end end end function statsd_mt:send_statsd(stat, delta, kind, sample_rate) - local udp_message = create_statsd_message(self.prefix or "kong", stat, + local message = create_statsd_message(self.prefix or "kong", stat, delta, kind, sample_rate) - kong.log.debug("sending data to statsd server: %s", udp_message) + -- if buffer-and-send is enabled + if not self.use_tcp and self.udp_packet_size > 0 then + local message_size = #message + local new_size = self.udp_buffer_size + message_size + -- if we exceeded the configured pkt_size + if 
new_size > self.udp_packet_size then + local truncated = false + if self.udp_buffer_size == 0 then + truncated = true + ngx_log(NGX_WARN, + "[statsd] configured udp_packet_size is smaller than single message of length ", + message_size, + ", UDP packet may be truncated") + end + local current_message = message + message = table_concat(self.udp_buffer, "\n") + clear_tab(self.udp_buffer) + self.udp_buffer_cnt = 1 + self.udp_buffer[1] = current_message + self.udp_buffer_size = message_size + if truncated then + -- current message is buffered and will be sent in next call + return + end + else -- if not, buffer the message + local new_buffer_cnt = self.udp_buffer_cnt + 1 + self.udp_buffer_cnt = new_buffer_cnt + self.udp_buffer[new_buffer_cnt] = message + -- add length of \n + self.udp_buffer_size = new_size + 1 + return + end + + end + + ngx_log(NGX_DEBUG, "[statsd] sending data to statsd server: ", message) + + local ok, err = self.socket:send(message) + + -- send the seperator for multi metrics + if self.use_tcp and ok then + ok, err = self.socket:send("\n") + end - local ok, err = self.socket:send(udp_message) if not ok then - kong.log.err("failed to send data to ", self.host, ":", - tostring(self.port), ": ", err) + ngx_log(NGX_ERR, fmt("[statsd] failed to send data to %s:%s: %s", self.host, + tostring(self.port), err)) end end diff --git a/spec/03-plugins/06-statsd/01-log_spec.lua b/spec/03-plugins/06-statsd/01-log_spec.lua index e428c134eca9..b2f7b628a06e 100644 --- a/spec/03-plugins/06-statsd/01-log_spec.lua +++ b/spec/03-plugins/06-statsd/01-log_spec.lua @@ -1,16 +1,38 @@ -local helpers = require "spec.helpers" +local helpers = require "spec.helpers" +local pl_file = require "pl.file" + +local get_hostname = require("kong.pdk.node").new().get_hostname local fmt = string.format local UDP_PORT = 20000 +local TCP_PORT = 20001 + + +local uuid_pattern = "%x%x%x%x%x%x%x%x%-%x%x%x%x%-4%x%x%x%-%x%x%x%x%-%x%x%x%x%x%x%x%x%x%x%x%x" +local workspace_name_pattern = 
"default" + + +local function get_shdicts() + local prefix = helpers.test_conf.prefix + local ngxconf = helpers.utils.readfile(prefix .. "/nginx.conf") + local pattern = "\n%s*lua_shared_dict%s+(.-)[%s;\n]" + local shdicts = {} + for dict_name in ngxconf:gmatch(pattern) do + table.insert(shdicts, dict_name) + --print(#shdicts, "-", dict_name) + end + return shdicts +end for _, strategy in helpers.each_strategy() do describe("Plugin: statsd (log) [#" .. strategy .. "]", function() local proxy_client local proxy_client_grpc + local shdict_count lazy_setup(function() local bp = helpers.get_db_utils(strategy, { @@ -32,7 +54,7 @@ for _, strategy in helpers.each_strategy() do } local routes = {} - for i = 1, 13 do + for i = 1, 30 do local service = bp.services:insert { protocol = helpers.mock_upstream_protocol, host = helpers.mock_upstream_host, @@ -46,7 +68,6 @@ for _, strategy in helpers.each_strategy() do end bp.key_auth_plugins:insert { route = { id = routes[1].id } } - bp.statsd_plugins:insert { route = { id = routes[1].id }, config = { @@ -54,7 +75,6 @@ for _, strategy in helpers.each_strategy() do port = UDP_PORT, }, } - bp.statsd_plugins:insert { route = { id = routes[2].id }, config = { @@ -236,6 +256,333 @@ for _, strategy in helpers.each_strategy() do }, } + bp.key_auth_plugins:insert { route = { id = routes[14].id } } + + bp.statsd_plugins:insert { + route = { id = routes[14].id }, + config = { + host = "127.0.0.1", + port = UDP_PORT, + metrics = { + { + name = "unique_users", + stat_type = "set", + consumer_identifier = "consumer_id", + } + }, + }, + } + + bp.key_auth_plugins:insert { route = { id = routes[15].id } } + bp.plugins:insert { + name = "statsd", + route = { id = routes[15].id }, + config = { + host = "127.0.0.1", + port = UDP_PORT, + metrics = { + { + name = "status_count_per_user_per_route", + stat_type = "counter", + consumer_identifier = "username", + sample_rate = 1, + } + }, + }, + } + + bp.plugins:insert { + name = "statsd", + route = { id 
= routes[16].id }, + config = { + host = "127.0.0.1", + port = UDP_PORT, + metrics = { + { + name = "status_count_per_workspace", + stat_type = "counter", + sample_rate = 1, + workspace_identifier = "workspace_id", + } + }, + }, + } + + bp.plugins:insert { + name = "statsd", + route = { id = routes[17].id }, + config = { + host = "127.0.0.1", + port = UDP_PORT, + metrics = { + { + name = "status_count_per_workspace", + stat_type = "counter", + sample_rate = 1, + workspace_identifier = "workspace_name", + } + }, + }, + } + + bp.key_auth_plugins:insert { route = { id = routes[18].id } } + bp.plugins:insert { + name = "statsd", + route = { id = routes[18].id }, + config = { + host = "127.0.0.1", + port = TCP_PORT, + use_tcp = true, + metrics = { + { + name = "request_count", + stat_type = "counter", + sample_rate = 1, + } + }, + } + } + + bp.key_auth_plugins:insert { route = { id = routes[19].id } } + bp.plugins:insert { + name = "statsd", + route = { id = routes[19].id }, + config = { + host = "127.0.0.1", + port = UDP_PORT, + metrics = { + { + name = "request_count", + stat_type = "counter", + sample_rate = 1, + }, + { + name = "upstream_latency", + stat_type = "timer", + }, + { + name = "kong_latency", + stat_type = "timer", + } + }, + udp_packet_size = 500, + } + } + + bp.key_auth_plugins:insert { route = { id = routes[20].id } } + bp.plugins:insert { + name = "statsd", + route = { id = routes[20].id }, + config = { + host = "127.0.0.1", + port = UDP_PORT, + metrics = { + { + name = "request_count", + stat_type = "counter", + sample_rate = 1, + }, + { + name = "upstream_latency", + stat_type = "timer", + }, + { + name = "kong_latency", + stat_type = "timer", + } + }, + udp_packet_size = 100, + } + } + + bp.key_auth_plugins:insert { route = { id = routes[21].id } } + bp.plugins:insert { + name = "statsd", + route = { id = routes[21].id }, + config = { + host = "127.0.0.1", + port = UDP_PORT, + metrics = { + { + name = "request_count", + stat_type = "counter", + 
sample_rate = 1, + }, + { + name = "upstream_latency", + stat_type = "timer", + }, + { + name = "kong_latency", + stat_type = "timer", + } + }, + udp_packet_size = 1, + } + } + + bp.key_auth_plugins:insert { route = { id = routes[22].id } } + bp.plugins:insert { + name = "statsd", + route = { id = routes[22].id }, + config = { + host = "127.0.0.1", + port = UDP_PORT, + metrics = { + -- test two types of metrics that are processed in different way + { + name = "request_count", + stat_type = "counter", + sample_rate = 1, + service_identifier = "service_id", + }, + { + name = "status_count", + stat_type = "counter", + sample_rate = 1, + service_identifier = "service_id", + } + }, + }, + } + + bp.key_auth_plugins:insert { route = { id = routes[23].id } } + bp.plugins:insert { + name = "statsd", + route = { id = routes[23].id }, + config = { + host = "127.0.0.1", + port = UDP_PORT, + metrics = { + { + name = "request_count", + stat_type = "counter", + sample_rate = 1, + service_identifier = "service_name", + }, + { + name = "status_count", + stat_type = "counter", + sample_rate = 1, + service_identifier = "service_name", + } + }, + }, + } + + bp.key_auth_plugins:insert { route = { id = routes[24].id } } + bp.plugins:insert { + name = "statsd", + route = { id = routes[24].id }, + config = { + host = "127.0.0.1", + port = UDP_PORT, + metrics = { + { + name = "request_count", + stat_type = "counter", + sample_rate = 1, + service_identifier = "service_host", + }, + { + name = "status_count", + stat_type = "counter", + sample_rate = 1, + service_identifier = "service_host", + } + }, + }, + } + + for i = 100, 102 do + local service = bp.services:insert { + protocol = helpers.mock_upstream_protocol, + host = helpers.mock_upstream_host, + port = helpers.mock_upstream_port, + } + routes[i] = bp.routes:insert { + hosts = { fmt("logging%d.com", i) }, + service = service + } + end + + bp.key_auth_plugins:insert { route = { id = routes[100].id } } + + bp.plugins:insert { + name = 
"statsd", + route = { id = routes[100].id }, + config = { + host = "127.0.0.1", + port = UDP_PORT, + metrics = { + { + name = "request_count", + stat_type = "counter", + sample_rate = 1, + service_identifier = "service_name_or_host", + }, + { + name = "status_count", + stat_type = "counter", + sample_rate = 1, + service_identifier = "service_name_or_host", + } + }, + }, + } + + bp.key_auth_plugins:insert { route = { id = routes[101].id } } + + bp.plugins:insert { + name = "statsd", + route = { id = routes[101].id }, + config = { + host = "127.0.0.1", + port = UDP_PORT, + metrics = { + { + name = "request_count", + stat_type = "counter", + sample_rate = 1, + service_identifier = "service_name", + }, + { + name = "status_count", + stat_type = "counter", + sample_rate = 1, + service_identifier = "service_name", + } + }, + }, + } + + + bp.key_auth_plugins:insert { route = { id = routes[102].id } } + + bp.plugins:insert { + name = "statsd", + route = { id = routes[102].id }, + config = { + host = "127.0.0.1", + port = UDP_PORT, + metrics = { + { + name = "request_count", + stat_type = "counter", + sample_rate = 1, + service_identifier = "service_name", + }, + { + name = "status_count", + stat_type = "counter", + sample_rate = 1, + service_identifier = "service_name", + } + }, + hostname_in_prefix = true, + }, + } + -- grpc local grpc_routes = {} for i = 1, 2 do @@ -279,6 +626,7 @@ for _, strategy in helpers.each_strategy() do proxy_client = helpers.proxy_client() proxy_client_grpc = helpers.proxy_client_grpc() + shdict_count = #get_shdicts() end) lazy_teardown(function() @@ -291,7 +639,11 @@ for _, strategy in helpers.each_strategy() do describe("metrics", function() it("logs over UDP with default metrics", function() - local thread = helpers.udp_server(UDP_PORT, 12) + local metrics_count = 12 + -- shdict_usage metrics + metrics_count = metrics_count + shdict_count * 2 + + local thread = helpers.udp_server(UDP_PORT, metrics_count, 2) local response = 
assert(proxy_client:send { method = "GET", path = "/request?apikey=kong", @@ -301,25 +653,34 @@ for _, strategy in helpers.each_strategy() do }) assert.res_status(200, response) - local ok, metrics = thread:join() - assert.True(ok) - assert.contains("kong.statsd1.request.count:1|c", metrics) - assert.contains("kong.statsd1.latency:%d+|ms", metrics, true) - assert.contains("kong.statsd1.request.size:%d+|ms", metrics, true) - assert.contains("kong.statsd1.request.status.200:1|c", metrics) - assert.contains("kong.statsd1.request.status.total:1|c", metrics) - assert.contains("kong.statsd1.response.size:%d+|ms", metrics, true) - assert.contains("kong.statsd1.upstream_latency:%d*|ms", metrics, true) - assert.contains("kong.statsd1.kong_latency:%d*|ms", metrics, true) - assert.contains("kong.statsd1.user.uniques:robert|s", metrics) - assert.contains("kong.statsd1.user.robert.request.count:1|c", metrics) - assert.contains("kong.statsd1.user.robert.request.status.total:1|c", - metrics) - assert.contains("kong.statsd1.user.robert.request.status.200:1|c", - metrics) + local ok, metrics, err = thread:join() + assert(ok, metrics) + assert(#metrics == metrics_count, err) + assert.contains("kong.service.statsd1.request.count:1|c", metrics) + assert.contains("kong.service.statsd1.request.size:%d+|ms", metrics, true) + assert.contains("kong.service.statsd1.response.size:%d+|ms", metrics, true) + assert.contains("kong.service.statsd1.latency:%d+|ms", metrics, true) + assert.contains("kong.service.statsd1.status.200:1|c", metrics) + assert.contains("kong.service.statsd1.upstream_latency:%d*|ms", metrics, true) + assert.contains("kong.service.statsd1.kong_latency:%d*|ms", metrics, true) + assert.contains("kong.service.statsd1.user.uniques:robert|s", metrics) + assert.contains("kong.service.statsd1.user.robert.request.count:1|c", metrics) + assert.contains("kong.service.statsd1.user.robert.status.200:1|c", metrics) + + assert.contains("kong.service.statsd1.workspace." .. 
uuid_pattern .. ".status.200:1|c", metrics, true) + assert.contains("kong.route." .. uuid_pattern .. ".user.robert.status.200:1|c", metrics, true) + + -- shdict_usage metrics, just test one is enough + assert.contains("kong.node..*.shdict.kong.capacity:%d+|g", metrics, true) + assert.contains("kong.node..*.shdict.kong.free_space:%d+|g", metrics, true) end) it("logs over UDP with default metrics and new prefix", function() - local thread = helpers.udp_server(UDP_PORT, 12) + local metrics_count = 12 + -- shdict_usage metrics, can't test again in 1 minutes + -- metrics_count = metrics_count + shdict_count * 2 + + + local thread = helpers.udp_server(UDP_PORT, metrics_count, 2) local response = assert(proxy_client:send { method = "GET", path = "/request?apikey=kong", @@ -328,25 +689,26 @@ for _, strategy in helpers.each_strategy() do } }) assert.res_status(200, response) - local ok, metrics = thread:join() - assert.True(ok) - assert.contains("prefix.statsd13.request.count:1|c", metrics) - assert.contains("prefix.statsd13.latency:%d+|ms", metrics, true) - assert.contains("prefix.statsd13.request.size:%d*|ms", metrics, true) - assert.contains("prefix.statsd13.request.status.200:1|c", metrics) - assert.contains("prefix.statsd13.request.status.total:1|c", metrics) - assert.contains("prefix.statsd13.response.size:%d+|ms", metrics, true) - assert.contains("prefix.statsd13.upstream_latency:%d*|ms", metrics, true) - assert.contains("prefix.statsd13.kong_latency:%d*|ms", metrics, true) - assert.contains("prefix.statsd13.user.uniques:robert|s", metrics) - assert.contains("prefix.statsd13.user.robert.request.count:1|c", metrics) - assert.contains("prefix.statsd13.user.robert.request.status.total:1|c", - metrics) - assert.contains("prefix.statsd13.user.robert.request.status.200:1|c", - metrics) + local ok, metrics, err = thread:join() + assert(ok, metrics) + assert(#metrics == metrics_count, err) + assert.contains("prefix.service.statsd13.request.count:1|c", metrics) + 
assert.contains("prefix.service.statsd13.latency:%d+|ms", metrics, true) + assert.contains("prefix.service.statsd13.request.size:%d+|ms", metrics, true) + assert.contains("prefix.service.statsd13.status.200:1|c", metrics) + assert.contains("prefix.service.statsd13.response.size:%d+|ms", metrics, true) + assert.contains("prefix.service.statsd13.upstream_latency:%d*|ms", metrics, true) + assert.contains("prefix.service.statsd13.kong_latency:%d*|ms", metrics, true) + assert.contains("prefix.service.statsd13.user.uniques:robert|s", metrics) + assert.contains("prefix.service.statsd13.user.robert.request.count:1|c", metrics) + assert.contains("prefix.service.statsd13.user.robert.status.200:1|c", metrics) + + assert.contains("prefix.service.statsd13.workspace." .. uuid_pattern .. ".status.200:1|c", + metrics, true) + assert.contains("prefix.route." .. uuid_pattern .. ".user.robert.status.200:1|c", metrics, true) end) it("request_count", function() - local thread = helpers.udp_server(UDP_PORT) + local thread = helpers.udp_server(UDP_PORT, 1, 2) local response = assert(proxy_client:send { method = "GET", path = "/request", @@ -356,12 +718,13 @@ for _, strategy in helpers.each_strategy() do }) assert.res_status(200, response) - local ok, res = thread:join() - assert.True(ok) - assert.equal("kong.statsd5.request.count:1|c", res) + local ok, res, err = thread:join() + assert(ok, res) + assert(res, err) + assert.equal("kong.service.statsd5.request.count:1|c", res) end) it("status_count", function() - local thread = helpers.udp_server(UDP_PORT, 2) + local thread = helpers.udp_server(UDP_PORT, 2,2) local response = assert(proxy_client:send { method = "GET", path = "/request", @@ -373,8 +736,7 @@ for _, strategy in helpers.each_strategy() do local ok, res = thread:join() assert.True(ok) - assert.contains("kong.statsd3.request.status.200:1|c", res) - assert.contains("kong.statsd3.request.status.total:1|c", res) + assert.contains("kong.service.statsd3.status.200:1|c", res) end) 
it("request_size", function() local thread = helpers.udp_server(UDP_PORT) @@ -389,7 +751,7 @@ for _, strategy in helpers.each_strategy() do local ok, res = thread:join() assert.True(ok) - assert.matches("kong.statsd4.request.size:%d+|ms", res) + assert.matches("kong.service.statsd4.request.size:%d+|ms", res) end) it("latency", function() local thread = helpers.udp_server(UDP_PORT) @@ -404,7 +766,7 @@ for _, strategy in helpers.each_strategy() do local ok, res = thread:join() assert.True(ok) - assert.matches("kong.statsd2.latency:.*|ms", res) + assert.matches("kong.service.statsd2.latency:.*|ms", res) end) it("response_size", function() local thread = helpers.udp_server(UDP_PORT) @@ -419,7 +781,7 @@ for _, strategy in helpers.each_strategy() do local ok, res = thread:join() assert.True(ok) - assert.matches("kong.statsd6.response.size:%d+|ms", res) + assert.matches("kong.service.statsd6.response.size:%d+|ms", res) end) it("upstream_latency", function() local thread = helpers.udp_server(UDP_PORT) @@ -434,7 +796,7 @@ for _, strategy in helpers.each_strategy() do local ok, res = thread:join() assert.True(ok) - assert.matches("kong.statsd7.upstream_latency:.*|ms", res) + assert.matches("kong.service.statsd7.upstream_latency:.*|ms", res) end) it("kong_latency", function() local thread = helpers.udp_server(UDP_PORT) @@ -449,7 +811,7 @@ for _, strategy in helpers.each_strategy() do local ok, res = thread:join() assert.True(ok) - assert.matches("kong.statsd8.kong_latency:.*|ms", res) + assert.matches("kong.service.statsd8.kong_latency:.*|ms", res) end) it("unique_users", function() local thread = helpers.udp_server(UDP_PORT) @@ -464,10 +826,10 @@ for _, strategy in helpers.each_strategy() do local ok, res = thread:join() assert.True(ok) - assert.matches("kong.statsd9.user.uniques:robert|s", res) + assert.matches("kong.service.statsd9.user.uniques:robert|s", res) end) it("status_count_per_user", function() - local thread = helpers.udp_server(UDP_PORT, 2) + local thread = 
helpers.udp_server(UDP_PORT, 2, 2) local response = assert(proxy_client:send { method = "GET", path = "/request?apikey=kong", @@ -477,13 +839,13 @@ for _, strategy in helpers.each_strategy() do }) assert.res_status(200, response) - local ok, res = thread:join() - assert.True(ok) - assert.contains("kong.statsd10.user.robert.request.status.200:1|c", res) - assert.contains("kong.statsd10.user.robert.request.status.total:1|c", res) + local ok, res, err = thread:join() + assert(ok, res) + assert(res, err) + assert.contains("kong.service.statsd10.user.robert.status.200:1|c", res) end) it("request_per_user", function() - local thread = helpers.udp_server(UDP_PORT) + local thread = helpers.udp_server(UDP_PORT, 1, 2) local response = assert(proxy_client:send { method = "GET", path = "/request?apikey=kong", @@ -493,9 +855,10 @@ for _, strategy in helpers.each_strategy() do }) assert.res_status(200, response) - local ok, res = thread:join() - assert.True(ok) - assert.matches("kong.statsd11.user.bob.request.count:1|c", res) + local ok, res, err = thread:join() + assert(ok, res) + assert(res, err) + assert.matches("kong.service.statsd11.user.bob.request.count:1|c", res) end) it("latency as gauge", function() local thread = helpers.udp_server(UDP_PORT) @@ -510,9 +873,271 @@ for _, strategy in helpers.each_strategy() do local ok, res = thread:join() assert.True(ok) - assert.matches("kong%.statsd12%.latency:%d+|g", res) + assert.matches("kong%.service.statsd12.latency:%d+|g", res) + end) + it("consumer by consumer_id", function() + local thread = helpers.udp_server(UDP_PORT, 1, 2) + local response = assert(proxy_client:send { + method = "GET", + path = "/request?apikey=kong", + headers = { + host = "logging14.com" + } + }) + assert.res_status(200, response) + + local ok, res, err = thread:join() + assert(ok, res) + assert(res, err) + assert.matches("^kong.service.statsd14.user.uniques:" .. uuid_pattern .. 
"|s", res) + end) + it("status_count_per_user_per_route", function() + local thread = helpers.udp_server(UDP_PORT, 1, 2) + local response = assert(proxy_client:send { + method = "GET", + path = "/request?apikey=kong", + headers = { + host = "logging15.com" + } + }) + assert.res_status(200, response) + + local ok, res, err = thread:join() + assert(ok, res) + assert(res, err) + assert.matches("kong.route." .. uuid_pattern .. ".user.bob.status.200:1|c", res) + end) + it("status_count_per_workspace", function() + local thread = helpers.udp_server(UDP_PORT, 1, 2) + local response = assert(proxy_client:send { + method = "GET", + path = "/request?apikey=kong", + headers = { + host = "logging16.com" + } + }) + assert.res_status(200, response) + + local ok, res, err = thread:join() + assert(ok, res) + assert(res, err) + assert.matches("kong.service.statsd16.workspace." .. uuid_pattern .. ".status.200:1|c", res) + end) + it("status_count_per_workspace", function() + local thread = helpers.udp_server(UDP_PORT, 1, 2) + local response = assert(proxy_client:send { + method = "GET", + path = "/request?apikey=kong", + headers = { + host = "logging17.com" + } + }) + assert.res_status(200, response) + + local ok, res, err = thread:join() + assert(ok, res) + assert(res, err) + assert.matches("kong.service.statsd17.workspace." .. workspace_name_pattern .. 
".status.200:1|c", res) + end) + it("logs over TCP with one metric", function() + local thread = helpers.tcp_server(TCP_PORT, { timeout = 10 }) + local response = assert(proxy_client:send { + method = "GET", + path = "/request?apikey=kong", + headers = { + host = "logging18.com" + } + }) + assert.res_status(200, response) + + local ok, metrics = thread:join() + + assert.True(ok) + assert.matches("kong.service.statsd18.request.count:1|c", metrics) + end) + it("combines udp packets", function() + local thread = helpers.udp_server(UDP_PORT, 1, 2) + local response = assert(proxy_client:send { + method = "GET", + path = "/request?apikey=kong", + headers = { + host = "logging19.com" + } + }) + assert.res_status(200, response) + + local ok, res, err = thread:join() + assert(ok, res) + assert(res, err) + -- doesn't have a single-metric packet + assert.not_matches("^kong.service.statsd19.request.count:%d+|c$", res) + assert.not_matches("^kong.service.statsd19.upstream_latency:%d+|ms$", res) + assert.not_matches("^kong.service.statsd19.kong_latency:%d+|ms$", res) + -- has a combined multi-metrics packet + assert.matches("^kong.service.statsd19.request.count:%d+|c\n" .. + "kong.service.statsd19.upstream_latency:%d+|ms\n" .. 
+ "kong.service.statsd19.kong_latency:%d+|ms$", res) + end) + it("combines and splits udp packets", function() + local thread = helpers.udp_server(UDP_PORT, 2, 2) + local response = assert(proxy_client:send { + method = "GET", + path = "/request?apikey=kong", + headers = { + host = "logging20.com" + } + }) + assert.res_status(200, response) + + local ok, res, err = thread:join() + assert(ok, res) + assert(#res == 2, err) + -- doesn't contain a single-metric packet + assert.not_contains("^kong.service.statsd20.request.count:%d+|c$", res, true) + assert.not_contains("^kong.service.statsd20.upstream_latency:%d+|ms$", res, true) + -- doesn't contain multi-metrics packet with all three metrics + assert.not_contains("^kong.service.statsd20.request.count:%d+|c\n" .. + "kong.service.statsd20.upstream_latency:%d+|ms\n" .. + "kong.service.statsd20.kong_latency:%d+|ms$", res) + -- has a combined multi-metrics packet with up to 100 bytes + assert.contains("^kong.service.statsd20.request.count:%d+|c\n" .. 
"kong.service.statsd20.upstream_latency:%d+|ms$", res, true) + assert.contains("^kong.service.statsd20.kong_latency:%d+|ms$", res, true) + end) + it("throws an error if udp_packet_size is too small", function() + local thread = helpers.udp_server(UDP_PORT, 3, 2) + local response = assert(proxy_client:send { + method = "GET", + path = "/request?apikey=kong", + headers = { + host = "logging21.com" + } + }) + assert.res_status(200, response) + + local ok, res, err = thread:join() + assert(ok, res) + assert(#res == 3, err) + + assert.contains("^kong.service.statsd21.request.count:%d+|c$", res ,true) + assert.contains("^kong.service.statsd21.upstream_latency:%d+|ms$", res, true) + assert.contains("^kong.service.statsd21.kong_latency:%d+|ms$", res, true) + + local err_log = pl_file.read(helpers.test_conf.nginx_err_logs) + assert.matches("", err_log) + end) + it("logs service by service_id", function() + local thread = helpers.udp_server(UDP_PORT, 2, 2) + local response = assert(proxy_client:send { + method = "GET", + path = "/request?apikey=kong", + headers = { + host = "logging22.com" + } + }) + assert.res_status(200, response) + + local ok, res, err = thread:join() + assert(ok, res) + assert(#res == 2, err) + assert.contains("^kong.service." .. uuid_pattern .. ".request.count:1|c$", res, true) + assert.contains("^kong.service." .. uuid_pattern .. 
".status.200:1|c$", res, true) + end) + it("logs service by service_host", function() + local thread = helpers.udp_server(UDP_PORT, 2, 2) + local response = assert(proxy_client:send { + method = "GET", + path = "/request?apikey=kong", + headers = { + host = "logging23.com" + } + }) + assert.res_status(200, response) + + local ok, res, err = thread:join() + assert(ok, res) + assert(#res == 2, err) + assert.contains("^kong.service.statsd23.request.count:1|c$", res, true) + assert.contains("^kong.service.statsd23.status.200:1|c$", res, true) + end) + it("logs service by service_name", function() + local thread = helpers.udp_server(UDP_PORT, 2, 2) + local response = assert(proxy_client:send { + method = "GET", + path = "/request?apikey=kong", + headers = { + host = "logging24.com" + } + }) + assert.res_status(200, response) + + local ok, res, err = thread:join() + assert(ok, res) + assert(#res == 2, err) + assert.contains("^kong.service." .. string.gsub(helpers.mock_upstream_host, "%.", "_") .. + ".request.count:1|c$", res, true) + assert.contains("^kong.service." .. string.gsub(helpers.mock_upstream_host, "%.", "_") .. + ".status.200:1|c$", res, true) + end) + it("logs service by service_name_or_host falls back to service host when service name is not set", function() + local thread = helpers.udp_server(UDP_PORT, 2, 2) + local response = assert(proxy_client:send { + method = "GET", + path = "/request?apikey=kong", + headers = { + host = "logging100.com" + } + }) + assert.res_status(200, response) + + local ok, res, err = thread:join() + assert(ok, res) + assert(#res == 2, err) + assert.contains("^kong.service." .. string.gsub(helpers.mock_upstream_host, "%.", "_") .. + ".request.count:1|c$", res, true) + assert.contains("^kong.service." .. string.gsub(helpers.mock_upstream_host, "%.", "_") .. 
+ ".status.200:1|c$", res, true) + end) + it("logs service by service_name emits unnamed if service name is not set", function() + local thread = helpers.udp_server(UDP_PORT, 2, 2) + local response = assert(proxy_client:send { + method = "GET", + path = "/request?apikey=kong", + headers = { + host = "logging101.com" + } + }) + assert.res_status(200, response) + + local ok, res, err = thread:join() + assert(ok, res) + assert(#res == 2, err) + assert.contains("^kong.service.unnamed.request.count:1|c$", res, true) + assert.contains("^kong.service.unnamed.status.200:1|c$", res, true) end) end) + + describe("hostname_in_prefix", function() + it("prefixes metric names with the hostname", function() + local hostname = get_hostname() + hostname = string.gsub(hostname, "%.", "_") + + local thread = helpers.udp_server(UDP_PORT, 1, 2) + local response = assert(proxy_client:send { + method = "GET", + path = "/request?apikey=kong", + headers = { + host = "logging102.com" + } + }) + assert.res_status(200, response) + + local ok, metrics, err = thread:join() + assert(ok, metrics) + assert(metrics, err) + assert.matches("kong.node." .. hostname .. 
".service.unnamed.request.count:1|c", metrics, nil, true) + end) + end) + describe("metrics #grpc", function() it("logs over UDP with default metrics", function() local thread = helpers.udp_server(UDP_PORT, 8) @@ -531,14 +1156,13 @@ for _, strategy in helpers.each_strategy() do local ok, metrics = thread:join() assert.True(ok) - assert.contains("kong.grpc_statsd1.request.count:1|c", metrics) - assert.contains("kong.grpc_statsd1.latency:%d+|ms", metrics, true) - assert.contains("kong.grpc_statsd1.request.size:%d+|ms", metrics, true) - assert.contains("kong.grpc_statsd1.request.status.200:1|c", metrics) - assert.contains("kong.grpc_statsd1.request.status.total:1|c", metrics) - assert.contains("kong.grpc_statsd1.response.size:%d+|ms", metrics, true) - assert.contains("kong.grpc_statsd1.upstream_latency:%d*|ms", metrics, true) - assert.contains("kong.grpc_statsd1.kong_latency:%d*|ms", metrics, true) + assert.contains("kong.service.grpc_statsd1.request.count:1|c", metrics) + assert.contains("kong.service.grpc_statsd1.latency:%d+|ms", metrics, true) + assert.contains("kong.service.grpc_statsd1.request.size:%d+|ms", metrics, true) + assert.contains("kong.service.grpc_statsd1.status.200:1|c", metrics) + assert.contains("kong.service.grpc_statsd1.response.size:%d+|ms", metrics, true) + assert.contains("kong.service.grpc_statsd1.upstream_latency:%d*|ms", metrics, true) + assert.contains("kong.service.grpc_statsd1.kong_latency:%d*|ms", metrics, true) end) it("latency as gauge", function() local thread = helpers.udp_server(UDP_PORT) @@ -557,7 +1181,92 @@ for _, strategy in helpers.each_strategy() do local ok, res = thread:join() assert.True(ok) - assert.matches("kong%.grpc_statsd2%.latency:%d+|g", res) + assert.matches("kong%.service%.grpc_statsd2%.latency:%d+|g", res) + end) + end) + end) + + describe("Plugin: statsd (log) [#" .. strategy .. 
"]", function() + local proxy_client + + setup(function() + local bp = helpers.get_db_utils(strategy) + + local consumer = bp.consumers:insert { + username = "bob", + custom_id = "robert", + } + + bp.keyauth_credentials:insert { + key = "kong", + consumer = { id = consumer.id }, + } + + bp.plugins:insert { name = "key-auth" } + + bp.plugins:insert { + name = "statsd", + config = { + host = "127.0.0.1", + port = UDP_PORT, + }, + } + + assert(helpers.start_kong({ + database = strategy, + nginx_conf = "spec/fixtures/custom_nginx.template", + })) + + proxy_client = helpers.proxy_client() + + end) + + teardown(function() + if proxy_client then + proxy_client:close() + end + + helpers.stop_kong() + end) + + describe("configures globally", function() + it("sends default metrics with global.matched namespace", function() + local metrics_count = 6 + -- should have no shdict_usage metrics + -- metrics_count = metrics_count + shdict_count * 2 + -- should have no vitals metrics + + local thread = helpers.udp_server(UDP_PORT, metrics_count, 2) + local response = assert(proxy_client:send { + method = "GET", + path = "/request?apikey=kong", + headers = { + host = "logging1.com" + } + }) + assert.res_status(404, response) + + local ok, metrics, err = thread:join() + assert(ok, metrics) + assert(#metrics == metrics_count, err) + assert.contains("kong.global.unmatched.request.count:1|c", metrics) + assert.contains("kong.global.unmatched.latency:%d+|ms", metrics, true) + assert.contains("kong.global.unmatched.request.size:%d+|ms", metrics, true) + assert.contains("kong.global.unmatched.status.404:1|c", metrics) + assert.contains("kong.global.unmatched.response.size:%d+|ms", metrics, true) + assert.not_contains("kong.global.unmatched.upstream_latency:%d*|ms", metrics, true) + assert.contains("kong.global.unmatched.kong_latency:%d+|ms", metrics, true) + assert.not_contains("kong.global.unmatched.user.uniques:robert|s", metrics) + 
assert.not_contains("kong.global.unmatched.user.robert.request.count:1|c", metrics) + assert.not_contains("kong.global.unmatched.user.robert.status.404:1|c", + metrics) + assert.not_contains("kong.global.unmatched.workspace." .. uuid_pattern .. ".status.200:1|c", + metrics, true) + assert.not_contains("kong.route." .. uuid_pattern .. ".user.robert.status.404:1|c", metrics, true) + + -- shdict_usage metrics, just test one is enough + assert.not_contains("kong.node..*.shdict.kong.capacity:%d+|g", metrics, true) + assert.not_contains("kong.node..*.shdict.kong.free_space:%d+|g", metrics, true) end) end) end) diff --git a/spec/03-plugins/06-statsd/02-schema_spec.lua b/spec/03-plugins/06-statsd/02-schema_spec.lua index c4bc369170ce..b244ede7f56d 100644 --- a/spec/03-plugins/06-statsd/02-schema_spec.lua +++ b/spec/03-plugins/06-statsd/02-schema_spec.lua @@ -1,44 +1,18 @@ -local PLUGIN_NAME = "statsd" - --- helper function to validate data against a schema -local validate do - local validate_entity = require("spec.helpers").validate_plugin_config_schema - local plugin_schema = require("kong.plugins."..PLUGIN_NAME..".schema") - - function validate(data) - return validate_entity(data, plugin_schema) - end -end - - -describe(PLUGIN_NAME .. 
": (schema)", function() - local snapshot - - setup(function() - snapshot = assert:snapshot() - assert:set_parameter("TableFormatLevel", -1) - end) - - teardown(function() - snapshot:revert() - end) - +local statsd_schema = require "kong.plugins.statsd.schema" +local validate_entity = require("spec.helpers").validate_plugin_config_schema +describe("Plugin: statsd (schema)", function() it("accepts empty config", function() - local ok, err = validate({}) + local ok, err = validate_entity({}, statsd_schema) assert.is_nil(err) assert.is_truthy(ok) end) - - it("accepts empty metrics", function() local metrics_input = {} - local ok, err = validate({ metrics = metrics_input}) + local ok, err = validate_entity({ metrics = metrics_input}, statsd_schema) assert.is_nil(err) assert.is_truthy(ok) end) - - it("accepts just one metrics", function() local metrics_input = { { @@ -47,12 +21,10 @@ describe(PLUGIN_NAME .. ": (schema)", function() sample_rate = 1 } } - local ok, err = validate({ metrics = metrics_input}) + local ok, err = validate_entity({ metrics = metrics_input}, statsd_schema) assert.is_nil(err) assert.is_truthy(ok) end) - - it("rejects if name or stat not defined", function() local metrics_input = { { @@ -60,36 +32,19 @@ describe(PLUGIN_NAME .. 
": (schema)", function() sample_rate = 1 } } - local _, err = validate({ metrics = metrics_input}) - assert.same({ - config = { - metrics = { - [1] = { - stat_type = 'field required for entity check' - } - } - } - }, err) - + local _, err = validate_entity({ metrics = metrics_input}, statsd_schema) + assert.not_nil(err) + assert.equal("field required for entity check", err.config.metrics[1].stat_type) local metrics_input = { { stat_type = "counter", sample_rate = 1 } } - _, err = validate({ metrics = metrics_input}) - assert.same({ - config = { - metrics = { - [1] = { - name = 'field required for entity check' - } - } - } - }, err) + _, err = validate_entity({ metrics = metrics_input}, statsd_schema) + assert.not_nil(err) + assert.equal("field required for entity check", err.config.metrics[1].name) end) - - it("rejects counters without sample rate", function() local metrics_input = { { @@ -97,43 +52,21 @@ describe(PLUGIN_NAME .. ": (schema)", function() stat_type = "counter", } } - local _, err = validate({ metrics = metrics_input}) - assert.same({ - config = { - metrics = { - [1] = { - ["@entity"] = { - [1] = "failed conditional validation given value of field 'stat_type'" - }, - sample_rate = 'required field missing' - } - } - } - }, err) + local _, err = validate_entity({ metrics = metrics_input}, statsd_schema) + assert.not_nil(err) + assert.equal("required field missing", err.config.metrics[1].sample_rate) end) - - it("rejects invalid metrics name", function() local metrics_input = { { name = "invalid_name", stat_type = "counter", - sample_rate = 1, } } - local _, err = validate({ metrics = metrics_input}) - assert.same({ - config = { - metrics = { - [1] = { - name = 'expected one of: kong_latency, latency, request_count, request_per_user, request_size, response_size, status_count, status_count_per_user, unique_users, upstream_latency' - } - } - } - }, err) + local _, err = validate_entity({ metrics = metrics_input}, statsd_schema) + assert.not_nil(err) + 
assert.match("expected one of:.+", err.config.metrics[1].name) end) - - it("rejects invalid stat type", function() local metrics_input = { { @@ -141,43 +74,74 @@ describe(PLUGIN_NAME .. ": (schema)", function() stat_type = "invalid_stat", } } - local _, err = validate({ metrics = metrics_input}) - assert.same({ - config = { - metrics = { - [1] = { - stat_type = 'expected one of: counter, gauge, histogram, meter, set, timer' - } - } + local _, err = validate_entity({ metrics = metrics_input}, statsd_schema) + assert.not_nil(err) + assert.equal("value must be counter", err.config.metrics[1].stat_type) + end) + it("rejects invalid service identifier", function() + local metrics_input = { + { + name = "status_count", + stat_type = "counter", + sample_rate = 1, + service_identifier = "fooo", } - }, err) + } + local _, err = validate_entity({ metrics = metrics_input}, statsd_schema) + assert.not_nil(err) + assert.match("expected one of:.+", err.config.metrics[1].service_identifier) end) - - - it("rejects if consumer identifier missing", function() + it("accepts empty service identifier", function() local metrics_input = { { - name = "status_count_per_user", + name = "status_count", stat_type = "counter", - sample_rate = 1 + sample_rate = 1, } } - local _, err = validate({ metrics = metrics_input}) - assert.same({ - config = { - metrics = { - [1] = { - ["@entity"] = { - [1] = "failed conditional validation given value of field 'name'" - }, - consumer_identifier = 'required field missing' - } - } + local ok, err = validate_entity({ metrics = metrics_input}, statsd_schema) + assert.is_nil(err) + assert.is_truthy(ok) + end) + it("accepts valid service identifier", function() + local metrics_input = { + { + name = "status_count", + stat_type = "counter", + sample_rate = 1, + service_identifier = "service_id", } - }, err) + } + local ok, err = validate_entity({ metrics = metrics_input}, statsd_schema) + assert.is_nil(err) + assert.is_truthy(ok) + end) + it("rejects invalid 
workspace identifier", function() + local metrics_input = { + { + name = "status_count_per_workspace", + stat_type = "counter", + sample_rate = 1, + workspace_identifier = "fooo", + } + } + local _, err = validate_entity({ metrics = metrics_input}, statsd_schema) + assert.not_nil(err) + assert.match("expected one of:.+", err.config.metrics[1].workspace_identifier) + end) + it("accepts valid workspace identifier", function() + local metrics_input = { + { + name = "status_count_per_workspace", + stat_type = "counter", + sample_rate = 1, + workspace_identifier = "workspace_id", + } + } + local ok, err = validate_entity({ metrics = metrics_input}, statsd_schema) + assert.is_nil(err) + assert.is_truthy(ok) end) - - it("rejects if metric has wrong stat type", function() local metrics_input = { { @@ -185,24 +149,9 @@ describe(PLUGIN_NAME .. ": (schema)", function() stat_type = "counter" } } - local _, err = validate({ metrics = metrics_input}) - assert.same({ - config = { - metrics = { - [1] = { - ["@entity"] = { - [1] = "failed conditional validation given value of field 'name'", - [2] = "failed conditional validation given value of field 'stat_type'", - [3] = "failed conditional validation given value of field 'name'" - }, - consumer_identifier = 'required field missing', - sample_rate = 'required field missing', - stat_type = 'value must be set' - } - } - } - }, err) - + local _, err = validate_entity({ metrics = metrics_input}, statsd_schema) + assert.not_nil(err) + assert.equal("value must be set", err.config.metrics[1].stat_type) metrics_input = { { name = "status_count", @@ -210,18 +159,127 @@ describe(PLUGIN_NAME .. 
": (schema)", function() sample_rate = 1 } } - _, err = validate({ metrics = metrics_input}) - assert.same({ - config = { - metrics = { - [1] = { - ["@entity"] = { - [1] = "failed conditional validation given value of field 'name'" - }, - stat_type = 'value must be counter' - } - } - } - }, err) + _, err = validate_entity({ metrics = metrics_input}, statsd_schema) + assert.not_nil(err) + assert.equal("value must be counter", err.config.metrics[1].stat_type) + end) + it("accepts empty allow status codes configuration parameter", function() + local allow_status_codes_input = {} + + local ok, err = validate_entity({ allow_status_codes = allow_status_codes_input}, statsd_schema) + assert.is_nil(err) + assert.is_truthy(ok) + end) + it("accepts if allow status codes configuration parameter is given status codes in form of ranges", function() + local allow_status_codes_input = { + "200-299", + "300-399" + } + + local ok, err = validate_entity({ allow_status_codes = allow_status_codes_input}, statsd_schema) + assert.is_nil(err) + assert.is_truthy(ok) + end) + it("rejects if allow status codes configuration is given as alphabet values", function() + local allow_status_codes_input = { + "test" + } + + local _, err = validate_entity({ allow_status_codes = allow_status_codes_input}, statsd_schema) + assert.not_nil(err) + assert.contains("invalid value: test", err.config.allow_status_codes) + end) + it("rejects if allow status codes configuration is given as special characters", function() + local allow_status_codes_input = { + "$%%" + } + + local _, err = validate_entity({ allow_status_codes = allow_status_codes_input}, statsd_schema) + assert.not_nil(err) + assert.contains("invalid value: $%%", err.config.allow_status_codes) + end) + it("rejects if allow status codes configuration is given as alphabet values with dash symbol which indicates range", function() + local allow_status_codes_input = { + "test-test", + } + + local _, err = validate_entity({ allow_status_codes = 
allow_status_codes_input}, statsd_schema) + assert.not_nil(err) + assert.contains("invalid value: test-test", err.config.allow_status_codes) + end) + it("rejects if allow status codes configuration is given as alphabet an numeric values with dash symbol which indicates range", function() + local allow_status_codes_input = { + "test-299", + "300-test" + } + + local _, err = validate_entity({ allow_status_codes = allow_status_codes_input}, statsd_schema) + assert.not_nil(err) + assert.contains("invalid value: test-299", err.config.allow_status_codes) + assert.contains("invalid value: 300-test", err.config.allow_status_codes) + end) + it("rejects if one of allow status codes configuration is invalid", function() + local allow_status_codes_input = { + "200-300", + "test-test" + } + + local _, err = validate_entity({ allow_status_codes = allow_status_codes_input}, statsd_schema) + assert.not_nil(err) + assert.contains("invalid value: test-test", err.config.allow_status_codes) + end) + it("rejects if allow status codes configuration is given as numeric values without dash symbol which indicates range", function() + local allow_status_codes_input = { + "200", + "299" + } + + local _, err = validate_entity({ allow_status_codes = allow_status_codes_input}, statsd_schema) + assert.not_nil(err) + assert.contains("invalid value: 200", err.config.allow_status_codes) + end) + it("accepts valid udp_packet_size", function() + local ok, err = validate_entity({ udp_packet_size = 0}, statsd_schema) + assert.is_nil(err) + assert.truthy(ok) + local ok, err = validate_entity({ udp_packet_size = 1}, statsd_schema) + assert.is_nil(err) + assert.truthy(ok) + local ok, err = validate_entity({ udp_packet_size = 10000}, statsd_schema) + assert.is_nil(err) + assert.truthy(ok) + end) + it("rejects invalid udp_packet_size", function() + local _, err = validate_entity({ udp_packet_size = -1}, statsd_schema) + assert.not_nil(err) + assert.equal("value should be between 0 and 65507", 
err.config.udp_packet_size) + local _, err = validate_entity({ udp_packet_size = "a"}, statsd_schema) + assert.not_nil(err) + assert.equal("expected a number", err.config.udp_packet_size) + local _, err = validate_entity({ udp_packet_size = 65508}, statsd_schema) + assert.not_nil(err) + assert.equal("value should be between 0 and 65507", err.config.udp_packet_size) + end) + it("accepts valid identifier_default", function() + local ok, err = validate_entity({ consumer_identifier_default = "consumer_id" }, statsd_schema) + assert.is_nil(err) + assert.truthy(ok) + local ok, err = validate_entity({ service_identifier_default = "service_id" }, statsd_schema) + assert.is_nil(err) + assert.truthy(ok) + local ok, err = validate_entity({ workspace_identifier_default = "workspace_id" }, statsd_schema) + assert.is_nil(err) + assert.truthy(ok) + end) + it("rejects invalid identifier_default", function() + local _, err = validate_entity({ + consumer_identifier_default = "invalid type", + service_identifier_default = "invalid type", + workspace_identifier_default = "invalid type" + }, statsd_schema) + assert.not_nil(err) + assert.equal("expected one of: consumer_id, custom_id, username", err.config.consumer_identifier_default) + assert.equal("expected one of: service_id, service_name, service_host, service_name_or_host", err.config.service_identifier_default) + assert.equal("expected one of: workspace_id, workspace_name", err.config.workspace_identifier_default) end) end) diff --git a/spec/03-plugins/06-statsd/03-allow_status_codes_spec.lua b/spec/03-plugins/06-statsd/03-allow_status_codes_spec.lua new file mode 100644 index 000000000000..63184f04aef7 --- /dev/null +++ b/spec/03-plugins/06-statsd/03-allow_status_codes_spec.lua @@ -0,0 +1,34 @@ +local log = require "kong.plugins.statsd.log" + +describe("Plugin: statsd (log_helper)", function() + + it("should be true with any status code when allow_status_codes is nil", function() + local allow_status_codes = nil + 
assert.is_truthy(log.is_in_range(allow_status_codes, 200)) + assert.is_truthy(log.is_in_range(allow_status_codes, 201)) + assert.is_truthy(log.is_in_range(allow_status_codes, 401)) + assert.is_truthy(log.is_in_range(allow_status_codes, 500)) + end) + + it("should be true when status code is in allowed status code range", function() + local allow_status_codes = { + "200-204" + } + + assert.is_truthy(log.is_in_range(allow_status_codes, 200)) + assert.is_truthy(log.is_in_range(allow_status_codes, 201)) + assert.is_truthy(log.is_in_range(allow_status_codes, 203)) + assert.is_truthy(log.is_in_range(allow_status_codes, 204)) + end) + + it("should be false when status code is not in between two configured ranges", function() + local allow_status_codes = { + "200-204", + "400-404" + } + assert.is_false(log.is_in_range(allow_status_codes, 205)) + assert.is_false(log.is_in_range(allow_status_codes, 301)) + assert.is_false(log.is_in_range(allow_status_codes, 500)) + end) +end) +