--
-- Licensed to the Apache Software Foundation (ASF) under one or more
-- contributor license agreements.  See the NOTICE file distributed with
-- this work for additional information regarding copyright ownership.
-- The ASF licenses this file to You under the Apache License, Version 2.0
-- (the "License"); you may not use this file except in compliance with
-- the License.  You may obtain a copy of the License at
--
--     http://www.apache.org/licenses/LICENSE-2.0
--
-- Unless required by applicable law or agreed to in writing, software
-- distributed under the License is distributed on an "AS IS" BASIS,
-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-- See the License for the specific language governing permissions and
-- limitations under the License.
--

--- loggly plugin: forwards request logs to Loggly (SolarWinds) as
--- RFC5424-framed syslog events over UDP, batched through the shared
--- batch-processor-manager.
local core = require("apisix.core")
local plugin = require("apisix.plugin")
local bp_manager_mod = require("apisix.utils.batch-processor-manager")
local log_util = require("apisix.utils.log-util")
local ngx = ngx
local tostring = tostring
local pairs = pairs
local tab_concat = table.concat
local udp = ngx.socket.udp

local plugin_name = "loggly"
local batch_processor_manager = bp_manager_mod.new(plugin_name)


-- RFC5424 severity values (PRIVAL = facility * 8 + severity).
-- NOTE(review): "EMEGR" is a misspelling of "EMERG", but it is exposed in
-- the public schema enum (and mirrored in docs/tests), so it must be kept
-- for backward compatibility.
local severity = {
    EMEGR   = 0, -- system is unusable
    ALERT   = 1, -- action must be taken immediately
    CRIT    = 2, -- critical conditions
    ERR     = 3, -- error conditions
    WARNING = 4, -- warning conditions
    NOTICE  = 5, -- normal but significant condition
    INFO    = 6, -- informational
    DEBUG   = 7, -- debug-level messages
}


-- Build the schema enum from the severity table: each level is accepted in
-- both upper and lower case (the plugin upper-cases at use time).
local severity_enums = {}
do
    for k, _ in pairs(severity) do
        severity_enums[#severity_enums + 1] = k
        severity_enums[#severity_enums + 1] = k:lower()
    end
end


local schema = {
    type = "object",
    properties = {
        customer_token = {type = "string"},
        severity = {
            type = "string",
            default = "INFO",
            enum = severity_enums,
            description = "base severity log level",
        },
        include_req_body = {type = "boolean", default = false},
        include_resp_body = {type = "boolean", default = false},
        include_resp_body_expr = {
            type = "array",
            minItems = 1,
            items = {
                type = "array"
            }
        },
        tags = {
            type = "array",
            minItems = 1,
            items = {
                type = "string",
                -- reject values that already carry a `tag=` prefix, since the
                -- plugin adds that prefix itself when building the SD-element
                pattern = "^(?!tag=)[ -~]*",
            },
        },
    },
    required = {"customer_token"}
}


-- Fallback endpoint settings used when no plugin metadata has been set.
local defaults = {
    host = "logs-01.loggly.com",
    port = 514,
    protocol = "syslog",
    timeout = 5000
}


local metadata_schema = {
    type = "object",
    properties = {
        host = {
            type = "string",
            default = defaults.host
        },
        port = {
            type = "integer",
            default = defaults.port
        },
        protocol = {
            type = "string",
            default = defaults.protocol,
            -- more methods coming soon
            enum = {"syslog"}
        },
        timeout = {
            type = "integer",
            minimum = 1,
            default = defaults.timeout
        },
        log_format = {
            type = "object",
        }
    }
}


local _M = {
    version = 0.1,
    priority = 411,
    name = plugin_name,
    schema = batch_processor_manager:wrap_schema(schema),
    metadata_schema = metadata_schema
}


--- Validate either the per-route conf or the plugin metadata,
-- then the optional custom log-format expressions.
function _M.check_schema(conf, schema_type)
    if schema_type == core.schema.TYPE_METADATA then
        return core.schema.check(metadata_schema, conf)
    end

    local ok, err = core.schema.check(schema, conf)
    if not ok then
        return nil, err
    end
    return log_util.check_log_schema(conf)
end


--- Capture the response body when include_resp_body(_expr) asks for it.
function _M.body_filter(conf, ctx)
    log_util.collect_body(conf, ctx)
end


--- Build one RFC5424-compliant syslog line for the current request.
-- Returns nil (after logging) when the log entry cannot be JSON-encoded.
local function generate_log_message(conf, ctx)
    local metadata = plugin.plugin_metadata(plugin_name)
    local entry

    -- custom log_format from metadata wins over the default full log
    if metadata and metadata.value.log_format
        and core.table.nkeys(metadata.value.log_format) > 0
    then
        entry = log_util.get_custom_format_log(ctx, metadata.value.log_format)
    else
        entry = log_util.get_full_log(ngx, conf)
    end

    local json_str, err = core.json.encode(entry)
    if not json_str then
        core.log.error('error occurred while encoding the data: ', err)
        return nil
    end

    local timestamp = log_util.get_rfc3339_zulu_timestamp()
    local taglist = {}
    if conf.tags then
        for i = 1, #conf.tags do
            core.table.insert(taglist, "tag=\"" .. conf.tags[i] .. "\"")
        end
    end

    -- RFC5424 layout: PRI+VERSION TIMESTAMP HOSTNAME APP-NAME PROCID MSGID
    -- STRUCTURED-DATA MSG.  PRIVAL = facility LOG_USER (1) * 8 + severity.
    -- "@41058" is Loggly's IANA private enterprise number.
    local message = {
        "<" .. tostring(8 + severity[conf.severity:upper()]) .. ">1",
        timestamp,                -- timestamp
        ctx.var.host or "-",      -- hostname
        "apisix",                 -- appname
        ctx.var.pid,              -- proc-id
        "-",                      -- msgid
        "[" .. conf.customer_token .. "@41058 " .. tab_concat(taglist, " ") .. "]",
        json_str
    }

    return tab_concat(message, " ")
end


--- Ship one already-formatted syslog line to the configured Loggly endpoint
-- over UDP.  Falls back to `defaults` when no metadata has been configured.
-- @return boolean ok, string|nil err_msg
local function send_data_over_udp(message)
    local metadata = plugin.plugin_metadata(plugin_name)
    core.log.info("metadata: ", core.json.delay_encode(metadata))

    if not metadata then
        core.log.info("received nil metadata: using metadata defaults: ",
                      core.json.delay_encode(defaults, true))
        metadata = {}
        metadata.value = defaults
    end

    local err_msg
    local res = true
    local sock = udp()
    local host, port = metadata.value.host, metadata.value.port
    sock:settimeout(metadata.value.timeout)

    core.log.info("sending a batch logs to ", host, ":", port)

    local ok, err = sock:setpeername(host, port)

    if not ok then
        core.log.error("failed to send log: ", err)
        return false, "failed to connect to UDP server: host[" .. host
                      .. "] port[" .. tostring(port) .. "] err: " .. err
    end

    ok, err = sock:send(message)
    if not ok then
        -- keep going so the socket is still closed below
        res = false
        core.log.error("failed to send log: ", err)
        err_msg = "failed to send data to UDP server: host[" .. host
                  .. "] port[" .. tostring(port) .. "] err:" .. err
    end

    ok, err = sock:close()
    if not ok then
        core.log.error("failed to close the UDP connection, host[",
                       host, "] port[", port, "] ", err)
    end

    return res, err_msg
end


--- Batch-processor callback: send every queued entry, stopping at the
-- first failure so the processor can retry the batch.
local function handle_log(entries)
    for i = 1, #entries do
        local ok, err = send_data_over_udp(entries[i])
        if not ok then
            return false, err
        end
    end
    return true
end


--- Log phase: format the event and hand it to the (per-route) batch
-- processor, creating the processor on first use.
function _M.log(conf, ctx)
    local log_data = generate_log_message(conf, ctx)
    if not log_data then
        return
    end

    if batch_processor_manager:add_entry(conf, log_data) then
        return
    end

    batch_processor_manager:add_entry_to_new_processor(conf, log_data, ctx,
                                                      handle_log)
end


return _M
[Attributes](#attributes) +- [Metadata](#metadata) +- [How To Enable](#how-to-enable) + - [Full configuration](#full-configuration) + - [Minimal configuration](#minimal-configuration) +- [Test Plugin](#test-plugin) +- [Disable Plugin](#disable-plugin) + +## Name + +The `loggly` plugin is used to forward the request log of `Apache APISIX` to `Loggly by SolarWinds` for analysis and storage. After the plugin is enabled, `Apache APISIX` will obtain request context information in `Log Phase` serialize it into [Loggly Syslog](https://documentation.solarwinds.com/en/success_center/loggly/content/admin/streaming-syslog-without-using-files.htm?cshid=loggly_streaming-syslog-without-using-files) data format which is actually syslog events with [RFC5424](https://datatracker.ietf.org/doc/html/rfc5424) compliant headers and submit it to the batch queue. When the maximum processing capacity of each batch of the batch processing queue or the maximum time to refresh the buffer is triggered, the data in the queue will be submitted to `Loggly` enterprise syslog endpoint. + +> At present, APISIX loggly plugin supports sending logs to Loggly server via syslog protocol, support for more event protocols are coming soon. + +For more info on Batch-Processor in Apache APISIX please refer to: +[Batch-Processor](../batch-processor.md) + +## Attributes + +| Name | Type | Requirement | Default | Description | +| ----------------------- | ---- | ------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| customer_token | string | required || A unique identifier is used when sending log data to Loggly to ensure that the logs are sent to the right organization account. 
| +| severity | string (enum) | optional | INFO | Log event severity level (choose between: "DEBUG", "INFO", "NOTICE", "WARNING", "ERR", "CRIT", "ALERT", "EMEGR" ) [case insensitive] | +| tags | array | optional | | To aid in segmentation & filtering. They are metadata you can set and they will be included with any event that is transmitted to Loggly. | +| include_req_body | boolean | optional | false | Whether to include the request body. false: indicates that the requested body is not included; true: indicates that the requested body is included. Note: if the request body is too big to be kept in the memory, it can't be logged due to Nginx's limitation. | +| include_resp_body| boolean | optional | false | Whether to include the response body. The response body is included if and only if it is `true`. | +| include_resp_body_expr | array | optional | | When `include_resp_body` is true, control the behavior based on the result of the [lua-resty-expr](https://github.com/api7/lua-resty-expr) expression. If present, only log the response body when the result is true. | +| max_retry_count | integer | optional | 0 | max number of retries before removing from the processing pipe line | +| retry_delay | integer | optional | 1 | number of seconds the process execution should be delayed if the execution fails | +| buffer_duration | integer | optional | 60 | max age in seconds of the oldest entry in a batch before the batch must be processed | +| inactive_timeout | integer | optional | 5 | max age in seconds when the buffer will be flushed if inactive | +| batch_max_size | integer | optional | 1000 | max size of each batch | + +To generate a Customer Token, head over to `/loggly.com/tokens` or navigate to `Logs > Source Setup > Customer Tokens` to generate a new token. 
+ +## Metadata + +| Name | Type | Requirement | Default | Valid | Description | +| ----------- | ------ | ----------- | ------- | ----- | ---------------------------------------------------------------------- | +| host | string | optional | "logs-01.loggly.com" | | The host address endpoint where logs are being sent. | +| port | integer | optional | 514 | | Loggly host port to make a connection request. | +| timeout | integer | optional | 5000 | | Loggly send data request timeout in milliseconds. | +| protocol | string | optional | "syslog" | | Protocol through which the logs are sent to Loggly from APISIX (currently supported protocol : "syslog") | +| log_format | object | optional | nil | | Log format declared as key value pair in JSON format. Only string is supported in the `value` part. If the value starts with `$`, it means to get [`APISIX` variables](../apisix-variable.md) or [Nginx variable](http://nginx.org/en/docs/varindex.html). If it is nil or empty object, APISIX generates full log info. | + +## How To Enable + +The following is an example of how to enable the `loggly` for a specific route. 
+ +### Full configuration + +```shell +curl http://127.0.0.1:9080/apisix/admin/routes/1 -H 'X-API-KEY: edd1c9f034335f136f87ad84b625c8f1' -X PUT -d ' +{ + "plugins":{ + "loggly":{ + "customer_token":"0e6fe4bf-376e-40f4-b25f-1d55cb29f5a2", + "tags":["apisix", "testroute"], + "severity":"info", + "buffer_duration":60, + "max_retry_count":0, + "retry_delay":1, + "inactive_timeout":2, + "batch_max_size":10 + } + }, + "upstream":{ + "type":"roundrobin", + "nodes":{ + "127.0.0.1:80":1 + } + }, + "uri":"/index.html" +}' +``` + +### Minimal configuration + +```shell +curl http://127.0.0.1:9080/apisix/admin/routes/1 -H 'X-API-KEY: edd1c9f034335f136f87ad84b625c8f1' -X PUT -d ' +{ + "plugins":{ + "loggly":{ + "customer_token":"0e6fe4bf-376e-40f4-b25f-1d55cb29f5a2", + } + }, + "upstream":{ + "type":"roundrobin", + "nodes":{ + "127.0.0.1:80":1 + } + }, + "uri":"/index.html" +}' +``` + +## Test Plugin + +* Send request to route configured with the `loggly` plugin + +```shell +$ curl -i http://127.0.0.1:9080/index.html +HTTP/1.1 200 OK +... + + +``` + +* Login to Loggly Dashboard to search and view + +![Loggly Dashboard](../../../assets/images/plugin/loggly-dashboard.png) + +## Disable Plugin + +Disabling the `loggly` plugin is very simple, just remove the `JSON` configuration corresponding to `loggly`. APISIX plugins are hot loaded, so no need to restart APISIX. 
+ +```shell +$ curl http://127.0.0.1:9080/apisix/admin/routes/1 -H 'X-API-KEY: edd1c9f034335f136f87ad84b625c8f1' -X PUT -d ' +{ + "uri": "/index.html", + "plugins": {}, + "upstream": { + "type": "roundrobin", + "nodes": { + "127.0.0.1:80": 1 + } + } +}' +``` diff --git a/t/admin/plugins.t b/t/admin/plugins.t index f3e78925d4a0..3b42258b5eb0 100644 --- a/t/admin/plugins.t +++ b/t/admin/plugins.t @@ -105,6 +105,7 @@ grpc-web prometheus datadog echo +loggly http-logger splunk-hec-logging skywalking-logger diff --git a/t/lib/mock_dogstatsd.lua b/t/lib/mock_layer4.lua similarity index 52% rename from t/lib/mock_dogstatsd.lua rename to t/lib/mock_layer4.lua index f4ee675162fa..cc15bb04fcb9 100644 --- a/t/lib/mock_dogstatsd.lua +++ b/t/lib/mock_layer4.lua @@ -15,12 +15,14 @@ -- limitations under the License. -- local core = require("apisix.core") +local json_decode = require("toolkit.json").decode +local json_encode = require("toolkit.json").encode local ngx = ngx local socket = ngx.req.socket local _M = {} -function _M.go() +function _M.dogstatsd() local sock, err = socket() if not sock then core.log.error("failed to get the request socket: ", err) @@ -34,11 +36,42 @@ function _M.go() if err and err ~= "no more data" then core.log.error("socket error, returning: ", err) end + return + end + core.log.warn("message received: ", data) + end +end + + +function _M.loggly() + local sock, err = socket() + if not sock then + core.log.error("failed to get the request socket: ", err) + return + end + while true do + local data, err = sock:receive() + + if not data then + if err and err ~= "no more data" then + core.log.error("socket error, returning: ", err) + end return - else - core.log.warn("message received: ", data) end + local m, err = ngx.re.match(data, "(^[ -~]*] )([ -~]*)") + if not m then + core.log.error("unknown data received, failed to extract: ", err) + return + end + if #m ~= 2 then + core.log.error("failed to match two (header, log body) subgroups", #m) + end + -- 
m[1] contains syslog header header <14>1 .... & m[2] contains actual log body + local logbody = json_decode(m[2]) + -- order keys + logbody = json_encode(logbody) + core.log.warn("message received: ", m[1] .. logbody) end end diff --git a/t/plugin/datadog.t b/t/plugin/datadog.t index 4eed735ebfde..3448e4384292 100644 --- a/t/plugin/datadog.t +++ b/t/plugin/datadog.t @@ -31,7 +31,7 @@ add_block_preprocessor(sub { server { listen 8125 udp; content_by_lua_block { - require("lib.mock_dogstatsd").go() + require("lib.mock_layer4").dogstatsd() } } _EOC_ diff --git a/t/plugin/loggly.t b/t/plugin/loggly.t new file mode 100644 index 000000000000..5cec02372c2f --- /dev/null +++ b/t/plugin/loggly.t @@ -0,0 +1,512 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +use t::APISIX 'no_plan'; + +repeat_each(1); +no_long_string(); +no_root_location(); + +add_block_preprocessor(sub { + my ($block) = @_; + + $block->set_value("stream_conf_enable", 1); + + if (!defined $block->extra_stream_config) { + my $stream_config = <<_EOC_; + server { + listen 8126 udp; + content_by_lua_block { + -- mock udp server is just accepts udp connection and log into error.log + require("lib.mock_layer4").loggly() + } + } +_EOC_ + $block->set_value("extra_stream_config", $stream_config); + } + + if ((!defined $block->error_log) && (!defined $block->no_error_log)) { + $block->set_value("no_error_log", "[error]"); + } + + if (!defined $block->request) { + $block->set_value("request", "GET /t"); + } + +}); + +run_tests(); + +__DATA__ + +=== TEST 1: sanity check metadata +--- config + location /t { + content_by_lua_block { + local plugin = require("apisix.plugins.loggly") + local configs = { + -- full configuration + { + customer_token = "TEST-Token-Must-Be-Passed", + severity = "INFO", + tags = {"special-route", "highpriority-route"}, + max_retry_count = 0, + retry_delay = 1, + buffer_duration = 60, + inactive_timeout = 2, + batch_max_size = 10, + }, + -- minimize schema + { + customer_token = "minimized-cofig", + }, + -- property "customer_token" is required + { + severity = "DEBUG", + }, + -- unknown severity + { + customer_token = "test", + severity = "UNKNOWN", + }, + -- severity in lower case, should pass + { + customer_token = "test", + severity = "crit", + } + } + + for i = 1, #configs do + local ok, err = plugin.check_schema(configs[i]) + if not ok then + ngx.say(err) + else + ngx.say("passed") + end + end + } + } +--- response_body +passed +passed +property "customer_token" is required +property "severity" validation failed: matches none of the enum values +passed + + + +=== TEST 2: set route with loggly enabled +--- config + location /t { + content_by_lua_block { + local t = require("lib.test_admin").test + local code, body = 
t('/apisix/admin/routes/1', + ngx.HTTP_PUT, + [[{ + "plugins": { + "loggly": { + "customer_token" : "test-token", + "batch_max_size": 1 + } + }, + "upstream": { + "nodes": { + "127.0.0.1:1982": 1 + }, + "type": "roundrobin" + }, + "name": "loggly-enabled-route", + "uri": "/opentracing" + }]] + ) + + if code >= 300 then + ngx.status = code + end + ngx.say(body) + } + } +--- response_body +passed + + + +=== TEST 3: update loggly metadata with host port +--- config + location /t { + content_by_lua_block { + local t = require("lib.test_admin").test + local code, body = t('/apisix/admin/plugin_metadata/loggly', + ngx.HTTP_PUT, + [[{ + "host":"127.0.0.1", + "port": 8126 + }]], + [[{ + "node": { + "value": { + "host": "127.0.0.1", + "protocol": "syslog", + "timeout": 5000, + "port": 8126 + }, + "key": "/apisix/plugin_metadata/loggly" + }, + "action": "set" + }]]) + + if code >= 300 then + ngx.status = code + ngx.say("fail") + return + end + ngx.say(body) + } + } +--- response_body +passed + + + +=== TEST 4: testing udp packet with mock loggly udp suite +--- config + location /t { + content_by_lua_block { + local t = require("lib.test_admin").test + + -- request 1 + local code, _, body = t("/opentracing", "GET") + if code >= 300 then + ngx.status = code + ngx.say("fail") + return + end + ngx.print(body) + + -- request 2 + local code, _, body = t("/opentracing", "GET") + if code >= 300 then + ngx.status = code + ngx.say("fail") + return + end + ngx.print(body) + } + } +--- wait: 0.5 +--- response_body +opentracing +opentracing +--- grep_error_log eval +qr/message received: .+?(?= \{)/ +--- grep_error_log_out eval +qr/message received: <14>1 [\d\-T:.]+Z [\d.]+ apisix [\d]+ - \[test-token\@41058 ] +message received: <14>1 [\d\-T:.]+Z [\d.]+ apisix [\d]+ - \[test-token\@41058 ]/ + + + +=== TEST 5: checking loggly tags +--- config + location /t { + content_by_lua_block { + local t = require("lib.test_admin").test + local code, body = t('/apisix/admin/routes/1', + ngx.HTTP_PUT, 
+ [[{ + "plugins": { + "loggly": { + "customer_token" : "token-1", + "batch_max_size": 1, + "tags": ["abc", "def"] + } + }, + "upstream": { + "nodes": { + "127.0.0.1:1982": 1 + }, + "type": "roundrobin" + }, + "uri": "/opentracing" + }]] + ) + + if code >= 300 then + ngx.status = code + end + ngx.say(body) + + local code, _, body = t("/opentracing", "GET") + if code >= 300 then + ngx.status = code + ngx.say("fail") + return + end + ngx.print(body) + } + } +--- wait: 0.5 +--- response_body +passed +opentracing +--- grep_error_log eval +qr/message received: .+?(?= \{)/ +--- grep_error_log_out eval +qr/message received: <14>1 [\d\-T:.]+Z [\d.]+ apisix [\d]+ - \[token-1\@41058 tag="abc" tag="def"]/ + + + +=== TEST 6: checking loggly log severity +log severity is calculated based on PRIVAL +8 + LOG_SEVERITY value +CRIT has value 2 so test should return PRIVAL <10> +--- config + location /t { + content_by_lua_block { + local t = require("lib.test_admin").test + local code, body = t('/apisix/admin/routes/1', + ngx.HTTP_PUT, + [[{ + "plugins": { + "loggly": { + "customer_token" : "token-1", + "batch_max_size": 1, + "severity": "CRIT" + } + }, + "upstream": { + "nodes": { + "127.0.0.1:1982": 1 + }, + "type": "roundrobin" + }, + "uri": "/opentracing" + }]] + ) + + if code >= 300 then + ngx.status = code + end + ngx.say(body) + + local code, _, body = t("/opentracing", "GET") + if code >= 300 then + ngx.status = code + ngx.say("fail") + return + end + ngx.print(body) + } + } +--- wait: 0.5 +--- response_body +passed +opentracing +--- grep_error_log eval +qr/message received: .+?(?= \{)/ +--- grep_error_log_out eval +qr/message received: <10>1 [\d\-T:.]+Z [\d.]+ apisix [\d]+ - \[token-1\@41058 ]/ + + + +=== TEST 7: collect response full log +--- config + location /t { + content_by_lua_block { + local t = require("lib.test_admin").test + local code, _, body = t("/opentracing", "GET") + if code >= 300 then + ngx.status = code + ngx.say("fail") + return + end + ngx.print(body) + 
} + } +--- response_body +opentracing +--- grep_error_log eval +qr/message received: [ -~]+/ +--- grep_error_log_out eval +qr/message received: <10>1 [\d\-T:.]+Z [\d.]+ apisix [\d]+ - \[token-1\@41058 ] \{"apisix_latency":[\d.]*,"client_ip":"127\.0\.0\.1","latency":[\d.]*,"request":\{"headers":\{"content-type":"application\/x-www-form-urlencoded","host":"127\.0\.0\.1:1984","user-agent":"lua-resty-http\/[\d.]* \(Lua\) ngx_lua\/[\d]*"\},"method":"GET","querystring":\{\},"size":[\d]+,"uri":"\/opentracing","url":"http:\/\/127\.0\.0\.1:1984\/opentracing"\},"response":\{"headers":\{"connection":"close","content-type":"text\/plain","server":"APISIX\/[\d.]+","transfer-encoding":"chunked"\},"size":[\d]*,"status":200\},"route_id":"1","server":\{"hostname":"[ -~]*","version":"[\d.]+"\},"service_id":"","start_time":[\d]*,"upstream":"127\.0\.0\.1:1982","upstream_latency":[\d]*\}/ + + + +=== TEST 8: collect response log with include_resp_body +--- config + location /t { + content_by_lua_block { + local t = require("lib.test_admin").test + local code, body = t('/apisix/admin/routes/1', + ngx.HTTP_PUT, + [[{ + "plugins": { + "loggly": { + "customer_token" : "tok", + "batch_max_size": 1, + "include_resp_body": true + } + }, + "upstream": { + "nodes": { + "127.0.0.1:1982": 1 + }, + "type": "roundrobin" + }, + "uri": "/opentracing" + }]] + ) + + if code >= 300 then + ngx.status = code + end + ngx.say(body) + local code, _, body = t("/opentracing", "GET") + if code >= 300 then + ngx.status = code + ngx.say("fail") + return + end + ngx.print(body) + } + } +--- response_body +passed +opentracing +--- grep_error_log eval +qr/message received: [ -~]+/ +--- grep_error_log_out eval +qr/message received: <14>1 [\d\-T:.]+Z [\d.]+ apisix [\d]+ - \[tok\@41058 ] \{"apisix_latency":[\d.]*,"client_ip":"127\.0\.0\.1","latency":[\d.]*,"request":\{"headers":\{"content-type":"application\/x-www-form-urlencoded","host":"127\.0\.0\.1:1984","user-agent":"lua-resty-http\/[\d.]* \(Lua\) 
ngx_lua\/[\d]*"\},"method":"GET","querystring":\{\},"size":[\d]+,"uri":"\/opentracing","url":"http:\/\/127\.0\.0\.1:1984\/opentracing"\},"response":\{"body":"opentracing\\n","headers":\{"connection":"close","content-type":"text\/plain","server":"APISIX\/[\d.]+","transfer-encoding":"chunked"\},"size":[\d]*,"status":200\},"route_id":"1","server":\{"hostname":"[ -~]*","version":"[\d.]+"\},"service_id":"","start_time":[\d]*,"upstream":"127\.0\.0\.1:1982","upstream_latency":[\d]*\}/ + + + +=== TEST 9: collect log with include_resp_body_expr +--- config + location /t { + content_by_lua_block { + local t = require("lib.test_admin").test + local code, body = t('/apisix/admin/routes/1', + ngx.HTTP_PUT, + [[{ + "plugins": { + "loggly": { + "customer_token" : "tok", + "batch_max_size": 1, + "include_resp_body": true, + "include_resp_body_expr": [ + ["arg_bar", "==", "bar"] + ] + } + }, + "upstream": { + "nodes": { + "127.0.0.1:1982": 1 + }, + "type": "roundrobin" + }, + "uri": "/opentracing" + }]] + ) + + if code >= 300 then + ngx.status = code + end + ngx.say(body) + -- this will include resp body + local code, _, body = t("/opentracing?bar=bar", "GET") + if code >= 300 then + ngx.status = code + ngx.say("fail") + return + end + ngx.print(body) + + } + } +--- response_body +passed +opentracing +--- grep_error_log eval +qr/message received: [ -~]+/ +--- grep_error_log_out eval +qr/message received: <14>1 [\d\-T:.]+Z [\d.]+ apisix [\d]+ - \[tok\@41058 ] \{"apisix_latency":[\d.]*,"client_ip":"127\.0\.0\.1","latency":[\d.]*,"request":\{"headers":\{"content-type":"application\/x-www-form-urlencoded","host":"127\.0\.0\.1:1984","user-agent":"lua-resty-http\/[\d.]* \(Lua\) 
ngx_lua\/[\d]*"\},"method":"GET","querystring":\{"bar":"bar"\},"size":[\d]+,"uri":"\/opentracing\?bar=bar","url":"http:\/\/127\.0\.0\.1:1984\/opentracing\?bar=bar"\},"response":\{"body":"opentracing\\n","headers":\{"connection":"close","content-type":"text\/plain","server":"APISIX\/[\d.]+","transfer-encoding":"chunked"\},"size":[\d]*,"status":200\},"route_id":"1","server":\{"hostname":"[ -~]*","version":"[\d.]+"\},"service_id":"","start_time":[\d]*,"upstream":"127\.0\.0\.1:1982","upstream_latency":[\d]*\}/ + + + +=== TEST 10: collect log with include_resp_body_expr mismatch +--- config + location /t { + content_by_lua_block { + local t = require("lib.test_admin").test + local code, _, body = t("/opentracing?foo=bar", "GET") + if code >= 300 then + ngx.status = code + ngx.say("fail") + return + end + ngx.print(body) + + } + } +--- response_body +opentracing +--- grep_error_log eval +qr/message received: [ -~]+/ +--- grep_error_log_out eval +qr/message received: <14>1 [\d\-T:.]+Z [\d.]+ apisix [\d]+ - \[tok\@41058 ] \{"apisix_latency":[\d.]*,"client_ip":"127\.0\.0\.1","latency":[\d.]*,"request":\{"headers":\{"content-type":"application\/x-www-form-urlencoded","host":"127\.0\.0\.1:1984","user-agent":"lua-resty-http\/[\d.]* \(Lua\) ngx_lua\/[\d]*"\},"method":"GET","querystring":\{"foo":"bar"\},"size":[\d]+,"uri":"\/opentracing\?foo=bar","url":"http:\/\/127\.0\.0\.1:1984\/opentracing\?foo=bar"\},"response":\{"headers":\{"connection":"close","content-type":"text\/plain","server":"APISIX\/[\d.]+","transfer-encoding":"chunked"\},"size":[\d]*,"status":200\},"route_id":"1","server":\{"hostname":"[ -~]*","version":"[\d.]+"\},"service_id":"","start_time":[\d]*,"upstream":"127\.0\.0\.1:1982","upstream_latency":[\d]*\}/ + + + +=== TEST 11: collect log with log_format +--- config + location /t { + content_by_lua_block { + local t = require("lib.test_admin").test + local code, body = t('/apisix/admin/plugin_metadata/loggly', + ngx.HTTP_PUT, + [[{ + "host":"127.0.0.1", + "port": 
8126, + "log_format":{ + "host":"$host", + "client":"$remote_addr" + } + }]] + ) + + if code >= 300 then + ngx.status = code + ngx.say("fail") + return + end + ngx.say(body) + + local code, _, body = t("/opentracing?foo=bar", "GET") + if code >= 300 then + ngx.status = code + ngx.say("fail") + return + end + ngx.print(body) + } + } +--- response_body +passed +opentracing +--- grep_error_log eval +qr/message received: [ -~]+/ +--- grep_error_log_out eval +qr/message received: <14>1 [\d\-T:.]+Z [\d.]+ apisix [\d]+ - \[tok\@41058 ] \{"client":"[\d.]+","host":"[\d.]+","route_id":"1"\}/