diff --git a/FAQ.md b/FAQ.md index e8fc85a1d2ba..d6f770273b11 100644 --- a/FAQ.md +++ b/FAQ.md @@ -259,17 +259,8 @@ Now you can trace the info level log in logs/error.log. ## How to reload your own plugin -The Apache APISIX plugin supports hot reloading. If your APISIX node has the Admin API turned on, then for scenarios such as adding / deleting / modifying plugins, you can hot reload the plugin by calling the HTTP interface without restarting the service. - -```shell -curl http://127.0.0.1:9080/apisix/admin/plugins/reload -H 'X-API-KEY: edd1c9f034335f136f87ad84b625c8f1' -X PUT -``` - -If your APISIX node does not open the Admin API, then you can manually load the plug-in by reloading APISIX. - -```shell -apisix reload -``` +The Apache APISIX plugin supports hot reloading. +See the `Hot reload` section in [plugins](./doc/plugins.md) for how to do that. ## How to make APISIX listen on multiple ports when handling HTTP or HTTPS requests? diff --git a/FAQ_CN.md b/FAQ_CN.md index 7b20a3538190..886a5611b72f 100644 --- a/FAQ_CN.md +++ b/FAQ_CN.md @@ -209,17 +209,9 @@ Server: APISIX web server ## 如何加载自己编写的插件 -Apache APISIX 的插件支持热加载,如果你的 APISIX 节点打开了 Admin API,那么对于新增/删除/修改插件等场景,均可以通过调用 HTTP 接口的方式热加载插件,不需要重启服务。 +Apache APISIX 的插件支持热加载。 -```shell -curl http://127.0.0.1:9080/apisix/admin/plugins/reload -H 'X-API-KEY: edd1c9f034335f136f87ad84b625c8f1' -X PUT -``` - -如果你的 APISIX 节点并没有打开 Admin API,那么你可以通过手动 reload APISIX 的方式加载插件。 - -```shell -apisix reload -``` +具体怎么做参考 [插件](./doc/zh-cn/plugins.md) 中关于“热加载”的部分。 ## 如何让 APISIX 在处理 HTTP 或 HTTPS 请求时监听多个端口 diff --git a/apisix/admin/init.lua b/apisix/admin/init.lua index 5aaafa4ab964..7ac2b85ad7ab 100644 --- a/apisix/admin/init.lua +++ b/apisix/admin/init.lua @@ -20,10 +20,14 @@ local route = require("resty.radixtree") local plugin = require("apisix.plugin") local ngx = ngx local get_method = ngx.req.get_method +local ngx_time = ngx.time +local ngx_timer_at = ngx.timer.at +local ngx_worker_id = ngx.worker.id local tonumber = tonumber local str_lower = string.lower local reload_event = "/apisix/admin/plugins/reload" local ipairs = ipairs +local error = error local events local MAX_REQ_BODY = 1024 * 1024 * 1.5 -- 1.5 MiB @@ -245,7 +249,7 @@ local function post_reload_plugins() core.response.exit(401) end - local success, err = events.post(reload_event, get_method(), ngx.time()) + local success, err = events.post(reload_event, get_method(), ngx_time()) if not success then core.response.exit(500, err) end @@ -254,9 +258,40 @@ local function post_reload_plugins() end +local function sync_local_conf_to_etcd() + core.log.warn("sync local conf to etcd") + + local local_conf = core.config.local_conf() + + local plugins = {} + for _, name in ipairs(local_conf.plugins) do + core.table.insert(plugins, { + name = name, + }) + end + + for _, name in ipairs(local_conf.stream_plugins) do + core.table.insert(plugins, { + name = name, + stream = true, + }) + end + + -- need to store all plugins name into one key so that it can be updated atomically + local res, err = core.etcd.set("/plugins", plugins) + if not res then + core.log.error("failed to set plugins: ", err) + end +end + + local function reload_plugins(data, event, source, pid) core.log.info("start to hot reload plugins") plugin.load() + + if ngx_worker_id() == 0 then + sync_local_conf_to_etcd() + end end @@ -294,6 +329,20 @@ function _M.init_worker() events = require("resty.worker.events") events.register(reload_plugins, reload_event, "PUT") + + if ngx_worker_id() == 0 then + local ok, err = ngx_timer_at(0, 
function(premature) + if premature then + return + end + + sync_local_conf_to_etcd() + end) + + if not ok then + error("failed to sync local configure to etcd: " .. err) + end + end end diff --git a/apisix/core/config_etcd.lua b/apisix/core/config_etcd.lua index 7fb12411963a..bb71313691d2 100644 --- a/apisix/core/config_etcd.lua +++ b/apisix/core/config_etcd.lua @@ -185,10 +185,6 @@ local function sync_data(self) return false, err end - if not dir_res.nodes then - dir_res.nodes = {} - end - if self.values then for i, val in ipairs(self.values) do if val and val.clean_handlers then @@ -203,19 +199,14 @@ local function sync_data(self) self.values_hash = nil end - self.values = new_tab(#dir_res.nodes, 0) - self.values_hash = new_tab(0, #dir_res.nodes) - local changed = false - for _, item in ipairs(dir_res.nodes) do - local key = short_key(self, item.key) - local data_valid = true - if type(item.value) ~= "table" then - data_valid = false - log.error("invalid item data of [", self.key .. "/" .. key, - "], val: ", item.value, - ", it shoud be a object") - end + + if self.single_item then + self.values = new_tab(1, 0) + self.values_hash = new_tab(0, 1) + + local item = dir_res + local data_valid = item.value ~= nil if data_valid and self.item_schema then data_valid, err = check_schema(self.item_schema, item.value) @@ -228,8 +219,8 @@ local function sync_data(self) if data_valid then changed = true insert_tab(self.values, item) - self.values_hash[key] = #self.values - item.value.id = key + self.values_hash[self.key] = #self.values + item.clean_handlers = {} if self.filter then @@ -238,6 +229,48 @@ local function sync_data(self) end self:upgrade_version(item.modifiedIndex) + + else + if not dir_res.nodes then + dir_res.nodes = {} + end + + self.values = new_tab(#dir_res.nodes, 0) + self.values_hash = new_tab(0, #dir_res.nodes) + + for _, item in ipairs(dir_res.nodes) do + local key = short_key(self, item.key) + local data_valid = true + if type(item.value) ~= "table" then + data_valid = false + log.error("invalid item data of [", self.key .. "/" .. key, + "], val: ", item.value, + ", it shoud be a object") + end + + if data_valid and self.item_schema then + data_valid, err = check_schema(self.item_schema, item.value) + if not data_valid then + log.error("failed to check item data of [", self.key, + "] err:", err, " ,val: ", json.encode(item.value)) + end + end + + if data_valid then + changed = true + insert_tab(self.values, item) + self.values_hash[key] = #self.values + + item.value.id = key + item.clean_handlers = {} + + if self.filter then + self.filter(item) + end + end + + self:upgrade_version(item.modifiedIndex) + end end if headers then @@ -285,9 +318,16 @@ local function sync_data(self) end local res_copy = res + -- waitdir will return [res] even for self.single_item = true for _, res in ipairs(res_copy) do - local key = short_key(self, res.key) - if res.value and type(res.value) ~= "table" then + local key + if self.single_item then + key = self.key + else + key = short_key(self, res.key) + end + + if res.value and not self.single_item and type(res.value) ~= "table" then self:upgrade_version(res.modifiedIndex) return false, "invalid item data of [" .. self.key .. "/" .. key .. "], val: " .. 
res.value @@ -314,10 +354,6 @@ local function sync_data(self) return false end - if self.filter then - self.filter(res) - end - local pre_index = self.values_hash[key] if pre_index then local pre_val = self.values[pre_index] @@ -329,7 +365,10 @@ local function sync_data(self) end if res.value then - res.value.id = key + if not self.single_item then + res.value.id = key + end + self.values[pre_index] = res res.clean_handlers = {} log.info("update data by key: ", key) @@ -345,7 +384,10 @@ local function sync_data(self) res.clean_handlers = {} insert_tab(self.values, res) self.values_hash[key] = #self.values - res.value.id = key + if not self.single_item then + res.value.id = key + end + log.info("insert data by key: ", key) end @@ -372,6 +414,12 @@ local function sync_data(self) self.sync_times = 0 end + -- /plugins' filter need to known self.values when it is called + -- so the filter should be called after self.values set. + if self.filter then + self.filter(res) + end + self.conf_version = self.conf_version + 1 end @@ -476,6 +524,7 @@ function _M.new(key, opts) local item_schema = opts and opts.item_schema local filter_fun = opts and opts.filter local timeout = opts and opts.timeout + local single_item = opts and opts.single_item local obj = setmetatable({ etcd_cli = nil, @@ -493,6 +542,7 @@ function _M.new(key, opts) last_err = nil, last_err_time = nil, timeout = timeout, + single_item = single_item, filter = filter_fun, }, mt) diff --git a/apisix/core/config_yaml.lua b/apisix/core/config_yaml.lua index 8575e0d3d86e..00b006dd8f66 100644 --- a/apisix/core/config_yaml.lua +++ b/apisix/core/config_yaml.lua @@ -141,25 +141,18 @@ local function sync_data(self) self.values = nil end - self.values = new_tab(#items, 0) - self.values_hash = new_tab(0, #items) + if self.single_item then + -- treat items as a single item + self.values = new_tab(1, 0) + self.values_hash = new_tab(0, 1) - local err - for i, item in ipairs(items) do - local id = tostring(i) - local data_valid = true - if type(item) ~= "table" then - data_valid = false - log.error("invalid item data of [", self.key .. "/" .. id, - "], val: ", json.delay_encode(item), - ", it shoud be a object") - end - - local key = item.id or "arr_" .. i + local item = items local conf_item = {value = item, modifiedIndex = apisix_yaml_ctime, - key = "/" .. self.key .. "/" .. key} + key = "/" .. self.key} - if data_valid and self.item_schema then + local data_valid = true + local err + if self.item_schema then data_valid, err = check_schema(self.item_schema, item) if not data_valid then log.error("failed to check item data of [", self.key, @@ -169,16 +162,54 @@ local function sync_data(self) if data_valid then insert_tab(self.values, conf_item) - local item_id = conf_item.value.id or self.key .. "#" .. id - item_id = tostring(item_id) - self.values_hash[item_id] = #self.values - conf_item.value.id = item_id + self.values_hash[self.key] = #self.values conf_item.clean_handlers = {} if self.filter then self.filter(conf_item) end end + + else + self.values = new_tab(#items, 0) + self.values_hash = new_tab(0, #items) + + local err + for i, item in ipairs(items) do + local id = tostring(i) + local data_valid = true + if type(item) ~= "table" then + data_valid = false + log.error("invalid item data of [", self.key .. "/" .. id, + "], val: ", json.delay_encode(item), + ", it shoud be a object") + end + + local key = item.id or "arr_" .. i + local conf_item = {value = item, modifiedIndex = apisix_yaml_ctime, + key = "/" .. self.key .. "/" .. 
key} + + if data_valid and self.item_schema then + data_valid, err = check_schema(self.item_schema, item) + if not data_valid then + log.error("failed to check item data of [", self.key, + "] err:", err, " ,val: ", json.delay_encode(item)) + end + end + + if data_valid then + insert_tab(self.values, conf_item) + local item_id = conf_item.value.id or self.key .. "#" .. id + item_id = tostring(item_id) + self.values_hash[item_id] = #self.values + conf_item.value.id = item_id + conf_item.clean_handlers = {} + + if self.filter then + self.filter(conf_item) + end + end + end end self.conf_version = apisix_yaml_ctime @@ -256,6 +287,7 @@ function _M.new(key, opts) local automatic = opts and opts.automatic local item_schema = opts and opts.item_schema local filter_fun = opts and opts.filter + local single_item = opts and opts.single_item -- like /routes and /upstreams, remove first char `/` if key then @@ -274,6 +306,7 @@ function _M.new(key, opts) last_err = nil, last_err_time = nil, key = key, + single_item = single_item, filter = filter_fun, }, mt) diff --git a/apisix/core/table.lua b/apisix/core/table.lua index 10bfaa75aba7..ed998d0ef0b4 100644 --- a/apisix/core/table.lua +++ b/apisix/core/table.lua @@ -173,4 +173,20 @@ end _M.patch = patch +-- Compare two tables as if they are sets (only compare the key part) +function _M.set_eq(a, b) + if nkeys(a) ~= nkeys(b) then + return false + end + + for k in pairs(a) do + if b[k] == nil then + return false + end + end + + return true +end + + return _M diff --git a/apisix/plugin.lua b/apisix/plugin.lua index 3077b9460953..5936288e26bf 100644 --- a/apisix/plugin.lua +++ b/apisix/plugin.lua @@ -16,6 +16,7 @@ -- local require = require local core = require("apisix.core") +local config_util = require("apisix.core.config_util") local pkg_loaded = package.loaded local sort_tab = table.sort local pcall = pcall @@ -98,24 +99,29 @@ local function load_plugin(name, plugins_list, is_stream_plugin) end -local function load() - core.table.clear(local_plugins) - core.table.clear(local_plugins_hash) - - local_conf = core.config.local_conf(true) - local plugin_names = local_conf.plugins - if not plugin_names then - return nil, "failed to read plugin list from local file" - end - +local function load(plugin_names) local processed = {} for _, name in ipairs(plugin_names) do if processed[name] == nil then processed[name] = true - load_plugin(name, local_plugins) end end + -- the same configure may be synchronized more than one + if core.table.set_eq(local_plugins_hash, processed) then + core.log.info("plugins not changed") + return true + end + + core.log.warn("new plugins: ", core.json.delay_encode(processed)) + + core.table.clear(local_plugins) + core.table.clear(local_plugins_hash) + + for name in pairs(processed) do + load_plugin(name, local_plugins) + end + -- sort by plugin's priority if #local_plugins > 1 then sort_tab(local_plugins, sort_plugin) @@ -137,24 +143,29 @@ local function load() end -local function load_stream() - core.table.clear(stream_local_plugins) - core.table.clear(stream_local_plugins_hash) - - local plugin_names = local_conf.stream_plugins - if not plugin_names then - core.log.warn("failed to read stream plugin list from local file") - return true - end - +local function load_stream(plugin_names) local processed = {} for _, name in ipairs(plugin_names) do if processed[name] == nil then processed[name] = true - load_plugin(name, stream_local_plugins, true) end end + -- the same configure may be synchronized more than one + if 
core.table.set_eq(stream_local_plugins_hash, processed) then + core.log.info("plugins not changed") + return true + end + + core.log.warn("new plugins: ", core.json.delay_encode(processed)) + + core.table.clear(stream_local_plugins) + core.table.clear(stream_local_plugins_hash) + + for name in pairs(processed) do + load_plugin(name, stream_local_plugins, true) + end + -- sort by plugin's priority if #stream_local_plugins > 1 then sort_tab(stream_local_plugins, sort_plugin) @@ -178,19 +189,47 @@ local function load_stream() end -function _M.load() - local_conf = core.config.local_conf(true) +function _M.load(config) + local http_plugin_names + local stream_plugin_names + + if not config then + local_conf = core.config.local_conf(true) + http_plugin_names = local_conf.plugins + stream_plugin_names = local_conf.stream_plugins + else + http_plugin_names = {} + stream_plugin_names = {} + for _, conf_value in config_util.iterate_values(config.values) do + local plugins_conf = conf_value.value + for _, conf in ipairs(plugins_conf) do + if conf.stream then + core.table.insert(stream_plugin_names, conf.name) + else + core.table.insert(http_plugin_names, conf.name) + end + end + end + end - if ngx.config.subsystem == "http" then - local ok, err = load() - if not ok then - core.log.error("failed to load plugins: ", err) + if ngx.config.subsystem == "http"then + if not http_plugin_names then + core.log.error("failed to read plugin list from local file") + else + local ok, err = load(http_plugin_names) + if not ok then + core.log.error("failed to load plugins: ", err) + end end end - local ok, err = load_stream() - if not ok then - core.log.error("failed to load stream plugins: ", err) + if not stream_plugin_names then + core.log.warn("failed to read stream plugin list from local file") + else + local ok, err = load_stream(stream_plugin_names) + if not ok then + core.log.error("failed to load stream plugins: ", err) + end end -- for test @@ -351,9 +390,39 @@ function _M.merge_consumer_route(route_conf, consumer_conf, api_ctx) end +local init_plugins_syncer +do + local plugins_conf + + function init_plugins_syncer() + local err + plugins_conf, err = core.config.new("/plugins", { + automatic = true, + item_schema = core.schema.plugins, + single_item = true, + filter = function() + _M.load(plugins_conf) + end, + }) + if not plugins_conf then + error("failed to create etcd instance for fetching /plugins : " .. 
err) + end + end +end + + function _M.init_worker() _M.load() + -- some plugins need to be initialized in init* phases + if ngx.config.subsystem == "http"then + require("apisix.plugins.prometheus.exporter").init() + end + + if local_conf and not local_conf.apisix.enable_admin then + init_plugins_syncer() + end + local plugin_metadatas, err = core.config.new("/plugin_metadata", {automatic = true} ) diff --git a/apisix/plugins/prometheus.lua b/apisix/plugins/prometheus.lua index b87741a0556f..8a0a340b4a0b 100644 --- a/apisix/plugins/prometheus.lua +++ b/apisix/plugins/prometheus.lua @@ -29,7 +29,6 @@ local _M = { version = 0.2, priority = 500, name = plugin_name, - init = exporter.init, log = exporter.log, schema = schema, } diff --git a/apisix/schema_def.lua b/apisix/schema_def.lua index b5231915546e..f61feb72dd31 100644 --- a/apisix/schema_def.lua +++ b/apisix/schema_def.lua @@ -665,6 +665,25 @@ _M.stream_route = { } +_M.plugins = { + type = "array", + items = { + type = "object", + properties = { + name = { + type = "string", + minLength = 1, + }, + stream = { + type = "boolean" + }, + additionalProperties = false, + }, + required = {"name"} + } +} + + _M.id_schema = id_schema diff --git a/doc/plugins.md b/doc/plugins.md index bf39cf15f0a4..9d923c56417a 100644 --- a/doc/plugins.md +++ b/doc/plugins.md @@ -23,8 +23,12 @@ APISIX plug-ins are hot-loaded. No matter you add, delete or modify plug-ins, you don't need to restart the service. -Just send an HTTP request through admin API: +If your APISIX node has the Admin API turned on, just send an HTTP request through admin API: ```shell curl http://127.0.0.1:9080/apisix/admin/plugins/reload -H 'X-API-KEY: edd1c9f034335f136f87ad84b625c8f1' -X PUT ``` + +### Hot reload in stand-alone mode + +For stand-alone mode, see plugin related section in [stand alone mode](stand-alone.md). 
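Aside from the doc changes above, the heart of this patch is the check that lets every worker skip a reload when the synced plugin list matches what is already loaded. Below is a minimal sketch of that comparison in plain Lua. It mirrors the new `core.table.set_eq` helper added in `apisix/core/table.lua`, but the key counting uses a local `pairs`-based `nkeys`, an assumption made here only so the snippet runs outside OpenResty/LuaJIT.

```lua
-- Sketch only: a plain-Lua approximation of the core.table.set_eq helper
-- added by this patch; key counting is done with pairs() instead of
-- table.nkeys so the snippet runs with stock Lua.
local function nkeys(t)
    local n = 0
    for _ in pairs(t) do
        n = n + 1
    end
    return n
end

-- Two tables are "set equal" when they carry exactly the same keys;
-- values are ignored, which is all the reload path needs.
local function set_eq(a, b)
    if nkeys(a) ~= nkeys(b) then
        return false
    end
    for k in pairs(a) do
        if b[k] == nil then
            return false
        end
    end
    return true
end

-- The reload handler builds a set of plugin names from the synced config
-- and only rebuilds the plugin tables when that set actually changed.
local loaded = { ["ip-restriction"] = true, ["jwt-auth"] = true }
local synced = { ["jwt-auth"] = true, ["ip-restriction"] = true }
print(set_eq(loaded, synced)) -- true -> "plugins not changed", reload skipped
```

In the patch itself this comparison runs against `local_plugins_hash` (and `stream_local_plugins_hash`), which is why a repeated `PUT /apisix/admin/plugins/reload` with an unchanged plugin list logs `plugins not changed` in the tests below.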
diff --git a/doc/stand-alone.md b/doc/stand-alone.md index de75b70117f7..78b4c9138fb6 100644 --- a/doc/stand-alone.md +++ b/doc/stand-alone.md @@ -155,3 +155,13 @@ upstreams: #END ``` +#### How to config Plugins + +```yml +# plugins listed here will be hot reloaded and override the boot configuration +plugins: + - name: ip-restriction + - name: jwt-auth + - name: mqtt-proxy + stream: true # set 'stream' to true for stream plugins +``` diff --git a/doc/zh-cn/plugins.md b/doc/zh-cn/plugins.md index dc861fb6029e..f768e2cee35c 100644 --- a/doc/zh-cn/plugins.md +++ b/doc/zh-cn/plugins.md @@ -28,3 +28,7 @@ APISIX 的插件是热加载的,不管你是新增、删除还是修改插件 ```shell curl http://127.0.0.1:9080/apisix/admin/plugins/reload -H 'X-API-KEY: edd1c9f034335f136f87ad84b625c8f1' -X PUT ``` + +## stand-alone 模式下的热加载 + +参考 [stand alone 模式](stand-alone.md) 文档里关于配置插件的内容。 diff --git a/doc/zh-cn/stand-alone.md b/doc/zh-cn/stand-alone.md index 389268bc1821..dddcc0e72cf9 100644 --- a/doc/zh-cn/stand-alone.md +++ b/doc/zh-cn/stand-alone.md @@ -154,3 +154,14 @@ upstreams: type: roundrobin #END ``` + +#### 配置 Plugins + +```yml +# 列出的插件会被热加载并覆盖掉启动时的配置 +plugins: + - name: ip-restriction + - name: jwt-auth + - name: mqtt-proxy + stream: true # stream 插件需要设置 stream 属性为 true +``` diff --git a/t/APISIX.pm b/t/APISIX.pm index 76653eb4ef2c..86e55ee9ee0d 100644 --- a/t/APISIX.pm +++ b/t/APISIX.pm @@ -24,6 +24,7 @@ repeat_each(1); log_level('info'); no_long_string(); no_shuffle(); +no_root_location(); # avoid generated duplicate 'location /' worker_connections(128); my $apisix_home = $ENV{APISIX_HOME} || cwd(); @@ -135,6 +136,7 @@ add_block_preprocessor(sub { worker_rlimit_core 500M; env ENABLE_ETCD_AUTH; env APISIX_PROFILE; +env TEST_NGINX_HTML_DIR; _EOC_ # set default `timeout` to 5sec diff --git a/t/admin/plugins-reload.t b/t/admin/plugins-reload.t index bf8417257547..d675306c619b 100644 --- a/t/admin/plugins-reload.t +++ b/t/admin/plugins-reload.t @@ -24,6 +24,14 @@ log_level("info"); workers(2); master_on(); +add_block_preprocessor(sub { + my ($block) = @_; + + $block->set_value("no_error_log", "[error]"); + + $block; +}); + run_tests; __DATA__ @@ -50,5 +58,68 @@ load plugin times: 1 load plugin times: 1 start to hot reload plugins start to hot reload plugins -load plugin times: 2 -load plugin times: 2 +load(): plugins not changed +load_stream(): plugins not changed +load(): plugins not changed +load_stream(): plugins not changed + + + +=== TEST 2: reload plugins triggers plugin list sync +--- config +location /t { + content_by_lua_block { + local core = require "apisix.core" + local config_util = require("apisix.core.config_util") + ngx.sleep(0.1) -- make sure the sync happened when admin starts is already finished + + local plugins_conf, err + plugins_conf, err = core.config.new("/plugins", { + automatic = true, + single_item = true, + filter = function() + -- called twice, one for readir, another for waitdir + ngx.log(ngx.WARN, "reload plugins on node ") + local plugins = {} + for _, conf_value in config_util.iterate_values(plugins_conf.values) do + core.table.insert_tail(plugins, unpack(conf_value.value)) + end + ngx.log(ngx.WARN, core.json.encode(plugins)) + end, + }) + if not plugins_conf then + error("failed to create etcd instance for fetching /plugins : " + .. 
err) + end + + local data = [[ +apisix: + node_listen: 1984 + admin_key: null +plugins: + - jwt-auth +stream_plugins: + - mqtt-proxy + ]] + require("lib.test_admin").set_config_yaml(data) + + local t = require("lib.test_admin").test + local code, _, org_body = t('/apisix/admin/plugins/reload', + ngx.HTTP_PUT) + + ngx.status = code + ngx.say(org_body) + ngx.sleep(0.2) + } +} +--- request +GET /t +--- response_body +done +--- grep_error_log eval +qr/reload plugins on node/ +--- grep_error_log_out +reload plugins on node +reload plugins on node +--- error_log +filter(): [{"name":"jwt-auth"},{"name":"mqtt-proxy","stream":true}] diff --git a/t/config-center-yaml/plugin.t b/t/config-center-yaml/plugin.t new file mode 100644 index 000000000000..165c5a9c6efd --- /dev/null +++ b/t/config-center-yaml/plugin.t @@ -0,0 +1,151 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +use t::APISIX 'no_plan'; + +repeat_each(1); +log_level('info'); +no_root_location(); +no_shuffle(); + +add_block_preprocessor(sub { + my ($block) = @_; + + my $yaml_config = $block->yaml_config // <<_EOC_; +apisix: + node_listen: 1984 + config_center: yaml + enable_admin: false +_EOC_ + + $block->set_value("yaml_config", $yaml_config); + + my $routes = <<_EOC_; +routes: + - + uri: /hello + upstream: + nodes: + "127.0.0.1:1980": 1 + type: roundrobin +#END +_EOC_ + + $block->set_value("apisix_yaml", $block->apisix_yaml . 
$routes); + + if (!$block->no_error_log) { + $block->set_value("no_error_log", "[error]"); + } +}); + +run_tests(); + +__DATA__ + +=== TEST 1: sanity +--- apisix_yaml +plugins: + - name: ip-restriction + - name: jwt-auth + - name: mqtt-proxy + stream: true +--- request +GET /hello +--- response_body +hello world +--- error_log +use config_center: yaml +load(): new plugins: {"ip-restriction":true,"jwt-auth":true} +load_stream(): new plugins: {"mqtt-proxy":true} +--- grep_error_log eval +qr/load\(\): new plugins/ +--- grep_error_log_out +load(): new plugins +load(): new plugins + + + +=== TEST 2: plugins not changed +--- yaml_config +apisix: + node_listen: 1984 + config_center: yaml + enable_admin: false +plugins: + - ip-restriction + - jwt-auth +stream_plugins: + - mqtt-proxy +--- apisix_yaml +plugins: + - name: ip-restriction + - name: jwt-auth + - name: mqtt-proxy + stream: true +--- request +GET /hello +--- response_body +hello world +--- error_log +load(): new plugins: {"ip-restriction":true,"jwt-auth":true} +load_stream(): new plugins: {"mqtt-proxy":true} +load(): plugins not changed +load_stream(): plugins not changed + + + +=== TEST 3: disable plugin and its router +--- apisix_yaml +plugins: + - name: jwt-auth +--- request +GET /apisix/prometheus/metrics +--- error_code: 404 + + + +=== TEST 4: enable plugin and its router +--- apisix_yaml +plugins: + - name: prometheus +--- request +GET /apisix/prometheus/metrics + + + +=== TEST 5: invalid plugin config +--- yaml_config +apisix: + node_listen: 1984 + config_center: yaml + enable_admin: false +plugins: + - ip-restriction + - jwt-auth +stream_plugins: + - mqtt-proxy +--- apisix_yaml +plugins: + - name: xxx + stream: ip-restriction +--- request +GET /hello +--- response_body +hello world +--- error_log +property "stream" validation failed: wrong type: expected boolean, got string +--- no_error_log +load(): plugins not changed diff --git a/t/core/table.t b/t/core/table.t index 69bf70d3d54c..a466f626e08f 100644 --- a/t/core/table.t +++ b/t/core/table.t @@ -103,3 +103,36 @@ value nil --- no_error_log [error] + + + +=== TEST 4: set_eq +--- config + location /t { + content_by_lua_block { + local core = require("apisix.core") + local cases = { + {expect = true, a = {}, b = {}}, + {expect = true, a = {a = 1}, b = {a = 1}}, + {expect = true, a = {a = 1}, b = {a = 2}}, + {expect = false, a = {b = 1}, b = {a = 1}}, + {expect = false, a = {a = 1, b = 1}, b = {a = 1}}, + {expect = false, a = {a = 1}, b = {a = 1, b = 2}}, + } + for _, t in ipairs(cases) do + local actual = core.table.set_eq(t.a, t.b) + local expect = t.expect + if actual ~= expect then + ngx.say("expect ", expect, ", actual ", actual) + return + end + end + ngx.say("ok") + } + } +--- response_body +ok +--- request +GET /t +--- no_error_log +[error] diff --git a/t/lib/test_admin.lua b/t/lib/test_admin.lua index bba93cd0fb89..7f7f0172ec56 100644 --- a/t/lib/test_admin.lua +++ b/t/lib/test_admin.lua @@ -130,6 +130,13 @@ function _M.comp_tab(left_tab, right_tab) end +function _M.set_config_yaml(data) + local f = assert(io.open(os.getenv("TEST_NGINX_HTML_DIR") .. "/../conf/config.yaml", 'w')) + assert(f:write(data)) + f:close() +end + + function _M.test(uri, method, body, pattern, headers) if not headers then headers = {}
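One last illustration of the data flow the new tests exercise: when `plugin.load()` is handed the synced `/plugins` configuration instead of reading `conf/config.yaml`, it splits the stored entries into HTTP and stream plugin name lists using the `stream` flag. The following is a rough plain-Lua sketch of that split; `synced_values` is a hypothetical stand-in for the real `config.values` structure that the patch walks with `config_util.iterate_values`.

```lua
-- Sketch only: mimics how _M.load(config) in apisix/plugin.lua derives the
-- two plugin name lists from the value stored under the /plugins key.
-- "synced_values" is a made-up stand-in for config.values.
local synced_values = {
    {
        value = {
            { name = "ip-restriction" },
            { name = "jwt-auth" },
            { name = "mqtt-proxy", stream = true }, -- stream plugins set stream = true
        },
    },
}

local http_plugin_names = {}
local stream_plugin_names = {}

for _, conf_value in ipairs(synced_values) do
    for _, conf in ipairs(conf_value.value) do
        if conf.stream then
            table.insert(stream_plugin_names, conf.name)
        else
            table.insert(http_plugin_names, conf.name)
        end
    end
end

print(table.concat(http_plugin_names, ", "))   -- ip-restriction, jwt-auth
print(table.concat(stream_plugin_names, ", ")) -- mqtt-proxy
```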