From 7dc7a11da7a88ce8f82b2a1b540dccfcef7a486b Mon Sep 17 00:00:00 2001 From: Aapo Talvensaari Date: Thu, 1 Jun 2023 15:00:47 +0300 Subject: [PATCH 1/3] chore(deps): import resty.mlcache from lua-resty-mlcache ### Summary Forking `resty.mlcache` into Kong in preparations on adding functionality and changes to it that are out of the scope for the upstream project. Co-authored-by: Thibault Charbonnier (@thibaultcha) Co-authored-by: Thijs Schreijer (@Tieske) Co-authored-by: Julien Desgats(@jdesgats) Co-authored-by: Robert Paprocki (@p0pr0ck5) Co-authored-by: Aapo Talvensaari (@bungle) Co-authored-by: Chrono Law (@chronolaw) Co-authored-by: Michael Martin (@flrgh) Co-authored-by: Hamish Forbes (@hamishforbes) Co-authored-by: Piotr Przybylski (@piotrp) Co-authored-by: Martin Amps (@martinamps) Co-authored-by: Yuchen Wu (@eaufavor) Signed-off-by: Aapo Talvensaari --- kong-3.4.0-0.rockspec | 4 +- kong/cache/init.lua | 2 +- kong/plugins/pre-function/_handler.lua | 2 +- kong/resty/mlcache/init.lua | 1395 ++++++++++++ kong/resty/mlcache/ipc.lua | 257 +++ kong/runloop/certificate.lua | 2 +- t/05-mlcache/00-ipc.t | 717 +++++++ t/05-mlcache/01-new.t | 605 ++++++ t/05-mlcache/02-get.t | 2702 ++++++++++++++++++++++++ t/05-mlcache/03-peek.t | 666 ++++++ t/05-mlcache/04-update.t | 117 + t/05-mlcache/05-set.t | 624 ++++++ t/05-mlcache/06-delete.t | 252 +++ t/05-mlcache/07-l1_serializer.t | 741 +++++++ t/05-mlcache/08-purge.t | 402 ++++ t/05-mlcache/09-isolation.t | 375 ++++ t/05-mlcache/10-ipc_shm.t | 319 +++ t/05-mlcache/11-locks_shm.t | 115 + t/05-mlcache/12-resurrect-stale.t | 1047 +++++++++ t/05-mlcache/13-get_bulk.t | 1735 +++++++++++++++ t/05-mlcache/14-bulk-and-res.t | 227 ++ 21 files changed, 12302 insertions(+), 4 deletions(-) create mode 100644 kong/resty/mlcache/init.lua create mode 100644 kong/resty/mlcache/ipc.lua create mode 100644 t/05-mlcache/00-ipc.t create mode 100644 t/05-mlcache/01-new.t create mode 100644 t/05-mlcache/02-get.t create mode 100644 
t/05-mlcache/03-peek.t create mode 100644 t/05-mlcache/04-update.t create mode 100644 t/05-mlcache/05-set.t create mode 100644 t/05-mlcache/06-delete.t create mode 100644 t/05-mlcache/07-l1_serializer.t create mode 100644 t/05-mlcache/08-purge.t create mode 100644 t/05-mlcache/09-isolation.t create mode 100644 t/05-mlcache/10-ipc_shm.t create mode 100644 t/05-mlcache/11-locks_shm.t create mode 100644 t/05-mlcache/12-resurrect-stale.t create mode 100644 t/05-mlcache/13-get_bulk.t create mode 100644 t/05-mlcache/14-bulk-and-res.t diff --git a/kong-3.4.0-0.rockspec b/kong-3.4.0-0.rockspec index 9ebb1ef2e084..11f600674c81 100644 --- a/kong-3.4.0-0.rockspec +++ b/kong-3.4.0-0.rockspec @@ -32,7 +32,6 @@ dependencies = { "luaxxhash >= 1.0", "lua-protobuf == 0.5.0", "lua-resty-healthcheck == 1.6.2", - "lua-resty-mlcache == 2.6.0", "lua-messagepack == 0.5.2", "lua-resty-openssl == 0.8.22", "lua-resty-counter == 0.2.1", @@ -94,6 +93,9 @@ build = { ["kong.resty.dns.utils"] = "kong/resty/dns/utils.lua", ["kong.resty.ctx"] = "kong/resty/ctx.lua", + ["kong.resty.mlcache"] = "kong/resty/mlcache/init.lua", + ["kong.resty.mlcache.ipc"] = "kong/resty/mlcache/ipc.lua", + ["kong.cmd"] = "kong/cmd/init.lua", ["kong.cmd.roar"] = "kong/cmd/roar.lua", ["kong.cmd.stop"] = "kong/cmd/stop.lua", diff --git a/kong/cache/init.lua b/kong/cache/init.lua index 9014eb26c0d8..a18fe772d706 100644 --- a/kong/cache/init.lua +++ b/kong/cache/init.lua @@ -1,4 +1,4 @@ -local resty_mlcache = require "resty.mlcache" +local resty_mlcache = require "kong.resty.mlcache" local marshall = require "kong.cache.marshall" diff --git a/kong/plugins/pre-function/_handler.lua b/kong/plugins/pre-function/_handler.lua index a4c513256458..ebac8fada298 100644 --- a/kong/plugins/pre-function/_handler.lua +++ b/kong/plugins/pre-function/_handler.lua @@ -1,4 +1,4 @@ -local resty_mlcache = require "resty.mlcache" +local resty_mlcache = require "kong.resty.mlcache" local sandbox = require "kong.tools.sandbox" local kong_meta = 
require "kong.meta" diff --git a/kong/resty/mlcache/init.lua b/kong/resty/mlcache/init.lua new file mode 100644 index 000000000000..aad500780ec5 --- /dev/null +++ b/kong/resty/mlcache/init.lua @@ -0,0 +1,1395 @@ +-- vim: ts=4 sts=4 sw=4 et: + +local new_tab = require "table.new" +local lrucache = require "resty.lrucache" +local resty_lock = require "resty.lock" +local tablepool +do + local pok + pok, tablepool = pcall(require, "tablepool") + if not pok then + -- fallback for OpenResty < 1.15.8.1 + tablepool = { + fetch = function(_, narr, nrec) + return new_tab(narr, nrec) + end, + release = function(_, _, _) + -- nop (obj will be subject to GC) + end, + } + end +end +local codec +do + local pok + pok, codec = pcall(require, "string.buffer") + if not pok then + codec = require "cjson" + end +end + + +local now = ngx.now +local min = math.min +local ceil = math.ceil +local fmt = string.format +local sub = string.sub +local find = string.find +local type = type +local pcall = pcall +local xpcall = xpcall +local traceback = debug.traceback +local error = error +local tostring = tostring +local tonumber = tonumber +local encode = codec.encode +local decode = codec.decode +local thread_spawn = ngx.thread.spawn +local thread_wait = ngx.thread.wait +local setmetatable = setmetatable +local shared = ngx.shared +local ngx_log = ngx.log +local WARN = ngx.WARN +local ERR = ngx.ERR + + +local CACHE_MISS_SENTINEL_LRU = {} +local LOCK_KEY_PREFIX = "lua-resty-mlcache:lock:" +local LRU_INSTANCES = setmetatable({}, { __mode = "v" }) +local SHM_SET_DEFAULT_TRIES = 3 +local BULK_DEFAULT_CONCURRENCY = 3 + + +local TYPES_LOOKUP = { + number = 1, + boolean = 2, + string = 3, + table = 4, +} + + +local SHM_FLAGS = { + stale = 0x00000001, +} + + +local marshallers = { + shm_value = function(str_value, value_type, at, ttl) + return fmt("%d:%f:%f:%s", value_type, at, ttl, str_value) + end, + + shm_nil = function(at, ttl) + return fmt("0:%f:%f:", at, ttl) + end, + + [1] = function(number) -- 
number + return tostring(number) + end, + + [2] = function(bool) -- boolean + return bool and "true" or "false" + end, + + [3] = function(str) -- string + return str + end, + + [4] = function(t) -- table + local pok, str = pcall(encode, t) + if not pok then + return nil, "could not encode table value: " .. str + end + + return str + end, +} + + +local unmarshallers = { + shm_value = function(marshalled) + -- split our shm marshalled value by the hard-coded ":" tokens + -- "type:at:ttl:value" + -- 1:1501831735.052000:0.500000:123 + local ttl_last = find(marshalled, ":", 21, true) - 1 + + local value_type = sub(marshalled, 1, 1) -- n:... + local at = sub(marshalled, 3, 19) -- n:1501831160 + local ttl = sub(marshalled, 21, ttl_last) + local str_value = sub(marshalled, ttl_last + 2) + + return str_value, tonumber(value_type), tonumber(at), tonumber(ttl) + end, + + [0] = function() -- nil + return nil + end, + + [1] = function(str) -- number + return tonumber(str) + end, + + [2] = function(str) -- boolean + return str == "true" + end, + + [3] = function(str) -- string + return str + end, + + [4] = function(str) -- table + local pok, t = pcall(decode, str) + if not pok then + return nil, "could not decode table value: " .. t + end + + return t + end, +} + + +local function rebuild_lru(self) + if self.lru then + if self.lru.flush_all then + self.lru:flush_all() + return + end + + -- fallback for OpenResty < 1.13.6.2 + -- Invalidate the entire LRU by GC-ing it. + LRU_INSTANCES[self.name] = nil + self.lru = nil + end + + -- Several mlcache instances can have the same name and hence, the same + -- lru instance. We need to GC such LRU instance when all mlcache instances + -- using them are GC'ed. We do this with a weak table. 
+ local lru = LRU_INSTANCES[self.name] + if not lru then + lru = lrucache.new(self.lru_size) + LRU_INSTANCES[self.name] = lru + end + + self.lru = lru +end + + +local _M = { + _VERSION = "2.6.0", + _AUTHOR = "Thibault Charbonnier", + _LICENSE = "MIT", + _URL = "https://github.com/thibaultcha/lua-resty-mlcache", +} +local mt = { __index = _M } + + +function _M.new(name, shm, opts) + if type(name) ~= "string" then + error("name must be a string", 2) + end + + if type(shm) ~= "string" then + error("shm must be a string", 2) + end + + if opts ~= nil then + if type(opts) ~= "table" then + error("opts must be a table", 2) + end + + if opts.lru_size ~= nil and type(opts.lru_size) ~= "number" then + error("opts.lru_size must be a number", 2) + end + + if opts.ttl ~= nil then + if type(opts.ttl) ~= "number" then + error("opts.ttl must be a number", 2) + end + + if opts.ttl < 0 then + error("opts.ttl must be >= 0", 2) + end + end + + if opts.neg_ttl ~= nil then + if type(opts.neg_ttl) ~= "number" then + error("opts.neg_ttl must be a number", 2) + end + + if opts.neg_ttl < 0 then + error("opts.neg_ttl must be >= 0", 2) + end + end + + if opts.resurrect_ttl ~= nil then + if type(opts.resurrect_ttl) ~= "number" then + error("opts.resurrect_ttl must be a number", 2) + end + + if opts.resurrect_ttl < 0 then + error("opts.resurrect_ttl must be >= 0", 2) + end + end + + if opts.resty_lock_opts ~= nil + and type(opts.resty_lock_opts) ~= "table" + then + error("opts.resty_lock_opts must be a table", 2) + end + + if opts.ipc_shm ~= nil and type(opts.ipc_shm) ~= "string" then + error("opts.ipc_shm must be a string", 2) + end + + if opts.ipc ~= nil then + if opts.ipc_shm then + error("cannot specify both of opts.ipc_shm and opts.ipc", 2) + end + + if type(opts.ipc) ~= "table" then + error("opts.ipc must be a table", 2) + end + + if type(opts.ipc.register_listeners) ~= "function" then + error("opts.ipc.register_listeners must be a function", 2) + end + + if type(opts.ipc.broadcast) ~= 
"function" then + error("opts.ipc.broadcast must be a function", 2) + end + + if opts.ipc.poll ~= nil and type(opts.ipc.poll) ~= "function" then + error("opts.ipc.poll must be a function", 2) + end + end + + if opts.l1_serializer ~= nil + and type(opts.l1_serializer) ~= "function" + then + error("opts.l1_serializer must be a function", 2) + end + + if opts.shm_set_tries ~= nil then + if type(opts.shm_set_tries) ~= "number" then + error("opts.shm_set_tries must be a number", 2) + end + + if opts.shm_set_tries < 1 then + error("opts.shm_set_tries must be >= 1", 2) + end + end + + if opts.shm_miss ~= nil and type(opts.shm_miss) ~= "string" then + error("opts.shm_miss must be a string", 2) + end + + if opts.shm_locks ~= nil and type(opts.shm_locks) ~= "string" then + error("opts.shm_locks must be a string", 2) + end + else + opts = {} + end + + local dict = shared[shm] + if not dict then + return nil, "no such lua_shared_dict: " .. shm + end + + local dict_miss + if opts.shm_miss then + dict_miss = shared[opts.shm_miss] + if not dict_miss then + return nil, "no such lua_shared_dict for opts.shm_miss: " + .. opts.shm_miss + end + end + + if opts.shm_locks then + local dict_locks = shared[opts.shm_locks] + if not dict_locks then + return nil, "no such lua_shared_dict for opts.shm_locks: " + .. 
opts.shm_locks + end + end + + local self = { + name = name, + dict = dict, + shm = shm, + dict_miss = dict_miss, + shm_miss = opts.shm_miss, + shm_locks = opts.shm_locks or shm, + ttl = opts.ttl or 30, + neg_ttl = opts.neg_ttl or 5, + resurrect_ttl = opts.resurrect_ttl, + lru_size = opts.lru_size or 100, + resty_lock_opts = opts.resty_lock_opts, + l1_serializer = opts.l1_serializer, + shm_set_tries = opts.shm_set_tries or SHM_SET_DEFAULT_TRIES, + debug = opts.debug, + } + + if opts.ipc_shm or opts.ipc then + self.events = { + ["invalidation"] = { + channel = fmt("mlcache:invalidations:%s", name), + handler = function(key) + self.lru:delete(key) + end, + }, + ["purge"] = { + channel = fmt("mlcache:purge:%s", name), + handler = function() + rebuild_lru(self) + end, + } + } + + if opts.ipc_shm then + local mlcache_ipc = require "kong.resty.mlcache.ipc" + + local ipc, err = mlcache_ipc.new(opts.ipc_shm, opts.debug) + if not ipc then + return nil, "failed to initialize mlcache IPC " .. + "(could not instantiate mlcache.ipc): " .. err + end + + for _, ev in pairs(self.events) do + ipc:subscribe(ev.channel, ev.handler) + end + + self.broadcast = function(channel, data) + return ipc:broadcast(channel, data) + end + + self.poll = function(timeout) + return ipc:poll(timeout) + end + + self.ipc = ipc + + else + -- opts.ipc + local ok, err = opts.ipc.register_listeners(self.events) + if not ok and err ~= nil then + return nil, "failed to initialize custom IPC " .. + "(opts.ipc.register_listeners returned an error): " + .. 
err + end + + self.broadcast = opts.ipc.broadcast + self.poll = opts.ipc.poll + + self.ipc = true + end + end + + if opts.lru then + self.lru = opts.lru + + else + rebuild_lru(self) + end + + return setmetatable(self, mt) +end + + +local function set_lru(self, key, value, ttl, neg_ttl, l1_serializer) + if value == nil then + ttl = neg_ttl + value = CACHE_MISS_SENTINEL_LRU + + elseif l1_serializer then + local ok, err + ok, value, err = pcall(l1_serializer, value) + if not ok then + return nil, "l1_serializer threw an error: " .. value + end + + if err then + return nil, err + end + + if value == nil then + return nil, "l1_serializer returned a nil value" + end + end + + if ttl == 0 then + -- indefinite ttl for lua-resty-lrucache is 'nil' + ttl = nil + end + + self.lru:set(key, value, ttl) + + return value +end + + +local function marshall_for_shm(value, ttl, neg_ttl) + local at = now() + + if value == nil then + return marshallers.shm_nil(at, neg_ttl), nil, true -- is_nil + end + + -- serialize insertion time + Lua types for shm storage + + local value_type = TYPES_LOOKUP[type(value)] + + if not marshallers[value_type] then + error("cannot cache value of type " .. type(value)) + end + + local str_marshalled, err = marshallers[value_type](value) + if not str_marshalled then + return nil, "could not serialize value for lua_shared_dict insertion: " + .. 
err + end + + return marshallers.shm_value(str_marshalled, value_type, at, ttl) +end + + +local function unmarshall_from_shm(shm_v) + local str_serialized, value_type, at, ttl = unmarshallers.shm_value(shm_v) + + local value, err = unmarshallers[value_type](str_serialized) + if err then + return nil, err + end + + return value, nil, at, ttl +end + + +local function set_shm(self, shm_key, value, ttl, neg_ttl, flags, shm_set_tries, + throw_no_mem) + local shm_value, err, is_nil = marshall_for_shm(value, ttl, neg_ttl) + if not shm_value then + return nil, err + end + + local shm = self.shm + local dict = self.dict + + if is_nil then + ttl = neg_ttl + + if self.dict_miss then + shm = self.shm_miss + dict = self.dict_miss + end + end + + -- we will call `set()` N times to work around potential shm fragmentation. + -- when the shm is full, it will only evict about 30 to 90 items (via + -- LRU), which could lead to a situation where `set()` still does not + -- have enough memory to store the cached value, in which case we + -- try again to try to trigger more LRU evictions. + + local tries = 0 + local ok, err + + while tries < shm_set_tries do + tries = tries + 1 + + ok, err = dict:set(shm_key, shm_value, ttl, flags or 0) + if ok or err and err ~= "no memory" then + break + end + end + + if not ok then + if err ~= "no memory" or throw_no_mem then + return nil, "could not write to lua_shared_dict '" .. shm + .. "': " .. 
err + end + + ngx_log(WARN, "could not write to lua_shared_dict '", + shm, "' after ", tries, " tries (no memory), ", + "it is either fragmented or cannot allocate more ", + "memory, consider increasing 'opts.shm_set_tries'") + end + + return true +end + + +local function set_shm_set_lru(self, key, shm_key, value, ttl, neg_ttl, flags, + shm_set_tries, l1_serializer, throw_no_mem) + + local ok, err = set_shm(self, shm_key, value, ttl, neg_ttl, flags, + shm_set_tries, throw_no_mem) + if not ok then + return nil, err + end + + return set_lru(self, key, value, ttl, neg_ttl, l1_serializer) +end + + +local function get_shm_set_lru(self, key, shm_key, l1_serializer) + local v, shmerr, went_stale = self.dict:get_stale(shm_key) + if v == nil and shmerr then + -- shmerr can be 'flags' upon successful get_stale() calls, so we + -- also check v == nil + return nil, "could not read from lua_shared_dict: " .. shmerr + end + + if self.shm_miss and v == nil then + -- if we cache misses in another shm, maybe it is there + v, shmerr, went_stale = self.dict_miss:get_stale(shm_key) + if v == nil and shmerr then + -- shmerr can be 'flags' upon successful get_stale() calls, so we + -- also check v == nil + return nil, "could not read from lua_shared_dict: " .. shmerr + end + end + + if v ~= nil then + local value, err, at, ttl = unmarshall_from_shm(v) + if err then + return nil, "could not deserialize value after lua_shared_dict " .. + "retrieval: " .. 
err + end + + if went_stale then + return value, nil, went_stale + end + + -- 'shmerr' is 'flags' on :get_stale() success + local is_stale = shmerr == SHM_FLAGS.stale + + local remaining_ttl + if ttl == 0 then + -- indefinite ttl, keep '0' as it means 'forever' + remaining_ttl = 0 + + else + -- compute elapsed time to get remaining ttl for LRU caching + remaining_ttl = ttl - (now() - at) + + if remaining_ttl <= 0 then + -- value has less than 1ms of lifetime in the shm, avoid + -- setting it in LRU which would be wasteful and could + -- indefinitely cache the value when ttl == 0 + return value, nil, nil, is_stale + end + end + + value, err = set_lru(self, key, value, remaining_ttl, remaining_ttl, + l1_serializer) + if err then + return nil, err + end + + return value, nil, nil, is_stale + end +end + + +local function check_opts(self, opts) + local ttl + local neg_ttl + local resurrect_ttl + local l1_serializer + local shm_set_tries + + if opts ~= nil then + if type(opts) ~= "table" then + error("opts must be a table", 3) + end + + ttl = opts.ttl + if ttl ~= nil then + if type(ttl) ~= "number" then + error("opts.ttl must be a number", 3) + end + + if ttl < 0 then + error("opts.ttl must be >= 0", 3) + end + end + + neg_ttl = opts.neg_ttl + if neg_ttl ~= nil then + if type(neg_ttl) ~= "number" then + error("opts.neg_ttl must be a number", 3) + end + + if neg_ttl < 0 then + error("opts.neg_ttl must be >= 0", 3) + end + end + + resurrect_ttl = opts.resurrect_ttl + if resurrect_ttl ~= nil then + if type(resurrect_ttl) ~= "number" then + error("opts.resurrect_ttl must be a number", 3) + end + + if resurrect_ttl < 0 then + error("opts.resurrect_ttl must be >= 0", 3) + end + end + + l1_serializer = opts.l1_serializer + if l1_serializer ~= nil and type(l1_serializer) ~= "function" then + error("opts.l1_serializer must be a function", 3) + end + + shm_set_tries = opts.shm_set_tries + if shm_set_tries ~= nil then + if type(shm_set_tries) ~= "number" then + 
error("opts.shm_set_tries must be a number", 3) + end + + if shm_set_tries < 1 then + error("opts.shm_set_tries must be >= 1", 3) + end + end + end + + if not ttl then + ttl = self.ttl + end + + if not neg_ttl then + neg_ttl = self.neg_ttl + end + + if not resurrect_ttl then + resurrect_ttl = self.resurrect_ttl + end + + if not l1_serializer then + l1_serializer = self.l1_serializer + end + + if not shm_set_tries then + shm_set_tries = self.shm_set_tries + end + + return ttl, neg_ttl, resurrect_ttl, l1_serializer, shm_set_tries +end + + +local function unlock_and_ret(lock, res, err, hit_lvl) + local ok, lerr = lock:unlock() + if not ok and lerr ~= "unlocked" then + return nil, "could not unlock callback: " .. lerr + end + + return res, err, hit_lvl +end + + +local function run_callback(self, key, shm_key, data, ttl, neg_ttl, + went_stale, l1_serializer, resurrect_ttl, shm_set_tries, cb, ...) + local lock, err = resty_lock:new(self.shm_locks, self.resty_lock_opts) + if not lock then + return nil, "could not create lock: " .. err + end + + local elapsed, lerr = lock:lock(LOCK_KEY_PREFIX .. shm_key) + if not elapsed and lerr ~= "timeout" then + return nil, "could not acquire callback lock: " .. 
lerr + end + + do + -- check for another worker's success at running the callback, but + -- do not return data if it is still the same stale value (this is + -- possible if the value was still not evicted between the first + -- get() and this one) + + local data2, err, went_stale2, stale2 = get_shm_set_lru(self, key, + shm_key, + l1_serializer) + if err then + return unlock_and_ret(lock, nil, err) + end + + if data2 ~= nil and not went_stale2 then + -- we got a fresh item from shm: other worker succeeded in running + -- the callback + if data2 == CACHE_MISS_SENTINEL_LRU then + data2 = nil + end + + return unlock_and_ret(lock, data2, nil, stale2 and 4 or 2) + end + end + + -- we are either the 1st worker to hold the lock, or + -- a subsequent worker whose lock has timed out before the 1st one + -- finished to run the callback + + if lerr == "timeout" then + local errmsg = "could not acquire callback lock: timeout" + + -- no stale data nor desire to resurrect it + if not went_stale or not resurrect_ttl then + return nil, errmsg + end + + -- do not resurrect the value here (another worker is running the + -- callback and will either get the new value, or resurrect it for + -- us if the callback fails) + + ngx_log(WARN, errmsg) + + -- went_stale is true, hence the value cannot be set in the LRU + -- cache, and cannot be CACHE_MISS_SENTINEL_LRU + + return data, nil, 4 + end + + -- still not in shm, we are the 1st worker to hold the lock, and thus + -- responsible for running the callback + + local pok, perr, err, new_ttl = xpcall(cb, traceback, ...) + if not pok then + return unlock_and_ret(lock, nil, "callback threw an error: " .. 
+ tostring(perr)) + end + + if err then + -- callback returned nil + err + + -- be resilient in case callbacks return wrong error type + err = tostring(err) + + -- no stale data nor desire to resurrect it + if not went_stale or not resurrect_ttl then + return unlock_and_ret(lock, perr, err) + end + + -- we got 'data' from the shm, even though it is stale + -- 1. log as warn that the callback returned an error + -- 2. resurrect: insert it back into shm if 'resurrect_ttl' + -- 3. signify the staleness with a high hit_lvl of '4' + + ngx_log(WARN, "callback returned an error (", err, ") but stale ", + "value found in shm will be resurrected for ", + resurrect_ttl, "s (resurrect_ttl)") + + local res_data, res_err = set_shm_set_lru(self, key, shm_key, + data, resurrect_ttl, + resurrect_ttl, + SHM_FLAGS.stale, + shm_set_tries, l1_serializer) + if res_err then + ngx_log(WARN, "could not resurrect stale data (", res_err, ")") + end + + if res_data == CACHE_MISS_SENTINEL_LRU then + res_data = nil + end + + return unlock_and_ret(lock, res_data, nil, 4) + end + + -- successful callback run returned 'data, nil, new_ttl?' + + data = perr + + -- override ttl / neg_ttl + + if type(new_ttl) == "number" then + if new_ttl < 0 then + -- bypass cache + return unlock_and_ret(lock, data, nil, 3) + end + + if data == nil then + neg_ttl = new_ttl + + else + ttl = new_ttl + end + end + + data, err = set_shm_set_lru(self, key, shm_key, data, ttl, neg_ttl, nil, + shm_set_tries, l1_serializer) + if err then + return unlock_and_ret(lock, nil, err) + end + + if data == CACHE_MISS_SENTINEL_LRU then + data = nil + end + + -- unlock and return + + return unlock_and_ret(lock, data, nil, 3) +end + + +function _M:get(key, opts, cb, ...) 
+ if type(key) ~= "string" then + error("key must be a string", 2) + end + + if cb ~= nil and type(cb) ~= "function" then + error("callback must be nil or a function", 2) + end + + -- worker LRU cache retrieval + + local data = self.lru:get(key) + if data == CACHE_MISS_SENTINEL_LRU then + return nil, nil, 1 + end + + if data ~= nil then + return data, nil, 1 + end + + -- not in worker's LRU cache, need shm lookup + + -- restrict this key to the current namespace, so we isolate this + -- mlcache instance from potential other instances using the same + -- shm + local namespaced_key = self.name .. key + + -- opts validation + + local ttl, neg_ttl, resurrect_ttl, l1_serializer, shm_set_tries = + check_opts(self, opts) + + local err, went_stale, is_stale + data, err, went_stale, is_stale = get_shm_set_lru(self, key, namespaced_key, + l1_serializer) + if err then + return nil, err + end + + if data ~= nil and not went_stale then + if data == CACHE_MISS_SENTINEL_LRU then + data = nil + end + + return data, nil, is_stale and 4 or 2 + end + + -- not in shm either + + if cb == nil then + -- no L3 callback, early exit + return nil, nil, -1 + end + + -- L3 callback, single worker to run it + + return run_callback(self, key, namespaced_key, data, ttl, neg_ttl, + went_stale, l1_serializer, resurrect_ttl, + shm_set_tries, cb, ...) 
+end + + +do +local function run_thread(self, ops, from, to) + for i = from, to do + local ctx = ops[i] + + ctx.data, ctx.err, ctx.hit_lvl = run_callback(self, ctx.key, + ctx.shm_key, ctx.data, + ctx.ttl, ctx.neg_ttl, + ctx.went_stale, + ctx.l1_serializer, + ctx.resurrect_ttl, + ctx.shm_set_tries, + ctx.cb, ctx.arg) + end +end + + +local bulk_mt = {} +bulk_mt.__index = bulk_mt + + +function _M.new_bulk(n_ops) + local bulk = new_tab((n_ops or 2) * 4, 1) -- 4 slots per op + bulk.n = 0 + + return setmetatable(bulk, bulk_mt) +end + + +function bulk_mt:add(key, opts, cb, arg) + local i = (self.n * 4) + 1 + self[i] = key + self[i + 1] = opts + self[i + 2] = cb + self[i + 3] = arg + self.n = self.n + 1 +end + + +local function bulk_res_iter(res, i) + local idx = i * 3 + 1 + if idx > res.n then + return + end + + i = i + 1 + + local data = res[idx] + local err = res[idx + 1] + local hit_lvl = res[idx + 2] + + return i, data, err, hit_lvl +end + + +function _M.each_bulk_res(res) + if not res.n then + error("res must have res.n field; is this a get_bulk() result?", 2) + end + + return bulk_res_iter, res, 0 +end + + +function _M:get_bulk(bulk, opts) + if type(bulk) ~= "table" then + error("bulk must be a table", 2) + end + + if not bulk.n then + error("bulk must have n field", 2) + end + + if opts then + if type(opts) ~= "table" then + error("opts must be a table", 2) + end + + if opts.concurrency then + if type(opts.concurrency) ~= "number" then + error("opts.concurrency must be a number", 2) + end + + if opts.concurrency <= 0 then + error("opts.concurrency must be > 0", 2) + end + end + end + + local n_bulk = bulk.n * 4 + local res = new_tab(n_bulk - n_bulk / 4, 1) + local res_idx = 1 + + -- only used if running L3 callbacks + local n_cbs = 0 + local cb_ctxs + + -- bulk + -- { "key", opts, cb, arg } + -- + -- res + -- { data, "err", hit_lvl } + + for i = 1, n_bulk, 4 do + local b_key = bulk[i] + local b_opts = bulk[i + 1] + local b_cb = bulk[i + 2] + + if type(b_key) ~= 
"string" then + error("key at index " .. i .. " must be a string for operation " .. + ceil(i / 4) .. " (got " .. type(b_key) .. ")", 2) + end + + if type(b_cb) ~= "function" then + error("callback at index " .. i + 2 .. " must be a function " .. + "for operation " .. ceil(i / 4) .. " (got " .. type(b_cb) .. + ")", 2) + end + + -- worker LRU cache retrieval + + local data = self.lru:get(b_key) + if data ~= nil then + if data == CACHE_MISS_SENTINEL_LRU then + data = nil + end + + res[res_idx] = data + --res[res_idx + 1] = nil + res[res_idx + 2] = 1 + + else + local pok, ttl, neg_ttl, resurrect_ttl, l1_serializer, shm_set_tries + = pcall(check_opts, self, b_opts) + if not pok then + -- strip the stacktrace + local err = ttl:match("mlcache%.lua:%d+:%s(.*)") + error("options at index " .. i + 1 .. " for operation " .. + ceil(i / 4) .. " are invalid: " .. err, 2) + end + + -- not in worker's LRU cache, need shm lookup + -- we will prepare a task for each cache miss + local namespaced_key = self.name .. 
b_key + + local err, went_stale, is_stale + data, err, went_stale, is_stale = get_shm_set_lru(self, b_key, + namespaced_key, + l1_serializer) + if err then + --res[res_idx] = nil + res[res_idx + 1] = err + --res[res_idx + 2] = nil + + elseif data ~= nil and not went_stale then + if data == CACHE_MISS_SENTINEL_LRU then + data = nil + end + + res[res_idx] = data + --res[res_idx + 1] = nil + res[res_idx + 2] = is_stale and 4 or 2 + + else + -- not in shm either, we have to prepare a task to run the + -- L3 callback + + n_cbs = n_cbs + 1 + + if n_cbs == 1 then + cb_ctxs = tablepool.fetch("bulk_cb_ctxs", 1, 0) + end + + local ctx = tablepool.fetch("bulk_cb_ctx", 0, 15) + ctx.res_idx = res_idx + ctx.cb = b_cb + ctx.arg = bulk[i + 3] -- arg + ctx.key = b_key + ctx.shm_key = namespaced_key + ctx.data = data + ctx.ttl = ttl + ctx.neg_ttl = neg_ttl + ctx.went_stale = went_stale + ctx.l1_serializer = l1_serializer + ctx.resurrect_ttl = resurrect_ttl + ctx.shm_set_tries = shm_set_tries + ctx.data = data + ctx.err = nil + ctx.hit_lvl = nil + + cb_ctxs[n_cbs] = ctx + end + end + + res_idx = res_idx + 3 + end + + if n_cbs == 0 then + -- no callback to run, all items were in L1/L2 + res.n = res_idx - 1 + return res + end + + -- some L3 callbacks have to run + -- schedule threads as per our concurrency settings + -- we will use this thread as well + + local concurrency + if opts then + concurrency = opts.concurrency + end + + if not concurrency then + concurrency = BULK_DEFAULT_CONCURRENCY + end + + local threads + local threads_idx = 0 + + do + -- spawn concurrent threads + local thread_size + local n_threads = min(n_cbs, concurrency) - 1 + + if n_threads > 0 then + threads = tablepool.fetch("bulk_threads", n_threads, 0) + thread_size = ceil(n_cbs / concurrency) + end + + if self.debug then + ngx.log(ngx.DEBUG, "spawning ", n_threads, " threads to run ", + n_cbs, " callbacks") + end + + local from = 1 + local rest = n_cbs + + for i = 1, n_threads do + local to + if rest >= 
thread_size then + rest = rest - thread_size + to = from + thread_size - 1 + else + rest = 0 + to = from + end + + if self.debug then + ngx.log(ngx.DEBUG, "thread ", i, " running callbacks ", from, + " to ", to) + end + + threads_idx = threads_idx + 1 + threads[i] = thread_spawn(run_thread, self, cb_ctxs, from, to) + + from = from + thread_size + + if rest == 0 then + break + end + end + + if rest > 0 then + -- use this thread as one of our concurrent threads + local to = from + rest - 1 + + if self.debug then + ngx.log(ngx.DEBUG, "main thread running callbacks ", from, + " to ", to) + end + + run_thread(self, cb_ctxs, from, to) + end + end + + -- wait for other threads + + for i = 1, threads_idx do + local ok, err = thread_wait(threads[i]) + if not ok then + -- when thread_wait() fails, we don't get res_idx, and thus + -- cannot populate the appropriate res indexes with the + -- error + ngx_log(ERR, "failed to wait for thread number ", i, ": ", err) + end + end + + for i = 1, n_cbs do + local ctx = cb_ctxs[i] + local ctx_res_idx = ctx.res_idx + + res[ctx_res_idx] = ctx.data + res[ctx_res_idx + 1] = ctx.err + res[ctx_res_idx + 2] = ctx.hit_lvl + + tablepool.release("bulk_cb_ctx", ctx, true) -- no clear tab + end + + tablepool.release("bulk_cb_ctxs", cb_ctxs) + + if threads then + tablepool.release("bulk_threads", threads) + end + + res.n = res_idx - 1 + + return res +end + + +end -- get_bulk() + + +function _M:peek(key, stale) + if type(key) ~= "string" then + error("key must be a string", 2) + end + + -- restrict this key to the current namespace, so we isolate this + -- mlcache instance from potential other instances using the same + -- shm + local namespaced_key = self.name .. key + + local v, err, went_stale = self.dict:get_stale(namespaced_key) + if v == nil and err then + -- err can be 'flags' upon successful get_stale() calls, so we + -- also check v == nil + return nil, "could not read from lua_shared_dict: " .. 
err + end + + -- if we specified shm_miss, it might be a negative hit cached + -- there + if self.dict_miss and v == nil then + v, err, went_stale = self.dict_miss:get_stale(namespaced_key) + if v == nil and err then + -- err can be 'flags' upon successful get_stale() calls, so we + -- also check v == nil + return nil, "could not read from lua_shared_dict: " .. err + end + end + + if went_stale and not stale then + return nil + end + + if v ~= nil then + local value, err, at, ttl = unmarshall_from_shm(v) + if err then + return nil, "could not deserialize value after lua_shared_dict " .. + "retrieval: " .. err + end + + local remaining_ttl = ttl - (now() - at) + + return remaining_ttl, nil, value, went_stale + end +end + + +function _M:set(key, opts, value) + if not self.broadcast then + error("no ipc to propagate update, specify opts.ipc_shm or opts.ipc", 2) + end + + if type(key) ~= "string" then + error("key must be a string", 2) + end + + do + -- restrict this key to the current namespace, so we isolate this + -- mlcache instance from potential other instances using the same + -- shm + local ttl, neg_ttl, _, l1_serializer, shm_set_tries = check_opts(self, + opts) + local namespaced_key = self.name .. key + + if self.dict_miss then + -- since we specified a separate shm for negative caches, we + -- must make sure that we clear any value that may have been + -- set in the other shm + local dict = value == nil and self.dict or self.dict_miss + + -- TODO: there is a potential race-condition here between this + -- :delete() and the subsequent :set() in set_shm() + local ok, err = dict:delete(namespaced_key) + if not ok then + return nil, "could not delete from shm: " .. 
err + end + end + + local _, err = set_shm_set_lru(self, key, namespaced_key, value, ttl, + neg_ttl, nil, shm_set_tries, + l1_serializer, true) + if err then + return nil, err + end + end + + local _, err = self.broadcast(self.events.invalidation.channel, key) + if err then + return nil, "could not broadcast update: " .. err + end + + return true +end + + +function _M:delete(key) + if not self.broadcast then + error("no ipc to propagate deletion, specify opts.ipc_shm or opts.ipc", + 2) + end + + if type(key) ~= "string" then + error("key must be a string", 2) + end + + -- delete from shm first + do + -- restrict this key to the current namespace, so we isolate this + -- mlcache instance from potential other instances using the same + -- shm + local namespaced_key = self.name .. key + + local ok, err = self.dict:delete(namespaced_key) + if not ok then + return nil, "could not delete from shm: " .. err + end + + -- instance uses shm_miss for negative caches, since we don't know + -- where the cached value is (is it nil or not?), we must remove it + -- from both + if self.dict_miss then + ok, err = self.dict_miss:delete(namespaced_key) + if not ok then + return nil, "could not delete from shm: " .. err + end + end + end + + -- delete from LRU and propagate + self.lru:delete(key) + + local _, err = self.broadcast(self.events.invalidation.channel, key) + if err then + return nil, "could not broadcast deletion: " .. err + end + + return true +end + + +function _M:purge(flush_expired) + if not self.broadcast then + error("no ipc to propagate purge, specify opts.ipc_shm or opts.ipc", 2) + end + + if not self.lru.flush_all and LRU_INSTANCES[self.name] ~= self.lru then + error("cannot purge when using custom LRU cache with " .. 
+ "OpenResty < 1.13.6.2", 2) + end + + -- clear shm first + self.dict:flush_all() + + -- clear negative caches shm if specified + if self.dict_miss then + self.dict_miss:flush_all() + end + + if flush_expired then + self.dict:flush_expired() + + if self.dict_miss then + self.dict_miss:flush_expired() + end + end + + -- clear LRU content and propagate + rebuild_lru(self) + + local _, err = self.broadcast(self.events.purge.channel, "") + if err then + return nil, "could not broadcast purge: " .. err + end + + return true +end + + +function _M:update(timeout) + if not self.poll then + error("no polling configured, specify opts.ipc_shm or opts.ipc.poll", 2) + end + + local _, err = self.poll(timeout) + if err then + return nil, "could not poll ipc events: " .. err + end + + return true +end + + +return _M diff --git a/kong/resty/mlcache/ipc.lua b/kong/resty/mlcache/ipc.lua new file mode 100644 index 000000000000..8a7916c42846 --- /dev/null +++ b/kong/resty/mlcache/ipc.lua @@ -0,0 +1,257 @@ +-- vim: ts=4 sts=4 sw=4 et: + +local ERR = ngx.ERR +local WARN = ngx.WARN +local INFO = ngx.INFO +local sleep = ngx.sleep +local shared = ngx.shared +local worker_pid = ngx.worker.pid +local ngx_log = ngx.log +local fmt = string.format +local sub = string.sub +local find = string.find +local min = math.min +local type = type +local pcall = pcall +local error = error +local insert = table.insert +local tonumber = tonumber +local setmetatable = setmetatable + + +local INDEX_KEY = "lua-resty-ipc:index" +local FORCIBLE_KEY = "lua-resty-ipc:forcible" +local POLL_SLEEP_RATIO = 2 + + +local function marshall(worker_pid, channel, data) + return fmt("%d:%d:%s%s", worker_pid, #data, channel, data) +end + + +local function unmarshall(str) + local sep_1 = find(str, ":", nil , true) + local sep_2 = find(str, ":", sep_1 + 1, true) + + local pid = tonumber(sub(str, 1 , sep_1 - 1)) + local data_len = tonumber(sub(str, sep_1 + 1, sep_2 - 1)) + + local channel_last_pos = #str - data_len + + local 
channel = sub(str, sep_2 + 1, channel_last_pos) + local data = sub(str, channel_last_pos + 1) + + return pid, channel, data +end + + +local function log(lvl, ...) + return ngx_log(lvl, "[ipc] ", ...) +end + + +local _M = {} +local mt = { __index = _M } + + +function _M.new(shm, debug) + local dict = shared[shm] + if not dict then + return nil, "no such lua_shared_dict: " .. shm + end + + local self = { + dict = dict, + pid = debug and 0 or worker_pid(), + idx = 0, + callbacks = {}, + } + + return setmetatable(self, mt) +end + + +function _M:subscribe(channel, cb) + if type(channel) ~= "string" then + error("channel must be a string", 2) + end + + if type(cb) ~= "function" then + error("callback must be a function", 2) + end + + if not self.callbacks[channel] then + self.callbacks[channel] = { cb } + + else + insert(self.callbacks[channel], cb) + end +end + + +function _M:broadcast(channel, data) + if type(channel) ~= "string" then + error("channel must be a string", 2) + end + + if type(data) ~= "string" then + error("data must be a string", 2) + end + + local marshalled_event = marshall(worker_pid(), channel, data) + + local idx, err = self.dict:incr(INDEX_KEY, 1, 0) + if not idx then + return nil, "failed to increment index: " .. err + end + + local ok, err, forcible = self.dict:set(idx, marshalled_event) + if not ok then + return nil, "failed to insert event in shm: " .. err + end + + if forcible then + -- take note that eviction has started + -- we repeat this flagging to avoid this key from ever being + -- evicted itself + local ok, err = self.dict:set(FORCIBLE_KEY, true) + if not ok then + return nil, "failed to set forcible flag in shm: " .. err + end + end + + return true +end + + +-- Note: if this module were to be used by users (that is, users can implement +-- their own pub/sub events and thus, callbacks), this method would then need +-- to consider the time spent in callbacks to prevent long running callbacks +-- from penalizing the worker. 
+-- Since this module is currently only used by mlcache, whose callback is an +-- shm operation, we only worry about the time spent waiting for events +-- between the 'incr()' and 'set()' race condition. +function _M:poll(timeout) + if timeout ~= nil and type(timeout) ~= "number" then + error("timeout must be a number", 2) + end + + local shm_idx, err = self.dict:get(INDEX_KEY) + if err then + return nil, "failed to get index: " .. err + end + + if shm_idx == nil then + -- no events to poll yet + return true + end + + if type(shm_idx) ~= "number" then + return nil, "index is not a number, shm tampered with" + end + + if not timeout then + timeout = 0.3 + end + + if self.idx == 0 then + local forcible, err = self.dict:get(FORCIBLE_KEY) + if err then + return nil, "failed to get forcible flag from shm: " .. err + end + + if forcible then + -- shm lru eviction occurred, we are likely a new worker + -- skip indexes that may have been evicted and resume current + -- polling idx + self.idx = shm_idx - 1 + end + + else + -- guard: self.idx <= shm_idx + self.idx = min(self.idx, shm_idx) + end + + local elapsed = 0 + + for _ = self.idx, shm_idx - 1 do + -- fetch event from shm with a retry policy in case + -- we run our :get() in between another worker's + -- :incr() and :set() + + local v + local idx = self.idx + 1 + + do + local perr + local pok = true + local sleep_step = 0.001 + + while elapsed < timeout do + v, err = self.dict:get(idx) + if v ~= nil or err then + break + end + + if pok then + log(INFO, "no event data at index '", idx, "', ", + "retrying in: ", sleep_step, "s") + + -- sleep is not available in all ngx_lua contexts + -- if we fail once, never retry to sleep + pok, perr = pcall(sleep, sleep_step) + if not pok then + log(WARN, "could not sleep before retry: ", perr, + " (note: it is safer to call this function ", + "in contexts that support the ngx.sleep() ", + "API)") + end + end + + elapsed = elapsed + sleep_step + sleep_step = min(sleep_step * 
POLL_SLEEP_RATIO, + timeout - elapsed) + end + end + + -- fetch next event on next iteration + -- even if we timeout, we might miss 1 event (we return in timeout and + -- we don't retry that event), but it's better than being stuck forever + -- on an event that might have been evicted from the shm. + self.idx = idx + + if elapsed >= timeout then + return nil, "timeout" + end + + if err then + log(ERR, "could not get event at index '", self.idx, "': ", err) + + elseif type(v) ~= "string" then + log(ERR, "event at index '", self.idx, "' is not a string, ", + "shm tampered with") + + else + local pid, channel, data = unmarshall(v) + + if self.pid ~= pid then + -- coming from another worker + local cbs = self.callbacks[channel] + if cbs then + for j = 1, #cbs do + local pok, perr = pcall(cbs[j], data) + if not pok then + log(ERR, "callback for channel '", channel, + "' threw a Lua error: ", perr) + end + end + end + end + end + end + + return true +end + + +return _M diff --git a/kong/runloop/certificate.lua b/kong/runloop/certificate.lua index 1d999f381d9b..53da6b3d8d35 100644 --- a/kong/runloop/certificate.lua +++ b/kong/runloop/certificate.lua @@ -1,6 +1,6 @@ local ngx_ssl = require "ngx.ssl" local pl_utils = require "pl.utils" -local mlcache = require "resty.mlcache" +local mlcache = require "kong.resty.mlcache" local new_tab = require "table.new" local openssl_x509_store = require "resty.openssl.x509.store" local openssl_x509 = require "resty.openssl.x509" diff --git a/t/05-mlcache/00-ipc.t b/t/05-mlcache/00-ipc.t new file mode 100644 index 000000000000..a808ead53004 --- /dev/null +++ b/t/05-mlcache/00-ipc.t @@ -0,0 +1,717 @@ +# vim:set ts=4 sts=4 sw=4 et ft=: + +use Test::Nginx::Socket::Lua; +use Cwd qw(cwd); + +workers(1); + +plan tests => repeat_each() * (blocks() * 5); + +our $pwd = cwd(); + +our $HttpConfig = qq{ + lua_package_path "$pwd/lib/?.lua;;"; + lua_shared_dict ipc 1m; + + init_by_lua_block { + -- local verbose = true + local verbose = false + local 
outfile = "$Test::Nginx::Util::ErrLogFile" + -- local outfile = "/tmp/v.log" + if verbose then + local dump = require "jit.dump" + dump.on(nil, outfile) + else + local v = require "jit.v" + v.on(outfile) + end + + require "resty.core" + -- jit.opt.start("hotloop=1") + -- jit.opt.start("loopunroll=1000000") + -- jit.off() + } +}; + +run_tests(); + +__DATA__ + +=== TEST 1: new() ensures shm exists +--- http_config eval: $::HttpConfig +--- config + location = /t { + content_by_lua_block { + local mlcache_ipc = require "kong.resty.mlcache.ipc" + + local ipc, err = mlcache_ipc.new("foo") + ngx.say(err) + } + } +--- request +GET /t +--- response_body +no such lua_shared_dict: foo +--- no_error_log +[error] + + + +=== TEST 2: broadcast() sends an event through shm +--- http_config eval +qq{ + $::HttpConfig + + init_worker_by_lua_block { + local mlcache_ipc = require "kong.resty.mlcache.ipc" + + ipc = assert(mlcache_ipc.new("ipc", true)) + + ipc:subscribe("my_channel", function(data) + ngx.log(ngx.NOTICE, "received event from my_channel: ", data) + end) + } +} +--- config + location = /t { + content_by_lua_block { + assert(ipc:broadcast("my_channel", "hello world")) + + assert(ipc:poll()) + } + } +--- request +GET /t +--- response_body + +--- no_error_log +[error] +--- error_log +received event from my_channel: hello world + + + +=== TEST 3: broadcast() runs event callback in protected mode +--- http_config eval +qq{ + $::HttpConfig + + init_worker_by_lua_block { + local mlcache_ipc = require "kong.resty.mlcache.ipc" + + ipc = assert(mlcache_ipc.new("ipc", true)) + + ipc:subscribe("my_channel", function(data) + error("my callback had an error") + end) + } +} +--- config + location = /t { + content_by_lua_block { + assert(ipc:broadcast("my_channel", "hello world")) + + assert(ipc:poll()) + } + } +--- request +GET /t +--- response_body + +--- error_log eval +qr/\[error\] .*? 
\[ipc\] callback for channel 'my_channel' threw a Lua error: init_worker_by_lua:\d: my callback had an error/ +--- no_error_log +lua entry thread aborted: runtime error + + + +=== TEST 4: poll() catches invalid timeout arg +--- http_config eval +qq{ + $::HttpConfig + + init_worker_by_lua_block { + local mlcache_ipc = require "kong.resty.mlcache.ipc" + + ipc = assert(mlcache_ipc.new("ipc", true)) + } +} +--- config + location = /t { + content_by_lua_block { + local ok, err = pcall(ipc.poll, ipc, false) + if not ok then + ngx.say(err) + end + } + } +--- request +GET /t +--- response_body +timeout must be a number +--- no_error_log +[error] + + + +=== TEST 5: poll() catches up with all events +--- http_config eval +qq{ + $::HttpConfig + + init_worker_by_lua_block { + local mlcache_ipc = require "kong.resty.mlcache.ipc" + + ipc = assert(mlcache_ipc.new("ipc", true)) + + ipc:subscribe("my_channel", function(data) + ngx.log(ngx.NOTICE, "received event from my_channel: ", data) + end) + } +} +--- config + location = /t { + content_by_lua_block { + assert(ipc:broadcast("my_channel", "msg 1")) + assert(ipc:broadcast("my_channel", "msg 2")) + assert(ipc:broadcast("my_channel", "msg 3")) + + assert(ipc:poll()) + } + } +--- request +GET /t +--- response_body + +--- no_error_log +[error] +--- error_log +received event from my_channel: msg 1 +received event from my_channel: msg 2 +received event from my_channel: msg 3 + + + +=== TEST 6: poll() resumes to current idx if events were previously evicted +This ensures new workers spawned during a master process' lifecycle do not +attempt to replay all events from index 0. 
+https://github.com/thibaultcha/lua-resty-mlcache/issues/87 +https://github.com/thibaultcha/lua-resty-mlcache/issues/93 +--- http_config eval +qq{ + lua_package_path "$::pwd/lib/?.lua;;"; + lua_shared_dict ipc 32k; + + init_by_lua_block { + require "resty.core" + local mlcache_ipc = require "kong.resty.mlcache.ipc" + + ipc = assert(mlcache_ipc.new("ipc", true)) + + ipc:subscribe("my_channel", function(data) + ngx.log(ngx.NOTICE, "my_channel event: ", data) + end) + + for i = 1, 32 do + -- fill shm, simulating busy workers + -- this must trigger eviction for this test to succeed + assert(ipc:broadcast("my_channel", string.rep(".", 2^10))) + end + } +} +--- config + location = /t { + content_by_lua_block { + ngx.say("ipc.idx: ", ipc.idx) + + assert(ipc:broadcast("my_channel", "first broadcast")) + assert(ipc:broadcast("my_channel", "second broadcast")) + + -- first poll without new() to simulate new worker + assert(ipc:poll()) + + -- ipc.idx set to shm_idx-1 ("second broadcast") + ngx.say("ipc.idx: ", ipc.idx) + } + } +--- request +GET /t +--- response_body +ipc.idx: 0 +ipc.idx: 34 +--- error_log +my_channel event: second broadcast +--- no_error_log +my_channel event: first broadcast +[error] + + + +=== TEST 7: poll() does not execute events from self (same pid) +--- http_config eval +qq{ + $::HttpConfig + + init_worker_by_lua_block { + local mlcache_ipc = require "kong.resty.mlcache.ipc" + + ipc = assert(mlcache_ipc.new("ipc")) + + ipc:subscribe("my_channel", function(data) + ngx.log(ngx.NOTICE, "received event from my_channel: ", data) + end) + } +} +--- config + location = /t { + content_by_lua_block { + assert(ipc:broadcast("my_channel", "hello world")) + + assert(ipc:poll()) + } + } +--- request +GET /t +--- response_body + +--- no_error_log +[error] +received event from my_channel: hello world + + + +=== TEST 8: poll() runs all registered callbacks for a channel +--- http_config eval +qq{ + $::HttpConfig + + init_worker_by_lua_block { + local mlcache_ipc = 
require "kong.resty.mlcache.ipc" + + ipc = assert(mlcache_ipc.new("ipc", true)) + + ipc:subscribe("my_channel", function(data) + ngx.log(ngx.NOTICE, "callback 1 from my_channel: ", data) + end) + + ipc:subscribe("my_channel", function(data) + ngx.log(ngx.NOTICE, "callback 2 from my_channel: ", data) + end) + + ipc:subscribe("my_channel", function(data) + ngx.log(ngx.NOTICE, "callback 3 from my_channel: ", data) + end) + } +} +--- config + location = /t { + content_by_lua_block { + assert(ipc:broadcast("my_channel", "hello world")) + + assert(ipc:poll()) + } + } +--- request +GET /t +--- response_body + +--- no_error_log +[error] +--- error_log +callback 1 from my_channel: hello world +callback 2 from my_channel: hello world +callback 3 from my_channel: hello world + + + +=== TEST 9: poll() exits when no event to poll +--- http_config eval +qq{ + $::HttpConfig + + init_worker_by_lua_block { + local mlcache_ipc = require "kong.resty.mlcache.ipc" + + ipc = assert(mlcache_ipc.new("ipc", true)) + + ipc:subscribe("my_channel", function(data) + ngx.log(ngx.NOTICE, "callback from my_channel: ", data) + end) + } +} +--- config + location = /t { + content_by_lua_block { + assert(ipc:poll()) + } + } +--- request +GET /t +--- response_body + +--- no_error_log +[error] +callback from my_channel: hello world + + + +=== TEST 10: poll() runs all callbacks from all channels +--- http_config eval +qq{ + $::HttpConfig + + init_worker_by_lua_block { + local mlcache_ipc = require "kong.resty.mlcache.ipc" + + ipc = assert(mlcache_ipc.new("ipc", true)) + + ipc:subscribe("my_channel", function(data) + ngx.log(ngx.NOTICE, "callback 1 from my_channel: ", data) + end) + + ipc:subscribe("my_channel", function(data) + ngx.log(ngx.NOTICE, "callback 2 from my_channel: ", data) + end) + + ipc:subscribe("other_channel", function(data) + ngx.log(ngx.NOTICE, "callback 1 from other_channel: ", data) + end) + + ipc:subscribe("other_channel", function(data) + ngx.log(ngx.NOTICE, "callback 2 from 
other_channel: ", data) + end) + } +} +--- config + location = /t { + content_by_lua_block { + assert(ipc:broadcast("my_channel", "hello world")) + assert(ipc:broadcast("other_channel", "hello ipc")) + assert(ipc:broadcast("other_channel", "hello ipc 2")) + + assert(ipc:poll()) + } + } +--- request +GET /t +--- response_body + +--- no_error_log +[error] +--- error_log +callback 1 from my_channel: hello world +callback 2 from my_channel: hello world +callback 1 from other_channel: hello ipc +callback 2 from other_channel: hello ipc +callback 1 from other_channel: hello ipc 2 +callback 2 from other_channel: hello ipc 2 + + + +=== TEST 11: poll() catches tampered shm (by third-party users) +--- http_config eval +qq{ + $::HttpConfig + + init_worker_by_lua_block { + local mlcache_ipc = require "kong.resty.mlcache.ipc" + + ipc = assert(mlcache_ipc.new("ipc", true)) + } +} +--- config + location = /t { + content_by_lua_block { + assert(ipc:broadcast("my_channel", "msg 1")) + + assert(ngx.shared.ipc:set("lua-resty-ipc:index", false)) + + local ok, err = ipc:poll() + if not ok then + ngx.say(err) + end + } + } +--- request +GET /t +--- response_body +index is not a number, shm tampered with +--- no_error_log +[error] + + + +=== TEST 12: poll() retries getting an event until timeout +--- http_config eval +qq{ + $::HttpConfig + + init_worker_by_lua_block { + local mlcache_ipc = require "kong.resty.mlcache.ipc" + + ipc = assert(mlcache_ipc.new("ipc", true)) + } +} +--- config + location = /t { + content_by_lua_block { + assert(ipc:broadcast("my_channel", "msg 1")) + + ngx.shared.ipc:delete(1) + ngx.shared.ipc:flush_expired() + + local ok, err = ipc:poll() + if not ok then + ngx.log(ngx.ERR, "could not poll: ", err) + end + } + } +--- request +GET /t +--- response_body + +--- error_log eval +[ + qr/\[info\] .*? \[ipc\] no event data at index '1', retrying in: 0\.001s/, + qr/\[info\] .*? \[ipc\] no event data at index '1', retrying in: 0\.002s/, + qr/\[info\] .*? 
\[ipc\] no event data at index '1', retrying in: 0\.004s/, + qr/\[info\] .*? \[ipc\] no event data at index '1', retrying in: 0\.008s/, + qr/\[info\] .*? \[ipc\] no event data at index '1', retrying in: 0\.016s/, + qr/\[info\] .*? \[ipc\] no event data at index '1', retrying in: 0\.032s/, + qr/\[info\] .*? \[ipc\] no event data at index '1', retrying in: 0\.064s/, + qr/\[info\] .*? \[ipc\] no event data at index '1', retrying in: 0\.128s/, + qr/\[info\] .*? \[ipc\] no event data at index '1', retrying in: 0\.045s/, + qr/\[error\] .*? could not poll: timeout/, +] + + + +=== TEST 13: poll() reaches custom timeout +--- http_config eval +qq{ + $::HttpConfig + + init_worker_by_lua_block { + local mlcache_ipc = require "kong.resty.mlcache.ipc" + + ipc = assert(mlcache_ipc.new("ipc", true)) + } +} +--- config + location = /t { + content_by_lua_block { + assert(ipc:broadcast("my_channel", "msg 1")) + + ngx.shared.ipc:delete(1) + ngx.shared.ipc:flush_expired() + + local ok, err = ipc:poll(0.01) + if not ok then + ngx.log(ngx.ERR, "could not poll: ", err) + end + } + } +--- request +GET /t +--- response_body + +--- error_log eval +[ + qr/\[info\] .*? \[ipc\] no event data at index '1', retrying in: 0\.001s/, + qr/\[info\] .*? \[ipc\] no event data at index '1', retrying in: 0\.002s/, + qr/\[info\] .*? \[ipc\] no event data at index '1', retrying in: 0\.004s/, + qr/\[info\] .*? \[ipc\] no event data at index '1', retrying in: 0\.003s/, + qr/\[error\] .*? 
could not poll: timeout/, +] + + + +=== TEST 14: poll() logs errors and continue if event has been tampered with +--- http_config eval +qq{ + $::HttpConfig + + init_worker_by_lua_block { + local mlcache_ipc = require "kong.resty.mlcache.ipc" + + ipc = assert(mlcache_ipc.new("ipc", true)) + + ipc:subscribe("my_channel", function(data) + ngx.log(ngx.NOTICE, "callback from my_channel: ", data) + end) + } +} +--- config + location = /t { + content_by_lua_block { + assert(ipc:broadcast("my_channel", "msg 1")) + assert(ipc:broadcast("my_channel", "msg 2")) + + assert(ngx.shared.ipc:set(1, false)) + + assert(ipc:poll()) + } + } +--- request +GET /t +--- response_body + +--- error_log eval +[ + qr/\[error\] .*? \[ipc\] event at index '1' is not a string, shm tampered with/, + qr/\[notice\] .*? callback from my_channel: msg 2/, +] + + + +=== TEST 15: poll() is safe to be called in contexts that don't support ngx.sleep() +--- http_config eval +qq{ + $::HttpConfig + + init_worker_by_lua_block { + local mlcache_ipc = require "kong.resty.mlcache.ipc" + + ipc = assert(mlcache_ipc.new("ipc", true)) + + ipc:subscribe("my_channel", function(data) + ngx.log(ngx.NOTICE, "callback from my_channel: ", data) + end) + } +} +--- config + location = /t { + return 200; + + log_by_lua_block { + assert(ipc:broadcast("my_channel", "msg 1")) + + ngx.shared.ipc:delete(1) + ngx.shared.ipc:flush_expired() + + local ok, err = ipc:poll() + if not ok then + ngx.log(ngx.ERR, "could not poll: ", err) + end + } + } +--- request +GET /t +--- response_body + +--- error_log eval +[ + qr/\[info\] .*? \[ipc\] no event data at index '1', retrying in: 0\.001s/, + qr/\[warn\] .*? \[ipc\] could not sleep before retry: API disabled in the context of log_by_lua/, + qr/\[error\] .*? 
could not poll: timeout/, +] + + + +=== TEST 16: poll() guards self.idx from growing beyond the current shm idx +--- http_config eval +qq{ + $::HttpConfig + + init_worker_by_lua_block { + local mlcache_ipc = require "kong.resty.mlcache.ipc" + + ipc = assert(mlcache_ipc.new("ipc", true)) + + ipc:subscribe("my_channel", function(data) + ngx.log(ngx.NOTICE, "callback from my_channel: ", data) + end) + } +} +--- config + location = /t { + content_by_lua_block { + assert(ipc:broadcast("other_channel", "")) + assert(ipc:poll()) + assert(ipc:broadcast("my_channel", "fist broadcast")) + assert(ipc:broadcast("other_channel", "")) + assert(ipc:broadcast("my_channel", "second broadcast")) + + -- shm idx is 5, let's mess with the instance's idx + ipc.idx = 10 + assert(ipc:poll()) + + -- we may have skipped the above events, but we are able to resume polling + assert(ipc:broadcast("other_channel", "")) + assert(ipc:broadcast("my_channel", "third broadcast")) + assert(ipc:poll()) + } + } +--- request +GET /t +--- ignore_response_body +--- error_log +callback from my_channel: third broadcast +--- no_error_log +callback from my_channel: first broadcast +callback from my_channel: second broadcast +[error] + + + +=== TEST 17: poll() JITs +--- http_config eval +qq{ + $::HttpConfig + + init_worker_by_lua_block { + local mlcache_ipc = require "kong.resty.mlcache.ipc" + + ipc = assert(mlcache_ipc.new("ipc", true)) + + ipc:subscribe("my_channel", function(data) + ngx.log(ngx.NOTICE, "callback from my_channel: ", data) + end) + } +} +--- config + location = /t { + content_by_lua_block { + for i = 1, 10e3 do + assert(ipc:poll()) + end + } + } +--- request +GET /t +--- response_body + +--- error_log eval +qr/\[TRACE\s+\d+ content_by_lua\(nginx\.conf:\d+\):2 loop\]/ + + + +=== TEST 18: broadcast() JITs +--- http_config eval +qq{ + $::HttpConfig + + init_worker_by_lua_block { + local mlcache_ipc = require "kong.resty.mlcache.ipc" + + ipc = assert(mlcache_ipc.new("ipc", true)) + + 
ipc:subscribe("my_channel", function(data) + ngx.log(ngx.NOTICE, "callback from my_channel: ", data) + end) + } +} +--- config + location = /t { + content_by_lua_block { + for i = 1, 10e3 do + assert(ipc:broadcast("my_channel", "hello world")) + end + } + } +--- request +GET /t +--- response_body + +--- error_log eval +qr/\[TRACE\s+\d+ content_by_lua\(nginx\.conf:\d+\):2 loop\]/ diff --git a/t/05-mlcache/01-new.t b/t/05-mlcache/01-new.t new file mode 100644 index 000000000000..afd4e8c9ea4d --- /dev/null +++ b/t/05-mlcache/01-new.t @@ -0,0 +1,605 @@ +# vim:set ts=4 sts=4 sw=4 et ft=: + +use Test::Nginx::Socket::Lua; +use Cwd qw(cwd); + +repeat_each(2); + +plan tests => repeat_each() * (blocks() * 3) + 4; + +my $pwd = cwd(); + +our $HttpConfig = qq{ + lua_package_path "$pwd/lib/?.lua;;"; + lua_shared_dict cache_shm 1m; +}; + +run_tests(); + +__DATA__ + +=== TEST 1: module has version number +--- http_config eval: $::HttpConfig +--- config + location = /t { + content_by_lua_block { + local mlcache = require "kong.resty.mlcache" + + ngx.say(mlcache._VERSION) + } + } +--- request +GET /t +--- response_body_like +\d+\.\d+\.\d+ +--- no_error_log +[error] + + + +=== TEST 2: new() validates name +--- http_config eval: $::HttpConfig +--- config + location = /t { + content_by_lua_block { + local mlcache = require "kong.resty.mlcache" + + local ok, err = pcall(mlcache.new) + if not ok then + ngx.log(ngx.ERR, err) + end + } + } +--- request +GET /t +--- response_body + +--- error_log +name must be a string + + + +=== TEST 3: new() validates shm +--- http_config eval: $::HttpConfig +--- config + location = /t { + content_by_lua_block { + local mlcache = require "kong.resty.mlcache" + + local ok, err = pcall(mlcache.new, "name") + if not ok then + ngx.log(ngx.ERR, err) + end + } + } +--- request +GET /t +--- response_body + +--- error_log +shm must be a string + + + +=== TEST 4: new() validates opts +--- http_config eval: $::HttpConfig +--- config + location = /t { + 
content_by_lua_block { + local mlcache = require "kong.resty.mlcache" + + local ok, err = pcall(mlcache.new, "name", "cache_shm", "foo") + if not ok then + ngx.log(ngx.ERR, err) + end + } + } +--- request +GET /t +--- response_body + +--- error_log +opts must be a table + + + +=== TEST 5: new() ensures shm exists +--- http_config eval: $::HttpConfig +--- config + location = /t { + content_by_lua_block { + local mlcache = require "kong.resty.mlcache" + + local cache, err = mlcache.new("name", "foo") + if not cache then + ngx.log(ngx.ERR, err) + end + } + } +--- request +GET /t +--- response_body + +--- error_log +no such lua_shared_dict: foo + + + +=== TEST 6: new() supports ipc_shm option and validates it +--- http_config eval: $::HttpConfig +--- config + location = /t { + content_by_lua_block { + local mlcache = require "kong.resty.mlcache" + + local ok, err = pcall(mlcache.new, "name", "cache_shm", { ipc_shm = 1 }) + if not ok then + ngx.log(ngx.ERR, err) + end + } + } +--- request +GET /t +--- response_body + +--- error_log +ipc_shm must be a string + + + +=== TEST 7: new() supports opts.ipc_shm and ensures it exists +--- http_config eval: $::HttpConfig +--- config + location = /t { + content_by_lua_block { + local mlcache = require "kong.resty.mlcache" + + local cache, err = mlcache.new("name", "cache_shm", { ipc_shm = "ipc" }) + if not cache then + ngx.log(ngx.ERR, err) + end + } + } +--- request +GET /t +--- ignore_response_body +--- error_log eval +qr/\[error\] .*? 
no such lua_shared_dict: ipc/ +--- no_error_log +[crit] + + + +=== TEST 8: new() supports ipc options and validates it +--- http_config eval: $::HttpConfig +--- config + location = /t { + content_by_lua_block { + local mlcache = require "kong.resty.mlcache" + + local ok, err = pcall(mlcache.new, "name", "cache_shm", { ipc = false }) + if not ok then + ngx.say(err) + end + } + } +--- request +GET /t +--- response_body +opts.ipc must be a table +--- no_error_log +[error] + + + +=== TEST 9: new() prevents both opts.ipc_shm and opts.ipc to be given +--- http_config eval: $::HttpConfig +--- config + location = /t { + content_by_lua_block { + local mlcache = require "kong.resty.mlcache" + + local ok, err = pcall(mlcache.new, "name", "cache_shm", { + ipc_shm = "ipc", + ipc = {} + }) + if not ok then + ngx.say(err) + end + } + } +--- request +GET /t +--- response_body +cannot specify both of opts.ipc_shm and opts.ipc +--- no_error_log +[error] + + + +=== TEST 10: new() validates ipc.register_listeners + ipc.broadcast + ipc.poll (type: custom) +--- http_config eval: $::HttpConfig +--- config + location = /t { + content_by_lua_block { + local mlcache = require "kong.resty.mlcache" + + local args = { + "register_listeners", + "broadcast", + "poll", + } + + for _, arg in ipairs(args) do + local ipc_opts = { + register_listeners = function() end, + broadcast = function() end, + poll = function() end, + } + + ipc_opts[arg] = false + + local ok, err = pcall(mlcache.new, "name", "cache_shm", { + ipc = ipc_opts, + }) + if not ok then + ngx.say(err) + end + end + } + } +--- request +GET /t +--- response_body +opts.ipc.register_listeners must be a function +opts.ipc.broadcast must be a function +opts.ipc.poll must be a function +--- no_error_log +[error] + + + +=== TEST 11: new() ipc.register_listeners can return nil + err (type: custom) +--- http_config eval: $::HttpConfig +--- config + location = /t { + content_by_lua_block { + local mlcache = require "kong.resty.mlcache" + + local 
cache, err = mlcache.new("name", "cache_shm", { + ipc = { + register_listeners = function() + return nil, "something happened" + end, + broadcast = function() end, + poll = function() end, + } + }) + if not cache then + ngx.say(err) + end + } + } +--- request +GET /t +--- response_body_like +failed to initialize custom IPC \(opts\.ipc\.register_listeners returned an error\): something happened +--- no_error_log +[error] + + + +=== TEST 12: new() calls ipc.register_listeners with events array (type: custom) +--- http_config eval: $::HttpConfig +--- config + location = /t { + content_by_lua_block { + local mlcache = require "kong.resty.mlcache" + + local cache, err = mlcache.new("name", "cache_shm", { + ipc = { + register_listeners = function(events) + local res = {} + for ev_name, ev in pairs(events) do + table.insert(res, string.format("%s | channel: %s | handler: %s", + ev_name, ev.channel, type(ev.handler))) + end + + table.sort(res) + + for i = 1, #res do + ngx.say(res[i]) + end + end, + broadcast = function() end, + poll = function() end, + } + }) + if not cache then + ngx.say(err) + end + } + } +--- request +GET /t +--- response_body +invalidation | channel: mlcache:invalidations:name | handler: function +purge | channel: mlcache:purge:name | handler: function +--- no_error_log +[error] + + + +=== TEST 13: new() ipc.poll is optional (some IPC libraries might not need it +--- http_config eval: $::HttpConfig +--- config + location = /t { + content_by_lua_block { + local mlcache = require "kong.resty.mlcache" + + local cache, err = mlcache.new("name", "cache_shm", { + ipc = { + register_listeners = function() end, + broadcast = function() end, + poll = nil + } + }) + if not cache then + ngx.say(err) + end + + ngx.say("ok") + } + } +--- request +GET /t +--- response_body +ok +--- no_error_log +[error] + + + +=== TEST 14: new() validates opts.lru_size +--- http_config eval: $::HttpConfig +--- config + location = /t { + content_by_lua_block { + local mlcache = 
require "kong.resty.mlcache" + + local ok, err = pcall(mlcache.new, "name", "cache_shm", { + lru_size = "", + }) + if not ok then + ngx.log(ngx.ERR, err) + end + } + } +--- request +GET /t +--- response_body + +--- error_log +opts.lru_size must be a number + + + +=== TEST 15: new() validates opts.ttl +--- http_config eval: $::HttpConfig +--- config + location = /t { + content_by_lua_block { + local mlcache = require "kong.resty.mlcache" + + local ok, err = pcall(mlcache.new, "name", "cache_shm", { + ttl = "" + }) + if not ok then + ngx.log(ngx.ERR, err) + end + + local ok, err = pcall(mlcache.new, "name", "cache_shm", { + ttl = -1 + }) + if not ok then + ngx.log(ngx.ERR, err) + end + } + } +--- request +GET /t +--- response_body + +--- error_log +opts.ttl must be a number +opts.ttl must be >= 0 + + + +=== TEST 16: new() validates opts.neg_ttl +--- http_config eval: $::HttpConfig +--- config + location = /t { + content_by_lua_block { + local mlcache = require "kong.resty.mlcache" + + local ok, err = pcall(mlcache.new, "name", "cache_shm", { + neg_ttl = "" + }) + if not ok then + ngx.log(ngx.ERR, err) + end + + local ok, err = pcall(mlcache.new, "name", "cache_shm", { + neg_ttl = -1 + }) + if not ok then + ngx.log(ngx.ERR, err) + end + } + } +--- request +GET /t +--- response_body + +--- error_log +opts.neg_ttl must be a number +opts.neg_ttl must be >= 0 + + + +=== TEST 17: new() validates opts.resty_lock_opts +--- http_config eval: $::HttpConfig +--- config + location = /t { + content_by_lua_block { + local mlcache = require "kong.resty.mlcache" + + local ok, err = pcall(mlcache.new, "name", "cache_shm", { + resty_lock_opts = false, + }) + if not ok then + ngx.log(ngx.ERR, err) + end + } + } +--- request +GET /t +--- response_body + +--- error_log +opts.resty_lock_opts must be a table + + + +=== TEST 18: new() validates opts.shm_set_tries +--- http_config eval: $::HttpConfig +--- config + location = /t { + content_by_lua_block { + local mlcache = require 
"kong.resty.mlcache" + + local values = { + false, + -1, + 0, + } + + for _, v in ipairs(values) do + local ok, err = pcall(mlcache.new, "name", "cache_shm", { + shm_set_tries = v, + }) + if not ok then + ngx.say(err) + end + end + } + } +--- request +GET /t +--- response_body +opts.shm_set_tries must be a number +opts.shm_set_tries must be >= 1 +opts.shm_set_tries must be >= 1 +--- no_error_log +[error] + + + +=== TEST 19: new() validates opts.shm_miss +--- http_config eval: $::HttpConfig +--- config + location = /t { + content_by_lua_block { + local mlcache = require "kong.resty.mlcache" + + local ok, err = pcall(mlcache.new, "name", "cache_shm", { + shm_miss = false, + }) + if not ok then + ngx.say(err) + end + } + } +--- request +GET /t +--- response_body +opts.shm_miss must be a string +--- no_error_log +[error] + + + +=== TEST 20: new() ensures opts.shm_miss exists +--- http_config eval: $::HttpConfig +--- config + location = /t { + content_by_lua_block { + local mlcache = require "kong.resty.mlcache" + + local ok, err = mlcache.new("name", "cache_shm", { + shm_miss = "foo", + }) + if not ok then + ngx.say(err) + end + } + } +--- request +GET /t +--- response_body +no such lua_shared_dict for opts.shm_miss: foo +--- no_error_log +[error] + + + +=== TEST 21: new() creates an mlcache object with default attributes +--- http_config eval: $::HttpConfig +--- config + location = /t { + content_by_lua_block { + local mlcache = require "kong.resty.mlcache" + + local cache, err = mlcache.new("name", "cache_shm") + if not cache then + ngx.log(ngx.ERR, err) + end + + ngx.say(type(cache)) + ngx.say(type(cache.ttl)) + ngx.say(type(cache.neg_ttl)) + } + } +--- request +GET /t +--- response_body +table +number +number +--- no_error_log +[error] + + + +=== TEST 22: new() accepts user-provided LRU instances via opts.lru +--- http_config eval: $::HttpConfig +--- config + location = /t { + content_by_lua_block { + local mlcache = require "kong.resty.mlcache" + local 
pureffi_lrucache = require "resty.lrucache.pureffi" + + local my_lru = pureffi_lrucache.new(100) + + local cache = assert(mlcache.new("name", "cache_shm", { lru = my_lru })) + + ngx.say("lru is user-provided: ", cache.lru == my_lru) + } + } +--- request +GET /t +--- response_body +lru is user-provided: true +--- no_error_log +[error] diff --git a/t/05-mlcache/02-get.t b/t/05-mlcache/02-get.t new file mode 100644 index 000000000000..85500b023e6b --- /dev/null +++ b/t/05-mlcache/02-get.t @@ -0,0 +1,2702 @@ +# vim:set ts=4 sts=4 sw=4 et ft=: + +use Test::Nginx::Socket::Lua; +use Cwd qw(cwd); +use lib '.'; +use t::Util; + +no_long_string(); + +workers(2); + +#repeat_each(2); + +plan tests => repeat_each() * (blocks() * 3) + 9; + +my $pwd = cwd(); + +our $HttpConfig = qq{ + lua_package_path "$pwd/lib/?.lua;;"; + lua_shared_dict cache_shm 1m; + lua_shared_dict cache_shm_miss 1m; + + init_by_lua_block { + -- local verbose = true + local verbose = false + local outfile = "$Test::Nginx::Util::ErrLogFile" + -- local outfile = "/tmp/v.log" + if verbose then + local dump = require "jit.dump" + dump.on(nil, outfile) + else + local v = require "jit.v" + v.on(outfile) + end + + require "resty.core" + -- jit.opt.start("hotloop=1") + -- jit.opt.start("loopunroll=1000000") + -- jit.off() + } +}; + +run_tests(); + +__DATA__ + +=== TEST 1: get() validates key +--- http_config eval: $::HttpConfig +--- config + location = /t { + content_by_lua_block { + local mlcache = require "kong.resty.mlcache" + + local cache, err = mlcache.new("my_mlcache", "cache_shm") + if not cache then + ngx.log(ngx.ERR, err) + return + end + + local ok, err = pcall(cache.get, cache) + if not ok then + ngx.say(err) + end + } + } +--- request +GET /t +--- response_body +key must be a string +--- no_error_log +[error] + + + +=== TEST 2: get() accepts callback as nil or function +--- http_config eval: $::HttpConfig +--- config + location = /t { + content_by_lua_block { + local mlcache = require 
"kong.resty.mlcache" + + local cache, err = mlcache.new("my_mlcache", "cache_shm") + if not cache then + ngx.log(ngx.ERR, err) + return + end + + local ok, err = pcall(cache.get, cache, "key", nil, nil) + if not ok then + ngx.say(err) + end + + local ok, err = pcall(cache.get, cache, "key", nil, function() end) + if not ok then + ngx.say(err) + end + } + } +--- request +GET /t +--- response_body + +--- no_error_log +[error] + + + +=== TEST 3: get() rejects callbacks not nil or function +--- http_config eval: $::HttpConfig +--- config + location = /t { + content_by_lua_block { + local mlcache = require "kong.resty.mlcache" + + local cache, err = mlcache.new("my_mlcache", "cache_shm") + if not cache then + ngx.log(ngx.ERR, err) + return + end + + local ok, err = pcall(cache.get, cache, "key", nil, "not a function") + if not ok then + ngx.say(err) + end + + local ok, err = pcall(cache.get, cache, "key", nil, false) + if not ok then + ngx.say(err) + end + } + } +--- request +GET /t +--- response_body +callback must be nil or a function +callback must be nil or a function +--- no_error_log +[error] + + + +=== TEST 4: get() validates opts +--- http_config eval: $::HttpConfig +--- config + location = /t { + content_by_lua_block { + local mlcache = require "kong.resty.mlcache" + + local cache, err = mlcache.new("my_mlcache", "cache_shm") + if not cache then + ngx.log(ngx.ERR, err) + return + end + + local ok, err = pcall(cache.get, cache, "key", "opts") + if not ok then + ngx.say(err) + end + } + } +--- request +GET /t +--- response_body +opts must be a table +--- no_error_log +[error] + + + +=== TEST 5: get() calls callback in protected mode with stack traceback +--- http_config eval: $::HttpConfig +--- config + location = /t { + content_by_lua_block { + local mlcache = require "kong.resty.mlcache" + + local cache, err = mlcache.new("my_mlcache", "cache_shm") + if not cache then + ngx.log(ngx.ERR, err) + return + end + + local function cb() + error("oops") + end + + local 
data, err = cache:get("key", nil, cb) + if err then + ngx.say(err) + end + } + } +--- request +GET /t +--- response_body_like chomp +callback threw an error: .*? oops +stack traceback: +\s+\[C\]: in function 'error' +\s+content_by_lua\(nginx\.conf:\d+\):\d+: in function +--- no_error_log +[error] + + + +=== TEST 6: get() is resilient to callback runtime errors with non-string arguments +--- http_config eval: $::HttpConfig +--- config + location = /t { + content_by_lua_block { + local mlcache = require "kong.resty.mlcache" + + local cache, err = mlcache.new("my_mlcache", "cache_shm") + if not cache then + ngx.log(ngx.ERR, err) + return + end + + local data, err = cache:get("key", nil, function() error(ngx.null) end) + if err then + ngx.say(err) + end + + local data, err = cache:get("key", nil, function() error({}) end) + if err then + ngx.say(err) + end + } + } +--- request +GET /t +--- response_body_like +callback threw an error: userdata: NULL +callback threw an error: table: 0x[0-9a-fA-F]+ +--- no_error_log +[error] + + + +=== TEST 7: get() caches a number +--- http_config eval: $::HttpConfig +--- config + location = /t { + content_by_lua_block { + local mlcache = require "kong.resty.mlcache" + + local cache, err = mlcache.new("my_mlcache", "cache_shm") + if not cache then + ngx.log(ngx.ERR, err) + return + end + + local function cb() + return 123 + end + + -- from callback + + local data, err = cache:get("key", nil, cb) + if err then + ngx.log(ngx.ERR, err) + return + end + + ngx.say("from callback: ", type(data), " ", data) + + -- from lru + + local data, err = cache:get("key", nil, cb) + if err then + ngx.log(ngx.ERR, err) + return + end + + ngx.say("from lru: ", type(data), " ", data) + + -- from shm + + cache.lru:delete("key") + + local data, err = cache:get("key", nil, cb) + if err then + ngx.log(ngx.ERR, err) + return + end + + ngx.say("from shm: ", type(data), " ", data) + } + } +--- request +GET /t +--- response_body +from callback: number 123 +from lru: 
number 123 +from shm: number 123 +--- no_error_log +[error] + + + +=== TEST 8: get() caches a boolean (true) +--- http_config eval: $::HttpConfig +--- config + location = /t { + content_by_lua_block { + local mlcache = require "kong.resty.mlcache" + + local cache, err = mlcache.new("my_mlcache", "cache_shm") + if not cache then + ngx.log(ngx.ERR, err) + return + end + + local function cb() + return true + end + + -- from callback + + local data, err = cache:get("key", nil, cb) + if err then + ngx.log(ngx.ERR, err) + return + end + + ngx.say("from callback: ", type(data), " ", data) + + -- from lru + + local data, err = cache:get("key", nil, cb) + if err then + ngx.log(ngx.ERR, err) + return + end + + ngx.say("from lru: ", type(data), " ", data) + + -- from shm + + cache.lru:delete("key") + + local data, err = cache:get("key", nil, cb) + if err then + ngx.log(ngx.ERR, err) + return + end + + ngx.say("from shm: ", type(data), " ", data) + } + } +--- request +GET /t +--- response_body +from callback: boolean true +from lru: boolean true +from shm: boolean true +--- no_error_log +[error] + + + +=== TEST 9: get() caches a boolean (false) +--- http_config eval: $::HttpConfig +--- config + location = /t { + content_by_lua_block { + local mlcache = require "kong.resty.mlcache" + + local cache, err = mlcache.new("my_mlcache", "cache_shm") + if not cache then + ngx.log(ngx.ERR, err) + return + end + + local function cb() + return false + end + + -- from callback + + local data, err = cache:get("key", nil, cb) + if err then + ngx.log(ngx.ERR, err) + return + end + + ngx.say("from callback: ", type(data), " ", data) + + -- from lru + + local data, err = cache:get("key", nil, cb) + if err then + ngx.log(ngx.ERR, err) + return + end + + ngx.say("from lru: ", type(data), " ", data) + + -- from shm + + cache.lru:delete("key") + + local data, err = cache:get("key", nil, cb) + if err then + ngx.log(ngx.ERR, err) + return + end + + ngx.say("from shm: ", type(data), " ", data) + } + } 
+--- request +GET /t +--- response_body +from callback: boolean false +from lru: boolean false +from shm: boolean false +--- no_error_log +[error] + + + +=== TEST 10: get() caches nil +--- http_config eval: $::HttpConfig +--- config + location = /t { + content_by_lua_block { + local mlcache = require "kong.resty.mlcache" + + local cache, err = mlcache.new("my_mlcache", "cache_shm") + if not cache then + ngx.log(ngx.ERR, err) + return + end + + local function cb() + return nil + end + + -- from callback + + local data, err = cache:get("key", nil, cb) + if err then + ngx.log(ngx.ERR, err) + return + end + + ngx.say("from callback: ", type(data), " ", data) + + -- from lru + + local data, err = cache:get("key", nil, cb) + if err then + ngx.log(ngx.ERR, err) + return + end + + ngx.say("from lru: ", type(data), " ", data) + + -- from shm + + cache.lru:delete("key") + + local data, err = cache:get("key", nil, cb) + if err then + ngx.log(ngx.ERR, err) + return + end + + ngx.say("from shm: ", type(data), " ", data) + } + } +--- request +GET /t +--- response_body +from callback: nil nil +from lru: nil nil +from shm: nil nil +--- no_error_log +[error] + + + +=== TEST 11: get() caches nil in 'shm_miss' if specified +--- http_config eval: $::HttpConfig +--- config + location = /t { + content_by_lua_block { + local dict = ngx.shared.cache_shm + local dict_miss = ngx.shared.cache_shm_miss + local mlcache = require "kong.resty.mlcache" + + local cache, err = mlcache.new("my_mlcache", "cache_shm", { + shm_miss = "cache_shm_miss" + }) + if not cache then + ngx.log(ngx.ERR, err) + return + end + + -- from callback + + local data, err = cache:get("key", nil, function() return nil end) + if err then + ngx.log(ngx.ERR, err) + return + end + ngx.say("from callback: ", type(data), " ", data) + + -- direct shm checks + -- concat key since shm values are namespaced per their the + -- mlcache name + local key = "my_mlcachekey" + + local v, err = dict:get(key) + if err then + 
ngx.log(ngx.ERR, err) + return + end + ngx.say("no value in shm: ", v == nil) + + local v, err = dict_miss:get(key) + if err then + ngx.log(ngx.ERR, err) + return + end + ngx.say("value in shm_miss is a sentinel nil value: ", v ~= nil) + + -- subsequent calls from shm + + cache.lru:delete("key") + + -- here, we return 'true' and not nil in the callback. this is to + -- ensure that get() will check the shm_miss shared dict and read + -- the nil sentinel value in there, thus will not call the + -- callback. + + local data, err = cache:get("key", nil, function() return true end) + if err then + ngx.log(ngx.ERR, err) + return + end + ngx.say("from shm: ", type(data), " ", data) + + -- from lru + + local v = cache.lru:get("key") + + ngx.say("value in lru is a sentinel nil value: ", v ~= nil) + } + } +--- request +GET /t +--- response_body +from callback: nil nil +no value in shm: true +value in shm_miss is a sentinel nil value: true +from shm: nil nil +value in lru is a sentinel nil value: true +--- no_error_log +[error] + + + +=== TEST 12: get() caches a string +--- http_config eval: $::HttpConfig +--- config + location = /t { + content_by_lua_block { + local mlcache = require "kong.resty.mlcache" + + local cache, err = mlcache.new("my_mlcache", "cache_shm") + if not cache then + ngx.log(ngx.ERR, err) + return + end + + local function cb() + return "hello world" + end + + -- from callback + + local data, err = cache:get("key", nil, cb) + if err then + ngx.log(ngx.ERR, err) + return + end + + ngx.say("from callback: ", type(data), " ", data) + + -- from lru + + local data, err = cache:get("key", nil, cb) + if err then + ngx.log(ngx.ERR, err) + return + end + + ngx.say("from lru: ", type(data), " ", data) + + -- from shm + + cache.lru:delete("key") + + local data, err = cache:get("key", nil, cb) + if err then + ngx.log(ngx.ERR, err) + return + end + + ngx.say("from shm: ", type(data), " ", data) + } + } +--- request +GET /t +--- response_body +from callback: string hello 
world +from lru: string hello world +from shm: string hello world +--- no_error_log +[error] + + + +=== TEST 13: get() caches a table +--- http_config eval: $::HttpConfig +--- config + location = /t { + content_by_lua_block { + local cjson = require "cjson" + local mlcache = require "kong.resty.mlcache" + + local cache, err = mlcache.new("my_mlcache", "cache_shm") + if not cache then + ngx.log(ngx.ERR, err) + return + end + + local function cb() + return { + hello = "world", + subt = { foo = "bar" } + } + end + + -- from callback + + local data, err = cache:get("key", nil, cb) + if err then + ngx.log(ngx.ERR, err) + return + end + + ngx.say("from callback: ", type(data), " ", data.hello, " ", data.subt.foo) + + -- from lru + + local data, err = cache:get("key", nil, cb) + if err then + ngx.log(ngx.ERR, err) + return + end + + ngx.say("from lru: ", type(data), " ", data.hello, " ", data.subt.foo) + + -- from shm + + cache.lru:delete("key") + + local data, err = cache:get("key", nil, cb) + if err then + ngx.log(ngx.ERR, err) + return + end + + ngx.say("from shm: ", type(data), " ", data.hello, " ", data.subt.foo) + } + } +--- request +GET /t +--- response_body +from callback: table world bar +from lru: table world bar +from shm: table world bar +--- no_error_log +[error] + + + +=== TEST 14: get() errors when caching an unsupported type +--- http_config eval: $::HttpConfig +--- config + location = /t { + content_by_lua_block { + local cjson = require "cjson" + local mlcache = require "kong.resty.mlcache" + + local cache, err = mlcache.new("my_mlcache", "cache_shm") + if not cache then + ngx.log(ngx.ERR, err) + return + end + + local function cb() + return ngx.null + end + + local data, err = cache:get("key", nil, cb) + if err then + ngx.log(ngx.ERR, err) + return + end + } + } +--- request +GET /t +--- error_code: 500 +--- error_log eval +qr/\[error\] .*?mlcache\.lua:\d+: cannot cache value of type userdata/ + + + +=== TEST 15: get() calls callback with args +--- 
http_config eval: $::HttpConfig +--- config + location = /t { + content_by_lua_block { + local mlcache = require "kong.resty.mlcache" + + local cache, err = mlcache.new("my_mlcache", "cache_shm") + if not cache then + ngx.log(ngx.ERR, err) + return + end + + local function cb(a, b) + return a + b + end + + local data, err = cache:get("key", nil, cb, 1, 2) + if err then + ngx.log(ngx.ERR, err) + return + end + + ngx.say(data) + } + } +--- request +GET /t +--- response_body +3 +--- no_error_log +[error] + + + +=== TEST 16: get() caches hit for 'ttl' from LRU (in ms) +--- http_config eval: $::HttpConfig +--- config + location = /t { + content_by_lua_block { + local mlcache = require "kong.resty.mlcache" + + local cache = assert(mlcache.new("my_mlcache", "cache_shm", { ttl = 0.3 })) + + local function cb() + ngx.say("in callback") + return 123 + end + + local data = assert(cache:get("key", nil, cb)) + assert(data == 123) + + ngx.sleep(0.2) + + data = assert(cache:get("key", nil, cb)) + assert(data == 123) + + ngx.sleep(0.2) + + data = assert(cache:get("key", nil, cb)) + assert(data == 123) + } + } +--- request +GET /t +--- response_body +in callback +in callback +--- no_error_log +[error] + + + +=== TEST 17: get() caches miss (nil) for 'neg_ttl' from LRU (in ms) +--- http_config eval: $::HttpConfig +--- config + location = /t { + content_by_lua_block { + local mlcache = require "kong.resty.mlcache" + + local cache = assert(mlcache.new("my_mlcache", "cache_shm", { + ttl = 10, + neg_ttl = 0.3 + })) + + local function cb() + ngx.say("in callback") + return nil + end + + local data, err = cache:get("key", nil, cb) + assert(err == nil, err) + assert(data == nil) + + ngx.sleep(0.2) + + data, err = cache:get("key", nil, cb) + assert(err == nil, err) + assert(data == nil) + + ngx.sleep(0.2) + + data, err = cache:get("key", nil, cb) + assert(err == nil, err) + assert(data == nil) + } + } +--- request +GET /t +--- response_body +in callback +in callback +--- no_error_log 
+[error] + + + +=== TEST 18: get() caches for 'opts.ttl' from LRU (in ms) +--- http_config eval: $::HttpConfig +--- config + location = /t { + content_by_lua_block { + local mlcache = require "kong.resty.mlcache" + + local cache = assert(mlcache.new("my_mlcache", "cache_shm", { ttl = 10 })) + + local function cb() + ngx.say("in callback") + return 123 + end + + local data = assert(cache:get("key", { ttl = 0.3 }, cb)) + assert(data == 123) + + ngx.sleep(0.2) + + data = assert(cache:get("key", nil, cb)) + assert(data == 123) + + ngx.sleep(0.2) + + data = assert(cache:get("key", nil, cb)) + assert(data == 123) + } + } +--- request +GET /t +--- response_body +in callback +in callback +--- no_error_log +[error] + + + +=== TEST 19: get() caches for 'opts.neg_ttl' from LRU (in ms) +--- http_config eval: $::HttpConfig +--- config + location = /t { + content_by_lua_block { + local mlcache = require "kong.resty.mlcache" + + local cache = assert(mlcache.new("my_mlcache", "cache_shm", { neg_ttl = 2 })) + + local function cb() + ngx.say("in callback") + return nil + end + + local data, err = cache:get("key", { neg_ttl = 0.3 }, cb) + assert(err == nil, err) + assert(data == nil) + + ngx.sleep(0.2) + + data, err = cache:get("key", nil, cb) + assert(err == nil, err) + assert(data == nil) + + ngx.sleep(0.2) + + data, err = cache:get("key", nil, cb) + assert(err == nil, err) + assert(data == nil) + } + } +--- request +GET /t +--- response_body +in callback +in callback +--- no_error_log +[error] + + + +=== TEST 20: get() with ttl of 0 means indefinite caching +--- http_config eval: $::HttpConfig +--- config + location = /t { + content_by_lua_block { + local mlcache = require "kong.resty.mlcache" + + local cache = assert(mlcache.new("my_mlcache", "cache_shm", { ttl = 0.3 })) + + local function cb() + ngx.say("in callback") + return 123 + end + + local data = assert(cache:get("key", { ttl = 0 }, cb)) + assert(data == 123) + + ngx.sleep(0.4) + + -- still in LRU + local data, stale = 
cache.lru:get("key") + if stale then + ngx.say("in LRU after 1.1s: stale") + + else + ngx.say("in LRU after exp: ", data) + end + + cache.lru:delete("key") + + -- still in shm + data = assert(cache:get("key", nil, cb)) + + ngx.say("in shm after exp: ", data) + } + } +--- request +GET /t +--- response_body +in callback +in LRU after exp: 123 +in shm after exp: 123 +--- no_error_log +[error] + + + +=== TEST 21: get() with neg_ttl of 0 means indefinite caching for nil values +--- http_config eval: $::HttpConfig +--- config + location = /t { + content_by_lua_block { + local mlcache = require "kong.resty.mlcache" + + local cache, err = assert(mlcache.new("my_mlcache", "cache_shm", { ttl = 0.3 })) + + local function cb() + ngx.say("in callback") + return nil + end + + local data, err = cache:get("key", { neg_ttl = 0 }, cb) + assert(err == nil, err) + assert(data == nil) + + ngx.sleep(0.4) + + -- still in LRU + local data, stale = cache.lru:get("key") + if stale then + ngx.say("in LRU after 0.4s: stale") + + else + ngx.say("in LRU after exp: ", tostring(data)) + end + + cache.lru:delete("key") + + -- still in shm + data, err = cache:get("key", nil, cb) + assert(err == nil, err) + + ngx.say("in shm after exp: ", tostring(data)) + } + } +--- request +GET /t +--- response_body_like +in callback +in LRU after exp: table: \S+ +in shm after exp: nil +--- no_error_log +[error] + + + +=== TEST 22: get() errors when ttl < 0 +--- http_config eval: $::HttpConfig +--- config + location = /t { + content_by_lua_block { + local mlcache = require "kong.resty.mlcache" + + local cache, err = mlcache.new("my_mlcache", "cache_shm") + if not cache then + ngx.log(ngx.ERR, err) + return + end + + local function cb() + ngx.say("in callback") + return 123 + end + + local ok, err = pcall(cache.get, cache, "key", { ttl = -1 }, cb) + if not ok then + ngx.say(err) + end + } + } +--- request +GET /t +--- response_body +opts.ttl must be >= 0 +--- no_error_log +[error] + + + +=== TEST 23: get() errors 
when neg_ttl < 0 +--- http_config eval: $::HttpConfig +--- config + location = /t { + content_by_lua_block { + local mlcache = require "kong.resty.mlcache" + + local cache, err = mlcache.new("my_mlcache", "cache_shm") + if not cache then + ngx.log(ngx.ERR, err) + return + end + + local function cb() + ngx.say("in callback") + return 123 + end + + local ok, err = pcall(cache.get, cache, "key", { neg_ttl = -1 }, cb) + if not ok then + ngx.say(err) + end + } + } +--- request +GET /t +--- response_body +opts.neg_ttl must be >= 0 +--- no_error_log +[error] + + + +=== TEST 24: get() shm -> LRU caches for 'opts.ttl - since' in ms +--- http_config eval: $::HttpConfig +--- config + location = /t { + content_by_lua_block { + local mlcache = require "kong.resty.mlcache" + + local cache = assert(mlcache.new("my_mlcache", "cache_shm")) + + local function cb() + return 123 + end + + local data = assert(cache:get("key", { ttl = 0.5 }, cb)) + assert(data == 123) + + ngx.sleep(0.2) + + -- delete from LRU + cache.lru:delete("key") + + -- from shm, setting LRU with smaller ttl + data, err = assert(cache:get("key", nil, cb)) + assert(data == 123) + + ngx.sleep(0.2) + + -- still in LRU + local data, stale = cache.lru:get("key") + if stale then + ngx.say("is stale in LRU: ", stale) + + else + ngx.say("is not expired in LRU: ", data) + end + + ngx.sleep(0.1) + + -- expired in LRU + local data, stale = cache.lru:get("key") + if stale then + ngx.say("is stale in LRU: ", stale) + + else + ngx.say("is not expired in LRU: ", data) + end + } + } +--- request +GET /t +--- response_body +is not expired in LRU: 123 +is stale in LRU: 123 +--- no_error_log +[error] + + + +=== TEST 25: get() shm -> LRU caches non-nil for 'indefinite' if ttl is 0 +--- http_config eval: $::HttpConfig +--- config + location = /t { + content_by_lua_block { + local mlcache = require "kong.resty.mlcache" + + local cache = assert(mlcache.new("my_mlcache", "cache_shm")) + + local function cb() + return 123 + end + + local 
data = assert(cache:get("key", { ttl = 0 }, cb)) + assert(data == 123) + + ngx.sleep(0.2) + + -- delete from LRU + cache.lru:delete("key") + + -- from shm, setting LRU with indefinite ttl too + data, err = assert(cache:get("key", nil, cb)) + assert(data == 123) + + -- still in LRU + local data, stale = cache.lru:get("key") + if stale then + ngx.say("is stale in LRU: ", stale) + + else + ngx.say("is not expired in LRU: ", data) + end + } + } +--- request +GET /t +--- response_body +is not expired in LRU: 123 +--- no_error_log +[error] + + + +=== TEST 26: get() shm -> LRU caches for 'opts.neg_ttl - since' in ms +--- http_config eval: $::HttpConfig +--- config + location = /t { + content_by_lua_block { + local mlcache = require "kong.resty.mlcache" + + local cache = assert(mlcache.new("my_mlcache", "cache_shm")) + + local function cb() + return nil + end + + local data, err = cache:get("key", { neg_ttl = 0.5 }, cb) + assert(err == nil, err) + assert(data == nil) + + ngx.sleep(0.2) + + -- delete from LRU + cache.lru:delete("key") + + -- from shm, setting LRU with smaller ttl + data, err = cache:get("key", nil, cb) + assert(err == nil, err) + assert(data == nil) + + ngx.sleep(0.2) + + -- still in LRU + local data, stale = cache.lru:get("key") + if stale then + ngx.say("is stale in LRU: ", tostring(stale)) + + else + ngx.say("is not expired in LRU: ", tostring(data)) + end + + ngx.sleep(0.1) + + -- expired in LRU + local data, stale = cache.lru:get("key") + if stale then + ngx.say("is stale in LRU: ", tostring(stale)) + + else + ngx.say("is not expired in LRU: ", tostring(data)) + end + } + } +--- request +GET /t +--- response_body_like +is not expired in LRU: table: \S+ +is stale in LRU: table: \S+ +--- no_error_log +[error] + + + +=== TEST 27: get() shm -> LRU caches nil for 'indefinite' if neg_ttl is 0 +--- http_config eval: $::HttpConfig +--- config + location = /t { + content_by_lua_block { + local mlcache = require "kong.resty.mlcache" + + local cache = 
assert(mlcache.new("my_mlcache", "cache_shm")) + + local function cb() + return nil + end + + local data, err =cache:get("key", { neg_ttl = 0 }, cb) + assert(err == nil) + assert(data == nil) + + ngx.sleep(0.2) + + -- delete from LRU + cache.lru:delete("key") + + -- from shm, setting LRU with indefinite ttl too + data, err = cache:get("key", nil, cb) + assert(err == nil) + assert(data == nil) + + -- still in LRU + local data, stale = cache.lru:get("key") + ngx.say("is stale in LRU: ", stale) + + -- data is a table (nil sentinel value) so rely on stale instead + } + } +--- request +GET /t +--- response_body +is stale in LRU: nil +--- no_error_log +[error] + + + +=== TEST 28: get() returns hit level +--- http_config eval: $::HttpConfig +--- config + location = /t { + content_by_lua_block { + local mlcache = require "kong.resty.mlcache" + + local cache = assert(mlcache.new("my_mlcache", "cache_shm")) + + local function cb() + return 123 + end + + local _, _, hit_lvl = assert(cache:get("key", nil, cb)) + ngx.say("hit level from callback: ", hit_lvl) + + _, _, hit_lvl = assert(cache:get("key", nil, cb)) + ngx.say("hit level from LRU: ", hit_lvl) + + -- delete from LRU + + cache.lru:delete("key") + + _, _, hit_lvl = assert(cache:get("key", nil, cb)) + ngx.say("hit level from shm: ", hit_lvl) + } + } +--- request +GET /t +--- response_body +hit level from callback: 3 +hit level from LRU: 1 +hit level from shm: 2 +--- no_error_log +[error] + + + +=== TEST 29: get() returns hit level for nil hits +--- http_config eval: $::HttpConfig +--- config + location = /t { + content_by_lua_block { + local mlcache = require "kong.resty.mlcache" + + local cache = assert(mlcache.new("my_mlcache", "cache_shm")) + + local function cb() + return nil + end + + local _, _, hit_lvl = cache:get("key", nil, cb) + ngx.say("hit level from callback: ", hit_lvl) + + _, _, hit_lvl = cache:get("key", nil, cb) + ngx.say("hit level from LRU: ", hit_lvl) + + -- delete from LRU + + cache.lru:delete("key") 
+ + _, _, hit_lvl = cache:get("key", nil, cb) + ngx.say("hit level from shm: ", hit_lvl) + } + } +--- request +GET /t +--- response_body +hit level from callback: 3 +hit level from LRU: 1 +hit level from shm: 2 +--- no_error_log +[error] + + + +=== TEST 30: get() returns hit level for boolean false hits +--- skip_eval: 3: t::Util::skip_openresty('<', '1.11.2.3') +--- http_config eval: $::HttpConfig +--- config + location = /t { + content_by_lua_block { + local mlcache = require "kong.resty.mlcache" + + local cache = assert(mlcache.new("my_mlcache", "cache_shm")) + + local function cb() + return false + end + + local _, _, hit_lvl = cache:get("key", nil, cb) + ngx.say("hit level from callback: ", hit_lvl) + + _, _, hit_lvl = cache:get("key", nil, cb) + ngx.say("hit level from LRU: ", hit_lvl) + + -- delete from LRU + + cache.lru:delete("key") + + _, _, hit_lvl = cache:get("key", nil, cb) + ngx.say("hit level from shm: ", hit_lvl) + } + } +--- request +GET /t +--- response_body +hit level from callback: 3 +hit level from LRU: 1 +hit level from shm: 2 +--- no_error_log +[error] + + + +=== TEST 31: get() JITs when hit coming from LRU +--- http_config eval: $::HttpConfig +--- config + location = /t { + content_by_lua_block { + local mlcache = require "kong.resty.mlcache" + + local cache = assert(mlcache.new("my_mlcache", "cache_shm")) + + local function cb() + return 123456 + end + + for i = 1, 10e3 do + local data = assert(cache:get("key", nil, cb)) + assert(data == 123456) + end + } + } +--- request +GET /t +--- response_body + +--- error_log eval +qr/\[TRACE\s+\d+ content_by_lua\(nginx\.conf:\d+\):10 loop\]/ +--- no_error_log +[error] + + + +=== TEST 32: get() JITs when hit of scalar value coming from shm +--- http_config eval: $::HttpConfig +--- config + location = /t { + content_by_lua_block { + local mlcache = require "kong.resty.mlcache" + + local cache = assert(mlcache.new("my_mlcache", "cache_shm")) + + local function cb_number() + return 123456 + end + + local 
function cb_string() + return "hello" + end + + local function cb_bool() + return false + end + + for i = 1, 10e2 do + local data, err, hit_lvl = assert(cache:get("number", nil, cb_number)) + assert(err == nil) + assert(data == 123456) + assert(hit_lvl == (i == 1 and 3 or 2)) + + cache.lru:delete("number") + end + + for i = 1, 10e2 do + local data, err, hit_lvl = assert(cache:get("string", nil, cb_string)) + assert(err == nil) + assert(data == "hello") + assert(hit_lvl == (i == 1 and 3 or 2)) + + cache.lru:delete("string") + end + + for i = 1, 10e2 do + local data, err, hit_lvl = cache:get("bool", nil, cb_bool) + assert(err == nil) + assert(data == false) + assert(hit_lvl == (i == 1 and 3 or 2)) + + cache.lru:delete("bool") + end + } + } +--- request +GET /t +--- response_body + +--- error_log eval +[ + qr/\[TRACE\s+\d+ content_by_lua\(nginx\.conf:\d+\):18 loop\]/, + qr/\[TRACE\s+\d+ content_by_lua\(nginx\.conf:\d+\):27 loop\]/, + qr/\[TRACE\s+\d+ content_by_lua\(nginx\.conf:\d+\):36 loop\]/, +] +--- no_error_log +[error] + + + +=== TEST 33: get() JITs when hit of table value coming from shm +--- SKIP: blocked until l2_serializer +--- http_config eval: $::HttpConfig +--- config + location = /t { + content_by_lua_block { + local mlcache = require "kong.resty.mlcache" + + local cache = assert(mlcache.new("my_mlcache", "cache_shm")) + + local function cb_table() + return { hello = "world" } + end + + for i = 1, 10e2 do + local data = assert(cache:get("table", nil, cb_table)) + assert(type(data) == "table") + assert(data.hello == "world") + + cache.lru:delete("table") + end + } + } +--- request +GET /t +--- response_body + +--- error_log eval +qr/\[TRACE\s+\d+ content_by_lua\(nginx\.conf:\d+\):18 loop\]/ +--- no_error_log +[error] + + + +=== TEST 34: get() JITs when miss coming from LRU +--- http_config eval: $::HttpConfig +--- config + location = /t { + content_by_lua_block { + local mlcache = require "kong.resty.mlcache" + + local cache = 
assert(mlcache.new("my_mlcache", "cache_shm")) + + local function cb() + return nil + end + + for i = 1, 10e3 do + local data, err = cache:get("key", nil, cb) + assert(err == nil) + assert(data == nil) + end + } + } +--- request +GET /t +--- response_body + +--- error_log eval +qr/\[TRACE\s+\d+ content_by_lua\(nginx\.conf:\d+\):10 loop\]/ +--- no_error_log +[error] + + + +=== TEST 35: get() JITs when miss coming from shm +--- http_config eval: $::HttpConfig +--- config + location = /t { + content_by_lua_block { + local mlcache = require "kong.resty.mlcache" + + local cache = assert(mlcache.new("my_mlcache", "cache_shm")) + + local function cb() + return nil + end + + for i = 1, 10e3 do + local data, err = cache:get("key", nil, cb) + assert(err == nil) + assert(data == nil) + + cache.lru:delete("key") + end + } + } +--- request +GET /t +--- response_body + +--- error_log eval +qr/\[TRACE\s+\d+ content_by_lua\(nginx\.conf:\d+\):10 loop\]/ +--- no_error_log +[error] + + + +=== TEST 36: get() callback can return nil + err (string) +--- http_config eval: $::HttpConfig +--- config + location = /t { + content_by_lua_block { + local mlcache = require "kong.resty.mlcache" + + local cache = assert(mlcache.new("my_mlcache", "cache_shm")) + + local function cb() + return nil, "an error occurred" + end + + local data, err = cache:get("1", nil, cb) + if err then + ngx.say("cb return values: ", data, " ", err) + end + + local function cb2() + -- we will return "foo" to users as well from get(), and + -- not just nil, if they wish so. 
+ return "foo", "an error occurred again" + end + + data, err = cache:get("2", nil, cb2) + if err then + ngx.say("cb2 return values: ", data, " ", err) + end + } + } +--- request +GET /t +--- response_body +cb return values: nil an error occurred +cb2 return values: foo an error occurred again +--- no_error_log +[error] + + + +=== TEST 37: get() callback can return nil + err (non-string) safely +--- http_config eval: $::HttpConfig +--- config + location = /t { + content_by_lua_block { + local mlcache = require "kong.resty.mlcache" + local cache = assert(mlcache.new("my_mlcache", "cache_shm")) + + local function cb() + return nil, { err = "an error occurred" } -- invalid usage + end + + local data, err = cache:get("1", nil, cb) + if err then + ngx.say("cb return values: ", data, " ", err) + end + + local function cb2() + -- we will return "foo" to users as well from get(), and + -- not just nil, if they wish so. + return "foo", { err = "an error occurred again" } -- invalid usage + end + + data, err = cache:get("2", nil, cb2) + if err then + ngx.say("cb2 return values: ", data, " ", err) + end + } + } +--- request +GET /t +--- response_body_like chomp +cb return values: nil table: 0x[[:xdigit:]]+ +cb2 return values: foo table: 0x[[:xdigit:]]+ +--- no_error_log +[error] + + + +=== TEST 38: get() callback can return nil + err (table) and will call __tostring +--- http_config eval: $::HttpConfig +--- config + location = /t { + content_by_lua_block { + local mlcache = require "kong.resty.mlcache" + local cache = assert(mlcache.new("my_mlcache", "cache_shm")) + + local mt = { + __tostring = function() + return "hello from __tostring" + end + } + + local function cb() + return nil, setmetatable({}, mt) + end + + local data, err = cache:get("1", nil, cb) + if err then + ngx.say("cb return values: ", data, " ", err) + end + } + } +--- request +GET /t +--- response_body +cb return values: nil hello from __tostring +--- no_error_log +[error] + + + +=== TEST 39: get() 
callback's 3rd return value can override the ttl +--- http_config eval: $::HttpConfig +--- config + location = /t { + content_by_lua_block { + local mlcache = require "kong.resty.mlcache" + + local opts = { ttl = 10 } + local cache = assert(mlcache.new("my_mlcache", "cache_shm", opts)) + + local function cb() + ngx.say("in callback 1") + return 1, nil, 0.1 + end + + local function cb2() + ngx.say("in callback 2") + return 2 + end + + -- cache our value (runs cb) + + local data, err = cache:get("key", opts, cb) + assert(err == nil, err) + assert(data == 1) + + -- should not run cb2 + + data, err = cache:get("key", opts, cb2) + assert(err == nil, err) + assert(data == 1) + + ngx.sleep(0.15) + + -- should run cb2 (value expired) + + data, err = cache:get("key", opts, cb2) + assert(err == nil, err) + assert(data == 2) + } + } +--- request +GET /t +--- response_body +in callback 1 +in callback 2 +--- no_error_log +[error] + + + +=== TEST 40: get() callback's 3rd return value can override the neg_ttl +--- http_config eval: $::HttpConfig +--- config + location = /t { + content_by_lua_block { + local mlcache = require "kong.resty.mlcache" + + local opts = { ttl = 10, neg_ttl = 10 } + local cache = assert(mlcache.new("my_mlcache", "cache_shm", opts)) + + local function cb() + ngx.say("in callback 1") + return nil, nil, 0.1 + end + + local function cb2() + ngx.say("in callback 2") + return 1 + end + + -- cache our value (runs cb) + + local data, err = cache:get("key", opts, cb) + assert(err == nil, err) + assert(data == nil) + + -- should not run cb2 + + data, err = cache:get("key", opts, cb2) + assert(err == nil, err) + assert(data == nil) + + ngx.sleep(0.15) + + -- should run cb2 (value expired) + + data, err = cache:get("key", opts, cb2) + assert(err == nil, err) + assert(data == 1) + } + } +--- request +GET /t +--- response_body +in callback 1 +in callback 2 +--- no_error_log +[error] + + + +=== TEST 41: get() ignores invalid callback 3rd return value (not number) +--- 
http_config eval: $::HttpConfig +--- config + location = /t { + content_by_lua_block { + local mlcache = require "kong.resty.mlcache" + + local opts = { ttl = 0.1, neg_ttl = 0.1 } + local cache = assert(mlcache.new("my_mlcache", "cache_shm", opts)) + + local function pos_cb() + ngx.say("in positive callback") + return 1, nil, "success" + end + + local function neg_cb() + ngx.say("in negative callback") + return nil, nil, {} + end + + ngx.say("Test A: string TTL return value for positive data is ignored") + + -- cache our value (runs pos_cb) + + local data, err = cache:get("pos_key", opts, pos_cb) + assert(err == nil, err) + assert(data == 1) + + -- neg_cb should not run + + data, err = cache:get("pos_key", opts, neg_cb) + assert(err == nil, err) + assert(data == 1) + + ngx.sleep(0.15) + + -- should run neg_cb + + data, err = cache:get("pos_key", opts, neg_cb) + assert(err == nil, err) + assert(data == nil) + + ngx.say("Test B: table TTL return value for negative data is ignored") + + -- cache our value (runs neg_cb) + + data, err = cache:get("neg_key", opts, neg_cb) + assert(err == nil, err) + assert(data == nil) + + -- pos_cb should not run + + data, err = cache:get("neg_key", opts, pos_cb) + assert(err == nil, err) + assert(data == nil) + + ngx.sleep(0.15) + + -- should run pos_cb + + data, err = cache:get("neg_key", opts, pos_cb) + assert(err == nil, err) + assert(data == 1) + } + } +--- request +GET /t +--- response_body +Test A: string TTL return value for positive data is ignored +in positive callback +in negative callback +Test B: table TTL return value for negative data is ignored +in negative callback +in positive callback +--- no_error_log +[error] + + + +=== TEST 42: get() passes 'resty_lock_opts' for L3 calls +--- http_config eval: $::HttpConfig +--- config + location = /t { + content_by_lua_block { + local resty_lock = require "resty.lock" + local mlcache = require "kong.resty.mlcache" + + local resty_lock_opts = { timeout = 5 } + + do + local 
orig_resty_lock_new = resty_lock.new + resty_lock.new = function(_, dict_name, opts, ...) + ngx.say("was given 'opts.resty_lock_opts': ", opts == resty_lock_opts) + + return orig_resty_lock_new(_, dict_name, opts, ...) + end + end + + local cache, err = mlcache.new("my_mlcache", "cache_shm", { + resty_lock_opts = resty_lock_opts, + }) + if not cache then + ngx.log(ngx.ERR, err) + return + end + + local data, err = cache:get("key", nil, function() return nil end) + if err then + ngx.log(ngx.ERR, err) + return + end + } + } +--- request +GET /t +--- response_body +was given 'opts.resty_lock_opts': true +--- no_error_log +[error] + + + +=== TEST 43: get() errors on lock timeout +--- http_config eval: $::HttpConfig +--- config + location = /t { + access_by_lua_block { + ngx.shared.cache_shm:set(1, true, 0.2) + ngx.shared.cache_shm:set(2, true, 0.2) + } + + content_by_lua_block { + local mlcache = require "kong.resty.mlcache" + local cache_1 = assert(mlcache.new("my_mlcache", "cache_shm", { + ttl = 0.3 + })) + local cache_2 = assert(mlcache.new("my_mlcache", "cache_shm", { + ttl = 0.3, + resty_lock_opts = { + timeout = 0.2 + } + })) + + local function cb(delay, return_val) + if delay then + ngx.sleep(delay) + end + + return return_val or 123 + end + + -- cache in shm + + local data, err, hit_lvl = cache_1:get("my_key", nil, cb) + assert(data == 123) + assert(err == nil) + assert(hit_lvl == 3) + + -- make shm + LRU expire + + ngx.sleep(0.3) + + local t1 = ngx.thread.spawn(function() + -- trigger L3 callback again, but slow to return this time + cache_1:get("my_key", nil, cb, 0.3, 456) + end) + + local t2 = ngx.thread.spawn(function() + -- make this mlcache wait on other's callback, and timeout + local data, err, hit_lvl = cache_2:get("my_key", nil, cb) + ngx.say("data: ", data) + ngx.say("err: ", err) + ngx.say("hit_lvl: ", hit_lvl) + end) + + assert(ngx.thread.wait(t1)) + assert(ngx.thread.wait(t2)) + + ngx.say() + ngx.say("-> subsequent get()") + data, err, hit_lvl = 
cache_2:get("my_key", nil, cb, nil, 123) + ngx.say("data: ", data) + ngx.say("err: ", err) + ngx.say("hit_lvl: ", hit_lvl) -- should be 1 since LRU instances are shared by mlcache namespace, and t1 finished + } + } +--- request +GET /t +--- response_body +data: nil +err: could not acquire callback lock: timeout +hit_lvl: nil + +-> subsequent get() +data: 456 +err: nil +hit_lvl: 1 +--- no_error_log +[error] + + + +=== TEST 44: get() returns data even if failed to set in shm +--- http_config eval: $::HttpConfig +--- config + location = /t { + content_by_lua_block { + local dict = ngx.shared.cache_shm + local mlcache = require "kong.resty.mlcache" + + -- fill up shm + + local idx = 0 + + while true do + local ok, err, forcible = dict:set(idx, string.rep("a", 2^5)) + if not ok then + ngx.log(ngx.ERR, err) + return + end + + if forcible then + break + end + + idx = idx + 1 + end + + -- now, trigger a hit with a value many times as large + + local cache = assert(mlcache.new("my_mlcache", "cache_shm")) + + local data, err = cache:get("key", nil, function() + return string.rep("a", 2^20) + end) + if err then + ngx.log(ngx.ERR, err) + return + end + + ngx.say("data type: ", type(data)) + } + } +--- request +GET /t +--- response_body +data type: string +--- error_log eval +qr/\[warn\] .*? 
could not write to lua_shared_dict 'cache_shm' after 3 tries \(no memory\), it is either/ +--- no_error_log +[error] + + + +=== TEST 45: get() errors on invalid opts.shm_set_tries +--- http_config eval: $::HttpConfig +--- config + location = /t { + content_by_lua_block { + local mlcache = require "kong.resty.mlcache" + + local cache, err = mlcache.new("my_mlcache", "cache_shm") + if not cache then + ngx.log(ngx.ERR, err) + return + end + + local values = { + "foo", + -1, + 0, + } + + for _, v in ipairs(values) do + local ok, err = pcall(cache.get, cache, "key", { + shm_set_tries = v + }, function() end) + if not ok then + ngx.say(err) + end + end + } + } +--- request +GET /t +--- response_body +opts.shm_set_tries must be a number +opts.shm_set_tries must be >= 1 +opts.shm_set_tries must be >= 1 +--- no_error_log +[error] + + + +=== TEST 46: get() with default shm_set_tries to LRU evict items when a large value is being cached +--- http_config eval: $::HttpConfig +--- config + location = /t { + content_by_lua_block { + local dict = ngx.shared.cache_shm + dict:flush_all() + dict:flush_expired() + local mlcache = require "kong.resty.mlcache" + + -- fill up shm + + local idx = 0 + + while true do + local ok, err, forcible = dict:set(idx, string.rep("a", 2^2)) + if not ok then + ngx.log(ngx.ERR, err) + return + end + + if forcible then + break + end + + idx = idx + 1 + end + + -- shm:set() will evict up to 30 items when the shm is full + -- now, trigger a hit with a larger value which should trigger LRU + -- eviction and force the slab allocator to free pages + + local cache = assert(mlcache.new("my_mlcache", "cache_shm")) + + local cb_calls = 0 + local function cb() + cb_calls = cb_calls + 1 + return string.rep("a", 2^5) + end + + local data, err = cache:get("key", nil, cb) + if err then + ngx.log(ngx.ERR, err) + return + end + + -- from shm + + cache.lru:delete("key") + + local data, err = cache:get("key", nil, cb) + if err then + ngx.log(ngx.ERR, err) + return + end 
+ + ngx.say("type of data in shm: ", type(data)) + ngx.say("callback was called: ", cb_calls, " times") + } + } +--- request +GET /t +--- response_body +type of data in shm: string +callback was called: 1 times +--- no_error_log +[warn] +[error] + + + +=== TEST 47: get() respects instance opts.shm_set_tries to LRU evict items when a large value is being cached +--- http_config eval: $::HttpConfig +--- config + location = /t { + content_by_lua_block { + local dict = ngx.shared.cache_shm + dict:flush_all() + dict:flush_expired() + local mlcache = require "kong.resty.mlcache" + + -- fill up shm + + local idx = 0 + + while true do + local ok, err, forcible = dict:set(idx, string.rep("a", 2^2)) + if not ok then + ngx.log(ngx.ERR, err) + return + end + + if forcible then + break + end + + idx = idx + 1 + end + + -- shm:set() will evict up to 30 items when the shm is full + -- now, trigger a hit with a larger value which should trigger LRU + -- eviction and force the slab allocator to free pages + + local cache = assert(mlcache.new("my_mlcache", "cache_shm", { + shm_set_tries = 5 + })) + + local cb_calls = 0 + local function cb() + cb_calls = cb_calls + 1 + return string.rep("a", 2^12) + end + + local data, err = cache:get("key", nil, cb) + if err then + ngx.log(ngx.ERR, err) + return + end + + -- from shm + + cache.lru:delete("key") + + local data, err = cache:get("key", nil, cb) + if err then + ngx.log(ngx.ERR, err) + return + end + + ngx.say("type of data in shm: ", type(data)) + ngx.say("callback was called: ", cb_calls, " times") + } + } +--- request +GET /t +--- response_body +type of data in shm: string +callback was called: 1 times +--- no_error_log +[warn] +[error] + + + +=== TEST 48: get() accepts opts.shm_set_tries to LRU evict items when a large value is being cached +--- http_config eval: $::HttpConfig +--- config + location = /t { + content_by_lua_block { + local dict = ngx.shared.cache_shm + dict:flush_all() + dict:flush_expired() + local mlcache = require 
"kong.resty.mlcache" + + -- fill up shm + + local idx = 0 + + while true do + local ok, err, forcible = dict:set(idx, string.rep("a", 2^2)) + if not ok then + ngx.log(ngx.ERR, err) + return + end + + if forcible then + break + end + + idx = idx + 1 + end + + -- now, trigger a hit with a value ~3 times as large + -- which should trigger retries and eventually remove 9 other + -- cached items + + local cache = assert(mlcache.new("my_mlcache", "cache_shm")) + + local cb_calls = 0 + local function cb() + cb_calls = cb_calls + 1 + return string.rep("a", 2^12) + end + + local data, err = cache:get("key", { + shm_set_tries = 5 + }, cb) + if err then + ngx.log(ngx.ERR, err) + return + end + + -- from shm + + cache.lru:delete("key") + + local data, err = cache:get("key", nil, cb) + if err then + ngx.log(ngx.ERR, err) + return + end + + ngx.say("type of data in shm: ", type(data)) + ngx.say("callback was called: ", cb_calls, " times") + } + } +--- request +GET /t +--- response_body +type of data in shm: string +callback was called: 1 times +--- no_error_log +[warn] +[error] + + + +=== TEST 49: get() caches data in L1 LRU even if failed to set in shm +--- http_config eval: $::HttpConfig +--- config + location = /t { + content_by_lua_block { + local dict = ngx.shared.cache_shm + dict:flush_all() + dict:flush_expired() + local mlcache = require "kong.resty.mlcache" + + -- fill up shm + + local idx = 0 + + while true do + local ok, err, forcible = dict:set(idx, string.rep("a", 2^2)) + if not ok then + ngx.log(ngx.ERR, err) + return + end + + if forcible then + break + end + + idx = idx + 1 + end + + -- now, trigger a hit with a value many times as large + + local cache = assert(mlcache.new("my_mlcache", "cache_shm", { + ttl = 0.3, + shm_set_tries = 1, + })) + + local data, err = cache:get("key", nil, function() + return string.rep("a", 2^20) + end) + if err then + ngx.log(ngx.ERR, err) + return + end + + local data = cache.lru:get("key") + ngx.say("type of data in LRU: ", 
type(data)) + + ngx.say("sleeping...") + ngx.sleep(0.4) + + local _, stale = cache.lru:get("key") + ngx.say("is stale: ", stale ~= nil) + } + } +--- request +GET /t +--- response_body +type of data in LRU: string +sleeping... +is stale: true +--- no_error_log +[error] + + + +=== TEST 50: get() does not cache value in LRU indefinitely when retrieved from shm on last ms (see GH PR #58) +--- http_config eval: $::HttpConfig +--- config + location = /t { + content_by_lua_block { + local forced_now = ngx.now() + ngx.now = function() + return forced_now + end + + local mlcache = require "kong.resty.mlcache" + + local cache = assert(mlcache.new("my_mlcache", "cache_shm", { + ttl = 0.2, + })) + + local function cb(v) + return v or 42 + end + + local data, err = cache:get("key", nil, cb) + assert(data == 42, err or "invalid data value: " .. data) + + -- drop L1 cache value + cache.lru:delete("key") + + -- advance 0.2 second in the future, and simulate another :get() + -- call; the L2 shm entry will still be alive (as its clock is + -- not faked), but mlcache will compute a remaining_ttl of 0; + -- In such cases, we should _not_ cache the value indefinitely in + -- the L1 LRU cache. + forced_now = forced_now + 0.2 + + local data, err, hit_lvl = cache:get("key", nil, cb) + assert(data == 42, err or "invalid data value: " .. data) + + ngx.say("+0.200s hit_lvl: ", hit_lvl) + + -- the value is not cached in LRU (too short ttl anyway) + + data, err, hit_lvl = cache:get("key", nil, cb) + assert(data == 42, err or "invalid data value: " .. data) + + ngx.say("+0.200s hit_lvl: ", hit_lvl) + + -- make it expire in shm (real wait) + ngx.sleep(0.201) + + data, err, hit_lvl = cache:get("key", nil, cb, 91) + assert(data == 91, err or "invalid data value: " .. 
data) + + ngx.say("+0.201s hit_lvl: ", hit_lvl) + } + } +--- request +GET /t +--- response_body ++0.200s hit_lvl: 2 ++0.200s hit_lvl: 2 ++0.201s hit_lvl: 3 +--- no_error_log +[error] + + + +=== TEST 51: get() bypass cache for negative callback TTL +--- http_config eval: $::HttpConfig +--- config + location = /t { + content_by_lua_block { + local mlcache = require "kong.resty.mlcache" + + local opts = { ttl = 0.1, neg_ttl = 0.1 } + local cache = assert(mlcache.new("my_mlcache", "cache_shm", opts)) + + local function pos_cb() + ngx.say("in positive callback") + return 1, nil, -1 + end + + local function neg_cb() + ngx.say("in negative callback") + return nil, nil, -1 + end + + ngx.say("Test A: negative TTL return value for positive data bypasses cache") + + -- don't cache our value (runs pos_cb) + + local data, err, hit_level = cache:get("pos_key", opts, pos_cb) + assert(err == nil, err) + assert(data == 1) + assert(hit_level == 3) + + -- pos_cb should run again + + data, err = cache:get("pos_key", opts, pos_cb) + assert(err == nil, err) + assert(data == 1) + assert(hit_level == 3) + + ngx.say("Test B: negative TTL return value for negative data bypasses cache") + + -- don't cache our value (runs neg_cb) + + data, err = cache:get("neg_key", opts, neg_cb) + assert(err == nil, err) + assert(data == nil) + assert(hit_level == 3) + + -- neg_cb should run again + + data, err = cache:get("neg_key", opts, neg_cb) + assert(err == nil, err) + assert(data == nil) + assert(hit_level == 3) + } + } +--- request +GET /t +--- response_body +Test A: negative TTL return value for positive data bypasses cache +in positive callback +in positive callback +Test B: negative TTL return value for negative data bypasses cache +in negative callback +in negative callback +--- no_error_log +[error] + + + +=== TEST 52: get() nil callback returns positive cached items from L1/L2 +--- http_config eval: $::HttpConfig +--- config + location = /t { + content_by_lua_block { + local mlcache = require 
"kong.resty.mlcache" + + local cache = assert(mlcache.new("my_mlcache", "cache_shm")) + + -- miss lookup + + local data, err, hit_lvl = cache:get("key") + if err then + ngx.log(ngx.ERR, err) + end + ngx.say("-> miss") + ngx.say("data: ", data) + ngx.say("err: ", err) + ngx.say("hit_lvl: ", hit_lvl) + + -- cache an item + + local _, err = cache:get("key", nil, function() return 123 end) + if err then + ngx.log(ngx.ERR, err) + end + + -- hit from lru + + local data, err, hit_lvl = cache:get("key") + ngx.say() + ngx.say("-> from LRU") + ngx.say("data: ", data) + ngx.say("err: ", err) + ngx.say("hit_lvl: ", hit_lvl) + + -- hit from shm + + cache.lru:delete("key") + + local data, err, hit_lvl = cache:get("key") + ngx.say() + ngx.say("-> from shm") + ngx.say("data: ", data) + ngx.say("err: ", err) + ngx.say("hit_lvl: ", hit_lvl) + + -- promoted to lru again + + local data, err, hit_lvl = cache:get("key") + ngx.say() + ngx.say("-> promoted to LRU") + ngx.say("data: ", data) + ngx.say("err: ", err) + ngx.say("hit_lvl: ", hit_lvl) + } + } +--- request +GET /t +--- response_body +-> miss +data: nil +err: nil +hit_lvl: -1 + +-> from LRU +data: 123 +err: nil +hit_lvl: 1 + +-> from shm +data: 123 +err: nil +hit_lvl: 2 + +-> promoted to LRU +data: 123 +err: nil +hit_lvl: 1 +--- no_error_log +[error] + + + +=== TEST 53: get() nil callback returns negative cached items from L1/L2 +--- http_config eval: $::HttpConfig +--- config + location = /t { + content_by_lua_block { + local mlcache = require "kong.resty.mlcache" + + local cache = assert(mlcache.new("my_mlcache", "cache_shm")) + + -- miss lookup + + local data, err, hit_lvl = cache:get("key") + if err then + ngx.log(ngx.ERR, err) + end + ngx.say("-> miss") + ngx.say("data: ", data) + ngx.say("err: ", err) + ngx.say("hit_lvl: ", hit_lvl) + + -- cache an item + + local _, err = cache:get("key", nil, function() return nil end) + if err then + ngx.log(ngx.ERR, err) + end + + -- hit from lru + + local data, err, hit_lvl = 
cache:get("key") + ngx.say() + ngx.say("-> from LRU") + ngx.say("data: ", data) + ngx.say("err: ", err) + ngx.say("hit_lvl: ", hit_lvl) + + -- hit from shm + + cache.lru:delete("key") + + local data, err, hit_lvl = cache:get("key") + ngx.say() + ngx.say("-> from shm") + ngx.say("data: ", data) + ngx.say("err: ", err) + ngx.say("hit_lvl: ", hit_lvl) + + -- promoted to lru again + + local data, err, hit_lvl = cache:get("key") + ngx.say() + ngx.say("-> promoted to LRU") + ngx.say("data: ", data) + ngx.say("err: ", err) + ngx.say("hit_lvl: ", hit_lvl) + } + } +--- request +GET /t +--- response_body +-> miss +data: nil +err: nil +hit_lvl: -1 + +-> from LRU +data: nil +err: nil +hit_lvl: 1 + +-> from shm +data: nil +err: nil +hit_lvl: 2 + +-> promoted to LRU +data: nil +err: nil +hit_lvl: 1 +--- no_error_log +[error] + + + +=== TEST 54: get() JITs on misses without a callback +--- http_config eval: $::HttpConfig +--- config + location = /t { + content_by_lua_block { + local mlcache = require "kong.resty.mlcache" + + local cache = assert(mlcache.new("my_mlcache", "cache_shm")) + + for i = 1, 10e3 do + cache:get("key") + end + } + } +--- request +GET /t +--- ignore_response_body +--- error_log eval +qr/\[TRACE\s+\d+ content_by_lua\(nginx\.conf:\d+\):6 loop\]/ +--- no_error_log +[error] diff --git a/t/05-mlcache/03-peek.t b/t/05-mlcache/03-peek.t new file mode 100644 index 000000000000..f6ccc87eab47 --- /dev/null +++ b/t/05-mlcache/03-peek.t @@ -0,0 +1,666 @@ +# vim:set ts=4 sts=4 sw=4 et ft=: + +use Test::Nginx::Socket::Lua; +use Cwd qw(cwd); + +workers(2); + +#repeat_each(2); + +plan tests => repeat_each() * (blocks() * 3) + 2; + +my $pwd = cwd(); + +our $HttpConfig = qq{ + lua_package_path "$pwd/lib/?.lua;;"; + lua_shared_dict cache_shm 1m; + lua_shared_dict cache_shm_miss 1m; + + init_by_lua_block { + -- local verbose = true + local verbose = false + local outfile = "$Test::Nginx::Util::ErrLogFile" + -- local outfile = "/tmp/v.log" + if verbose then + local dump = 
require "jit.dump" + dump.on(nil, outfile) + else + local v = require "jit.v" + v.on(outfile) + end + + require "resty.core" + -- jit.opt.start("hotloop=1") + -- jit.opt.start("loopunroll=1000000") + -- jit.off() + } +}; + +run_tests(); + +__DATA__ + +=== TEST 1: peek() validates key +--- http_config eval: $::HttpConfig +--- config + location = /t { + content_by_lua_block { + local mlcache = require "kong.resty.mlcache" + + local cache, err = mlcache.new("my_mlcache", "cache_shm") + if not cache then + ngx.log(ngx.ERR, err) + return + end + + local ok, err = pcall(cache.peek, cache) + if not ok then + ngx.say(err) + end + } + } +--- request +GET /t +--- response_body +key must be a string +--- no_error_log +[error] + + + +=== TEST 2: peek() returns nil if a key has never been fetched before +--- http_config eval: $::HttpConfig +--- config + location = /t { + content_by_lua_block { + local mlcache = require "kong.resty.mlcache" + + local cache, err = mlcache.new("my_mlcache", "cache_shm") + if not cache then + ngx.log(ngx.ERR, err) + return + end + + local ttl, err = cache:peek("my_key") + if err then + ngx.log(ngx.ERR, err) + return + end + + ngx.say("ttl: ", ttl) + } + } +--- request +GET /t +--- response_body +ttl: nil +--- no_error_log +[error] + + + +=== TEST 3: peek() returns the remaining ttl if a key has been fetched before +--- http_config eval: $::HttpConfig +--- config + location = /t { + content_by_lua_block { + local mlcache = require "kong.resty.mlcache" + + local cache, err = mlcache.new("my_mlcache", "cache_shm") + if not cache then + ngx.log(ngx.ERR, err) + return + end + + local function cb() + return nil + end + + local val, err = cache:get("my_key", { neg_ttl = 19 }, cb) + if err then + ngx.log(ngx.ERR, err) + return + end + + local ttl, err = cache:peek("my_key") + if err then + ngx.log(ngx.ERR, err) + return + end + + ngx.say("ttl: ", math.ceil(ttl)) + + ngx.sleep(1) + + local ttl, err = cache:peek("my_key") + if err then + ngx.log(ngx.ERR, 
err) + return + end + + ngx.say("ttl: ", math.ceil(ttl)) + } + } +--- request +GET /t +--- response_body +ttl: 19 +ttl: 18 +--- no_error_log +[error] + + + +=== TEST 4: peek() returns a negative ttl when a key expired +--- http_config eval: $::HttpConfig +--- config + location = /t { + content_by_lua_block { + local mlcache = require "kong.resty.mlcache" + + local cache = assert(mlcache.new("my_mlcache", "cache_shm")) + + local function cb() + return nil + end + + local val, err = cache:get("my_key", { neg_ttl = 0 }, cb) + if err then + ngx.log(ngx.ERR, err) + return + end + + ngx.sleep(1) + + local ttl = assert(cache:peek("my_key")) + ngx.say("ttl: ", math.ceil(ttl)) + + ngx.sleep(1) + + local ttl = assert(cache:peek("my_key")) + ngx.say("ttl: ", math.ceil(ttl)) + } + } +--- request +GET /t +--- response_body +ttl: -1 +ttl: -2 +--- no_error_log +[error] + + + +=== TEST 5: peek() returns remaining ttl if shm_miss is specified +--- http_config eval: $::HttpConfig +--- config + location = /t { + content_by_lua_block { + local mlcache = require "kong.resty.mlcache" + + local cache = assert(mlcache.new("my_mlcache", "cache_shm", { + shm_miss = "cache_shm_miss", + })) + + local function cb() + return nil + end + + local val, err = cache:get("my_key", { neg_ttl = 19 }, cb) + if err then + ngx.log(ngx.ERR, err) + return + end + + local ttl, err = cache:peek("my_key") + if err then + ngx.log(ngx.ERR, err) + return + end + + ngx.say("ttl: ", math.ceil(ttl)) + + ngx.sleep(1) + + local ttl, err = cache:peek("my_key") + if err then + ngx.log(ngx.ERR, err) + return + end + + ngx.say("ttl: ", math.ceil(ttl)) + } + } +--- request +GET /t +--- response_body +ttl: 19 +ttl: 18 +--- no_error_log +[error] + + + +=== TEST 6: peek() returns the value if a key has been fetched before +--- http_config eval: $::HttpConfig +--- config + location = /t { + content_by_lua_block { + local mlcache = require "kong.resty.mlcache" + + local cache, err = mlcache.new("my_mlcache", "cache_shm") + if 
not cache then + ngx.log(ngx.ERR, err) + return + end + + local function cb_number() + return 123 + end + + local function cb_nil() + return nil + end + + local val, err = cache:get("my_key", nil, cb_number) + if err then + ngx.log(ngx.ERR, err) + return + end + + local val, err = cache:get("my_nil_key", nil, cb_nil) + if err then + ngx.log(ngx.ERR, err) + return + end + + local ttl, err, val = cache:peek("my_key") + if err then + ngx.log(ngx.ERR, err) + return + end + + ngx.say("ttl: ", math.ceil(ttl), " val: ", val) + + local ttl, err, val = cache:peek("my_nil_key") + if err then + ngx.log(ngx.ERR, err) + return + end + + ngx.say("ttl: ", math.ceil(ttl), " nil_val: ", val) + } + } +--- request +GET /t +--- response_body_like +ttl: \d* val: 123 +ttl: \d* nil_val: nil +--- no_error_log +[error] + + + +=== TEST 7: peek() returns the value if shm_miss is specified +--- http_config eval: $::HttpConfig +--- config + location = /t { + content_by_lua_block { + local mlcache = require "kong.resty.mlcache" + + local cache = assert(mlcache.new("my_mlcache", "cache_shm", { + shm_miss = "cache_shm_miss", + })) + + local function cb_nil() + return nil + end + + local val, err = cache:get("my_nil_key", nil, cb_nil) + if err then + ngx.log(ngx.ERR, err) + return + end + + local ttl, err, val = cache:peek("my_nil_key") + if err then + ngx.log(ngx.ERR, err) + return + end + + ngx.say("ttl: ", math.ceil(ttl), " nil_val: ", val) + } + } +--- request +GET /t +--- response_body_like +ttl: \d* nil_val: nil +--- no_error_log +[error] + + + +=== TEST 8: peek() JITs on hit +--- http_config eval: $::HttpConfig +--- config + location = /t { + content_by_lua_block { + local mlcache = require "kong.resty.mlcache" + + local cache = assert(mlcache.new("my_mlcache", "cache_shm")) + + local function cb() + return 123456 + end + + local val = assert(cache:get("key", nil, cb)) + ngx.say("val: ", val) + + for i = 1, 10e3 do + assert(cache:peek("key")) + end + } + } +--- request +GET /t +--- 
response_body +val: 123456 +--- no_error_log +[error] +--- error_log eval +qr/\[TRACE\s+\d+ content_by_lua\(nginx\.conf:\d+\):13 loop\]/ + + + +=== TEST 9: peek() JITs on miss +--- http_config eval: $::HttpConfig +--- config + location = /t { + content_by_lua_block { + local mlcache = require "kong.resty.mlcache" + + local cache = assert(mlcache.new("my_mlcache", "cache_shm")) + + for i = 1, 10e3 do + local ttl, err, val = cache:peek("key") + assert(err == nil) + assert(ttl == nil) + assert(val == nil) + end + } + } +--- request +GET /t +--- response_body + +--- no_error_log +[error] +--- error_log eval +qr/\[TRACE\s+\d+ content_by_lua\(nginx\.conf:\d+\):6 loop\]/ + + + +=== TEST 10: peek() returns nil if a value expired +--- http_config eval: $::HttpConfig +--- config + location = /t { + content_by_lua_block { + local mlcache = require "kong.resty.mlcache" + + local cache, err = mlcache.new("my_mlcache", "cache_shm") + if not cache then + ngx.log(ngx.ERR, err) + return + end + + assert(cache:get("my_key", { ttl = 0.3 }, function() + return 123 + end)) + + ngx.sleep(0.3) + + local ttl, err, data, stale = cache:peek("my_key") + if err then + ngx.log(ngx.ERR, err) + return + end + + ngx.say("ttl: ", ttl) + ngx.say("data: ", data) + ngx.say("stale: ", stale) + } + } +--- request +GET /t +--- response_body +ttl: nil +data: nil +stale: nil +--- no_error_log +[error] + + + +=== TEST 11: peek() returns nil if a value expired in 'shm_miss' +--- http_config eval: $::HttpConfig +--- config + location = /t { + content_by_lua_block { + local mlcache = require "kong.resty.mlcache" + + local cache, err = mlcache.new("my_mlcache", "cache_shm", { + shm_miss = "cache_shm_miss" + }) + if not cache then + ngx.log(ngx.ERR, err) + return + end + + local data, err = cache:get("my_key", { neg_ttl = 0.3 }, function() + return nil + end) + if err then + ngx.log(ngx.ERR, err) + return + end + + ngx.sleep(0.3) + + local ttl, err, data, stale = cache:peek("my_key") + if err then + 
ngx.log(ngx.ERR, err) + return + end + + ngx.say("ttl: ", ttl) + ngx.say("data: ", data) + ngx.say("stale: ", stale) + } + } +--- request +GET /t +--- response_body +ttl: nil +data: nil +stale: nil +--- no_error_log +[error] + + + +=== TEST 12: peek() accepts stale arg and returns stale values +--- http_config eval: $::HttpConfig +--- config + location = /t { + content_by_lua_block { + local mlcache = require "kong.resty.mlcache" + + local cache, err = mlcache.new("my_mlcache", "cache_shm") + if not cache then + ngx.log(ngx.ERR, err) + return + end + + assert(cache:get("my_key", { ttl = 0.3 }, function() + return 123 + end)) + + ngx.sleep(0.3) + + local ttl, err, data, stale = cache:peek("my_key", true) + if err then + ngx.log(ngx.ERR, err) + return + end + + ngx.say("ttl: ", ttl) + ngx.say("data: ", data) + ngx.say("stale: ", stale) + } + } +--- request +GET /t +--- response_body_like chomp +ttl: -0\.\d+ +data: 123 +stale: true +--- no_error_log +[error] + + + +=== TEST 13: peek() accepts stale arg and returns stale values from 'shm_miss' +--- http_config eval: $::HttpConfig +--- config + location = /t { + content_by_lua_block { + local mlcache = require "kong.resty.mlcache" + + local cache, err = mlcache.new("my_mlcache", "cache_shm", { + shm_miss = "cache_shm_miss" + }) + if not cache then + ngx.log(ngx.ERR, err) + return + end + + local data, err = cache:get("my_key", { neg_ttl = 0.3 }, function() + return nil + end) + if err then + ngx.log(ngx.ERR, err) + return + end + + ngx.sleep(0.3) + + local ttl, err, data, stale = cache:peek("my_key", true) + if err then + ngx.log(ngx.ERR, err) + return + end + + ngx.say("ttl: ", ttl) + ngx.say("data: ", data) + ngx.say("stale: ", stale) + } + } +--- request +GET /t +--- response_body_like chomp +ttl: -0\.\d+ +data: nil +stale: true +--- no_error_log +[error] + + + +=== TEST 14: peek() does not evict stale items from L2 shm +--- http_config eval: $::HttpConfig +--- config + location = /t { + content_by_lua_block { + 
local mlcache = require "kong.resty.mlcache" + local cache = assert(mlcache.new("my_mlcache", "cache_shm", { + ttl = 0.3, + })) + + local data, err = cache:get("key", nil, function() + return 123 + end) + if err then + ngx.log(ngx.ERR, err) + return + end + + ngx.sleep(0.3) + + for i = 1, 3 do + remaining_ttl, err, data = cache:peek("key", true) + if err then + ngx.log(ngx.ERR, err) + return + end + ngx.say("remaining_ttl: ", remaining_ttl) + ngx.say("data: ", data) + end + } + } +--- request +GET /t +--- response_body_like chomp +remaining_ttl: -\d\.\d+ +data: 123 +remaining_ttl: -\d\.\d+ +data: 123 +remaining_ttl: -\d\.\d+ +data: 123 +--- no_error_log +[error] + + + +=== TEST 15: peek() does not evict stale negative data from L2 shm_miss +--- http_config eval: $::HttpConfig +--- config + location = /t { + content_by_lua_block { + local mlcache = require "kong.resty.mlcache" + local cache = assert(mlcache.new("my_mlcache", "cache_shm", { + neg_ttl = 0.3, + shm_miss = "cache_shm_miss", + })) + + local data, err = cache:get("key", nil, function() + return nil + end) + if err then + ngx.log(ngx.ERR, err) + return + end + + ngx.sleep(0.3) + + for i = 1, 3 do + remaining_ttl, err, data = cache:peek("key", true) + if err then + ngx.log(ngx.ERR, err) + return + end + ngx.say("remaining_ttl: ", remaining_ttl) + ngx.say("data: ", data) + end + } + } +--- request +GET /t +--- response_body_like chomp +remaining_ttl: -\d\.\d+ +data: nil +remaining_ttl: -\d\.\d+ +data: nil +remaining_ttl: -\d\.\d+ +data: nil +--- no_error_log +[error] diff --git a/t/05-mlcache/04-update.t b/t/05-mlcache/04-update.t new file mode 100644 index 000000000000..b77c69f53f3a --- /dev/null +++ b/t/05-mlcache/04-update.t @@ -0,0 +1,117 @@ +# vim:set ts=4 sts=4 sw=4 et ft=: + +use Test::Nginx::Socket::Lua; +use Cwd qw(cwd); + +workers(2); + +#repeat_each(2); + +plan tests => repeat_each() * (blocks() * 3); + +my $pwd = cwd(); + +our $HttpConfig = qq{ + lua_package_path "$pwd/lib/?.lua;;"; + 
lua_shared_dict cache_shm 1m; + lua_shared_dict ipc_shm 1m; + + init_by_lua_block { + -- local verbose = true + local verbose = false + local outfile = "$Test::Nginx::Util::ErrLogFile" + -- local outfile = "/tmp/v.log" + if verbose then + local dump = require "jit.dump" + dump.on(nil, outfile) + else + local v = require "jit.v" + v.on(outfile) + end + + require "resty.core" + -- jit.opt.start("hotloop=1") + -- jit.opt.start("loopunroll=1000000") + -- jit.off() + } +}; + +run_tests(); + +__DATA__ + +=== TEST 1: update() errors if no ipc +--- http_config eval: $::HttpConfig +--- config + location = /t { + content_by_lua_block { + local mlcache = require "kong.resty.mlcache" + + local cache = assert(mlcache.new("my_mlcache", "cache_shm")) + + local ok, err = pcall(cache.update, cache, "foo") + ngx.say(err) + } + } +--- request +GET /t +--- response_body +no polling configured, specify opts.ipc_shm or opts.ipc.poll +--- no_error_log +[error] + + + +=== TEST 2: update() calls ipc poll() with timeout arg +--- http_config eval: $::HttpConfig +--- config + location = /t { + content_by_lua_block { + local mlcache = require "kong.resty.mlcache" + + local cache = assert(mlcache.new("my_mlcache", "cache_shm", { + ipc = { + register_listeners = function() end, + broadcast = function() end, + poll = function(...) + ngx.say("called poll() with args: ", ...) 
+ return true + end, + } + })) + + assert(cache:update(3.5, "not me")) + } + } +--- request +GET /t +--- response_body +called poll() with args: 3.5 +--- no_error_log +[error] + + + +=== TEST 3: update() JITs when no events to catch up +--- http_config eval: $::HttpConfig +--- config + location = /t { + content_by_lua_block { + local mlcache = require "kong.resty.mlcache" + + local cache = assert(mlcache.new("my_mlcache", "cache_shm", { + ipc_shm = "ipc_shm", + })) + + for i = 1, 10e3 do + assert(cache:update()) + end + } + } +--- request +GET /t +--- ignore_response_body +--- no_error_log +[error] +--- error_log eval +qr/\[TRACE\s+\d+ content_by_lua\(nginx\.conf:\d+\):8 loop\]/ diff --git a/t/05-mlcache/05-set.t b/t/05-mlcache/05-set.t new file mode 100644 index 000000000000..4fa5b2e1e7bf --- /dev/null +++ b/t/05-mlcache/05-set.t @@ -0,0 +1,624 @@ +# vim:set ts=4 sts=4 sw=4 et ft=: + +use Test::Nginx::Socket::Lua; +use Cwd qw(cwd); + +#repeat_each(2); + +plan tests => repeat_each() * (blocks() * 3) + 2; + +my $pwd = cwd(); + +our $HttpConfig = qq{ + lua_package_path "$pwd/lib/?.lua;;"; + lua_shared_dict cache_shm 1m; + lua_shared_dict cache_shm_miss 1m; + lua_shared_dict ipc_shm 1m; +}; + +run_tests(); + +__DATA__ + +=== TEST 1: set() errors if no ipc +--- http_config eval: $::HttpConfig +--- config + location = /t { + content_by_lua_block { + local mlcache = require "kong.resty.mlcache" + + local cache = assert(mlcache.new("my_mlcache", "cache_shm")) + + local ok, err = pcall(cache.set, cache, "foo") + ngx.say(err) + } + } +--- request +GET /t +--- response_body +no ipc to propagate update, specify opts.ipc_shm or opts.ipc +--- no_error_log +[error] + + + +=== TEST 2: set() validates key +--- http_config eval: $::HttpConfig +--- config + location = /t { + content_by_lua_block { + local mlcache = require "kong.resty.mlcache" + + local cache = assert(mlcache.new("my_mlcache", "cache_shm", { + ipc_shm = "ipc_shm", + })) + + local ok, err = pcall(cache.set, cache) + 
if not ok then + ngx.say(err) + end + } + } +--- request +GET /t +--- response_body +key must be a string +--- no_error_log +[error] + + + +=== TEST 3: set() puts a value directly in shm +--- http_config eval: $::HttpConfig +--- config + location = /t { + content_by_lua_block { + local mlcache = require "kong.resty.mlcache" + + local cache = assert(mlcache.new("my_mlcache", "cache_shm", { + ipc_shm = "ipc_shm", + })) + + -- setting a value in shm + + assert(cache:set("my_key", nil, 123)) + + -- declaring a callback that MUST NOT be called + + local function cb() + ngx.log(ngx.ERR, "callback was called but should not have") + end + + -- try to get() + + local value = assert(cache:get("my_key", nil, cb)) + + ngx.say("value from get(): ", value) + + -- value MUST BE in lru + + local value_lru = cache.lru:get("my_key") + + ngx.say("cache lru value after get(): ", value_lru) + } + } +--- request +GET /t +--- response_body +value from get(): 123 +cache lru value after get(): 123 +--- no_error_log +[error] + + + +=== TEST 4: set() puts a negative hit directly in shm_miss if specified +--- http_config eval: $::HttpConfig +--- config + location = /t { + content_by_lua_block { + local mlcache = require "kong.resty.mlcache" + + local cache = assert(mlcache.new("my_mlcache", "cache_shm", { + ipc_shm = "ipc_shm", + shm_miss = "cache_shm_miss", + })) + + -- setting a value in shm + + assert(cache:set("my_key", nil, nil)) + + -- declaring a callback that MUST NOT be called + + local function cb() + ngx.log(ngx.ERR, "callback was called but should not have") + end + + -- try to get() + + local value, err = cache:get("my_key", nil, cb) + if err then + ngx.log(ngx.ERR, err) + return + end + + ngx.say("value from get(): ", value) + } + } +--- request +GET /t +--- response_body +value from get(): nil +--- no_error_log +[error] + + + +=== TEST 5: set() puts a value directly in its own LRU +--- http_config eval: $::HttpConfig +--- config + location = /t { + content_by_lua_block { + 
local mlcache = require "kong.resty.mlcache" + + local cache = assert(mlcache.new("my_mlcache", "cache_shm", { + ipc_shm = "ipc_shm", + })) + + -- setting a value in shm + + assert(cache:set("my_key", nil, 123)) + + -- value MUST BE be in lru + + local value_lru = cache.lru:get("my_key") + + ngx.say("cache lru value after set(): ", value_lru) + } + } +--- request +GET /t +--- response_body +cache lru value after set(): 123 +--- no_error_log +[error] + + + +=== TEST 6: set() respects 'ttl' for non-nil values +--- http_config eval: $::HttpConfig +--- config + location = /t { + content_by_lua_block { + local mlcache = require "kong.resty.mlcache" + + local cache = assert(mlcache.new("my_mlcache", "cache_shm", { + ipc_shm = "ipc_shm", + })) + + -- setting a non-nil value in shm + + assert(cache:set("my_key", { + ttl = 0.2, + neg_ttl = 1, + }, 123)) + + -- declaring a callback that logs accesses + + local function cb() + ngx.say("callback called") + return 123 + end + + -- try to get() (callback MUST NOT be called) + + ngx.say("calling get()") + local value = assert(cache:get("my_key", nil, cb)) + ngx.say("value from get(): ", value) + + -- wait until expiry + + ngx.say("waiting until expiry...") + ngx.sleep(0.3) + ngx.say("waited 0.3s") + + -- try to get() (callback MUST be called) + + ngx.say("calling get()") + local value = assert(cache:get("my_key", nil, cb)) + ngx.say("value from get(): ", value) + } + } +--- request +GET /t +--- response_body +calling get() +value from get(): 123 +waiting until expiry... 
+waited 0.3s +calling get() +callback called +value from get(): 123 +--- no_error_log +[error] + + + +=== TEST 7: set() respects 'neg_ttl' for nil values +--- http_config eval: $::HttpConfig +--- config + location = /t { + content_by_lua_block { + local mlcache = require "kong.resty.mlcache" + + local cache = assert(mlcache.new("my_mlcache", "cache_shm", { + ipc_shm = "ipc_shm", + })) + + -- setting a nil value in shm + + assert(cache:set("my_key", { + ttl = 1, + neg_ttl = 0.2, + }, nil)) + + -- declaring a callback that logs accesses + + local function cb() + ngx.say("callback called") + return nil + end + + -- try to get() (callback MUST NOT be called) + + ngx.say("calling get()") + local value, err = cache:get("my_key", nil, cb) + if err then + ngx.log(ngx.ERR, err) + end + ngx.say("value from get(): ", value) + + -- wait until expiry + + ngx.say("waiting until expiry...") + ngx.sleep(0.3) + ngx.say("waited 0.3s") + + -- try to get() (callback MUST be called) + + ngx.say("calling get()") + local value, err = cache:get("my_key", nil, cb) + if err then + ngx.log(ngx.ERR, err) + end + ngx.say("value from get(): ", value) + } + } +--- request +GET /t +--- response_body +calling get() +value from get(): nil +waiting until expiry... 
+waited 0.3s +calling get() +callback called +value from get(): nil +--- no_error_log +[error] + + + +=== TEST 8: set() respects 'set_shm_tries' +--- http_config eval: $::HttpConfig +--- config + location = /t { + content_by_lua_block { + local dict = ngx.shared.cache_shm + dict:flush_all() + dict:flush_expired() + local mlcache = require "kong.resty.mlcache" + + -- fill up shm + + local idx = 0 + + while true do + local ok, err, forcible = dict:set(idx, string.rep("a", 2^2)) + if not ok then + ngx.log(ngx.ERR, err) + return + end + + if forcible then + break + end + + idx = idx + 1 + end + + -- shm:set() will evict up to 30 items when the shm is full + -- now, trigger a hit with a larger value which should trigger LRU + -- eviction and force the slab allocator to free pages + + local cache = assert(mlcache.new("my_mlcache", "cache_shm", { + ipc_shm = "ipc_shm", + })) + + local data, err = cache:set("key", { + shm_set_tries = 5, + }, string.rep("a", 2^12)) + if err then + ngx.log(ngx.ERR, err) + return + end + + -- from shm + + cache.lru:delete("key") + + local cb_called + local function cb() + cb_called = true + end + + local data, err = cache:get("key", nil, cb) + if err then + ngx.log(ngx.ERR, err) + return + end + + ngx.say("type of data in shm: ", type(data)) + ngx.say("callback was called: ", cb_called ~= nil) + } + } +--- request +GET /t +--- response_body +type of data in shm: string +callback was called: false +--- no_error_log +[warn] +[error] + + + +=== TEST 9: set() with shm_miss can set a nil where a value was +--- http_config eval: $::HttpConfig +--- config + location = /t { + content_by_lua_block { + local mlcache = require "kong.resty.mlcache" + + local cache = assert(mlcache.new("my_mlcache", "cache_shm", { + ipc_shm = "ipc_shm", + shm_miss = "cache_shm_miss", + })) + + local function cb() + return 123 + end + + -- install a non-nil value in the cache + + local value, err = cache:get("my_key", nil, cb) + if err then + ngx.log(ngx.ERR, err) + return 
+ end + + ngx.say("initial value from get(): ", value) + + -- override that value with a negative hit that + -- must go in the shm_miss (and the shm value must be + -- erased) + + assert(cache:set("my_key", nil, nil)) + + -- and remove it from the LRU + + cache.lru:delete("my_key") + + -- ok, now we should be getting nil from the cache + + local value, err = cache:get("my_key", nil, cb) + if err then + ngx.log(ngx.ERR, err) + return + end + + ngx.say("value from get() after set(): ", value) + } + } +--- request +GET /t +--- response_body +initial value from get(): 123 +value from get() after set(): nil +--- no_error_log +[error] + + + +=== TEST 10: set() with shm_miss can set a value where a nil was +--- http_config eval: $::HttpConfig +--- config + location = /t { + content_by_lua_block { + local mlcache = require "kong.resty.mlcache" + + local cache = assert(mlcache.new("my_mlcache", "cache_shm", { + ipc_shm = "ipc_shm", + shm_miss = "cache_shm_miss", + })) + + local function cb() + return nil + end + + -- install a non-nil value in the cache + + local value, err = cache:get("my_key", nil, cb) + if err then + ngx.log(ngx.ERR, err) + return + end + + ngx.say("initial value from get(): ", value) + + -- override that value with a negative hit that + -- must go in the shm_miss (and the shm value must be + -- erased) + + assert(cache:set("my_key", nil, 123)) + + -- and remove it from the LRU + + cache.lru:delete("my_key") + + -- ok, now we should be getting nil from the cache + + local value, err = cache:get("my_key", nil, cb) + if err then + ngx.log(ngx.ERR, err) + return + end + + ngx.say("value from get() after set(): ", value) + } + } +--- request +GET /t +--- response_body +initial value from get(): nil +value from get() after set(): 123 +--- no_error_log +[error] + + + +=== TEST 11: set() returns 'no memory' errors upon fragmentation in the shm +--- http_config eval: $::HttpConfig +--- config + location = /t { + content_by_lua_block { + local mlcache = require 
"kong.resty.mlcache" + + local cache = assert(mlcache.new("my_mlcache", "cache_shm", { + ipc_shm = "ipc_shm", + })) + + -- fill shm + + local idx = 0 + + while true do + local ok, err, forcible = ngx.shared.cache_shm:set(idx, true) + if not ok then + ngx.log(ngx.ERR, err) + return + end + + if forcible then + break + end + + idx = idx + 1 + end + + -- set large value + + local ok, err = cache:set("my_key", { shm_set_tries = 1 }, string.rep("a", 2^10)) + ngx.say(ok) + ngx.say(err) + } + } +--- request +GET /t +--- response_body +nil +could not write to lua_shared_dict 'cache_shm': no memory +--- no_error_log +[error] +[warn] + + + +=== TEST 12: set() does not set LRU upon shm insertion error +--- http_config eval: $::HttpConfig +--- config + location = /t { + content_by_lua_block { + local mlcache = require "kong.resty.mlcache" + + local cache = assert(mlcache.new("my_mlcache", "cache_shm", { + ipc_shm = "ipc_shm", + })) + + -- fill shm + + local idx = 0 + + while true do + local ok, err, forcible = ngx.shared.cache_shm:set(idx, true) + if not ok then + ngx.log(ngx.ERR, err) + return + end + + if forcible then + break + end + + idx = idx + 1 + end + + -- set large value + + local ok = cache:set("my_key", { shm_set_tries = 1 }, string.rep("a", 2^10)) + assert(ok == nil) + + local data = cache.lru:get("my_key") + ngx.say(data) + } + } +--- request +GET /t +--- response_body +nil +--- no_error_log +[error] + + + +=== TEST 13: set() calls broadcast() with invalidated key +--- http_config eval: $::HttpConfig +--- config + location = /t { + content_by_lua_block { + local mlcache = require "kong.resty.mlcache" + + local cache = assert(mlcache.new("my_mlcache", "cache_shm", { + ipc = { + register_listeners = function() end, + broadcast = function(channel, data, ...) + ngx.say("channel: ", channel) + ngx.say("data: ", data) + ngx.say("other args:", ...) 
+ return true + end, + poll = function() end, + } + })) + + assert(cache:set("my_key", nil, nil)) + } + } +--- request +GET /t +--- response_body +channel: mlcache:invalidations:my_mlcache +data: my_key +other args: +--- no_error_log +[error] diff --git a/t/05-mlcache/06-delete.t b/t/05-mlcache/06-delete.t new file mode 100644 index 000000000000..9eb65b152fd1 --- /dev/null +++ b/t/05-mlcache/06-delete.t @@ -0,0 +1,252 @@ +# vim:set ts=4 sts=4 sw=4 et ft=: + +use Test::Nginx::Socket::Lua; +use Cwd qw(cwd); + +workers(2); + +#repeat_each(2); + +plan tests => repeat_each() * (blocks() * 3); + +my $pwd = cwd(); + +our $HttpConfig = qq{ + lua_package_path "$pwd/lib/?.lua;;"; + lua_shared_dict cache_shm 1m; + lua_shared_dict cache_shm_miss 1m; + lua_shared_dict ipc_shm 1m; +}; + +run_tests(); + +__DATA__ + +=== TEST 1: delete() errors if no ipc +--- http_config eval: $::HttpConfig +--- config + location = /t { + content_by_lua_block { + local mlcache = require "kong.resty.mlcache" + + local cache = assert(mlcache.new("my_mlcache", "cache_shm")) + + local ok, err = pcall(cache.delete, cache, "foo") + ngx.say(err) + } + } +--- request +GET /t +--- response_body +no ipc to propagate deletion, specify opts.ipc_shm or opts.ipc +--- no_error_log +[error] + + + +=== TEST 2: delete() validates key +--- http_config eval: $::HttpConfig +--- config + location = /t { + content_by_lua_block { + local mlcache = require "kong.resty.mlcache" + + local cache = assert(mlcache.new("my_mlcache", "cache_shm", { + ipc_shm = "ipc_shm", + })) + + local ok, err = pcall(cache.delete, cache, 123) + ngx.say(err) + } + } +--- request +GET /t +--- response_body +key must be a string +--- no_error_log +[error] + + + +=== TEST 3: delete() removes a cached value from LRU + shm +--- http_config eval: $::HttpConfig +--- config + location = /t { + content_by_lua_block { + local mlcache = require "kong.resty.mlcache" + + local cache = assert(mlcache.new("my_mlcache", "cache_shm", { + ipc_shm = "ipc_shm", + 
})) + + local value = 123 + + local function cb() + ngx.say("in callback") + return value + end + + -- set a value (callback call) + + local data = assert(cache:get("key", nil, cb)) + ngx.say("from callback: ", data) + + -- get a value (no callback call) + + data = assert(cache:get("key", nil, cb)) + ngx.say("from LRU: ", data) + + -- test if value is set from shm (safer to check due to the key) + + local v = ngx.shared.cache_shm:get(cache.name .. "key") + ngx.say("shm has value before delete: ", v ~= nil) + + -- delete the value + + assert(cache:delete("key")) + + local v = ngx.shared.cache_shm:get(cache.name .. "key") + ngx.say("shm has value after delete: ", v ~= nil) + + -- ensure LRU was also deleted + + v = cache.lru:get("key") + ngx.say("from LRU: ", v) + + -- start over from callback again + + value = 456 + + data = assert(cache:get("key", nil, cb)) + ngx.say("from callback: ", data) + } + } +--- request +GET /t +--- response_body +in callback +from callback: 123 +from LRU: 123 +shm has value before delete: true +shm has value after delete: false +from LRU: nil +in callback +from callback: 456 +--- no_error_log +[error] + + + +=== TEST 4: delete() removes a cached nil from shm_miss if specified +--- http_config eval: $::HttpConfig +--- config + location = /t { + content_by_lua_block { + local mlcache = require "kong.resty.mlcache" + + local cache = assert(mlcache.new("my_mlcache", "cache_shm", { + ipc_shm = "ipc_shm", + shm_miss = "cache_shm_miss", + })) + + local value = nil + + local function cb() + ngx.say("in callback") + return value + end + + -- set a value (callback call) + + local data, err = cache:get("key", nil, cb) + if err then + ngx.log(ngx.ERR, err) + return + end + ngx.say("from callback: ", data) + + -- get a value (no callback call) + + data, err = cache:get("key", nil, cb) + if err then + ngx.log(ngx.ERR, err) + return + end + ngx.say("from LRU: ", data) + + -- test if value is set from shm (safer to check due to the key) + + local v = 
ngx.shared.cache_shm_miss:get(cache.name .. "key") + ngx.say("shm_miss has value before delete: ", v ~= nil) + + -- delete the value + + assert(cache:delete("key")) + + local v = ngx.shared.cache_shm_miss:get(cache.name .. "key") + ngx.say("shm_miss has value after delete: ", v ~= nil) + + -- ensure LRU was also deleted + + v = cache.lru:get("key") + ngx.say("from LRU: ", v) + + -- start over from callback again + + value = 456 + + data, err = cache:get("key", nil, cb) + if err then + ngx.log(ngx.ERR, err) + return + end + ngx.say("from callback again: ", data) + } + } +--- request +GET /t +--- response_body +in callback +from callback: nil +from LRU: nil +shm_miss has value before delete: true +shm_miss has value after delete: false +from LRU: nil +in callback +from callback again: 456 +--- no_error_log +[error] + + + +=== TEST 5: delete() calls broadcast with invalidated key +--- http_config eval: $::HttpConfig +--- config + location = /t { + content_by_lua_block { + local mlcache = require "kong.resty.mlcache" + + local cache = assert(mlcache.new("my_mlcache", "cache_shm", { + ipc = { + register_listeners = function() end, + broadcast = function(channel, data, ...) + ngx.say("channel: ", channel) + ngx.say("data: ", data) + ngx.say("other args:", ...) 
+ return true + end, + poll = function() end, + } + })) + + assert(cache:delete("my_key")) + } + } +--- request +GET /t +--- response_body +channel: mlcache:invalidations:my_mlcache +data: my_key +other args: +--- no_error_log +[error] diff --git a/t/05-mlcache/07-l1_serializer.t b/t/05-mlcache/07-l1_serializer.t new file mode 100644 index 000000000000..74ec9c467f8e --- /dev/null +++ b/t/05-mlcache/07-l1_serializer.t @@ -0,0 +1,741 @@ +# vim:set ts=4 sts=4 sw=4 et ft=: + +use Test::Nginx::Socket::Lua; +use Cwd qw(cwd); + +workers(2); + +#repeat_each(2); + +plan tests => repeat_each() * (blocks() * 3) + 1; + +my $pwd = cwd(); + +our $HttpConfig = qq{ + lua_package_path "$pwd/lib/?.lua;;"; + lua_shared_dict cache_shm 1m; + lua_shared_dict ipc_shm 1m; + + init_by_lua_block { + -- local verbose = true + local verbose = false + local outfile = "$Test::Nginx::Util::ErrLogFile" + -- local outfile = "/tmp/v.log" + if verbose then + local dump = require "jit.dump" + dump.on(nil, outfile) + else + local v = require "jit.v" + v.on(outfile) + end + + require "resty.core" + -- jit.opt.start("hotloop=1") + -- jit.opt.start("loopunroll=1000000") + -- jit.off() + } +}; + +run_tests(); + +__DATA__ + +=== TEST 1: l1_serializer is validated by the constructor +--- http_config eval: $::HttpConfig +--- config + location = /t { + content_by_lua_block { + local mlcache = require "kong.resty.mlcache" + + local ok, err = pcall(mlcache.new, "my_mlcache", "cache_shm", { + l1_serializer = false, + }) + if not ok then + ngx.say(err) + end + } + } +--- request +GET /t +--- response_body +opts.l1_serializer must be a function +--- no_error_log +[error] + + + +=== TEST 2: l1_serializer is called on L1+L2 cache misses +--- http_config eval: $::HttpConfig +--- config + location = /t { + content_by_lua_block { + local mlcache = require "kong.resty.mlcache" + + local cache, err = mlcache.new("my_mlcache", "cache_shm", { + l1_serializer = function(s) + return string.format("transform(%q)", s) + end, + 
}) + if not cache then + ngx.log(ngx.ERR, err) + return + end + + local data, err = cache:get("key", nil, function() return "foo" end) + if not data then + ngx.log(ngx.ERR, err) + return + end + + ngx.say(data) + } + } +--- request +GET /t +--- response_body +transform("foo") +--- no_error_log +[error] + + + +=== TEST 3: get() JITs when hit of scalar value coming from shm with l1_serializer +--- http_config eval: $::HttpConfig +--- config + location = /t { + content_by_lua_block { + local mlcache = require "kong.resty.mlcache" + + local cache, err = mlcache.new("my_mlcache", "cache_shm", { + l1_serializer = function(i) + return i + 2 + end, + }) + if not cache then + ngx.log(ngx.ERR, err) + return + end + + local function cb_number() + return 123456 + end + + for i = 1, 10e2 do + local data = assert(cache:get("number", nil, cb_number)) + assert(data == 123458) + + cache.lru:delete("number") + end + } + } +--- request +GET /t +--- response_body + +--- error_log eval +qr/\[TRACE\s+\d+ content_by_lua\(nginx\.conf:\d+\):18 loop\]/ +--- no_error_log +[error] + + + +=== TEST 4: l1_serializer is not called on L1 hits +--- http_config eval: $::HttpConfig +--- config + location = /t { + content_by_lua_block { + local mlcache = require "kong.resty.mlcache" + + local calls = 0 + local cache, err = mlcache.new("my_mlcache", "cache_shm", { + l1_serializer = function(s) + calls = calls + 1 + return string.format("transform(%q)", s) + end, + }) + if not cache then + ngx.log(ngx.ERR, err) + return + end + + for i = 1, 3 do + local data, err = cache:get("key", nil, function() return "foo" end) + if not data then + ngx.log(ngx.ERR, err) + return + end + + ngx.say(data) + end + + ngx.say("calls: ", calls) + } + } +--- request +GET /t +--- response_body +transform("foo") +transform("foo") +transform("foo") +calls: 1 +--- no_error_log +[error] + + + +=== TEST 5: l1_serializer is called on each L2 hit +--- http_config eval: $::HttpConfig +--- config + location = /t { + 
content_by_lua_block { + local mlcache = require "kong.resty.mlcache" + + local calls = 0 + local cache, err = mlcache.new("my_mlcache", "cache_shm", { + l1_serializer = function(s) + calls = calls + 1 + return string.format("transform(%q)", s) + end, + }) + if not cache then + ngx.log(ngx.ERR, err) + return + end + + for i = 1, 3 do + local data, err = cache:get("key", nil, function() return "foo" end) + if not data then + ngx.log(ngx.ERR, err) + return + end + + ngx.say(data) + cache.lru:delete("key") + end + + ngx.say("calls: ", calls) + } + } +--- request +GET /t +--- response_body +transform("foo") +transform("foo") +transform("foo") +calls: 3 +--- no_error_log +[error] + + + +=== TEST 6: l1_serializer is called on boolean false hits +--- http_config eval: $::HttpConfig +--- config + location = /t { + content_by_lua_block { + local mlcache = require "kong.resty.mlcache" + + local cache, err = mlcache.new("my_mlcache", "cache_shm", { + l1_serializer = function(s) + return string.format("transform_boolean(%q)", s) + end, + }) + if not cache then + ngx.log(ngx.ERR, err) + return + end + + local function cb() + return false + end + + local data, err = cache:get("key", nil, cb) + if not data then + ngx.log(ngx.ERR, err) + return + end + + ngx.say(data) + } + } +--- request +GET /t +--- response_body +transform_boolean("false") +--- no_error_log +[error] + + + +=== TEST 7: l1_serializer is called in protected mode (L2 miss) +--- http_config eval: $::HttpConfig +--- config + location = /t { + content_by_lua_block { + local mlcache = require "kong.resty.mlcache" + + local cache, err = mlcache.new("my_mlcache", "cache_shm", { + l1_serializer = function(s) + error("cannot transform") + end, + }) + if not cache then + ngx.log(ngx.ERR, err) + return + end + + local data, err = cache:get("key", nil, function() return "foo" end) + if not data then + ngx.say(err) + end + + ngx.say(data) + } + } +--- request +GET /t +--- response_body_like +l1_serializer threw an error: .*?: 
cannot transform +--- no_error_log +[error] + + + +=== TEST 8: l1_serializer is called in protected mode (L2 hit) +--- http_config eval: $::HttpConfig +--- config + location = /t { + content_by_lua_block { + local mlcache = require "kong.resty.mlcache" + + local called = false + local cache, err = mlcache.new("my_mlcache", "cache_shm", { + l1_serializer = function(s) + if called then error("cannot transform") end + called = true + return string.format("transform(%q)", s) + end, + }) + if not cache then + ngx.log(ngx.ERR, err) + return + end + + assert(cache:get("key", nil, function() return "foo" end)) + cache.lru:delete("key") + + local data, err = cache:get("key", nil, function() return "foo" end) + if not data then + ngx.say(err) + end + + ngx.say(data) + } + } +--- request +GET /t +--- response_body_like +l1_serializer threw an error: .*?: cannot transform +--- no_error_log +[error] + + + +=== TEST 9: l1_serializer is not called for L2+L3 misses (no record) +--- http_config eval: $::HttpConfig +--- config + location = /t { + content_by_lua_block { + local mlcache = require "kong.resty.mlcache" + + local called = false + local cache, err = mlcache.new("my_mlcache", "cache_shm", { + l1_serializer = function(s) + called = true + return string.format("transform(%s)", s) + end, + }) + if not cache then + ngx.log(ngx.ERR, err) + return + end + + local data, err = cache:get("key", nil, function() return nil end) + if data ~= nil then + ngx.log(ngx.ERR, "got a value for a L3 miss: ", tostring(data)) + return + elseif err ~= nil then + ngx.log(ngx.ERR, "got an error for a L3 miss: ", tostring(err)) + return + end + + -- our L3 returned nil, we do not call the l1_serializer and + -- we store the LRU nil sentinel value + + ngx.say("l1_serializer called for L3 miss: ", called) + + -- delete from LRU, and try from L2 again + + cache.lru:delete("key") + + local data, err = cache:get("key", nil, function() error("not supposed to call") end) + if data ~= nil then + 
ngx.log(ngx.ERR, "got a value for a L3 miss: ", tostring(data)) + return + elseif err ~= nil then + ngx.log(ngx.ERR, "got an error for a L3 miss: ", tostring(err)) + return + end + + ngx.say("l1_serializer called for L2 negative hit: ", called) + } + } +--- request +GET /t +--- response_body +l1_serializer called for L3 miss: false +l1_serializer called for L2 negative hit: false +--- no_error_log +[error] + + + +=== TEST 10: l1_serializer is not supposed to return a nil value +--- http_config eval: $::HttpConfig +--- config + location = /t { + content_by_lua_block { + local mlcache = require "kong.resty.mlcache" + + local cache, err = mlcache.new("my_mlcache", "cache_shm", { + l1_serializer = function(s) + return nil + end, + }) + if not cache then + ngx.log(ngx.ERR, err) + return + end + + local ok, err = cache:get("key", nil, function() return "foo" end) + assert(not ok, "get() should not return successfully") + ngx.say(err) + } + } +--- request +GET /t +--- response_body_like +l1_serializer returned a nil value +--- no_error_log +[error] + + + +=== TEST 11: l1_serializer can return nil + error +--- http_config eval: $::HttpConfig +--- config + location = /t { + content_by_lua_block { + local mlcache = require "kong.resty.mlcache" + + local cache, err = mlcache.new("my_mlcache", "cache_shm", { + l1_serializer = function(s) + return nil, "l1_serializer: cannot transform" + end, + }) + if not cache then + ngx.log(ngx.ERR, err) + return + end + + local data, err = cache:get("key", nil, function() return "foo" end) + if not data then + ngx.say(err) + end + + ngx.say("data: ", data) + } + } +--- request +GET /t +--- response_body +l1_serializer: cannot transform +data: nil +--- no_error_log +[error] + + + +=== TEST 12: l1_serializer can be given as a get() argument +--- http_config eval: $::HttpConfig +--- config + location = /t { + content_by_lua_block { + local mlcache = require "kong.resty.mlcache" + + local cache, err = mlcache.new("my_mlcache", "cache_shm") + if 
not cache then + ngx.log(ngx.ERR, err) + return + end + + local data, err = cache:get("key", { + l1_serializer = function(s) + return string.format("transform(%q)", s) + end + }, function() return "foo" end) + if not data then + ngx.log(ngx.ERR, err) + return + end + + ngx.say(data) + } + } +--- request +GET /t +--- response_body +transform("foo") +--- no_error_log +[error] + + + +=== TEST 13: l1_serializer as get() argument has precedence over the constructor one +--- http_config eval: $::HttpConfig +--- config + location = /t { + content_by_lua_block { + local mlcache = require "kong.resty.mlcache" + + local cache, err = mlcache.new("my_mlcache", "cache_shm", { + l1_serializer = function(s) + return string.format("constructor(%q)", s) + end + }) + + if not cache then + ngx.log(ngx.ERR, err) + return + end + + local data, err = cache:get("key1", { + l1_serializer = function(s) + return string.format("get_argument(%q)", s) + end + }, function() return "foo" end) + if not data then + ngx.log(ngx.ERR, err) + return + end + + ngx.say(data) + + local data, err = cache:get("key2", nil, function() return "bar" end) + if not data then + ngx.log(ngx.ERR, err) + return + end + + ngx.say(data) + } + } +--- request +GET /t +--- response_body +get_argument("foo") +constructor("bar") +--- no_error_log +[error] + + + +=== TEST 14: get() validates l1_serializer is a function +--- http_config eval: $::HttpConfig +--- config + location = /t { + content_by_lua_block { + local mlcache = require "kong.resty.mlcache" + + local cache, err = mlcache.new("my_mlcache", "cache_shm") + if not cache then + ngx.log(ngx.ERR, err) + return + end + + local ok, err = pcall(cache.get, cache, "key", { + l1_serializer = false, + }, function() return "foo" end) + if not data then + ngx.say(err) + end + } + } +--- request +GET /t +--- response_body +opts.l1_serializer must be a function +--- no_error_log +[error] + + + +=== TEST 15: set() calls l1_serializer +--- http_config eval: $::HttpConfig +--- 
config + location = /t { + content_by_lua_block { + local mlcache = require "kong.resty.mlcache" + + local cache, err = mlcache.new("my_mlcache", "cache_shm", { + ipc_shm = "ipc_shm", + l1_serializer = function(s) + return string.format("transform(%q)", s) + end + }) + if not cache then + ngx.log(ngx.ERR, err) + return + end + + local ok, err = cache:set("key", nil, "value") + if not ok then + ngx.log(ngx.ERR, err) + return + end + + local value, err = cache:get("key", nil, error) + if not value then + ngx.log(ngx.ERR, err) + return + end + + ngx.say(value) + } + } +--- request +GET /t +--- response_body +transform("value") +--- no_error_log +[error] + + + +=== TEST 16: set() calls l1_serializer for boolean false values +--- http_config eval: $::HttpConfig +--- config + location = /t { + content_by_lua_block { + local mlcache = require "kong.resty.mlcache" + + local cache, err = mlcache.new("my_mlcache", "cache_shm", { + ipc_shm = "ipc_shm", + l1_serializer = function(s) + return string.format("transform_boolean(%q)", s) + end + }) + if not cache then + ngx.log(ngx.ERR, err) + return + end + + local ok, err = cache:set("key", nil, false) + if not ok then + ngx.log(ngx.ERR, err) + return + end + + local value, err = cache:get("key", nil, error) + if not value then + ngx.log(ngx.ERR, err) + return + end + + ngx.say(value) + } + } +--- request +GET /t +--- response_body +transform_boolean("false") +--- no_error_log +[error] + + + +=== TEST 17: l1_serializer as set() argument has precedence over the constructor one +--- http_config eval: $::HttpConfig +--- config + location = /t { + content_by_lua_block { + local mlcache = require "kong.resty.mlcache" + + local cache, err = mlcache.new("my_mlcache", "cache_shm", { + ipc_shm = "ipc_shm", + l1_serializer = function(s) + return string.format("constructor(%q)", s) + end + }) + if not cache then + ngx.log(ngx.ERR, err) + return + end + + local ok, err = cache:set("key", { + l1_serializer = function(s) + return 
string.format("set_argument(%q)", s) + end + }, "value") + if not ok then + ngx.log(ngx.ERR, err) + return + end + + local value, err = cache:get("key", nil, error) + if not value then + ngx.log(ngx.ERR, err) + return + end + + ngx.say(value) + } + } +--- request +GET /t +--- response_body +set_argument("value") +--- no_error_log +[error] + + + +=== TEST 18: set() validates l1_serializer is a function +--- http_config eval: $::HttpConfig +--- config + location = /t { + content_by_lua_block { + local mlcache = require "kong.resty.mlcache" + + local cache, err = mlcache.new("my_mlcache", "cache_shm", { + ipc_shm = "ipc_shm", + }) + if not cache then + ngx.log(ngx.ERR, err) + return + end + + local ok, err = pcall(cache.set, cache, "key", { + l1_serializer = true + }, "value") + if not data then + ngx.say(err) + end + } + } +--- request +GET /t +--- response_body +opts.l1_serializer must be a function +--- no_error_log +[error] diff --git a/t/05-mlcache/08-purge.t b/t/05-mlcache/08-purge.t new file mode 100644 index 000000000000..c8f8eca72d9a --- /dev/null +++ b/t/05-mlcache/08-purge.t @@ -0,0 +1,402 @@ +# vim:set ts=4 sts=4 sw=4 et ft=: + +use Test::Nginx::Socket::Lua; +use Cwd qw(cwd); +use lib '.'; +use t::Util; + +#repeat_each(2); + +plan tests => repeat_each() * (blocks() * 3); + +my $pwd = cwd(); + +our $HttpConfig = qq{ + lua_package_path "$pwd/lib/?.lua;;"; + lua_shared_dict cache_shm 1m; + lua_shared_dict cache_shm_miss 1m; + lua_shared_dict ipc_shm 1m; +}; + +run_tests(); + +__DATA__ + +=== TEST 1: purge() errors if no ipc +--- http_config eval: $::HttpConfig +--- config + location = /t { + content_by_lua_block { + local mlcache = require "kong.resty.mlcache" + + local cache = assert(mlcache.new("my_mlcache", "cache_shm")) + + local ok, err = pcall(cache.purge, cache) + ngx.say(err) + } + } +--- request +GET /t +--- response_body +no ipc to propagate purge, specify opts.ipc_shm or opts.ipc +--- no_error_log +[error] + + + +=== TEST 2: purge() deletes all 
items from L1 + L2 (sanity 1/2) +--- http_config eval: $::HttpConfig +--- config + location = /t { + content_by_lua_block { + local mlcache = require "kong.resty.mlcache" + + local cache = assert(mlcache.new("my_mlcache", "cache_shm", { + ipc_shm = "ipc_shm", + })) + + -- populate mlcache + + for i = 1, 100 do + assert(cache:get(tostring(i), nil, function() return i end)) + end + + -- purge + + assert(cache:purge()) + + for i = 1, 100 do + local value, err = cache:get(tostring(i), nil, function() return nil end) + if err then + ngx.log(ngx.ERR, err) + return + end + + if value ~= nil then + ngx.say("key ", i, " had: ", value) + end + end + + ngx.say("ok") + } + } +--- request +GET /t +--- response_body +ok +--- no_error_log +[error] + + + +=== TEST 3: purge() deletes all items from L1 (sanity 2/2) +--- http_config eval: $::HttpConfig +--- config + location = /t { + content_by_lua_block { + local mlcache = require "kong.resty.mlcache" + + local cache = assert(mlcache.new("my_mlcache", "cache_shm", { + ipc_shm = "ipc_shm", + })) + + -- populate mlcache + + for i = 1, 100 do + assert(cache:get(tostring(i), nil, function() return i end)) + end + + -- purge + + assert(cache:purge()) + + for i = 1, 100 do + local value = cache.lru:get(tostring(i)) + + if value ~= nil then + ngx.say("key ", i, " had: ", value) + end + end + + ngx.say("ok") + } + } +--- request +GET /t +--- response_body +ok +--- no_error_log +[error] + + + +=== TEST 4: purge() deletes all items from L1 with a custom LRU +--- skip_eval: 3: t::Util::skip_openresty('<', '1.13.6.2') +--- http_config eval: $::HttpConfig +--- config + location = /t { + content_by_lua_block { + local mlcache = require "kong.resty.mlcache" + local lrucache = require "resty.lrucache" + + local lru = lrucache.new(100) + + local cache = assert(mlcache.new("my_mlcache", "cache_shm", { + ipc_shm = "ipc_shm", + lru = lru, + })) + + -- populate mlcache + + for i = 1, 100 do + assert(cache:get(tostring(i), nil, function() return i end)) 
+ end + + -- purge + + assert(cache:purge()) + + for i = 1, 100 do + local value = cache.lru:get(tostring(i)) + + if value ~= nil then + ngx.say("key ", i, " had: ", value) + end + end + + ngx.say("ok") + ngx.say("lru instance is the same one: ", lru == cache.lru) + } + } +--- request +GET /t +--- response_body +ok +lru instance is the same one: true +--- no_error_log +[error] + + + +=== TEST 5: purge() is prevented if custom LRU does not support flush_all() +--- skip_eval: 3: t::Util::skip_openresty('>', '1.13.6.1') +--- http_config eval: $::HttpConfig +--- config + location = /t { + content_by_lua_block { + local mlcache = require "kong.resty.mlcache" + local lrucache = require "resty.lrucache" + + local cache = assert(mlcache.new("my_mlcache", "cache_shm", { + ipc_shm = "ipc_shm", + lru = lrucache.new(10), + })) + + local pok, perr = pcall(cache.purge, cache) + if not pok then + ngx.say(perr) + return + end + + ngx.say("ok") + } + } +--- request +GET /t +--- response_body +cannot purge when using custom LRU cache with OpenResty < 1.13.6.2 +--- no_error_log +[error] + + + +=== TEST 6: purge() deletes all items from shm_miss is specified +--- http_config eval: $::HttpConfig +--- config + location = /t { + content_by_lua_block { + local mlcache = require "kong.resty.mlcache" + + local cache = assert(mlcache.new("my_mlcache", "cache_shm", { + ipc_shm = "ipc_shm", + shm_miss = "cache_shm_miss", + })) + + -- populate mlcache + + for i = 1, 100 do + local _, err = cache:get(tostring(i), nil, function() return nil end) + if err then + ngx.log(ngx.ERR, err) + return + end + end + + -- purge + + assert(cache:purge()) + + local called = 0 + + for i = 1, 100 do + local value, err = cache:get(tostring(i), nil, function() return i end) + + if value ~= i then + ngx.say("key ", i, " had: ", value) + end + end + + ngx.say("ok") + } + } +--- request +GET /t +--- response_body +ok +--- no_error_log +[error] + + + +=== TEST 7: purge() does not call shm:flush_expired() by default 
+--- http_config eval: $::HttpConfig +--- config + location = /t { + content_by_lua_block { + do + local cache_shm = ngx.shared.cache_shm + local mt = getmetatable(cache_shm) + local orig_cache_shm_flush_expired = mt.flush_expired + + mt.flush_expired = function(self, ...) + ngx.say("flush_expired called with 'max_count'") + + return orig_cache_shm_flush_expired(self, ...) + end + end + + local mlcache = require "kong.resty.mlcache" + + local cache = assert(mlcache.new("my_mlcache", "cache_shm", { + ipc_shm = "ipc_shm", + })) + + assert(cache:purge()) + } + } +--- request +GET /t +--- response_body_unlike +flush_expired called with 'max_count' +--- no_error_log +[error] + + + +=== TEST 8: purge() calls shm:flush_expired() if argument specified +--- http_config eval: $::HttpConfig +--- config + location = /t { + content_by_lua_block { + do + local cache_shm = ngx.shared.cache_shm + local mt = getmetatable(cache_shm) + local orig_cache_shm_flush_expired = mt.flush_expired + + mt.flush_expired = function(self, ...) + local arg = { ... } + local n = arg[1] + ngx.say("flush_expired called with 'max_count': ", n) + + return orig_cache_shm_flush_expired(self, ...) + end + end + + local mlcache = require "kong.resty.mlcache" + + local cache = assert(mlcache.new("my_mlcache", "cache_shm", { + ipc_shm = "ipc_shm", + })) + + assert(cache:purge(true)) + } + } +--- request +GET /t +--- response_body +flush_expired called with 'max_count': nil +--- no_error_log +[error] + + + +=== TEST 9: purge() calls shm:flush_expired() if shm_miss is specified +--- http_config eval: $::HttpConfig +--- config + location = /t { + content_by_lua_block { + do + local cache_shm = ngx.shared.cache_shm + local mt = getmetatable(cache_shm) + local orig_cache_shm_flush_expired = mt.flush_expired + + mt.flush_expired = function(self, ...) + local arg = { ... } + local n = arg[1] + ngx.say("flush_expired called with 'max_count': ", n) + + return orig_cache_shm_flush_expired(self, ...) 
+ end + end + + local mlcache = require "kong.resty.mlcache" + + local cache = assert(mlcache.new("my_mlcache", "cache_shm", { + ipc_shm = "ipc_shm", + shm_miss = "cache_shm_miss", + })) + + assert(cache:purge(true)) + } + } +--- request +GET /t +--- response_body +flush_expired called with 'max_count': nil +flush_expired called with 'max_count': nil +--- no_error_log +[error] + + + +=== TEST 10: purge() calls broadcast() on purge channel +--- http_config eval: $::HttpConfig +--- config + location = /t { + content_by_lua_block { + local mlcache = require "kong.resty.mlcache" + + local cache = assert(mlcache.new("my_mlcache", "cache_shm", { + ipc = { + register_listeners = function() end, + broadcast = function(channel, data, ...) + ngx.say("channel: ", channel) + ngx.say("data:", data) + ngx.say("other args:", ...) + return true + end, + poll = function() end, + } + })) + + assert(cache:purge()) + } + } +--- request +GET /t +--- response_body +channel: mlcache:purge:my_mlcache +data: +other args: +--- no_error_log +[error] diff --git a/t/05-mlcache/09-isolation.t b/t/05-mlcache/09-isolation.t new file mode 100644 index 000000000000..eadfa86272fb --- /dev/null +++ b/t/05-mlcache/09-isolation.t @@ -0,0 +1,375 @@ +# vim:set ts=4 sts=4 sw=4 et ft=: + +use Test::Nginx::Socket::Lua; +use Cwd qw(cwd); + +repeat_each(2); + +plan tests => repeat_each() * (blocks() * 3); + +my $pwd = cwd(); + +our $HttpConfig = qq{ + lua_package_path "$pwd/lib/?.lua;;"; + lua_shared_dict cache_shm 1m; + lua_shared_dict ipc_shm 1m; +}; + +run_tests(); + +__DATA__ + +=== TEST 1: multiple instances with the same name have same lua-resty-lru instance +--- http_config eval: $::HttpConfig +--- config + location = /t { + content_by_lua_block { + local mlcache = require "kong.resty.mlcache" + + local cache_1 = assert(mlcache.new("my_mlcache", "cache_shm")) + local cache_2 = assert(mlcache.new("my_mlcache", "cache_shm")) + + ngx.say("lua-resty-lru instances are the same: ", + cache_1.lru == 
cache_2.lru) + } + } +--- request +GET /t +--- response_body +lua-resty-lru instances are the same: true +--- no_error_log +[error] + + + +=== TEST 2: multiple instances with different names have different lua-resty-lru instances +--- http_config eval: $::HttpConfig +--- config + location = /t { + content_by_lua_block { + local mlcache = require "kong.resty.mlcache" + + local cache_1 = assert(mlcache.new("my_mlcache_1", "cache_shm")) + local cache_2 = assert(mlcache.new("my_mlcache_2", "cache_shm")) + + ngx.say("lua-resty-lru instances are the same: ", + cache_1.lru == cache_2.lru) + } + } +--- request +GET /t +--- response_body +lua-resty-lru instances are the same: false +--- no_error_log +[error] + + + +=== TEST 3: garbage-collected instances also GC their lru instance +--- http_config eval: $::HttpConfig +--- config + location = /t { + content_by_lua_block { + local mlcache = require "kong.resty.mlcache" + + collectgarbage("collect") + local cache_1 = assert(mlcache.new("my_mlcache", "cache_shm")) + local cache_2 = assert(mlcache.new("my_mlcache", "cache_shm")) + + -- cache something in cache_1's LRU + + cache_1.lru:set("key", 123) + + -- GC cache_1 (the LRU should survive because it is shared with cache_2) + + cache_1 = nil + collectgarbage("collect") + + -- prove LRU survived + + ngx.say((cache_2.lru:get("key"))) + + -- GC cache_2 (and the LRU this time, since no more references) + + cache_2 = nil + collectgarbage("collect") + + -- re-create the caches and a new LRU + + cache_1 = assert(mlcache.new("my_mlcache", "cache_shm")) + cache_2 = assert(mlcache.new("my_mlcache", "cache_shm")) + + -- this is a new LRU, it has nothing in it + + ngx.say((cache_2.lru:get("key"))) + } + } +--- request +GET /t +--- response_body +123 +nil +--- no_error_log +[error] + + + +=== TEST 4: multiple instances with different names get() of the same key are isolated +--- http_config eval: $::HttpConfig +--- config + location = /t { + content_by_lua_block { + local mlcache = require 
"kong.resty.mlcache" + + -- create 2 mlcache + + local cache_1 = assert(mlcache.new("my_mlcache_1", "cache_shm")) + local cache_2 = assert(mlcache.new("my_mlcache_2", "cache_shm")) + + -- set a value in both mlcaches + + local data_1 = assert(cache_1:get("my_key", nil, function() return "value A" end)) + local data_2 = assert(cache_2:get("my_key", nil, function() return "value B" end)) + + -- get values from LRU + + local lru_1_value = cache_1.lru:get("my_key") + local lru_2_value = cache_2.lru:get("my_key") + + ngx.say("cache_1 lru has: ", lru_1_value) + ngx.say("cache_2 lru has: ", lru_2_value) + + -- delete values from LRU + + cache_1.lru:delete("my_key") + cache_2.lru:delete("my_key") + + -- get values from shm + + local shm_1_value = assert(cache_1:get("my_key", nil, function() end)) + local shm_2_value = assert(cache_2:get("my_key", nil, function() end)) + + ngx.say("cache_1 shm has: ", shm_1_value) + ngx.say("cache_2 shm has: ", shm_2_value) + } + } +--- request +GET /t +--- response_body +cache_1 lru has: value A +cache_2 lru has: value B +cache_1 shm has: value A +cache_2 shm has: value B +--- no_error_log +[error] + + + +=== TEST 5: multiple instances with different names delete() of the same key are isolated +--- http_config eval: $::HttpConfig +--- config + location = /t { + content_by_lua_block { + local mlcache = require "kong.resty.mlcache" + + -- create 2 mlcache + + local cache_1 = assert(mlcache.new("my_mlcache_1", "cache_shm", { + ipc_shm = "ipc_shm", + })) + local cache_2 = assert(mlcache.new("my_mlcache_2", "cache_shm", { + ipc_shm = "ipc_shm", + })) + + -- set 2 values in both mlcaches + + local data_1 = assert(cache_1:get("my_key", nil, function() return "value A" end)) + local data_2 = assert(cache_2:get("my_key", nil, function() return "value B" end)) + + -- test if value is set from shm (safer to check due to the key) + + local shm_v = ngx.shared.cache_shm:get(cache_1.name .. 
"my_key") + ngx.say("cache_1 shm has a value: ", shm_v ~= nil) + + -- delete value from mlcache 1 + + ngx.say("delete from cache_1") + assert(cache_1:delete("my_key")) + + -- ensure cache 1 key is deleted from LRU + + local lru_v = cache_1.lru:get("my_key") + ngx.say("cache_1 lru has: ", lru_v) + + -- ensure cache 1 key is deleted from shm + + local shm_v = ngx.shared.cache_shm:get(cache_1.name .. "my_key") + ngx.say("cache_1 shm has: ", shm_v) + + -- ensure cache 2 still has its value + + local shm_v_2 = ngx.shared.cache_shm:get(cache_2.name .. "my_key") + ngx.say("cache_2 shm has a value: ", shm_v_2 ~= nil) + + local lru_v_2 = cache_2.lru:get("my_key") + ngx.say("cache_2 lru has: ", lru_v_2) + } + } +--- request +GET /t +--- response_body +cache_1 shm has a value: true +delete from cache_1 +cache_1 lru has: nil +cache_1 shm has: nil +cache_2 shm has a value: true +cache_2 lru has: value B +--- no_error_log +[error] + + + +=== TEST 6: multiple instances with different names peek() of the same key are isolated +--- http_config eval: $::HttpConfig +--- config + location = /t { + content_by_lua_block { + -- must reset the shm so that when repeated, this tests doesn't + -- return unpredictible TTLs (0.9xxxs) + ngx.shared.cache_shm:flush_all() + ngx.shared.cache_shm:flush_expired() + + local mlcache = require "kong.resty.mlcache" + + -- create 2 mlcaches + + local cache_1 = assert(mlcache.new("my_mlcache_1", "cache_shm", { + ipc_shm = "ipc_shm", + })) + local cache_2 = assert(mlcache.new("my_mlcache_2", "cache_shm", { + ipc_shm = "ipc_shm", + })) + + -- reset LRUs so repeated tests allow the below get() to set the + -- value in the shm + + cache_1.lru:delete("my_key") + cache_2.lru:delete("my_key") + + -- set a value in both mlcaches + + local data_1 = assert(cache_1:get("my_key", { ttl = 1 }, function() return "value A" end)) + local data_2 = assert(cache_2:get("my_key", { ttl = 2 }, function() return "value B" end)) + + -- peek cache 1 + + local ttl, err, val = 
assert(cache_1:peek("my_key")) + + ngx.say("cache_1 ttl: ", ttl) + ngx.say("cache_1 value: ", val) + + -- peek cache 2 + + local ttl, err, val = assert(cache_2:peek("my_key")) + + ngx.say("cache_2 ttl: ", ttl) + ngx.say("cache_2 value: ", val) + } + } +--- request +GET /t +--- response_body +cache_1 ttl: 1 +cache_1 value: value A +cache_2 ttl: 2 +cache_2 value: value B +--- no_error_log +[error] + + + +=== TEST 7: non-namespaced instances use different delete() broadcast channel +--- http_config eval: $::HttpConfig +--- config + location = /t { + content_by_lua_block { + local mlcache = require "kong.resty.mlcache" + + -- create 2 mlcaches + + local cache_1 = assert(mlcache.new("my_mlcache_1", "cache_shm", { + ipc = { + register_listeners = function() end, + broadcast = function(channel) + ngx.say("cache_1 channel: ", channel) + return true + end, + poll = function() end, + } + })) + local cache_2 = assert(mlcache.new("my_mlcache_2", "cache_shm", { + ipc = { + register_listeners = function() end, + broadcast = function(channel) + ngx.say("cache_2 channel: ", channel) + return true + end, + poll = function() end, + } + })) + + assert(cache_1:delete("my_key")) + assert(cache_2:delete("my_key")) + } + } +--- request +GET /t +--- response_body +cache_1 channel: mlcache:invalidations:my_mlcache_1 +cache_2 channel: mlcache:invalidations:my_mlcache_2 +--- no_error_log +[error] + + + +=== TEST 8: non-namespaced instances use different purge() broadcast channel +--- http_config eval: $::HttpConfig +--- config + location = /t { + content_by_lua_block { + local mlcache = require "kong.resty.mlcache" + + -- create 2 mlcaches + + local cache_1 = assert(mlcache.new("my_mlcache_1", "cache_shm", { + ipc = { + register_listeners = function() end, + broadcast = function(channel) + ngx.say("cache_1 channel: ", channel) + return true + end, + poll = function() end, + } + })) + local cache_2 = assert(mlcache.new("my_mlcache_2", "cache_shm", { + ipc = { + register_listeners = function() 
end, + broadcast = function(channel) + ngx.say("cache_2 channel: ", channel) + return true + end, + poll = function() end, + } + })) + + assert(cache_1:purge()) + assert(cache_2:purge()) + } + } +--- request +GET /t +--- response_body +cache_1 channel: mlcache:purge:my_mlcache_1 +cache_2 channel: mlcache:purge:my_mlcache_2 +--- no_error_log +[error] diff --git a/t/05-mlcache/10-ipc_shm.t b/t/05-mlcache/10-ipc_shm.t new file mode 100644 index 000000000000..3f7b3b093569 --- /dev/null +++ b/t/05-mlcache/10-ipc_shm.t @@ -0,0 +1,319 @@ +# vim:set ts=4 sts=4 sw=4 et ft=: + +use Test::Nginx::Socket::Lua; +use Cwd qw(cwd); +use lib '.'; +use t::Util; + +workers(2); + +#repeat_each(2); + +plan tests => repeat_each() * (blocks() * 3) + 2; + +my $pwd = cwd(); + +our $HttpConfig = qq{ + lua_package_path "$pwd/lib/?.lua;;"; + lua_shared_dict cache_shm 1m; + lua_shared_dict ipc_shm 1m; + + init_by_lua_block { + -- local verbose = true + local verbose = false + local outfile = "$Test::Nginx::Util::ErrLogFile" + -- local outfile = "/tmp/v.log" + if verbose then + local dump = require "jit.dump" + dump.on(nil, outfile) + else + local v = require "jit.v" + v.on(outfile) + end + + require "resty.core" + -- jit.opt.start("hotloop=1") + -- jit.opt.start("loopunroll=1000000") + -- jit.off() + } +}; + +run_tests(); + +__DATA__ + +=== TEST 1: update() with ipc_shm catches up with invalidation events +--- http_config eval: $::HttpConfig +--- config + location = /t { + content_by_lua_block { + local mlcache = require "kong.resty.mlcache" + + local cache = assert(mlcache.new("my_mlcache", "cache_shm", { + ipc_shm = "ipc_shm", + debug = true -- allows same worker to receive its own published events + })) + + cache.ipc:subscribe(cache.events.invalidation.channel, function(data) + ngx.log(ngx.NOTICE, "received event from invalidations: ", data) + end) + + assert(cache:delete("my_key")) + assert(cache:update()) + } + } +--- request +GET /t +--- ignore_response_body +--- no_error_log +[error] 
+--- error_log +received event from invalidations: my_key + + + +=== TEST 2: update() with ipc_shm timeouts when waiting for too long +--- http_config eval: $::HttpConfig +--- config + location = /t { + content_by_lua_block { + local mlcache = require "kong.resty.mlcache" + + local cache = assert(mlcache.new("my_mlcache", "cache_shm", { + ipc_shm = "ipc_shm", + debug = true -- allows same worker to receive its own published events + })) + + cache.ipc:subscribe(cache.events.invalidation.channel, function(data) + ngx.log(ngx.NOTICE, "received event from invalidations: ", data) + end) + + assert(cache:delete("my_key")) + assert(cache:delete("my_other_key")) + ngx.shared.ipc_shm:delete(2) + + local ok, err = cache:update(0.1) + if not ok then + ngx.say(err) + end + } + } +--- request +GET /t +--- response_body +could not poll ipc events: timeout +--- no_error_log +[error] +received event from invalidations: my_other +--- error_log +received event from invalidations: my_key + + + +=== TEST 3: update() with ipc_shm JITs when no events to catch up +--- http_config eval: $::HttpConfig +--- config + location = /t { + content_by_lua_block { + local mlcache = require "kong.resty.mlcache" + local cache = assert(mlcache.new("my_mlcache", "cache_shm", { + ipc_shm = "ipc_shm", + debug = true -- allows same worker to receive its own published events + })) + for i = 1, 10e3 do + assert(cache:update()) + end + } + } +--- request +GET /t +--- ignore_response_body +--- no_error_log +[error] +--- error_log eval +qr/\[TRACE\s+\d+ content_by_lua\(nginx\.conf:\d+\):7 loop\]/ + + + +=== TEST 4: set() with ipc_shm invalidates other workers' LRU cache +--- http_config eval: $::HttpConfig +--- config + location = /t { + content_by_lua_block { + local mlcache = require "kong.resty.mlcache" + + local opts = { + ipc_shm = "ipc_shm", + debug = true -- allows same worker to receive its own published events + } + + local cache = assert(mlcache.new("namespace", "cache_shm", opts)) + local 
cache_clone = assert(mlcache.new("namespace", "cache_shm", opts)) + + do + local lru_delete = cache.lru.delete + cache.lru.delete = function(self, key) + ngx.say("called lru:delete() with key: ", key) + return lru_delete(self, key) + end + end + + assert(cache:set("my_key", nil, nil)) + + ngx.say("calling update on cache") + assert(cache:update()) + + ngx.say("calling update on cache_clone") + assert(cache_clone:update()) + } + } +--- request +GET /t +--- response_body +calling update on cache +called lru:delete() with key: my_key +calling update on cache_clone +called lru:delete() with key: my_key +--- no_error_log +[error] + + + +=== TEST 5: delete() with ipc_shm invalidates other workers' LRU cache +--- http_config eval: $::HttpConfig +--- config + location = /t { + content_by_lua_block { + local mlcache = require "kong.resty.mlcache" + + local opts = { + ipc_shm = "ipc_shm", + debug = true -- allows same worker to receive its own published events + } + + local cache = assert(mlcache.new("namespace", "cache_shm", opts)) + local cache_clone = assert(mlcache.new("namespace", "cache_shm", opts)) + + do + local lru_delete = cache.lru.delete + cache.lru.delete = function(self, key) + ngx.say("called lru:delete() with key: ", key) + return lru_delete(self, key) + end + end + + assert(cache:delete("my_key")) + + ngx.say("calling update on cache") + assert(cache:update()) + + ngx.say("calling update on cache_clone") + assert(cache_clone:update()) + } + } +--- request +GET /t +--- response_body +called lru:delete() with key: my_key +calling update on cache +called lru:delete() with key: my_key +calling update on cache_clone +called lru:delete() with key: my_key +--- no_error_log +[error] + + + +=== TEST 6: purge() with mlcache_shm invalidates other workers' LRU cache (OpenResty < 1.13.6.2) +--- skip_eval: 3: t::Util::skip_openresty('>=', '1.13.6.2') +--- http_config eval: $::HttpConfig +--- config + location = /t { + content_by_lua_block { + local mlcache = require 
"kong.resty.mlcache" + + local opts = { + ipc_shm = "ipc_shm", + debug = true -- allows same worker to receive its own published events + } + + local cache = assert(mlcache.new("namespace", "cache_shm", opts)) + local cache_clone = assert(mlcache.new("namespace", "cache_shm", opts)) + + local lru = cache.lru + local lru_clone = cache_clone.lru + + assert(cache:purge()) + + -- cache.lru should be different now + ngx.say("cache has new lru: ", cache.lru ~= lru) + + ngx.say("cache_clone still has same lru: ", cache_clone.lru == lru_clone) + + ngx.say("calling update on cache_clone") + assert(cache_clone:update()) + + -- cache.lru should be different now + ngx.say("cache_clone has new lru: ", cache_clone.lru ~= lru_clone) + } + } +--- request +GET /t +--- response_body +cache has new lru: true +cache_clone still has same lru: true +calling update on cache_clone +cache_clone has new lru: true +--- no_error_log +[error] + + + +=== TEST 7: purge() with mlcache_shm invalidates other workers' LRU cache (OpenResty >= 1.13.6.2) +--- skip_eval: 3: t::Util::skip_openresty('<', '1.13.6.2') +--- http_config eval: $::HttpConfig +--- config + location = /t { + content_by_lua_block { + local mlcache = require "kong.resty.mlcache" + + local opts = { + ipc_shm = "ipc_shm", + debug = true -- allows same worker to receive its own published events + } + + local cache = assert(mlcache.new("namespace", "cache_shm", opts)) + local cache_clone = assert(mlcache.new("namespace", "cache_shm", opts)) + + local lru = cache.lru + + ngx.say("both instances use the same lru: ", cache.lru == cache_clone.lru) + + do + local lru_flush_all = lru.flush_all + cache.lru.flush_all = function(self) + ngx.say("called lru:flush_all()") + return lru_flush_all(self) + end + end + + assert(cache:purge()) + + ngx.say("calling update on cache_clone") + assert(cache_clone:update()) + + ngx.say("both instances use the same lru: ", cache.lru == cache_clone.lru) + ngx.say("lru didn't change after purge: ", cache.lru == 
lru) + } + } +--- request +GET /t +--- response_body +both instances use the same lru: true +called lru:flush_all() +calling update on cache_clone +called lru:flush_all() +both instances use the same lru: true +lru didn't change after purge: true +--- no_error_log +[error] diff --git a/t/05-mlcache/11-locks_shm.t b/t/05-mlcache/11-locks_shm.t new file mode 100644 index 000000000000..de2bf758bcc5 --- /dev/null +++ b/t/05-mlcache/11-locks_shm.t @@ -0,0 +1,115 @@ +# vim:set ts=4 sts=4 sw=4 et ft=: + +use Test::Nginx::Socket::Lua; +use Cwd qw(cwd); + +plan tests => repeat_each() * (blocks() * 3); + +my $pwd = cwd(); + +our $HttpConfig = qq{ + lua_package_path "$pwd/lib/?.lua;;"; + lua_shared_dict cache_shm 1m; + lua_shared_dict locks_shm 1m; + + init_by_lua_block { + -- local verbose = true + local verbose = false + local outfile = "$Test::Nginx::Util::ErrLogFile" + -- local outfile = "/tmp/v.log" + if verbose then + local dump = require "jit.dump" + dump.on(nil, outfile) + else + local v = require "jit.v" + v.on(outfile) + end + + require "resty.core" + -- jit.opt.start("hotloop=1") + -- jit.opt.start("loopunroll=1000000") + -- jit.off() + } +}; + +run_tests(); + +__DATA__ + +=== TEST 1: new() validates opts.shm_locks +--- http_config eval: $::HttpConfig +--- config + location = /t { + content_by_lua_block { + local mlcache = require "kong.resty.mlcache" + + local ok, err = pcall(mlcache.new, "name", "cache_shm", { + shm_locks = false, + }) + if not ok then + ngx.say(err) + end + } + } +--- request +GET /t +--- response_body +opts.shm_locks must be a string +--- no_error_log +[error] + + + +=== TEST 2: new() ensures opts.shm_locks exists +--- http_config eval: $::HttpConfig +--- config + location = /t { + content_by_lua_block { + local mlcache = require "kong.resty.mlcache" + + local ok, err = mlcache.new("name", "cache_shm", { + shm_locks = "foo", + }) + if not ok then + ngx.say(err) + end + } + } +--- request +GET /t +--- response_body +no such lua_shared_dict for 
opts.shm_locks: foo +--- no_error_log +[error] + + + +=== TEST 3: get() stores resty-locks in opts.shm_locks if specified +--- http_config eval: $::HttpConfig +--- config + location = /t { + content_by_lua_block { + local mlcache = require "kong.resty.mlcache" + + local cache = assert(mlcache.new("name", "cache_shm", { + shm_locks = "locks_shm", + })) + + local function cb() + local keys = ngx.shared.locks_shm:get_keys() + for i, key in ipairs(keys) do + ngx.say(i, ": ", key) + end + + return 123 + end + + cache:get("key", nil, cb) + } + } +--- request +GET /t +--- response_body +1: lua-resty-mlcache:lock:namekey +--- no_error_log +[error] diff --git a/t/05-mlcache/12-resurrect-stale.t b/t/05-mlcache/12-resurrect-stale.t new file mode 100644 index 000000000000..bfb1349db954 --- /dev/null +++ b/t/05-mlcache/12-resurrect-stale.t @@ -0,0 +1,1047 @@ +# vim:set ts=4 sts=4 sw=4 et ft=: + +use Test::Nginx::Socket::Lua; +use Cwd qw(cwd); + +plan tests => repeat_each() * (blocks() * 3 + 3); + +my $pwd = cwd(); + +our $HttpConfig = qq{ + lua_package_path "$pwd/lib/?.lua;;"; + lua_shared_dict cache_shm 1m; + lua_shared_dict cache_shm_miss 1m; +}; + +no_long_string(); +log_level('warn'); + +run_tests(); + +__DATA__ + +=== TEST 1: new() validates 'opts.resurrect_ttl' (number && >= 0) +--- http_config eval: $::HttpConfig +--- config + location = /t { + content_by_lua_block { + local mlcache = require "kong.resty.mlcache" + + local pok, perr = pcall(mlcache.new, "my_mlcache", "cache_shm", { + resurrect_ttl = "", + }) + if not pok then + ngx.say(perr) + end + + local pok, perr = pcall(mlcache.new, "my_mlcache", "cache_shm", { + resurrect_ttl = -1, + }) + if not pok then + ngx.say(perr) + end + } + } +--- request +GET /t +--- response_body +opts.resurrect_ttl must be a number +opts.resurrect_ttl must be >= 0 +--- no_error_log +[error] + + + +=== TEST 2: get() validates 'opts.resurrect_ttl' (number && >= 0) +--- http_config eval: $::HttpConfig +--- config + location = /t { + 
content_by_lua_block { + local mlcache = require "kong.resty.mlcache" + local cache = assert(mlcache.new("my_mlcache", "cache_shm")) + + local function cb() + -- nop + end + + local pok, perr = pcall(cache.get, cache, "key", { + resurrect_ttl = "", + }, cb) + if not pok then + ngx.say(perr) + end + + local pok, perr = pcall(cache.get, cache, "key", { + resurrect_ttl = -1, + }, cb) + if not pok then + ngx.say(perr) + end + } + } +--- request +GET /t +--- response_body +opts.resurrect_ttl must be a number +opts.resurrect_ttl must be >= 0 +--- no_error_log +[error] + + + +=== TEST 3: get() resurrects a stale value upon callback soft error for 'resurrect_ttl' instance option +--- http_config eval: $::HttpConfig +--- config + location = /t { + content_by_lua_block { + local mlcache = require "kong.resty.mlcache" + local cache = assert(mlcache.new("my_mlcache", "cache_shm", { + ttl = 0.3, + resurrect_ttl = 0.2, + })) + + local cb_called = 0 + + local function cb() + cb_called = cb_called + 1 + + if cb_called == 1 then + return 123 + + elseif cb_called == 2 then + return nil, "some error" + + elseif cb_called == 3 then + return 456 + end + end + + ngx.say("-> 1st get()") + local data, err = cache:get("key", nil, cb) + if err then + ngx.log(ngx.ERR, err) + return + end + ngx.say("data: ", data) + + ngx.say() + ngx.say("sleeping for 0.3s...") + ngx.sleep(0.3) + ngx.say() + + ngx.say("-> stale get()") + data, err, hit_lvl = cache:get("key", nil, cb) + ngx.say("data: ", data) + ngx.say("err: ", err) + ngx.say("hit_lvl: ", hit_lvl) + + ngx.say() + ngx.say("-> subsequent get() from LRU") + data, err, hit_lvl = cache:get("key", nil, cb) + ngx.say("data: ", data) + ngx.say("err: ", err) + ngx.say("hit_lvl: ", hit_lvl) + + ngx.say() + ngx.say("-> subsequent get() from shm") + cache.lru:delete("key") + data, err, hit_lvl = cache:get("key", nil, cb) + ngx.say("data: ", data) + ngx.say("err: ", err) + ngx.say("hit_lvl: ", hit_lvl) + + ngx.say() + ngx.say("sleeping for 0.2s...") + 
ngx.sleep(0.21) + ngx.say() + + ngx.say("-> successfull callback get()") + data, err, hit_lvl = cache:get("key", nil, cb) + ngx.say("data: ", data) + ngx.say("err: ", err) + ngx.say("hit_lvl: ", hit_lvl) + } + } +--- request +GET /t +--- response_body +-> 1st get() +data: 123 + +sleeping for 0.3s... + +-> stale get() +data: 123 +err: nil +hit_lvl: 4 + +-> subsequent get() from LRU +data: 123 +err: nil +hit_lvl: 1 + +-> subsequent get() from shm +data: 123 +err: nil +hit_lvl: 4 + +sleeping for 0.2s... + +-> successfull callback get() +data: 456 +err: nil +hit_lvl: 3 +--- no_error_log +[error] + + + +=== TEST 4: get() logs soft callback error with warn level when resurrecting +--- http_config eval: $::HttpConfig +--- config + location = /t { + content_by_lua_block { + local mlcache = require "kong.resty.mlcache" + local cache = assert(mlcache.new("my_mlcache", "cache_shm", { + ttl = 0.3, + resurrect_ttl = 0.2, + })) + + local cb_called = 0 + + local function cb() + cb_called = cb_called + 1 + + if cb_called == 1 then + return 123 + + elseif cb_called == 2 then + return nil, "some error" + + elseif cb_called == 3 then + return 456 + end + end + + ngx.say("-> 1st get()") + local data, err = cache:get("key", nil, cb) + if err then + ngx.log(ngx.ERR, err) + return + end + ngx.say("data: ", data) + + ngx.say() + ngx.say("sleeping for 0.3s...") + ngx.sleep(0.3) + ngx.say() + + ngx.say("-> stale get()") + data, err, hit_lvl = cache:get("key", nil, cb) + ngx.say("data: ", data) + ngx.say("err: ", err) + ngx.say("hit_lvl: ", hit_lvl) + } + } +--- request +GET /t +--- response_body +-> 1st get() +data: 123 + +sleeping for 0.3s... + +-> stale get() +data: 123 +err: nil +hit_lvl: 4 +--- error_log eval +qr/\[warn\] .*? 
callback returned an error \(some error\) but stale value found/ + + + +=== TEST 5: get() accepts 'opts.resurrect_ttl' option to override instance option +--- http_config eval: $::HttpConfig +--- config + location = /t { + content_by_lua_block { + local mlcache = require "kong.resty.mlcache" + local cache = assert(mlcache.new("my_mlcache", "cache_shm", { + ttl = 0.3, + resurrect_ttl = 0.8, + })) + + local cb_called = 0 + + local function cb() + cb_called = cb_called + 1 + + if cb_called == 1 then + return 123 + + else + return nil, "some error" + end + end + + ngx.say("-> 1st get()") + local data, err = cache:get("key", nil, cb) + if err then + ngx.log(ngx.ERR, err) + return + end + ngx.say("data: ", data) + + ngx.say() + ngx.say("sleeping for 0.3s...") + ngx.sleep(0.3) + ngx.say() + + ngx.say("-> stale get()") + data, err, hit_lvl = cache:get("key", { + resurrect_ttl = 0.2 + }, cb) + ngx.say("data: ", data) + ngx.say("err: ", err) + ngx.say("hit_lvl: ", hit_lvl) + + ngx.say() + ngx.say("sleeping for 0.2s...") + ngx.sleep(0.21) + ngx.say() + + ngx.say("-> subsequent stale get()") + data, err, hit_lvl = cache:get("key", nil, cb) + ngx.say("data: ", data) + ngx.say("err: ", err) + ngx.say("hit_lvl: ", hit_lvl) + } + } +--- request +GET /t +--- response_body +-> 1st get() +data: 123 + +sleeping for 0.3s... + +-> stale get() +data: 123 +err: nil +hit_lvl: 4 + +sleeping for 0.2s... 
+ +-> subsequent stale get() +data: 123 +err: nil +hit_lvl: 4 +--- no_error_log +[error] + + + +=== TEST 6: get() resurrects a nil stale value (negative cache) +--- http_config eval: $::HttpConfig +--- config + location = /t { + content_by_lua_block { + local mlcache = require "kong.resty.mlcache" + local cache = assert(mlcache.new("my_mlcache", "cache_shm", { + neg_ttl = 0.3, + resurrect_ttl = 0.2, + })) + + local cb_called = 0 + + local function cb() + cb_called = cb_called + 1 + + if cb_called == 1 then + return nil + + elseif cb_called == 2 then + return nil, "some error" + + elseif cb_called == 3 then + return 456 + end + end + + ngx.say("-> 1st get()") + local data, err = cache:get("key", nil, cb) + if err then + ngx.log(ngx.ERR, err) + return + end + ngx.say("data: ", data) + + ngx.say() + ngx.say("sleeping for 0.3s...") + ngx.sleep(0.3) + ngx.say() + + ngx.say("-> stale get()") + data, err, hit_lvl = cache:get("key", nil, cb) + ngx.say("data: ", data) + ngx.say("err: ", err) + ngx.say("hit_lvl: ", hit_lvl) + + ngx.say() + ngx.say("-> subsequent get()") + data, err, hit_lvl = cache:get("key", nil, cb) + ngx.say("data: ", data) + ngx.say("err: ", err) + ngx.say("hit_lvl: ", hit_lvl) + + ngx.say() + ngx.say("sleeping for 0.2s...") + ngx.sleep(0.21) + ngx.say() + + ngx.say("-> successfull callback get()") + data, err, hit_lvl = cache:get("key", nil, cb) + ngx.say("data: ", data) + ngx.say("err: ", err) + ngx.say("hit_lvl: ", hit_lvl) + } + } +--- request +GET /t +--- response_body +-> 1st get() +data: nil + +sleeping for 0.3s... + +-> stale get() +data: nil +err: nil +hit_lvl: 4 + +-> subsequent get() +data: nil +err: nil +hit_lvl: 1 + +sleeping for 0.2s... 
+ +-> successfull callback get() +data: 456 +err: nil +hit_lvl: 3 +--- no_error_log +[error] + + + +=== TEST 7: get() resurrects a nil stale value (negative cache) in 'opts.shm_miss' +--- http_config eval: $::HttpConfig +--- config + location = /t { + content_by_lua_block { + local mlcache = require "kong.resty.mlcache" + local cache = assert(mlcache.new("my_mlcache", "cache_shm", { + neg_ttl = 0.3, + resurrect_ttl = 0.2, + shm_miss = "cache_shm_miss" + })) + + local cb_called = 0 + + local function cb() + cb_called = cb_called + 1 + + if cb_called == 1 then + return nil + + elseif cb_called == 2 then + return nil, "some error" + + elseif cb_called == 3 then + return 456 + end + end + + ngx.say("-> 1st get()") + local data, err = cache:get("key", nil, cb) + if err then + ngx.log(ngx.ERR, err) + return + end + ngx.say("data: ", data) + + ngx.say() + ngx.say("sleeping for 0.3s...") + ngx.sleep(0.3) + ngx.say() + + ngx.say("-> stale get()") + data, err, hit_lvl = cache:get("key", nil, cb) + ngx.say("data: ", data) + ngx.say("err: ", err) + ngx.say("hit_lvl: ", hit_lvl) + + ngx.say() + ngx.say("-> subsequent get()") + data, err, hit_lvl = cache:get("key", nil, cb) + ngx.say("data: ", data) + ngx.say("err: ", err) + ngx.say("hit_lvl: ", hit_lvl) + + ngx.say() + ngx.say("sleeping for 0.2s...") + ngx.sleep(0.21) + ngx.say() + + ngx.say("-> successfull callback get()") + data, err, hit_lvl = cache:get("key", nil, cb) + ngx.say("data: ", data) + ngx.say("err: ", err) + ngx.say("hit_lvl: ", hit_lvl) + } + } +--- request +GET /t +--- response_body +-> 1st get() +data: nil + +sleeping for 0.3s... + +-> stale get() +data: nil +err: nil +hit_lvl: 4 + +-> subsequent get() +data: nil +err: nil +hit_lvl: 1 + +sleeping for 0.2s... 
+ +-> successfull callback get() +data: 456 +err: nil +hit_lvl: 3 +--- no_error_log +[error] + + + +=== TEST 8: get() ignores cb return values upon stale value resurrection +--- http_config eval: $::HttpConfig +--- config + location = /t { + content_by_lua_block { + local mlcache = require "kong.resty.mlcache" + local cache = assert(mlcache.new("my_mlcache", "cache_shm", { + ttl = 0.3, + resurrect_ttl = 0.2, + })) + + local cb_called = 0 + + local function cb() + cb_called = cb_called + 1 + + if cb_called == 2 then + -- ignore ret values 1 and 3 + return 456, "some error", 10 + + else + return 123 + end + end + + ngx.say("-> 1st get()") + local data, err = cache:get("key", nil, cb) + if err then + ngx.log(ngx.ERR, err) + return + end + ngx.say("data: ", data) + + ngx.say() + ngx.say("sleeping for 0.3s...") + ngx.sleep(0.3) + ngx.say() + + ngx.say("-> stale get()") + data, err, hit_lvl = cache:get("key", nil, cb) + ngx.say("data: ", data) + ngx.say("err: ", err) + ngx.say("hit_lvl: ", hit_lvl) + + ngx.say() + ngx.say("-> subsequent get()") + data, err, hit_lvl = cache:get("key", nil, cb) + ngx.say("data: ", data) + ngx.say("err: ", err) + ngx.say("hit_lvl: ", hit_lvl) + + ngx.say() + ngx.say("sleeping for 0.2s...") + ngx.sleep(0.21) + ngx.say() + + ngx.say("-> successfull callback get()") + data, err, hit_lvl = cache:get("key", nil, cb) + ngx.say("data: ", data) + ngx.say("err: ", err) + ngx.say("hit_lvl: ", hit_lvl) + } + } +--- request +GET /t +--- response_body +-> 1st get() +data: 123 + +sleeping for 0.3s... + +-> stale get() +data: 123 +err: nil +hit_lvl: 4 + +-> subsequent get() +data: 123 +err: nil +hit_lvl: 1 + +sleeping for 0.2s... 
+ +-> successfull callback get() +data: 123 +err: nil +hit_lvl: 3 +--- no_error_log +[error] + + + +=== TEST 9: get() does not resurrect a stale value when callback throws error +--- http_config eval: $::HttpConfig +--- config + location = /t { + content_by_lua_block { + local mlcache = require "kong.resty.mlcache" + local cache = assert(mlcache.new("my_mlcache", "cache_shm", { + ttl = 0.3, + resurrect_ttl = 0.2, + })) + + local cb_called = 0 + + local function cb() + cb_called = cb_called + 1 + + if cb_called == 1 then + return 123 + + elseif cb_called == 2 then + error("thrown error") + + elseif cb_called == 3 then + return 123 + end + end + + ngx.say("-> 1st get()") + local data, err = cache:get("key", nil, cb) + if err then + ngx.log(ngx.ERR, err) + return + end + ngx.say("data: ", data) + + ngx.say() + ngx.say("sleeping for 0.3s...") + ngx.sleep(0.3) + ngx.say() + + ngx.say("-> stale get()") + data, err, hit_lvl = cache:get("key", nil, cb) + ngx.say("data: ", data) + ngx.say("err: ", string.match(err, "callback threw an error:"), " ", + string.match(err, "thrown error")) + ngx.say("hit_lvl: ", hit_lvl) + + ngx.say() + ngx.say("-> subsequent get()") + data, err, hit_lvl = cache:get("key", nil, cb) + ngx.say("data: ", data) + ngx.say("err: ", err) + ngx.say("hit_lvl: ", hit_lvl) + } + } +--- request +GET /t +--- response_body +-> 1st get() +data: 123 + +sleeping for 0.3s... 
+ +-> stale get() +data: nil +err: callback threw an error: thrown error +hit_lvl: nil + +-> subsequent get() +data: 123 +err: nil +hit_lvl: 3 +--- no_error_log +[error] + + + +=== TEST 10: get() returns error and data on lock timeout but does not resurrect +--- http_config eval: $::HttpConfig +--- config + location = /t { + content_by_lua_block { + -- insert 2 dummy values to ensure that lock acquisition (which + -- uses shm:set) will _not_ evict out stale cached value + ngx.shared.cache_shm:set(1, true, 0.2) + ngx.shared.cache_shm:set(2, true, 0.2) + + local mlcache = require "kong.resty.mlcache" + local cache_1 = assert(mlcache.new("my_mlcache", "cache_shm", { + ttl = 0.3, + resurrect_ttl = 0.3 + })) + local cache_2 = assert(mlcache.new("my_mlcache", "cache_shm", { + ttl = 0.3, + resurrect_ttl = 0.3, + resty_lock_opts = { + timeout = 0.2 + } + })) + + local function cb(delay, return_val) + if delay then + ngx.sleep(delay) + end + + return return_val or 123 + end + + -- cache in shm + + local data, err, hit_lvl = cache_1:get("my_key", nil, cb) + assert(data == 123) + assert(err == nil) + assert(hit_lvl == 3) + + -- make shm + LRU expire + + ngx.sleep(0.3) + + local t1 = ngx.thread.spawn(function() + -- trigger L3 callback again, but slow to return this time + + cache_1:get("my_key", nil, cb, 0.3, 456) + end) + + local t2 = ngx.thread.spawn(function() + -- make this mlcache wait on other's callback, and timeout + + local data, err, hit_lvl = cache_2:get("my_key", nil, cb) + ngx.say("data: ", data) + ngx.say("err: ", err) + ngx.say("hit_lvl: ", hit_lvl) + end) + + assert(ngx.thread.wait(t1)) + assert(ngx.thread.wait(t2)) + + ngx.say() + ngx.say("-> subsequent get()") + data, err, hit_lvl = cache_2:get("my_key", nil, cb, nil, 123) + ngx.say("data: ", data) + ngx.say("err: ", err) + ngx.say("hit_lvl: ", hit_lvl) -- should be 1 since LRU instances are shared by mlcache namespace, and t1 finished + } + } +--- request +GET /t +--- response_body +data: 123 +err: nil 
+hit_lvl: 4 + +-> subsequent get() +data: 456 +err: nil +hit_lvl: 1 +--- no_error_log +[error] +--- error_log eval +qr/\[warn\] .*? could not acquire callback lock: timeout/ + + + +=== TEST 11: get() returns nil cached item on callback lock timeout +--- http_config eval: $::HttpConfig +--- config + location = /t { + content_by_lua_block { + -- insert 2 dummy values to ensure that lock acquisition (which + -- uses shm:set) will _not_ evict out stale cached value + ngx.shared.cache_shm:set(1, true, 0.2) + ngx.shared.cache_shm:set(2, true, 0.2) + + local mlcache = require "kong.resty.mlcache" + local cache_1 = assert(mlcache.new("my_mlcache", "cache_shm", { + neg_ttl = 0.3, + resurrect_ttl = 0.3 + })) + local cache_2 = assert(mlcache.new("my_mlcache", "cache_shm", { + neg_ttl = 0.3, + resurrect_ttl = 0.3, + resty_lock_opts = { + timeout = 0.2 + } + })) + + local function cb(delay) + if delay then + ngx.sleep(delay) + end + + return nil + end + + -- cache in shm + + local data, err, hit_lvl = cache_1:get("my_key", nil, cb) + assert(data == nil) + assert(err == nil) + assert(hit_lvl == 3) + + -- make shm + LRU expire + + ngx.sleep(0.3) + + local t1 = ngx.thread.spawn(function() + -- trigger L3 callback again, but slow to return this time + + cache_1:get("my_key", nil, cb, 0.3) + end) + + local t2 = ngx.thread.spawn(function() + -- make this mlcache wait on other's callback, and timeout + + local data, err, hit_lvl = cache_2:get("my_key", nil, cb) + ngx.say("data: ", data) + ngx.say("err: ", err) + ngx.say("hit_lvl: ", hit_lvl) + end) + + assert(ngx.thread.wait(t1)) + assert(ngx.thread.wait(t2)) + + ngx.say() + ngx.say("-> subsequent get()") + data, err, hit_lvl = cache_2:get("my_key", nil, cb) + ngx.say("data: ", data) + ngx.say("err: ", err) + ngx.say("hit_lvl: ", hit_lvl) -- should be 1 since LRU instances are shared by mlcache namespace, and t1 finished + } + } +--- request +GET /t +--- response_body +data: nil +err: nil +hit_lvl: 4 + +-> subsequent get() +data: nil 
+err: nil +hit_lvl: 1 +--- no_error_log +[error] +--- error_log eval +qr/\[warn\] .*? could not acquire callback lock: timeout/ + + + +=== TEST 12: get() does not resurrect a stale value if no 'resurrect_ttl' is set on the instance +--- http_config eval: $::HttpConfig +--- config + location = /t { + content_by_lua_block { + local mlcache = require "kong.resty.mlcache" + local cache = assert(mlcache.new("my_mlcache", "cache_shm", { + ttl = 0.3, + })) + + local cb_called = 0 + + local function cb() + cb_called = cb_called + 1 + + if cb_called == 1 then + return 123 + end + + return nil, "some error" + end + + ngx.say("-> 1st get()") + local data, err = cache:get("key", nil, cb) + if err then + ngx.log(ngx.ERR, err) + return + end + ngx.say("data: ", data) + + ngx.say() + ngx.say("sleeping for 0.3s...") + ngx.sleep(0.3) + ngx.say() + + ngx.say("-> stale get()") + data, err, hit_lvl = cache:get("key", nil, cb) + ngx.say("data: ", data) + ngx.say("err: ", err) + ngx.say("hit_lvl: ", hit_lvl) + + ngx.say() + ngx.say("-> subsequent get()") + data, err, hit_lvl = cache:get("key", nil, cb) + ngx.say("data: ", data) + ngx.say("err: ", err) + ngx.say("hit_lvl: ", hit_lvl) + } + } +--- request +GET /t +--- response_body +-> 1st get() +data: 123 + +sleeping for 0.3s... 
+ +-> stale get() +data: nil +err: some error +hit_lvl: nil + +-> subsequent get() +data: nil +err: some error +hit_lvl: nil +--- no_error_log +[error] + + + +=== TEST 13: get() callback can return nil + err (non-string) safely with opts.resurrect_ttl +--- http_config eval: $::HttpConfig +--- config + location = /t { + content_by_lua_block { + local mlcache = require "kong.resty.mlcache" + local cache = assert(mlcache.new("my_mlcache", "cache_shm", { + ttl = 0.3, + resurrect_ttl = 1, + })) + + local data, err = cache:get("1", nil, function() return 123 end) + if err then + ngx.log(ngx.ERR, err) + return + end + + ngx.sleep(0.3) + + local data, err = cache:get("1", nil, function() return nil, {} end) + if err then + ngx.log(ngx.ERR, err) + return + end + + ngx.say("cb return values: ", data, " ", err) + } + } +--- request +GET /t +--- response_body +cb return values: 123 nil +--- no_error_log +[error] +--- error_log eval +qr/\[warn\] .*? callback returned an error \(table: 0x[[:xdigit:]]+\)/ + + + +=== TEST 14: get() returns stale hit_lvl when retrieved from shm on last ms (see GH PR #58) +--- http_config eval: $::HttpConfig +--- config + location = /t { + content_by_lua_block { + local forced_now = ngx.now() + ngx.now = function() + return forced_now + end + + local mlcache = require "kong.resty.mlcache" + + local cache = assert(mlcache.new("my_mlcache", "cache_shm", { + ttl = 0.2, + resurrect_ttl = 0.2, + })) + + local cb_called = 0 + + local function cb() + cb_called = cb_called + 1 + + if cb_called == 1 then + return 42 + end + + return nil, "some error causing a resurrect" + end + + local data, err = cache:get("key", nil, cb) + assert(data == 42, err or "invalid data value: " .. data) + + -- cause a resurrect in L2 shm + ngx.sleep(0.201) + forced_now = forced_now + 0.201 + + local data, err, hit_lvl = cache:get("key", nil, cb) + assert(data == 42, err or "invalid data value: " .. data) + assert(hit_lvl == 4, "hit_lvl should be 4 (resurrected data), got: " .. 
hit_lvl) + + -- value is now resurrected + + -- drop L1 cache value + cache.lru:delete("key") + + -- advance 0.2 second in the future, and simulate another :get() + -- call; the L2 shm entry will still be alive (as its clock is + -- not faked), but mlcache will compute a remaining_ttl of 0; + -- in such cases we should still see the stale flag returned + -- as hit_lvl + forced_now = forced_now + 0.2 + + local data, err, hit_lvl = cache:get("key", nil, cb) + assert(data == 42, err or "invalid data value: " .. data) + + ngx.say("+0.200s after resurrect hit_lvl: ", hit_lvl) + } + } +--- request +GET /t +--- response_body ++0.200s after resurrect hit_lvl: 4 +--- no_error_log +[error] diff --git a/t/05-mlcache/13-get_bulk.t b/t/05-mlcache/13-get_bulk.t new file mode 100644 index 000000000000..e0a9d47827fd --- /dev/null +++ b/t/05-mlcache/13-get_bulk.t @@ -0,0 +1,1735 @@ +# vim:set ts=4 sts=4 sw=4 et ft=: + +use Test::Nginx::Socket::Lua; +use Cwd qw(cwd); +use lib '.'; +use t::Util; + +no_long_string(); + +workers(2); + +#repeat_each(2); + +plan tests => repeat_each() * ((blocks() * 3) + 12 * 3); # n * 3 -> for debug error_log concurrency tests + +my $pwd = cwd(); + +our $HttpConfig = qq{ + lua_package_path "$pwd/lib/?.lua;;"; + lua_shared_dict cache_shm 1m; + #lua_shared_dict cache_shm_miss 1m; + + init_by_lua_block { + -- local verbose = true + local verbose = false + local outfile = "$Test::Nginx::Util::ErrLogFile" + -- local outfile = "/tmp/v.log" + if verbose then + local dump = require "jit.dump" + dump.on(nil, outfile) + else + local v = require "jit.v" + v.on(outfile) + end + + require "resty.core" + -- jit.opt.start("hotloop=1") + -- jit.opt.start("loopunroll=1000000") + -- jit.off() + } +}; + +run_tests(); + +__DATA__ + +=== TEST 1: get_bulk() validates bulk +--- http_config eval: $::HttpConfig +--- config + location = /t { + content_by_lua_block { + local mlcache = require "kong.resty.mlcache" + local cache = assert(mlcache.new("my_mlcache", "cache_shm")) + + 
local pok, perr = pcall(cache.get_bulk, cache) + if not pok then + ngx.say(perr) + end + } + } +--- request +GET /t +--- response_body +bulk must be a table +--- no_error_log +[error] + + + +=== TEST 2: get_bulk() ensures bulk has n field +--- http_config eval: $::HttpConfig +--- config + location = /t { + content_by_lua_block { + local mlcache = require "kong.resty.mlcache" + local cache = assert(mlcache.new("my_mlcache", "cache_shm")) + + local pok, perr = pcall(cache.get_bulk, cache, { + "key_a", nil, function() return 1 end, nil, + "key_b", nil, function() return 1 end, nil, + }) + if not pok then + ngx.say(perr) + end + } + } +--- request +GET /t +--- response_body +bulk must have n field +--- no_error_log +[error] + + + +=== TEST 3: get_bulk() validates operations keys +--- http_config eval: $::HttpConfig +--- config + location = /t { + content_by_lua_block { + local mlcache = require "kong.resty.mlcache" + local cache = assert(mlcache.new("my_mlcache", "cache_shm")) + + local pok, perr = pcall(cache.get_bulk, cache, { + "key_a", nil, function() return 1 end, nil, + false, nil, function() return 1 end, nil, + n = 2, + }) + if not pok then + ngx.say(perr) + end + } + } +--- request +GET /t +--- response_body +key at index 5 must be a string for operation 2 (got boolean) +--- no_error_log +[error] + + + +=== TEST 4: get_bulk() validates operations callbacks +--- http_config eval: $::HttpConfig +--- config + location = /t { + content_by_lua_block { + local mlcache = require "kong.resty.mlcache" + local cache = assert(mlcache.new("my_mlcache", "cache_shm")) + + local pok, perr = pcall(cache.get_bulk, cache, { + "key_b", nil, nil, nil, + "key_a", nil, function() return 1 end, nil, + n = 2, + }) + if not pok then + ngx.say(perr) + end + + local pok, perr = pcall(cache.get_bulk, cache, { + "key_a", nil, function() return 1 end, nil, + "key_b", nil, false, nil, + n = 2, + }) + if not pok then + ngx.say(perr) + end + } + } +--- request +GET /t +--- response_body 
+callback at index 3 must be a function for operation 1 (got nil) +callback at index 7 must be a function for operation 2 (got boolean) +--- no_error_log +[error] + + + +=== TEST 5: get_bulk() validates opts argument +--- http_config eval: $::HttpConfig +--- config + location = /t { + content_by_lua_block { + local mlcache = require "kong.resty.mlcache" + local cache = assert(mlcache.new("my_mlcache", "cache_shm")) + + local function cb() end + + local pok, perr = pcall(cache.get_bulk, cache, { + "key_a", nil, cb, nil, + "key_b", nil, cb, nil, + n = 2, + }, true) + if not pok then + ngx.say(perr) + end + + local pok, perr = pcall(cache.get_bulk, cache, { + "key_a", nil, cb, nil, + "key_b", nil, cb, nil, + n = 2, + }, {}) + if not pok then + ngx.say(perr) + end + + ngx.say("ok") + } + } +--- request +GET /t +--- response_body +opts must be a table +ok +--- no_error_log +[error] + + + +=== TEST 6: get_bulk() validates opts.concurrency +--- http_config eval: $::HttpConfig +--- config + location = /t { + content_by_lua_block { + local mlcache = require "kong.resty.mlcache" + local cache = assert(mlcache.new("my_mlcache", "cache_shm")) + + local function cb() end + + local pok, perr = pcall(cache.get_bulk, cache, { + "key_a", nil, cb, nil, + "key_b", nil, cb, nil, + n = 2, + }, { concurrency = true }) + if not pok then + ngx.say(perr) + end + + local pok, perr = pcall(cache.get_bulk, cache, { + "key_a", nil, cb, nil, + "key_b", nil, cb, nil, + n = 2, + }, { concurrency = 0 }) + if not pok then + ngx.say(perr) + end + + local pok, perr = pcall(cache.get_bulk, cache, { + "key_a", nil, cb, nil, + "key_b", nil, cb, nil, + n = 2, + }, { concurrency = -1 }) + if not pok then + ngx.say(perr) + end + + local pok, perr = pcall(cache.get_bulk, cache, { + "key_a", nil, cb, nil, + "key_b", nil, cb, nil, + n = 2, + }, { concurrency = 1 }) + if not pok then + ngx.say(perr) + end + + ngx.say("ok") + } + } +--- request +GET /t +--- response_body +opts.concurrency must be a number 
+opts.concurrency must be > 0 +opts.concurrency must be > 0 +ok +--- no_error_log +[error] + + + +=== TEST 7: get_bulk() multiple fetch L3 +--- http_config eval: $::HttpConfig +--- config + location = /t { + content_by_lua_block { + local mlcache = require "kong.resty.mlcache" + local cache = assert(mlcache.new("my_mlcache", "cache_shm")) + + local res, err = cache:get_bulk { + "key_a", nil, function() return 1 end, nil, + "key_b", nil, function() return 2 end, nil, + "key_c", nil, function() return 3 end, nil, + n = 3, + } + if not res then + ngx.log(ngx.ERR, err) + return + end + + for i = 1, res.n, 3 do + ngx.say(tostring(res[i]), " ", + tostring(res[i + 1]), " ", + tostring(res[i + 2])) + end + } + } +--- request +GET /t +--- response_body +1 nil 3 +2 nil 3 +3 nil 3 +--- no_error_log +[error] + + + +=== TEST 8: get_bulk() multiple fetch L2 +--- http_config eval: $::HttpConfig +--- config + location = /t { + content_by_lua_block { + local mlcache = require "kong.resty.mlcache" + local cache = assert(mlcache.new("my_mlcache", "cache_shm")) + + assert(cache:get("key_a", nil, function() return 1 end)) + assert(cache:get("key_b", nil, function() return 2 end)) + assert(cache:get("key_c", nil, function() return 3 end)) + + cache.lru:delete("key_a") + cache.lru:delete("key_b") + cache.lru:delete("key_c") + + local res, err = cache:get_bulk { + "key_a", nil, function() return -1 end, nil, + "key_b", nil, function() return -2 end, nil, + "key_c", nil, function() return -3 end, nil, + n = 3, + } + if not res then + ngx.log(ngx.ERR, err) + return + end + + for i = 1, res.n, 3 do + ngx.say(tostring(res[i]), " ", + tostring(res[i + 1]), " ", + tostring(res[i + 2])) + end + } + } +--- request +GET /t +--- response_body +1 nil 2 +2 nil 2 +3 nil 2 +--- no_error_log +[error] + + + +=== TEST 9: get_bulk() multiple fetch L1 +--- http_config eval: $::HttpConfig +--- config + location = /t { + content_by_lua_block { + local mlcache = require "kong.resty.mlcache" + local cache = 
assert(mlcache.new("my_mlcache", "cache_shm")) + + assert(cache:get("key_a", nil, function() return 1 end)) + assert(cache:get("key_b", nil, function() return 2 end)) + assert(cache:get("key_c", nil, function() return 3 end)) + + local res, err = cache:get_bulk { + "key_a", nil, function() return -1 end, nil, + "key_b", nil, function() return -2 end, nil, + "key_c", nil, function() return -3 end, nil, + n = 3, + } + if not res then + ngx.log(ngx.ERR, err) + return + end + + for i = 1, res.n, 3 do + ngx.say(tostring(res[i]), " ", + tostring(res[i + 1]), " ", + tostring(res[i + 2])) + end + } + } +--- request +GET /t +--- response_body +1 nil 1 +2 nil 1 +3 nil 1 +--- no_error_log +[error] + + + +=== TEST 10: get_bulk() multiple fetch L1/single fetch L3 +--- http_config eval: $::HttpConfig +--- config + location = /t { + content_by_lua_block { + local mlcache = require "kong.resty.mlcache" + local cache = assert(mlcache.new("my_mlcache", "cache_shm")) + + assert(cache:get("key_a", nil, function() return 1 end)) + assert(cache:get("key_b", nil, function() return 2 end)) + + local res, err = cache:get_bulk { + "key_a", nil, function() return -1 end, nil, + "key_b", nil, function() return -2 end, nil, + "key_c", nil, function() return 3 end, nil, + n = 3, + } + if not res then + ngx.log(ngx.ERR, err) + return + end + + for i = 1, res.n, 3 do + ngx.say(tostring(res[i]), " ", + tostring(res[i + 1]), " ", + tostring(res[i + 2])) + end + } + } +--- request +GET /t +--- response_body +1 nil 1 +2 nil 1 +3 nil 3 +--- no_error_log +[error] + + + +=== TEST 11: get_bulk() multiple fetch L1/single fetch L3 (with nils) +--- http_config eval: $::HttpConfig +--- config + location = /t { + content_by_lua_block { + local mlcache = require "kong.resty.mlcache" + local cache = assert(mlcache.new("my_mlcache", "cache_shm")) + + local _, err = cache:get("key_a", nil, function() return nil end) + assert(err == nil, err) + local _, err = cache:get("key_b", nil, function() return nil end) + 
assert(err == nil, err) + + local res, err = cache:get_bulk { + "key_a", nil, function() return -1 end, nil, + "key_b", nil, function() return -2 end, nil, + "key_c", nil, function() return nil end, nil, + n = 3, + } + if not res then + ngx.log(ngx.ERR, err) + return + end + + for i = 1, res.n, 3 do + ngx.say(tostring(res[i]), " ", + tostring(res[i + 1]), " ", + tostring(res[i + 2])) + end + } + } +--- request +GET /t +--- response_body +nil nil 1 +nil nil 1 +nil nil 3 +--- no_error_log +[error] + + + +=== TEST 12: get_bulk() mixed fetch L1/L2/L3 +--- http_config eval: $::HttpConfig +--- config + location = /t { + content_by_lua_block { + local mlcache = require "kong.resty.mlcache" + local cache = assert(mlcache.new("my_mlcache", "cache_shm")) + + assert(cache:get("key_a", nil, function() return 1 end)) + assert(cache:get("key_b", nil, function() return 2 end)) + + -- remove key_b from L1 + cache.lru:delete("key_b") + + local res, err = cache:get_bulk { + "key_a", nil, function() return -1 end, nil, + "key_b", nil, function() return -2 end, nil, + "key_c", nil, function() return 3 end, nil, + n = 3, + } + if not res then + ngx.log(ngx.ERR, err) + return + end + + for i = 1, res.n, 3 do + ngx.say(tostring(res[i]), " ", + tostring(res[i + 1]), " ", + tostring(res[i + 2])) + end + } + } +--- request +GET /t +--- response_body +1 nil 1 +2 nil 2 +3 nil 3 +--- no_error_log +[error] + + + +=== TEST 13: get_bulk() mixed fetch L1/L2/L3 (with nils) +--- http_config eval: $::HttpConfig +--- config + location = /t { + content_by_lua_block { + local mlcache = require "kong.resty.mlcache" + local cache = assert(mlcache.new("my_mlcache", "cache_shm")) + + local _, err = cache:get("key_a", nil, function() return nil end) + assert(err == nil, err) + local _, err = cache:get("key_b", nil, function() return nil end) + assert(err == nil, err) + + -- remove key_b from L1 + cache.lru:delete("key_b") + + local res, err = cache:get_bulk { + "key_a", nil, function() return -1 end, nil, + 
"key_b", nil, function() return -2 end, nil, + "key_c", nil, function() return nil end, nil, + n = 3, + } + if not res then + ngx.log(ngx.ERR, err) + return + end + + for i = 1, res.n, 3 do + ngx.say(tostring(res[i]), " ", + tostring(res[i + 1]), " ", + tostring(res[i + 2])) + end + } + } +--- request +GET /t +--- response_body +nil nil 1 +nil nil 2 +nil nil 3 +--- no_error_log +[error] + + + +=== TEST 14: get_bulk() returns callback-returned errors +--- http_config eval: $::HttpConfig +--- config + location = /t { + content_by_lua_block { + local mlcache = require "kong.resty.mlcache" + local cache = assert(mlcache.new("my_mlcache", "cache_shm")) + + local res, err = cache:get_bulk { + "key_a", nil, function() return 1 end, nil, + "key_b", nil, function() return 2 end, nil, + "key_c", nil, function() return nil, "some error" end, nil, + n = 3, + } + if not res then + ngx.log(ngx.ERR, err) + return + end + + for i = 1, res.n, 3 do + ngx.say(tostring(res[i]), " ", + tostring(res[i + 1]), " ", + tostring(res[i + 2])) + end + } + } +--- request +GET /t +--- response_body +1 nil 3 +2 nil 3 +nil some error nil +--- no_error_log +[error] + + + +=== TEST 15: get_bulk() returns callback runtime errors +--- http_config eval: $::HttpConfig +--- config + location = /t { + content_by_lua_block { + local mlcache = require "kong.resty.mlcache" + local cache = assert(mlcache.new("my_mlcache", "cache_shm")) + + local res, err = cache:get_bulk { + "key_a", nil, function() return 1 end, nil, + "key_b", nil, function() return 2 end, nil, + "key_c", nil, function() return error("some error") end, nil, + n = 3, + } + if not res then + ngx.log(ngx.ERR, err) + return + end + + for i = 1, res.n, 3 do + ngx.say(tostring(res[i]), " ", + tostring(res[i + 1]), " ", + tostring(res[i + 2])) + end + } + } +--- request +GET /t +--- response_body_like +1 nil 3 +2 nil 3 +nil callback threw an error: some error +stack traceback: +.*? 
nil +--- no_error_log +[error] + + + +=== TEST 16: get_bulk() runs L3 callback on expired keys +--- http_config eval: $::HttpConfig +--- config + location = /t { + content_by_lua_block { + local mlcache = require "kong.resty.mlcache" + local cache = assert(mlcache.new("my_mlcache", "cache_shm")) + + local n = 0 + local function cb() + n = n + 1 + return n + end + + assert(cache:get("key_a", { ttl = 0.2 }, cb)) + + ngx.sleep(0.2) + + local res, err = cache:get_bulk { + "key_a", nil, cb, nil, + "key_b", nil, cb, nil, + n = 2, + } + if not res then + ngx.log(ngx.ERR, err) + return + end + + for i = 1, res.n, 3 do + ngx.say(tostring(res[i]), " ", + tostring(res[i + 1]), " ", + tostring(res[i + 2])) + end + } + } +--- request +GET /t +--- response_body +2 nil 3 +3 nil 3 +--- no_error_log +[error] + + + +=== TEST 17: get_bulk() honors ttl and neg_ttl instance attributes +--- http_config eval: $::HttpConfig +--- config + location = /t { + content_by_lua_block { + local mlcache = require "kong.resty.mlcache" + local cache = assert(mlcache.new("my_mlcache", "cache_shm", { + ttl = 0.2, + neg_ttl = 0.3, + })) + + local res, err = cache:get_bulk { + "key_a", nil, function() return 1 end, nil, + "key_b", nil, function() return nil end, nil, + n = 2, + } + if not res then + ngx.log(ngx.ERR, err) + return + end + + for i = 1, res.n, 3 do + ngx.say(tostring(res[i]), " ", + tostring(res[i + 1]), " ", + tostring(res[i + 2])) + end + + ngx.say() + local ttl, _, value = assert(cache:peek("key_a")) + ngx.say("key_a: ", value, " (ttl: ", ttl, ")") + local ttl, _, value = assert(cache:peek("key_b")) + ngx.say("key_b: ", value, " (ttl: ", ttl, ")") + } + } +--- request +GET /t +--- response_body +1 nil 3 +nil nil 3 + +key_a: 1 (ttl: 0.2) +key_b: nil (ttl: 0.3) +--- no_error_log +[error] + + + +=== TEST 18: get_bulk() validates operations ttl and neg_ttl +--- http_config eval: $::HttpConfig +--- config + location = /t { + content_by_lua_block { + local mlcache = require 
"kong.resty.mlcache" + local cache = assert(mlcache.new("my_mlcache", "cache_shm")) + + local pok, perr = pcall(cache.get_bulk, cache, { + "key_a", { ttl = true }, function() return 1 end, nil, + "key_b", nil, function() return 2 end, nil, + n = 2, + }) + if not pok then + ngx.say(perr) + end + + local pok, perr = pcall(cache.get_bulk, cache, { + "key_a", nil, function() return 1 end, nil, + "key_b", { neg_ttl = true }, function() return 2 end, nil, + n = 2, + }) + if not pok then + ngx.say(perr) + end + } + } +--- request +GET /t +--- response_body +options at index 2 for operation 1 are invalid: opts.ttl must be a number +options at index 6 for operation 2 are invalid: opts.neg_ttl must be a number +--- no_error_log +[error] + + + +=== TEST 19: get_bulk() accepts ttl and neg_ttl for each operation +--- http_config eval: $::HttpConfig +--- config + location = /t { + content_by_lua_block { + local mlcache = require "kong.resty.mlcache" + local cache = assert(mlcache.new("my_mlcache", "cache_shm", { + ttl = 1, + neg_ttl = 2, + })) + + local res, err = cache:get_bulk { + "key_a", { ttl = 0.4, neg_ttl = 3 }, function() return 1 end, nil, + "key_b", { neg_ttl = 0.8 }, function() return nil end, nil, + n = 2, + } + if not res then + ngx.log(ngx.ERR, err) + return + end + + for i = 1, res.n, 3 do + ngx.say(tostring(res[i]), " ", + tostring(res[i + 1]), " ", + tostring(res[i + 2])) + end + + ngx.say() + local ttl, _, value = assert(cache:peek("key_a")) + ngx.say("key_a: ", value, " (ttl: ", ttl, ")") + local ttl, _, value = assert(cache:peek("key_b")) + ngx.say("key_b: ", value, " (ttl: ", ttl, ")") + } + } +--- request +GET /t +--- response_body +1 nil 3 +nil nil 3 + +key_a: 1 (ttl: 0.4) +key_b: nil (ttl: 0.8) +--- no_error_log +[error] + + + +=== TEST 20: get_bulk() honors ttl from callback return values +--- http_config eval: $::HttpConfig +--- config + location = /t { + content_by_lua_block { + local mlcache = require "kong.resty.mlcache" + local cache = 
assert(mlcache.new("my_mlcache", "cache_shm", { + ttl = 1, + })) + + local res, err = cache:get_bulk { + "key_a", nil, function() return 1, nil, 0.2 end, nil, + "key_b", nil, function() return 2 end, nil, + n = 2, + } + if not res then + ngx.log(ngx.ERR, err) + return + end + + for i = 1, res.n, 3 do + ngx.say(tostring(res[i]), " ", + tostring(res[i + 1]), " ", + tostring(res[i + 2])) + end + + ngx.say() + local ttl, _, value = assert(cache:peek("key_a")) + ngx.say("key_a: ", value, " (ttl: ", ttl, ")") + local ttl, _, value = assert(cache:peek("key_b")) + ngx.say("key_b: ", value, " (ttl: ", ttl, ")") + } + } +--- request +GET /t +--- response_body +1 nil 3 +2 nil 3 + +key_a: 1 (ttl: 0.2) +key_b: 2 (ttl: 1) +--- no_error_log +[error] + + + +=== TEST 21: get_bulk() honors resurrect_ttl instance attribute +--- http_config eval: $::HttpConfig +--- config + location = /t { + content_by_lua_block { + local mlcache = require "kong.resty.mlcache" + local cache = assert(mlcache.new("my_mlcache", "cache_shm", { + ttl = 0.2, + resurrect_ttl = 0.3, + })) + + local i = 0 + local function cb() + i = i + 1 + if i == 2 then + return nil, "some error" + end + return i + end + + assert(cache:get("key_a", nil, cb)) + + ngx.sleep(0.2) + + local res, err = cache:get_bulk { + "key_a", nil, cb, nil, + "key_b", nil, cb, nil, + n = 2, + } + if not res then + ngx.log(ngx.ERR, err) + return + end + + for i = 1, res.n, 3 do + ngx.say(tostring(res[i]), " ", + tostring(res[i + 1]), " ", + tostring(res[i + 2])) + end + + ngx.sleep(0.1) + + ngx.say() + local ttl, _, value = cache:peek("key_a") + ngx.say(string.format("key_a: %d ttl: %.2f", value, ttl)) + local ttl, _, value = cache:peek("key_b") + ngx.say(string.format("key_b: %d ttl: %.2f", value, ttl)) + } + } +--- request +GET /t +--- response_body_like +1 nil 4 +3 nil 3 + +key_a: 1 ttl: 0\.(?:2|1)\d+ +key_b: 3 ttl: 0\.(?:1|0)\d+ +--- no_error_log +[error] + + + +=== TEST 22: get_bulk() accepts resurrect_ttl for each operation +--- 
http_config eval: $::HttpConfig +--- config + location = /t { + content_by_lua_block { + local mlcache = require "kong.resty.mlcache" + local cache = assert(mlcache.new("my_mlcache", "cache_shm", { + ttl = 0.2, + resurrect_ttl = 3, + })) + + local i = 0 + local function cb() + i = i + 1 + if i == 2 then + return nil, "some error" + end + return i + end + + assert(cache:get("key_a", nil, cb)) + + ngx.sleep(0.2) + + local res, err = cache:get_bulk { + "key_a", { resurrect_ttl = 0.3 }, cb, nil, + "key_b", nil, cb, nil, + n = 2, + } + if not res then + ngx.log(ngx.ERR, err) + return + end + + for i = 1, res.n, 3 do + ngx.say(tostring(res[i]), " ", + tostring(res[i + 1]), " ", + tostring(res[i + 2])) + end + + ngx.sleep(0.1) + + ngx.say() + local ttl, _, value = cache:peek("key_a") + ngx.say(string.format("key_a: %d ttl: %.2f", value, ttl)) + local ttl, _, value = cache:peek("key_b") + ngx.say(string.format("key_b: %d ttl: %.2f", value, ttl)) + } + } +--- request +GET /t +--- response_body_like +1 nil 4 +3 nil 3 + +key_a: 1 ttl: 0\.(?:2|1)\d+ +key_b: 3 ttl: 0\.(?:1|0)\d+ +--- no_error_log +[error] + + + +=== TEST 23: get_bulk() honors l1_serializer instance attribute +--- http_config eval: $::HttpConfig +--- config + location = /t { + content_by_lua_block { + local mlcache = require "kong.resty.mlcache" + local cache = assert(mlcache.new("my_mlcache", "cache_shm", { + l1_serializer = function(t) + return t.x + end + })) + + local res, err = cache:get_bulk { + "key_a", nil, function() return { x = "hello" } end, nil, + "key_b", nil, function() return { x = "world" } end, nil, + n = 2, + } + if not res then + ngx.log(ngx.ERR, err) + return + end + + for i = 1, res.n, 3 do + ngx.say(tostring(res[i]), " ", + tostring(res[i + 1]), " ", + tostring(res[i + 2])) + end + } + } +--- request +GET /t +--- response_body +hello nil 3 +world nil 3 +--- no_error_log +[error] + + + +=== TEST 24: get_bulk() accepts l1_serializer for each operation +--- http_config eval: $::HttpConfig 
+--- config + location = /t { + content_by_lua_block { + local mlcache = require "kong.resty.mlcache" + local cache = assert(mlcache.new("my_mlcache", "cache_shm", { + l1_serializer = function(t) + return t.x + end + })) + + local function l1_serializer_a(t) return t.x end + local function l1_serializer_b(t) return t.y end + + local res, err = cache:get_bulk { + "key_a", { l1_serializer = l1_serializer_a }, function() return { x = "hello" } end, nil, + "key_b", { l1_serializer = l1_serializer_b }, function() return { y = "world" } end, nil, + n = 2, + } + if not res then + ngx.log(ngx.ERR, err) + return + end + + for i = 1, res.n, 3 do + ngx.say(tostring(res[i]), " ", + tostring(res[i + 1]), " ", + tostring(res[i + 2])) + end + } + } +--- request +GET /t +--- response_body +hello nil 3 +world nil 3 +--- no_error_log +[error] + + + +=== TEST 25: get_bulk() honors shm_set_tries instance attribute +--- http_config eval: $::HttpConfig +--- config + location = /t { + content_by_lua_block { + local dict = ngx.shared.cache_shm + dict:flush_all() + dict:flush_expired() + + local mlcache = require "kong.resty.mlcache" + local cache = assert(mlcache.new("my_mlcache", "cache_shm", { + shm_set_tries = 1, + })) + + -- fill up shm + + local idx = 0 + + while true do + local ok, err, forcible = dict:set(idx, string.rep("a", 2^2)) + if not ok then + ngx.log(ngx.ERR, err) + return + end + + if forcible then + break + end + + idx = idx + 1 + end + + -- now, trigger a hit with a value ~3 times as large + -- which should trigger retries and eventually remove 3 other + -- cached items (but still not enough memory) + + local res, err = cache:get_bulk { + "key_a", nil, function() return string.rep("a", 2^12) end, nil, + "key_b", nil, function() return 2 end, nil, + n = 2, + } + if not res then + ngx.log(ngx.ERR, err) + return + end + } + } +--- request +GET /t +--- ignore_response_body +--- no_error_log +[error] +--- error_log +could not write to lua_shared_dict 'cache_shm' after 1 tries 
(no memory) + + + +=== TEST 26: get_bulk() accepts shm_set_tries for each operation +--- http_config eval: $::HttpConfig +--- config + location = /t { + content_by_lua_block { + local dict = ngx.shared.cache_shm + dict:flush_all() + dict:flush_expired() + + local mlcache = require "kong.resty.mlcache" + local cache = assert(mlcache.new("my_mlcache", "cache_shm", { + shm_set_tries = 3, + })) + + -- fill up shm + + local idx = 0 + + while true do + local ok, err, forcible = dict:set(idx, string.rep("a", 2^2)) + if not ok then + ngx.log(ngx.ERR, err) + return + end + + if forcible then + break + end + + idx = idx + 1 + end + + -- now, trigger a hit with a value ~3 times as large + -- which should trigger retries and eventually remove 3 other + -- cached items (but still not enough memory) + + local res, err = cache:get_bulk { + "key_a", { shm_set_tries = 1 }, function() return string.rep("a", 2^12) end, nil, + "key_b", nil, function() return 2 end, nil, + n = 2, + } + if not res then + ngx.log(ngx.ERR, err) + return + end + } + } +--- request +GET /t +--- ignore_response_body +--- no_error_log +[error] +--- error_log +could not write to lua_shared_dict 'cache_shm' after 1 tries (no memory) + + + +=== TEST 27: get_bulk() operations wait on lock if another thread is fetching the same key +--- http_config eval: $::HttpConfig +--- config + location = /t { + content_by_lua_block { + local mlcache = require "kong.resty.mlcache" + local cache_1 = assert(mlcache.new("my_mlcache", "cache_shm")) + local cache_2 = assert(mlcache.new("my_mlcache", "cache_shm")) + + local function cb(wait) + if wait then + ngx.sleep(wait) + end + + return "hello" + end + + local t1_data, t1_hit_lvl + local t2_res + + local t1 = ngx.thread.spawn(function() + local err + t1_data, err, t1_hit_lvl = cache_1:get("key", nil, cb, 0.3) + if err then + ngx.log(ngx.ERR, err) + return + end + end) + + local t2 = ngx.thread.spawn(function() + local err + t2_res, err = cache_2:get_bulk { + "key_a", nil, cb, 
nil, + "key", nil, cb, nil, + n = 2, + } + if not t2_res then + ngx.log(ngx.ERR, err) + return + end + end) + + assert(ngx.thread.wait(t1)) + assert(ngx.thread.wait(t2)) + + ngx.say("t1\n", t1_data, " ", t1_hit_lvl) + + ngx.say() + ngx.say("t2") + for i = 1, t2_res.n, 3 do + ngx.say(tostring(t2_res[i]), " ", + tostring(t2_res[i + 1]), " ", + tostring(t2_res[i + 2])) + end + } + } +--- request +GET /t +--- response_body +t1 +hello 3 + +t2 +hello nil 3 +hello nil 2 +--- no_error_log +[error] + + + +=== TEST 28: get_bulk() operations reports timeout on lock if another thread is fetching the same key +--- http_config eval: $::HttpConfig +--- config + location = /t { + content_by_lua_block { + local mlcache = require "kong.resty.mlcache" + local cache_1 = assert(mlcache.new("my_mlcache", "cache_shm")) + local cache_2 = assert(mlcache.new("my_mlcache", "cache_shm", { + resty_lock_opts = { timeout = 0.2 } + })) + + local function cb(wait) + if wait then + ngx.sleep(wait) + end + + return "hello" + end + + local t1_data, t1_hit_lvl + local t2_res + + local t1 = ngx.thread.spawn(function() + local err + t1_data, err, t1_hit_lvl = cache_1:get("key", nil, cb, 0.3) + if err then + ngx.log(ngx.ERR, err) + return + end + end) + + local t2 = ngx.thread.spawn(function() + local err + t2_res, err = cache_2:get_bulk { + "key_a", nil, cb, nil, + "key", nil, cb, nil, + n = 2, + } + if not t2_res then + ngx.log(ngx.ERR, err) + return + end + end) + + assert(ngx.thread.wait(t1)) + assert(ngx.thread.wait(t2)) + + ngx.say("t1\n", t1_data, " ", t1_hit_lvl) + + ngx.say() + ngx.say("t2") + for i = 1, t2_res.n, 3 do + ngx.say(tostring(t2_res[i]), " ", + tostring(t2_res[i + 1]), " ", + tostring(t2_res[i + 2])) + end + } + } +--- request +GET /t +--- response_body +t1 +hello 3 + +t2 +hello nil 3 +nil could not acquire callback lock: timeout nil +--- no_error_log +[error] + + + +=== TEST 29: get_bulk() opts.concurrency: default is 3 (with 3 ops) +--- http_config eval: $::HttpConfig +--- 
log_level: debug +--- config + location = /t { + content_by_lua_block { + local mlcache = require "kong.resty.mlcache" + local cache = assert(mlcache.new("my_mlcache", "cache_shm", { + debug = true, + })) + + local function cb(wait) + return "hello" + end + + local res, err = cache:get_bulk({ + "key_a", nil, cb, nil, + "key_b", nil, cb, nil, + "key_c", nil, cb, nil, + n = 3, + }) + } + } +--- request +GET /t +--- no_response_body +--- error_log +spawning 2 threads to run 3 callbacks +thread 1 running callbacks 1 to 1 +thread 2 running callbacks 2 to 2 +main thread running callbacks 3 to 3 +--- no_error_log +[error] + + + +=== TEST 30: get_bulk() opts.concurrency: default is 3 (with 6 ops) +--- http_config eval: $::HttpConfig +--- log_level: debug +--- config + location = /t { + content_by_lua_block { + local mlcache = require "kong.resty.mlcache" + local cache = assert(mlcache.new("my_mlcache", "cache_shm", { + debug = true, + })) + + local function cb(wait) + return "hello" + end + + local res, err = cache:get_bulk({ + "key_a", nil, cb, nil, + "key_b", nil, cb, nil, + "key_c", nil, cb, nil, + "key_d", nil, cb, nil, + "key_e", nil, cb, nil, + "key_f", nil, cb, nil, + n = 6, + }) + } + } +--- request +GET /t +--- no_response_body +--- error_log +spawning 2 threads to run 6 callbacks +thread 1 running callbacks 1 to 2 +thread 2 running callbacks 3 to 4 +main thread running callbacks 5 to 6 +--- no_error_log +[error] + + + +=== TEST 31: get_bulk() opts.concurrency: default is 3 (with 7 ops) +--- http_config eval: $::HttpConfig +--- log_level: debug +--- config + location = /t { + content_by_lua_block { + local mlcache = require "kong.resty.mlcache" + local cache = assert(mlcache.new("my_mlcache", "cache_shm", { + debug = true, + })) + + local function cb(wait) + return "hello" + end + + local res, err = cache:get_bulk({ + "key_a", nil, cb, nil, + "key_b", nil, cb, nil, + "key_c", nil, cb, nil, + "key_d", nil, cb, nil, + "key_e", nil, cb, nil, + "key_f", nil, cb, nil, 
+ "key_g", nil, cb, nil, + n = 7, + }) + } + } +--- request +GET /t +--- no_response_body +--- error_log +spawning 2 threads to run 7 callbacks +thread 1 running callbacks 1 to 3 +thread 2 running callbacks 4 to 6 +main thread running callbacks 7 to 7 +--- no_error_log +[error] + + + +=== TEST 32: get_bulk() opts.concurrency: default is 3 (with 1 op) +--- http_config eval: $::HttpConfig +--- log_level: debug +--- config + location = /t { + content_by_lua_block { + local mlcache = require "kong.resty.mlcache" + local cache = assert(mlcache.new("my_mlcache", "cache_shm", { + debug = true, + })) + + local function cb(wait) + return "hello" + end + + local res, err = cache:get_bulk({ + "key_a", nil, cb, nil, + n = 1, + }) + } + } +--- request +GET /t +--- no_response_body +--- error_log +spawning 0 threads to run 1 callbacks +main thread running callbacks 1 to 1 +--- no_error_log +[warn] +[error] +[alert] + + + +=== TEST 33: get_bulk() opts.concurrency: 1 (with 3 ops) +--- http_config eval: $::HttpConfig +--- log_level: debug +--- config + location = /t { + content_by_lua_block { + local mlcache = require "kong.resty.mlcache" + local cache = assert(mlcache.new("my_mlcache", "cache_shm", { + debug = true, + })) + + local function cb(wait) + return "hello" + end + + local res, err = cache:get_bulk({ + "key_a", nil, cb, nil, + "key_b", nil, cb, nil, + "key_c", nil, cb, nil, + n = 3, + }, { concurrency = 1 }) + } + } +--- request +GET /t +--- no_response_body +--- error_log +spawning 0 threads to run 3 callbacks +main thread running callbacks 1 to 3 +--- no_error_log +[warn] +[error] +[alert] + + + +=== TEST 34: get_bulk() opts.concurrency: 1 (with 6 ops) +--- http_config eval: $::HttpConfig +--- log_level: debug +--- config + location = /t { + content_by_lua_block { + local mlcache = require "kong.resty.mlcache" + local cache = assert(mlcache.new("my_mlcache", "cache_shm", { + debug = true, + })) + + local function cb(wait) + return "hello" + end + + local res, err = 
cache:get_bulk({ + "key_a", nil, cb, nil, + "key_b", nil, cb, nil, + "key_c", nil, cb, nil, + "key_d", nil, cb, nil, + "key_e", nil, cb, nil, + "key_f", nil, cb, nil, + n = 6, + }, { concurrency = 1 }) + } + } +--- request +GET /t +--- no_response_body +--- error_log +spawning 0 threads to run 6 callbacks +main thread running callbacks 1 to 6 +--- no_error_log +[warn] +[error] +[alert] + + + +=== TEST 35: get_bulk() opts.concurrency: 6 (with 3 ops) +--- http_config eval: $::HttpConfig +--- log_level: debug +--- config + location = /t { + content_by_lua_block { + local mlcache = require "kong.resty.mlcache" + local cache = assert(mlcache.new("my_mlcache", "cache_shm", { + debug = true, + })) + + local function cb(wait) + return "hello" + end + + local res, err = cache:get_bulk({ + "key_a", nil, cb, nil, + "key_b", nil, cb, nil, + "key_c", nil, cb, nil, + n = 3, + }, { concurrency = 6 }) + } + } +--- request +GET /t +--- no_response_body +--- error_log +spawning 2 threads to run 3 callbacks +thread 1 running callbacks 1 to 1 +thread 2 running callbacks 2 to 2 +main thread running callbacks 3 to 3 +--- no_error_log +[error] + + + +=== TEST 36: get_bulk() opts.concurrency: 6 (with 6 ops) +--- http_config eval: $::HttpConfig +--- log_level: debug +--- config + location = /t { + content_by_lua_block { + local mlcache = require "kong.resty.mlcache" + local cache = assert(mlcache.new("my_mlcache", "cache_shm", { + debug = true, + })) + + local function cb(wait) + return "hello" + end + + local res, err = cache:get_bulk({ + "key_a", nil, cb, nil, + "key_b", nil, cb, nil, + "key_c", nil, cb, nil, + "key_d", nil, cb, nil, + "key_e", nil, cb, nil, + "key_f", nil, cb, nil, + n = 6, + }, { concurrency = 6 }) + } + } +--- request +GET /t +--- no_response_body +--- error_log +spawning 5 threads to run 6 callbacks +thread 1 running callbacks 1 to 1 +thread 2 running callbacks 2 to 2 +thread 3 running callbacks 3 to 3 +thread 4 running callbacks 4 to 4 +thread 5 running callbacks 5 
to 5 +main thread running callbacks 6 to 6 +--- no_error_log +[warn] +[error] +[alert] + + + +=== TEST 37: get_bulk() opts.concurrency: 6 (with 7 ops) +--- http_config eval: $::HttpConfig +--- log_level: debug +--- config + location = /t { + content_by_lua_block { + local mlcache = require "kong.resty.mlcache" + local cache = assert(mlcache.new("my_mlcache", "cache_shm", { + debug = true, + })) + + local function cb(wait) + return "hello" + end + + local res, err = cache:get_bulk({ + "key_a", nil, cb, nil, + "key_b", nil, cb, nil, + "key_c", nil, cb, nil, + "key_d", nil, cb, nil, + "key_e", nil, cb, nil, + "key_f", nil, cb, nil, + "key_g", nil, cb, nil, + n = 7, + }, { concurrency = 6 }) + } + } +--- request +GET /t +--- no_response_body +--- error_log +spawning 5 threads to run 7 callbacks +thread 1 running callbacks 1 to 2 +thread 2 running callbacks 3 to 4 +thread 3 running callbacks 5 to 6 +thread 4 running callbacks 7 to 7 +--- no_error_log +[error] + + + +=== TEST 38: get_bulk() opts.concurrency: 6 (with 1 op) +--- http_config eval: $::HttpConfig +--- log_level: debug +--- config + location = /t { + content_by_lua_block { + local mlcache = require "kong.resty.mlcache" + local cache = assert(mlcache.new("my_mlcache", "cache_shm", { + debug = true, + })) + + local function cb(wait) + return "hello" + end + + local res, err = cache:get_bulk({ + "key_a", nil, cb, nil, + n = 1, + }, { concurrency = 6 }) + } + } +--- request +GET /t +--- no_response_body +--- error_log +spawning 0 threads to run 1 callbacks +main thread running callbacks 1 to 1 +--- no_error_log +[warn] +[error] +[alert] + + + +=== TEST 39: get_bulk() opts.concurrency: 6 (with 7 ops) +--- http_config eval: $::HttpConfig +--- log_level: debug +--- config + location = /t { + content_by_lua_block { + local mlcache = require "kong.resty.mlcache" + local cache = assert(mlcache.new("my_mlcache", "cache_shm")) + + local res, err = cache:get_bulk({ + "key_a", nil, function() return 1 end, nil, + "key_b", 
nil, function() return 2 end, nil, + "key_c", nil, function() return 3 end, nil, + "key_d", nil, function() return 4 end, nil, + "key_e", nil, function() return 5 end, nil, + "key_f", nil, function() return 6 end, nil, + "key_g", nil, function() return 7 end, nil, + n = 7, + }, { concurrency = 6 }) + if not res then + ngx.log(ngx.ERR, err) + return + end + + for i = 1, res.n, 3 do + ngx.say(tostring(res[i]), " ", + tostring(res[i + 1]), " ", + tostring(res[i + 2])) + end + } + } +--- request +GET /t +--- response_body +1 nil 3 +2 nil 3 +3 nil 3 +4 nil 3 +5 nil 3 +6 nil 3 +7 nil 3 +--- no_error_log +[error] diff --git a/t/05-mlcache/14-bulk-and-res.t b/t/05-mlcache/14-bulk-and-res.t new file mode 100644 index 000000000000..3e105723a2ab --- /dev/null +++ b/t/05-mlcache/14-bulk-and-res.t @@ -0,0 +1,227 @@ +# vim:set ts=4 sts=4 sw=4 et ft=: + +use Test::Nginx::Socket::Lua; +use Cwd qw(cwd); +use lib '.'; +use t::Util; + +no_long_string(); + +workers(2); + +#repeat_each(2); + +plan tests => repeat_each() * blocks() * 3; + +my $pwd = cwd(); + +our $HttpConfig = qq{ + lua_package_path "$pwd/lib/?.lua;;"; + lua_shared_dict cache_shm 1m; + #lua_shared_dict cache_shm_miss 1m; + + init_by_lua_block { + -- local verbose = true + local verbose = false + local outfile = "$Test::Nginx::Util::ErrLogFile" + -- local outfile = "/tmp/v.log" + if verbose then + local dump = require "jit.dump" + dump.on(nil, outfile) + else + local v = require "jit.v" + v.on(outfile) + end + + require "resty.core" + -- jit.opt.start("hotloop=1") + -- jit.opt.start("loopunroll=1000000") + -- jit.off() + } +}; + +run_tests(); + +__DATA__ + +=== TEST 1: new_bulk() creates a bulk +--- http_config eval: $::HttpConfig +--- config + location = /t { + content_by_lua_block { + local mlcache = require "kong.resty.mlcache" + + local bulk = mlcache.new_bulk() + + ngx.say("type: ", type(bulk)) + ngx.say("size: ", #bulk) + ngx.say("bulk.n: ", bulk.n) + } + } +--- request +GET /t +--- response_body +type: table 
+size: 0 +bulk.n: 0 +--- no_error_log +[error] + + + +=== TEST 2: new_bulk() creates a bulk with narr in arg #1 +--- http_config eval: $::HttpConfig +--- config + location = /t { + content_by_lua_block { + local mlcache = require "kong.resty.mlcache" + + local bulk = mlcache.new_bulk(3) + + ngx.say("type: ", type(bulk)) + ngx.say("size: ", #bulk) + ngx.say("bulk.n: ", bulk.n) + } + } +--- request +GET /t +--- response_body +type: table +size: 0 +bulk.n: 0 +--- no_error_log +[error] + + + +=== TEST 3: bulk:add() adds bulk operations +--- http_config eval: $::HttpConfig +--- config + location = /t { + content_by_lua_block { + local mlcache = require "kong.resty.mlcache" + + local function cb() end + + local bulk = mlcache.new_bulk(3) + + for i = 1, 3 do + bulk:add("key_" .. i, nil, cb, i) + end + + for i = 1, 3*4, 4 do + ngx.say(tostring(bulk[i]), " ", + tostring(bulk[i + 1]), " ", + tostring(bulk[i + 2]), " ", + tostring(bulk[i + 3])) + end + + ngx.say("bulk.n: ", bulk.n) + } + } +--- request +GET /t +--- response_body_like +key_1 nil function: 0x[0-9a-fA-F]+ 1 +key_2 nil function: 0x[0-9a-fA-F]+ 2 +key_3 nil function: 0x[0-9a-fA-F]+ 3 +bulk\.n: 3 +--- no_error_log +[error] + + + +=== TEST 4: bulk:add() can be given to get_bulk() +--- http_config eval: $::HttpConfig +--- config + location = /t { + content_by_lua_block { + local mlcache = require "kong.resty.mlcache" + local cache = assert(mlcache.new("my_mlcache", "cache_shm")) + + local function cb(i) return i end + + local bulk = mlcache.new_bulk(3) + + for i = 1, 3 do + bulk:add("key_" .. 
i, nil, cb, i) + end + + local res, err = cache:get_bulk(bulk) + if not res then + ngx.log(ngx.ERR, err) + return + end + + for i = 1, res.n, 3 do + ngx.say(tostring(res[i]), " ", + tostring(res[i + 1]), " ", + tostring(res[i + 2])) + end + } + } +--- request +GET /t +--- response_body +1 nil 3 +2 nil 3 +3 nil 3 +--- no_error_log +[error] + + + +=== TEST 5: each_bulk_res() iterates over get_bulk() results +--- http_config eval: $::HttpConfig +--- config + location = /t { + content_by_lua_block { + local mlcache = require "kong.resty.mlcache" + local cache = assert(mlcache.new("my_mlcache", "cache_shm")) + + local res, err = cache:get_bulk { + "key_a", nil, function() return 1 end, nil, + "key_b", nil, function() return 2 end, nil, + "key_c", nil, function() return 3 end, nil, + n = 3, + } + if not res then + ngx.log(ngx.ERR, err) + return + end + + for i, data, err, hit_lvl in mlcache.each_bulk_res(res) do + ngx.say(i, " ", data, " ", err, " ", hit_lvl) + end + } + } +--- request +GET /t +--- response_body +1 1 nil 3 +2 2 nil 3 +3 3 nil 3 +--- no_error_log +[error] + + + +=== TEST 6: each_bulk_res() throws an error on unrecognized res +--- http_config eval: $::HttpConfig +--- config + location = /t { + content_by_lua_block { + local mlcache = require "kong.resty.mlcache" + + local pok, perr = pcall(mlcache.each_bulk_res, {}) + if not pok then + ngx.say(perr) + end + } + } +--- request +GET /t +--- response_body +res must have res.n field; is this a get_bulk() result? +--- no_error_log +[error] From 7e57bc7fad0d34246a3fc9191088a03b5ab7e76e Mon Sep 17 00:00:00 2001 From: Aapo Talvensaari Date: Fri, 2 Jun 2023 13:26:16 +0300 Subject: [PATCH 2/3] tests(mlcache): remove unnecessary tests and usage of skip_openresty ### Summary With Kong we will run tests with a known OpenResty version, so we can remove some mlcache tests that were for older OpenResty versions and also the usage of `skip_openresty` clauses.
Signed-off-by: Aapo Talvensaari --- t/05-mlcache/02-get.t | 1 - t/05-mlcache/08-purge.t | 43 ++++------------------------------- t/05-mlcache/10-ipc_shm.t | 48 +-------------------------------------- 3 files changed, 6 insertions(+), 86 deletions(-) diff --git a/t/05-mlcache/02-get.t b/t/05-mlcache/02-get.t index 85500b023e6b..3e26c9a4c31b 100644 --- a/t/05-mlcache/02-get.t +++ b/t/05-mlcache/02-get.t @@ -1360,7 +1360,6 @@ hit level from shm: 2 === TEST 30: get() returns hit level for boolean false hits ---- skip_eval: 3: t::Util::skip_openresty('<', '1.11.2.3') --- http_config eval: $::HttpConfig --- config location = /t { diff --git a/t/05-mlcache/08-purge.t b/t/05-mlcache/08-purge.t index c8f8eca72d9a..8d59932e8310 100644 --- a/t/05-mlcache/08-purge.t +++ b/t/05-mlcache/08-purge.t @@ -131,7 +131,6 @@ ok === TEST 4: purge() deletes all items from L1 with a custom LRU ---- skip_eval: 3: t::Util::skip_openresty('<', '1.13.6.2') --- http_config eval: $::HttpConfig --- config location = /t { @@ -178,39 +177,7 @@ lru instance is the same one: true -=== TEST 5: purge() is prevented if custom LRU does not support flush_all() ---- skip_eval: 3: t::Util::skip_openresty('>', '1.13.6.1') ---- http_config eval: $::HttpConfig ---- config - location = /t { - content_by_lua_block { - local mlcache = require "kong.resty.mlcache" - local lrucache = require "resty.lrucache" - - local cache = assert(mlcache.new("my_mlcache", "cache_shm", { - ipc_shm = "ipc_shm", - lru = lrucache.new(10), - })) - - local pok, perr = pcall(cache.purge, cache) - if not pok then - ngx.say(perr) - return - end - - ngx.say("ok") - } - } ---- request -GET /t ---- response_body -cannot purge when using custom LRU cache with OpenResty < 1.13.6.2 ---- no_error_log -[error] - - - -=== TEST 6: purge() deletes all items from shm_miss is specified +=== TEST 5: purge() deletes all items from shm_miss is specified --- http_config eval: $::HttpConfig --- config location = /t { @@ -258,7 +225,7 @@ ok -=== TEST 7: 
purge() does not call shm:flush_expired() by default +=== TEST 6: purge() does not call shm:flush_expired() by default --- http_config eval: $::HttpConfig --- config location = /t { @@ -293,7 +260,7 @@ flush_expired called with 'max_count' -=== TEST 8: purge() calls shm:flush_expired() if argument specified +=== TEST 7: purge() calls shm:flush_expired() if argument specified --- http_config eval: $::HttpConfig --- config location = /t { @@ -330,7 +297,7 @@ flush_expired called with 'max_count': nil -=== TEST 9: purge() calls shm:flush_expired() if shm_miss is specified +=== TEST 8: purge() calls shm:flush_expired() if shm_miss is specified --- http_config eval: $::HttpConfig --- config location = /t { @@ -369,7 +336,7 @@ flush_expired called with 'max_count': nil -=== TEST 10: purge() calls broadcast() on purge channel +=== TEST 9: purge() calls broadcast() on purge channel --- http_config eval: $::HttpConfig --- config location = /t { diff --git a/t/05-mlcache/10-ipc_shm.t b/t/05-mlcache/10-ipc_shm.t index 3f7b3b093569..34e81c3f0280 100644 --- a/t/05-mlcache/10-ipc_shm.t +++ b/t/05-mlcache/10-ipc_shm.t @@ -224,53 +224,7 @@ called lru:delete() with key: my_key -=== TEST 6: purge() with mlcache_shm invalidates other workers' LRU cache (OpenResty < 1.13.6.2) ---- skip_eval: 3: t::Util::skip_openresty('>=', '1.13.6.2') ---- http_config eval: $::HttpConfig ---- config - location = /t { - content_by_lua_block { - local mlcache = require "kong.resty.mlcache" - - local opts = { - ipc_shm = "ipc_shm", - debug = true -- allows same worker to receive its own published events - } - - local cache = assert(mlcache.new("namespace", "cache_shm", opts)) - local cache_clone = assert(mlcache.new("namespace", "cache_shm", opts)) - - local lru = cache.lru - local lru_clone = cache_clone.lru - - assert(cache:purge()) - - -- cache.lru should be different now - ngx.say("cache has new lru: ", cache.lru ~= lru) - - ngx.say("cache_clone still has same lru: ", cache_clone.lru == lru_clone) - 
- ngx.say("calling update on cache_clone") - assert(cache_clone:update()) - - -- cache.lru should be different now - ngx.say("cache_clone has new lru: ", cache_clone.lru ~= lru_clone) - } - } ---- request -GET /t ---- response_body -cache has new lru: true -cache_clone still has same lru: true -calling update on cache_clone -cache_clone has new lru: true ---- no_error_log -[error] - - - -=== TEST 7: purge() with mlcache_shm invalidates other workers' LRU cache (OpenResty >= 1.13.6.2) ---- skip_eval: 3: t::Util::skip_openresty('<', '1.13.6.2') +=== TEST 6: purge() with mlcache_shm invalidates other workers' LRU cache --- http_config eval: $::HttpConfig --- config location = /t { From a6dc798c01558be8506b92f12d0eb8e433eda025 Mon Sep 17 00:00:00 2001 From: Aapo Talvensaari Date: Fri, 2 Jun 2023 22:22:42 +0300 Subject: [PATCH 3/3] tests(mlcache): fix mlcache test error because of different path to mlcache lib Signed-off-by: Aapo Talvensaari --- kong/resty/mlcache/init.lua | 2 +- t/05-mlcache/02-get.t | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/kong/resty/mlcache/init.lua b/kong/resty/mlcache/init.lua index aad500780ec5..541623bade48 100644 --- a/kong/resty/mlcache/init.lua +++ b/kong/resty/mlcache/init.lua @@ -1024,7 +1024,7 @@ function _M:get_bulk(bulk, opts) = pcall(check_opts, self, b_opts) if not pok then -- strip the stacktrace - local err = ttl:match("mlcache%.lua:%d+:%s(.*)") + local err = ttl:match("init%.lua:%d+:%s(.*)") error("options at index " .. i + 1 .. " for operation " .. ceil(i / 4) .. " are invalid: " .. err, 2) end diff --git a/t/05-mlcache/02-get.t b/t/05-mlcache/02-get.t index 3e26c9a4c31b..bcb10064429a 100644 --- a/t/05-mlcache/02-get.t +++ b/t/05-mlcache/02-get.t @@ -716,7 +716,7 @@ from shm: table world bar GET /t --- error_code: 500 --- error_log eval -qr/\[error\] .*?mlcache\.lua:\d+: cannot cache value of type userdata/ +qr/\[error\] .*?init\.lua:\d+: cannot cache value of type userdata/