diff --git a/kong/global.lua b/kong/global.lua index 0dad6430551b..f5c151eec556 100644 --- a/kong/global.lua +++ b/kong/global.lua @@ -69,6 +69,7 @@ end local _GLOBAL = { phases = phase_checker.phases, + LAST_RECONFIGURATION = ngx.now(), } diff --git a/kong/runloop/handler.lua b/kong/runloop/handler.lua index 76f01fe9b3ec..2c1344bdbba6 100644 --- a/kong/runloop/handler.lua +++ b/kong/runloop/handler.lua @@ -12,7 +12,7 @@ local certificate = require "kong.runloop.certificate" local concurrency = require "kong.concurrency" local lrucache = require "resty.lrucache" local ktls = require "resty.kong.tls" - +local global = require "kong.global" @@ -454,7 +454,7 @@ local function update_router() return nil, --[[ 'err' fully formatted ]] err end - return true + return "updated" end @@ -538,7 +538,7 @@ local function update_plugins_iterator() return nil, --[[ 'err' fully formatted ]] err end - return true + return "updated" end @@ -583,7 +583,7 @@ local function build_wasm_state() WASM_STATE_VERSION = version - return true + return ok end @@ -1015,84 +1015,55 @@ return { if strategy ~= "off" then local worker_state_update_frequency = kong.configuration.worker_state_update_frequency or 1 - local router_async_opts = { - name = "router", - timeout = 0, - on_timeout = "return_true", - } - - local function rebuild_router_timer(premature) + local function rebuild_timer(premature) if premature then return end - -- Don't wait for the semaphore (timeout = 0) when updating via the - -- timer. - -- If the semaphore is locked, that means that the rebuild is - -- already ongoing. 
- local ok, err = rebuild_router(router_async_opts) - if not ok then + local router_update_status, err = rebuild_router({ + name = "router", + timeout = 0, + on_timeout = "return_true", + }) + if not router_update_status then log(ERR, "could not rebuild router via timer: ", err) end - end - - local _, err = kong.timer:named_every("router-rebuild", - worker_state_update_frequency, - rebuild_router_timer) - if err then - log(ERR, "could not schedule timer to rebuild router: ", err) - end - - local plugins_iterator_async_opts = { - name = "plugins_iterator", - timeout = 0, - on_timeout = "return_true", - } - - local function rebuild_plugins_iterator_timer(premature) - if premature then - return - end - local _, err = rebuild_plugins_iterator(plugins_iterator_async_opts) - if err then - log(ERR, "could not rebuild plugins iterator via timer: ", err) - end - end - - local _, err = kong.timer:named_every("plugins-iterator-rebuild", - worker_state_update_frequency, - rebuild_plugins_iterator_timer) - if err then - log(ERR, "could not schedule timer to rebuild plugins iterator: ", err) - end - - - if wasm.enabled() then - local wasm_async_opts = { - name = "wasm", + local plugins_iterator_update_status, err = rebuild_plugins_iterator({ + name = "plugins_iterator", timeout = 0, on_timeout = "return_true", - } - - local function rebuild_wasm_filter_chains_timer(premature) - if premature then - return - end + }) + if not plugins_iterator_update_status then + log(ERR, "could not rebuild plugins iterator via timer: ", err) + end - local _, err = rebuild_wasm_state(wasm_async_opts) + local wasm_update_status + if wasm.enabled() then + wasm_update_status, err = rebuild_wasm_state({ + name = "wasm", + timeout = 0, + on_timeout = "return_true", + }) if err then log(ERR, "could not rebuild wasm filter chains via timer: ", err) end end - local _, err = kong.timer:named_every("wasm-filter-chains-rebuild", - worker_state_update_frequency, - rebuild_wasm_filter_chains_timer) - if err 
then - log(ERR, "could not schedule timer to rebuild wasm filter chains: ", err) + if router_update_status == "updated" + or plugins_iterator_update_status == "updated" + or wasm_update_status == "updated" then + global.LAST_RECONFIGURATION = ngx.now() + log(NOTICE, "configuration update was processed") end end + + local _, err = kong.timer:named_every("rebuild", + worker_state_update_frequency, + rebuild_timer) + if err then + log(ERR, "could not schedule timer to rebuild: ", err) + end end end, }, @@ -1182,6 +1153,24 @@ return { }, access = { before = function(ctx) + -- if this is a version-conditional get, abort the request if this dataplane has not processed the configuration + -- since the time indicated + local if_reconfigured_since = kong.request.get_header('x-kong-if-reconfigured-since') + if if_reconfigured_since then + if_reconfigured_since = ngx.parse_http_time(if_reconfigured_since) + log(INFO, "get if reconfigured since ", if_reconfigured_since, " last reconfiguration at ", global.LAST_RECONFIGURATION) + if if_reconfigured_since and if_reconfigured_since > global.LAST_RECONFIGURATION then + return kong.response.error( + 503, + "Service Unavailable", + { + ["X-Kong-Reconfiguration-Status"] = "pending", + ["Retry-After"] = tostring(kong.configuration.worker_state_update_frequency or 1), + } + ) + end + end + -- if there is a gRPC service in the context, don't re-execute the pre-access -- phase handler - it has been executed before the internal redirect if ctx.service and (ctx.service.protocol == "grpc" or diff --git a/kong/runloop/wasm.lua b/kong/runloop/wasm.lua index 64502ca6b084..dafd9e3897fb 100644 --- a/kong/runloop/wasm.lua +++ b/kong/runloop/wasm.lua @@ -522,7 +522,7 @@ local function update_in_place(new_version) set_state(new) - return true + return "updated" end diff --git a/spec/02-integration/01-helpers/04-reconfiguration-conditional-get_spec.lua b/spec/02-integration/01-helpers/04-reconfiguration-conditional-get_spec.lua new file mode 100644 index 
000000000000..adb21bfbe40a --- /dev/null +++ b/spec/02-integration/01-helpers/04-reconfiguration-conditional-get_spec.lua @@ -0,0 +1,73 @@ +local helpers = require "spec.helpers" + + +describe("configuration conditional get", function() + lazy_setup(function() + helpers.get_db_utils(nil, {}) -- runs migrations + assert(helpers.start_kong()) + end) + + lazy_teardown(function() + helpers.stop_kong() + end) + + local proxy_client + local admin_client + + before_each(function() + proxy_client = helpers.proxy_client(5000) + admin_client = helpers.admin_client(10000) + end) + + after_each(function() + if proxy_client then + proxy_client:close() + end + if admin_client then + admin_client:close() + end + end) + + it("waits until a change through the admin API has propagated to the proxy path", function() + local res = admin_client:post( + "/services", + { + body = { + protocol = "http", + name = "foo", + host = "127.0.0.1", + }, + headers = { ["Content-Type"] = "application/json" }, + }) + assert.res_status(201, res) + res = admin_client:post( + "/services/foo/routes", + { + body = { + paths = {"/blub"} + }, + headers = { ["Content-Type"] = "application/json" }, + }) + assert.res_status(201, res) + local date = res.headers['Date'] + -- flaky test below - we can't be sure that the configuration did not happen quicker than expected + res = proxy_client:get( + "/", + { headers = { ["X-Kong-If-Reconfigured-Since"] = date } } + ) + assert.res_status(503, res) + assert + .with_timeout(30) + .eventually( + function() + local client = helpers.proxy_client() + local res = client:get( + "/nonexistent", + { headers = { ["X-Kong-If-Reconfigured-Since"] = date } } + ) + client:close() + return res.status == 404 + end) + .is_truthy() + end)
end)