From 6e64d05cc6d139baabf3601d3f27e4190e0395c2 Mon Sep 17 00:00:00 2001 From: Joshua Schmid Date: Thu, 19 Oct 2023 12:08:16 +0200 Subject: [PATCH 001/249] fix(core): comparison against boolean Signed-off-by: Joshua Schmid --- kong/clustering/utils.lua | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/kong/clustering/utils.lua b/kong/clustering/utils.lua index f463c83755cf..891aa974c691 100644 --- a/kong/clustering/utils.lua +++ b/kong/clustering/utils.lua @@ -157,7 +157,7 @@ end function _M.is_dp_worker_process() - if kong.configuration.dedicated_config_processing then + if kong.configuration.dedicated_config_processing == true then return process_type() == "privileged agent" end From 57e50d8b3b8140642cdd65fb596242b7f1551739 Mon Sep 17 00:00:00 2001 From: Michael Martin Date: Thu, 19 Oct 2023 11:01:07 -0700 Subject: [PATCH 002/249] chore(deps): pin lua-resty-ljsonschema to 1.1.6-2 (#11787) The lua-cjson dependency was removed from this revision. See: KAG-2757 --- kong-3.5.0-0.rockspec | 2 +- scripts/explain_manifest/fixtures/alpine-amd64.txt | 5 ----- scripts/explain_manifest/fixtures/alpine-arm64.txt | 5 ----- scripts/explain_manifest/fixtures/amazonlinux-2-amd64.txt | 5 ----- scripts/explain_manifest/fixtures/amazonlinux-2023-amd64.txt | 5 ----- scripts/explain_manifest/fixtures/amazonlinux-2023-arm64.txt | 5 ----- scripts/explain_manifest/fixtures/debian-10-amd64.txt | 5 ----- scripts/explain_manifest/fixtures/debian-11-amd64.txt | 5 ----- scripts/explain_manifest/fixtures/el7-amd64.txt | 5 ----- scripts/explain_manifest/fixtures/el8-amd64.txt | 5 ----- scripts/explain_manifest/fixtures/el9-amd64.txt | 5 ----- scripts/explain_manifest/fixtures/el9-arm64.txt | 5 ----- scripts/explain_manifest/fixtures/ubuntu-20.04-amd64.txt | 5 ----- scripts/explain_manifest/fixtures/ubuntu-22.04-amd64.txt | 5 ----- scripts/explain_manifest/fixtures/ubuntu-22.04-arm64.txt | 5 ----- 15 files changed, 1 insertion(+), 71 deletions(-) diff --git a/kong-3.5.0-0.rockspec b/kong-3.5.0-0.rockspec index c02b73864a35..ca621a7bd277 100644 --- a/kong-3.5.0-0.rockspec +++ b/kong-3.5.0-0.rockspec @@ -41,7 +41,7 @@ dependencies = { "lua-resty-session == 4.0.5", "lua-resty-timer-ng == 0.2.5", "lpeg == 1.0.2", - "lua-resty-ljsonschema == 1.1.6", + "lua-resty-ljsonschema == 1.1.6-2", } build = { type = "builtin", diff --git a/scripts/explain_manifest/fixtures/alpine-amd64.txt b/scripts/explain_manifest/fixtures/alpine-amd64.txt index 6446c3030599..b5bf1a0fa465 100644 --- a/scripts/explain_manifest/fixtures/alpine-amd64.txt +++ b/scripts/explain_manifest/fixtures/alpine-amd64.txt @@ -31,11 +31,6 @@ Needed : - libc.so -- Path : /usr/local/lib/lua/5.1/cjson.so - Needed : - - libc.so.6 - Runpath : /usr/local/kong/lib - - Path : /usr/local/kong/lib/libssl.so.1.1 Needed : - libcrypto.so.1.1 diff --git a/scripts/explain_manifest/fixtures/alpine-arm64.txt b/scripts/explain_manifest/fixtures/alpine-arm64.txt index 512b8d8ead11..b5bf1a0fa465 100644 --- a/scripts/explain_manifest/fixtures/alpine-arm64.txt +++ b/scripts/explain_manifest/fixtures/alpine-arm64.txt @@ -37,11 +37,6 @@ - libc.so Rpath : /usr/local/kong/lib -- Path : /usr/local/lib/lua/5.1/cjson.so - Needed : - - libc.so.6 - Runpath : /usr/local/kong/lib - - Path : /usr/local/lib/lua/5.1/lfs.so Needed : - libc.so diff --git a/scripts/explain_manifest/fixtures/amazonlinux-2-amd64.txt b/scripts/explain_manifest/fixtures/amazonlinux-2-amd64.txt index 70104b231b27..c8cbf3e5bd32 100644 --- a/scripts/explain_manifest/fixtures/amazonlinux-2-amd64.txt +++ 
b/scripts/explain_manifest/fixtures/amazonlinux-2-amd64.txt @@ -79,11 +79,6 @@ - libc.so.6 Runpath : /usr/local/kong/lib -- Path : /usr/local/lib/lua/5.1/cjson.so - Needed : - - libc.so.6 - Runpath : /usr/local/kong/lib - - Path : /usr/local/lib/lua/5.1/lfs.so Needed : - libc.so.6 diff --git a/scripts/explain_manifest/fixtures/amazonlinux-2023-amd64.txt b/scripts/explain_manifest/fixtures/amazonlinux-2023-amd64.txt index ab0dde1598d1..95eb40ea4ba9 100644 --- a/scripts/explain_manifest/fixtures/amazonlinux-2023-amd64.txt +++ b/scripts/explain_manifest/fixtures/amazonlinux-2023-amd64.txt @@ -72,11 +72,6 @@ - libc.so.6 Runpath : /usr/local/kong/lib -- Path : /usr/local/lib/lua/5.1/cjson.so - Needed : - - libc.so.6 - Runpath : /usr/local/kong/lib - - Path : /usr/local/lib/lua/5.1/lfs.so Needed : - libc.so.6 diff --git a/scripts/explain_manifest/fixtures/amazonlinux-2023-arm64.txt b/scripts/explain_manifest/fixtures/amazonlinux-2023-arm64.txt index b877bd1be733..e352ddf9485a 100644 --- a/scripts/explain_manifest/fixtures/amazonlinux-2023-arm64.txt +++ b/scripts/explain_manifest/fixtures/amazonlinux-2023-arm64.txt @@ -57,11 +57,6 @@ - libc.so.6 Rpath : /usr/local/kong/lib -- Path : /usr/local/lib/lua/5.1/cjson.so - Needed : - - libc.so.6 - Runpath : /usr/local/kong/lib - - Path : /usr/local/lib/lua/5.1/lfs.so Needed : - libc.so.6 diff --git a/scripts/explain_manifest/fixtures/debian-10-amd64.txt b/scripts/explain_manifest/fixtures/debian-10-amd64.txt index 6d1121a05612..95d532bef36b 100644 --- a/scripts/explain_manifest/fixtures/debian-10-amd64.txt +++ b/scripts/explain_manifest/fixtures/debian-10-amd64.txt @@ -79,11 +79,6 @@ - libc.so.6 Runpath : /usr/local/kong/lib -- Path : /usr/local/lib/lua/5.1/cjson.so - Needed : - - libc.so.6 - Runpath : /usr/local/kong/lib - - Path : /usr/local/lib/lua/5.1/lfs.so Needed : - libc.so.6 diff --git a/scripts/explain_manifest/fixtures/debian-11-amd64.txt b/scripts/explain_manifest/fixtures/debian-11-amd64.txt index fff523b65df1..253e43cd2a53 100644 --- a/scripts/explain_manifest/fixtures/debian-11-amd64.txt +++ b/scripts/explain_manifest/fixtures/debian-11-amd64.txt @@ -77,11 +77,6 @@ - libc.so.6 Runpath : /usr/local/kong/lib -- Path : /usr/local/lib/lua/5.1/cjson.so - Needed : - - libc.so.6 - Runpath : /usr/local/kong/lib - - Path : /usr/local/lib/lua/5.1/lfs.so Needed : - libc.so.6 diff --git a/scripts/explain_manifest/fixtures/el7-amd64.txt b/scripts/explain_manifest/fixtures/el7-amd64.txt index 70104b231b27..c8cbf3e5bd32 100644 --- a/scripts/explain_manifest/fixtures/el7-amd64.txt +++ b/scripts/explain_manifest/fixtures/el7-amd64.txt @@ -79,11 +79,6 @@ - libc.so.6 Runpath : /usr/local/kong/lib -- Path : /usr/local/lib/lua/5.1/cjson.so - Needed : - - libc.so.6 - Runpath : /usr/local/kong/lib - - Path : /usr/local/lib/lua/5.1/lfs.so Needed : - libc.so.6 diff --git a/scripts/explain_manifest/fixtures/el8-amd64.txt b/scripts/explain_manifest/fixtures/el8-amd64.txt index 2a545419d2cb..7bbdad456097 100644 --- a/scripts/explain_manifest/fixtures/el8-amd64.txt +++ b/scripts/explain_manifest/fixtures/el8-amd64.txt @@ -79,11 +79,6 @@ - libc.so.6 Runpath : /usr/local/kong/lib -- Path : /usr/local/lib/lua/5.1/cjson.so - Needed : - - libc.so.6 - Runpath : /usr/local/kong/lib - - Path : /usr/local/lib/lua/5.1/lfs.so Needed : - libc.so.6 diff --git a/scripts/explain_manifest/fixtures/el9-amd64.txt b/scripts/explain_manifest/fixtures/el9-amd64.txt index e0866a846b13..eca28e4a403f 100644 --- a/scripts/explain_manifest/fixtures/el9-amd64.txt +++ 
b/scripts/explain_manifest/fixtures/el9-amd64.txt
@@ -72,11 +72,6 @@
   - libc.so.6
   Runpath : /usr/local/kong/lib
 
-- Path : /usr/local/lib/lua/5.1/cjson.so
-  Needed :
-  - libc.so.6
-  Runpath : /usr/local/kong/lib
-
 - Path : /usr/local/lib/lua/5.1/lfs.so
   Needed :
   - libc.so.6
diff --git a/scripts/explain_manifest/fixtures/el9-arm64.txt b/scripts/explain_manifest/fixtures/el9-arm64.txt
index b877bd1be733..e352ddf9485a 100644
--- a/scripts/explain_manifest/fixtures/el9-arm64.txt
+++ b/scripts/explain_manifest/fixtures/el9-arm64.txt
@@ -57,11 +57,6 @@
   - libc.so.6
   Rpath : /usr/local/kong/lib
 
-- Path : /usr/local/lib/lua/5.1/cjson.so
-  Needed :
-  - libc.so.6
-  Runpath : /usr/local/kong/lib
-
 - Path : /usr/local/lib/lua/5.1/lfs.so
   Needed :
   - libc.so.6
diff --git a/scripts/explain_manifest/fixtures/ubuntu-20.04-amd64.txt b/scripts/explain_manifest/fixtures/ubuntu-20.04-amd64.txt
index a034e1c9b39c..a7184560750f 100644
--- a/scripts/explain_manifest/fixtures/ubuntu-20.04-amd64.txt
+++ b/scripts/explain_manifest/fixtures/ubuntu-20.04-amd64.txt
@@ -77,11 +77,6 @@
   - libc.so.6
   Runpath : /usr/local/kong/lib
 
-- Path : /usr/local/lib/lua/5.1/cjson.so
-  Needed :
-  - libc.so.6
-  Runpath : /usr/local/kong/lib
-
 - Path : /usr/local/lib/lua/5.1/lfs.so
   Needed :
   - libc.so.6
diff --git a/scripts/explain_manifest/fixtures/ubuntu-22.04-amd64.txt b/scripts/explain_manifest/fixtures/ubuntu-22.04-amd64.txt
index 185d054b0770..68de4cc4203f 100644
--- a/scripts/explain_manifest/fixtures/ubuntu-22.04-amd64.txt
+++ b/scripts/explain_manifest/fixtures/ubuntu-22.04-amd64.txt
@@ -70,11 +70,6 @@
   - libc.so.6
   Runpath : /usr/local/kong/lib
 
-- Path : /usr/local/lib/lua/5.1/cjson.so
-  Needed :
-  - libc.so.6
-  Runpath : /usr/local/kong/lib
-
 - Path : /usr/local/lib/lua/5.1/lfs.so
   Needed :
   - libc.so.6
diff --git a/scripts/explain_manifest/fixtures/ubuntu-22.04-arm64.txt b/scripts/explain_manifest/fixtures/ubuntu-22.04-arm64.txt
index cd8c9628f642..b66889974bd0 100644
--- a/scripts/explain_manifest/fixtures/ubuntu-22.04-arm64.txt
+++ b/scripts/explain_manifest/fixtures/ubuntu-22.04-arm64.txt
@@ -56,11 +56,6 @@
   - ld-linux-aarch64.so.1
   Runpath : /usr/local/kong/lib
 
-- Path : /usr/local/lib/lua/5.1/cjson.so
-  Needed :
-  - libc.so.6
-  Runpath : /usr/local/kong/lib
-
 - Path : /usr/local/lib/lua/5.1/lfs.so
   Needed :
   - libc.so.6

From 6a6af6d095935ea8c941e00cd05f46d947ff4b52 Mon Sep 17 00:00:00 2001
From: Xiaochen Wang
Date: Fri, 20 Oct 2023 14:58:57 +0800
Subject: [PATCH 003/249] perf(request-id): use `proxy_set_header` instead of `ngx.req.set_header` (#11788)

Using the Nginx directive `proxy_set_header X-Kong-Request-Id $kong_request_id`
instead of the Lua call `set_header()` can enhance RPS by ~2% in a test
scenario where no plugins are enabled.

KAG-2814
---
 kong/runloop/handler.lua      | 14 --------------
 kong/templates/nginx_kong.lua | 18 ++++++++++++++++++
 2 files changed, 18 insertions(+), 14 deletions(-)

diff --git a/kong/runloop/handler.lua b/kong/runloop/handler.lua
index 98947fce8964..ed6cfb9bed91 100644
--- a/kong/runloop/handler.lua
+++ b/kong/runloop/handler.lua
@@ -44,7 +44,6 @@ local log = ngx.log
 local exit = ngx.exit
 local exec = ngx.exec
 local header = ngx.header
-local set_header = ngx.req.set_header
 local timer_at = ngx.timer.at
 local get_phase = ngx.get_phase
 local subsystem = ngx.config.subsystem
@@ -1355,9 +1354,6 @@ return {
     end,
     -- Only executed if the `router` module found a route and allows nginx to proxy it.
after = function(ctx) - local enabled_headers_upstream = kong.configuration.enabled_headers_upstream - local headers = constants.HEADERS - -- Nginx's behavior when proxying a request with an empty querystring -- `/foo?` is to keep `$is_args` an empty string, hence effectively -- stripping the empty querystring. @@ -1450,16 +1446,6 @@ return { if var.http_proxy_connection then clear_header("Proxy-Connection") end - - -- X-Kong-Request-Id upstream header - local rid, rid_get_err = request_id_get() - if not rid then - log(WARN, "failed to get Request ID: ", rid_get_err) - end - - if enabled_headers_upstream[headers.REQUEST_ID] and rid then - set_header(headers.REQUEST_ID, rid) - end end }, header_filter = { diff --git a/kong/templates/nginx_kong.lua b/kong/templates/nginx_kong.lua index 3f229bf70c9c..2b797caff6e8 100644 --- a/kong/templates/nginx_kong.lua +++ b/kong/templates/nginx_kong.lua @@ -168,6 +168,9 @@ server { proxy_set_header X-Forwarded-Path $upstream_x_forwarded_path; proxy_set_header X-Forwarded-Prefix $upstream_x_forwarded_prefix; proxy_set_header X-Real-IP $remote_addr; +> if enabled_headers_upstream["X-Kong-Request-Id"] then + proxy_set_header X-Kong-Request-Id $kong_request_id; +> end proxy_pass_header Server; proxy_pass_header Date; proxy_ssl_name $upstream_host; @@ -199,6 +202,9 @@ server { proxy_set_header X-Forwarded-Path $upstream_x_forwarded_path; proxy_set_header X-Forwarded-Prefix $upstream_x_forwarded_prefix; proxy_set_header X-Real-IP $remote_addr; +> if enabled_headers_upstream["X-Kong-Request-Id"] then + proxy_set_header X-Kong-Request-Id $kong_request_id; +> end proxy_pass_header Server; proxy_pass_header Date; proxy_ssl_name $upstream_host; @@ -230,6 +236,9 @@ server { proxy_set_header X-Forwarded-Path $upstream_x_forwarded_path; proxy_set_header X-Forwarded-Prefix $upstream_x_forwarded_prefix; proxy_set_header X-Real-IP $remote_addr; +> if enabled_headers_upstream["X-Kong-Request-Id"] then + proxy_set_header X-Kong-Request-Id $kong_request_id; +> end proxy_pass_header Server; proxy_pass_header Date; proxy_ssl_name $upstream_host; @@ -261,6 +270,9 @@ server { proxy_set_header X-Forwarded-Path $upstream_x_forwarded_path; proxy_set_header X-Forwarded-Prefix $upstream_x_forwarded_prefix; proxy_set_header X-Real-IP $remote_addr; +> if enabled_headers_upstream["X-Kong-Request-Id"] then + proxy_set_header X-Kong-Request-Id $kong_request_id; +> end proxy_pass_header Server; proxy_pass_header Date; proxy_ssl_name $upstream_host; @@ -285,6 +297,9 @@ server { grpc_set_header X-Forwarded-Path $upstream_x_forwarded_path; grpc_set_header X-Forwarded-Prefix $upstream_x_forwarded_prefix; grpc_set_header X-Real-IP $remote_addr; +> if enabled_headers_upstream["X-Kong-Request-Id"] then + grpc_set_header X-Kong-Request-Id $kong_request_id; +> end grpc_pass_header Server; grpc_pass_header Date; grpc_ssl_name $upstream_host; @@ -328,6 +343,9 @@ server { proxy_set_header X-Forwarded-Path $upstream_x_forwarded_path; proxy_set_header X-Forwarded-Prefix $upstream_x_forwarded_prefix; proxy_set_header X-Real-IP $remote_addr; +> if enabled_headers_upstream["X-Kong-Request-Id"] then + proxy_set_header X-Kong-Request-Id $kong_request_id; +> end proxy_pass_header Server; proxy_pass_header Date; proxy_ssl_name $upstream_host; From 74bd1134f9640ebdeef3ea3b59cee6a1be5575a8 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Hans=20H=C3=BCbner?= Date: Fri, 20 Oct 2023 10:17:12 +0200 Subject: [PATCH 004/249] chore(tests): Build docker image for upgrade tests (#11783) --- .github/workflows/upgrade-tests.yml | 
33 +++++++++++++++++++++-------- 1 file changed, 24 insertions(+), 9 deletions(-) diff --git a/.github/workflows/upgrade-tests.yml b/.github/workflows/upgrade-tests.yml index 7dfca3a5b9f1..94f2420c90c9 100644 --- a/.github/workflows/upgrade-tests.yml +++ b/.github/workflows/upgrade-tests.yml @@ -23,6 +23,8 @@ on: concurrency: group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }} cancel-in-progress: true +env: + GH_TOKEN: ${{ github.token }} jobs: upgrade-test: @@ -30,25 +32,38 @@ jobs: runs-on: ubuntu-22.04 steps: - - name: Install Docker + - name: Install Prerequisites run: | sudo apt-get -y update - sudo apt-get -y install ca-certificates curl gnupg lsb-release + sudo apt-get -y install ca-certificates curl gnupg lsb-release jq libyaml-dev net-tools sudo mkdir -p /etc/apt/keyrings curl -fsSL https://download.docker.com/linux/ubuntu/gpg | sudo gpg --dearmor -o /etc/apt/keyrings/docker.gpg echo "deb [arch=$(dpkg --print-architecture) signed-by=/etc/apt/keyrings/docker.gpg] https://download.docker.com/linux/ubuntu $(lsb_release -cs) stable" | sudo tee /etc/apt/sources.list.d/docker.list > /dev/null sudo apt-get update sudo apt-get install docker-ce docker-ce-cli containerd.io docker-compose-plugin - - name: Install prerequisites - run: | - sudo apt-get -y install jq - - - name: Clone Kong source code + - name: Clone Source Code uses: actions/checkout@v4 with: fetch-depth: 0 + submodules: recursive + + - name: Build Debian Package + run: | + make package/deb + mv bazel-bin/pkg/kong.amd64.deb . + + - name: Build Docker Image + uses: docker/build-push-action@v3 + with: + file: build/dockerfiles/deb.Dockerfile + context: . + push: false + tags: "kong-local/kong:latest" + build-args: | + KONG_BASE_IMAGE=ubuntu:22.04 + KONG_ARTIFACT_PATH=./ - - name: Run upgrade tests + - name: Run Upgrade Tests run: | - bash ./scripts/upgrade-tests/test-upgrade-path.sh + bash ./scripts/upgrade-tests/test-upgrade-path.sh -i kong-local/kong:latest From 6ce55c407cd1931036f5270fe3427dcea3131083 Mon Sep 17 00:00:00 2001 From: Kurt Tu <131840510+sabertobihwy@users.noreply.github.com> Date: Mon, 23 Oct 2023 10:52:44 +0800 Subject: [PATCH 005/249] refactor(plugins/ldap-auth): optimize the process of parsing and handling authentication headers (#11780) * refactor(plugins/ldap-auth): optimize the process of parsing and handling authentication headers 1. use the `ngx.re.find` and `ngx.re.match` functions for more robust and efficient string matching operations. 2. adds error handling and logging for potential errors during authentication header parsing and credential decoding. 3. tweak the handling position for the case where `proxy_authorization_value` does not exist. 
Fix: [FTI-5329](https://konghq.atlassian.net/browse/FTI-5329) Signed-off-by: sabertobihwy * update by comments --------- Signed-off-by: sabertobihwy Co-authored-by: tzssangglass --- kong/plugins/ldap-auth/access.lua | 48 ++++++++++++++++++++++++------- 1 file changed, 37 insertions(+), 11 deletions(-) diff --git a/kong/plugins/ldap-auth/access.lua b/kong/plugins/ldap-auth/access.lua index 2027bffe21f0..c04b6c50276d 100644 --- a/kong/plugins/ldap-auth/access.lua +++ b/kong/plugins/ldap-auth/access.lua @@ -6,10 +6,10 @@ local kong = kong local error = error local decode_base64 = ngx.decode_base64 local tostring = tostring -local match = string.match +local re_find = ngx.re.find +local re_match = ngx.re.match local lower = string.lower local upper = string.upper -local find = string.find local sub = string.sub local fmt = string.format local tcp = ngx.socket.tcp @@ -24,15 +24,37 @@ local _M = {} local function retrieve_credentials(authorization_header_value, conf) + local lower_header_type = lower(conf.header_type) + local regex = "^\\s*" .. lower_header_type .. "\\s+" + local from, to, err = re_find(lower(authorization_header_value), regex, "jo") + if err then + kong.log.err("error while find header_type: ", lower_header_type, " in authorization header value") + return nil + end + + if not from then + kong.log.info("header_type: ", lower_header_type, " not found in authorization header value") + return nil + end + local username, password - if authorization_header_value then - local s, e = find(lower(authorization_header_value), "^%s*" .. - lower(conf.header_type) .. "%s+") - if s == 1 then - local cred = sub(authorization_header_value, e + 1) - local decoded_cred = decode_base64(cred) - username, password = match(decoded_cred, "(.-):(.+)") + if from == 1 then + local cred = sub(authorization_header_value, to + 1) + local decoded_cred = decode_base64(cred) + local m, err = re_match(decoded_cred, "^(.*?):(.+)$", "jo") + if err then + kong.log.err("error while decoding credentials: ", err) + return nil end + + if type(m) == "table" and #m == 2 then + username = m[1] + password = m[2] + else + kong.log.err("no valid credentials found in authorization header value") + return nil + end + end return username, password @@ -231,8 +253,12 @@ local function do_authentication(conf) } end - local is_authorized, credential = authenticate(conf, proxy_authorization_value) - if not is_authorized then + local is_authorized, credential + if proxy_authorization_value then + is_authorized, credential = authenticate(conf, proxy_authorization_value) + end + + if not is_authorized and authorization_value then is_authorized, credential = authenticate(conf, authorization_value) end From ee9103cee3ec6b5163806340541cf082d1279a7a Mon Sep 17 00:00:00 2001 From: Chrono Date: Mon, 23 Oct 2023 11:04:39 +0800 Subject: [PATCH 006/249] refactor(pdk): get_raw_body with string.buffer (#11771) --- kong/pdk/response.lua | 32 ++++++++++---------------------- 1 file changed, 10 insertions(+), 22 deletions(-) diff --git a/kong/pdk/response.lua b/kong/pdk/response.lua index 258f527ef141..b519ac12ef25 100644 --- a/kong/pdk/response.lua +++ b/kong/pdk/response.lua @@ -12,6 +12,7 @@ -- @module kong.response +local buffer = require "string.buffer" local cjson = require "cjson.safe" local checks = require "kong.pdk.private.checks" local phase_checker = require "kong.pdk.private.phases" @@ -27,7 +28,6 @@ local find = string.find local lower = string.lower local error = error local pairs = pairs -local concat = table.concat local coroutine 
= coroutine local cjson_encode = cjson.encode local normalize_header = checks.normalize_header @@ -568,39 +568,27 @@ local function new(self, major_version) function _RESPONSE.get_raw_body() check_phase(PHASES.body_filter) - local body_buffer + local body_buffer = ngx.ctx.KONG_BODY_BUFFER local chunk = arg[1] local eof = arg[2] - if eof then - body_buffer = ngx.ctx.KONG_BODY_BUFFER - if not body_buffer then - return chunk - end + + if eof and not body_buffer then + return chunk end if type(chunk) == "string" and chunk ~= "" then - if not eof then - body_buffer = ngx.ctx.KONG_BODY_BUFFER - end - - if body_buffer then - local n = body_buffer.n + 1 - body_buffer.n = n - body_buffer[n] = chunk - - else - body_buffer = { - chunk, - n = 1, - } + if not body_buffer then + body_buffer = buffer.new() ngx.ctx.KONG_BODY_BUFFER = body_buffer end + + body_buffer:put(chunk) end if eof then if body_buffer then - body_buffer = concat(body_buffer, "", 1, body_buffer.n) + body_buffer = body_buffer:get() else body_buffer = "" end From a4e495fd95f3c4996dae50732298912684b55bb3 Mon Sep 17 00:00:00 2001 From: oowl Date: Mon, 23 Oct 2023 16:32:20 +0800 Subject: [PATCH 007/249] fix(prometheus): expose metrics in no service route (#11781) Expose Prometheus metrics in no service route FTI-5065 --------- Co-authored-by: Datong Sun --- .../prometheus_expose_no_service_metrics.yml | 3 ++ kong/plugins/prometheus/exporter.lua | 7 ++- .../26-prometheus/05-metrics_spec.lua | 52 +++++++++++++++++++ spec/fixtures/blueprints.lua | 11 +++- 4 files changed, 67 insertions(+), 6 deletions(-) create mode 100644 changelog/unreleased/kong/prometheus_expose_no_service_metrics.yml diff --git a/changelog/unreleased/kong/prometheus_expose_no_service_metrics.yml b/changelog/unreleased/kong/prometheus_expose_no_service_metrics.yml new file mode 100644 index 000000000000..e16c228eaed4 --- /dev/null +++ b/changelog/unreleased/kong/prometheus_expose_no_service_metrics.yml @@ -0,0 +1,3 @@ +message: "Expose metrics for serviceless routes" +type: bugfix +scope: Plugin diff --git a/kong/plugins/prometheus/exporter.lua b/kong/plugins/prometheus/exporter.lua index f1518164794c..fd219d66b380 100644 --- a/kong/plugins/prometheus/exporter.lua +++ b/kong/plugins/prometheus/exporter.lua @@ -227,17 +227,16 @@ local function log(message, serialized) return end - local service_name + local service_name = "" if message and message.service then service_name = message.service.name or message.service.host - else - -- do not record any stats if the service is not present - return end local route_name if message and message.route then route_name = message.route.name or message.route.id + else + return end local consumer = "" diff --git a/spec/03-plugins/26-prometheus/05-metrics_spec.lua b/spec/03-plugins/26-prometheus/05-metrics_spec.lua index a47a7e0b221b..a6d56b808b01 100644 --- a/spec/03-plugins/26-prometheus/05-metrics_spec.lua +++ b/spec/03-plugins/26-prometheus/05-metrics_spec.lua @@ -64,6 +64,25 @@ for _, strategy in helpers.each_strategy() do } } + local route1 = bp.routes:insert{ + name = "serverless", + protocols = {"https"}, + hosts = {"status.example.com"}, + paths = {"/serverless"}, + no_service = true, + } + + assert(bp.plugins:insert { + name = "request-termination", + route = { id = route1.id }, + config = { + status_code = 200, + message = "request terminated by request-termination plugin", + echo = true, + }, + }) + + bp.plugins:insert{ name = "prometheus", -- globally enabled config = { @@ -158,5 +177,38 @@ for _, strategy in 
helpers.each_strategy() do assert.matches('kong_nginx_connections_total{node_id="' .. UUID_PATTERN .. '",subsystem="' .. ngx.config.subsystem .. '",state="%w+"} %d+', body) end) + it("expose metrics in no service route", function() + local res = assert(proxy_ssl_client:send{ + method = "GET", + path = "/serverless", + headers = { + ["Host"] = "status.example.com" + } + }) + assert.res_status(200, res) + + local res = assert(proxy_ssl_client:send{ + method = "GET", + path = "/metrics", + headers = { + ["Host"] = "status.example.com" + } + }) + assert.res_status(200, res) + + helpers.wait_until(function() + local res = assert(admin_ssl_client:send{ + method = "GET", + path = "/metrics" + }) + local body = assert.res_status(200, res) + + assert.matches('kong_nginx_metric_errors_total 0', body, nil, true) + + return body:find('kong_http_requests_total{service="",route="serverless",code="200",source="kong",consumer=""} 1', + nil, true) + end) + end) + end) end diff --git a/spec/fixtures/blueprints.lua b/spec/fixtures/blueprints.lua index 8b27bf22a55c..c1662b00d710 100644 --- a/spec/fixtures/blueprints.lua +++ b/spec/fixtures/blueprints.lua @@ -180,8 +180,15 @@ function _M.new(db) end) res.routes = new_blueprint(db.routes, function(overrides) - return { - service = overrides.service or res.services:insert(), + local service + if overrides.no_service then + service = nil + overrides.no_service = nil + else + service = overrides.service or res.services:insert() + end + return { + service = service, } end) From 53b11936d7619701cdb406ad5c818cc9f36aa2f1 Mon Sep 17 00:00:00 2001 From: Chrono Date: Mon, 23 Oct 2023 17:11:47 +0800 Subject: [PATCH 008/249] refactor(clustering): use the correct param name `basic_info` for init_worker() (#11807) --- kong/clustering/init.lua | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/kong/clustering/init.lua b/kong/clustering/init.lua index f09a194e3e4f..a661a8c4eeaf 100644 --- a/kong/clustering/init.lua +++ b/kong/clustering/init.lua @@ -73,22 +73,22 @@ function _M:handle_cp_websocket() end -function _M:init_cp_worker(plugins_list) +function _M:init_cp_worker(basic_info) events.init() self.instance = require("kong.clustering.control_plane").new(self) - self.instance:init_worker(plugins_list) + self.instance:init_worker(basic_info) end -function _M:init_dp_worker(plugins_list) +function _M:init_dp_worker(basic_info) if not is_dp_worker_process() then return end self.instance = require("kong.clustering.data_plane").new(self) - self.instance:init_worker(plugins_list) + self.instance:init_worker(basic_info) end From bab36eadcc5adcd569fb61ddaff91eefa40a0cfc Mon Sep 17 00:00:00 2001 From: Chrono Date: Mon, 23 Oct 2023 17:12:14 +0800 Subject: [PATCH 009/249] refactor(clustering): simplify parse_proxy_url() (#11799) --- kong/clustering/utils.lua | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/kong/clustering/utils.lua b/kong/clustering/utils.lua index 891aa974c691..72e1cca30c5e 100644 --- a/kong/clustering/utils.lua +++ b/kong/clustering/utils.lua @@ -30,9 +30,9 @@ local CLUSTER_PROXY_SSL_TERMINATOR_SOCK = fmt("unix:%s/cluster_proxy_ssl_termina local _M = {} -local function parse_proxy_url(conf) +local function parse_proxy_url(proxy_server) local ret = {} - local proxy_server = conf.proxy_server + if proxy_server then -- assume proxy_server is validated in conf_loader local parsed = parse_url(proxy_server) @@ -81,7 +81,7 @@ function _M.connect_cp(dp, endpoint, protocols) } if conf.cluster_use_proxy then - local proxy_opts = 
parse_proxy_url(conf) + local proxy_opts = parse_proxy_url(conf.proxy_server) opts.proxy_opts = { wss_proxy = proxy_opts.proxy_url, wss_proxy_authorization = proxy_opts.proxy_authorization, From 8b0a8381ecf07732f8788cc14c7743a6e8bc5dd7 Mon Sep 17 00:00:00 2001 From: "Qirui(Keery) Nie" Date: Mon, 23 Oct 2023 17:17:53 +0800 Subject: [PATCH 010/249] fix(aws-lambda): aws lambda service cache by service related fields (#11805) Cache the aws lambda service by composing a cache key using the service related fields, so that service object can be reused between plugins and vault refresh can take effect when key/secret is rotated * fix(aws-lambda): aws lambda service cache by service related fields * tests(aws-lambda): add test for checking service cache refresh when vault rotates * style(*): lint Fix KAG-2832 --- .../kong/aws_lambda_service_cache.yml | 3 + kong/plugins/aws-lambda/handler.lua | 35 ++++++- .../27-aws-lambda/99-access_spec.lua | 93 +++++++++++++++++++ .../custom_vaults/kong/vaults/random/init.lua | 13 +++ .../kong/vaults/random/schema.lua | 19 ++++ 5 files changed, 159 insertions(+), 4 deletions(-) create mode 100644 changelog/unreleased/kong/aws_lambda_service_cache.yml create mode 100644 spec/fixtures/custom_vaults/kong/vaults/random/init.lua create mode 100644 spec/fixtures/custom_vaults/kong/vaults/random/schema.lua diff --git a/changelog/unreleased/kong/aws_lambda_service_cache.yml b/changelog/unreleased/kong/aws_lambda_service_cache.yml new file mode 100644 index 000000000000..48c421b041aa --- /dev/null +++ b/changelog/unreleased/kong/aws_lambda_service_cache.yml @@ -0,0 +1,3 @@ +message: Cache the AWS lambda service by those lambda service related fields +type: bugfix +scope: Plugin diff --git a/kong/plugins/aws-lambda/handler.lua b/kong/plugins/aws-lambda/handler.lua index a2a6c597288e..2e1b78002d03 100644 --- a/kong/plugins/aws-lambda/handler.lua +++ b/kong/plugins/aws-lambda/handler.lua @@ -1,9 +1,12 @@ -- Copyright (C) Kong Inc. 
-local fmt = string.format local ngx_var = ngx.var local ngx_now = ngx.now local ngx_update_time = ngx.update_time +local md5_bin = ngx.md5_bin +local fmt = string.format +local buffer = require "string.buffer" +local lrucache = require "resty.lrucache" local kong = kong local meta = require "kong.meta" @@ -22,7 +25,7 @@ local AWS_REGION do AWS_REGION = os.getenv("AWS_REGION") or os.getenv("AWS_DEFAULT_REGION") end local AWS -local LAMBDA_SERVICE_CACHE = setmetatable({}, { __mode = "k" }) +local LAMBDA_SERVICE_CACHE local function get_now() @@ -32,11 +35,34 @@ end local function initialize() + LAMBDA_SERVICE_CACHE = lrucache.new(1000) AWS_GLOBAL_CONFIG = aws_config.global AWS = aws() initialize = nil end +local build_cache_key do + -- Use AWS Service related config fields to build cache key + -- so that service object can be reused between plugins and + -- vault refresh can take effect when key/secret is rotated + local SERVICE_RELATED_FIELD = { "timeout", "keepalive", "aws_key", "aws_secret", + "aws_assume_role_arn", "aws_role_session_name", + "aws_region", "host", "port", "disable_https", + "proxy_url", "aws_imds_protocol_version" } + + build_cache_key = function (conf) + local cache_key_buffer = buffer.new(100):reset() + for _, field in ipairs(SERVICE_RELATED_FIELD) do + local v = conf[field] + if v then + cache_key_buffer:putf("%s=%s;", field, v) + end + end + + return md5_bin(cache_key_buffer:get()) + end +end + local AWSLambdaHandler = { PRIORITY = 750, @@ -62,7 +88,8 @@ function AWSLambdaHandler:access(conf) local scheme = conf.disable_https and "http" or "https" local endpoint = fmt("%s://%s", scheme, host) - local lambda_service = LAMBDA_SERVICE_CACHE[conf] + local cache_key = build_cache_key(conf) + local lambda_service = LAMBDA_SERVICE_CACHE:get(cache_key) if not lambda_service then local credentials = AWS.config.credentials -- Override credential config according to plugin config @@ -132,7 +159,7 @@ function AWSLambdaHandler:access(conf) http_proxy = conf.proxy_url, https_proxy = conf.proxy_url, }) - LAMBDA_SERVICE_CACHE[conf] = lambda_service + LAMBDA_SERVICE_CACHE:set(cache_key, lambda_service) end local upstream_body_json = build_request_payload(conf) diff --git a/spec/03-plugins/27-aws-lambda/99-access_spec.lua b/spec/03-plugins/27-aws-lambda/99-access_spec.lua index b5c5db6668dd..dc9ec8205ebc 100644 --- a/spec/03-plugins/27-aws-lambda/99-access_spec.lua +++ b/spec/03-plugins/27-aws-lambda/99-access_spec.lua @@ -7,6 +7,7 @@ local fixtures = require "spec.fixtures.aws-lambda" local TEST_CONF = helpers.test_conf local server_tokens = meta._SERVER_TOKENS local null = ngx.null +local fmt = string.format @@ -1182,4 +1183,96 @@ for _, strategy in helpers.each_strategy() do end) end) end) + + describe("Plugin: AWS Lambda with #vault [#" .. strategy .. 
"]", function () + local proxy_client + local admin_client + + local ttl_time = 1 + + lazy_setup(function () + helpers.setenv("KONG_VAULT_ROTATION_INTERVAL", "1") + + local bp = helpers.get_db_utils(strategy, { + "routes", + "services", + "plugins", + "vaults", + }, { "aws-lambda" }, { "random" }) + + local route1 = bp.routes:insert { + hosts = { "lambda-vault.com" }, + } + + bp.plugins:insert { + name = "aws-lambda", + route = { id = route1.id }, + config = { + port = 10001, + aws_key = fmt("{vault://random/aws_key?ttl=%s&resurrect_ttl=0}", ttl_time), + aws_secret = "aws_secret", + aws_region = "us-east-1", + function_name = "functionEcho", + }, + } + + assert(helpers.start_kong({ + database = strategy, + prefix = helpers.test_conf.prefix, + nginx_conf = "spec/fixtures/custom_nginx.template", + vaults = "random", + plugins = "bundled", + log_level = "error", + }, nil, nil, fixtures)) + end) + + lazy_teardown(function() + helpers.unsetenv("KONG_VAULT_ROTATION_INTERVAL") + + helpers.stop_kong() + end) + + before_each(function() + proxy_client = helpers.proxy_client() + admin_client = helpers.admin_client() + end) + + after_each(function () + proxy_client:close() + admin_client:close() + end) + + it("lambda service should use latest reference value after Vault ttl", function () + local res = assert(proxy_client:send { + method = "GET", + path = "/get?key1=some_value1&key2=some_value2&key3=some_value3", + headers = { + ["Host"] = "lambda-vault.com" + } + }) + assert.res_status(200, res) + local body = assert.response(res).has.jsonbody() + local authorization_header = body.headers.authorization + local first_aws_key = string.match(authorization_header, "Credential=(.+)/") + + assert.eventually(function() + proxy_client:close() + proxy_client = helpers.proxy_client() + + local res = assert(proxy_client:send { + method = "GET", + path = "/get?key1=some_value1&key2=some_value2&key3=some_value3", + headers = { + ["Host"] = "lambda-vault.com" + } + }) + assert.res_status(200, res) + local body = assert.response(res).has.jsonbody() + local authorization_header = body.headers.authorization + local second_aws_key = string.match(authorization_header, "Credential=(.+)/") + + return first_aws_key ~= second_aws_key + end).ignore_exceptions(true).with_timeout(ttl_time * 2).is_truthy() + end) + end) end diff --git a/spec/fixtures/custom_vaults/kong/vaults/random/init.lua b/spec/fixtures/custom_vaults/kong/vaults/random/init.lua new file mode 100644 index 000000000000..617754ebe6e9 --- /dev/null +++ b/spec/fixtures/custom_vaults/kong/vaults/random/init.lua @@ -0,0 +1,13 @@ +local utils = require "kong.tools.utils" + +local function get(conf, resource, version) + -- Return a random string every time + kong.log.err("get() called") + return utils.random_string() +end + + +return { + VERSION = "1.0.0", + get = get, +} diff --git a/spec/fixtures/custom_vaults/kong/vaults/random/schema.lua b/spec/fixtures/custom_vaults/kong/vaults/random/schema.lua new file mode 100644 index 000000000000..c48b47ce2061 --- /dev/null +++ b/spec/fixtures/custom_vaults/kong/vaults/random/schema.lua @@ -0,0 +1,19 @@ +local typedefs = require "kong.db.schema.typedefs" + +return { + name = "random", + fields = { + { + config = { + type = "record", + fields = { + { prefix = { type = "string" } }, + { suffix = { type = "string" } }, + { ttl = typedefs.ttl }, + { neg_ttl = typedefs.ttl }, + { resurrect_ttl = typedefs.ttl }, + }, + }, + }, + }, +} From a3c249d820b0bda2af2ad81a2d7b3d8c72719e93 Mon Sep 17 00:00:00 2001 From: Wangchong Zhou 
Date: Mon, 23 Oct 2023 17:33:31 +0800 Subject: [PATCH 011/249] fix(build): correctly set manifest for multiarch images (#11809) Behaviour change from https://github.com/Kong/kong/pull/11594 Fix KAG-2855 Fix #11776 --- build/dockerfiles/apk.Dockerfile | 2 +- build/dockerfiles/deb.Dockerfile | 2 +- build/dockerfiles/rpm.Dockerfile | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/build/dockerfiles/apk.Dockerfile b/build/dockerfiles/apk.Dockerfile index 808b89f3aa25..bea623c9cdd2 100644 --- a/build/dockerfiles/apk.Dockerfile +++ b/build/dockerfiles/apk.Dockerfile @@ -1,5 +1,5 @@ ARG KONG_BASE_IMAGE=alpine:3.16 -FROM $KONG_BASE_IMAGE +FROM --platform=$TARGETPLATFORM $KONG_BASE_IMAGE LABEL maintainer="Kong Docker Maintainers (@team-gateway-bot)" diff --git a/build/dockerfiles/deb.Dockerfile b/build/dockerfiles/deb.Dockerfile index 7a45d2dcfbf1..75c2252f875a 100644 --- a/build/dockerfiles/deb.Dockerfile +++ b/build/dockerfiles/deb.Dockerfile @@ -1,5 +1,5 @@ ARG KONG_BASE_IMAGE=debian:bullseye-slim -FROM $KONG_BASE_IMAGE +FROM --platform=$TARGETPLATFORM $KONG_BASE_IMAGE LABEL maintainer="Kong Docker Maintainers (@team-gateway-bot)" diff --git a/build/dockerfiles/rpm.Dockerfile b/build/dockerfiles/rpm.Dockerfile index 9dd4b87ebf02..958140c98302 100644 --- a/build/dockerfiles/rpm.Dockerfile +++ b/build/dockerfiles/rpm.Dockerfile @@ -1,5 +1,5 @@ ARG KONG_BASE_IMAGE=redhat/ubi8 -FROM $KONG_BASE_IMAGE +FROM --platform=$TARGETPLATFORM $KONG_BASE_IMAGE LABEL maintainer="Kong Docker Maintainers (@team-gateway-bot)" From 920ba98af1b30ec8bde0c62732bd6581c2f186a0 Mon Sep 17 00:00:00 2001 From: Joshua Schmid Date: Mon, 23 Oct 2023 13:46:05 +0100 Subject: [PATCH 012/249] fix(clustering): check for role->data_plane (#11814) Signed-off-by: Joshua Schmid --- kong/clustering/utils.lua | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/kong/clustering/utils.lua b/kong/clustering/utils.lua index 72e1cca30c5e..0ac9c8e69265 100644 --- a/kong/clustering/utils.lua +++ b/kong/clustering/utils.lua @@ -155,14 +155,13 @@ function _M.connect_dp(dp_id, dp_hostname, dp_ip, dp_version) return wb, log_suffix end - function _M.is_dp_worker_process() - if kong.configuration.dedicated_config_processing == true then + if kong.configuration.role == "data_plane" + and kong.configuration.dedicated_config_processing == true then return process_type() == "privileged agent" end return worker_id() == 0 end - return _M From e885ca464baaddfa446616c6960d3e86668e0c6b Mon Sep 17 00:00:00 2001 From: Andy Zhang Date: Tue, 24 Oct 2023 10:15:26 +0800 Subject: [PATCH 013/249] chore(release): bump version to 3.6.0 as part of the 3.5 Feature Freeze (#11802) --- kong-3.5.0-0.rockspec => kong-3.6.0-0.rockspec | 4 ++-- kong/meta.lua | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) rename kong-3.5.0-0.rockspec => kong-3.6.0-0.rockspec (99%) diff --git a/kong-3.5.0-0.rockspec b/kong-3.6.0-0.rockspec similarity index 99% rename from kong-3.5.0-0.rockspec rename to kong-3.6.0-0.rockspec index ca621a7bd277..0870501bd33e 100644 --- a/kong-3.5.0-0.rockspec +++ b/kong-3.6.0-0.rockspec @@ -1,10 +1,10 @@ package = "kong" -version = "3.5.0-0" +version = "3.6.0-0" rockspec_format = "3.0" supported_platforms = {"linux", "macosx"} source = { url = "git+https://github.com/Kong/kong.git", - tag = "3.5.0" + tag = "3.6.0" } description = { summary = "Kong is a scalable and customizable API Management Layer built on top of Nginx.", diff --git a/kong/meta.lua b/kong/meta.lua index 6b8b53b7b604..bc71d8a3f156 100644 --- 
a/kong/meta.lua +++ b/kong/meta.lua @@ -1,6 +1,6 @@ local version = setmetatable({ major = 3, - minor = 5, + minor = 6, patch = 0, --suffix = "-alpha.13" }, { From 59670a105cc7363333ad3bc2914d43c38e4ced98 Mon Sep 17 00:00:00 2001 From: Chrono Date: Tue, 24 Oct 2023 12:48:47 +0800 Subject: [PATCH 014/249] perf(router): use `resty.core.utils.str_replace_char()` for dashes (#11721) resty.core.utils.str_replace_char() is a better way to replace - to _. In the future string.lua will gather more functions to simplify tools.utils.lua. See: #10443 --- kong-3.6.0-0.rockspec | 1 + kong/pdk/request.lua | 17 +--------------- kong/router/atc.lua | 3 ++- kong/router/compat.lua | 5 +++-- kong/tools/string.lua | 45 ++++++++++++++++++++++++++++++++++++++++++ 5 files changed, 52 insertions(+), 19 deletions(-) create mode 100644 kong/tools/string.lua diff --git a/kong-3.6.0-0.rockspec b/kong-3.6.0-0.rockspec index 0870501bd33e..4a07e972a13b 100644 --- a/kong-3.6.0-0.rockspec +++ b/kong-3.6.0-0.rockspec @@ -164,6 +164,7 @@ build = { ["kong.tools.protobuf"] = "kong/tools/protobuf.lua", ["kong.tools.mime_type"] = "kong/tools/mime_type.lua", ["kong.tools.request_aware_table"] = "kong/tools/request_aware_table.lua", + ["kong.tools.string"] = "kong/tools/string.lua", ["kong.runloop.handler"] = "kong/runloop/handler.lua", ["kong.runloop.events"] = "kong/runloop/events.lua", diff --git a/kong/pdk/request.lua b/kong/pdk/request.lua index a5fc2f04d7d3..06fb846a2ae6 100644 --- a/kong/pdk/request.lua +++ b/kong/pdk/request.lua @@ -41,9 +41,6 @@ local get_body_file = req.get_body_file local decode_args = ngx.decode_args -local is_http_subsystem = ngx and ngx.config.subsystem == "http" - - local PHASES = phase_checker.phases @@ -85,19 +82,7 @@ local function new(self) end end - local replace_dashes do - -- 1.000.000 iterations with input of "my-header": - -- string.gsub: 81ms - -- ngx.re.gsub: 74ms - -- loop/string.buffer: 28ms - -- str_replace_char: 14ms - if is_http_subsystem then - local str_replace_char = require("resty.core.utils").str_replace_char - replace_dashes = function(str) - return str_replace_char(str, "-", "_") - end - end - end + local replace_dashes = require("kong.tools.string").replace_dashes --- diff --git a/kong/router/atc.lua b/kong/router/atc.lua index 653f09af2b58..7c59cba03b4d 100644 --- a/kong/router/atc.lua +++ b/kong/router/atc.lua @@ -562,6 +562,7 @@ local get_queries_key do local tb_sort = table.sort local tb_concat = table.concat + local replace_dashes_lower = require("kong.tools.string").replace_dashes_lower local str_buf = buffer.new(64) @@ -570,7 +571,7 @@ do -- NOTE: DO NOT yield until str_buf:get() for name, value in pairs(headers) do - local name = name:gsub("-", "_"):lower() + local name = replace_dashes_lower(name) if type(value) == "table" then for i, v in ipairs(value) do diff --git a/kong/router/compat.lua b/kong/router/compat.lua index dc0b5cdd08e9..6da3522f4698 100644 --- a/kong/router/compat.lua +++ b/kong/router/compat.lua @@ -10,7 +10,8 @@ local tb_nkeys = require("table.nkeys") local uuid = require("resty.jit-uuid") -local shallow_copy = require("kong.tools.utils").shallow_copy +local shallow_copy = require("kong.tools.utils").shallow_copy +local replace_dashes_lower = require("kong.tools.string").replace_dashes_lower local is_regex_magic = utils.is_regex_magic @@ -251,7 +252,7 @@ local function get_expression(route) single_header_buf:reset():put("(") for i, value in ipairs(v) do - local name = "any(http.headers." .. h:gsub("-", "_"):lower() .. 
")" + local name = "any(http.headers." .. replace_dashes_lower(h) .. ")" local op = OP_EQUAL -- value starts with "~*" diff --git a/kong/tools/string.lua b/kong/tools/string.lua new file mode 100644 index 000000000000..3ed03a5d293a --- /dev/null +++ b/kong/tools/string.lua @@ -0,0 +1,45 @@ +local find = string.find +local gsub = string.gsub + + +local _M = {} + + +local replace_dashes +local replace_dashes_lower +do + local str_replace_char + + if ngx and ngx.config.subsystem == "http" then + + -- 1,000,000 iterations with input of "my-header": + -- string.gsub: 81ms + -- ngx.re.gsub: 74ms + -- loop/string.buffer: 28ms + -- str_replace_char: 14ms + str_replace_char = require("resty.core.utils").str_replace_char + + else -- stream subsystem + str_replace_char = function(str, ch, replace) + if not find(str, ch, nil, true) then + return str + end + + return gsub(str, ch, replace) + end + end + + replace_dashes = function(str) + return str_replace_char(str, "-", "_") + end + + replace_dashes_lower = function(str) + return str_replace_char(str:lower(), "-", "_") + end +end +_M.replace_dashes = replace_dashes +_M.replace_dashes_lower = replace_dashes_lower + + +return _M + From dc291701faebedb2af27c20d6bc7dcf97e2560d9 Mon Sep 17 00:00:00 2001 From: Aapo Talvensaari Date: Tue, 24 Oct 2023 10:02:57 +0200 Subject: [PATCH 015/249] chore(conf): gui #admin_listen > 0 -> #admin_listeners > 0 (#11818) Signed-off-by: Aapo Talvensaari --- kong/templates/nginx_kong.lua | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/kong/templates/nginx_kong.lua b/kong/templates/nginx_kong.lua index 2b797caff6e8..7e9a04bb4f93 100644 --- a/kong/templates/nginx_kong.lua +++ b/kong/templates/nginx_kong.lua @@ -453,7 +453,7 @@ server { } > end -> if (role == "control_plane" or role == "traditional") and #admin_listen > 0 and #admin_gui_listeners > 0 then +> if (role == "control_plane" or role == "traditional") and #admin_listeners > 0 and #admin_gui_listeners > 0 then server { server_name kong_gui; > for i = 1, #admin_gui_listeners do @@ -496,7 +496,7 @@ server { include nginx-kong-gui-include.conf; } -> end -- of the (role == "control_plane" or role == "traditional") and #admin_listen > 0 and #admin_gui_listeners > 0 +> end -- of the (role == "control_plane" or role == "traditional") and #admin_listeners > 0 and #admin_gui_listeners > 0 > if role == "control_plane" then server { From 9948067131a3c9c061c8971b84f32f11edf3f075 Mon Sep 17 00:00:00 2001 From: Aapo Talvensaari Date: Tue, 24 Oct 2023 10:04:09 +0200 Subject: [PATCH 016/249] chore(tests): mark one mlcache renew test as flaky (#11816) ### Summary KAG-2857 Signed-off-by: Aapo Talvensaari --- t/05-mlcache/15-renew.t | 1 + 1 file changed, 1 insertion(+) diff --git a/t/05-mlcache/15-renew.t b/t/05-mlcache/15-renew.t index 44e322bb604e..34887a469bf0 100644 --- a/t/05-mlcache/15-renew.t +++ b/t/05-mlcache/15-renew.t @@ -2378,6 +2378,7 @@ is stale: true === TEST 48: renew() does not cache value in LRU indefinitely when retrieved from shm on last ms (see GH PR #58) +--- SKIP --- http_config eval: $::HttpConfig --- config location = /t { From d8bd50dbf377d80bc50a4484df9a0cd459980613 Mon Sep 17 00:00:00 2001 From: Aapo Talvensaari Date: Tue, 24 Oct 2023 13:29:14 +0200 Subject: [PATCH 017/249] fix(vault): properly warmups the cache on init (#11793) ### Summary Fixes issue where this was logged to logs: ``` 2023/10/18 13:53:33 [warn] 8714#0: [kong] vault.lua:861 error updating secret reference {vault://env/PG_USER}: could not find cached value ``` That 
happened for example when starting Kong with this command: ``` KONG_LOG_LEVEL=warn PG_USER=kong KONG_PG_USER={vault://env/PG_USER} ./bin/kong start ``` It auto-corrected itself, which was good in this case. This commit makes it more robust, and does not warn anymore as caches are properly warmed. Signed-off-by: Aapo Talvensaari --- .../unreleased/kong/vault-init-warmup.yml | 3 + kong/pdk/vault.lua | 168 +++++++++++++----- .../02-cmd/02-start_stop_spec.lua | 2 + 3 files changed, 131 insertions(+), 42 deletions(-) create mode 100644 changelog/unreleased/kong/vault-init-warmup.yml diff --git a/changelog/unreleased/kong/vault-init-warmup.yml b/changelog/unreleased/kong/vault-init-warmup.yml new file mode 100644 index 000000000000..611277be75b9 --- /dev/null +++ b/changelog/unreleased/kong/vault-init-warmup.yml @@ -0,0 +1,3 @@ +message: Properly warmup Vault caches on init +type: bugfix +scope: Core diff --git a/kong/pdk/vault.lua b/kong/pdk/vault.lua index 32a35e51d82d..7023d55cbc88 100644 --- a/kong/pdk/vault.lua +++ b/kong/pdk/vault.lua @@ -183,7 +183,7 @@ end local function new(self) -- Don't put this onto the top level of the file unless you're prepared for a surprise local Schema = require "kong.db.schema" - + local ROTATION_MUTEX_OPTS = { name = "vault-rotation", exptime = ROTATION_INTERVAL * 1.5, -- just in case the lock is not properly released @@ -682,7 +682,7 @@ local function new(self) return nil, err end - if kong and kong.licensing and kong.licensing:license_type() == "free" and strategy.license_required then + if strategy.license_required and self.licensing and self.licensing:license_type() == "free" then return nil, "vault " .. name .. " requires a license to be used" end @@ -738,6 +738,35 @@ local function new(self) return value, nil, ttl end + --- + -- Function `get_cache_value_and_ttl` returns a value for caching and its ttl + -- + -- @local + -- @function get_from_vault + -- @tparam string value the vault returned value for a reference + -- @tparam table config the configuration settings to be used + -- @tparam[opt] number ttl the possible vault returned ttl + -- @treturn string value to be stored in shared dictionary + -- @treturn number shared dictionary ttl + -- @treturn number lru ttl + -- @usage local value, err = get_from_vault(reference, strategy, config, cache_key, parsed_reference) + local function get_cache_value_and_ttl(value, config, ttl) + local cache_value, shdict_ttl, lru_ttl + if value then + -- adjust ttl to the minimum and maximum values configured + lru_ttl = adjust_ttl(ttl, config) + shdict_ttl = max(lru_ttl + (config.resurrect_ttl or DAO_MAX_TTL), SECRETS_CACHE_MIN_TTL) + cache_value = value + + else + -- negatively cached values will be rotated on each rotation interval + shdict_ttl = max(config.neg_ttl or 0, SECRETS_CACHE_MIN_TTL) + cache_value = NEGATIVELY_CACHED_VALUE + end + + return cache_value, shdict_ttl, lru_ttl + end + --- -- Function `get_from_vault` retrieves a value from the vault using the provided strategy. 
@@ -759,19 +788,7 @@ local function new(self) -- @usage local value, err = get_from_vault(reference, strategy, config, cache_key, parsed_reference) local function get_from_vault(reference, strategy, config, cache_key, parsed_reference) local value, err, ttl = invoke_strategy(strategy, config, parsed_reference) - local cache_value, shdict_ttl - if value then - -- adjust ttl to the minimum and maximum values configured - ttl = adjust_ttl(ttl, config) - shdict_ttl = max(ttl + (config.resurrect_ttl or DAO_MAX_TTL), SECRETS_CACHE_MIN_TTL) - cache_value = value - - else - -- negatively cached values will be rotated on each rotation interval - shdict_ttl = max(config.neg_ttl or 0, SECRETS_CACHE_MIN_TTL) - cache_value = NEGATIVELY_CACHED_VALUE - end - + local cache_value, shdict_ttl, lru_ttl = get_cache_value_and_ttl(value, config, ttl) local ok, cache_err = SECRETS_CACHE:safe_set(cache_key, cache_value, shdict_ttl) if not ok then return nil, cache_err @@ -782,7 +799,7 @@ local function new(self) return nil, fmt("could not get value from external vault (%s)", err) end - LRU:set(reference, value, ttl) + LRU:set(reference, value, lru_ttl) return value end @@ -866,26 +883,14 @@ local function new(self) --- - -- Function `update` recursively updates a configuration table. - -- - -- This function recursively in-place updates a configuration table by - -- replacing reference fields with values fetched from a cache. The references - -- are specified in a `$refs` field. - -- - -- If a reference cannot be fetched from the cache, the corresponding field is - -- set to nil and an warning is logged. + -- Recurse over config and calls the callback for each found reference. -- -- @local - -- @function update - -- @tparam table config a table representing the configuration to update (if `config` - -- is not a table, the function immediately returns it without any modifications) - -- @treturn table the config table (with possibly updated values). - -- - -- @usage - -- local config = update(config) - -- OR - -- update(config) - local function update(config) + -- @function recurse_config_refs + -- @tparam table config config table to recurse. + -- @tparam function callback callback to call on each reference. + -- @treturn table config that might have been updated, depending on callback. + local function recurse_config_refs(config, callback) -- silently ignores other than tables if type(config) ~= "table" then return config @@ -893,7 +898,7 @@ local function new(self) for key, value in pairs(config) do if key ~= "$refs" and type(value) == "table" then - update(value) + recurse_config_refs(value, callback) end end @@ -904,11 +909,11 @@ local function new(self) for name, reference in pairs(references) do if type(reference) == "string" then -- a string reference - update_from_cache(reference, config, name) + callback(reference, config, name) elseif type(reference) == "table" then -- array, set or map of references for key, ref in pairs(reference) do - update_from_cache(ref, config[name], key) + callback(ref, config[name], key) end end end @@ -917,6 +922,31 @@ local function new(self) end + --- + -- Function `update` recursively updates a configuration table. + -- + -- This function recursively in-place updates a configuration table by + -- replacing reference fields with values fetched from a cache. The references + -- are specified in a `$refs` field. + -- + -- If a reference cannot be fetched from the cache, the corresponding field is + -- set to nil and an warning is logged. 
+ -- + -- @local + -- @function update + -- @tparam table config a table representing the configuration to update (if `config` + -- is not a table, the function immediately returns it without any modifications) + -- @treturn table the config table (with possibly updated values). + -- + -- @usage + -- local config = update(config) + -- OR + -- update(config) + local function update(config) + return recurse_config_refs(config, update_from_cache) + end + + --- -- Function `get_references` recursively iterates over options and returns -- all the references in an array. The same reference is in array only once. @@ -1105,7 +1135,7 @@ local function new(self) -- We cannot retry, so let's just call the callback and return return callback(options) end - + local name = "vault.try:" .. calculate_hash(concat(references, ".")) local old_updated_at = RETRY_LRU:get(name) or 0 @@ -1296,10 +1326,6 @@ local function new(self) initialized = true - if self.configuration.role == "control_plane" then - return - end - if self.configuration.database ~= "off" then self.worker_events.register(handle_vault_crud_event, "crud", "vaults") end @@ -1311,6 +1337,61 @@ local function new(self) end + --- + -- Called on `init` phase, and stores value in secrets cache. + -- + -- @local + -- @function init_in_cache_from_value + -- @tparam string reference a vault reference. + -- @tparan value string value that is stored in secrets cache. + local function init_in_cache_from_value(reference, value) + local strategy, err, config, cache_key = get_strategy(reference) + if not strategy then + return nil, err + end + + -- doesn't support vault returned ttl, but none of the vaults supports it, + -- and the support for vault returned ttl might be removed later. + local cache_value, shdict_ttl, lru_ttl = get_cache_value_and_ttl(value, config) + + local ok, cache_err = SECRETS_CACHE:safe_set(cache_key, cache_value, shdict_ttl) + if not ok then + return nil, cache_err + end + + if value then + LRU:set(reference, value, lru_ttl) + end + + return true + end + + + --- + -- Called on `init` phase, and used to warmup secrets cache. + -- + -- @local + -- @function init_in_cache + -- @tparam string reference a vault reference. + -- @tparan table record a table that is a container for de-referenced value. + -- @tparam field string field name in a record to which to store the de-referenced value. + local function init_in_cache(reference, record, field) + local value, err = init_in_cache_from_value(reference, record[field]) + if not value then + self.log.warn("error caching secret reference ", reference, ": ", err) + end + end + + + --- + -- Called on `init` phase, and used to warmup secrets cache. + -- @local + -- @function init + local function init() + recurse_config_refs(self.configuration, init_in_cache) + end + + local _VAULT = {} -- the public PDK interfaces @@ -1482,6 +1563,9 @@ local function new(self) init_worker() end + if get_phase() == "init" then + init() + end return _VAULT end diff --git a/spec/02-integration/02-cmd/02-start_stop_spec.lua b/spec/02-integration/02-cmd/02-start_stop_spec.lua index 1bc151b0bb38..01540451b2aa 100644 --- a/spec/02-integration/02-cmd/02-start_stop_spec.lua +++ b/spec/02-integration/02-cmd/02-start_stop_spec.lua @@ -141,6 +141,8 @@ describe("kong start/stop #" .. 
strategy, function()
   }))
 
   assert.not_matches("failed to dereference {vault://env/pg_password}", stderr, nil, true)
+  assert.logfile().has.no.line("[warn]", true)
+  assert.logfile().has.no.line("env/pg_password", true)
   assert.matches("Kong started", stdout, nil, true)
   assert(kong_exec("stop", {
     prefix = PREFIX,

From 38248da33589899c81d7724c41cc56ce4614c004 Mon Sep 17 00:00:00 2001
From: Zhongwei Yao
Date: Tue, 24 Oct 2023 05:45:01 -0700
Subject: [PATCH 018/249] fix(core): print error message correctly when plugin fails. (#11800)

Before the fix, the error message was:

  [kong] init.lua:405 [aws-lambda] table: 0x04183d70, client:127.0.0.1...

After the fix:

  [kong] init.lua:405 [aws-lambda] Function not found: arn:aws:lambda:us-east-1:xxx:function:test-lambda-2, client: 127.0.0.1...
---
 changelog/unreleased/kong/fix-error-message-print.yml | 3 +++
 kong/plugins/aws-lambda/handler.lua                   | 2 +-
 2 files changed, 4 insertions(+), 1 deletion(-)
 create mode 100644 changelog/unreleased/kong/fix-error-message-print.yml

diff --git a/changelog/unreleased/kong/fix-error-message-print.yml b/changelog/unreleased/kong/fix-error-message-print.yml
new file mode 100644
index 000000000000..c3e87303f08b
--- /dev/null
+++ b/changelog/unreleased/kong/fix-error-message-print.yml
@@ -0,0 +1,3 @@
+message: print error message correctly when plugin fails
+type: bugfix
+scope: Core

diff --git a/kong/plugins/aws-lambda/handler.lua b/kong/plugins/aws-lambda/handler.lua
index 2e1b78002d03..78699df1d4ad 100644
--- a/kong/plugins/aws-lambda/handler.lua
+++ b/kong/plugins/aws-lambda/handler.lua
@@ -181,7 +181,7 @@ function AWSLambdaHandler:access(conf)
   local content = res.body
 
   if res.status >= 400 then
-    return error(content)
+    return error(content.Message)
   end
 
   -- TRACING: set KONG_WAITING_TIME stop

From 72580d5ff18fcc5cdf994a097dae8eb16215ff92 Mon Sep 17 00:00:00 2001
From: Samuele
Date: Tue, 24 Oct 2023 16:54:13 +0200
Subject: [PATCH 019/249] fix(tracing): set parent span correctly (#11786)

When the `balancer` instrumentation was enabled, the parent span was set
incorrectly on traces. This fix addresses the problem by setting the parent
span correctly on the root (`kong`) span when there is an incoming tracing
header.
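For readers unfamiliar with W3C trace context, a minimal, self-contained Lua sketch of the hierarchy this patch restores is shown below. It is illustrative only and not part of the patch; the helper names and the random span-id generation are assumptions, not Kong's internal API, and the actual change is the small diff to `kong/plugins/opentelemetry/handler.lua` that follows.

```lua
-- Illustrative sketch (not from the Kong codebase): the caller's span id from
-- the incoming `traceparent` header must become the parent of the locally
-- created root ("kong") span, while other local spans chain off local parents.

local function parse_traceparent(header)
  -- "00-<32 hex trace id>-<16 hex parent span id>-<2 hex flags>"
  local pat = "^00%-(" .. ("%x"):rep(32) .. ")%-(" .. ("%x"):rep(16) .. ")%-%x%x$"
  return header:match(pat)
end

local function new_span(name, trace_id, parent_id)
  return {
    name      = name,
    trace_id  = trace_id,
    parent_id = parent_id,
    span_id   = string.format("%016x", math.random(0, 0xffffffff)),
  }
end

local trace_id, caller_span_id =
  parse_traceparent("00-0af7651916cd43dd8448eb211c80319c-b7ad6b7169203331-01")

-- the root span is parented onto the caller's span (the behavior this fix restores) ...
local root_span     = new_span("kong", trace_id, caller_span_id)
-- ... and spans created locally chain off local parents, so the hierarchy
-- stays valid even when balancer instrumentation adds spans in between.
local balancer_span = new_span("balancer", trace_id, root_span.span_id)
local upstream_span = new_span("upstream", trace_id, balancer_span.span_id)

print(root_span.parent_id == caller_span_id)            --> true
print(upstream_span.parent_id == balancer_span.span_id) --> true
```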
--- .../kong/fix-opentelemetry-parent-id.yml | 3 ++ kong/plugins/opentelemetry/handler.lua | 8 +++-- .../37-opentelemetry/03-propagation_spec.lua | 32 +++++++++++++++++-- .../kong/plugins/trace-propagator/handler.lua | 4 +-- 4 files changed, 39 insertions(+), 8 deletions(-) create mode 100644 changelog/unreleased/kong/fix-opentelemetry-parent-id.yml diff --git a/changelog/unreleased/kong/fix-opentelemetry-parent-id.yml b/changelog/unreleased/kong/fix-opentelemetry-parent-id.yml new file mode 100644 index 000000000000..5eb4c0284329 --- /dev/null +++ b/changelog/unreleased/kong/fix-opentelemetry-parent-id.yml @@ -0,0 +1,3 @@ +message: "**Opentelemetry**: fix an issue that resulted in traces with invalid parent IDs when `balancer` instrumentation was enabled" +type: bugfix +scope: Plugin diff --git a/kong/plugins/opentelemetry/handler.lua b/kong/plugins/opentelemetry/handler.lua index 96f186cdf29a..b0a4bfa67d35 100644 --- a/kong/plugins/opentelemetry/handler.lua +++ b/kong/plugins/opentelemetry/handler.lua @@ -115,16 +115,18 @@ function OpenTelemetryHandler:access(conf) -- overwrite trace id -- as we are in a chain of existing trace if trace_id then + -- to propagate the correct trace ID we have to set it here + -- before passing this span to propagation.set() injected_parent_span.trace_id = trace_id kong.ctx.plugin.trace_id = trace_id end - -- overwrite parent span's parent_id + -- overwrite root span's parent_id if span_id then - injected_parent_span.parent_id = span_id + root_span.parent_id = span_id elseif parent_id then - injected_parent_span.parent_id = parent_id + root_span.parent_id = parent_id end propagation_set(conf.header_type, header_type, injected_parent_span, "w3c") diff --git a/spec/03-plugins/37-opentelemetry/03-propagation_spec.lua b/spec/03-plugins/37-opentelemetry/03-propagation_spec.lua index 35c32a8488bf..daf0a6ee2d84 100644 --- a/spec/03-plugins/37-opentelemetry/03-propagation_spec.lua +++ b/spec/03-plugins/37-opentelemetry/03-propagation_spec.lua @@ -32,6 +32,30 @@ local function assert_has_span(name, spans) return span end +local function get_span_by_id(spans, id) + for _, span in ipairs(spans) do + if span.span_id == id then + return span + end + end +end + +local function assert_correct_trace_hierarchy(spans, incoming_span_id) + for _, span in ipairs(spans) do + if span.name == "kong" then + -- if there is an incoming span id, it should be the parent of the root span + if incoming_span_id then + assert.equals(incoming_span_id, span.parent_id) + end + + else + -- all other spans in this trace should have a local span as parent + assert.not_equals(incoming_span_id, span.parent_id) + assert.is_truthy(get_span_by_id(spans, span.parent_id)) + end + end +end + for _, strategy in helpers.each_strategy() do describe("propagation tests #" .. strategy, function() local service @@ -321,7 +345,7 @@ describe("propagation tests #" .. strategy, function() end) end) -for _, instrumentation in ipairs({ "request", "request,balancer" }) do +for _, instrumentation in ipairs({ "request", "request,balancer", "all" }) do describe("propagation tests with enabled " .. instrumentation .. " instrumentation (issue #11294) #" .. strategy, function() local service, route local proxy_client @@ -370,12 +394,12 @@ describe("propagation tests with enabled " .. instrumentation .. 
" instrumentati it("sets the outgoint parent span's ID correctly", function() local trace_id = gen_trace_id() - local span_id = gen_span_id() + local incoming_span_id = gen_span_id() local thread = helpers.tcp_server(TCP_PORT) local r = proxy_client:get("/", { headers = { - traceparent = fmt("00-%s-%s-01", trace_id, span_id), + traceparent = fmt("00-%s-%s-01", trace_id, incoming_span_id), host = "http-route" }, }) @@ -398,6 +422,8 @@ describe("propagation tests with enabled " .. instrumentation .. " instrumentati local json = cjson.decode(body) assert.matches("00%-" .. trace_id .. "%-" .. parent_span.span_id .. "%-01", json.headers.traceparent) + + assert_correct_trace_hierarchy(spans, incoming_span_id) end) end) end diff --git a/spec/fixtures/custom_plugins/kong/plugins/trace-propagator/handler.lua b/spec/fixtures/custom_plugins/kong/plugins/trace-propagator/handler.lua index 13b692e44603..daf8a36c3581 100644 --- a/spec/fixtures/custom_plugins/kong/plugins/trace-propagator/handler.lua +++ b/spec/fixtures/custom_plugins/kong/plugins/trace-propagator/handler.lua @@ -33,10 +33,10 @@ function _M:access(conf) end if span_id then - injected_parent_span.parent_id = span_id + root_span.parent_id = span_id elseif parent_id then - injected_parent_span.parent_id = parent_id + root_span.parent_id = parent_id end local type = header_type and "preserve" or "w3c" From aa16028d15c12eb691328bce8f3a00eac5812473 Mon Sep 17 00:00:00 2001 From: Aapo Talvensaari Date: Tue, 24 Oct 2023 17:26:10 +0200 Subject: [PATCH 020/249] chore(vault): fix docstring of get_cache_value_and_ttl (#11828) Signed-off-by: Aapo Talvensaari --- kong/pdk/vault.lua | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/kong/pdk/vault.lua b/kong/pdk/vault.lua index 7023d55cbc88..08f3a0d03a1c 100644 --- a/kong/pdk/vault.lua +++ b/kong/pdk/vault.lua @@ -742,14 +742,14 @@ local function new(self) -- Function `get_cache_value_and_ttl` returns a value for caching and its ttl -- -- @local - -- @function get_from_vault + -- @function get_cache_value_and_ttl -- @tparam string value the vault returned value for a reference -- @tparam table config the configuration settings to be used -- @tparam[opt] number ttl the possible vault returned ttl -- @treturn string value to be stored in shared dictionary -- @treturn number shared dictionary ttl -- @treturn number lru ttl - -- @usage local value, err = get_from_vault(reference, strategy, config, cache_key, parsed_reference) + -- @usage local cache_value, shdict_ttl, lru_ttl = get_cache_value_and_ttl(value, config, ttl) local function get_cache_value_and_ttl(value, config, ttl) local cache_value, shdict_ttl, lru_ttl if value then From 9b138109692c02791d6e6dcae1b5b9bdc3fa5f68 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 23 Oct 2023 08:37:18 +0000 Subject: [PATCH 021/249] chore(deps): bump docker/build-push-action from 3 to 5 Bumps [docker/build-push-action](https://github.com/docker/build-push-action) from 3 to 5. - [Release notes](https://github.com/docker/build-push-action/releases) - [Commits](https://github.com/docker/build-push-action/compare/v3...v5) --- updated-dependencies: - dependency-name: docker/build-push-action dependency-type: direct:production update-type: version-update:semver-major ... 
Signed-off-by: dependabot[bot] --- .github/workflows/upgrade-tests.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/upgrade-tests.yml b/.github/workflows/upgrade-tests.yml index 94f2420c90c9..db8c8a2ff901 100644 --- a/.github/workflows/upgrade-tests.yml +++ b/.github/workflows/upgrade-tests.yml @@ -54,7 +54,7 @@ jobs: mv bazel-bin/pkg/kong.amd64.deb . - name: Build Docker Image - uses: docker/build-push-action@v3 + uses: docker/build-push-action@v5 with: file: build/dockerfiles/deb.Dockerfile context: . From d28685606ff80953acf3577309e437bde56dc3ce Mon Sep 17 00:00:00 2001 From: Chrono Date: Wed, 25 Oct 2023 10:55:47 +0800 Subject: [PATCH 022/249] refactor(pdk): use `resty.core.utils.str_replace_char` instead of `gsub` (#11823) It is a sister PR of #11721, optimize the code of pdk. --- kong/pdk/service/response.lua | 12 ++++++++---- kong/pdk/vault.lua | 7 ++++--- 2 files changed, 12 insertions(+), 7 deletions(-) diff --git a/kong/pdk/service/response.lua b/kong/pdk/service/response.lua index 7a0598a368ca..7a47419f96fb 100644 --- a/kong/pdk/service/response.lua +++ b/kong/pdk/service/response.lua @@ -6,12 +6,12 @@ local cjson = require "cjson.safe".new() local multipart = require "multipart" local phase_checker = require "kong.pdk.private.phases" +local string_tools = require "kong.tools.string" local ngx = ngx local sub = string.sub local fmt = string.format -local gsub = string.gsub local find = string.find local type = type local error = error @@ -26,6 +26,10 @@ local check_phase = phase_checker.check cjson.decode_array_with_array_mt(true) +local replace_dashes = string_tools.replace_dashes +local replace_dashes_lower = string_tools.replace_dashes_lower + + local PHASES = phase_checker.phases @@ -45,7 +49,7 @@ do local resp_headers_mt = { __index = function(t, name) if type(name) == "string" then - local var = fmt("upstream_http_%s", gsub(lower(name), "-", "_")) + local var = fmt("upstream_http_%s", replace_dashes_lower(name)) if not ngx.var[var] then return nil end @@ -94,7 +98,7 @@ do return response_headers[name] end - name = gsub(name, "-", "_") + name = replace_dashes(name) if response_headers[name] then return response_headers[name] @@ -106,7 +110,7 @@ do return nil end - n = gsub(lower(n), "-", "_") + n = replace_dashes_lower(n) if n == name then return v end diff --git a/kong/pdk/vault.lua b/kong/pdk/vault.lua index 08f3a0d03a1c..99e975f6e3f0 100644 --- a/kong/pdk/vault.lua +++ b/kong/pdk/vault.lua @@ -17,11 +17,13 @@ local isempty = require "table.isempty" local buffer = require "string.buffer" local clone = require "table.clone" local utils = require "kong.tools.utils" +local string_tools = require "kong.tools.string" local cjson = require("cjson.safe").new() local yield = utils.yield local get_updated_now_ms = utils.get_updated_now_ms +local replace_dashes = string_tools.replace_dashes local ngx = ngx @@ -30,7 +32,6 @@ local max = math.max local fmt = string.format local sub = string.sub local byte = string.byte -local gsub = string.gsub local type = type local sort = table.sort local pcall = pcall @@ -539,7 +540,7 @@ local function new(self) base_config = {} if self and self.configuration then local configuration = self.configuration - local env_name = gsub(name, "-", "_") + local env_name = replace_dashes(name) local _, err, schema = get_vault_strategy_and_schema(name) if not schema then return nil, err @@ -553,7 +554,7 @@ local function new(self) -- then you would configure it with KONG_VAULT_MY_VAULT_ -- or in kong.conf, where it 
would be called -- "vault_my_vault_". - local n = lower(fmt("vault_%s_%s", env_name, gsub(k, "-", "_"))) + local n = lower(fmt("vault_%s_%s", env_name, replace_dashes(k))) local v = configuration[n] v = arguments.infer_value(v, f) -- TODO: should we be more visible with validation errors? From 6c2dbb14d8ad0a3fd2a5d07e2e7aa294260fc6cc Mon Sep 17 00:00:00 2001 From: Chrono Date: Wed, 25 Oct 2023 11:47:25 +0800 Subject: [PATCH 023/249] refactor(tools): separate table related functions from utils (#11723) This PR is a try to refactor the big tools.utils.lua, now it moves the functions of table into a separated module. KAG-2739 --- kong-3.6.0-0.rockspec | 1 + kong/tools/table.lua | 323 ++++++++++++++++++++++++++++++++++++++++++ kong/tools/utils.lua | 313 ++-------------------------------------- 3 files changed, 332 insertions(+), 305 deletions(-) create mode 100644 kong/tools/table.lua diff --git a/kong-3.6.0-0.rockspec b/kong-3.6.0-0.rockspec index 4a07e972a13b..8c5f77f00225 100644 --- a/kong-3.6.0-0.rockspec +++ b/kong-3.6.0-0.rockspec @@ -165,6 +165,7 @@ build = { ["kong.tools.mime_type"] = "kong/tools/mime_type.lua", ["kong.tools.request_aware_table"] = "kong/tools/request_aware_table.lua", ["kong.tools.string"] = "kong/tools/string.lua", + ["kong.tools.table"] = "kong/tools/table.lua", ["kong.runloop.handler"] = "kong/runloop/handler.lua", ["kong.runloop.events"] = "kong/runloop/events.lua", diff --git a/kong/tools/table.lua b/kong/tools/table.lua new file mode 100644 index 000000000000..8999954908db --- /dev/null +++ b/kong/tools/table.lua @@ -0,0 +1,323 @@ +local type = type +local pairs = pairs +local ipairs = ipairs +local select = select +local tostring = tostring +local insert = table.insert +local setmetatable = setmetatable +local getmetatable = getmetatable + + +local _M = {} + + +--- packs a set of arguments in a table. +-- Explicitly sets field `n` to the number of arguments, so it is `nil` safe +_M.pack = function(...) return {n = select("#", ...), ...} end + + +--- unpacks a table to a list of arguments. +-- Explicitly honors the `n` field if given in the table, so it is `nil` safe +_M.unpack = function(t, i, j) return unpack(t, i or 1, j or t.n or #t) end + + +--- Merges two table together. +-- A new table is created with a non-recursive copy of the provided tables +-- @param t1 The first table +-- @param t2 The second table +-- @return The (new) merged table +function _M.table_merge(t1, t2) + local res = {} + if t1 then + for k,v in pairs(t1) do + res[k] = v + end + end + if t2 then + for k,v in pairs(t2) do + res[k] = v + end + end + return res +end + + +--- Checks if a value exists in a table. 
+-- @param arr The table to use +-- @param val The value to check +-- @return Returns `true` if the table contains the value, `false` otherwise +function _M.table_contains(arr, val) + if arr then + for _, v in pairs(arr) do + if v == val then + return true + end + end + end + return false +end + + +do + local floor = math.floor + local max = math.max + + local is_array_fast = require "table.isarray" + + local is_array_strict = function(t) + local m, c = 0, 0 + for k in pairs(t) do + if type(k) ~= "number" or k < 1 or floor(k) ~= k then + return false + end + m = max(m, k) + c = c + 1 + end + return c == m + end + + local is_array_lapis = function(t) + if type(t) ~= "table" then + return false + end + local i = 0 + for _ in pairs(t) do + i = i + 1 + if t[i] == nil and t[tostring(i)] == nil then + return false + end + end + return true + end + + --- Checks if a table is an array and not an associative array. + -- @param t The table to check + -- @param mode: `"strict"`: only sequential indices starting from 1 are allowed (no holes) + -- `"fast"`: OpenResty optimized version (holes and negative indices are ok) + -- `"lapis"`: Allows numeric indices as strings (no holes) + -- @return Returns `true` if the table is an array, `false` otherwise + function _M.is_array(t, mode) + if type(t) ~= "table" then + return false + end + + if mode == "lapis" then + return is_array_lapis(t) + end + + if mode == "fast" then + return is_array_fast(t) + end + + return is_array_strict(t) + end +end + + +--- Checks if a table is an array and not an associative array. +-- *** NOTE *** string-keys containing integers are considered valid array entries! +-- @param t The table to check +-- @return Returns `true` if the table is an array, `false` otherwise +function _M.is_lapis_array(t) + if type(t) ~= "table" then + return false + end + local i = 0 + for _ in pairs(t) do + i = i + 1 + if t[i] == nil and t[tostring(i)] == nil then + return false + end + end + return true +end + + +--- Deep copies a table into a new table. +-- Tables used as keys are also deep copied, as are metatables +-- @param orig The table to copy +-- @param copy_mt Copy metatable (default is true) +-- @return Returns a copy of the input table +function _M.deep_copy(orig, copy_mt) + if copy_mt == nil then + copy_mt = true + end + local copy + if type(orig) == "table" then + copy = {} + for orig_key, orig_value in next, orig, nil do + copy[_M.deep_copy(orig_key)] = _M.deep_copy(orig_value, copy_mt) + end + if copy_mt then + setmetatable(copy, _M.deep_copy(getmetatable(orig))) + end + else + copy = orig + end + return copy +end + + +do + local clone = require "table.clone" + + --- Copies a table into a new table. + -- neither sub tables nor metatables will be copied. + -- @param orig The table to copy + -- @return Returns a copy of the input table + function _M.shallow_copy(orig) + local copy + if type(orig) == "table" then + copy = clone(orig) + else -- number, string, boolean, etc + copy = orig + end + return copy + end +end + + +--- Merges two tables recursively +-- For each sub-table in t1 and t2, an equivalent (but different) table will +-- be created in the resulting merge. If t1 and t2 have a sub-table with the +-- same key k, res[k] will be a deep merge of both sub-tables. +-- Metatables are not taken into account. 
+-- Keys are copied by reference (if tables are used as keys they will not be +-- duplicated) +-- @param t1 one of the tables to merge +-- @param t2 one of the tables to merge +-- @return Returns a table representing a deep merge of the new table +function _M.deep_merge(t1, t2) + local res = _M.deep_copy(t1) + + for k, v in pairs(t2) do + if type(v) == "table" and type(res[k]) == "table" then + res[k] = _M.deep_merge(res[k], v) + else + res[k] = _M.deep_copy(v) -- returns v when it is not a table + end + end + + return res +end + + +--- Cycle aware deep copies a table into a new table. +-- Cycle aware means that a table value is only copied once even +-- if it is referenced multiple times in input table or its sub-tables. +-- Tables used as keys are not deep copied. Metatables are set to same +-- on copies as they were in the original. +-- @param orig The table to copy +-- @param remove_metatables Removes the metatables when set to `true`. +-- @param deep_copy_keys Deep copies the keys (and not only the values) when set to `true`. +-- @param cycle_aware_cache Cached tables that are not copied (again). +-- (the function creates this table when not given) +-- @return Returns a copy of the input table +function _M.cycle_aware_deep_copy(orig, remove_metatables, deep_copy_keys, cycle_aware_cache) + if type(orig) ~= "table" then + return orig + end + + cycle_aware_cache = cycle_aware_cache or {} + if cycle_aware_cache[orig] then + return cycle_aware_cache[orig] + end + + local copy = _M.shallow_copy(orig) + + cycle_aware_cache[orig] = copy + + local mt + if not remove_metatables then + mt = getmetatable(orig) + end + + for k, v in pairs(orig) do + if type(v) == "table" then + copy[k] = _M.cycle_aware_deep_copy(v, remove_metatables, deep_copy_keys, cycle_aware_cache) + end + + if deep_copy_keys and type(k) == "table" then + local new_k = _M.cycle_aware_deep_copy(k, remove_metatables, deep_copy_keys, cycle_aware_cache) + copy[new_k] = copy[k] + copy[k] = nil + end + end + + if mt then + setmetatable(copy, mt) + end + + return copy +end + + +--- Cycle aware merges two tables recursively +-- The table t1 is deep copied using cycle_aware_deep_copy function. +-- The table t2 is deep merged into t1. The t2 values takes precedence +-- over t1 ones. Tables used as keys are not deep copied. Metatables +-- are set to same on copies as they were in the original. +-- @param t1 one of the tables to merge +-- @param t2 one of the tables to merge +-- @param remove_metatables Removes the metatables when set to `true`. +-- @param deep_copy_keys Deep copies the keys (and not only the values) when set to `true`. +-- @param cycle_aware_cache Cached tables that are not copied (again) +-- (the function creates this table when not given) +-- @return Returns a table representing a deep merge of the new table +function _M.cycle_aware_deep_merge(t1, t2, remove_metatables, deep_copy_keys, cycle_aware_cache) + cycle_aware_cache = cycle_aware_cache or {} + local merged = _M.cycle_aware_deep_copy(t1, remove_metatables, deep_copy_keys, cycle_aware_cache) + for k, v in pairs(t2) do + if type(v) == "table" then + if type(merged[k]) == "table" then + merged[k] = _M.cycle_aware_deep_merge(merged[k], v, remove_metatables, deep_copy_keys, cycle_aware_cache) + else + merged[k] = _M.cycle_aware_deep_copy(v, remove_metatables, deep_copy_keys, cycle_aware_cache) + end + else + merged[k] = v + end + end + return merged +end + + +--- Concatenates lists into a new table. +function _M.concat(...) 
+ local result = {} + for _, t in ipairs({...}) do + for _, v in ipairs(t) do insert(result, v) end + end + return result +end + + +local err_list_mt = {} + + +--- Add an error message to a key/value table. +-- If the key already exists, a sub table is created with the original and the new value. +-- @param errors (Optional) Table to attach the error to. If `nil`, the table will be created. +-- @param k Key on which to insert the error in the `errors` table. +-- @param v Value of the error +-- @return The `errors` table with the new error inserted. +function _M.add_error(errors, k, v) + if not errors then + errors = {} + end + + if errors and errors[k] then + if getmetatable(errors[k]) ~= err_list_mt then + errors[k] = setmetatable({errors[k]}, err_list_mt) + end + + insert(errors[k], v) + else + errors[k] = v + end + + return errors +end + + +return _M diff --git a/kong/tools/utils.lua b/kong/tools/utils.lua index 5c1522eadef6..6fbc5b7b739d 100644 --- a/kong/tools/utils.lua +++ b/kong/tools/utils.lua @@ -22,7 +22,6 @@ local ffi_new = ffi.new local type = type local pairs = pairs local ipairs = ipairs -local select = select local tostring = tostring local tonumber = tonumber local sort = table.sort @@ -39,7 +38,6 @@ local re_match = ngx.re.match local inflate_gzip = zlib.inflateGzip local deflate_gzip = zlib.deflateGzip local setmetatable = setmetatable -local getmetatable = getmetatable ffi.cdef[[ typedef unsigned char u_char; @@ -91,14 +89,6 @@ _M.strip = function(str) end end ---- packs a set of arguments in a table. --- Explicitly sets field `n` to the number of arguments, so it is `nil` safe -_M.pack = function(...) return {n = select("#", ...), ...} end - ---- unpacks a table to a list of arguments. --- Explicitly honors the `n` field if given in the table, so it is `nil` safe -_M.unpack = function(t, i, j) return unpack(t, i or 1, j or t.n or #t) end - do local _system_infos @@ -471,301 +461,6 @@ _M.check_https = function(trusted_ip, allow_terminated) return false end ---- Merges two table together. --- A new table is created with a non-recursive copy of the provided tables --- @param t1 The first table --- @param t2 The second table --- @return The (new) merged table -function _M.table_merge(t1, t2) - local res = {} - if t1 then - for k,v in pairs(t1) do - res[k] = v - end - end - if t2 then - for k,v in pairs(t2) do - res[k] = v - end - end - return res -end - ---- Checks if a value exists in a table. --- @param arr The table to use --- @param val The value to check --- @return Returns `true` if the table contains the value, `false` otherwise -function _M.table_contains(arr, val) - if arr then - for _, v in pairs(arr) do - if v == val then - return true - end - end - end - return false -end - - -do - local floor = math.floor - local max = math.max - - local is_array_fast = require "table.isarray" - - local is_array_strict = function(t) - local m, c = 0, 0 - for k in pairs(t) do - if type(k) ~= "number" or k < 1 or floor(k) ~= k then - return false - end - m = max(m, k) - c = c + 1 - end - return c == m - end - - local is_array_lapis = function(t) - if type(t) ~= "table" then - return false - end - local i = 0 - for _ in pairs(t) do - i = i + 1 - if t[i] == nil and t[tostring(i)] == nil then - return false - end - end - return true - end - - --- Checks if a table is an array and not an associative array. 
- -- @param t The table to check - -- @param mode: `"strict"`: only sequential indices starting from 1 are allowed (no holes) - -- `"fast"`: OpenResty optimized version (holes and negative indices are ok) - -- `"lapis"`: Allows numeric indices as strings (no holes) - -- @return Returns `true` if the table is an array, `false` otherwise - function _M.is_array(t, mode) - if type(t) ~= "table" then - return false - end - - if mode == "lapis" then - return is_array_lapis(t) - end - - if mode == "fast" then - return is_array_fast(t) - end - - return is_array_strict(t) - end -end - - ---- Checks if a table is an array and not an associative array. --- *** NOTE *** string-keys containing integers are considered valid array entries! --- @param t The table to check --- @return Returns `true` if the table is an array, `false` otherwise -function _M.is_lapis_array(t) - if type(t) ~= "table" then - return false - end - local i = 0 - for _ in pairs(t) do - i = i + 1 - if t[i] == nil and t[tostring(i)] == nil then - return false - end - end - return true -end - - ---- Deep copies a table into a new table. --- Tables used as keys are also deep copied, as are metatables --- @param orig The table to copy --- @param copy_mt Copy metatable (default is true) --- @return Returns a copy of the input table -function _M.deep_copy(orig, copy_mt) - if copy_mt == nil then - copy_mt = true - end - local copy - if type(orig) == "table" then - copy = {} - for orig_key, orig_value in next, orig, nil do - copy[_M.deep_copy(orig_key)] = _M.deep_copy(orig_value, copy_mt) - end - if copy_mt then - setmetatable(copy, _M.deep_copy(getmetatable(orig))) - end - else - copy = orig - end - return copy -end - - -do - local clone = require "table.clone" - - --- Copies a table into a new table. - -- neither sub tables nor metatables will be copied. - -- @param orig The table to copy - -- @return Returns a copy of the input table - function _M.shallow_copy(orig) - local copy - if type(orig) == "table" then - copy = clone(orig) - else -- number, string, boolean, etc - copy = orig - end - return copy - end -end - - ---- Merges two tables recursively --- For each sub-table in t1 and t2, an equivalent (but different) table will --- be created in the resulting merge. If t1 and t2 have a sub-table with the --- same key k, res[k] will be a deep merge of both sub-tables. --- Metatables are not taken into account. --- Keys are copied by reference (if tables are used as keys they will not be --- duplicated) --- @param t1 one of the tables to merge --- @param t2 one of the tables to merge --- @return Returns a table representing a deep merge of the new table -function _M.deep_merge(t1, t2) - local res = _M.deep_copy(t1) - - for k, v in pairs(t2) do - if type(v) == "table" and type(res[k]) == "table" then - res[k] = _M.deep_merge(res[k], v) - else - res[k] = _M.deep_copy(v) -- returns v when it is not a table - end - end - - return res -end - - ---- Cycle aware deep copies a table into a new table. --- Cycle aware means that a table value is only copied once even --- if it is referenced multiple times in input table or its sub-tables. --- Tables used as keys are not deep copied. Metatables are set to same --- on copies as they were in the original. --- @param orig The table to copy --- @param remove_metatables Removes the metatables when set to `true`. --- @param deep_copy_keys Deep copies the keys (and not only the values) when set to `true`. --- @param cycle_aware_cache Cached tables that are not copied (again). 
--- (the function creates this table when not given) --- @return Returns a copy of the input table -function _M.cycle_aware_deep_copy(orig, remove_metatables, deep_copy_keys, cycle_aware_cache) - if type(orig) ~= "table" then - return orig - end - - cycle_aware_cache = cycle_aware_cache or {} - if cycle_aware_cache[orig] then - return cycle_aware_cache[orig] - end - - local copy = _M.shallow_copy(orig) - - cycle_aware_cache[orig] = copy - - local mt - if not remove_metatables then - mt = getmetatable(orig) - end - - for k, v in pairs(orig) do - if type(v) == "table" then - copy[k] = _M.cycle_aware_deep_copy(v, remove_metatables, deep_copy_keys, cycle_aware_cache) - end - - if deep_copy_keys and type(k) == "table" then - local new_k = _M.cycle_aware_deep_copy(k, remove_metatables, deep_copy_keys, cycle_aware_cache) - copy[new_k] = copy[k] - copy[k] = nil - end - end - - if mt then - setmetatable(copy, mt) - end - - return copy -end - - ---- Cycle aware merges two tables recursively --- The table t1 is deep copied using cycle_aware_deep_copy function. --- The table t2 is deep merged into t1. The t2 values takes precedence --- over t1 ones. Tables used as keys are not deep copied. Metatables --- are set to same on copies as they were in the original. --- @param t1 one of the tables to merge --- @param t2 one of the tables to merge --- @param remove_metatables Removes the metatables when set to `true`. --- @param deep_copy_keys Deep copies the keys (and not only the values) when set to `true`. --- @param cycle_aware_cache Cached tables that are not copied (again) --- (the function creates this table when not given) --- @return Returns a table representing a deep merge of the new table -function _M.cycle_aware_deep_merge(t1, t2, remove_metatables, deep_copy_keys, cycle_aware_cache) - cycle_aware_cache = cycle_aware_cache or {} - local merged = _M.cycle_aware_deep_copy(t1, remove_metatables, deep_copy_keys, cycle_aware_cache) - for k, v in pairs(t2) do - if type(v) == "table" then - if type(merged[k]) == "table" then - merged[k] = _M.cycle_aware_deep_merge(merged[k], v, remove_metatables, deep_copy_keys, cycle_aware_cache) - else - merged[k] = _M.cycle_aware_deep_copy(v, remove_metatables, deep_copy_keys, cycle_aware_cache) - end - else - merged[k] = v - end - end - return merged -end - - -local err_list_mt = {} - ---- Concatenates lists into a new table. -function _M.concat(...) - local result = {} - local insert = table.insert - for _, t in ipairs({...}) do - for _, v in ipairs(t) do insert(result, v) end - end - return result -end - ---- Add an error message to a key/value table. --- If the key already exists, a sub table is created with the original and the new value. --- @param errors (Optional) Table to attach the error to. If `nil`, the table will be created. --- @param k Key on which to insert the error in the `errors` table. --- @param v Value of the error --- @return The `errors` table with the new error inserted. -function _M.add_error(errors, k, v) - if not errors then - errors = {} - end - - if errors and errors[k] then - if getmetatable(errors[k]) ~= err_list_mt then - errors[k] = setmetatable({errors[k]}, err_list_mt) - end - - insert(errors[k], v) - else - errors[k] = v - end - - return errors -end --- Try to load a module. 
-- Will not throw an error if the module was not found, but will throw an error if the @@ -1849,4 +1544,12 @@ _M.get_start_time_ms = get_start_time_ms _M.get_updated_monotonic_ms = get_updated_monotonic_ms +do + local tbl = require "kong.tools.table" + for name, func in pairs(tbl) do + _M[name] = func + end +end + + return _M From 616bc7f7a041599971aa934b2f910336754f08e4 Mon Sep 17 00:00:00 2001 From: Wangchong Zhou Date: Wed, 25 Oct 2023 13:37:12 +0800 Subject: [PATCH 024/249] fix(ci): correctly exit 1 when changelog not found `>` evaluates multiline string into a single line thus makes `exit 1` becoming an argument for `echo`. --- .github/workflows/changelog-requirement.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/changelog-requirement.yml b/.github/workflows/changelog-requirement.yml index c53e26a17d61..eba804875b24 100644 --- a/.github/workflows/changelog-requirement.yml +++ b/.github/workflows/changelog-requirement.yml @@ -33,7 +33,7 @@ jobs: - name: Check changelog existence if: steps.changelog-list.outputs.changelogs_any_changed == 'false' - run: > + run: | echo "Changelog file expected but found none. If you believe this PR requires no changelog entry, label it with \"skip-changelog\"." echo "Refer to https://github.com/Kong/gateway-changelog for format guidelines." exit 1 @@ -56,7 +56,7 @@ jobs: exit 1 - name: Fail when deprecated YAML keys are used - run: > + run: | for file in ${{ steps.changelog-list.outputs.changelogs_all_changed_files }}; do if grep -q "prs:" $file || grep -q "jiras:" $file; then echo "Please do not include \"prs\" or \"jiras\" keys in new changelogs, put the JIRA number inside commit message and PR description instead." From 14521a0c132a48be2ebbd6399a61951820c0bdf2 Mon Sep 17 00:00:00 2001 From: Chrono Date: Wed, 25 Oct 2023 16:36:19 +0800 Subject: [PATCH 025/249] refactor(tools): simplify the logic of request_aware_table (#11756) There are some duplicated code in tracing.request_id and request_aware_table, use request_id.get() to get clean code. --- kong/tools/request_aware_table.lua | 21 ++++++--------------- kong/tracing/request_id.lua | 1 + 2 files changed, 7 insertions(+), 15 deletions(-) diff --git a/kong/tools/request_aware_table.lua b/kong/tools/request_aware_table.lua index e67d18e9eac0..c1424d9e917a 100644 --- a/kong/tools/request_aware_table.lua +++ b/kong/tools/request_aware_table.lua @@ -5,37 +5,28 @@ local table_new = require("table.new") local table_clear = require("table.clear") local get_request_id = require("kong.tracing.request_id").get + local is_not_debug_mode = (kong.configuration.log_level ~= "debug") local error = error local rawset = rawset local setmetatable = setmetatable -local get_phase = ngx.get_phase - - -local NGX_VAR_PHASES = { - set = true, - rewrite = true, - access = true, - content = true, - header_filter = true, - body_filter = true, - log = true, - balancer = true, -} + + local ALLOWED_REQUEST_ID_K = "__allowed_request_id" -- Check if access is allowed for table, based on the request ID local function enforce_sequential_access(table) - if not NGX_VAR_PHASES[get_phase()] then + local curr_request_id = get_request_id() + + if not curr_request_id then -- allow access and reset allowed request ID rawset(table, ALLOWED_REQUEST_ID_K, nil) return end - local curr_request_id = get_request_id() local allowed_request_id = rawget(table, ALLOWED_REQUEST_ID_K) if not allowed_request_id then -- first access. 
Set allowed request ID and allow access diff --git a/kong/tracing/request_id.lua b/kong/tracing/request_id.lua index bab196df1bb2..d391712ef4c4 100644 --- a/kong/tracing/request_id.lua +++ b/kong/tracing/request_id.lua @@ -2,6 +2,7 @@ local ngx = ngx local var = ngx.var local get_phase = ngx.get_phase + local NGX_VAR_PHASES = { set = true, rewrite = true, From 8ea36de8d6fb55274c46dd13af3cc6bf592dcf91 Mon Sep 17 00:00:00 2001 From: "Qirui(Keery) Nie" Date: Wed, 25 Oct 2023 17:40:06 +0800 Subject: [PATCH 026/249] chore(deps): bump `kong-lapis` from `1.14.0.2` to `1.14.0.3` (#11839) --- changelog/unreleased/kong/lapis_version_bump.yml | 2 ++ kong-3.6.0-0.rockspec | 2 +- 2 files changed, 3 insertions(+), 1 deletion(-) create mode 100644 changelog/unreleased/kong/lapis_version_bump.yml diff --git a/changelog/unreleased/kong/lapis_version_bump.yml b/changelog/unreleased/kong/lapis_version_bump.yml new file mode 100644 index 000000000000..a554877f6a7b --- /dev/null +++ b/changelog/unreleased/kong/lapis_version_bump.yml @@ -0,0 +1,2 @@ +message: "Bumped kong-lapis from 1.14.0.2 to 1.14.0.3" +type: dependency diff --git a/kong-3.6.0-0.rockspec b/kong-3.6.0-0.rockspec index 8c5f77f00225..fd8356805d4b 100644 --- a/kong-3.6.0-0.rockspec +++ b/kong-3.6.0-0.rockspec @@ -21,7 +21,7 @@ dependencies = { "lua-ffi-zlib == 0.6", "multipart == 0.5.9", "version == 1.0.1", - "kong-lapis == 1.14.0.2", + "kong-lapis == 1.14.0.3", "kong-pgmoon == 1.16.2", "luatz == 0.4", "lua_system_constants == 0.1.4", From 0bd4eb59933703ab6881f815d72eb18d150c8e44 Mon Sep 17 00:00:00 2001 From: Water-Melon Date: Wed, 25 Oct 2023 08:17:06 +0000 Subject: [PATCH 027/249] chore(deps): Bump OpenSSL version to 3.1.4 KAG-2883 --- .github/workflows/release.yml | 11 +++++++---- .requirements | 2 +- build/openresty/openssl/openssl_repositories.bzl | 2 +- changelog/unreleased/kong/bump_openssl_3.1.4.yml | 2 ++ .../explain_manifest/fixtures/amazonlinux-2-amd64.txt | 3 ++- .../fixtures/amazonlinux-2023-amd64.txt | 3 ++- .../fixtures/amazonlinux-2023-arm64.txt | 3 ++- scripts/explain_manifest/fixtures/debian-10-amd64.txt | 3 ++- scripts/explain_manifest/fixtures/debian-11-amd64.txt | 3 ++- scripts/explain_manifest/fixtures/el7-amd64.txt | 3 ++- scripts/explain_manifest/fixtures/el8-amd64.txt | 3 ++- scripts/explain_manifest/fixtures/el9-amd64.txt | 3 ++- scripts/explain_manifest/fixtures/el9-arm64.txt | 3 ++- .../explain_manifest/fixtures/ubuntu-20.04-amd64.txt | 2 +- .../explain_manifest/fixtures/ubuntu-22.04-amd64.txt | 3 ++- .../explain_manifest/fixtures/ubuntu-22.04-arm64.txt | 3 ++- 16 files changed, 34 insertions(+), 18 deletions(-) create mode 100644 changelog/unreleased/kong/bump_openssl_3.1.4.yml diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 6aaae1c33bf0..64d03425bc52 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -1,6 +1,9 @@ name: Package & Release # The workflow to build and release official Kong packages and images. +# +# TODO: +# Do not bump the version of actions/checkout to v4 before dropping rhel7 and amazonlinux2. 
on: # yamllint disable-line rule:truthy pull_request: @@ -56,7 +59,7 @@ jobs: arch: ${{ steps.build-info.outputs.arch }} steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v3 - name: Build Info id: build-info run: | @@ -173,7 +176,7 @@ jobs: apt install -y wget libz-dev libssl-dev libcurl4-gnutls-dev libexpat1-dev sudo - name: Checkout Kong source code - uses: actions/checkout@v4 + uses: actions/checkout@v3 - name: Swap git with https run: git config --global url."https://github".insteadOf git://github @@ -284,7 +287,7 @@ jobs: include: "${{ fromJSON(needs.metadata.outputs.matrix)['build-packages'] }}" steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v3 - name: Download artifact uses: actions/download-artifact@v3 @@ -316,7 +319,7 @@ jobs: include: "${{ fromJSON(needs.metadata.outputs.matrix)['build-images'] }}" steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v3 - name: Download artifact uses: actions/download-artifact@v3 diff --git a/.requirements b/.requirements index 7c6d9812e057..29282e1b8aa6 100644 --- a/.requirements +++ b/.requirements @@ -2,7 +2,7 @@ KONG_PACKAGE_NAME=kong OPENRESTY=1.21.4.2 LUAROCKS=3.9.2 -OPENSSL=3.1.2 +OPENSSL=3.1.4 PCRE=8.45 LIBEXPAT=2.5.0 diff --git a/build/openresty/openssl/openssl_repositories.bzl b/build/openresty/openssl/openssl_repositories.bzl index 896863a21999..cab43702d1dd 100644 --- a/build/openresty/openssl/openssl_repositories.bzl +++ b/build/openresty/openssl/openssl_repositories.bzl @@ -11,7 +11,7 @@ def openssl_repositories(): http_archive, name = "openssl", build_file = "//build/openresty/openssl:BUILD.bazel", - sha256 = "a0ce69b8b97ea6a35b96875235aa453b966ba3cba8af2de23657d8b6767d6539", + sha256 = "840af5366ab9b522bde525826be3ef0fb0af81c6a9ebd84caa600fea1731eee3", strip_prefix = "openssl-" + version, urls = [ "https://www.openssl.org/source/openssl-" + version + ".tar.gz", diff --git a/changelog/unreleased/kong/bump_openssl_3.1.4.yml b/changelog/unreleased/kong/bump_openssl_3.1.4.yml new file mode 100644 index 000000000000..a615fc42ba99 --- /dev/null +++ b/changelog/unreleased/kong/bump_openssl_3.1.4.yml @@ -0,0 +1,2 @@ +message: bump OpenSSL to 3.1.4 +type: dependency diff --git a/scripts/explain_manifest/fixtures/amazonlinux-2-amd64.txt b/scripts/explain_manifest/fixtures/amazonlinux-2-amd64.txt index c8cbf3e5bd32..d3bda3284080 100644 --- a/scripts/explain_manifest/fixtures/amazonlinux-2-amd64.txt +++ b/scripts/explain_manifest/fixtures/amazonlinux-2-amd64.txt @@ -202,6 +202,7 @@ - lua-resty-events - lua-resty-lmdb - ngx_wasm_module - OpenSSL : OpenSSL 3.1.2 1 Aug 2023 + OpenSSL : OpenSSL 3.1.4 24 Oct 2023 DWARF : True DWARF - ngx_http_request_t related DWARF DIEs: True + diff --git a/scripts/explain_manifest/fixtures/amazonlinux-2023-amd64.txt b/scripts/explain_manifest/fixtures/amazonlinux-2023-amd64.txt index 95eb40ea4ba9..e85d7e578527 100644 --- a/scripts/explain_manifest/fixtures/amazonlinux-2023-amd64.txt +++ b/scripts/explain_manifest/fixtures/amazonlinux-2023-amd64.txt @@ -188,6 +188,7 @@ - lua-resty-events - lua-resty-lmdb - ngx_wasm_module - OpenSSL : OpenSSL 3.1.2 1 Aug 2023 + OpenSSL : OpenSSL 3.1.4 24 Oct 2023 DWARF : True DWARF - ngx_http_request_t related DWARF DIEs: True + diff --git a/scripts/explain_manifest/fixtures/amazonlinux-2023-arm64.txt b/scripts/explain_manifest/fixtures/amazonlinux-2023-arm64.txt index e352ddf9485a..0db6e70743c3 100644 --- a/scripts/explain_manifest/fixtures/amazonlinux-2023-arm64.txt +++ b/scripts/explain_manifest/fixtures/amazonlinux-2023-arm64.txt @@ 
-170,6 +170,7 @@ - lua-resty-events - lua-resty-lmdb - ngx_wasm_module - OpenSSL : OpenSSL 3.1.2 1 Aug 2023 + OpenSSL : OpenSSL 3.1.4 24 Oct 2023 DWARF : True DWARF - ngx_http_request_t related DWARF DIEs: True + diff --git a/scripts/explain_manifest/fixtures/debian-10-amd64.txt b/scripts/explain_manifest/fixtures/debian-10-amd64.txt index 95d532bef36b..013e8586181c 100644 --- a/scripts/explain_manifest/fixtures/debian-10-amd64.txt +++ b/scripts/explain_manifest/fixtures/debian-10-amd64.txt @@ -202,6 +202,7 @@ - lua-resty-events - lua-resty-lmdb - ngx_wasm_module - OpenSSL : OpenSSL 3.1.2 1 Aug 2023 + OpenSSL : OpenSSL 3.1.4 24 Oct 2023 DWARF : True DWARF - ngx_http_request_t related DWARF DIEs: True + diff --git a/scripts/explain_manifest/fixtures/debian-11-amd64.txt b/scripts/explain_manifest/fixtures/debian-11-amd64.txt index 253e43cd2a53..fe586a0c0912 100644 --- a/scripts/explain_manifest/fixtures/debian-11-amd64.txt +++ b/scripts/explain_manifest/fixtures/debian-11-amd64.txt @@ -190,6 +190,7 @@ - lua-resty-events - lua-resty-lmdb - ngx_wasm_module - OpenSSL : OpenSSL 3.1.2 1 Aug 2023 + OpenSSL : OpenSSL 3.1.4 24 Oct 2023 DWARF : True DWARF - ngx_http_request_t related DWARF DIEs: True + diff --git a/scripts/explain_manifest/fixtures/el7-amd64.txt b/scripts/explain_manifest/fixtures/el7-amd64.txt index c8cbf3e5bd32..d3bda3284080 100644 --- a/scripts/explain_manifest/fixtures/el7-amd64.txt +++ b/scripts/explain_manifest/fixtures/el7-amd64.txt @@ -202,6 +202,7 @@ - lua-resty-events - lua-resty-lmdb - ngx_wasm_module - OpenSSL : OpenSSL 3.1.2 1 Aug 2023 + OpenSSL : OpenSSL 3.1.4 24 Oct 2023 DWARF : True DWARF - ngx_http_request_t related DWARF DIEs: True + diff --git a/scripts/explain_manifest/fixtures/el8-amd64.txt b/scripts/explain_manifest/fixtures/el8-amd64.txt index 7bbdad456097..c7933610e0a3 100644 --- a/scripts/explain_manifest/fixtures/el8-amd64.txt +++ b/scripts/explain_manifest/fixtures/el8-amd64.txt @@ -201,6 +201,7 @@ - lua-resty-events - lua-resty-lmdb - ngx_wasm_module - OpenSSL : OpenSSL 3.1.2 1 Aug 2023 + OpenSSL : OpenSSL 3.1.4 24 Oct 2023 DWARF : True DWARF - ngx_http_request_t related DWARF DIEs: True + diff --git a/scripts/explain_manifest/fixtures/el9-amd64.txt b/scripts/explain_manifest/fixtures/el9-amd64.txt index eca28e4a403f..e4dbbaa65379 100644 --- a/scripts/explain_manifest/fixtures/el9-amd64.txt +++ b/scripts/explain_manifest/fixtures/el9-amd64.txt @@ -188,6 +188,7 @@ - lua-resty-events - lua-resty-lmdb - ngx_wasm_module - OpenSSL : OpenSSL 3.1.2 1 Aug 2023 + OpenSSL : OpenSSL 3.1.4 24 Oct 2023 DWARF : True DWARF - ngx_http_request_t related DWARF DIEs: True + diff --git a/scripts/explain_manifest/fixtures/el9-arm64.txt b/scripts/explain_manifest/fixtures/el9-arm64.txt index e352ddf9485a..0db6e70743c3 100644 --- a/scripts/explain_manifest/fixtures/el9-arm64.txt +++ b/scripts/explain_manifest/fixtures/el9-arm64.txt @@ -170,6 +170,7 @@ - lua-resty-events - lua-resty-lmdb - ngx_wasm_module - OpenSSL : OpenSSL 3.1.2 1 Aug 2023 + OpenSSL : OpenSSL 3.1.4 24 Oct 2023 DWARF : True DWARF - ngx_http_request_t related DWARF DIEs: True + diff --git a/scripts/explain_manifest/fixtures/ubuntu-20.04-amd64.txt b/scripts/explain_manifest/fixtures/ubuntu-20.04-amd64.txt index a7184560750f..e4b2a5396464 100644 --- a/scripts/explain_manifest/fixtures/ubuntu-20.04-amd64.txt +++ b/scripts/explain_manifest/fixtures/ubuntu-20.04-amd64.txt @@ -194,6 +194,6 @@ - lua-resty-events - lua-resty-lmdb - ngx_wasm_module - OpenSSL : OpenSSL 3.1.2 1 Aug 2023 + OpenSSL : OpenSSL 3.1.4 24 Oct 2023 
DWARF : True DWARF - ngx_http_request_t related DWARF DIEs: True diff --git a/scripts/explain_manifest/fixtures/ubuntu-22.04-amd64.txt b/scripts/explain_manifest/fixtures/ubuntu-22.04-amd64.txt index 68de4cc4203f..6d22a3f711b0 100644 --- a/scripts/explain_manifest/fixtures/ubuntu-22.04-amd64.txt +++ b/scripts/explain_manifest/fixtures/ubuntu-22.04-amd64.txt @@ -181,6 +181,7 @@ - lua-resty-events - lua-resty-lmdb - ngx_wasm_module - OpenSSL : OpenSSL 3.1.2 1 Aug 2023 + OpenSSL : OpenSSL 3.1.4 24 Oct 2023 DWARF : True DWARF - ngx_http_request_t related DWARF DIEs: True + diff --git a/scripts/explain_manifest/fixtures/ubuntu-22.04-arm64.txt b/scripts/explain_manifest/fixtures/ubuntu-22.04-arm64.txt index b66889974bd0..8dc1f94a1b9a 100644 --- a/scripts/explain_manifest/fixtures/ubuntu-22.04-arm64.txt +++ b/scripts/explain_manifest/fixtures/ubuntu-22.04-arm64.txt @@ -179,6 +179,7 @@ - lua-resty-events - lua-resty-lmdb - ngx_wasm_module - OpenSSL : OpenSSL 3.1.2 1 Aug 2023 + OpenSSL : OpenSSL 3.1.4 24 Oct 2023 DWARF : True DWARF - ngx_http_request_t related DWARF DIEs: True + From 12324a16ab1a9d53a14db3db4af87e3a9aaa4d0c Mon Sep 17 00:00:00 2001 From: Aapo Talvensaari Date: Wed, 25 Oct 2023 16:47:53 +0200 Subject: [PATCH 028/249] fix(vault): make it possible to use vault references in declarative config (#11843) ### Summary Warmup cache on `init` where we have Lua `coroutines` available so that it won't happen on `init_worker` where we don't have them (and cannot use e.g. lua-resty-http). See KAG-2620 and FTI-5080. Signed-off-by: Aapo Talvensaari * Update spec/02-integration/02-cmd/02-start_stop_spec.lua --------- Signed-off-by: Aapo Talvensaari Co-authored-by: Samuele --- .../unreleased/kong/vault-declarative.yml | 3 ++ kong/init.lua | 2 + kong/pdk/vault.lua | 22 +++++++++++ .../02-cmd/02-start_stop_spec.lua | 37 ++++++++++++++++++- .../kong/vaults/mocksocket/init.lua | 37 +++++++++++++++++++ .../kong/vaults/mocksocket/schema.lua | 13 +++++++ 6 files changed, 113 insertions(+), 1 deletion(-) create mode 100644 changelog/unreleased/kong/vault-declarative.yml create mode 100644 spec/fixtures/custom_vaults/kong/vaults/mocksocket/init.lua create mode 100644 spec/fixtures/custom_vaults/kong/vaults/mocksocket/schema.lua diff --git a/changelog/unreleased/kong/vault-declarative.yml b/changelog/unreleased/kong/vault-declarative.yml new file mode 100644 index 000000000000..9ae6d9b22086 --- /dev/null +++ b/changelog/unreleased/kong/vault-declarative.yml @@ -0,0 +1,3 @@ +message: Vault references can be used in Dbless mode in declarative config +type: bugfix +scope: Core diff --git a/kong/init.lua b/kong/init.lua index 06a22517e036..2f02d73f9e27 100644 --- a/kong/init.lua +++ b/kong/init.lua @@ -717,6 +717,8 @@ function Kong.init() if not declarative_entities then error(err) end + + kong.vault.warmup(declarative_entities) end else diff --git a/kong/pdk/vault.lua b/kong/pdk/vault.lua index 99e975f6e3f0..8b7c48d74175 100644 --- a/kong/pdk/vault.lua +++ b/kong/pdk/vault.lua @@ -1564,6 +1564,28 @@ local function new(self) init_worker() end + --- + -- Warmups vault caches from config. 
+ -- + -- @local + -- @function kong.vault.warmup + function _VAULT.warmup(input) + for k, v in pairs(input) do + local kt = type(k) + if kt == "table" then + _VAULT.warmup(k) + elseif kt == "string" and is_reference(k) then + get(k) + end + local vt = type(v) + if vt == "table" then + _VAULT.warmup(v) + elseif vt == "string" and is_reference(v) then + get(v) + end + end + end + if get_phase() == "init" then init() end diff --git a/spec/02-integration/02-cmd/02-start_stop_spec.lua b/spec/02-integration/02-cmd/02-start_stop_spec.lua index 01540451b2aa..2c831503a7ec 100644 --- a/spec/02-integration/02-cmd/02-start_stop_spec.lua +++ b/spec/02-integration/02-cmd/02-start_stop_spec.lua @@ -663,8 +663,43 @@ describe("kong start/stop #" .. strategy, function() assert.matches("in 'name': invalid value '@gobo': the only accepted ascii characters are alphanumerics or ., -, _, and ~", err, nil, true) assert.matches("in entry 2 of 'hosts': invalid hostname: \\\\99", err, nil, true) end) - end + it("dbless can reference secrets in declarative configuration", function() + local yaml_file = helpers.make_yaml_file [[ + _format_version: "3.0" + _transform: true + plugins: + - name: session + instance_name: session + config: + secret: "{vault://mocksocket/test}" + ]] + + finally(function() + os.remove(yaml_file) + end) + + helpers.setenv("KONG_LUA_PATH_OVERRIDE", "./spec/fixtures/custom_vaults/?.lua;./spec/fixtures/custom_vaults/?/init.lua;;") + helpers.get_db_utils(strategy, { + "vaults", + }, { + "session" + }, { + "mocksocket" + }) + + local ok, err = helpers.start_kong({ + database = "off", + declarative_config = yaml_file, + vaults = "mocksocket", + plugins = "session" + }) + + assert.truthy(ok) + assert.not_matches("error", err) + assert.logfile().has.no.line("[error]", true, 0) + end) + end end) describe("deprecated properties", function() diff --git a/spec/fixtures/custom_vaults/kong/vaults/mocksocket/init.lua b/spec/fixtures/custom_vaults/kong/vaults/mocksocket/init.lua new file mode 100644 index 000000000000..119fe23a7618 --- /dev/null +++ b/spec/fixtures/custom_vaults/kong/vaults/mocksocket/init.lua @@ -0,0 +1,37 @@ +local env = require "kong.vaults.env" +local http = require "resty.luasocket.http" + + +local assert = assert +local getenv = os.getenv + + +local function init() + env.init() + assert(getenv("KONG_PROCESS_SECRETS") == nil, "KONG_PROCESS_SECRETS environment variable found") + assert(env.get({}, "KONG_PROCESS_SECRETS") == nil, "KONG_PROCESS_SECRETS environment variable found") +end + + +local function get(conf, resource, version) + local client, err = http.new() + if not client then + return nil, err + end + + client:set_timeouts(20000, 20000, 20000) + assert(client:request_uri("http://mockbin.org/headers", { + headers = { + Accept = "application/json", + }, + })) + + return env.get(conf, resource, version) +end + + +return { + VERSION = "1.0.0", + init = init, + get = get, +} diff --git a/spec/fixtures/custom_vaults/kong/vaults/mocksocket/schema.lua b/spec/fixtures/custom_vaults/kong/vaults/mocksocket/schema.lua new file mode 100644 index 000000000000..90e86d33c371 --- /dev/null +++ b/spec/fixtures/custom_vaults/kong/vaults/mocksocket/schema.lua @@ -0,0 +1,13 @@ +return { + name = "mocksocket", + fields = { + { + config = { + type = "record", + fields = { + { prefix = { type = "string", match = [[^[%a_][%a%d_]*$]] } }, + }, + }, + }, + }, +} From e3f87d5b21463693f0d3d7e6d4a2314568f118f6 Mon Sep 17 00:00:00 2001 From: Datong Sun Date: Thu, 26 Oct 2023 14:23:01 +0800 Subject: [PATCH 
029/249] docs(contributing): Travis CI is long gone (#11841) --- CONTRIBUTING.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index d12c4bf8fc0e..03eca126c562 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -295,7 +295,7 @@ accepted types are: is too big to be considered just `perf` - **chore**: Maintenance changes related to code cleaning that isn't considered part of a refactor, build process updates, dependency bumps, or - auxiliary tools and libraries updates (LuaRocks, Travis-ci, etc...). + auxiliary tools and libraries updates (LuaRocks, GitHub Actions, etc...). [Back to TOC](#table-of-contents) From 3fed60be7464b329da034b7ee9462779d5ce3b42 Mon Sep 17 00:00:00 2001 From: Nathan Date: Thu, 26 Oct 2023 14:25:37 +0800 Subject: [PATCH 030/249] fix(tcp-log):repeated sslhandshake in [tcp-log] plugin (#11803) * FIX:Repeated sslhandshake in [tcp-log] plugin * add changelog * update message as the comments * Update changelog/unreleased/kong/fix-tcp-log-sslhandshake.yml Co-authored-by: tzssangglass --------- Co-authored-by: tzssangglass --- .../unreleased/kong/fix-tcp-log-sslhandshake.yml | 3 +++ kong/plugins/tcp-log/handler.lua | 11 +++++++++-- 2 files changed, 12 insertions(+), 2 deletions(-) create mode 100644 changelog/unreleased/kong/fix-tcp-log-sslhandshake.yml diff --git a/changelog/unreleased/kong/fix-tcp-log-sslhandshake.yml b/changelog/unreleased/kong/fix-tcp-log-sslhandshake.yml new file mode 100644 index 000000000000..f712729860e3 --- /dev/null +++ b/changelog/unreleased/kong/fix-tcp-log-sslhandshake.yml @@ -0,0 +1,3 @@ +message: "**tcp-log**: fix an issue that repeated ssl handshake" +type: bugfix +scope: Plugin diff --git a/kong/plugins/tcp-log/handler.lua b/kong/plugins/tcp-log/handler.lua index 3bfc9c7c3bfd..06fddb1a0765 100644 --- a/kong/plugins/tcp-log/handler.lua +++ b/kong/plugins/tcp-log/handler.lua @@ -31,8 +31,15 @@ local function log(premature, conf, message) return end - if conf.tls then - ok, err = sock:sslhandshake(true, conf.tls_sni, false) + local times, err = sock:getreusedtimes() + if not times then + kong.log.err("failed to get socket reused time to ", host, ":", tostring(port), ": ", err) + sock:close() + return + end + + if conf.tls and times == 0 then + ok, err = sock:sslhandshake(false, conf.tls_sni, false) if not ok then kong.log.err("failed to perform TLS handshake to ", host, ":", port, ": ", err) sock:close() From 1b6c394ad8d69a5925a8b9bcc62a38364a371ce8 Mon Sep 17 00:00:00 2001 From: Aapo Talvensaari Date: Thu, 26 Oct 2023 10:46:03 +0200 Subject: [PATCH 031/249] fix(vault): resurrect positive results in lru cache for ttl + resurrect ttl (#11815) ### Summary The vault is rotating secrets on every minute which updates the shared dictionary cache with new values, both negative and positive results. This commit changes the Negative results handling on LRU. Previously the LRU was cleared for negative results, and we just used to cache for config.ttl amount of time. This commit changes it so that LRU values are deleted, and we cache things config.ttl + config.resurrect_ttl amount of time in lru cache too. It was reported by @Hayk-S on KAG-2833. 
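
A rough sketch of the new TTL arithmetic for the positive-result case, using a hypothetical helper name (`cache_ttls`) and placeholder values for `DAO_MAX_TTL` / `SECRETS_CACHE_MIN_TTL` (the real constants live in `kong/pdk/vault.lua`); it only mirrors the `get_cache_value_and_ttl` change in the diff below:

```lua
-- Illustrative sketch, not the PDK implementation.
local DAO_MAX_TTL = 10 * 365 * 24 * 60 * 60 -- placeholder: "effectively forever"
local SECRETS_CACHE_MIN_TTL = 1             -- placeholder: one rotation interval

local function cache_ttls(ttl, resurrect_ttl)
  if resurrect_ttl then
    -- keep the value in the LRU for ttl + resurrect_ttl as well, so a secret
    -- deleted from the vault keeps being served until the resurrect window ends
    local lru_ttl = math.min(ttl + resurrect_ttl, DAO_MAX_TTL)
    local shdict_ttl = math.max(lru_ttl, SECRETS_CACHE_MIN_TTL)
    return lru_ttl, shdict_ttl
  end

  -- without a resurrect ttl the shared dictionary holds the value until the
  -- next rotation refreshes it
  return ttl, DAO_MAX_TTL
end

print(cache_ttls(2, 2)) --> 4   4
```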
Signed-off-by: Aapo Talvensaari --- changelog/unreleased/kong/vault-resurrect.yml | 3 + kong/pdk/vault.lua | 33 ++- spec/02-integration/13-vaults/05-ttl_spec.lua | 2 +- .../13-vaults/07-resurrect_spec.lua | 240 ++++++++++++++++++ .../custom_vaults/kong/vaults/test/schema.lua | 6 + 5 files changed, 271 insertions(+), 13 deletions(-) create mode 100644 changelog/unreleased/kong/vault-resurrect.yml create mode 100644 spec/02-integration/13-vaults/07-resurrect_spec.lua diff --git a/changelog/unreleased/kong/vault-resurrect.yml b/changelog/unreleased/kong/vault-resurrect.yml new file mode 100644 index 000000000000..7dc1b5d9ee1e --- /dev/null +++ b/changelog/unreleased/kong/vault-resurrect.yml @@ -0,0 +1,3 @@ +message: Vault resurrect time is respected in case a vault secret is deleted from a vault +type: bugfix +scope: Core diff --git a/kong/pdk/vault.lua b/kong/pdk/vault.lua index 8b7c48d74175..efc306d48915 100644 --- a/kong/pdk/vault.lua +++ b/kong/pdk/vault.lua @@ -29,6 +29,7 @@ local replace_dashes = string_tools.replace_dashes local ngx = ngx local get_phase = ngx.get_phase local max = math.max +local min = math.min local fmt = string.format local sub = string.sub local byte = string.byte @@ -754,15 +755,25 @@ local function new(self) local function get_cache_value_and_ttl(value, config, ttl) local cache_value, shdict_ttl, lru_ttl if value then - -- adjust ttl to the minimum and maximum values configured - lru_ttl = adjust_ttl(ttl, config) - shdict_ttl = max(lru_ttl + (config.resurrect_ttl or DAO_MAX_TTL), SECRETS_CACHE_MIN_TTL) cache_value = value + -- adjust ttl to the minimum and maximum values configured + ttl = adjust_ttl(ttl, config) + + if config.resurrect_ttl then + lru_ttl = min(ttl + config.resurrect_ttl, DAO_MAX_TTL) + shdict_ttl = max(lru_ttl, SECRETS_CACHE_MIN_TTL) + + else + lru_ttl = ttl + shdict_ttl = DAO_MAX_TTL + end + else + cache_value = NEGATIVELY_CACHED_VALUE + -- negatively cached values will be rotated on each rotation interval shdict_ttl = max(config.neg_ttl or 0, SECRETS_CACHE_MIN_TTL) - cache_value = NEGATIVELY_CACHED_VALUE end return cache_value, shdict_ttl, lru_ttl @@ -795,14 +806,13 @@ local function new(self) return nil, cache_err end - if not value then - LRU:delete(reference) + if cache_value == NEGATIVELY_CACHED_VALUE then return nil, fmt("could not get value from external vault (%s)", err) end - LRU:set(reference, value, lru_ttl) + LRU:set(reference, cache_value, lru_ttl) - return value + return cache_value end @@ -824,8 +834,7 @@ local function new(self) -- @usage -- local value, err = get(reference, cache_only) local function get(reference, cache_only) - -- the LRU stale value is ignored as the resurrection logic - -- is deferred to the shared dictionary + -- the LRU stale value is ignored local value = LRU:get(reference) if value then return value @@ -1360,8 +1369,8 @@ local function new(self) return nil, cache_err end - if value then - LRU:set(reference, value, lru_ttl) + if cache_value ~= NEGATIVELY_CACHED_VALUE then + LRU:set(reference, cache_value, lru_ttl) end return true diff --git a/spec/02-integration/13-vaults/05-ttl_spec.lua b/spec/02-integration/13-vaults/05-ttl_spec.lua index 21736bb94b18..e6f65fd56465 100644 --- a/spec/02-integration/13-vaults/05-ttl_spec.lua +++ b/spec/02-integration/13-vaults/05-ttl_spec.lua @@ -64,7 +64,7 @@ local VAULTS = { }, create_secret = function(self, _, value) - -- Currently, crate_secret is called _before_ starting Kong. + -- Currently, create_secret is called _before_ starting Kong. 
-- -- This means our backend won't be available yet because it is -- piggy-backing on Kong as an HTTP mock fixture. diff --git a/spec/02-integration/13-vaults/07-resurrect_spec.lua b/spec/02-integration/13-vaults/07-resurrect_spec.lua new file mode 100644 index 000000000000..d91bbcabd86b --- /dev/null +++ b/spec/02-integration/13-vaults/07-resurrect_spec.lua @@ -0,0 +1,240 @@ +local helpers = require "spec.helpers" + +-- using the full path so that we don't have to modify package.path in +-- this context +local test_vault = require "spec.fixtures.custom_vaults.kong.vaults.test" + +local CUSTOM_VAULTS = "./spec/fixtures/custom_vaults" +local CUSTOM_PLUGINS = "./spec/fixtures/custom_plugins" + +local LUA_PATH = CUSTOM_VAULTS .. "/?.lua;" .. + CUSTOM_VAULTS .. "/?/init.lua;" .. + CUSTOM_PLUGINS .. "/?.lua;" .. + CUSTOM_PLUGINS .. "/?/init.lua;;" + +local DUMMY_HEADER = "Dummy-Plugin" +local fmt = string.format + + + +--- A vault test harness is a driver for vault backends, which implements +--- all the necessary glue for initializing a vault backend and performing +--- secret read/write operations. +--- +--- All functions defined here are called as "methods" (e.g. harness:fn()), so +--- it is permitted to keep state on the harness object (self). +--- +---@class vault_test_harness +--- +---@field name string +--- +--- this table is passed directly to kong.db.vaults:insert() +---@field config table +--- +--- create_secret() is called once per test run for a given secret +---@field create_secret fun(self: vault_test_harness, secret: string, value: string, opts?: table) +--- +--- update_secret() may be called more than once per test run for a given secret +---@field update_secret fun(self: vault_test_harness, secret: string, value: string, opts?: table) +--- +--- setup() is called before kong is started and before any DB entities +--- have been created and is best used for things like validating backend +--- credentials and establishing a connection to a backend +---@field setup fun(self: vault_test_harness) +--- +--- teardown() is exactly what you'd expect +---@field teardown fun(self: vault_test_harness) +--- +--- fixtures() output is passed directly to `helpers.start_kong()` +---@field fixtures fun(self: vault_test_harness):table|nil +--- +--- +---@field prefix string # generated by the test suite +---@field host string # generated by the test suite + + +---@type vault_test_harness[] +local VAULTS = { + { + name = "test", + + config = { + default_value = "DEFAULT", + default_value_ttl = 1, + }, + + create_secret = function(self, _, value) + -- Currently, create_secret is called _before_ starting Kong. + -- + -- This means our backend won't be available yet because it is + -- piggy-backing on Kong as an HTTP mock fixture. + -- + -- We can, however, inject a default value into our configuration. + self.config.default_value = value + end, + + update_secret = function(_, secret, value, opts) + return test_vault.client.put(secret, value, opts) + end, + + delete_secret = function(_, secret) + return test_vault.client.delete(secret) + end, + + fixtures = function() + return { + http_mock = { + test_vault = test_vault.http_mock, + } + } + end, + }, +} + + +local noop = function(...) end + +for _, vault in ipairs(VAULTS) do + -- fill out some values that we'll use in route/service/plugin config + vault.prefix = vault.name .. "-ttl-test" + vault.host = vault.name .. 
".vault-ttl.test" + + -- ...and fill out non-required methods + vault.setup = vault.setup or noop + vault.teardown = vault.teardown or noop + vault.fixtures = vault.fixtures or noop +end + + +for _, strategy in helpers.each_strategy() do +for _, vault in ipairs(VAULTS) do + + +describe("vault resurrect_ttl and rotation (#" .. strategy .. ") #" .. vault.name, function() + local client + local secret = "my-secret" + + + local function http_get(path) + path = path or "/" + + local res = client:get(path, { + headers = { + host = assert(vault.host), + }, + }) + + assert.response(res).has.status(200) + + return res + end + + + lazy_setup(function() + helpers.setenv("KONG_LUA_PATH_OVERRIDE", LUA_PATH) + helpers.setenv("KONG_VAULT_ROTATION_INTERVAL", "1") + + helpers.test_conf.loaded_plugins = { + dummy = true, + } + + vault:setup() + vault:create_secret(secret, "init") + + local bp = helpers.get_db_utils(strategy, + { "vaults", "routes", "services", "plugins" }, + { "dummy" }, + { vault.name }) + + + assert(bp.vaults:insert({ + name = vault.name, + prefix = vault.prefix, + config = vault.config, + })) + + local route = assert(bp.routes:insert({ + name = vault.host, + hosts = { vault.host }, + paths = { "/" }, + service = assert(bp.services:insert()), + })) + + + -- used by the plugin config test case + assert(bp.plugins:insert({ + name = "dummy", + config = { + resp_header_value = fmt("{vault://%s/%s?ttl=%d&resurrect_ttl=%d}", + vault.prefix, secret, 2, 2), + }, + route = { id = route.id }, + })) + + assert(helpers.start_kong({ + database = strategy, + nginx_conf = "spec/fixtures/custom_nginx.template", + vaults = vault.name, + plugins = "dummy", + log_level = "info", + }, nil, nil, vault:fixtures() )) + + client = helpers.proxy_client() + end) + + + lazy_teardown(function() + if client then + client:close() + end + + helpers.stop_kong(nil, true) + vault:teardown() + + helpers.unsetenv("KONG_LUA_PATH_OVERRIDE") + end) + + + it("resurrects plugin config references when secret is deleted (backend: #" .. vault.name .. ")", function() + local function check_plugin_secret(expect, ttl, leeway) + leeway = leeway or 0.25 -- 25% + + local timeout = ttl + (ttl * leeway) + + assert + .with_timeout(timeout) + .with_step(0.5) + .eventually(function() + local res = http_get("/") + local value + if expect == "" then + value = res.headers[DUMMY_HEADER] or "" + if value == "" then + return true + end + + else + value = assert.response(res).has.header(DUMMY_HEADER) + if value == expect then + return true + end + end + + return nil, { expected = expect, got = value } + end) + .is_truthy("expected plugin secret to be updated to '" .. tostring(expect) .. "' " + .. "within " .. tostring(timeout) .. 
" seconds") + end + + vault:update_secret(secret, "old", { ttl = 2, resurrect_ttl = 2 }) + check_plugin_secret("old", 5) + vault:delete_secret(secret) + ngx.sleep(2.5) + check_plugin_secret("old", 5) + check_plugin_secret("", 5) + end) +end) + + +end -- each vault backend +end -- each strategy diff --git a/spec/fixtures/custom_vaults/kong/vaults/test/schema.lua b/spec/fixtures/custom_vaults/kong/vaults/test/schema.lua index 4b4a335e9cb8..019179b2a5ab 100644 --- a/spec/fixtures/custom_vaults/kong/vaults/test/schema.lua +++ b/spec/fixtures/custom_vaults/kong/vaults/test/schema.lua @@ -1,3 +1,6 @@ +local typedefs = require "kong.db.schema.typedefs" + + return { name = "test", fields = { @@ -7,6 +10,9 @@ return { fields = { { default_value = { type = "string", required = false } }, { default_value_ttl = { type = "number", required = false } }, + { ttl = typedefs.ttl }, + { neg_ttl = typedefs.ttl }, + { resurrect_ttl = typedefs.ttl }, }, }, }, From ad1af8946957353822e72c1c48407ee48dbb6a64 Mon Sep 17 00:00:00 2001 From: Qi Date: Mon, 23 Oct 2023 14:43:34 +0800 Subject: [PATCH 032/249] fix(request-debugging): fix can't set root properties when enable the phase filter --- kong/timing/init.lua | 4 ---- 1 file changed, 4 deletions(-) diff --git a/kong/timing/init.lua b/kong/timing/init.lua index 12328e6978cc..8b15304c319b 100644 --- a/kong/timing/init.lua +++ b/kong/timing/init.lua @@ -142,10 +142,6 @@ end function _M.set_root_context_prop(k, v) - if not should_run() then - return - end - ngx.ctx.req_trace_ctx:set_root_context_prop(k, v) end From f59e36b554b6071a9213deea6947eb804dd7a6f6 Mon Sep 17 00:00:00 2001 From: Qi Date: Mon, 23 Oct 2023 16:07:10 +0800 Subject: [PATCH 033/249] chore: rename `ctx.is_timing_enabled` to `ctx.has_timing` to maintain synchronization with the EE code --- kong/init.lua | 92 +++++++++++++++++++-------------------- kong/resty/dns/client.lua | 2 +- kong/runloop/handler.lua | 8 ++-- 3 files changed, 51 insertions(+), 51 deletions(-) diff --git a/kong/init.lua b/kong/init.lua index 2f02d73f9e27..8fb8f605be13 100644 --- a/kong/init.lua +++ b/kong/init.lua @@ -319,9 +319,9 @@ local function execute_global_plugins_iterator(plugins_iterator, phase, ctx) end local old_ws = ctx.workspace - local is_timing_enabled = ctx.is_timing_enabled + local has_timing = ctx.has_timing - if is_timing_enabled then + if has_timing then req_dyn_hook_run_hooks(ctx, "timing", "before:plugin_iterator") end @@ -333,13 +333,13 @@ local function execute_global_plugins_iterator(plugins_iterator, phase, ctx) setup_plugin_context(ctx, plugin, configuration) - if is_timing_enabled then + if has_timing then req_dyn_hook_run_hooks(ctx, "timing", "before:plugin", plugin.name, ctx.plugin_id) end plugin.handler[phase](plugin.handler, configuration) - if is_timing_enabled then + if has_timing then req_dyn_hook_run_hooks(ctx, "timing", "after:plugin") end @@ -350,7 +350,7 @@ local function execute_global_plugins_iterator(plugins_iterator, phase, ctx) end end - if is_timing_enabled then + if has_timing then req_dyn_hook_run_hooks(ctx, "timing", "after:plugin_iterator") end end @@ -369,9 +369,9 @@ local function execute_collecting_plugins_iterator(plugins_iterator, phase, ctx) ctx.delay_response = true local old_ws = ctx.workspace - local is_timing_enabled = ctx.is_timing_enabled + local has_timing = ctx.has_timing - if is_timing_enabled then + if has_timing then req_dyn_hook_run_hooks(ctx, "timing", "before:plugin_iterator") end @@ -384,14 +384,14 @@ local function 
execute_collecting_plugins_iterator(plugins_iterator, phase, ctx) setup_plugin_context(ctx, plugin, configuration) - if is_timing_enabled then + if has_timing then req_dyn_hook_run_hooks(ctx, "timing", "before:plugin", plugin.name, ctx.plugin_id) end local co = coroutine.create(plugin.handler[phase]) local cok, cerr = coroutine.resume(co, plugin.handler, configuration) - if is_timing_enabled then + if has_timing then req_dyn_hook_run_hooks(ctx, "timing", "after:plugin") end @@ -421,7 +421,7 @@ local function execute_collecting_plugins_iterator(plugins_iterator, phase, ctx) end end - if is_timing_enabled then + if has_timing then req_dyn_hook_run_hooks(ctx, "timing", "after:plugin_iterator") end @@ -440,9 +440,9 @@ local function execute_collected_plugins_iterator(plugins_iterator, phase, ctx) end local old_ws = ctx.workspace - local is_timing_enabled = ctx.is_timing_enabled + local has_timing = ctx.has_timing - if is_timing_enabled then + if has_timing then req_dyn_hook_run_hooks(ctx, "timing", "before:plugin_iterator") end @@ -454,13 +454,13 @@ local function execute_collected_plugins_iterator(plugins_iterator, phase, ctx) setup_plugin_context(ctx, plugin, configuration) - if is_timing_enabled then + if has_timing then req_dyn_hook_run_hooks(ctx, "timing", "before:plugin", plugin.name, ctx.plugin_id) end plugin.handler[phase](plugin.handler, configuration) - if is_timing_enabled then + if has_timing then req_dyn_hook_run_hooks(ctx, "timing", "after:plugin") end @@ -471,7 +471,7 @@ local function execute_collected_plugins_iterator(plugins_iterator, phase, ctx) end end - if is_timing_enabled then + if has_timing then req_dyn_hook_run_hooks(ctx, "timing", "after:plugin_iterator") end end @@ -1084,16 +1084,16 @@ function Kong.rewrite() end ctx.KONG_PHASE = PHASES.rewrite - local is_timing_enabled + local has_timing req_dyn_hook_run_hooks(ctx, "timing:auth", "auth") if req_dyn_hook_is_group_enabled("timing") then - ctx.is_timing_enabled = true - is_timing_enabled = true + ctx.has_timing = true + has_timing = true end - if is_timing_enabled then + if has_timing then req_dyn_hook_run_hooks(ctx, "timing", "before:rewrite") end @@ -1122,7 +1122,7 @@ function Kong.rewrite() ctx.KONG_REWRITE_ENDED_AT = get_updated_now_ms() ctx.KONG_REWRITE_TIME = ctx.KONG_REWRITE_ENDED_AT - ctx.KONG_REWRITE_START - if is_timing_enabled then + if has_timing then req_dyn_hook_run_hooks(ctx, "timing", "after:rewrite") end end @@ -1130,9 +1130,9 @@ end function Kong.access() local ctx = ngx.ctx - local is_timing_enabled = ctx.is_timing_enabled + local has_timing = ctx.has_timing - if is_timing_enabled then + if has_timing then req_dyn_hook_run_hooks(ctx, "timing", "before:access") end @@ -1158,7 +1158,7 @@ function Kong.access() ctx.KONG_ACCESS_TIME = ctx.KONG_ACCESS_ENDED_AT - ctx.KONG_ACCESS_START ctx.KONG_RESPONSE_LATENCY = ctx.KONG_ACCESS_ENDED_AT - ctx.KONG_PROCESSING_START - if is_timing_enabled then + if has_timing then req_dyn_hook_run_hooks(ctx, "timing", "after:access") end @@ -1174,7 +1174,7 @@ function Kong.access() ctx.buffered_proxying = nil - if is_timing_enabled then + if has_timing then req_dyn_hook_run_hooks(ctx, "timing", "after:access") end @@ -1195,7 +1195,7 @@ function Kong.access() local version = ngx.req.http_version() local upgrade = var.upstream_upgrade or "" if version < 2 and upgrade == "" then - if is_timing_enabled then + if has_timing then req_dyn_hook_run_hooks(ctx, "timing", "after:access") end @@ -1211,7 +1211,7 @@ function Kong.access() ctx.buffered_proxying = nil end - if 
is_timing_enabled then + if has_timing then req_dyn_hook_run_hooks(ctx, "timing", "after:access") end end @@ -1219,9 +1219,9 @@ end function Kong.balancer() local ctx = ngx.ctx - local is_timing_enabled = ctx.is_timing_enabled + local has_timing = ctx.has_timing - if is_timing_enabled then + if has_timing then req_dyn_hook_run_hooks(ctx, "timing", "before:balancer") end @@ -1303,7 +1303,7 @@ function Kong.balancer() ctx.KONG_BALANCER_TIME = ctx.KONG_BALANCER_ENDED_AT - ctx.KONG_BALANCER_START ctx.KONG_PROXY_LATENCY = ctx.KONG_BALANCER_ENDED_AT - ctx.KONG_PROCESSING_START - if is_timing_enabled then + if has_timing then req_dyn_hook_run_hooks(ctx, "timing", "after:balancer") end @@ -1315,7 +1315,7 @@ function Kong.balancer() if not ok then ngx_log(ngx_ERR, "failed to set balancer Host header: ", err) - if is_timing_enabled then + if has_timing then req_dyn_hook_run_hooks(ctx, "timing", "after:balancer") end @@ -1370,7 +1370,7 @@ function Kong.balancer() ctx.KONG_BALANCER_TIME = ctx.KONG_BALANCER_ENDED_AT - ctx.KONG_BALANCER_START ctx.KONG_PROXY_LATENCY = ctx.KONG_BALANCER_ENDED_AT - ctx.KONG_PROCESSING_START - if is_timing_enabled then + if has_timing then req_dyn_hook_run_hooks(ctx, "timing", "after:balancer") end @@ -1410,7 +1410,7 @@ function Kong.balancer() -- start_time() is kept in seconds with millisecond resolution. ctx.KONG_PROXY_LATENCY = ctx.KONG_BALANCER_ENDED_AT - ctx.KONG_PROCESSING_START - if is_timing_enabled then + if has_timing then req_dyn_hook_run_hooks(ctx, "timing", "after:balancer") end end @@ -1437,9 +1437,9 @@ do function Kong.response() local ctx = ngx.ctx - local is_timing_enabled = ctx.is_timing_enabled + local has_timing = ctx.has_timing - if is_timing_enabled then + if has_timing then req_dyn_hook_run_hooks(ctx, "timing", "before:response") end @@ -1460,7 +1460,7 @@ do ctx.KONG_PHASE = PHASES.error ngx.status = res.status or 502 - if is_timing_enabled then + if has_timing then req_dyn_hook_run_hooks(ctx, "timing", "after:response") end @@ -1514,7 +1514,7 @@ do -- buffered response ngx.print(body) - if is_timing_enabled then + if has_timing then req_dyn_hook_run_hooks(ctx, "timing", "after:response") end @@ -1526,9 +1526,9 @@ end function Kong.header_filter() local ctx = ngx.ctx - local is_timing_enabled = ctx.is_timing_enabled + local has_timing = ctx.has_timing - if is_timing_enabled then + if has_timing then req_dyn_hook_run_hooks(ctx, "timing", "before:header_filter") end @@ -1600,7 +1600,7 @@ function Kong.header_filter() ctx.KONG_HEADER_FILTER_ENDED_AT = get_updated_now_ms() ctx.KONG_HEADER_FILTER_TIME = ctx.KONG_HEADER_FILTER_ENDED_AT - ctx.KONG_HEADER_FILTER_START - if is_timing_enabled then + if has_timing then req_dyn_hook_run_hooks(ctx, "timing", "after:header_filter") end end @@ -1608,9 +1608,9 @@ end function Kong.body_filter() local ctx = ngx.ctx - local is_timing_enabled = ctx.is_timing_enabled + local has_timing = ctx.has_timing - if is_timing_enabled then + if has_timing then req_dyn_hook_run_hooks(ctx, "timing", "before:body_filter") end @@ -1669,7 +1669,7 @@ function Kong.body_filter() execute_collected_plugins_iterator(plugins_iterator, "body_filter", ctx) if not arg[2] then - if is_timing_enabled then + if has_timing then req_dyn_hook_run_hooks(ctx, "timing", "after:body_filter") end @@ -1691,7 +1691,7 @@ function Kong.body_filter() ctx.KONG_ACCESS_ENDED_AT) end - if is_timing_enabled then + if has_timing then req_dyn_hook_run_hooks(ctx, "timing", "after:body_filter") end end @@ -1699,9 +1699,9 @@ end function Kong.log() local ctx = ngx.ctx 
- local is_timing_enabled = ctx.is_timing_enabled + local has_timing = ctx.has_timing - if is_timing_enabled then + if has_timing then req_dyn_hook_run_hooks(ctx, "timing", "before:log") end @@ -1796,7 +1796,7 @@ function Kong.log() plugins_iterator.release(ctx) runloop.log.after(ctx) - if is_timing_enabled then + if has_timing then req_dyn_hook_run_hooks(ctx, "timing", "after:log") end diff --git a/kong/resty/dns/client.lua b/kong/resty/dns/client.lua index 37ee08ad214e..d3edd588cd8b 100644 --- a/kong/resty/dns/client.lua +++ b/kong/resty/dns/client.lua @@ -142,7 +142,7 @@ local cachelookup = function(qname, qtype) local cached = dnscache:get(key) local ctx = ngx.ctx - if ctx and ctx.is_timing_enabled then + if ctx and ctx.has_timing then req_dyn_hook_run_hooks(ctx, "timing", "dns:cache_lookup", cached ~= nil) end diff --git a/kong/runloop/handler.lua b/kong/runloop/handler.lua index ed6cfb9bed91..250d712f55b9 100644 --- a/kong/runloop/handler.lua +++ b/kong/runloop/handler.lua @@ -1151,9 +1151,9 @@ return { -- to plugins in the access phase for doing headers propagation instrumentation.precreate_balancer_span(ctx) - local is_timing_enabled = ctx.is_timing_enabled + local has_timing = ctx.has_timing - if is_timing_enabled then + if has_timing then req_dyn_hook_run_hooks(ctx, "timing", "before:router") end @@ -1161,7 +1161,7 @@ return { local router = get_updated_router() local match_t = router:exec(ctx) - if is_timing_enabled then + if has_timing then req_dyn_hook_run_hooks(ctx, "timing", "after:router") end @@ -1182,7 +1182,7 @@ return { ctx.workspace = match_t.route and match_t.route.ws_id - if is_timing_enabled then + if has_timing then req_dyn_hook_run_hooks(ctx, "timing", "workspace_id:got", ctx.workspace) end From bcbb4d3d5096cc925cfa5f6171d64c6e4d8f6b2e Mon Sep 17 00:00:00 2001 From: Xumin <100666470+StarlightIbuki@users.noreply.github.com> Date: Fri, 27 Oct 2023 06:22:26 +0000 Subject: [PATCH 034/249] style(changelog): fix changelog entry grammar (#11865) --------- Co-authored-by: Datong Sun --- changelog/unreleased/kong/fix-tcp-log-sslhandshake.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/changelog/unreleased/kong/fix-tcp-log-sslhandshake.yml b/changelog/unreleased/kong/fix-tcp-log-sslhandshake.yml index f712729860e3..12b05ca7eb5d 100644 --- a/changelog/unreleased/kong/fix-tcp-log-sslhandshake.yml +++ b/changelog/unreleased/kong/fix-tcp-log-sslhandshake.yml @@ -1,3 +1,3 @@ -message: "**tcp-log**: fix an issue that repeated ssl handshake" +message: "**tcp-log**: fix an issue of unnecessary handshakes when reusing TLS connection" type: bugfix scope: Plugin From 6bccc872cbb3a8bb52389a4e7b18a06b59e05ac0 Mon Sep 17 00:00:00 2001 From: Joshua Schmid Date: Wed, 25 Oct 2023 13:07:10 +0200 Subject: [PATCH 035/249] chore: disable `dedicated_config_processing by default Signed-off-by: Joshua Schmid --- changelog/unreleased/kong/dedicated_config_processing.yml | 2 +- kong.conf.default | 4 ++-- kong/templates/kong_defaults.lua | 2 +- spec/01-unit/03-conf_loader_spec.lua | 5 +++-- spec/kong_tests.conf | 2 +- 5 files changed, 8 insertions(+), 7 deletions(-) diff --git a/changelog/unreleased/kong/dedicated_config_processing.yml b/changelog/unreleased/kong/dedicated_config_processing.yml index 6b78ded49b42..4f67bcab9865 100644 --- a/changelog/unreleased/kong/dedicated_config_processing.yml +++ b/changelog/unreleased/kong/dedicated_config_processing.yml @@ -1,4 +1,4 @@ message: | - rename `privileged_agent` to `dedicated_config_processing. 
Enable `dedicated_config_processing` by default + rename `privileged_agent` to `dedicated_config_processing. type: feature scope: Core diff --git a/kong.conf.default b/kong.conf.default index 401c0c52f8ad..10bdf50d1b59 100644 --- a/kong.conf.default +++ b/kong.conf.default @@ -182,7 +182,7 @@ # cache (i.e. when the configured # `mem_cache_size`) is full. -#dedicated_config_processing = on # Enables or disables a special worker +#dedicated_config_processing = off # Enables or disables a special worker # process for configuration processing. This process # increases memory usage a little bit while # allowing to reduce latencies by moving some @@ -2127,7 +2127,7 @@ # information such as domain name tried during these processes. # #request_debug = on # When enabled, Kong will provide detailed timing information - # for its components to the client and the error log + # for its components to the client and the error log # if the following headers are present in the proxy request: # - `X-Kong-Request-Debug`: # If the value is set to `*`, diff --git a/kong/templates/kong_defaults.lua b/kong/templates/kong_defaults.lua index c28245192924..4a450fd08825 100644 --- a/kong/templates/kong_defaults.lua +++ b/kong/templates/kong_defaults.lua @@ -161,7 +161,7 @@ dns_not_found_ttl = 30 dns_error_ttl = 1 dns_no_sync = off -dedicated_config_processing = on +dedicated_config_processing = off worker_consistency = eventual worker_state_update_frequency = 5 diff --git a/spec/01-unit/03-conf_loader_spec.lua b/spec/01-unit/03-conf_loader_spec.lua index ad41d52ea8bd..6b6cb6572926 100644 --- a/spec/01-unit/03-conf_loader_spec.lua +++ b/spec/01-unit/03-conf_loader_spec.lua @@ -65,7 +65,7 @@ describe("Configuration loader", function() assert.same({}, conf.status_ssl_cert) assert.same({}, conf.status_ssl_cert_key) assert.same(nil, conf.privileged_agent) - assert.same(true, conf.dedicated_config_processing) + assert.same(false, conf.dedicated_config_processing) assert.same(false, conf.allow_debug_header) assert.is_nil(getmetatable(conf)) end) @@ -2020,7 +2020,7 @@ describe("Configuration loader", function() privileged_agent = "on", })) assert.same(nil, conf.privileged_agent) - assert.same(true, conf.dedicated_config_processing) + assert.same(false, conf.dedicated_config_processing) assert.equal(nil, err) -- no clobber @@ -2419,6 +2419,7 @@ describe("Configuration loader", function() assert.matches(label.err, err) end end) + end) end) diff --git a/spec/kong_tests.conf b/spec/kong_tests.conf index f7c101f231ea..49714f7cb535 100644 --- a/spec/kong_tests.conf +++ b/spec/kong_tests.conf @@ -25,7 +25,7 @@ anonymous_reports = off worker_consistency = strict -dedicated_config_processing = on +dedicated_config_processing = off dns_hostsfile = spec/fixtures/hosts From 7e8dd280478c6bcb2af3b7136cc93ca3623cbad8 Mon Sep 17 00:00:00 2001 From: chronolaw Date: Wed, 25 Oct 2023 14:01:14 +0800 Subject: [PATCH 036/249] refactor(toosl): merge the implementation of is_lapis_array --- kong/tools/table.lua | 14 +------------- 1 file changed, 1 insertion(+), 13 deletions(-) diff --git a/kong/tools/table.lua b/kong/tools/table.lua index 8999954908db..9dc0ee26635b 100644 --- a/kong/tools/table.lua +++ b/kong/tools/table.lua @@ -118,19 +118,7 @@ end -- *** NOTE *** string-keys containing integers are considered valid array entries! 
-- @param t The table to check -- @return Returns `true` if the table is an array, `false` otherwise -function _M.is_lapis_array(t) - if type(t) ~= "table" then - return false - end - local i = 0 - for _ in pairs(t) do - i = i + 1 - if t[i] == nil and t[tostring(i)] == nil then - return false - end - end - return true -end +_M.is_lapis_array = is_array_lapis --- Deep copies a table into a new table. From 310a50b91ab2e6919594792c7a889bf1cefed5df Mon Sep 17 00:00:00 2001 From: chronolaw Date: Wed, 25 Oct 2023 14:04:08 +0800 Subject: [PATCH 037/249] clean --- kong/tools/table.lua | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/kong/tools/table.lua b/kong/tools/table.lua index 9dc0ee26635b..f5fea379c70f 100644 --- a/kong/tools/table.lua +++ b/kong/tools/table.lua @@ -111,14 +111,14 @@ do return is_array_strict(t) end -end ---- Checks if a table is an array and not an associative array. --- *** NOTE *** string-keys containing integers are considered valid array entries! --- @param t The table to check --- @return Returns `true` if the table is an array, `false` otherwise -_M.is_lapis_array = is_array_lapis + --- Checks if a table is an array and not an associative array. + -- *** NOTE *** string-keys containing integers are considered valid array entries! + -- @param t The table to check + -- @return Returns `true` if the table is an array, `false` otherwise + _M.is_lapis_array = is_array_lapis +end --- Deep copies a table into a new table. From ed798ec4bba611603d465395d21c5065a33d8287 Mon Sep 17 00:00:00 2001 From: Chrono Date: Fri, 27 Oct 2023 16:06:21 +0800 Subject: [PATCH 038/249] refactor(tools): separate yield functions from utils (#11747) Clean the huge utils.lua --- kong-3.6.0-0.rockspec | 1 + kong/tools/utils.lua | 64 +++++++------------------------------------ kong/tools/yield.lua | 59 +++++++++++++++++++++++++++++++++++++++ 3 files changed, 70 insertions(+), 54 deletions(-) create mode 100644 kong/tools/yield.lua diff --git a/kong-3.6.0-0.rockspec b/kong-3.6.0-0.rockspec index fd8356805d4b..908f46fd23db 100644 --- a/kong-3.6.0-0.rockspec +++ b/kong-3.6.0-0.rockspec @@ -166,6 +166,7 @@ build = { ["kong.tools.request_aware_table"] = "kong/tools/request_aware_table.lua", ["kong.tools.string"] = "kong/tools/string.lua", ["kong.tools.table"] = "kong/tools/table.lua", + ["kong.tools.yield"] = "kong/tools/yield.lua", ["kong.runloop.handler"] = "kong/runloop/handler.lua", ["kong.runloop.events"] = "kong/runloop/events.lua", diff --git a/kong/tools/utils.lua b/kong/tools/utils.lua index 6fbc5b7b739d..74733049ce13 100644 --- a/kong/tools/utils.lua +++ b/kong/tools/utils.lua @@ -1346,57 +1346,6 @@ function _M.sort_by_handler_priority(a, b) return prio_a > prio_b end ---- --- Check if the phase is yieldable. 
--- @tparam string phase the phase to check, if not specified then --- the default value will be the current phase --- @treturn boolean true if the phase is yieldable, false otherwise -local in_yieldable_phase do - local get_phase = ngx.get_phase - - -- https://github.com/openresty/lua-nginx-module/blob/c89469e920713d17d703a5f3736c9335edac22bf/src/ngx_http_lua_util.h#L35C10-L35C10 - local LUA_CONTEXT_YIELDABLE_PHASE = { - rewrite = true, - server_rewrite = true, - access = true, - content = true, - timer = true, - ssl_client_hello = true, - ssl_certificate = true, - ssl_session_fetch = true, - preread = true, - } - - in_yieldable_phase = function(phase) - if LUA_CONTEXT_YIELDABLE_PHASE[phase or get_phase()] == nil then - return false - end - return true - end -end - -_M.in_yieldable_phase = in_yieldable_phase - -do - local ngx_sleep = _G.native_ngx_sleep or ngx.sleep - - local YIELD_ITERATIONS = 1000 - local counter = YIELD_ITERATIONS - - function _M.yield(in_loop, phase) - if ngx.IS_CLI or not in_yieldable_phase(phase) then - return - end - if in_loop then - counter = counter - 1 - if counter > 0 then - return - end - counter = YIELD_ITERATIONS - end - ngx_sleep(0) - end -end local time_ns do @@ -1545,9 +1494,16 @@ _M.get_updated_monotonic_ms = get_updated_monotonic_ms do - local tbl = require "kong.tools.table" - for name, func in pairs(tbl) do - _M[name] = func + local modules = { + "kong.tools.table", + "kong.tools.yield", + } + + for _, str in ipairs(modules) do + local mod = require(str) + for name, func in pairs(mod) do + _M[name] = func + end end end diff --git a/kong/tools/yield.lua b/kong/tools/yield.lua new file mode 100644 index 000000000000..d21187dc9fe3 --- /dev/null +++ b/kong/tools/yield.lua @@ -0,0 +1,59 @@ +local _M = {} + + +--- +-- Check if the phase is yieldable. +-- @tparam string phase the phase to check, if not specified then +-- the default value will be the current phase +-- @treturn boolean true if the phase is yieldable, false otherwise +local in_yieldable_phase +do + local get_phase = ngx.get_phase + + -- https://github.com/openresty/lua-nginx-module/blob/c89469e920713d17d703a5f3736c9335edac22bf/src/ngx_http_lua_util.h#L35C10-L35C10 + local LUA_CONTEXT_YIELDABLE_PHASE = { + rewrite = true, + server_rewrite = true, + access = true, + content = true, + timer = true, + ssl_client_hello = true, + ssl_certificate = true, + ssl_session_fetch = true, + preread = true, + } + + in_yieldable_phase = function(phase) + return LUA_CONTEXT_YIELDABLE_PHASE[phase or get_phase()] + end +end +_M.in_yieldable_phase = in_yieldable_phase + + +local yield +do + local ngx_sleep = _G.native_ngx_sleep or ngx.sleep + + local YIELD_ITERATIONS = 1000 + local counter = YIELD_ITERATIONS + + yield = function(in_loop, phase) + if ngx.IS_CLI or not in_yieldable_phase(phase) then + return + end + + if in_loop then + counter = counter - 1 + if counter > 0 then + return + end + counter = YIELD_ITERATIONS + end + + ngx_sleep(0) -- yield + end +end +_M.yield = yield + + +return _M From 3be2513a60b9f5f0a89631ff17c202e6113981c0 Mon Sep 17 00:00:00 2001 From: Xiaoch Date: Fri, 27 Oct 2023 19:03:56 +0800 Subject: [PATCH 039/249] fix(conf): set default value of `dns_no_sync` to `on` (#11869) This is a temporary workaround for the DNS client blocking issue until a more permanent solution can be developed. 
Fix FTI-5348 --------- Co-authored-by: Datong Sun --- changelog/unreleased/kong/fix_dns_enable_dns_no_sync.yml | 3 +++ kong.conf.default | 2 +- kong/templates/kong_defaults.lua | 2 +- 3 files changed, 5 insertions(+), 2 deletions(-) create mode 100644 changelog/unreleased/kong/fix_dns_enable_dns_no_sync.yml diff --git a/changelog/unreleased/kong/fix_dns_enable_dns_no_sync.yml b/changelog/unreleased/kong/fix_dns_enable_dns_no_sync.yml new file mode 100644 index 000000000000..3e7b20b95266 --- /dev/null +++ b/changelog/unreleased/kong/fix_dns_enable_dns_no_sync.yml @@ -0,0 +1,3 @@ +message: The default value of `dns_no_sync` option has been changed to `on` +type: bugfix +scope: Configuration diff --git a/kong.conf.default b/kong.conf.default index 10bdf50d1b59..33f5c5274646 100644 --- a/kong.conf.default +++ b/kong.conf.default @@ -1543,7 +1543,7 @@ #dns_error_ttl = 1 # TTL in seconds for error responses. -#dns_no_sync = off # If enabled, then upon a cache-miss every +#dns_no_sync = on # If enabled, then upon a cache-miss every # request will trigger its own dns query. # When disabled multiple requests for the # same name/type will be synchronised to a diff --git a/kong/templates/kong_defaults.lua b/kong/templates/kong_defaults.lua index 4a450fd08825..e6915a699f06 100644 --- a/kong/templates/kong_defaults.lua +++ b/kong/templates/kong_defaults.lua @@ -159,7 +159,7 @@ dns_stale_ttl = 4 dns_cache_size = 10000 dns_not_found_ttl = 30 dns_error_ttl = 1 -dns_no_sync = off +dns_no_sync = on dedicated_config_processing = off worker_consistency = eventual From 8ee192b6dba85f7c64b28f5df90a75ccd4f916a8 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Miko=C5=82aj=20Nowak?= Date: Sat, 28 Oct 2023 11:21:14 +0200 Subject: [PATCH 040/249] chore(deps): bump lua-resty-healthcheck to 3.0.0 (#11834) * chore(deps): bump lua-resty-healthcheck to 3.0.0 This bumps lua-resty-healthchceck to 3.0.0 KAG-2704 * Update changelog/unreleased/kong/deps_bump_lua_resty_healthcheck.yml Co-authored-by: Chrono --------- Co-authored-by: Chrono --- changelog/unreleased/kong/deps_bump_lua_resty_healthcheck.yml | 3 +++ kong-3.6.0-0.rockspec | 2 +- 2 files changed, 4 insertions(+), 1 deletion(-) create mode 100644 changelog/unreleased/kong/deps_bump_lua_resty_healthcheck.yml diff --git a/changelog/unreleased/kong/deps_bump_lua_resty_healthcheck.yml b/changelog/unreleased/kong/deps_bump_lua_resty_healthcheck.yml new file mode 100644 index 000000000000..03e368a65de8 --- /dev/null +++ b/changelog/unreleased/kong/deps_bump_lua_resty_healthcheck.yml @@ -0,0 +1,3 @@ +message: Bumped lua-resty-healthcheck from 1.6.3 to 3.0.0 +type: dependency +scope: Core diff --git a/kong-3.6.0-0.rockspec b/kong-3.6.0-0.rockspec index 908f46fd23db..e0fcd08a13cd 100644 --- a/kong-3.6.0-0.rockspec +++ b/kong-3.6.0-0.rockspec @@ -31,7 +31,7 @@ dependencies = { "binaryheap >= 0.4", "luaxxhash >= 1.0", "lua-protobuf == 0.5.0", - "lua-resty-healthcheck == 1.6.3", + "lua-resty-healthcheck == 3.0.0", "lua-messagepack == 0.5.2", "lua-resty-aws == 1.3.5", "lua-resty-openssl == 0.8.25", From eedec8de166528499c2328cb125aa2c23ec5e324 Mon Sep 17 00:00:00 2001 From: Andy Zhang Date: Mon, 30 Oct 2023 12:45:39 +0800 Subject: [PATCH 041/249] tests(azure-functions): temporarily disable tests that use mockbin (#11878) KAG-2912 --- spec/03-plugins/35-azure-functions/01-access_spec.lua | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/spec/03-plugins/35-azure-functions/01-access_spec.lua b/spec/03-plugins/35-azure-functions/01-access_spec.lua index 
28c098e6c979..dfcc0ffc787b 100644 --- a/spec/03-plugins/35-azure-functions/01-access_spec.lua +++ b/spec/03-plugins/35-azure-functions/01-access_spec.lua @@ -6,7 +6,7 @@ local server_tokens = meta._SERVER_TOKENS for _, strategy in helpers.each_strategy() do - describe("Plugin: Azure Functions (access) [#" .. strategy .. "]", function() + describe("#flaky Plugin: Azure Functions (access) [#" .. strategy .. "]", function() local proxy_client setup(function() From 47ff7da82a7396b86f8eb31de73c1ae78310235f Mon Sep 17 00:00:00 2001 From: Chrono Date: Tue, 31 Oct 2023 10:20:54 +0800 Subject: [PATCH 042/249] refactor(tools): separate sha256 functions from tools.utils (#11874) * refactor(tools): separate sha256 functions from tools.utils * style lint --- kong-3.6.0-0.rockspec | 1 + kong/tools/sha256.lua | 67 +++++++++++++++++++++++++++++++++++++++++++ kong/tools/utils.lua | 64 +---------------------------------------- 3 files changed, 69 insertions(+), 63 deletions(-) create mode 100644 kong/tools/sha256.lua diff --git a/kong-3.6.0-0.rockspec b/kong-3.6.0-0.rockspec index e0fcd08a13cd..3b6b1a183fd0 100644 --- a/kong-3.6.0-0.rockspec +++ b/kong-3.6.0-0.rockspec @@ -166,6 +166,7 @@ build = { ["kong.tools.request_aware_table"] = "kong/tools/request_aware_table.lua", ["kong.tools.string"] = "kong/tools/string.lua", ["kong.tools.table"] = "kong/tools/table.lua", + ["kong.tools.sha256"] = "kong/tools/sha256.lua", ["kong.tools.yield"] = "kong/tools/yield.lua", ["kong.runloop.handler"] = "kong/runloop/handler.lua", diff --git a/kong/tools/sha256.lua b/kong/tools/sha256.lua new file mode 100644 index 000000000000..bc2f93b06eb9 --- /dev/null +++ b/kong/tools/sha256.lua @@ -0,0 +1,67 @@ +local _M = {} + + +local sha256_bin +do + local digest = require "resty.openssl.digest" + local sha256_digest + + function sha256_bin(key) + local _, bin, err + if not sha256_digest then + sha256_digest, err = digest.new("sha256") + if err then + return nil, err + end + end + + bin, err = sha256_digest:final(key) + if err then + sha256_digest = nil + return nil, err + end + + _, err = sha256_digest:reset() + if err then + sha256_digest = nil + end + + return bin + end +end +_M.sha256_bin = sha256_bin + + +local sha256_hex, sha256_base64, sha256_base64url +do + local to_hex = require "resty.string".to_hex + local to_base64 = ngx.encode_base64 + local to_base64url = require "ngx.base64".encode_base64url + + local function sha256_encode(encode_alg, key) + local bin, err = sha256_bin(key) + if err then + return nil, err + end + + return encode_alg(bin) + end + + function sha256_hex(key) + return sha256_encode(to_hex, key) + end + + function sha256_base64(key) + return sha256_encode(to_base64, key) + end + + function sha256_base64url(key) + return sha256_encode(to_base64url, key) + end +end +_M.sha256_hex = sha256_hex +_M.sha256_base64 = sha256_base64 +_M.sha256_base64url = sha256_base64url + + +return _M diff --git a/kong/tools/utils.lua b/kong/tools/utils.lua index 74733049ce13..672a08a2ce63 100644 --- a/kong/tools/utils.lua +++ b/kong/tools/utils.lua @@ -1396,69 +1396,6 @@ end _M.try_decode_base64 = try_decode_base64 -local sha256_bin -do - local digest = require "resty.openssl.digest" - local sha256_digest - - function sha256_bin(key) - local _, bin, err - if not sha256_digest then - sha256_digest, err = digest.new("sha256") - if err then - return nil, err - end - end - - bin, err = sha256_digest:final(key) - if err then - sha256_digest = nil - return nil, err - end - - _, err = sha256_digest:reset() - if err then - 
sha256_digest = nil - end - - return bin - end -end -_M.sha256_bin = sha256_bin - - -local sha256_hex, sha256_base64, sha256_base64url -do - local to_hex = require "resty.string".to_hex - local to_base64 = ngx.encode_base64 - local to_base64url = require "ngx.base64".encode_base64url - - local function sha256_encode(encode_alg, key) - local bin, err = sha256_bin(key) - if err then - return nil, err - end - - return encode_alg(bin) - end - - function sha256_hex(key) - return sha256_encode(to_hex, key) - end - - function sha256_base64(key) - return sha256_encode(to_base64, key) - end - - function sha256_base64url(key) - return sha256_encode(to_base64url, key) - end -end -_M.sha256_hex = sha256_hex -_M.sha256_base64 = sha256_base64 -_M.sha256_base64url = sha256_base64url - - local get_now_ms local get_updated_now_ms local get_start_time_ms @@ -1496,6 +1433,7 @@ _M.get_updated_monotonic_ms = get_updated_monotonic_ms do local modules = { "kong.tools.table", + "kong.tools.sha256", "kong.tools.yield", } From a8de91a79e61b32bc78324a391bcdea24222783b Mon Sep 17 00:00:00 2001 From: Chrono Date: Tue, 31 Oct 2023 10:21:31 +0800 Subject: [PATCH 043/249] refactor(tools): separate gzip functions from tools.utils (#11875) --- kong-3.6.0-0.rockspec | 1 + kong/tools/gzip.lua | 62 +++++++++++++++++++++++++++++++++++++++++++ kong/tools/utils.lua | 52 +----------------------------------- 3 files changed, 64 insertions(+), 51 deletions(-) create mode 100644 kong/tools/gzip.lua diff --git a/kong-3.6.0-0.rockspec b/kong-3.6.0-0.rockspec index 3b6b1a183fd0..35cb06cc8627 100644 --- a/kong-3.6.0-0.rockspec +++ b/kong-3.6.0-0.rockspec @@ -164,6 +164,7 @@ build = { ["kong.tools.protobuf"] = "kong/tools/protobuf.lua", ["kong.tools.mime_type"] = "kong/tools/mime_type.lua", ["kong.tools.request_aware_table"] = "kong/tools/request_aware_table.lua", + ["kong.tools.gzip"] = "kong/tools/gzip.lua", ["kong.tools.string"] = "kong/tools/string.lua", ["kong.tools.table"] = "kong/tools/table.lua", ["kong.tools.sha256"] = "kong/tools/sha256.lua", diff --git a/kong/tools/gzip.lua b/kong/tools/gzip.lua new file mode 100644 index 000000000000..16c8906683c0 --- /dev/null +++ b/kong/tools/gzip.lua @@ -0,0 +1,62 @@ +local buffer = require "string.buffer" +local zlib = require "ffi-zlib" + + +local inflate_gzip = zlib.inflateGzip +local deflate_gzip = zlib.deflateGzip + + +local _M = {} + + +-- lua-ffi-zlib allocated buffer of length +1, +-- so use 64KB - 1 instead +local GZIP_CHUNK_SIZE = 65535 + + +local function read_input_buffer(input_buffer) + return function(size) + local data = input_buffer:get(size) + return data ~= "" and data or nil + end +end + + +local function write_output_buffer(output_buffer) + return function(data) + return output_buffer:put(data) + end +end + + +local function gzip_helper(inflate_or_deflate, input) + local input_buffer = buffer.new(0):set(input) + local output_buffer = buffer.new() + local ok, err = inflate_or_deflate(read_input_buffer(input_buffer), + write_output_buffer(output_buffer), + GZIP_CHUNK_SIZE) + if not ok then + return nil, err + end + + return output_buffer:get() +end + + +--- Gzip compress the content of a string +-- @tparam string str the uncompressed string +-- @return gz (string) of the compressed content, or nil, err to if an error occurs +function _M.deflate_gzip(str) + return gzip_helper(deflate_gzip, str) +end + + +--- Gzip decompress the content of a string +-- @tparam string gz the Gzip compressed string +-- @return str (string) of the decompressed content, or nil, err to if an 
error occurs +function _M.inflate_gzip(gz) + return gzip_helper(inflate_gzip, gz) +end + + +return _M diff --git a/kong/tools/utils.lua b/kong/tools/utils.lua index 672a08a2ce63..d85a418ed440 100644 --- a/kong/tools/utils.lua +++ b/kong/tools/utils.lua @@ -10,12 +10,10 @@ local ffi = require "ffi" local uuid = require "resty.jit-uuid" -local buffer = require "string.buffer" local pl_stringx = require "pl.stringx" local pl_utils = require "pl.utils" local pl_path = require "pl.path" local pl_file = require "pl.file" -local zlib = require "ffi-zlib" local C = ffi.C local ffi_new = ffi.new @@ -35,8 +33,6 @@ local join = pl_stringx.join local split = pl_stringx.split local re_find = ngx.re.find local re_match = ngx.re.match -local inflate_gzip = zlib.inflateGzip -local deflate_gzip = zlib.deflateGzip local setmetatable = setmetatable ffi.cdef[[ @@ -1038,53 +1034,6 @@ do end -do - -- lua-ffi-zlib allocated buffer of length +1, - -- so use 64KB - 1 instead - local GZIP_CHUNK_SIZE = 65535 - - local function read_input_buffer(input_buffer) - return function(size) - local data = input_buffer:get(size) - return data ~= "" and data or nil - end - end - - local function write_output_buffer(output_buffer) - return function(data) - return output_buffer:put(data) - end - end - - local function gzip_helper(inflate_or_deflate, input) - local input_buffer = buffer.new(0):set(input) - local output_buffer = buffer.new() - local ok, err = inflate_or_deflate(read_input_buffer(input_buffer), - write_output_buffer(output_buffer), - GZIP_CHUNK_SIZE) - if not ok then - return nil, err - end - - return output_buffer:get() - end - - --- Gzip compress the content of a string - -- @tparam string str the uncompressed string - -- @return gz (string) of the compressed content, or nil, err to if an error occurs - function _M.deflate_gzip(str) - return gzip_helper(deflate_gzip, str) - end - - --- Gzip decompress the content of a string - -- @tparam string gz the Gzip compressed string - -- @return str (string) of the decompressed content, or nil, err to if an error occurs - function _M.inflate_gzip(gz) - return gzip_helper(inflate_gzip, gz) - end -end - - local get_mime_type local get_response_type local get_error_template @@ -1432,6 +1381,7 @@ _M.get_updated_monotonic_ms = get_updated_monotonic_ms do local modules = { + "kong.tools.gzip", "kong.tools.table", "kong.tools.sha256", "kong.tools.yield", From 224dc334af4274448d24cbf2776287d8cb9fc134 Mon Sep 17 00:00:00 2001 From: Andy Zhang Date: Tue, 31 Oct 2023 10:47:43 +0800 Subject: [PATCH 044/249] chore(conf): enable `dedicated_config_processing` by default (#11889) * chore: enable `dedicated_config_processing by default This reverts commit 6bccc872cbb3a8bb52389a4e7b18a06b59e05ac0. * docs(dcp): remove a trailing space --- changelog/unreleased/kong/dedicated_config_processing.yml | 2 +- kong.conf.default | 2 +- kong/templates/kong_defaults.lua | 2 +- spec/01-unit/03-conf_loader_spec.lua | 5 ++--- spec/kong_tests.conf | 2 +- 5 files changed, 6 insertions(+), 7 deletions(-) diff --git a/changelog/unreleased/kong/dedicated_config_processing.yml b/changelog/unreleased/kong/dedicated_config_processing.yml index 4f67bcab9865..6b78ded49b42 100644 --- a/changelog/unreleased/kong/dedicated_config_processing.yml +++ b/changelog/unreleased/kong/dedicated_config_processing.yml @@ -1,4 +1,4 @@ message: | - rename `privileged_agent` to `dedicated_config_processing. + rename `privileged_agent` to `dedicated_config_processing. 
Enable `dedicated_config_processing` by default type: feature scope: Core diff --git a/kong.conf.default b/kong.conf.default index 33f5c5274646..9bbd8fcb7f94 100644 --- a/kong.conf.default +++ b/kong.conf.default @@ -182,7 +182,7 @@ # cache (i.e. when the configured # `mem_cache_size`) is full. -#dedicated_config_processing = off # Enables or disables a special worker +#dedicated_config_processing = on # Enables or disables a special worker # process for configuration processing. This process # increases memory usage a little bit while # allowing to reduce latencies by moving some diff --git a/kong/templates/kong_defaults.lua b/kong/templates/kong_defaults.lua index e6915a699f06..d1f685ae7df7 100644 --- a/kong/templates/kong_defaults.lua +++ b/kong/templates/kong_defaults.lua @@ -161,7 +161,7 @@ dns_not_found_ttl = 30 dns_error_ttl = 1 dns_no_sync = on -dedicated_config_processing = off +dedicated_config_processing = on worker_consistency = eventual worker_state_update_frequency = 5 diff --git a/spec/01-unit/03-conf_loader_spec.lua b/spec/01-unit/03-conf_loader_spec.lua index 6b6cb6572926..ad41d52ea8bd 100644 --- a/spec/01-unit/03-conf_loader_spec.lua +++ b/spec/01-unit/03-conf_loader_spec.lua @@ -65,7 +65,7 @@ describe("Configuration loader", function() assert.same({}, conf.status_ssl_cert) assert.same({}, conf.status_ssl_cert_key) assert.same(nil, conf.privileged_agent) - assert.same(false, conf.dedicated_config_processing) + assert.same(true, conf.dedicated_config_processing) assert.same(false, conf.allow_debug_header) assert.is_nil(getmetatable(conf)) end) @@ -2020,7 +2020,7 @@ describe("Configuration loader", function() privileged_agent = "on", })) assert.same(nil, conf.privileged_agent) - assert.same(false, conf.dedicated_config_processing) + assert.same(true, conf.dedicated_config_processing) assert.equal(nil, err) -- no clobber @@ -2419,7 +2419,6 @@ describe("Configuration loader", function() assert.matches(label.err, err) end end) - end) end) diff --git a/spec/kong_tests.conf b/spec/kong_tests.conf index 49714f7cb535..f7c101f231ea 100644 --- a/spec/kong_tests.conf +++ b/spec/kong_tests.conf @@ -25,7 +25,7 @@ anonymous_reports = off worker_consistency = strict -dedicated_config_processing = off +dedicated_config_processing = on dns_hostsfile = spec/fixtures/hosts From 07e82fe54844d80d7e18cf1249501eca35f9447f Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 30 Oct 2023 08:59:12 +0000 Subject: [PATCH 045/249] chore(deps): bump tj-actions/changed-files from 39.2.3 to 40.0.0 Bumps [tj-actions/changed-files](https://github.com/tj-actions/changed-files) from 39.2.3 to 40.0.0. - [Release notes](https://github.com/tj-actions/changed-files/releases) - [Changelog](https://github.com/tj-actions/changed-files/blob/main/HISTORY.md) - [Commits](https://github.com/tj-actions/changed-files/compare/95690f9ece77c1740f4a55b7f1de9023ed6b1f87...af292f1e845a0377b596972698a8598734eb2796) --- updated-dependencies: - dependency-name: tj-actions/changed-files dependency-type: direct:production update-type: version-update:semver-major ... 
Signed-off-by: dependabot[bot] --- .github/workflows/changelog-requirement.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/changelog-requirement.yml b/.github/workflows/changelog-requirement.yml index eba804875b24..38bf78cd69c5 100644 --- a/.github/workflows/changelog-requirement.yml +++ b/.github/workflows/changelog-requirement.yml @@ -21,7 +21,7 @@ jobs: - name: Find changelog files id: changelog-list - uses: tj-actions/changed-files@95690f9ece77c1740f4a55b7f1de9023ed6b1f87 # v37 + uses: tj-actions/changed-files@af292f1e845a0377b596972698a8598734eb2796 # v37 with: files_yaml: | changelogs: From f4e54a07d9459eae88ba691f1ec52b0443136c3b Mon Sep 17 00:00:00 2001 From: "Qirui(Keery) Nie" Date: Tue, 31 Oct 2023 14:30:42 +0800 Subject: [PATCH 046/249] tests(azure-functions): remove usage of mockbin in `azure-functions` tests (#11879) FTI-5523 KAG-2912 --- .../35-azure-functions/01-access_spec.lua | 94 ++++++++++++++++--- 1 file changed, 81 insertions(+), 13 deletions(-) diff --git a/spec/03-plugins/35-azure-functions/01-access_spec.lua b/spec/03-plugins/35-azure-functions/01-access_spec.lua index dfcc0ffc787b..9907c7e0d0b3 100644 --- a/spec/03-plugins/35-azure-functions/01-access_spec.lua +++ b/spec/03-plugins/35-azure-functions/01-access_spec.lua @@ -1,13 +1,50 @@ local helpers = require "spec.helpers" local meta = require "kong.meta" +local http_mock = require "spec.helpers.http_mock" local server_tokens = meta._SERVER_TOKENS for _, strategy in helpers.each_strategy() do - describe("#flaky Plugin: Azure Functions (access) [#" .. strategy .. "]", function() + describe("Plugin: Azure Functions (access) [#" .. strategy .. "]", function() + local mock local proxy_client + local mock_http_server_port = helpers.get_available_port() + + mock = http_mock.new("127.0.0.1:" .. mock_http_server_port, { + ["/"] = { + access = [[ + local json = require "cjson" + local method = ngx.req.get_method() + local uri = ngx.var.request_uri + local headers = ngx.req.get_headers(nil, true) + local query_args = ngx.req.get_uri_args() + ngx.req.read_body() + local body + -- collect body + body = ngx.req.get_body_data() + if not body then + local file = ngx.req.get_body_file() + if file then + local f = io.open(file, "r") + if f then + body = f:read("*a") + f:close() + end + end + end + ngx.say(json.encode({ + query_args = query_args, + uri = uri, + method = method, + headers = headers, + body = body, + status = 200, + })) + ]] + }, + }) setup(function() local _, db = helpers.get_db_utils(strategy, { @@ -21,16 +58,35 @@ for _, strategy in helpers.each_strategy() do protocols = { "http", "https" }, } - -- this plugin definition results in an upstream url to - -- http://mockbin.org/request - -- which will echo the request for inspection + -- Mocking lua-resty-http's request_uri function + db.plugins:insert { + name = "pre-function", + route = { id = route2.id }, + config = { + access = { + [[ + local http = require "resty.http" + local json = require "cjson" + local _request_uri = http.request_uri + http.request_uri = function (self, uri, params) + local scheme, host, port, _, _ = unpack(http:parse_uri(uri)) + local mock_server_port = ]] .. mock_http_server_port .. 
[[ + -- Replace the port with the mock server port + local new_uri = string.format("%s://%s:%d", scheme, host, mock_server_port) + return _request_uri(self, new_uri, params) + end + ]] + } + } + } + db.plugins:insert { name = "azure-functions", route = { id = route2.id }, config = { - https = true, - appname = "mockbin", - hostdomain = "org", + https = false, + appname = "azure", + hostdomain = "example.com", routeprefix = "request", functionname = "test-func-name", apikey = "anything_but_an_API_key", @@ -38,11 +94,22 @@ for _, strategy in helpers.each_strategy() do }, } - assert(helpers.start_kong{ - database = strategy, - plugins = "azure-functions", + local fixtures = { + dns_mock = helpers.dns_mock.new() + } + + fixtures.dns_mock:A({ + name = "azure.example.com", + address = "127.0.0.1", }) + assert(helpers.start_kong({ + database = strategy, + untrusted_lua = "on", + plugins = "azure-functions,pre-function", + }, nil, nil, fixtures)) + + assert(mock:start()) end) -- setup before_each(function() @@ -55,6 +122,7 @@ for _, strategy in helpers.each_strategy() do teardown(function() helpers.stop_kong() + assert(mock:stop()) end) @@ -70,7 +138,7 @@ for _, strategy in helpers.each_strategy() do assert.response(res).has.status(200) local json = assert.response(res).has.jsonbody() - assert.same({ hello ="world" }, json.queryString) + assert.same({ hello ="world" }, json.query_args) end) it("passes request body", function() @@ -87,7 +155,7 @@ for _, strategy in helpers.each_strategy() do assert.response(res).has.status(200) local json = assert.response(res).has.jsonbody() - assert.same(body, json.postData.text) + assert.same(body, json.body) end) it("passes the path parameters", function() @@ -101,7 +169,7 @@ for _, strategy in helpers.each_strategy() do assert.response(res).has.status(200) local json = assert.response(res).has.jsonbody() - assert.matches("mockbin.org/request/test%-func%-name/and/then/some", json.url) + assert.matches("/request/test%-func%-name/and/then/some", json.uri) end) it("passes the method", function() From dda623d8ebbcbb550b331e7a958b7c307418c3b4 Mon Sep 17 00:00:00 2001 From: Aapo Talvensaari Date: Tue, 31 Oct 2023 11:02:42 +0200 Subject: [PATCH 047/249] chore(patches): make arm64 reg allow patches apply cleanly (#11886) ### Summary Before: ``` patching file bundle/LuaJIT-2.1-20230410/src/lj_asm_arm64.h Hunk #1 succeeded at 1133 (offset 26 lines). Hunk #2 succeeded at 1142 (offset 26 lines). 
``` After: ``` patching file bundle/LuaJIT-2.1-20230410/src/lj_asm_arm64.h ``` Signed-off-by: Aapo Talvensaari --- .../patches/LuaJIT-2.1-20230410_06_arm64_reg_alloc_fix.patch | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/build/openresty/patches/LuaJIT-2.1-20230410_06_arm64_reg_alloc_fix.patch b/build/openresty/patches/LuaJIT-2.1-20230410_06_arm64_reg_alloc_fix.patch index fb190bfeb346..7a0d5fb56479 100644 --- a/build/openresty/patches/LuaJIT-2.1-20230410_06_arm64_reg_alloc_fix.patch +++ b/build/openresty/patches/LuaJIT-2.1-20230410_06_arm64_reg_alloc_fix.patch @@ -12,7 +12,7 @@ diff --git a/bundle/LuaJIT-2.1-20230410/src/lj_asm_arm64.h b/bundle/LuaJIT-2.1-2 index 3889883d..c216fced 100644 --- a/bundle/LuaJIT-2.1-20230410/src/lj_asm_arm64.h +++ b/bundle/LuaJIT-2.1-20230410/src/lj_asm_arm64.h -@@ -1107,6 +1107,8 @@ static void asm_ahuvload(ASMState *as, IRIns *ir) +@@ -1133,6 +1133,8 @@ static void asm_ahuvload(ASMState *as, IRIns *ir) } type = ra_scratch(as, rset_clear(gpr, tmp)); idx = asm_fuseahuref(as, ir->op1, &ofs, rset_clear(gpr, type), A64I_LDRx); @@ -21,7 +21,7 @@ index 3889883d..c216fced 100644 if (ir->o == IR_VLOAD) ofs += 8 * ir->op2; /* Always do the type check, even if the load result is unused. */ asm_guardcc(as, irt_isnum(ir->t) ? CC_LS : CC_NE); -@@ -1114,7 +1116,7 @@ static void asm_ahuvload(ASMState *as, IRIns *ir) +@@ -1140,7 +1142,7 @@ static void asm_ahuvload(ASMState *as, IRIns *ir) lj_assertA(irt_isinteger(ir->t) || irt_isnum(ir->t), "bad load type %d", irt_type(ir->t)); emit_nm(as, A64I_CMPx | A64F_SH(A64SH_LSR, 32), From a16522ea46cfa992a86c3db07353f4315af92b3a Mon Sep 17 00:00:00 2001 From: lena-larionova <54370747+lena-larionova@users.noreply.github.com> Date: Tue, 31 Oct 2023 11:06:13 -0700 Subject: [PATCH 048/249] fix(acl): Add missing descriptions to plugin schema (#11888) --- kong/plugins/acl/schema.lua | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/kong/plugins/acl/schema.lua b/kong/plugins/acl/schema.lua index 3cde65a74437..c8fd776ca509 100644 --- a/kong/plugins/acl/schema.lua +++ b/kong/plugins/acl/schema.lua @@ -9,9 +9,9 @@ return { { config = { type = "record", fields = { - { allow = { type = "array", elements = { type = "string" }, }, }, - { deny = { type = "array", elements = { type = "string" }, }, }, - { hide_groups_header = { type = "boolean", required = true, default = false }, }, + { allow = { type = "array", elements = { type = "string", description = "Arbitrary group names that are allowed to consume the service or route. One of `config.allow` or `config.deny` must be specified." }, }, }, + { deny = { type = "array", elements = { type = "string", description = "Arbitrary group names that are not allowed to consume the service or route. One of `config.allow` or `config.deny` must be specified." }, }, }, + { hide_groups_header = { type = "boolean", required = true, default = false, description = "If enabled (`true`), prevents the `X-Consumer-Groups` header from being sent in the request to the upstream service." 
}, }, }, } } From b0d5fa2b2a38bcbebeb141093ce1cca467efa740 Mon Sep 17 00:00:00 2001 From: Chrono Date: Wed, 1 Nov 2023 02:08:35 +0800 Subject: [PATCH 049/249] refactor(tools): separate uuid functions from tools.utils (#11873) --- kong-3.6.0-0.rockspec | 1 + kong/tools/utils.lua | 28 +++++++--------------------- kong/tools/uuid.lua | 35 +++++++++++++++++++++++++++++++++++ 3 files changed, 43 insertions(+), 21 deletions(-) create mode 100644 kong/tools/uuid.lua diff --git a/kong-3.6.0-0.rockspec b/kong-3.6.0-0.rockspec index 35cb06cc8627..fb706d21b57f 100644 --- a/kong-3.6.0-0.rockspec +++ b/kong-3.6.0-0.rockspec @@ -169,6 +169,7 @@ build = { ["kong.tools.table"] = "kong/tools/table.lua", ["kong.tools.sha256"] = "kong/tools/sha256.lua", ["kong.tools.yield"] = "kong/tools/yield.lua", + ["kong.tools.uuid"] = "kong/tools/uuid.lua", ["kong.runloop.handler"] = "kong/runloop/handler.lua", ["kong.runloop.events"] = "kong/runloop/events.lua", diff --git a/kong/tools/utils.lua b/kong/tools/utils.lua index d85a418ed440..37e7a83ebd8e 100644 --- a/kong/tools/utils.lua +++ b/kong/tools/utils.lua @@ -9,7 +9,6 @@ -- @module kong.tools.utils local ffi = require "ffi" -local uuid = require "resty.jit-uuid" local pl_stringx = require "pl.stringx" local pl_utils = require "pl.utils" local pl_path = require "pl.path" @@ -31,7 +30,6 @@ local find = string.find local gsub = string.gsub local join = pl_stringx.join local split = pl_stringx.split -local re_find = ngx.re.find local re_match = ngx.re.match local setmetatable = setmetatable @@ -212,11 +210,6 @@ do _M.get_rand_bytes = get_rand_bytes end ---- Generates a v4 uuid. --- @function uuid --- @return string with uuid -_M.uuid = uuid.generate_v4 - --- Generates a random unique string -- @return string The random string (a chunk of base64ish-encoded random bytes) do @@ -243,20 +236,6 @@ do _M.random_string = random_string end -local uuid_regex = "^[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}$" -function _M.is_valid_uuid(str) - if type(str) ~= 'string' or #str ~= 36 then - return false - end - return re_find(str, uuid_regex, 'ioj') ~= nil -end - --- function below is more acurate, but invalidates previously accepted uuids and hence causes --- trouble with existing data during migrations. --- see: https://github.com/thibaultcha/lua-resty-jit-uuid/issues/8 --- function _M.is_valid_uuid(str) --- return str == "00000000-0000-0000-0000-000000000000" or uuid.is_valid(str) ---end do local url = require "socket.url" @@ -1009,6 +988,12 @@ do ]] end + if not pcall(ffi.typeof, "ngx_int_t") then + ffi.cdef [[ + typedef intptr_t ngx_int_t; + ]] + end + -- ngx_str_t defined by lua-resty-core local s = ffi_new("ngx_str_t[1]") s[0].data = "10" @@ -1385,6 +1370,7 @@ do "kong.tools.table", "kong.tools.sha256", "kong.tools.yield", + "kong.tools.uuid", } for _, str in ipairs(modules) do diff --git a/kong/tools/uuid.lua b/kong/tools/uuid.lua new file mode 100644 index 000000000000..08dfb5106c62 --- /dev/null +++ b/kong/tools/uuid.lua @@ -0,0 +1,35 @@ +local uuid = require "resty.jit-uuid" + + +local re_find = ngx.re.find + + +local uuid_regex = "^[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}$" + + +local _M = {} + + +--- Generates a v4 uuid. 
+-- @function uuid +-- @return string with uuid +_M.uuid = uuid.generate_v4 + + +function _M.is_valid_uuid(str) + if type(str) ~= 'string' or #str ~= 36 then + return false + end + return re_find(str, uuid_regex, 'ioj') ~= nil +end + + +-- function below is more acurate, but invalidates previously accepted uuids and hence causes +-- trouble with existing data during migrations. +-- see: https://github.com/thibaultcha/lua-resty-jit-uuid/issues/8 +-- function _M.is_valid_uuid(str) +-- return str == "00000000-0000-0000-0000-000000000000" or uuid.is_valid(str) +--end + + +return _M From b3851a634c98660ef6559e35e4e059e6c761f9db Mon Sep 17 00:00:00 2001 From: Datong Sun Date: Thu, 2 Nov 2023 14:30:02 +0800 Subject: [PATCH 050/249] chore(deps): bump `atc-router` to `v1.3.1` (#11903) --- .requirements | 2 +- build/openresty/atc_router/atc_router_repositories.bzl | 2 +- changelog/unreleased/kong/bump_atc_router.yml | 2 ++ scripts/explain_manifest/suites.py | 6 +++++- 4 files changed, 9 insertions(+), 3 deletions(-) create mode 100644 changelog/unreleased/kong/bump_atc_router.yml diff --git a/.requirements b/.requirements index 29282e1b8aa6..a14eda9f2d08 100644 --- a/.requirements +++ b/.requirements @@ -10,7 +10,7 @@ LUA_KONG_NGINX_MODULE=4fbc3ddc7dcbc706ed286b95344f3cb6da17e637 # 0.8.0 LUA_RESTY_LMDB=951926f20b674a0622236a0e331b359df1c02d9b # 1.3.0 LUA_RESTY_EVENTS=8448a92cec36ac04ea522e78f6496ba03c9b1fd8 # 0.2.0 LUA_RESTY_WEBSOCKET=60eafc3d7153bceb16e6327074e0afc3d94b1316 # 0.4.0 -ATC_ROUTER=b0d5e7e2a2ca59bb051959385d3e42d96c93bb98 # 1.2.0 +ATC_ROUTER=7a2ad42d4246598ba1f753b6ae79cb1456040afa # 1.3.1 KONG_MANAGER=nightly NGX_WASM_MODULE=21732b18fc46f409962ae77ddf01c713b568d078 # prerelease-0.1.1 diff --git a/build/openresty/atc_router/atc_router_repositories.bzl b/build/openresty/atc_router/atc_router_repositories.bzl index 9384071a7141..2daf5879f835 100644 --- a/build/openresty/atc_router/atc_router_repositories.bzl +++ b/build/openresty/atc_router/atc_router_repositories.bzl @@ -1,4 +1,4 @@ -"""A module defining the third party dependency PCRE""" +"""A module defining the dependency atc-router""" load("@bazel_tools//tools/build_defs/repo:git.bzl", "git_repository") load("@bazel_tools//tools/build_defs/repo:utils.bzl", "maybe") diff --git a/changelog/unreleased/kong/bump_atc_router.yml b/changelog/unreleased/kong/bump_atc_router.yml new file mode 100644 index 000000000000..a0013d1e64db --- /dev/null +++ b/changelog/unreleased/kong/bump_atc_router.yml @@ -0,0 +1,2 @@ +message: Bump `atc-router` to `v1.3.1` +type: "dependency" diff --git a/scripts/explain_manifest/suites.py b/scripts/explain_manifest/suites.py index 4c50828ba07e..b1a19b9c8465 100644 --- a/scripts/explain_manifest/suites.py +++ b/scripts/explain_manifest/suites.py @@ -60,6 +60,10 @@ def common_suites(expect, libxcrypt_no_obsolete_api: bool = False): .contain("ngx_http_lua_kong_ffi_var_set_by_index") \ .contain("ngx_http_lua_kong_ffi_var_load_indexes") + expect("/usr/local/openresty/lualib/libatc_router.so", "ATC router so should have ffi module compiled") \ + .functions \ + .contain("router_execute") + if libxcrypt_no_obsolete_api: expect("/usr/local/openresty/nginx/sbin/nginx", "nginx linked with libxcrypt.so.2") \ .needed_libraries.contain("libcrypt.so.2") @@ -134,4 +138,4 @@ def docker_suites(expect): "/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem", #CentOS/RHEL 7 "/etc/ssl/cert.pem", #OpenBSD, Alpine ), "ca-certiticates exists") \ - .exists() \ No newline at end of file + .exists() From 
5f5e272a684c9952fe57456de471b041f105c712 Mon Sep 17 00:00:00 2001 From: Chrono Date: Fri, 3 Nov 2023 12:36:56 +0800 Subject: [PATCH 051/249] refactor(router): use ATC raw string literal in expressions generation (#11904) This helps with generating easier to read expressions, and the code is more straightforward. However, we must fallback to the old style escaping if the value contains `"#` (very unlikely case). KAG-2952 --- kong/router/atc.lua | 6 ++++ kong/router/compat.lua | 4 +-- spec/01-unit/08-router_spec.lua | 51 +++++++++++++++++++++++++++------ 3 files changed, 50 insertions(+), 11 deletions(-) diff --git a/kong/router/atc.lua b/kong/router/atc.lua index 7c59cba03b4d..533ae5251207 100644 --- a/kong/router/atc.lua +++ b/kong/router/atc.lua @@ -96,6 +96,12 @@ end local function escape_str(str) + -- raw string + if not str:find([["#]], 1, true) then + return "r#\"" .. str .. "\"#" + end + + -- standard string escaping (unlikely case) if str:find([[\]], 1, true) then str = str:gsub([[\]], [[\\]]) end diff --git a/kong/router/compat.lua b/kong/router/compat.lua index 6da3522f4698..531cd8b1fa80 100644 --- a/kong/router/compat.lua +++ b/kong/router/compat.lua @@ -165,9 +165,9 @@ local function get_expression(route) -- See #6425, if `net.protocol` is not `https` -- then SNI matching should simply not be considered if srcs or dsts then - gen = "(net.protocol != \"tls\"" .. LOGICAL_OR .. gen .. ")" + gen = "(net.protocol != r#\"tls\"#" .. LOGICAL_OR .. gen .. ")" else - gen = "(net.protocol != \"https\"" .. LOGICAL_OR .. gen .. ")" + gen = "(net.protocol != r#\"https\"#" .. LOGICAL_OR .. gen .. ")" end expression_append(expr_buf, LOGICAL_AND, gen) diff --git a/spec/01-unit/08-router_spec.lua b/spec/01-unit/08-router_spec.lua index b8b39777f697..114ff31fbe29 100644 --- a/spec/01-unit/08-router_spec.lua +++ b/spec/01-unit/08-router_spec.lua @@ -2150,40 +2150,73 @@ for _, flavor in ipairs({ "traditional", "traditional_compatible", "expressions" it("empty methods", function() use_case[1].route.methods = v - assert.equal(get_expression(use_case[1].route), [[(http.path ^= "/foo")]]) + assert.equal(get_expression(use_case[1].route), [[(http.path ^= r#"/foo"#)]]) assert(new_router(use_case)) end) it("empty hosts", function() use_case[1].route.hosts = v - assert.equal(get_expression(use_case[1].route), [[(http.method == "GET") && (http.path ^= "/foo")]]) + assert.equal(get_expression(use_case[1].route), [[(http.method == r#"GET"#) && (http.path ^= r#"/foo"#)]]) assert(new_router(use_case)) end) it("empty headers", function() use_case[1].route.headers = v - assert.equal(get_expression(use_case[1].route), [[(http.method == "GET") && (http.path ^= "/foo")]]) + assert.equal(get_expression(use_case[1].route), [[(http.method == r#"GET"#) && (http.path ^= r#"/foo"#)]]) assert(new_router(use_case)) end) it("empty paths", function() use_case[1].route.paths = v - assert.equal(get_expression(use_case[1].route), [[(http.method == "GET")]]) + assert.equal(get_expression(use_case[1].route), [[(http.method == r#"GET"#)]]) assert(new_router(use_case)) end) it("empty snis", function() use_case[1].route.snis = v - assert.equal(get_expression(use_case[1].route), [[(http.method == "GET") && (http.path ^= "/foo")]]) + assert.equal(get_expression(use_case[1].route), [[(http.method == r#"GET"#) && (http.path ^= r#"/foo"#)]]) assert(new_router(use_case)) end) end end) + describe("raw string", function() + local use_case + local get_expression = atc_compat.get_expression + + before_each(function() + use_case = { + { + 
service = service, + route = { + id = "e8fb37f1-102d-461e-9c51-6608a6bb8101", + methods = { "GET" }, + }, + }, + } + end) + + it("path has '\"'", function() + use_case[1].route.paths = { [[~/\"/*$]], } + + assert.equal([[(http.method == r#"GET"#) && (http.path ~ r#"^/\"/*$"#)]], + get_expression(use_case[1].route)) + assert(new_router(use_case)) + end) + + it("path has '\"#'", function() + use_case[1].route.paths = { [[~/\"#/*$]], } + + assert.equal([[(http.method == r#"GET"#) && (http.path ~ "^/\\\"#/*$")]], + get_expression(use_case[1].route)) + assert(new_router(use_case)) + end) + end) + describe("check regex with '\\'", function() local use_case local get_expression = atc_compat.get_expression @@ -2203,7 +2236,7 @@ for _, flavor in ipairs({ "traditional", "traditional_compatible", "expressions" it("regex path has double '\\'", function() use_case[1].route.paths = { [[~/\\/*$]], } - assert.equal([[(http.method == "GET") && (http.path ~ "^/\\\\/*$")]], + assert.equal([[(http.method == r#"GET"#) && (http.path ~ r#"^/\\/*$"#)]], get_expression(use_case[1].route)) assert(new_router(use_case)) end) @@ -2211,7 +2244,7 @@ for _, flavor in ipairs({ "traditional", "traditional_compatible", "expressions" it("regex path has '\\d'", function() use_case[1].route.paths = { [[~/\d+]], } - assert.equal([[(http.method == "GET") && (http.path ~ "^/\\d+")]], + assert.equal([[(http.method == r#"GET"#) && (http.path ~ r#"^/\d+"#)]], get_expression(use_case[1].route)) assert(new_router(use_case)) end) @@ -4659,7 +4692,7 @@ for _, flavor in ipairs({ "traditional", "traditional_compatible", "expressions" use_case[1].route.destinations = {{ ip = "192.168.0.1/16" },} assert.equal(get_expression(use_case[1].route), - [[(net.protocol != "tls" || (tls.sni == "www.example.org")) && (net.dst.ip in 192.168.0.0/16)]]) + [[(net.protocol != r#"tls"# || (tls.sni == r#"www.example.org"#)) && (net.dst.ip in 192.168.0.0/16)]]) assert(new_router(use_case)) end) @@ -4667,7 +4700,7 @@ for _, flavor in ipairs({ "traditional", "traditional_compatible", "expressions" use_case[1].route.destinations = v assert.equal(get_expression(use_case[1].route), - [[(net.protocol != "tls" || (tls.sni == "www.example.org")) && (net.src.ip == 127.0.0.1)]]) + [[(net.protocol != r#"tls"# || (tls.sni == r#"www.example.org"#)) && (net.src.ip == 127.0.0.1)]]) assert(new_router(use_case)) end) end From 076b8ef479bb6658637820b84fb5cacf323b8cc9 Mon Sep 17 00:00:00 2001 From: Chrono Date: Fri, 3 Nov 2023 14:14:27 +0800 Subject: [PATCH 052/249] refactor(tools): separate rand functions from tools.utils (#11897) separate rand functions from tools.utils --- kong-3.6.0-0.rockspec | 1 + kong/tools/rand.lua | 133 ++++++++++++++++++++++++++++++++++++++++++ kong/tools/utils.lua | 120 +------------------------------------ 3 files changed, 135 insertions(+), 119 deletions(-) create mode 100644 kong/tools/rand.lua diff --git a/kong-3.6.0-0.rockspec b/kong-3.6.0-0.rockspec index fb706d21b57f..a34044faeeb2 100644 --- a/kong-3.6.0-0.rockspec +++ b/kong-3.6.0-0.rockspec @@ -170,6 +170,7 @@ build = { ["kong.tools.sha256"] = "kong/tools/sha256.lua", ["kong.tools.yield"] = "kong/tools/yield.lua", ["kong.tools.uuid"] = "kong/tools/uuid.lua", + ["kong.tools.rand"] = "kong/tools/rand.lua", ["kong.runloop.handler"] = "kong/runloop/handler.lua", ["kong.runloop.events"] = "kong/runloop/events.lua", diff --git a/kong/tools/rand.lua b/kong/tools/rand.lua new file mode 100644 index 000000000000..cfb4bfbf3409 --- /dev/null +++ b/kong/tools/rand.lua @@ -0,0 +1,133 @@ +local ffi = 
require "ffi" + + +local C = ffi.C +local ffi_new = ffi.new + + +ffi.cdef[[ +typedef unsigned char u_char; + +int RAND_bytes(u_char *buf, int num); + +unsigned long ERR_get_error(void); +void ERR_load_crypto_strings(void); +void ERR_free_strings(void); + +const char *ERR_reason_error_string(unsigned long e); + +int open(const char * filename, int flags, ...); +size_t read(int fd, void *buf, size_t count); +int write(int fd, const void *ptr, int numbytes); +int close(int fd); +char *strerror(int errnum); +]] + + +local _M = {} + + +local get_rand_bytes +do + local ngx_log = ngx.log + local WARN = ngx.WARN + + local system_constants = require "lua_system_constants" + local O_RDONLY = system_constants.O_RDONLY() + local ffi_fill = ffi.fill + local ffi_str = ffi.string + local bytes_buf_t = ffi.typeof "char[?]" + + local function urandom_bytes(buf, size) + local fd = C.open("/dev/urandom", O_RDONLY, 0) -- mode is ignored + if fd < 0 then + ngx_log(WARN, "Error opening random fd: ", + ffi_str(C.strerror(ffi.errno()))) + + return false + end + + local res = C.read(fd, buf, size) + if res <= 0 then + ngx_log(WARN, "Error reading from urandom: ", + ffi_str(C.strerror(ffi.errno()))) + + return false + end + + if C.close(fd) ~= 0 then + ngx_log(WARN, "Error closing urandom: ", + ffi_str(C.strerror(ffi.errno()))) + end + + return true + end + + -- try to get n_bytes of CSPRNG data, first via /dev/urandom, + -- and then falling back to OpenSSL if necessary + get_rand_bytes = function(n_bytes, urandom) + local buf = ffi_new(bytes_buf_t, n_bytes) + ffi_fill(buf, n_bytes, 0x0) + + -- only read from urandom if we were explicitly asked + if urandom then + local rc = urandom_bytes(buf, n_bytes) + + -- if the read of urandom was successful, we returned true + -- and buf is filled with our bytes, so return it as a string + if rc then + return ffi_str(buf, n_bytes) + end + end + + if C.RAND_bytes(buf, n_bytes) == 0 then + -- get error code + local err_code = C.ERR_get_error() + if err_code == 0 then + return nil, "could not get SSL error code from the queue" + end + + -- get human-readable error string + C.ERR_load_crypto_strings() + local err = C.ERR_reason_error_string(err_code) + C.ERR_free_strings() + + return nil, "could not get random bytes (" .. + "reason:" .. ffi_str(err) .. ") " + end + + return ffi_str(buf, n_bytes) + end +end +_M.get_rand_bytes = get_rand_bytes + + +--- Generates a random unique string +-- @return string The random string (a chunk of base64ish-encoded random bytes) +local random_string +do + local char = string.char + local rand = math.random + local encode_base64 = ngx.encode_base64 + + -- generate a random-looking string by retrieving a chunk of bytes and + -- replacing non-alphanumeric characters with random alphanumeric replacements + -- (we dont care about deriving these bytes securely) + -- this serves to attempt to maintain some backward compatibility with the + -- previous implementation (stripping a UUID of its hyphens), while significantly + -- expanding the size of the keyspace. 
+ random_string = function() + -- get 24 bytes, which will return a 32 char string after encoding + -- this is done in attempt to maintain backwards compatibility as + -- much as possible while improving the strength of this function + return encode_base64(get_rand_bytes(24, true)) + :gsub("/", char(rand(48, 57))) -- 0 - 10 + :gsub("+", char(rand(65, 90))) -- A - Z + :gsub("=", char(rand(97, 122))) -- a - z + end + +end +_M.random_string = random_string + + +return _M diff --git a/kong/tools/utils.lua b/kong/tools/utils.lua index 37e7a83ebd8e..3fa9e2ab1f8a 100644 --- a/kong/tools/utils.lua +++ b/kong/tools/utils.lua @@ -34,8 +34,6 @@ local re_match = ngx.re.match local setmetatable = setmetatable ffi.cdef[[ -typedef unsigned char u_char; - typedef long time_t; typedef int clockid_t; typedef struct timespec { @@ -46,20 +44,6 @@ typedef struct timespec { int clock_gettime(clockid_t clk_id, struct timespec *tp); int gethostname(char *name, size_t len); - -int RAND_bytes(u_char *buf, int num); - -unsigned long ERR_get_error(void); -void ERR_load_crypto_strings(void); -void ERR_free_strings(void); - -const char *ERR_reason_error_string(unsigned long e); - -int open(const char * filename, int flags, ...); -size_t read(int fd, void *buf, size_t count); -int write(int fd, const void *ptr, int numbytes); -int close(int fd); -char *strerror(int errnum); ]] local _M = {} @@ -134,109 +118,6 @@ do end -local get_rand_bytes - -do - local ngx_log = ngx.log - local WARN = ngx.WARN - - local system_constants = require "lua_system_constants" - local O_RDONLY = system_constants.O_RDONLY() - local ffi_fill = ffi.fill - local ffi_str = ffi.string - local bytes_buf_t = ffi.typeof "char[?]" - - local function urandom_bytes(buf, size) - local fd = C.open("/dev/urandom", O_RDONLY, 0) -- mode is ignored - if fd < 0 then - ngx_log(WARN, "Error opening random fd: ", - ffi_str(C.strerror(ffi.errno()))) - - return false - end - - local res = C.read(fd, buf, size) - if res <= 0 then - ngx_log(WARN, "Error reading from urandom: ", - ffi_str(C.strerror(ffi.errno()))) - - return false - end - - if C.close(fd) ~= 0 then - ngx_log(WARN, "Error closing urandom: ", - ffi_str(C.strerror(ffi.errno()))) - end - - return true - end - - -- try to get n_bytes of CSPRNG data, first via /dev/urandom, - -- and then falling back to OpenSSL if necessary - get_rand_bytes = function(n_bytes, urandom) - local buf = ffi_new(bytes_buf_t, n_bytes) - ffi_fill(buf, n_bytes, 0x0) - - -- only read from urandom if we were explicitly asked - if urandom then - local rc = urandom_bytes(buf, n_bytes) - - -- if the read of urandom was successful, we returned true - -- and buf is filled with our bytes, so return it as a string - if rc then - return ffi_str(buf, n_bytes) - end - end - - if C.RAND_bytes(buf, n_bytes) == 0 then - -- get error code - local err_code = C.ERR_get_error() - if err_code == 0 then - return nil, "could not get SSL error code from the queue" - end - - -- get human-readable error string - C.ERR_load_crypto_strings() - local err = C.ERR_reason_error_string(err_code) - C.ERR_free_strings() - - return nil, "could not get random bytes (" .. - "reason:" .. ffi_str(err) .. 
") " - end - - return ffi_str(buf, n_bytes) - end - - _M.get_rand_bytes = get_rand_bytes -end - ---- Generates a random unique string --- @return string The random string (a chunk of base64ish-encoded random bytes) -do - local char = string.char - local rand = math.random - local encode_base64 = ngx.encode_base64 - - -- generate a random-looking string by retrieving a chunk of bytes and - -- replacing non-alphanumeric characters with random alphanumeric replacements - -- (we dont care about deriving these bytes securely) - -- this serves to attempt to maintain some backward compatibility with the - -- previous implementation (stripping a UUID of its hyphens), while significantly - -- expanding the size of the keyspace. - local function random_string() - -- get 24 bytes, which will return a 32 char string after encoding - -- this is done in attempt to maintain backwards compatibility as - -- much as possible while improving the strength of this function - return encode_base64(get_rand_bytes(24, true)) - :gsub("/", char(rand(48, 57))) -- 0 - 10 - :gsub("+", char(rand(65, 90))) -- A - Z - :gsub("=", char(rand(97, 122))) -- a - z - end - - _M.random_string = random_string -end - - do local url = require "socket.url" @@ -1371,6 +1252,7 @@ do "kong.tools.sha256", "kong.tools.yield", "kong.tools.uuid", + "kong.tools.rand", } for _, str in ipairs(modules) do From d4ff0e8bc8589e2e0a277f3c3ca20caeae6adb34 Mon Sep 17 00:00:00 2001 From: Chrono Date: Fri, 3 Nov 2023 14:15:42 +0800 Subject: [PATCH 053/249] refactor(tools): separate string functions from tools.utils (#11884) separate string functions from tools.utils --- kong/tools/string.lua | 133 ++++++++++++++++++++++++++++++++++++++++++ kong/tools/utils.lua | 123 +------------------------------------- 2 files changed, 134 insertions(+), 122 deletions(-) diff --git a/kong/tools/string.lua b/kong/tools/string.lua index 3ed03a5d293a..45aa2a4ab6b6 100644 --- a/kong/tools/string.lua +++ b/kong/tools/string.lua @@ -1,3 +1,11 @@ +local pl_stringx = require "pl.stringx" + + +local type = type +local ipairs = ipairs +local tostring = tostring +local lower = string.lower +local fmt = string.format local find = string.find local gsub = string.gsub @@ -5,6 +13,131 @@ local gsub = string.gsub local _M = {} +--- splits a string. +-- just a placeholder to the penlight `pl.stringx.split` function +-- @function split +_M.split = pl_stringx.split + + +--- strips whitespace from a string. 
+-- @function strip +_M.strip = function(str) + if str == nil then + return "" + end + str = tostring(str) + if #str > 200 then + return str:gsub("^%s+", ""):reverse():gsub("^%s+", ""):reverse() + else + return str:match("^%s*(.-)%s*$") + end +end + + +-- Numbers taken from table 3-7 in www.unicode.org/versions/Unicode6.2.0/UnicodeStandard-6.2.pdf +-- find-based solution inspired by http://notebook.kulchenko.com/programming/fixing-malformed-utf8-in-lua +function _M.validate_utf8(val) + local str = tostring(val) + local i, len = 1, #str + while i <= len do + if i == find(str, "[%z\1-\127]", i) then i = i + 1 + elseif i == find(str, "[\194-\223][\123-\191]", i) then i = i + 2 + elseif i == find(str, "\224[\160-\191][\128-\191]", i) + or i == find(str, "[\225-\236][\128-\191][\128-\191]", i) + or i == find(str, "\237[\128-\159][\128-\191]", i) + or i == find(str, "[\238-\239][\128-\191][\128-\191]", i) then i = i + 3 + elseif i == find(str, "\240[\144-\191][\128-\191][\128-\191]", i) + or i == find(str, "[\241-\243][\128-\191][\128-\191][\128-\191]", i) + or i == find(str, "\244[\128-\143][\128-\191][\128-\191]", i) then i = i + 4 + else + return false, i + end + end + + return true +end + + +--- +-- Converts bytes to another unit in a human-readable string. +-- @tparam number bytes A value in bytes. +-- +-- @tparam[opt] string unit The unit to convert the bytes into. Can be either +-- of `b/B`, `k/K`, `m/M`, or `g/G` for bytes (unchanged), kibibytes, +-- mebibytes, or gibibytes, respectively. Defaults to `b` (bytes). +-- @tparam[opt] number scale The number of digits to the right of the decimal +-- point. Defaults to 2. +-- @treturn string A human-readable string. +-- @usage +-- +-- bytes_to_str(5497558) -- "5497558" +-- bytes_to_str(5497558, "m") -- "5.24 MiB" +-- bytes_to_str(5497558, "G", 3) -- "5.120 GiB" +-- +function _M.bytes_to_str(bytes, unit, scale) + if not unit or unit == "" or lower(unit) == "b" then + return fmt("%d", bytes) + end + + scale = scale or 2 + + if type(scale) ~= "number" or scale < 0 then + error("scale must be equal or greater than 0", 2) + end + + local fspec = fmt("%%.%df", scale) + + if lower(unit) == "k" then + return fmt(fspec .. " KiB", bytes / 2^10) + end + + if lower(unit) == "m" then + return fmt(fspec .. " MiB", bytes / 2^20) + end + + if lower(unit) == "g" then + return fmt(fspec .. " GiB", bytes / 2^30) + end + + error("invalid unit '" .. unit .. "' (expected 'k/K', 'm/M', or 'g/G')", 2) +end + + +local try_decode_base64 +do + local decode_base64 = ngx.decode_base64 + local decode_base64url = require "ngx.base64".decode_base64url + + local function decode_base64_str(str) + if type(str) == "string" then + return decode_base64(str) + or decode_base64url(str) + or nil, "base64 decoding failed: invalid input" + + else + return nil, "base64 decoding failed: not a string" + end + end + + function try_decode_base64(value) + if type(value) == "table" then + for i, v in ipairs(value) do + value[i] = decode_base64_str(v) or v + end + + return value + end + + if type(value) == "string" then + return decode_base64_str(value) or value + end + + return value + end +end +_M.try_decode_base64 = try_decode_base64 + + local replace_dashes local replace_dashes_lower do diff --git a/kong/tools/utils.lua b/kong/tools/utils.lua index 3fa9e2ab1f8a..2bab014e55d5 100644 --- a/kong/tools/utils.lua +++ b/kong/tools/utils.lua @@ -48,25 +48,6 @@ int gethostname(char *name, size_t len); local _M = {} ---- splits a string. 
--- just a placeholder to the penlight `pl.stringx.split` function --- @function split -_M.split = split - ---- strips whitespace from a string. --- @function strip -_M.strip = function(str) - if str == nil then - return "" - end - str = tostring(str) - if #str > 200 then - return str:gsub("^%s+", ""):reverse():gsub("^%s+", ""):reverse() - else - return str:match("^%s*(.-)%s*$") - end -end - do local _system_infos @@ -338,29 +319,6 @@ function _M.load_module_if_exists(module_name) end end --- Numbers taken from table 3-7 in www.unicode.org/versions/Unicode6.2.0/UnicodeStandard-6.2.pdf --- find-based solution inspired by http://notebook.kulchenko.com/programming/fixing-malformed-utf8-in-lua -function _M.validate_utf8(val) - local str = tostring(val) - local i, len = 1, #str - while i <= len do - if i == find(str, "[%z\1-\127]", i) then i = i + 1 - elseif i == find(str, "[\194-\223][\123-\191]", i) then i = i + 2 - elseif i == find(str, "\224[\160-\191][\128-\191]", i) - or i == find(str, "[\225-\236][\128-\191][\128-\191]", i) - or i == find(str, "\237[\128-\159][\128-\191]", i) - or i == find(str, "[\238-\239][\128-\191][\128-\191]", i) then i = i + 3 - elseif i == find(str, "\240[\144-\191][\128-\191][\128-\191]", i) - or i == find(str, "[\241-\243][\128-\191][\128-\191][\128-\191]", i) - or i == find(str, "\244[\128-\143][\128-\191][\128-\191]", i) then i = i + 4 - else - return false, i - end - end - - return true -end - do local ipmatcher = require "resty.ipmatcher" @@ -815,51 +773,6 @@ do end ---- --- Converts bytes to another unit in a human-readable string. --- @tparam number bytes A value in bytes. --- --- @tparam[opt] string unit The unit to convert the bytes into. Can be either --- of `b/B`, `k/K`, `m/M`, or `g/G` for bytes (unchanged), kibibytes, --- mebibytes, or gibibytes, respectively. Defaults to `b` (bytes). --- @tparam[opt] number scale The number of digits to the right of the decimal --- point. Defaults to 2. --- @treturn string A human-readable string. --- @usage --- --- bytes_to_str(5497558) -- "5497558" --- bytes_to_str(5497558, "m") -- "5.24 MiB" --- bytes_to_str(5497558, "G", 3) -- "5.120 GiB" --- -function _M.bytes_to_str(bytes, unit, scale) - if not unit or unit == "" or lower(unit) == "b" then - return fmt("%d", bytes) - end - - scale = scale or 2 - - if type(scale) ~= "number" or scale < 0 then - error("scale must be equal or greater than 0", 2) - end - - local fspec = fmt("%%.%df", scale) - - if lower(unit) == "k" then - return fmt(fspec .. " KiB", bytes / 2^10) - end - - if lower(unit) == "m" then - return fmt(fspec .. " MiB", bytes / 2^20) - end - - if lower(unit) == "g" then - return fmt(fspec .. " GiB", bytes / 2^30) - end - - error("invalid unit '" .. unit .. 
"' (expected 'k/K', 'm/M', or 'g/G')", 2) -end - - do local NGX_ERROR = ngx.ERROR @@ -1176,41 +1089,6 @@ end _M.time_ns = time_ns -local try_decode_base64 -do - local decode_base64 = ngx.decode_base64 - local decode_base64url = require "ngx.base64".decode_base64url - - local function decode_base64_str(str) - if type(str) == "string" then - return decode_base64(str) - or decode_base64url(str) - or nil, "base64 decoding failed: invalid input" - - else - return nil, "base64 decoding failed: not a string" - end - end - - function try_decode_base64(value) - if type(value) == "table" then - for i, v in ipairs(value) do - value[i] = decode_base64_str(v) or v - end - - return value - end - - if type(value) == "string" then - return decode_base64_str(value) or value - end - - return value - end -end -_M.try_decode_base64 = try_decode_base64 - - local get_now_ms local get_updated_now_ms local get_start_time_ms @@ -1251,6 +1129,7 @@ do "kong.tools.table", "kong.tools.sha256", "kong.tools.yield", + "kong.tools.string", "kong.tools.uuid", "kong.tools.rand", } From bd1ac6abc42ccca0567f5ce34f7ebed71e3cafd6 Mon Sep 17 00:00:00 2001 From: Chrono Date: Sun, 5 Nov 2023 05:02:59 +0800 Subject: [PATCH 054/249] refactor(runloop/wasm): optimize hash_chain_entity with string.buffer (#11304) * refactor(runloop/wsam): optimize hash_chain_entity with string.buffer * buf:free() * buf:reset() --- kong/runloop/wasm.lua | 26 +++++++++++++++----------- 1 file changed, 15 insertions(+), 11 deletions(-) diff --git a/kong/runloop/wasm.lua b/kong/runloop/wasm.lua index 664368ff4c3a..3ae3f7e8c029 100644 --- a/kong/runloop/wasm.lua +++ b/kong/runloop/wasm.lua @@ -55,9 +55,7 @@ local tostring = tostring local ipairs = ipairs local type = type local assert = assert -local concat = table.concat local insert = table.insert -local sha256 = utils.sha256_bin local cjson_encode = cjson.encode local cjson_decode = cjson.decode local fmt = string.format @@ -106,10 +104,14 @@ local STATUS = STATUS_DISABLED local hash_chain do + local buffer = require "string.buffer" + + local sha256 = utils.sha256_bin + local HASH_DISABLED = sha256("disabled") local HASH_NONE = sha256("none") - local buf = {} + local buf = buffer.new() ---@param chain kong.db.schema.entities.filter_chain ---@return string @@ -121,16 +123,18 @@ do return HASH_DISABLED end - local n = 0 - for _, filter in ipairs(chain.filters) do - buf[n + 1] = filter.name - buf[n + 2] = tostring(filter.enabled) - buf[n + 3] = tostring(filter.enabled and sha256(filter.config)) - n = n + 3 + local filters = chain.filters + for i = 1, #filters do + local filter = filters[i] + + buf:put(filter.name) + buf:put(tostring(filter.enabled)) + buf:put(tostring(filter.enabled and sha256(filter.config))) end - local s = concat(buf, "", 1, n) - clear_tab(buf) + local s = buf:get() + + buf:reset() return sha256(s) end From 3a7bc1660aae9f4025173dfc7f2fc9be1f98670b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Hans=20H=C3=BCbner?= Date: Fri, 15 Sep 2023 15:05:48 +0200 Subject: [PATCH 055/249] feat(testing): add reconfiguration completion detection mechanism This change adds a new response header X-Kong-Transaction-Id to the Admin API. It contains the (ever incrementing) PostgreSQL transaction ID of the change that was made. The value can then be put into the X-If-Kong-Transaction-Id variable in a request to the proxy path. The request will be rejected with a 503 error if the proxy path has not been reconfigured yet with this or a later transaction id. 
The mechanism is useful in testing, when changes are made through the Admin API and the effects on the proxy path are then to be verified. Rather than waiting for a static period or retrying the proxy path request until the expected result is received, the proxy path client specifies the last transaction ID received from the Admin API in the X-If-Kong-Transaction-Id header and retries the request if a 503 error is received. --- .../reconfiguration-completion-detection.yml | 3 + kong/clustering/config_helper.lua | 11 +- kong/clustering/control_plane.lua | 5 + kong/clustering/data_plane.lua | 5 +- kong/db/declarative/import.lua | 7 +- kong/db/strategies/postgres/connector.lua | 8 +- kong/db/strategies/postgres/init.lua | 2 + kong/global.lua | 13 +- kong/runloop/handler.lua | 126 +++++++-------- .../24-reconfiguration-completion_spec.lua | 143 ++++++++++++++++++ 10 files changed, 244 insertions(+), 79 deletions(-) create mode 100644 changelog/unreleased/reconfiguration-completion-detection.yml create mode 100644 spec/02-integration/04-admin_api/24-reconfiguration-completion_spec.lua diff --git a/changelog/unreleased/reconfiguration-completion-detection.yml b/changelog/unreleased/reconfiguration-completion-detection.yml new file mode 100644 index 000000000000..4389fd362a78 --- /dev/null +++ b/changelog/unreleased/reconfiguration-completion-detection.yml @@ -0,0 +1,3 @@ +message: Provide mechanism to detect completion of reconfiguration on the proxy path +type: feature +scope: Core diff --git a/kong/clustering/config_helper.lua b/kong/clustering/config_helper.lua index 790f3e72c15d..1c0083b82ec9 100644 --- a/kong/clustering/config_helper.lua +++ b/kong/clustering/config_helper.lua @@ -202,7 +202,12 @@ local function fill_empty_hashes(hashes) end end -function _M.update(declarative_config, config_table, config_hash, hashes) +function _M.update(declarative_config, msg) + + local config_table = msg.config_table + local config_hash = msg.config_hash + local hashes = msg.hashes + assert(type(config_table) == "table") if not config_hash then @@ -236,11 +241,13 @@ function _M.update(declarative_config, config_table, config_hash, hashes) -- executed by worker 0 local res - res, err = declarative.load_into_cache_with_events(entities, meta, new_hash, hashes) + res, err = declarative.load_into_cache_with_events(entities, meta, new_hash, hashes, msg.current_transaction_id) if not res then return nil, err end + ngx_log(ngx.NOTICE, "loaded configuration with transaction ID " .. 
msg.current_transaction_id) + return true end diff --git a/kong/clustering/control_plane.lua b/kong/clustering/control_plane.lua index a2696f9a3eb1..6939d7a78a5f 100644 --- a/kong/clustering/control_plane.lua +++ b/kong/clustering/control_plane.lua @@ -11,6 +11,7 @@ local compat = require("kong.clustering.compat") local constants = require("kong.constants") local events = require("kong.clustering.events") local calculate_config_hash = require("kong.clustering.config_helper").calculate_config_hash +local global = require("kong.global") local string = string @@ -115,8 +116,10 @@ function _M:export_deflated_reconfigure_payload() local config_hash, hashes = calculate_config_hash(config_table) + local current_transaction_id = global.get_current_transaction_id() local payload = { type = "reconfigure", + current_transaction_id = current_transaction_id, timestamp = ngx_now(), config_table = config_table, config_hash = config_hash, @@ -143,6 +146,8 @@ function _M:export_deflated_reconfigure_payload() self.current_config_hash = config_hash self.deflated_reconfigure_payload = payload + ngx_log(ngx_NOTICE, "exported configuration with transaction id " .. current_transaction_id) + return payload, nil, config_hash end diff --git a/kong/clustering/data_plane.lua b/kong/clustering/data_plane.lua index d0f0e1e020a9..4030b3174b05 100644 --- a/kong/clustering/data_plane.lua +++ b/kong/clustering/data_plane.lua @@ -213,10 +213,7 @@ function _M:communicate(premature) msg.timestamp and " with timestamp: " .. msg.timestamp or "", log_suffix) - local config_table = assert(msg.config_table) - - local pok, res, err = pcall(config_helper.update, self.declarative_config, - config_table, msg.config_hash, msg.hashes) + local pok, res, err = pcall(config_helper.update, self.declarative_config, msg) if pok then ping_immediately = true end diff --git a/kong/db/declarative/import.lua b/kong/db/declarative/import.lua index 4908e3d6a8e3..3c30a31da262 100644 --- a/kong/db/declarative/import.lua +++ b/kong/db/declarative/import.lua @@ -507,7 +507,7 @@ do local DECLARATIVE_LOCK_KEY = "declarative:lock" -- make sure no matter which path it exits, we released the lock. 
- load_into_cache_with_events = function(entities, meta, hash, hashes) + load_into_cache_with_events = function(entities, meta, hash, hashes, transaction_id) local kong_shm = ngx.shared.kong local ok, err = kong_shm:add(DECLARATIVE_LOCK_KEY, 0, DECLARATIVE_LOCK_TTL) @@ -522,6 +522,11 @@ do end ok, err = load_into_cache_with_events_no_lock(entities, meta, hash, hashes) + + if ok and transaction_id then + ok, err = kong_shm:set("declarative:current-transaction-id", transaction_id) + end + kong_shm:delete(DECLARATIVE_LOCK_KEY) return ok, err diff --git a/kong/db/strategies/postgres/connector.lua b/kong/db/strategies/postgres/connector.lua index fd5e9259066a..b5b9c257d8fa 100644 --- a/kong/db/strategies/postgres/connector.lua +++ b/kong/db/strategies/postgres/connector.lua @@ -519,10 +519,11 @@ function _mt:query(sql, operation) end local phase = get_phase() + local in_admin_api = phase == "content" and ngx.ctx.KONG_PHASE == ADMIN_API_PHASE if not operation or - not self.config_ro or - (phase == "content" and ngx.ctx.KONG_PHASE == ADMIN_API_PHASE) + not self.config_ro or + in_admin_api then -- admin API requests skips the replica optimization -- to ensure all its results are always strongly consistent @@ -552,6 +553,9 @@ function _mt:query(sql, operation) res, err, partial, num_queries = conn:query(sql) + if in_admin_api and operation == "write" and res and res[1] and res[1]._pg_transaction_id then + kong.response.set_header('X-Kong-Transaction-ID', res[1]._pg_transaction_id) + end -- if err is string then either it is a SQL error -- or it is a socket error, here we abort connections -- that encounter errors instead of reusing them, for diff --git a/kong/db/strategies/postgres/init.lua b/kong/db/strategies/postgres/init.lua index 74da93465aa6..804f4fb0b34a 100644 --- a/kong/db/strategies/postgres/init.lua +++ b/kong/db/strategies/postgres/init.lua @@ -987,6 +987,8 @@ function _M.new(connector, schema, errors) insert(upsert_expressions, ttl_escaped .. " = " .. "EXCLUDED." .. ttl_escaped) end + insert(select_expressions, "pg_current_xact_id() as _pg_transaction_id") + local primary_key_escaped = {} for i, key in ipairs(primary_key) do local primary_key_field = primary_key_fields[key] diff --git a/kong/global.lua b/kong/global.lua index cdceaa7f58ef..2c2449b5c64f 100644 --- a/kong/global.lua +++ b/kong/global.lua @@ -68,7 +68,8 @@ end local _GLOBAL = { - phases = phase_checker.phases, + phases = phase_checker.phases, + CURRENT_TRANSACTION_ID = 0, } @@ -294,4 +295,14 @@ function _GLOBAL.init_timing() end +function _GLOBAL.get_current_transaction_id() + local rows, err = kong.db.connector:query("select pg_current_xact_id() as _pg_transaction_id") + if not rows then + return nil, "could not query postgres for current transaction id: " .. 
err + else + return tonumber(rows[1]._pg_transaction_id) + end +end + + return _GLOBAL diff --git a/kong/runloop/handler.lua b/kong/runloop/handler.lua index 250d712f55b9..b22fc739086c 100644 --- a/kong/runloop/handler.lua +++ b/kong/runloop/handler.lua @@ -13,8 +13,7 @@ local concurrency = require "kong.concurrency" local lrucache = require "resty.lrucache" local ktls = require "resty.kong.tls" local request_id = require "kong.tracing.request_id" - - +local global = require "kong.global" local PluginsIterator = require "kong.runloop.plugins_iterator" @@ -748,6 +747,8 @@ do wasm.set_state(wasm_state) end + global.CURRENT_TRANSACTION_ID = kong_shm:get("declarative:current-transaction-id") or 0 + return true end) -- concurrency.with_coroutine_mutex @@ -765,11 +766,6 @@ do end -local function register_events() - events.register_events(reconfigure_handler) -end - - local balancer_prepare do local function sleep_once_for_balancer_init() @@ -921,7 +917,7 @@ return { return end - register_events() + events.register_events(reconfigure_handler) -- initialize balancers for active healthchecks timer_at(0, function() @@ -967,84 +963,59 @@ return { if strategy ~= "off" then local worker_state_update_frequency = kong.configuration.worker_state_update_frequency or 1 - local router_async_opts = { - name = "router", - timeout = 0, - on_timeout = "return_true", - } - - local function rebuild_router_timer(premature) + local function rebuild_timer(premature) if premature then return end - -- Don't wait for the semaphore (timeout = 0) when updating via the - -- timer. - -- If the semaphore is locked, that means that the rebuild is - -- already ongoing. - local ok, err = rebuild_router(router_async_opts) - if not ok then - log(ERR, "could not rebuild router via timer: ", err) + -- Before rebuiding the internal structures, retrieve the current PostgreSQL transaction ID to make it the + -- current transaction ID after the rebuild has finished. 
+ local rebuild_transaction_id, err = global.get_current_transaction_id() + if not rebuild_transaction_id then + log(ERR, err) end - end - local _, err = kong.timer:named_every("router-rebuild", - worker_state_update_frequency, - rebuild_router_timer) - if err then - log(ERR, "could not schedule timer to rebuild router: ", err) - end - - local plugins_iterator_async_opts = { - name = "plugins_iterator", - timeout = 0, - on_timeout = "return_true", - } - - local function rebuild_plugins_iterator_timer(premature) - if premature then - return - end - - local _, err = rebuild_plugins_iterator(plugins_iterator_async_opts) - if err then - log(ERR, "could not rebuild plugins iterator via timer: ", err) + local router_update_status, err = rebuild_router({ + name = "router", + timeout = 0, + on_timeout = "return_true", + }) + if not router_update_status then + log(ERR, "could not rebuild router via timer: ", err) end - end - - local _, err = kong.timer:named_every("plugins-iterator-rebuild", - worker_state_update_frequency, - rebuild_plugins_iterator_timer) - if err then - log(ERR, "could not schedule timer to rebuild plugins iterator: ", err) - end - - if wasm.enabled() then - local wasm_async_opts = { - name = "wasm", + local plugins_iterator_update_status, err = rebuild_plugins_iterator({ + name = "plugins_iterator", timeout = 0, on_timeout = "return_true", - } - - local function rebuild_wasm_filter_chains_timer(premature) - if premature then - return - end + }) + if not plugins_iterator_update_status then + log(ERR, "could not rebuild plugins iterator via timer: ", err) + end - local _, err = rebuild_wasm_state(wasm_async_opts) - if err then + if wasm.enabled() then + local wasm_update_status, err = rebuild_wasm_state({ + name = "wasm", + timeout = 0, + on_timeout = "return_true", + }) + if not wasm_update_status then log(ERR, "could not rebuild wasm filter chains via timer: ", err) end end - local _, err = kong.timer:named_every("wasm-filter-chains-rebuild", - worker_state_update_frequency, - rebuild_wasm_filter_chains_timer) - if err then - log(ERR, "could not schedule timer to rebuild wasm filter chains: ", err) + if rebuild_transaction_id then + log(NOTICE, "configuration processing completed for transaction ID " .. rebuild_transaction_id) + global.CURRENT_TRANSACTION_ID = rebuild_transaction_id end end + + local _, err = kong.timer:named_every("rebuild", + worker_state_update_frequency, + rebuild_timer) + if err then + log(ERR, "could not schedule timer to rebuild: ", err) + end end end, }, @@ -1134,6 +1105,23 @@ return { }, access = { before = function(ctx) + -- If this is a version-conditional request, abort it if this dataplane has not processed at least the + -- specified configuration version yet. 
+ local if_kong_transaction_id = kong.request and kong.request.get_header('x-if-kong-transaction-id') + if if_kong_transaction_id then + if_kong_transaction_id = tonumber(if_kong_transaction_id) + if if_kong_transaction_id and if_kong_transaction_id >= global.CURRENT_TRANSACTION_ID then + return kong.response.error( + 503, + "Service Unavailable", + { + ["X-Kong-Reconfiguration-Status"] = "pending", + ["Retry-After"] = tostring(kong.configuration.worker_state_update_frequency or 1), + } + ) + end + end + -- if there is a gRPC service in the context, don't re-execute the pre-access -- phase handler - it has been executed before the internal redirect if ctx.service and (ctx.service.protocol == "grpc" or diff --git a/spec/02-integration/04-admin_api/24-reconfiguration-completion_spec.lua b/spec/02-integration/04-admin_api/24-reconfiguration-completion_spec.lua new file mode 100644 index 000000000000..c3c70775e3a3 --- /dev/null +++ b/spec/02-integration/04-admin_api/24-reconfiguration-completion_spec.lua @@ -0,0 +1,143 @@ +local helpers = require "spec.helpers" +local cjson = require "cjson" + +describe("Admin API - Reconfiguration Completion -", function() + + local WORKER_STATE_UPDATE_FREQ = 1 + + local admin_client + local proxy_client + + local function run_tests() + + local res = admin_client:post("/services", { + body = { + name = "test-service", + url = "http://example.com", + }, + headers = { ["Content-Type"] = "application/json" }, + }) + local body = assert.res_status(201, res) + local service = cjson.decode(body) + + -- We're running the route setup in `eventually` to cover for the unlikely case that reconfiguration completes + -- between adding the route and requesting the path through the proxy path. + + local next_path do + local path_suffix = 0 + function next_path() + path_suffix = path_suffix + 1 + return "/" .. tostring(path_suffix) + end + end + + local service_path + local kong_transaction_id + + assert.eventually(function() + service_path = next_path() + + res = admin_client:post("/services/" .. service.id .. 
"/routes", { + body = { + paths = { service_path } + }, + headers = { ["Content-Type"] = "application/json" }, + }) + assert.res_status(201, res) + kong_transaction_id = res.headers['x-kong-transaction-id'] + assert.is_string(kong_transaction_id) + + res = proxy_client:get(service_path, + { + headers = { + ["X-If-Kong-Transaction-Id"] = kong_transaction_id + } + }) + assert.res_status(503, res) + assert.equals("pending", res.headers['x-kong-reconfiguration-status']) + local retry_after = tonumber(res.headers['retry-after']) + ngx.sleep(retry_after) + end) + .has_no_error() + + assert.eventually(function() + res = proxy_client:get(service_path, + { + headers = { + ["X-If-Kong-Transaction-Id"] = kong_transaction_id + } + }) + assert.res_status(200, res) + end) + .has_no_error() + end + + describe("#traditional mode -", function() + lazy_setup(function() + helpers.get_db_utils() + assert(helpers.start_kong({ + worker_consistency = "eventual", + worker_state_update_frequency = WORKER_STATE_UPDATE_FREQ, + })) + admin_client = helpers.admin_client() + proxy_client = helpers.proxy_client() + end) + + teardown(function() + if admin_client then + admin_client:close() + end + if proxy_client then + proxy_client:close() + end + helpers.stop_kong() + end) + + it("rejects proxy requests if worker state has not been updated yet", run_tests) + end) + + describe("#hybrid mode -", function() + lazy_setup(function() + helpers.get_db_utils() + + assert(helpers.start_kong({ + role = "control_plane", + database = "postgres", + prefix = "cp", + cluster_cert = "spec/fixtures/kong_clustering.crt", + cluster_cert_key = "spec/fixtures/kong_clustering.key", + lua_ssl_trusted_certificate = "spec/fixtures/kong_clustering.crt", + cluster_listen = "127.0.0.1:9005", + cluster_telemetry_listen = "127.0.0.1:9006", + nginx_conf = "spec/fixtures/custom_nginx.template", + })) + + assert(helpers.start_kong({ + role = "data_plane", + database = "off", + prefix = "dp", + cluster_cert = "spec/fixtures/kong_clustering.crt", + cluster_cert_key = "spec/fixtures/kong_clustering.key", + lua_ssl_trusted_certificate = "spec/fixtures/kong_clustering.crt", + cluster_control_plane = "127.0.0.1:9005", + cluster_telemetry_endpoint = "127.0.0.1:9006", + proxy_listen = "0.0.0.0:9002", + })) + admin_client = helpers.admin_client() + proxy_client = helpers.proxy_client("127.0.0.1", 9002) + end) + + teardown(function() + if admin_client then + admin_client:close() + end + if proxy_client then + proxy_client:close() + end + helpers.stop_kong("dp") + helpers.stop_kong("cp") + end) + + it("rejects proxy requests if worker state has not been updated yet", run_tests) + end) +end) From 00a9f9b0de5cd0d58e0bf300a117a92a901186ca Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Hans=20H=C3=BCbner?= Date: Thu, 26 Oct 2023 11:45:38 +0200 Subject: [PATCH 056/249] fix(test): remove external dependency --- .../24-reconfiguration-completion_spec.lua | 19 ++++++++++++++++--- 1 file changed, 16 insertions(+), 3 deletions(-) diff --git a/spec/02-integration/04-admin_api/24-reconfiguration-completion_spec.lua b/spec/02-integration/04-admin_api/24-reconfiguration-completion_spec.lua index c3c70775e3a3..9f528c4bb46b 100644 --- a/spec/02-integration/04-admin_api/24-reconfiguration-completion_spec.lua +++ b/spec/02-integration/04-admin_api/24-reconfiguration-completion_spec.lua @@ -10,10 +10,22 @@ describe("Admin API - Reconfiguration Completion -", function() local function run_tests() - local res = admin_client:post("/services", { + local res = admin_client:post("/plugins", { 
+ body = { + name = "request-termination", + config = { + status_code = 200, + body = "kong terminated the request", + } + }, + headers = { ["Content-Type"] = "application/json" }, + }) + assert.res_status(201, res) + + res = admin_client:post("/services", { body = { name = "test-service", - url = "http://example.com", + url = "http://127.0.0.1", }, headers = { ["Content-Type"] = "application/json" }, }) @@ -67,7 +79,8 @@ describe("Admin API - Reconfiguration Completion -", function() ["X-If-Kong-Transaction-Id"] = kong_transaction_id } }) - assert.res_status(200, res) + body = assert.res_status(200, res) + assert.equals("kong terminated the request", body) end) .has_no_error() end From 073fcff2237ee52a8b8bdaa400e128fbaeae9122 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Hans=20H=C3=BCbner?= Date: Thu, 26 Oct 2023 12:40:54 +0200 Subject: [PATCH 057/249] fix(core): yield before updating globals.CURRENT_TRANSACTION_ID --- kong/runloop/handler.lua | 3 +++ 1 file changed, 3 insertions(+) diff --git a/kong/runloop/handler.lua b/kong/runloop/handler.lua index b22fc739086c..e2759287ed4c 100644 --- a/kong/runloop/handler.lua +++ b/kong/runloop/handler.lua @@ -1005,6 +1005,9 @@ return { end if rebuild_transaction_id then + -- Yield to process any pending invalidations + utils.yield() + log(NOTICE, "configuration processing completed for transaction ID " .. rebuild_transaction_id) global.CURRENT_TRANSACTION_ID = rebuild_transaction_id end From 1771397d5d121891479f30ddfd5e791aa0792158 Mon Sep 17 00:00:00 2001 From: Angel Date: Tue, 10 Oct 2023 16:10:00 -0400 Subject: [PATCH 058/249] feat(db): add example field to meta schema --- kong/db/schema/metaschema.lua | 1 + 1 file changed, 1 insertion(+) diff --git a/kong/db/schema/metaschema.lua b/kong/db/schema/metaschema.lua index 6483aaab5260..cb2c9eafba49 100644 --- a/kong/db/schema/metaschema.lua +++ b/kong/db/schema/metaschema.lua @@ -179,6 +179,7 @@ local field_schema = { { required = { type = "boolean" }, }, { reference = { type = "string" }, }, { description = { type = "string", len_min = 10, len_max = 500}, }, + { examples = { type = "array", elements = { type = "any" } } }, { auto = { type = "boolean" }, }, { unique = { type = "boolean" }, }, { unique_across_ws = { type = "boolean" }, }, From fd413e34b4207f1c591a02d2167d61374094c923 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 6 Nov 2023 13:57:13 +0200 Subject: [PATCH 059/249] chore(deps): bump tj-actions/changed-files from 40.0.0 to 40.1.0 (#11922) Bumps [tj-actions/changed-files](https://github.com/tj-actions/changed-files) from 40.0.0 to 40.1.0. - [Release notes](https://github.com/tj-actions/changed-files/releases) - [Changelog](https://github.com/tj-actions/changed-files/blob/main/HISTORY.md) - [Commits](https://github.com/tj-actions/changed-files/compare/af292f1e845a0377b596972698a8598734eb2796...18c8a4ecebe93d32ed8a88e1d0c098f5f68c221b) --- updated-dependencies: - dependency-name: tj-actions/changed-files dependency-type: direct:production update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/changelog-requirement.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/changelog-requirement.yml b/.github/workflows/changelog-requirement.yml index 38bf78cd69c5..e735d0df2622 100644 --- a/.github/workflows/changelog-requirement.yml +++ b/.github/workflows/changelog-requirement.yml @@ -21,7 +21,7 @@ jobs: - name: Find changelog files id: changelog-list - uses: tj-actions/changed-files@af292f1e845a0377b596972698a8598734eb2796 # v37 + uses: tj-actions/changed-files@18c8a4ecebe93d32ed8a88e1d0c098f5f68c221b # v37 with: files_yaml: | changelogs: From 48664d554ba5dd4a2c549b23ce5a35c3dce2eafb Mon Sep 17 00:00:00 2001 From: Chrono Date: Mon, 6 Nov 2023 19:58:25 +0800 Subject: [PATCH 060/249] refactor(tools): cache lower(unit) for bytes_to_str (#11920) --- kong/tools/string.lua | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/kong/tools/string.lua b/kong/tools/string.lua index 45aa2a4ab6b6..53dfe3d233ba 100644 --- a/kong/tools/string.lua +++ b/kong/tools/string.lua @@ -75,7 +75,9 @@ end -- bytes_to_str(5497558, "G", 3) -- "5.120 GiB" -- function _M.bytes_to_str(bytes, unit, scale) - if not unit or unit == "" or lower(unit) == "b" then + local u = lower(unit or "") + + if u == "" or u == "b" then return fmt("%d", bytes) end @@ -87,15 +89,15 @@ function _M.bytes_to_str(bytes, unit, scale) local fspec = fmt("%%.%df", scale) - if lower(unit) == "k" then + if u == "k" then return fmt(fspec .. " KiB", bytes / 2^10) end - if lower(unit) == "m" then + if u == "m" then return fmt(fspec .. " MiB", bytes / 2^20) end - if lower(unit) == "g" then + if u == "g" then return fmt(fspec .. " GiB", bytes / 2^30) end From d5fa2c54bf718326c865a4a1e8c11a5ccba170bc Mon Sep 17 00:00:00 2001 From: Chrono Date: Mon, 6 Nov 2023 19:59:24 +0800 Subject: [PATCH 061/249] style(tools): optimize calls of string.find (#11918) --- kong/tools/utils.lua | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/kong/tools/utils.lua b/kong/tools/utils.lua index 2bab014e55d5..38e1825ba510 100644 --- a/kong/tools/utils.lua +++ b/kong/tools/utils.lua @@ -430,7 +430,7 @@ end -- @return normalized address (string) + port (number or nil), or alternatively nil+error _M.normalize_ipv4 = function(address) local a,b,c,d,port - if address:find(":") then + if address:find(":", 1, true) then -- has port number a,b,c,d,port = address:match("^(%d%d?%d?)%.(%d%d?%d?)%.(%d%d?%d?)%.(%d%d?%d?):(%d+)$") else @@ -488,7 +488,7 @@ _M.normalize_ipv6 = function(address) if check:sub(-1,-1) == ":" then check = check .. "0" end - if check:find("::") then + if check:find("::", 1, true) then -- expand double colon local _, count = gsub(check, ":", "") local ins = ":" .. string.rep("0:", 8 - count) From 5f34a49edc356b798f25a340522d8efe2c4f5d95 Mon Sep 17 00:00:00 2001 From: Datong Sun Date: Mon, 6 Nov 2023 20:01:12 +0800 Subject: [PATCH 062/249] docs(kong.conf.default): update descriptions for `nginx_http_lua_regex_cache_max_entries` (#11912) Leftover from KAG-719 --- kong.conf.default | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/kong.conf.default b/kong.conf.default index 9bbd8fcb7f94..7d699c4ce1e9 100644 --- a/kong.conf.default +++ b/kong.conf.default @@ -1176,9 +1176,12 @@ # roughly 2 seconds. 
#nginx_http_lua_regex_cache_max_entries = 8192 # Specifies the maximum number of entries allowed - # in the worker process level compiled regex cache. + # in the worker process level PCRE JIT compiled regex cache. # It is recommended to set it to at least (number of regex paths * 2) - # to avoid high CPU usages. + # to avoid high CPU usages if you manually specified `router_flavor` to + # `traditional`. `expressions` and `traditional_compat` router does + # not make use of the PCRE library and their behavior + # is unaffected by this setting. #nginx_http_keepalive_requests = 1000 # Sets the maximum number of client requests that can be served through one # keep-alive connection. After the maximum number of requests are made, From 5d76ce8d8d60ea307247e98da5808ff90154ea24 Mon Sep 17 00:00:00 2001 From: xumin Date: Mon, 30 Oct 2023 15:26:08 +0800 Subject: [PATCH 063/249] fix(core): definition of cookie name validate Fix #11860 --- .../unreleased/kong/cookie-name-validator.yml | 3 ++ kong/db/schema/entities/upstreams.lua | 2 +- kong/db/schema/typedefs.lua | 8 ++++ kong/tools/utils.lua | 38 ++++++++++++------- spec/01-unit/05-utils_spec.lua | 4 +- .../04-admin_api/07-upstreams_routes_spec.lua | 4 +- 6 files changed, 40 insertions(+), 19 deletions(-) create mode 100644 changelog/unreleased/kong/cookie-name-validator.yml diff --git a/changelog/unreleased/kong/cookie-name-validator.yml b/changelog/unreleased/kong/cookie-name-validator.yml new file mode 100644 index 000000000000..5451b28531ad --- /dev/null +++ b/changelog/unreleased/kong/cookie-name-validator.yml @@ -0,0 +1,3 @@ +message: Now cookie names are validated against RFC 6265, which allows more characters than the previous validation. +type: bugfix +scope: Core diff --git a/kong/db/schema/entities/upstreams.lua b/kong/db/schema/entities/upstreams.lua index eed59c788f75..6d3c963411c3 100644 --- a/kong/db/schema/entities/upstreams.lua +++ b/kong/db/schema/entities/upstreams.lua @@ -189,7 +189,7 @@ local r = { { hash_fallback = hash_on }, { hash_on_header = typedefs.header_name, }, { hash_fallback_header = typedefs.header_name, }, - { hash_on_cookie = { description = "The cookie name to take the value from as hash input.", type = "string", custom_validator = utils.validate_cookie_name }, }, + { hash_on_cookie = typedefs.cookie_name{ description = "The cookie name to take the value from as hash input."}, }, { hash_on_cookie_path = typedefs.path{ default = "/", }, }, { hash_on_query_arg = simple_param }, { hash_fallback_query_arg = simple_param }, diff --git a/kong/db/schema/typedefs.lua b/kong/db/schema/typedefs.lua index 91c7c7100937..3838b10d10ba 100644 --- a/kong/db/schema/typedefs.lua +++ b/kong/db/schema/typedefs.lua @@ -331,6 +331,14 @@ typedefs.url = Schema.define { description = "A string representing a URL, such as https://example.com/path/to/resource?q=search." } + +typedefs.cookie_name = Schema.define { + type = "string", + custom_validator = utils.validate_cookie_name, + description = "A string representing an HTTP token defined by RFC 2616." +} + +-- should we also allow all http token for this? 
typedefs.header_name = Schema.define { type = "string", custom_validator = utils.validate_header_name, diff --git a/kong/tools/utils.lua b/kong/tools/utils.lua index 38e1825ba510..c823c3999521 100644 --- a/kong/tools/utils.lua +++ b/kong/tools/utils.lua @@ -601,6 +601,29 @@ _M.format_host = function(p1, p2) end end +local CONTROLS = [[\x00-\x1F\x7F]] +local HIGHBIT = [[\x80-\xFF]] +local SEPARATORS = [==[ \t()<>@,;:\\\"\/?={}\[\]]==] +local HTTP_TOKEN_FORBID_PATTERN = "[".. CONTROLS .. HIGHBIT .. SEPARATORS .. "]" + +--- Validates a token defined by RFC 2616. +-- @param token (string) the string to verify +-- @return the valid token, or `nil+error` +function _M.validate_http_token(token) + if token == nil or token == "" then + return nil, "no token provided" + end + + if not re_match(token, HTTP_TOKEN_FORBID_PATTERN, "jo") then + return token + end + + return nil, "contains one or more invalid characters. ASCII " .. + "control characters (0-31;127), space, tab and the " .. + "characters ()<>@,;:\\\"/?={}[] are not allowed." +end + +-- should we also use validate_http_token for this? --- Validates a header name. -- Checks characters used in a header name to be valid, as per nginx only -- a-z, A-Z, 0-9 and '-' are allowed. @@ -620,22 +643,9 @@ _M.validate_header_name = function(name) end --- Validates a cookie name. --- Checks characters used in a cookie name to be valid --- a-z, A-Z, 0-9, '_' and '-' are allowed. -- @param name (string) the cookie name to verify -- @return the valid cookie name, or `nil+error` -_M.validate_cookie_name = function(name) - if name == nil or name == "" then - return nil, "no cookie name provided" - end - - if re_match(name, "^[a-zA-Z0-9-_]+$", "jo") then - return name - end - - return nil, "bad cookie name '" .. name .. - "', allowed characters are A-Z, a-z, 0-9, '_', and '-'" -end +_M.validate_cookie_name = _M.validate_http_token local validate_labels diff --git a/spec/01-unit/05-utils_spec.lua b/spec/01-unit/05-utils_spec.lua index 12764e673681..58af472e50eb 100644 --- a/spec/01-unit/05-utils_spec.lua +++ b/spec/01-unit/05-utils_spec.lua @@ -656,12 +656,12 @@ describe("Utils", function() end end) it("validate_cookie_name() validates cookie names", function() - local header_chars = [[_-0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz]] + local cookie_chars = [[~`|!#$%&'*+-._-^0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz]] for i = 1, 255 do local c = string.char(i) - if string.find(header_chars, c, nil, true) then + if string.find(cookie_chars, c, nil, true) then assert(utils.validate_cookie_name(c) == c, "ascii character '" .. c .. "' (" .. i .. ") should have been allowed") else diff --git a/spec/02-integration/04-admin_api/07-upstreams_routes_spec.lua b/spec/02-integration/04-admin_api/07-upstreams_routes_spec.lua index 025435994d32..a7d5121bf329 100644 --- a/spec/02-integration/04-admin_api/07-upstreams_routes_spec.lua +++ b/spec/02-integration/04-admin_api/07-upstreams_routes_spec.lua @@ -404,7 +404,7 @@ describe("Admin API: #" .. strategy, function() }) body = assert.res_status(400, res) local json = cjson.decode(body) - assert.equals("bad cookie name 'not a <> valid <> cookie name', allowed characters are A-Z, a-z, 0-9, '_', and '-'", + assert.equals([[must not contain invalid characters: ASCII control characters (0-31;127), space, tab and the following characters: ()<>@,;:"/?={}[]. Please refer to RFC 2616]], json.fields.hash_on_cookie) -- Invalid cookie path @@ -437,7 +437,7 @@ describe("Admin API: #" .. 
strategy, function() }) body = assert.res_status(400, res) local json = cjson.decode(body) - assert.equals("bad cookie name 'not a <> valid <> cookie name', allowed characters are A-Z, a-z, 0-9, '_', and '-'", + assert.equals([[must not contain invalid characters: ASCII control characters (0-31;127), space, tab and the following characters: ()<>@,;:"/?={}[]. Please refer to RFC 2616]], json.fields.hash_on_cookie) -- Invalid cookie path in hash fallback From 04f0b3e583f4b41de52f18d30e50293d4a561c84 Mon Sep 17 00:00:00 2001 From: xumin Date: Wed, 1 Nov 2023 13:39:24 +0800 Subject: [PATCH 064/249] apply suggestion --- spec/02-integration/04-admin_api/07-upstreams_routes_spec.lua | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/spec/02-integration/04-admin_api/07-upstreams_routes_spec.lua b/spec/02-integration/04-admin_api/07-upstreams_routes_spec.lua index a7d5121bf329..69f7bb52ea74 100644 --- a/spec/02-integration/04-admin_api/07-upstreams_routes_spec.lua +++ b/spec/02-integration/04-admin_api/07-upstreams_routes_spec.lua @@ -404,7 +404,7 @@ describe("Admin API: #" .. strategy, function() }) body = assert.res_status(400, res) local json = cjson.decode(body) - assert.equals([[must not contain invalid characters: ASCII control characters (0-31;127), space, tab and the following characters: ()<>@,;:"/?={}[]. Please refer to RFC 2616]], + assert.equals([[contains one or more invalid characters. ASCII control characters (0-31;127), space, tab and the characters ()<>@,;:\"/?={}[] are not allowed.]], json.fields.hash_on_cookie) -- Invalid cookie path @@ -437,7 +437,7 @@ describe("Admin API: #" .. strategy, function() }) body = assert.res_status(400, res) local json = cjson.decode(body) - assert.equals([[must not contain invalid characters: ASCII control characters (0-31;127), space, tab and the following characters: ()<>@,;:"/?={}[]. Please refer to RFC 2616]], + assert.equals([[contains one or more invalid characters. ASCII control characters (0-31;127), space, tab and the characters ()<>@,;:\"/?={}[] are not allowed.]], json.fields.hash_on_cookie) -- Invalid cookie path in hash fallback From d88dc5a907c632e474eece044911d4aa043f4283 Mon Sep 17 00:00:00 2001 From: Chrono Date: Tue, 7 Nov 2023 10:28:52 +0800 Subject: [PATCH 065/249] Revert "fix(core): yield before updating globals.CURRENT_TRANSACTION_ID" This reverts commit 073fcff2237ee52a8b8bdaa400e128fbaeae9122. --- kong/runloop/handler.lua | 3 --- 1 file changed, 3 deletions(-) diff --git a/kong/runloop/handler.lua b/kong/runloop/handler.lua index e2759287ed4c..b22fc739086c 100644 --- a/kong/runloop/handler.lua +++ b/kong/runloop/handler.lua @@ -1005,9 +1005,6 @@ return { end if rebuild_transaction_id then - -- Yield to process any pending invalidations - utils.yield() - log(NOTICE, "configuration processing completed for transaction ID " .. rebuild_transaction_id) global.CURRENT_TRANSACTION_ID = rebuild_transaction_id end From 6528af4be4b6c89bf3ef56cc2c911cac1adf9554 Mon Sep 17 00:00:00 2001 From: Chrono Date: Tue, 7 Nov 2023 10:28:52 +0800 Subject: [PATCH 066/249] Revert "fix(test): remove external dependency" This reverts commit 00a9f9b0de5cd0d58e0bf300a117a92a901186ca. 
--- .../24-reconfiguration-completion_spec.lua | 19 +++---------------- 1 file changed, 3 insertions(+), 16 deletions(-) diff --git a/spec/02-integration/04-admin_api/24-reconfiguration-completion_spec.lua b/spec/02-integration/04-admin_api/24-reconfiguration-completion_spec.lua index 9f528c4bb46b..c3c70775e3a3 100644 --- a/spec/02-integration/04-admin_api/24-reconfiguration-completion_spec.lua +++ b/spec/02-integration/04-admin_api/24-reconfiguration-completion_spec.lua @@ -10,22 +10,10 @@ describe("Admin API - Reconfiguration Completion -", function() local function run_tests() - local res = admin_client:post("/plugins", { - body = { - name = "request-termination", - config = { - status_code = 200, - body = "kong terminated the request", - } - }, - headers = { ["Content-Type"] = "application/json" }, - }) - assert.res_status(201, res) - - res = admin_client:post("/services", { + local res = admin_client:post("/services", { body = { name = "test-service", - url = "http://127.0.0.1", + url = "http://example.com", }, headers = { ["Content-Type"] = "application/json" }, }) @@ -79,8 +67,7 @@ describe("Admin API - Reconfiguration Completion -", function() ["X-If-Kong-Transaction-Id"] = kong_transaction_id } }) - body = assert.res_status(200, res) - assert.equals("kong terminated the request", body) + assert.res_status(200, res) end) .has_no_error() end From 8cac765ec651427fa0b37bada5a787c57caee034 Mon Sep 17 00:00:00 2001 From: Chrono Date: Tue, 7 Nov 2023 10:28:52 +0800 Subject: [PATCH 067/249] Revert "feat(testing): add reconfiguration completion detection mechanism" This reverts commit 3a7bc1660aae9f4025173dfc7f2fc9be1f98670b. --- .../reconfiguration-completion-detection.yml | 3 - kong/clustering/config_helper.lua | 11 +- kong/clustering/control_plane.lua | 5 - kong/clustering/data_plane.lua | 5 +- kong/db/declarative/import.lua | 7 +- kong/db/strategies/postgres/connector.lua | 8 +- kong/db/strategies/postgres/init.lua | 2 - kong/global.lua | 13 +- kong/runloop/handler.lua | 126 ++++++++------- .../24-reconfiguration-completion_spec.lua | 143 ------------------ 10 files changed, 79 insertions(+), 244 deletions(-) delete mode 100644 changelog/unreleased/reconfiguration-completion-detection.yml delete mode 100644 spec/02-integration/04-admin_api/24-reconfiguration-completion_spec.lua diff --git a/changelog/unreleased/reconfiguration-completion-detection.yml b/changelog/unreleased/reconfiguration-completion-detection.yml deleted file mode 100644 index 4389fd362a78..000000000000 --- a/changelog/unreleased/reconfiguration-completion-detection.yml +++ /dev/null @@ -1,3 +0,0 @@ -message: Provide mechanism to detect completion of reconfiguration on the proxy path -type: feature -scope: Core diff --git a/kong/clustering/config_helper.lua b/kong/clustering/config_helper.lua index 1c0083b82ec9..790f3e72c15d 100644 --- a/kong/clustering/config_helper.lua +++ b/kong/clustering/config_helper.lua @@ -202,12 +202,7 @@ local function fill_empty_hashes(hashes) end end -function _M.update(declarative_config, msg) - - local config_table = msg.config_table - local config_hash = msg.config_hash - local hashes = msg.hashes - +function _M.update(declarative_config, config_table, config_hash, hashes) assert(type(config_table) == "table") if not config_hash then @@ -241,13 +236,11 @@ function _M.update(declarative_config, msg) -- executed by worker 0 local res - res, err = declarative.load_into_cache_with_events(entities, meta, new_hash, hashes, msg.current_transaction_id) + res, err = 
declarative.load_into_cache_with_events(entities, meta, new_hash, hashes) if not res then return nil, err end - ngx_log(ngx.NOTICE, "loaded configuration with transaction ID " .. msg.current_transaction_id) - return true end diff --git a/kong/clustering/control_plane.lua b/kong/clustering/control_plane.lua index 6939d7a78a5f..a2696f9a3eb1 100644 --- a/kong/clustering/control_plane.lua +++ b/kong/clustering/control_plane.lua @@ -11,7 +11,6 @@ local compat = require("kong.clustering.compat") local constants = require("kong.constants") local events = require("kong.clustering.events") local calculate_config_hash = require("kong.clustering.config_helper").calculate_config_hash -local global = require("kong.global") local string = string @@ -116,10 +115,8 @@ function _M:export_deflated_reconfigure_payload() local config_hash, hashes = calculate_config_hash(config_table) - local current_transaction_id = global.get_current_transaction_id() local payload = { type = "reconfigure", - current_transaction_id = current_transaction_id, timestamp = ngx_now(), config_table = config_table, config_hash = config_hash, @@ -146,8 +143,6 @@ function _M:export_deflated_reconfigure_payload() self.current_config_hash = config_hash self.deflated_reconfigure_payload = payload - ngx_log(ngx_NOTICE, "exported configuration with transaction id " .. current_transaction_id) - return payload, nil, config_hash end diff --git a/kong/clustering/data_plane.lua b/kong/clustering/data_plane.lua index 4030b3174b05..d0f0e1e020a9 100644 --- a/kong/clustering/data_plane.lua +++ b/kong/clustering/data_plane.lua @@ -213,7 +213,10 @@ function _M:communicate(premature) msg.timestamp and " with timestamp: " .. msg.timestamp or "", log_suffix) - local pok, res, err = pcall(config_helper.update, self.declarative_config, msg) + local config_table = assert(msg.config_table) + + local pok, res, err = pcall(config_helper.update, self.declarative_config, + config_table, msg.config_hash, msg.hashes) if pok then ping_immediately = true end diff --git a/kong/db/declarative/import.lua b/kong/db/declarative/import.lua index 3c30a31da262..4908e3d6a8e3 100644 --- a/kong/db/declarative/import.lua +++ b/kong/db/declarative/import.lua @@ -507,7 +507,7 @@ do local DECLARATIVE_LOCK_KEY = "declarative:lock" -- make sure no matter which path it exits, we released the lock. 
- load_into_cache_with_events = function(entities, meta, hash, hashes, transaction_id) + load_into_cache_with_events = function(entities, meta, hash, hashes) local kong_shm = ngx.shared.kong local ok, err = kong_shm:add(DECLARATIVE_LOCK_KEY, 0, DECLARATIVE_LOCK_TTL) @@ -522,11 +522,6 @@ do end ok, err = load_into_cache_with_events_no_lock(entities, meta, hash, hashes) - - if ok and transaction_id then - ok, err = kong_shm:set("declarative:current-transaction-id", transaction_id) - end - kong_shm:delete(DECLARATIVE_LOCK_KEY) return ok, err diff --git a/kong/db/strategies/postgres/connector.lua b/kong/db/strategies/postgres/connector.lua index b5b9c257d8fa..fd5e9259066a 100644 --- a/kong/db/strategies/postgres/connector.lua +++ b/kong/db/strategies/postgres/connector.lua @@ -519,11 +519,10 @@ function _mt:query(sql, operation) end local phase = get_phase() - local in_admin_api = phase == "content" and ngx.ctx.KONG_PHASE == ADMIN_API_PHASE if not operation or - not self.config_ro or - in_admin_api + not self.config_ro or + (phase == "content" and ngx.ctx.KONG_PHASE == ADMIN_API_PHASE) then -- admin API requests skips the replica optimization -- to ensure all its results are always strongly consistent @@ -553,9 +552,6 @@ function _mt:query(sql, operation) res, err, partial, num_queries = conn:query(sql) - if in_admin_api and operation == "write" and res and res[1] and res[1]._pg_transaction_id then - kong.response.set_header('X-Kong-Transaction-ID', res[1]._pg_transaction_id) - end -- if err is string then either it is a SQL error -- or it is a socket error, here we abort connections -- that encounter errors instead of reusing them, for diff --git a/kong/db/strategies/postgres/init.lua b/kong/db/strategies/postgres/init.lua index 804f4fb0b34a..74da93465aa6 100644 --- a/kong/db/strategies/postgres/init.lua +++ b/kong/db/strategies/postgres/init.lua @@ -987,8 +987,6 @@ function _M.new(connector, schema, errors) insert(upsert_expressions, ttl_escaped .. " = " .. "EXCLUDED." .. ttl_escaped) end - insert(select_expressions, "pg_current_xact_id() as _pg_transaction_id") - local primary_key_escaped = {} for i, key in ipairs(primary_key) do local primary_key_field = primary_key_fields[key] diff --git a/kong/global.lua b/kong/global.lua index 2c2449b5c64f..cdceaa7f58ef 100644 --- a/kong/global.lua +++ b/kong/global.lua @@ -68,8 +68,7 @@ end local _GLOBAL = { - phases = phase_checker.phases, - CURRENT_TRANSACTION_ID = 0, + phases = phase_checker.phases, } @@ -295,14 +294,4 @@ function _GLOBAL.init_timing() end -function _GLOBAL.get_current_transaction_id() - local rows, err = kong.db.connector:query("select pg_current_xact_id() as _pg_transaction_id") - if not rows then - return nil, "could not query postgres for current transaction id: " .. 
err - else - return tonumber(rows[1]._pg_transaction_id) - end -end - - return _GLOBAL diff --git a/kong/runloop/handler.lua b/kong/runloop/handler.lua index b22fc739086c..250d712f55b9 100644 --- a/kong/runloop/handler.lua +++ b/kong/runloop/handler.lua @@ -13,7 +13,8 @@ local concurrency = require "kong.concurrency" local lrucache = require "resty.lrucache" local ktls = require "resty.kong.tls" local request_id = require "kong.tracing.request_id" -local global = require "kong.global" + + local PluginsIterator = require "kong.runloop.plugins_iterator" @@ -747,8 +748,6 @@ do wasm.set_state(wasm_state) end - global.CURRENT_TRANSACTION_ID = kong_shm:get("declarative:current-transaction-id") or 0 - return true end) -- concurrency.with_coroutine_mutex @@ -766,6 +765,11 @@ do end +local function register_events() + events.register_events(reconfigure_handler) +end + + local balancer_prepare do local function sleep_once_for_balancer_init() @@ -917,7 +921,7 @@ return { return end - events.register_events(reconfigure_handler) + register_events() -- initialize balancers for active healthchecks timer_at(0, function() @@ -963,59 +967,84 @@ return { if strategy ~= "off" then local worker_state_update_frequency = kong.configuration.worker_state_update_frequency or 1 - local function rebuild_timer(premature) + local router_async_opts = { + name = "router", + timeout = 0, + on_timeout = "return_true", + } + + local function rebuild_router_timer(premature) if premature then return end - -- Before rebuiding the internal structures, retrieve the current PostgreSQL transaction ID to make it the - -- current transaction ID after the rebuild has finished. - local rebuild_transaction_id, err = global.get_current_transaction_id() - if not rebuild_transaction_id then - log(ERR, err) + -- Don't wait for the semaphore (timeout = 0) when updating via the + -- timer. + -- If the semaphore is locked, that means that the rebuild is + -- already ongoing. 
+ local ok, err = rebuild_router(router_async_opts) + if not ok then + log(ERR, "could not rebuild router via timer: ", err) end + end - local router_update_status, err = rebuild_router({ - name = "router", - timeout = 0, - on_timeout = "return_true", - }) - if not router_update_status then - log(ERR, "could not rebuild router via timer: ", err) + local _, err = kong.timer:named_every("router-rebuild", + worker_state_update_frequency, + rebuild_router_timer) + if err then + log(ERR, "could not schedule timer to rebuild router: ", err) + end + + local plugins_iterator_async_opts = { + name = "plugins_iterator", + timeout = 0, + on_timeout = "return_true", + } + + local function rebuild_plugins_iterator_timer(premature) + if premature then + return end - local plugins_iterator_update_status, err = rebuild_plugins_iterator({ - name = "plugins_iterator", - timeout = 0, - on_timeout = "return_true", - }) - if not plugins_iterator_update_status then + local _, err = rebuild_plugins_iterator(plugins_iterator_async_opts) + if err then log(ERR, "could not rebuild plugins iterator via timer: ", err) end + end - if wasm.enabled() then - local wasm_update_status, err = rebuild_wasm_state({ - name = "wasm", - timeout = 0, - on_timeout = "return_true", - }) - if not wasm_update_status then + local _, err = kong.timer:named_every("plugins-iterator-rebuild", + worker_state_update_frequency, + rebuild_plugins_iterator_timer) + if err then + log(ERR, "could not schedule timer to rebuild plugins iterator: ", err) + end + + + if wasm.enabled() then + local wasm_async_opts = { + name = "wasm", + timeout = 0, + on_timeout = "return_true", + } + + local function rebuild_wasm_filter_chains_timer(premature) + if premature then + return + end + + local _, err = rebuild_wasm_state(wasm_async_opts) + if err then log(ERR, "could not rebuild wasm filter chains via timer: ", err) end end - if rebuild_transaction_id then - log(NOTICE, "configuration processing completed for transaction ID " .. rebuild_transaction_id) - global.CURRENT_TRANSACTION_ID = rebuild_transaction_id + local _, err = kong.timer:named_every("wasm-filter-chains-rebuild", + worker_state_update_frequency, + rebuild_wasm_filter_chains_timer) + if err then + log(ERR, "could not schedule timer to rebuild wasm filter chains: ", err) end end - - local _, err = kong.timer:named_every("rebuild", - worker_state_update_frequency, - rebuild_timer) - if err then - log(ERR, "could not schedule timer to rebuild: ", err) - end end end, }, @@ -1105,23 +1134,6 @@ return { }, access = { before = function(ctx) - -- If this is a version-conditional request, abort it if this dataplane has not processed at least the - -- specified configuration version yet. 
- local if_kong_transaction_id = kong.request and kong.request.get_header('x-if-kong-transaction-id') - if if_kong_transaction_id then - if_kong_transaction_id = tonumber(if_kong_transaction_id) - if if_kong_transaction_id and if_kong_transaction_id >= global.CURRENT_TRANSACTION_ID then - return kong.response.error( - 503, - "Service Unavailable", - { - ["X-Kong-Reconfiguration-Status"] = "pending", - ["Retry-After"] = tostring(kong.configuration.worker_state_update_frequency or 1), - } - ) - end - end - -- if there is a gRPC service in the context, don't re-execute the pre-access -- phase handler - it has been executed before the internal redirect if ctx.service and (ctx.service.protocol == "grpc" or diff --git a/spec/02-integration/04-admin_api/24-reconfiguration-completion_spec.lua b/spec/02-integration/04-admin_api/24-reconfiguration-completion_spec.lua deleted file mode 100644 index c3c70775e3a3..000000000000 --- a/spec/02-integration/04-admin_api/24-reconfiguration-completion_spec.lua +++ /dev/null @@ -1,143 +0,0 @@ -local helpers = require "spec.helpers" -local cjson = require "cjson" - -describe("Admin API - Reconfiguration Completion -", function() - - local WORKER_STATE_UPDATE_FREQ = 1 - - local admin_client - local proxy_client - - local function run_tests() - - local res = admin_client:post("/services", { - body = { - name = "test-service", - url = "http://example.com", - }, - headers = { ["Content-Type"] = "application/json" }, - }) - local body = assert.res_status(201, res) - local service = cjson.decode(body) - - -- We're running the route setup in `eventually` to cover for the unlikely case that reconfiguration completes - -- between adding the route and requesting the path through the proxy path. - - local next_path do - local path_suffix = 0 - function next_path() - path_suffix = path_suffix + 1 - return "/" .. tostring(path_suffix) - end - end - - local service_path - local kong_transaction_id - - assert.eventually(function() - service_path = next_path() - - res = admin_client:post("/services/" .. service.id .. 
"/routes", { - body = { - paths = { service_path } - }, - headers = { ["Content-Type"] = "application/json" }, - }) - assert.res_status(201, res) - kong_transaction_id = res.headers['x-kong-transaction-id'] - assert.is_string(kong_transaction_id) - - res = proxy_client:get(service_path, - { - headers = { - ["X-If-Kong-Transaction-Id"] = kong_transaction_id - } - }) - assert.res_status(503, res) - assert.equals("pending", res.headers['x-kong-reconfiguration-status']) - local retry_after = tonumber(res.headers['retry-after']) - ngx.sleep(retry_after) - end) - .has_no_error() - - assert.eventually(function() - res = proxy_client:get(service_path, - { - headers = { - ["X-If-Kong-Transaction-Id"] = kong_transaction_id - } - }) - assert.res_status(200, res) - end) - .has_no_error() - end - - describe("#traditional mode -", function() - lazy_setup(function() - helpers.get_db_utils() - assert(helpers.start_kong({ - worker_consistency = "eventual", - worker_state_update_frequency = WORKER_STATE_UPDATE_FREQ, - })) - admin_client = helpers.admin_client() - proxy_client = helpers.proxy_client() - end) - - teardown(function() - if admin_client then - admin_client:close() - end - if proxy_client then - proxy_client:close() - end - helpers.stop_kong() - end) - - it("rejects proxy requests if worker state has not been updated yet", run_tests) - end) - - describe("#hybrid mode -", function() - lazy_setup(function() - helpers.get_db_utils() - - assert(helpers.start_kong({ - role = "control_plane", - database = "postgres", - prefix = "cp", - cluster_cert = "spec/fixtures/kong_clustering.crt", - cluster_cert_key = "spec/fixtures/kong_clustering.key", - lua_ssl_trusted_certificate = "spec/fixtures/kong_clustering.crt", - cluster_listen = "127.0.0.1:9005", - cluster_telemetry_listen = "127.0.0.1:9006", - nginx_conf = "spec/fixtures/custom_nginx.template", - })) - - assert(helpers.start_kong({ - role = "data_plane", - database = "off", - prefix = "dp", - cluster_cert = "spec/fixtures/kong_clustering.crt", - cluster_cert_key = "spec/fixtures/kong_clustering.key", - lua_ssl_trusted_certificate = "spec/fixtures/kong_clustering.crt", - cluster_control_plane = "127.0.0.1:9005", - cluster_telemetry_endpoint = "127.0.0.1:9006", - proxy_listen = "0.0.0.0:9002", - })) - admin_client = helpers.admin_client() - proxy_client = helpers.proxy_client("127.0.0.1", 9002) - end) - - teardown(function() - if admin_client then - admin_client:close() - end - if proxy_client then - proxy_client:close() - end - helpers.stop_kong("dp") - helpers.stop_kong("cp") - end) - - it("rejects proxy requests if worker state has not been updated yet", run_tests) - end) -end) From 5d2c51100de727a582d17f20bbdeae9c2e710b9d Mon Sep 17 00:00:00 2001 From: Chrono Date: Tue, 7 Nov 2023 11:10:28 +0800 Subject: [PATCH 068/249] refactor(pdk): serialize log msg with string.buffer (#11811) Use string.buffer to optimize string operation. Here I simply replace table.insert and table.concat, but not sure the serializers[n]'s effect, so just keep them. 
--- kong/pdk/log.lua | 52 +++++++++++++++++++++++++----------------------- 1 file changed, 27 insertions(+), 25 deletions(-) diff --git a/kong/pdk/log.lua b/kong/pdk/log.lua index a0914e525421..e1cf4892cd8d 100644 --- a/kong/pdk/log.lua +++ b/kong/pdk/log.lua @@ -10,6 +10,7 @@ -- @module kong.log +local buffer = require "string.buffer" local errlog = require "ngx.errlog" local ngx_re = require "ngx.re" local inspect = require "inspect" @@ -137,34 +138,34 @@ end local serializers = { - [1] = function(buf, to_string, ...) - buf[1] = to_string((select(1, ...))) + [1] = function(buf, sep, to_string, ...) + buf:put(to_string((select(1, ...)))) end, - [2] = function(buf, to_string, ...) - buf[1] = to_string((select(1, ...))) - buf[2] = to_string((select(2, ...))) + [2] = function(buf, sep, to_string, ...) + buf:put(to_string((select(1, ...)))):put(sep) + :put(to_string((select(2, ...)))) end, - [3] = function(buf, to_string, ...) - buf[1] = to_string((select(1, ...))) - buf[2] = to_string((select(2, ...))) - buf[3] = to_string((select(3, ...))) + [3] = function(buf, sep, to_string, ...) + buf:put(to_string((select(1, ...)))):put(sep) + :put(to_string((select(2, ...)))):put(sep) + :put(to_string((select(3, ...)))) end, - [4] = function(buf, to_string, ...) - buf[1] = to_string((select(1, ...))) - buf[2] = to_string((select(2, ...))) - buf[3] = to_string((select(3, ...))) - buf[4] = to_string((select(4, ...))) + [4] = function(buf, sep, to_string, ...) + buf:put(to_string((select(1, ...)))):put(sep) + :put(to_string((select(2, ...)))):put(sep) + :put(to_string((select(3, ...)))):put(sep) + :put(to_string((select(4, ...)))) end, - [5] = function(buf, to_string, ...) - buf[1] = to_string((select(1, ...))) - buf[2] = to_string((select(2, ...))) - buf[3] = to_string((select(3, ...))) - buf[4] = to_string((select(4, ...))) - buf[5] = to_string((select(5, ...))) + [5] = function(buf, sep, to_string, ...) + buf:put(to_string((select(1, ...)))):put(sep) + :put(to_string((select(2, ...)))):put(sep) + :put(to_string((select(3, ...)))):put(sep) + :put(to_string((select(4, ...)))):put(sep) + :put(to_string((select(5, ...)))) end, } @@ -282,7 +283,7 @@ local function gen_log_func(lvl_const, imm_buf, to_string, stack_level, sep) to_string = to_string or tostring stack_level = stack_level or 2 - local variadic_buf = {} + local variadic_buf = buffer.new() return function(...) local sys_log_level = nil @@ -320,15 +321,16 @@ local function gen_log_func(lvl_const, imm_buf, to_string, stack_level, sep) end if serializers[n] then - serializers[n](variadic_buf, to_string, ...) + serializers[n](variadic_buf, sep or "" , to_string, ...) else - for i = 1, n do - variadic_buf[i] = to_string((select(i, ...))) + for i = 1, n - 1 do + variadic_buf:put(to_string((select(i, ...)))):put(sep or "") end + variadic_buf:put(to_string((select(n, ...)))) end - local msg = concat(variadic_buf, sep, 1, n) + local msg = variadic_buf:get() for i = 1, imm_buf.n_messages do imm_buf[imm_buf.message_idxs[i]] = msg From 3a0a1f9436e88393117090f079159284b034cbb6 Mon Sep 17 00:00:00 2001 From: Samuele Date: Tue, 7 Nov 2023 09:39:07 +0100 Subject: [PATCH 069/249] fix(rate-limiting): counters accuracy with redis policy & sync_rate (#11859) * fix(rate-limiting): redis async updates When the periodic sync to redis feature is turned on, using the `sync_rate` configuration option, keys are incremented by steps of 2 instead of 1 for requests that arrive after the `sync_rate` interval has expired. 
This happens because after each sync, the key is loaded again from redis and also incremented atomically (see: https://github.com/Kong/kong/pull/10559) however the next call to `increment` also adds 1 to its value, so the key is incremented by 2 every time it's loaded from redis. This fix sets a negative delta for the key when `conf.sync_rate ~= SYNC_RATE_REALTIME` and the key was loaded from redis in order to invalidate the next call to `increment`. Includes a small code refactor --- .../rate-limiting-fix-redis-sync-rate.yml | 3 + kong/plugins/rate-limiting/policies/init.lua | 11 +-- .../23-rate-limiting/02-policies_spec.lua | 90 ++++++++++--------- 3 files changed, 57 insertions(+), 47 deletions(-) create mode 100644 changelog/unreleased/kong/rate-limiting-fix-redis-sync-rate.yml diff --git a/changelog/unreleased/kong/rate-limiting-fix-redis-sync-rate.yml b/changelog/unreleased/kong/rate-limiting-fix-redis-sync-rate.yml new file mode 100644 index 000000000000..959e7263dc6b --- /dev/null +++ b/changelog/unreleased/kong/rate-limiting-fix-redis-sync-rate.yml @@ -0,0 +1,3 @@ +message: "**Rate Limiting**: fix to provide better accuracy in counters when sync_rate is used with the redis policy." +type: bugfix +scope: Plugin diff --git a/kong/plugins/rate-limiting/policies/init.lua b/kong/plugins/rate-limiting/policies/init.lua index 12f9f32983e8..f20a2ea5b4d4 100644 --- a/kong/plugins/rate-limiting/policies/init.lua +++ b/kong/plugins/rate-limiting/policies/init.lua @@ -206,14 +206,9 @@ local function update_local_counters(conf, periods, limits, identifier, value) if limits[period] then local cache_key = get_local_key(conf, identifier, period, period_date) - if cur_delta[cache_key] then - cur_delta[cache_key] = cur_delta[cache_key] + value - else - cur_delta[cache_key] = value - end + cur_delta[cache_key] = (cur_delta[cache_key] or 0) + value end end - end return { @@ -346,7 +341,9 @@ return { if conf.sync_rate ~= SYNC_RATE_REALTIME then cur_usage[cache_key] = current_metric or 0 cur_usage_expire_at[cache_key] = periods[period] + EXPIRATION[period] - cur_delta[cache_key] = 0 + -- The key was just read from Redis using `incr`, which incremented it + -- by 1. Adjust the value to account for the prior increment. + cur_delta[cache_key] = -1 end return current_metric or 0 diff --git a/spec/03-plugins/23-rate-limiting/02-policies_spec.lua b/spec/03-plugins/23-rate-limiting/02-policies_spec.lua index 7ce052080e18..6ee5ef674e71 100644 --- a/spec/03-plugins/23-rate-limiting/02-policies_spec.lua +++ b/spec/03-plugins/23-rate-limiting/02-policies_spec.lua @@ -176,53 +176,63 @@ describe("Plugin: rate-limiting (policies)", function() end) end - for _, sync_rate in ipairs{1, SYNC_RATE_REALTIME} do - describe("redis with sync rate: " .. 
sync_rate, function() - local identifier = uuid() - local conf = { - route_id = uuid(), - service_id = uuid(), - redis_host = helpers.redis_host, - redis_port = helpers.redis_port, - redis_database = 0, - sync_rate = sync_rate, - } - - before_each(function() - local red = require "resty.redis" - local redis = assert(red:new()) - redis:set_timeout(1000) - assert(redis:connect(conf.redis_host, conf.redis_port)) - redis:flushall() - redis:close() - end) - - it("increase & usage", function() - --[[ - Just a simple test: - - increase 1 - - check usage == 1 - - increase 1 - - check usage == 2 - - increase 1 (beyond the limit) - - check usage == 3 - --]] - - local current_timestamp = 1424217600 - local periods = timestamp.get_timestamps(current_timestamp) + for _, sync_rate in ipairs{0.5, SYNC_RATE_REALTIME} do + local current_timestamp = 1424217600 + local periods = timestamp.get_timestamps(current_timestamp) + + for period in pairs(periods) do + describe("redis with sync rate: " .. sync_rate .. " period: " .. period, function() + local identifier = uuid() + local conf = { + route_id = uuid(), + service_id = uuid(), + redis_host = helpers.redis_host, + redis_port = helpers.redis_port, + redis_database = 0, + sync_rate = sync_rate, + } - for period in pairs(periods) do + before_each(function() + local red = require "resty.redis" + local redis = assert(red:new()) + redis:set_timeout(1000) + assert(redis:connect(conf.redis_host, conf.redis_port)) + redis:flushall() + redis:close() + end) + + it("increase & usage", function() + --[[ + Just a simple test: + - increase 1 + - check usage == 1 + - increase 1 + - check usage == 2 + - increase 1 (beyond the limit) + - check usage == 3 + --]] local metric = assert(policies.redis.usage(conf, identifier, period, current_timestamp)) assert.equal(0, metric) for i = 1, 3 do - assert(policies.redis.increment(conf, { [period] = 2 }, identifier, current_timestamp, 1)) - metric = assert(policies.redis.usage(conf, identifier, period, current_timestamp)) - assert.equal(i, metric) + -- "second" keys expire too soon to check the async increment. + -- Let's verify all the other scenarios: + if not (period == "second" and sync_rate ~= SYNC_RATE_REALTIME) then + assert(policies.redis.increment(conf, { [period] = 2 }, identifier, current_timestamp, 1)) + + -- give time to the async increment to happen + if sync_rate ~= SYNC_RATE_REALTIME then + local sleep_time = 1 + (sync_rate > 0 and sync_rate or 0) + ngx.sleep(sleep_time) + end + + metric = assert(policies.redis.usage(conf, identifier, period, current_timestamp)) + assert.equal(i, metric) + end end - end + end) end) - end) + end end end) From 349d36edf5f8cd01cb33baebe03d486dc526627f Mon Sep 17 00:00:00 2001 From: Guilherme Salazar Date: Mon, 23 Oct 2023 10:43:45 -0300 Subject: [PATCH 070/249] refactor(pluginserver): reset instance triggers invalidation Consistently trigger invalidation events. 
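The one-line summary above condenses the flow shown in the diff that follows: when a plugin server instance must be reset, the current worker clears its own bookkeeping and then broadcasts a worker event so every other worker does the same. Below is a simplified sketch of that pattern, not the patch itself; it assumes it runs inside a Kong worker where the `kong` global (worker_events, log) is available, and `running_instances` stands in for the module's real table.

    local running_instances = {}

    local function reset_instances_for_plugin(plugin_name)
      -- drop every cached instance that belongs to the given plugin
      for key, instance in pairs(running_instances) do
        if instance.plugin_name == plugin_name then
          running_instances[key] = nil
        end
      end
    end

    -- each worker subscribes once at startup
    kong.worker_events.register(function(data)
      reset_instances_for_plugin(data.plugin_name)
    end, "plugin_server", "reset_instances")

    -- the worker that notices a dead plugin server clears its own copy and
    -- broadcasts the event so the other workers invalidate theirs too
    local function reset_instance(plugin_name)
      reset_instances_for_plugin(plugin_name)
      local ok, err = kong.worker_events.post("plugin_server", "reset_instances",
                                              { plugin_name = plugin_name })
      if not ok then
        kong.log.err("failed to post plugin_server reset_instances event: ", err)
      end
    end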
--- kong/runloop/plugin_servers/init.lua | 25 +++++++++++++++++-------- kong/runloop/plugin_servers/pb_rpc.lua | 23 ++++++++++++----------- 2 files changed, 29 insertions(+), 19 deletions(-) diff --git a/kong/runloop/plugin_servers/init.lua b/kong/runloop/plugin_servers/init.lua index 429657384bc3..cc4830cd3524 100644 --- a/kong/runloop/plugin_servers/init.lua +++ b/kong/runloop/plugin_servers/init.lua @@ -158,6 +158,7 @@ local exposed_api = { local get_instance_id local reset_instance +local reset_instances_for_plugin local protocol_implementations = { ["MsgPack:1"] = "kong.runloop.plugin_servers.mp_rpc", @@ -204,6 +205,7 @@ function get_instance_id(plugin_name, conf) -- to prevent a potential dead loop when someone failed to release the ID wait_count = wait_count + 1 if wait_count > MAX_WAIT_STEPS then + running_instances[key] = nil return nil, "Could not claim instance_id for " .. plugin_name .. " (key: " .. key .. ")" end instance_info = running_instances[key] @@ -243,6 +245,7 @@ function get_instance_id(plugin_name, conf) end instance_info.id = new_instance_info.id + instance_info.plugin_name = plugin_name instance_info.conf = new_instance_info.conf instance_info.seq = new_instance_info.seq instance_info.Config = new_instance_info.Config @@ -257,11 +260,16 @@ function get_instance_id(plugin_name, conf) return instance_info.id end +function reset_instances_for_plugin(plugin_name) + for k, instance in pairs(running_instances) do + if instance.plugin_name == plugin_name then + running_instances[k] = nil + end + end +end + --- reset_instance: removes an instance from the table. function reset_instance(plugin_name, conf) - local key = type(conf) == "table" and kong.plugin.get_id() or plugin_name - local current_instance = running_instances[key] - -- -- the same plugin (which acts as a plugin server) is shared among -- instances of the plugin; for example, the same plugin can be applied @@ -269,10 +277,11 @@ function reset_instance(plugin_name, conf) -- `reset_instance` is called when (but not only) the plugin server died; -- in such case, all associated instances must be removed, not only the current -- - for k, instance in pairs(running_instances) do - if instance.rpc == current_instance.rpc then - running_instances[k] = nil - end + reset_instances_for_plugin(plugin_name) + + local ok, err = kong.worker_events.post("plugin_server", "reset_instances", { plugin_name = plugin_name }) + if not ok then + kong.log.err("failed to post plugin_server reset_instances event: ", err) end end @@ -390,7 +399,7 @@ function plugin_servers.start() -- in case plugin server restarts, all workers need to update their defs kong.worker_events.register(function (data) - reset_instance(data.plugin_name, data.conf) + reset_instances_for_plugin(data.plugin_name) end, "plugin_server", "reset_instances") end diff --git a/kong/runloop/plugin_servers/pb_rpc.lua b/kong/runloop/plugin_servers/pb_rpc.lua index aa170ccbd1b2..c93fe9a23813 100644 --- a/kong/runloop/plugin_servers/pb_rpc.lua +++ b/kong/runloop/plugin_servers/pb_rpc.lua @@ -399,19 +399,20 @@ function Rpc:handle_event(plugin_name, conf, phase) end if not res or res == "" then - local ok, err2 = kong.worker_events.post("plugin_server", "reset_instances", - { plugin_name = plugin_name, conf = conf }) - if not ok then - kong.log.err("failed to post plugin_server reset_instances event: ", err2) - end + if err then + local err_lowered = err and err:lower() or "" + + kong.log.err(err_lowered) - local err_lowered = err and err:lower() or "" - if str_find(err_lowered, 
"no plugin instance") - or str_find(err_lowered, "closed") then - kong.log.warn(err) - return self:handle_event(plugin_name, conf, phase) + if err_lowered == "not ready" then + self.reset_instance(plugin_name, conf) + end + if str_find(err_lowered, "no plugin instance") + or str_find(err_lowered, "closed") then + self.reset_instance(plugin_name, conf) + return self:handle_event(plugin_name, conf, phase) + end end - kong.log.err(err) end end From 9ca82ddb46f5b766c9df1982444697f0b3c0b496 Mon Sep 17 00:00:00 2001 From: Guilherme Salazar Date: Mon, 23 Oct 2023 10:44:31 -0300 Subject: [PATCH 071/249] fix(plugin-servers): harden seq number generation Also, `get_instance_id` uses plugin cache key to fetch instance id. --- kong/runloop/plugin_servers/init.lua | 4 +--- kong/runloop/plugin_servers/pb_rpc.lua | 3 +++ kong/runloop/plugins_iterator.lua | 10 +++++++--- 3 files changed, 11 insertions(+), 6 deletions(-) diff --git a/kong/runloop/plugin_servers/init.lua b/kong/runloop/plugin_servers/init.lua index cc4830cd3524..c78913f4cf8b 100644 --- a/kong/runloop/plugin_servers/init.lua +++ b/kong/runloop/plugin_servers/init.lua @@ -213,7 +213,7 @@ function get_instance_id(plugin_name, conf) if instance_info and instance_info.id - and instance_info.seq == conf.__seq__ + and instance_info.conf and instance_info.conf.__key__ == key then -- exact match, return it return instance_info.id @@ -224,7 +224,6 @@ function get_instance_id(plugin_name, conf) -- we're the first, put something to claim instance_info = { conf = conf, - seq = conf.__seq__, } running_instances[key] = instance_info else @@ -247,7 +246,6 @@ function get_instance_id(plugin_name, conf) instance_info.id = new_instance_info.id instance_info.plugin_name = plugin_name instance_info.conf = new_instance_info.conf - instance_info.seq = new_instance_info.seq instance_info.Config = new_instance_info.Config instance_info.rpc = new_instance_info.rpc diff --git a/kong/runloop/plugin_servers/pb_rpc.lua b/kong/runloop/plugin_servers/pb_rpc.lua index c93fe9a23813..dc2d15393e21 100644 --- a/kong/runloop/plugin_servers/pb_rpc.lua +++ b/kong/runloop/plugin_servers/pb_rpc.lua @@ -371,6 +371,9 @@ function Rpc:call_start_instance(plugin_name, conf) return nil, err end + kong.log.debug("started plugin server: seq ", conf.__seq__, ", worker ", ngx.worker.id(), ", instance id ", + status.instance_status.instance_id) + return { id = status.instance_status.instance_id, conf = conf, diff --git a/kong/runloop/plugins_iterator.lua b/kong/runloop/plugins_iterator.lua index a2caffa4f0f4..515d14a947eb 100644 --- a/kong/runloop/plugins_iterator.lua +++ b/kong/runloop/plugins_iterator.lua @@ -61,7 +61,6 @@ do end -local NEXT_SEQ = 0 local PLUGINS_NS = "plugins." .. 
subsystem local ENABLED_PLUGINS local LOADED_PLUGINS @@ -170,8 +169,13 @@ local function get_plugin_config(plugin, name, ws_id) -- TODO: deprecate usage of __key__ as id of plugin if not cfg.__key__ then cfg.__key__ = key - cfg.__seq__ = NEXT_SEQ - NEXT_SEQ = NEXT_SEQ + 1 + -- generate a unique sequence across workers + -- with a seq 0, plugin server generates an unused random instance id + local next_seq, err = ngx.shared.kong:incr("plugins_iterator:__seq__", 1, 0, 0) + if err then + next_seq = 0 + end + cfg.__seq__ = next_seq end return cfg From d573911c141eb655cd80ae4857b1101ad2d83bf8 Mon Sep 17 00:00:00 2001 From: Michael Martin Date: Tue, 7 Nov 2023 03:17:32 -0600 Subject: [PATCH 072/249] fix(conf_loader): adjust Wasm shm_kv nginx.conf prefix (#11919) --- changelog/unreleased/kong/wasm-injected-shm-kv.yml | 6 ++++++ kong.conf.default | 2 +- kong/conf_loader/init.lua | 4 ++-- kong/templates/nginx.lua | 2 +- spec/01-unit/04-prefix_handler_spec.lua | 4 ++-- spec/fixtures/custom_nginx.template | 2 +- 6 files changed, 13 insertions(+), 7 deletions(-) create mode 100644 changelog/unreleased/kong/wasm-injected-shm-kv.yml diff --git a/changelog/unreleased/kong/wasm-injected-shm-kv.yml b/changelog/unreleased/kong/wasm-injected-shm-kv.yml new file mode 100644 index 000000000000..0a5c72dfc6f9 --- /dev/null +++ b/changelog/unreleased/kong/wasm-injected-shm-kv.yml @@ -0,0 +1,6 @@ +message: > + **BREAKING:** To avoid ambiguity with other Wasm-related nginx.conf directives, + the prefix for Wasm `shm_kv` nginx.conf directives was changed from + `nginx_wasm_shm_` to `nginx_wasm_shm_kv_` +type: breaking_change +scope: Core diff --git a/kong.conf.default b/kong.conf.default index 7d699c4ce1e9..4b673ba0c773 100644 --- a/kong.conf.default +++ b/kong.conf.default @@ -2054,7 +2054,7 @@ # The following namespaces are supported: # # - `nginx_wasm_`: Injects `` into the `wasm {}` block. -# - `nginx_wasm_shm_`: Injects `shm_kv ` into the `wasm {}` block, +# - `nginx_wasm_shm_kv_`: Injects `shm_kv ` into the `wasm {}` block, # allowing operators to define custom shared memory zones which are usable by # the `get_shared_data`/`set_shared_data` Proxy-Wasm SDK functions. 
# - `nginx_wasm_wasmtime_`: Injects `flag ` into the `wasmtime {}` diff --git a/kong/conf_loader/init.lua b/kong/conf_loader/init.lua index 69a92c3d4af2..9b04ed7a9fe2 100644 --- a/kong/conf_loader/init.lua +++ b/kong/conf_loader/init.lua @@ -249,8 +249,8 @@ local DYNAMIC_KEY_NAMESPACES = { ignore = EMPTY, }, { - injected_conf_name = "nginx_wasm_main_shm_directives", - prefix = "nginx_wasm_shm_", + injected_conf_name = "nginx_wasm_main_shm_kv_directives", + prefix = "nginx_wasm_shm_kv_", ignore = EMPTY, }, { diff --git a/kong/templates/nginx.lua b/kong/templates/nginx.lua index d3552a9287d5..d6d01f03b2d9 100644 --- a/kong/templates/nginx.lua +++ b/kong/templates/nginx.lua @@ -22,7 +22,7 @@ events { > if wasm then wasm { -> for _, el in ipairs(nginx_wasm_main_shm_directives) do +> for _, el in ipairs(nginx_wasm_main_shm_kv_directives) do shm_kv $(el.name) $(el.value); > end diff --git a/spec/01-unit/04-prefix_handler_spec.lua b/spec/01-unit/04-prefix_handler_spec.lua index 27b109fba1a0..0337917237a4 100644 --- a/spec/01-unit/04-prefix_handler_spec.lua +++ b/spec/01-unit/04-prefix_handler_spec.lua @@ -847,12 +847,12 @@ describe("NGINX conf compiler", function() assert.matches("wasm {.+socket_connect_timeout 10s;.+}", ngx_cfg({ wasm = true, nginx_wasm_socket_connect_timeout="10s" }, debug)) end) it("injects a shm_kv", function() - assert.matches("wasm {.+shm_kv counters 10m;.+}", ngx_cfg({ wasm = true, nginx_wasm_shm_counters="10m" }, debug)) + assert.matches("wasm {.+shm_kv counters 10m;.+}", ngx_cfg({ wasm = true, nginx_wasm_shm_kv_counters="10m" }, debug)) end) it("injects multiple shm_kvs", function() assert.matches( "wasm {.+shm_kv cache 10m.+shm_kv counters 10m;.+}", - ngx_cfg({ wasm = true, nginx_wasm_shm_cache="10m", nginx_wasm_shm_counters="10m"}, debug) + ngx_cfg({ wasm = true, nginx_wasm_shm_kv_cache="10m", nginx_wasm_shm_kv_counters="10m"}, debug) ) end) it("injects default configurations if wasm=on", function() diff --git a/spec/fixtures/custom_nginx.template b/spec/fixtures/custom_nginx.template index b5df446a7fed..abee4616d9bb 100644 --- a/spec/fixtures/custom_nginx.template +++ b/spec/fixtures/custom_nginx.template @@ -27,7 +27,7 @@ events { > if wasm then wasm { -> for _, el in ipairs(nginx_wasm_main_shm_directives) do +> for _, el in ipairs(nginx_wasm_main_shm_kv_directives) do shm_kv $(el.name) $(el.value); > end From 201b0a9858f4f185f7855ebf7900c52284e00138 Mon Sep 17 00:00:00 2001 From: Aapo Talvensaari Date: Mon, 6 Nov 2023 17:13:02 +0200 Subject: [PATCH 073/249] fix(db): pg store connection called without self ### Summary The PR https://github.com/Kong/kong/pull/11480 introduced a bug that calls `store_connection` without passing `self`. This fixes that. 
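The underlying issue is Lua's dot-versus-colon call distinction: `obj:m(a)` is sugar for `obj.m(obj, a)`, so the original `self.store_connection(nil, operation)` passed `nil` where `self` was expected. A small self-contained sketch of the failure mode, using a stand-in object rather than Kong's real connector:

    local connector = {}

    function connector:store_connection(conn, operation)
      -- with a colon call, `self` is the connector table and `conn`/`operation`
      -- carry the intended arguments
      self.connections = self.connections or {}
      self.connections[operation or "write"] = conn
    end

    -- correct: equivalent to connector.store_connection(connector, nil, "write")
    connector:store_connection(nil, "write")

    -- the pre-fix dot call: `self` ends up nil inside the function, so the
    -- stand-in errors when it indexes `self.connections`
    local ok = pcall(connector.store_connection, nil, "write")
    assert(ok == false)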
Signed-off-by: Aapo Talvensaari --- kong/db/strategies/postgres/connector.lua | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/kong/db/strategies/postgres/connector.lua b/kong/db/strategies/postgres/connector.lua index fd5e9259066a..703a91bb889e 100644 --- a/kong/db/strategies/postgres/connector.lua +++ b/kong/db/strategies/postgres/connector.lua @@ -564,7 +564,7 @@ function _mt:query(sql, operation) -- we cannot cleanup the connection ngx.log(ngx.ERR, "failed to disconnect: ", err) end - self.store_connection(nil, operation) + self:store_connection(nil, operation) elseif is_new_conn then local keepalive_timeout = self:get_keepalive_timeout(operation) From ab111ee4674a27b7946db005915cc7b023c17c18 Mon Sep 17 00:00:00 2001 From: lena-larionova <54370747+lena-larionova@users.noreply.github.com> Date: Mon, 6 Nov 2023 10:14:12 -0800 Subject: [PATCH 074/249] fix(acl-plugin): Move schema descriptions into the right field --- kong/plugins/acl/schema.lua | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/kong/plugins/acl/schema.lua b/kong/plugins/acl/schema.lua index c8fd776ca509..df0afc638edf 100644 --- a/kong/plugins/acl/schema.lua +++ b/kong/plugins/acl/schema.lua @@ -9,8 +9,12 @@ return { { config = { type = "record", fields = { - { allow = { type = "array", elements = { type = "string", description = "Arbitrary group names that are allowed to consume the service or route. One of `config.allow` or `config.deny` must be specified." }, }, }, - { deny = { type = "array", elements = { type = "string", description = "Arbitrary group names that are not allowed to consume the service or route. One of `config.allow` or `config.deny` must be specified." }, }, }, + { allow = { description = "Arbitrary group names that are allowed to consume the service or route. One of `config.allow` or `config.deny` must be specified.", + type = "array", + elements = { type = "string" }, }, }, + { deny = { description = "Arbitrary group names that are not allowed to consume the service or route. One of `config.allow` or `config.deny` must be specified.", + type = "array", + elements = { type = "string" }, }, }, { hide_groups_header = { type = "boolean", required = true, default = false, description = "If enabled (`true`), prevents the `X-Consumer-Groups` header from being sent in the request to the upstream service." }, }, }, } From e5fb023dc1de77e29ed8be4304bfd9b08f1cda92 Mon Sep 17 00:00:00 2001 From: lena-larionova <54370747+lena-larionova@users.noreply.github.com> Date: Fri, 3 Nov 2023 13:19:26 -0700 Subject: [PATCH 075/249] fix(opentelemetry): add missing descriptions to schema --- kong/plugins/opentelemetry/schema.lua | 15 +++++++++++---- 1 file changed, 11 insertions(+), 4 deletions(-) diff --git a/kong/plugins/opentelemetry/schema.lua b/kong/plugins/opentelemetry/schema.lua index e499a20ea7df..a04bedfed920 100644 --- a/kong/plugins/opentelemetry/schema.lua +++ b/kong/plugins/opentelemetry/schema.lua @@ -22,6 +22,7 @@ end local resource_attributes = Schema.define { type = "map", + description = "The attributes specified on this property are added to the OpenTelemetry resource object. Kong follows the OpenTelemetry specification for Semantic Attributes. \nThe following attributes are automatically added to the resource object: \n- `service.name`: The name of the service. This is kong by default. \n- `service.version`: The version of Kong Gateway. \n- service.instance.id: The node id of Kong Gateway. 
\n\nThe default values for the above attributes can be overridden by specifying them in this property. For example, to override the default value of `service.name` to `my-service`, you can specify `{ \"service.name\": \"my-service\" }`.", keys = { type = "string", required = true }, -- TODO: support [string, number, boolean] values = { type = "string", required = true }, @@ -36,7 +37,8 @@ return { type = "record", fields = { { endpoint = typedefs.url { required = true, referenceable = true } }, -- OTLP/HTTP - { headers = { description = "The custom headers to be added in the HTTP request sent to the OTLP server. This setting is useful for adding the authentication headers (token) for the APM backend.", type = "map", + { headers = { description = "The custom headers to be added in the HTTP request sent to the OTLP server. This setting is useful for adding the authentication headers (token) for the APM backend.", + type = "map", keys = typedefs.header_name, values = { type = "string", @@ -50,9 +52,14 @@ return { { connect_timeout = typedefs.timeout { default = 1000 } }, { send_timeout = typedefs.timeout { default = 5000 } }, { read_timeout = typedefs.timeout { default = 5000 } }, - { http_response_header_for_traceid = { type = "string", default = nil }}, - { header_type = { type = "string", required = false, default = "preserve", - one_of = { "preserve", "ignore", "b3", "b3-single", "w3c", "jaeger", "ot", "aws", "gcp" } } }, + { http_response_header_for_traceid = { description = "Specifies a custom header for the `trace_id`. If set, the plugin sets the corresponding header in the response.", + type = "string", + default = nil }}, + { header_type = { description = "All HTTP requests going through the plugin are tagged with a tracing HTTP request. This property codifies what kind of tracing header the plugin expects on incoming requests.", + type = "string", + required = false, + default = "preserve", + one_of = { "preserve", "ignore", "b3", "b3-single", "w3c", "jaeger", "ot", "aws", "gcp" } } }, }, entity_checks = { { custom_entity_check = { From c74cbc72963c9d6ca9916c8f47b617901d8b22de Mon Sep 17 00:00:00 2001 From: lena-larionova <54370747+lena-larionova@users.noreply.github.com> Date: Fri, 3 Nov 2023 13:39:37 -0700 Subject: [PATCH 076/249] fix(otel): shorten description for resource_attributes --- kong/plugins/opentelemetry/schema.lua | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/kong/plugins/opentelemetry/schema.lua b/kong/plugins/opentelemetry/schema.lua index a04bedfed920..afeae44008be 100644 --- a/kong/plugins/opentelemetry/schema.lua +++ b/kong/plugins/opentelemetry/schema.lua @@ -22,7 +22,7 @@ end local resource_attributes = Schema.define { type = "map", - description = "The attributes specified on this property are added to the OpenTelemetry resource object. Kong follows the OpenTelemetry specification for Semantic Attributes. \nThe following attributes are automatically added to the resource object: \n- `service.name`: The name of the service. This is kong by default. \n- `service.version`: The version of Kong Gateway. \n- service.instance.id: The node id of Kong Gateway. \n\nThe default values for the above attributes can be overridden by specifying them in this property. For example, to override the default value of `service.name` to `my-service`, you can specify `{ \"service.name\": \"my-service\" }`.", + description = "Attributes to add to the OpenTelemetry resource object, following the spec for Semantic Attributes. 
\nThe following attributes are automatically added:\n- `service.name`: The name of the service (default: `kong`).\n- `service.version`: The version of Kong Gateway.\n- `service.instance.id`: The node ID of Kong Gateway.\n\nYou can use this property to override default attribute values. For example, to override the default for `service.name`, you can specify `{ \"service.name\": \"my-service\" }`.", keys = { type = "string", required = true }, -- TODO: support [string, number, boolean] values = { type = "string", required = true }, @@ -37,8 +37,7 @@ return { type = "record", fields = { { endpoint = typedefs.url { required = true, referenceable = true } }, -- OTLP/HTTP - { headers = { description = "The custom headers to be added in the HTTP request sent to the OTLP server. This setting is useful for adding the authentication headers (token) for the APM backend.", - type = "map", + { headers = { description = "The custom headers to be added in the HTTP request sent to the OTLP server. This setting is useful for adding the authentication headers (token) for the APM backend.", type = "map", keys = typedefs.header_name, values = { type = "string", From ae5d5ea87f608bf80b1efe7a1aa3a2062fd6b873 Mon Sep 17 00:00:00 2001 From: Chrono Date: Tue, 7 Nov 2023 20:59:28 +0800 Subject: [PATCH 077/249] refactor(pdk): output content with string.buffer (#11937) --- kong/pdk/service/request.lua | 30 ++++++++++++++---------------- 1 file changed, 14 insertions(+), 16 deletions(-) diff --git a/kong/pdk/service/request.lua b/kong/pdk/service/request.lua index 86a2ce7cf06a..7210877f45d6 100644 --- a/kong/pdk/service/request.lua +++ b/kong/pdk/service/request.lua @@ -3,6 +3,7 @@ -- @module kong.service.request local cjson = require "cjson.safe" +local buffer = require "string.buffer" local checks = require "kong.pdk.private.checks" local phase_checker = require "kong.pdk.private.phases" @@ -541,26 +542,23 @@ local function new(self) table_sort(keys) - local out = {} - local i = 1 + local out = buffer.new() for _, k in ipairs(keys) do - out[i] = "--" - out[i + 1] = boundary - out[i + 2] = "\r\n" - out[i + 3] = 'Content-Disposition: form-data; name="' - out[i + 4] = k - out[i + 5] = '"\r\n\r\n' - local v = args[k] - out[i + 6] = v - out[i + 7] = "\r\n" - i = i + 8 + out:put("--") + :put(boundary) + :put("\r\n") + :put('Content-Disposition: form-data; name="') + :put(k) + :put('"\r\n\r\n') + :put(args[k]) + :put("\r\n") end - out[i] = "--" - out[i + 1] = boundary - out[i + 2] = "--\r\n" + out:put("--") + :put(boundary) + :put("--\r\n") - local output = table.concat(out) + local output = out:get() return output, CONTENT_TYPE_FORM_DATA .. "; boundary=" .. boundary end, From f75fec1b1d585a3da0657e5bce90e09dd9d35107 Mon Sep 17 00:00:00 2001 From: lena-larionova <54370747+lena-larionova@users.noreply.github.com> Date: Tue, 7 Nov 2023 05:17:43 -0800 Subject: [PATCH 078/249] docs(kong.conf): add names of referenced caches to mem_cache_size entry (#11680) --- kong.conf.default | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/kong.conf.default b/kong.conf.default index 4b673ba0c773..c904d64a60d6 100644 --- a/kong.conf.default +++ b/kong.conf.default @@ -730,7 +730,9 @@ #mem_cache_size = 128m # Size of each of the two shared memory caches # for traditional mode database entities - # and runtime data. + # and runtime data, `kong_core_cache` and + # `kong_cache`. + # # The accepted units are `k` and `m`, with a minimum # recommended value of a few MBs. 
# From 70c149611bc5c819ca5b83bb4a6d6fc4c7b1a629 Mon Sep 17 00:00:00 2001 From: tzssangglass Date: Wed, 1 Nov 2023 15:10:21 +0800 Subject: [PATCH 079/249] tests(dns): add a test case to cover dns resolution in stream subsystem Signed-off-by: tzssangglass --- spec/02-integration/05-proxy/05-dns_spec.lua | 59 ++++++++++++++++++++ 1 file changed, 59 insertions(+) diff --git a/spec/02-integration/05-proxy/05-dns_spec.lua b/spec/02-integration/05-proxy/05-dns_spec.lua index 720f372d87c1..cb21e58ed92c 100644 --- a/spec/02-integration/05-proxy/05-dns_spec.lua +++ b/spec/02-integration/05-proxy/05-dns_spec.lua @@ -206,5 +206,64 @@ for _, strategy in helpers.each_strategy() do assert.equals(0, assert(tonumber(stdout))) end) end) + + describe("run in stream subsystem #tag", function() + local domain_name = "www.example.test" + local address = "127.0.0.1" + + local fixtures = { + dns_mock = helpers.dns_mock.new() + } + fixtures.dns_mock:A({ + name = domain_name, + address = address, + }) + + lazy_setup(function() + local bp = helpers.get_db_utils(strategy, { + "routes", + "services", + }) + + local tcp_srv = bp.services:insert({ + name = "tcp", + host = domain_name, + port = helpers.mock_upstream_stream_port, + protocol = "tcp", + }) + + bp.routes:insert { + destinations = { + { ip = "0.0.0.0/0", port = 19000 }, + }, + protocols = { + "tcp", + }, + service = tcp_srv, + } + + assert(helpers.start_kong({ + database = strategy, + nginx_conf = "spec/fixtures/custom_nginx.template", + stream_listen = helpers.get_proxy_ip(false) .. ":19000", + log_level = "info", + }, nil, nil, fixtures)) + + end) + + lazy_teardown(function() + helpers.stop_kong() + end) + + it("resolve domain name", function() + local tcp = ngx.socket.tcp() + assert(tcp:connect(helpers.get_proxy_ip(false), 19000)) + local MESSAGE = "echo, ping, pong. echo, ping, pong. echo, ping, pong.\n" + assert(tcp:send(MESSAGE)) + local body = assert(tcp:receive("*a")) + assert.equal(MESSAGE, body) + tcp:close() + end) + end) end) end From 3d35ed072b3ab921b61c89c8b9d2d649c16366c8 Mon Sep 17 00:00:00 2001 From: tzssangglass Date: Thu, 2 Nov 2023 00:26:46 +0800 Subject: [PATCH 080/249] remove tag Signed-off-by: tzssangglass --- spec/02-integration/05-proxy/05-dns_spec.lua | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/spec/02-integration/05-proxy/05-dns_spec.lua b/spec/02-integration/05-proxy/05-dns_spec.lua index cb21e58ed92c..d3ce2d0f266a 100644 --- a/spec/02-integration/05-proxy/05-dns_spec.lua +++ b/spec/02-integration/05-proxy/05-dns_spec.lua @@ -207,7 +207,7 @@ for _, strategy in helpers.each_strategy() do end) end) - describe("run in stream subsystem #tag", function() + describe("run in stream subsystem", function() local domain_name = "www.example.test" local address = "127.0.0.1" From f5ece68fe7ce0d69e9036f3db7b6db50f45ff827 Mon Sep 17 00:00:00 2001 From: Michael Martin Date: Fri, 3 Nov 2023 17:23:18 -0700 Subject: [PATCH 081/249] tests(wasm/clustering): configure data plane node id explicitly This makes the test less complicated and easier to debug on failure. 
--- .../20-wasm/06-clustering_spec.lua | 35 ++++++++++--------- 1 file changed, 19 insertions(+), 16 deletions(-) diff --git a/spec/02-integration/20-wasm/06-clustering_spec.lua b/spec/02-integration/20-wasm/06-clustering_spec.lua index a139a68d1fc3..f0573a1bbf29 100644 --- a/spec/02-integration/20-wasm/06-clustering_spec.lua +++ b/spec/02-integration/20-wasm/06-clustering_spec.lua @@ -11,19 +11,8 @@ local FILTER_SRC = "spec/fixtures/proxy_wasm_filters/build/response_transformer. local json = cjson.encode local file = helpers.file -local function get_node_id(prefix) - local data = helpers.wait_for_file_contents(prefix .. "/kong.id") - data = data:gsub("%s*(.-)%s*", "%1") - assert(utils.is_valid_uuid(data), "invalid kong node ID found in " .. prefix) - return data -end - - -local function expect_status(prefix, exp) - local id = get_node_id(prefix) - local msg = "waiting for clustering sync status to equal" - .. " '" .. exp .. "' for data plane" +local function expect_status(id, exp) assert .eventually(function() local cp_client = helpers.admin_client() @@ -69,7 +58,8 @@ local function expect_status(prefix, exp) return true end) - .is_truthy(msg) + .is_truthy("waiting for clustering sync status to equal " + .. "'filter_set_incompatible' for data plane") end local function new_wasm_filter_directory() @@ -89,6 +79,9 @@ describe("#wasm - hybrid mode #postgres", function() local dp_prefix = "dp" lazy_setup(function() + helpers.clean_prefix(cp_prefix) + helpers.clean_prefix(dp_prefix) + local _, db = helpers.get_db_utils("postgres", { "services", "routes", @@ -129,9 +122,11 @@ describe("#wasm - hybrid mode #postgres", function() describe("[happy path]", function() local client local dp_filter_path + local node_id lazy_setup(function() dp_filter_path = new_wasm_filter_directory() + node_id = utils.uuid() assert(helpers.start_kong({ role = "data_plane", @@ -144,6 +139,7 @@ describe("#wasm - hybrid mode #postgres", function() nginx_conf = "spec/fixtures/custom_nginx.template", wasm = true, wasm_filters_path = dp_filter_path, + node_id = node_id, })) client = helpers.proxy_client() @@ -271,13 +267,16 @@ describe("#wasm - hybrid mode #postgres", function() end) .is_truthy("wasm filter has been removed from the data plane") - expect_status(dp_prefix, STATUS.NORMAL) + expect_status(node_id, STATUS.NORMAL) end) end) describe("data planes with wasm disabled", function() + local node_id + lazy_setup(function() helpers.clean_logfile(cp_errlog) + node_id = utils.uuid() assert(helpers.start_kong({ role = "data_plane", @@ -289,6 +288,7 @@ describe("#wasm - hybrid mode #postgres", function() admin_listen = "off", nginx_conf = "spec/fixtures/custom_nginx.template", wasm = "off", + node_id = node_id, })) end) @@ -302,16 +302,18 @@ describe("#wasm - hybrid mode #postgres", function() [[unable to send updated configuration to data plane: data plane is missing one or more wasm filters]], true, 5) - expect_status(dp_prefix, STATUS.FILTER_SET_INCOMPATIBLE) + expect_status(node_id, STATUS.FILTER_SET_INCOMPATIBLE) end) end) describe("data planes missing one or more wasm filter", function() local tmp_dir + local node_id lazy_setup(function() helpers.clean_logfile(cp_errlog) tmp_dir = helpers.make_temp_dir() + node_id = utils.uuid() assert(helpers.start_kong({ role = "data_plane", @@ -324,6 +326,7 @@ describe("#wasm - hybrid mode #postgres", function() nginx_conf = "spec/fixtures/custom_nginx.template", wasm = true, wasm_filters_path = tmp_dir, + node_id = node_id, })) end) @@ -338,7 +341,7 @@ describe("#wasm - hybrid 
mode #postgres", function() [[unable to send updated configuration to data plane: data plane is missing one or more wasm filters]], true, 5) - expect_status(dp_prefix, STATUS.FILTER_SET_INCOMPATIBLE) + expect_status(node_id, STATUS.FILTER_SET_INCOMPATIBLE) end) end) end) From 083ab25d26acddcc5632305d698c854d67951859 Mon Sep 17 00:00:00 2001 From: Michael Martin Date: Fri, 3 Nov 2023 17:25:06 -0700 Subject: [PATCH 082/249] fix(clustering): ensure data plane config hash is never nil The previous logic defaulted the config_hash to nil when it was detected to be an empty string. This can cause update_sync_status() to fail, because config_hash is a required attribute: > 2023/11/03 17:13:30 [debug] 4052224#0: *150 [lua] connector.lua:560: execute(): SQL query throw error: ERROR: null value in column "config_hash" of relation "clustering_data_planes" violates not-null constraint > Failing row contains (4fb29006-8db1-48bb-b68c-34b582e1d91a, soup, 127.0.0.1, 2023-11-04 00:13:30+00, null, 2023-11-18 00:13:30.799+00, 3.6.0, filter_set_incompatible, 2023-11-04 00:13:30+00, {})., close connection > 2023/11/03 17:13:30 [notice] 4052224#0: *150 [lua] init.lua:275: upsert(): ERROR: null value in column "config_hash" of relation "clustering_data_planes" violates not-null constraint This change addresses the problem from two angles: 1. when empty, config_hash is set to the default DECLARATIVE_EMPTY_CONFIG_HASH constant instead of nil 2. an additional guard was added to the dp reader thread, which checks the length of ping frame data and returns an error if it is not a proper config hash --- .../kong/clustering-empty-data-plane-hash-fix.yml | 3 +++ kong/clustering/control_plane.lua | 8 +++++++- kong/clustering/data_plane.lua | 2 +- 3 files changed, 11 insertions(+), 2 deletions(-) create mode 100644 changelog/unreleased/kong/clustering-empty-data-plane-hash-fix.yml diff --git a/changelog/unreleased/kong/clustering-empty-data-plane-hash-fix.yml b/changelog/unreleased/kong/clustering-empty-data-plane-hash-fix.yml new file mode 100644 index 000000000000..1c405ecd53fa --- /dev/null +++ b/changelog/unreleased/kong/clustering-empty-data-plane-hash-fix.yml @@ -0,0 +1,3 @@ +message: Fix a bug causing data-plane status updates to fail when an empty PING frame is received from a data-plane +type: bugfix +scope: Clustering diff --git a/kong/clustering/control_plane.lua b/kong/clustering/control_plane.lua index a2696f9a3eb1..f4395979716b 100644 --- a/kong/clustering/control_plane.lua +++ b/kong/clustering/control_plane.lua @@ -229,7 +229,9 @@ function _M:handle_cp_websocket() local ok ok, err = kong.db.clustering_data_planes:upsert({ id = dp_id, }, { last_seen = last_seen, - config_hash = config_hash ~= "" and config_hash or nil, + config_hash = config_hash ~= "" + and config_hash + or DECLARATIVE_EMPTY_CONFIG_HASH, hostname = dp_hostname, ip = dp_ip, version = dp_version, @@ -336,6 +338,10 @@ function _M:handle_cp_websocket() if not data then return nil, "did not receive ping frame from data plane" + + elseif #data ~= 32 then + return nil, "received a ping frame from the data plane with an invalid" + .. " hash: '" .. tostring(data) .. 
"'" end -- dps only send pings diff --git a/kong/clustering/data_plane.lua b/kong/clustering/data_plane.lua index d0f0e1e020a9..93d7e8ef60eb 100644 --- a/kong/clustering/data_plane.lua +++ b/kong/clustering/data_plane.lua @@ -91,7 +91,7 @@ local function send_ping(c, log_suffix) local hash = declarative.get_current_hash() - if hash == true then + if hash == "" or type(hash) ~= "string" then hash = DECLARATIVE_EMPTY_CONFIG_HASH end From 6ce12628e05f4aa3e5c90ab518729fa8825191d2 Mon Sep 17 00:00:00 2001 From: Chrono Date: Wed, 8 Nov 2023 14:36:39 +0800 Subject: [PATCH 083/249] fix(router): http headers value match should be case sensitive in `expressions` flavor (#11905) `traditional_compatible` flavor remains case insensitive to stay compatible with `traditional` flavor. This change allow `expressions` route authors to pick whether they want case sensitive or insensitive matches. KAG-2905 --------- Co-authored-by: Datong Sun --- .../expression_http_headers_sensitive.yml | 6 + kong/router/atc.lua | 10 +- kong/router/compat.lua | 2 +- spec/01-unit/08-router_spec.lua | 175 +++++++++++++++++- 4 files changed, 182 insertions(+), 11 deletions(-) create mode 100644 changelog/unreleased/kong/expression_http_headers_sensitive.yml diff --git a/changelog/unreleased/kong/expression_http_headers_sensitive.yml b/changelog/unreleased/kong/expression_http_headers_sensitive.yml new file mode 100644 index 000000000000..5d3bb6243275 --- /dev/null +++ b/changelog/unreleased/kong/expression_http_headers_sensitive.yml @@ -0,0 +1,6 @@ +message: | + Header value matching (`http.headers.*`) in `expressions` router flavor are now case sensitive. + This change does not affect on `traditional_compatible` mode + where header value match are always performed ignoring the case. +type: bugfix +scope: Core diff --git a/kong/router/atc.lua b/kong/router/atc.lua index 533ae5251207..17f9f48752b1 100644 --- a/kong/router/atc.lua +++ b/kong/router/atc.lua @@ -467,14 +467,14 @@ function _M:select(req_method, req_uri, req_host, req_scheme, local v = req_headers[h] if type(v) == "string" then - local res, err = c:add_value(field, v:lower()) + local res, err = c:add_value(field, v) if not res then return nil, err end elseif type(v) == "table" then for _, v in ipairs(v) do - local res, err = c:add_value(field, v:lower()) + local res, err = c:add_value(field, v) if not res then return nil, err end @@ -580,14 +580,8 @@ do local name = replace_dashes_lower(name) if type(value) == "table" then - for i, v in ipairs(value) do - value[i] = v:lower() - end tb_sort(value) value = tb_concat(value, ", ") - - else - value = value:lower() end str_buf:putf("|%s=%s", name, value) diff --git a/kong/router/compat.lua b/kong/router/compat.lua index 531cd8b1fa80..86864dfce514 100644 --- a/kong/router/compat.lua +++ b/kong/router/compat.lua @@ -252,7 +252,7 @@ local function get_expression(route) single_header_buf:reset():put("(") for i, value in ipairs(v) do - local name = "any(http.headers." .. replace_dashes_lower(h) .. ")" + local name = "any(lower(http.headers." .. replace_dashes_lower(h) .. 
"))" local op = OP_EQUAL -- value starts with "~*" diff --git a/spec/01-unit/08-router_spec.lua b/spec/01-unit/08-router_spec.lua index 114ff31fbe29..4ab4539d48ff 100644 --- a/spec/01-unit/08-router_spec.lua +++ b/spec/01-unit/08-router_spec.lua @@ -2249,7 +2249,32 @@ for _, flavor in ipairs({ "traditional", "traditional_compatible", "expressions" assert(new_router(use_case)) end) end) - end + + describe("match http.headers.*", function() + local use_case + local get_expression = atc_compat.get_expression + + before_each(function() + use_case = { + { + service = service, + route = { + id = "e8fb37f1-102d-461e-9c51-6608a6bb8101", + methods = { "GET" }, + }, + }, + } + end) + + it("should always add lower()", function() + use_case[1].route.headers = { test = { "~*Quote" }, } + + assert.equal([[(http.method == r#"GET"#) && (any(lower(http.headers.test)) ~ r#"quote"#)]], + get_expression(use_case[1].route)) + assert(new_router(use_case)) + end) + end) + end -- if flavor ~= "traditional" describe("normalization stopgap measurements", function() local use_case, router @@ -4890,6 +4915,65 @@ for _, flavor in ipairs({ "traditional", "traditional_compatible" }) do end) end +do + local flavor = "traditional_compatible" + + describe("Router (flavor = " .. flavor .. ")", function() + reload_router(flavor) + + local use_case, router + + lazy_setup(function() + use_case = { + { + service = service, + route = { + id = "e8fb37f1-102d-461e-9c51-6608a6bb8101", + paths = { + "/foo", + }, + headers = { + test1 = { "Quote" }, + }, + }, + }, + } + end) + + it("[cache hit should be case sensitive]", function() + router = assert(new_router(use_case)) + + local ctx = {} + local _ngx = mock_ngx("GET", "/foo", { test1 = "QUOTE", }) + router._set_ngx(_ngx) + + -- first match, case insensitive + local match_t = router:exec(ctx) + assert.truthy(match_t) + assert.same(use_case[1].route, match_t.route) + assert.falsy(ctx.route_match_cached) + + -- cache hit + local match_t = router:exec(ctx) + assert.truthy(match_t) + assert.same(use_case[1].route, match_t.route) + assert.same(ctx.route_match_cached, "pos") + + local ctx = {} + local _ngx = mock_ngx("GET", "/foo", { test1 = "QuoTe", }) + router._set_ngx(_ngx) + + -- case insensitive match + local match_t = router:exec(ctx) + assert.truthy(match_t) + assert.same(use_case[1].route, match_t.route) + + -- cache miss, case sensitive + assert.falsy(ctx.route_match_cached) + end) + end) +end -- local flavor = "traditional_compatible" + do local flavor = "expressions" @@ -5063,5 +5147,92 @@ do end) end) -end + + describe("Router (flavor = " .. flavor .. 
") [http]", function() + reload_router(flavor) + + local use_case, router + + lazy_setup(function() + use_case = { + { + service = service, + route = { + id = "e8fb37f1-102d-461e-9c51-6608a6bb8101", + expression = [[http.path == "/foo/bar" && http.headers.test1 == "Quote"]], + priority = 100, + }, + }, + { + service = service, + route = { + id = "e8fb37f1-102d-461e-9c51-6608a6bb8102", + expression = [[http.path == "/foo/bar" && lower(http.headers.test2) == "quote"]], + priority = 100, + }, + }, + } + end) + + it("select() should match with case sensitivity", function() + router = assert(new_router(use_case)) + + local match_t = router:select("GET", "/foo/bar", nil, nil, nil, nil, nil, nil, nil, {test1 = "quote"}) + assert.falsy(match_t) + + local match_t = router:select("GET", "/foo/bar", nil, nil, nil, nil, nil, nil, nil, {test1 = "quoTe"}) + assert.falsy(match_t) + + local match_t = router:select("GET", "/foo/bar", nil, nil, nil, nil, nil, nil, nil, {test1 = "Quote"}) + assert.truthy(match_t) + assert.same(use_case[1].route, match_t.route) + end) + + it("select() should match with lower() (case insensitive)", function() + router = assert(new_router(use_case)) + + local match_t = router:select("GET", "/foo/bar", nil, nil, nil, nil, nil, nil, nil, {test2 = "QuoTe"}) + assert.truthy(match_t) + assert.same(use_case[2].route, match_t.route) + + local match_t = router:select("GET", "/foo/bar", nil, nil, nil, nil, nil, nil, nil, {test2 = "QUOTE"}) + assert.truthy(match_t) + assert.same(use_case[2].route, match_t.route) + end) + + it("exec() should hit cache with case sensitive", function() + router = assert(new_router(use_case)) + + local ctx = {} + local _ngx = mock_ngx("GET", "/foo/bar", { test1 = "Quote", }) + router._set_ngx(_ngx) + + -- first match + local match_t = router:exec(ctx) + assert.truthy(match_t) + assert.same(use_case[1].route, match_t.route) + assert.falsy(ctx.route_match_cached) + + -- cache hit pos + local match_t = router:exec(ctx) + assert.truthy(match_t) + assert.same(use_case[1].route, match_t.route) + assert.same(ctx.route_match_cached, "pos") + + local ctx = {} + local _ngx = mock_ngx("GET", "/foo/bar", { test1 = "QUOTE", }) + router._set_ngx(_ngx) + + -- case sensitive not match + local match_t = router:exec(ctx) + assert.falsy(match_t) + assert.falsy(ctx.route_match_cached) + + -- cache hit neg + local match_t = router:exec(ctx) + assert.falsy(match_t) + assert.same(ctx.route_match_cached, "neg") + end) + end) +end -- local flavor = "expressions" From 04392670a1e4b43d52aac085bbdda9f08687af8a Mon Sep 17 00:00:00 2001 From: Wangchong Zhou Date: Wed, 8 Nov 2023 15:16:51 +0800 Subject: [PATCH 084/249] fix(schema): validate public and private key for `keys` entity (#11923) KAG-390 --------- Co-authored-by: Datong Sun --- .../unreleased/kong/validate_private_key.yml | 3 ++ kong/db/dao/keys.lua | 10 ++++- kong/db/schema/typedefs.lua | 33 +++++++++++---- kong/plugins/acme/client.lua | 16 +------ .../01-db/01-schema/03-typedefs_spec.lua | 20 +++++++++ spec/02-integration/03-db/18-keys_spec.lua | 42 +++++++++++++++++++ 6 files changed, 100 insertions(+), 24 deletions(-) create mode 100644 changelog/unreleased/kong/validate_private_key.yml diff --git a/changelog/unreleased/kong/validate_private_key.yml b/changelog/unreleased/kong/validate_private_key.yml new file mode 100644 index 000000000000..70aa941103ff --- /dev/null +++ b/changelog/unreleased/kong/validate_private_key.yml @@ -0,0 +1,3 @@ +message: Validate private and public key for `keys` entity to ensure they match each 
other. +type: bugfix +scope: Core diff --git a/kong/db/dao/keys.lua b/kong/db/dao/keys.lua index 1f04fadf710c..8e14f0ac55b0 100644 --- a/kong/db/dao/keys.lua +++ b/kong/db/dao/keys.lua @@ -76,14 +76,20 @@ local function _load_pkey(key, part) pk, err = pkey.new(key.jwk, { format = "JWK" }) end if key.pem then - if not key.pem[part] then - return nil, fmt("%s key not found.", part) + -- public key can be derived from private key, but not vice versa + if part == "private_key" and not key.pem[part] then + return nil, "could not load a private key from public key material" end pk, err = pkey.new(key.pem[part], { format = "PEM" }) end if not pk then return nil, "could not load pkey. " .. err end + + if part == "private_key" and not pk:is_private() then + return nil, "could not load a private key from public key material" + end + return pk end diff --git a/kong/db/schema/typedefs.lua b/kong/db/schema/typedefs.lua index 3838b10d10ba..cd875302280d 100644 --- a/kong/db/schema/typedefs.lua +++ b/kong/db/schema/typedefs.lua @@ -654,20 +654,34 @@ local function validate_pem_keys(values) local private_key = values.private_key -- unless it's a vault reference - if kong.vault.is_reference(private_key) or - kong.vault.is_reference(public_key) then + if kong and ( + kong.vault.is_reference(private_key) or + kong.vault.is_reference(public_key)) then return true end - local pk, err = openssl_pkey.new(public_key, { format = "PEM" }) - if not pk or err then - return false, "could not load public key" + local pubkey, privkey, err + + if public_key and public_key ~= null then + pubkey, err = openssl_pkey.new(public_key, { format = "PEM", type = "pu" }) + if not pubkey or err then + return false, "could not load public key" + end end - local ppk, perr = openssl_pkey.new(private_key, { format = "PEM" }) - if not ppk or perr then - return false, "could not load private key" .. (perr or "") + if private_key and private_key ~= null then + privkey, err = openssl_pkey.new(private_key, { format = "PEM", type = "pr" }) + if not privkey or err then + return false, "could not load private key" .. (err or "") + end end + + if privkey and pubkey then + if privkey:to_PEM("public") ~= pubkey:to_PEM() then + return false, "public key does not match private key" + end + end + return true end @@ -691,6 +705,9 @@ typedefs.pem = Schema.define { }, }, }, + entity_checks = { + { at_least_one_of = { "private_key", "public_key" } } + }, custom_validator = validate_pem_keys, description = "A pair of PEM-encoded public and private keys, which can be either a string or a reference to a credential in Kong Vault. If provided as strings, they must be valid PEM-encoded keys." diff --git a/kong/plugins/acme/client.lua b/kong/plugins/acme/client.lua index cb3cb3d8749e..826f0a030502 100644 --- a/kong/plugins/acme/client.lua +++ b/kong/plugins/acme/client.lua @@ -234,13 +234,7 @@ local function get_account_key(conf) local key_set, key_set_err = kong.db.key_sets:select_by_name(conf.key_set) if key_set_err then - kong.log.warn("error loading keyset ", conf.key_set, " : ", key_set_err) - return nil, key_set_err - end - - if not key_set then - kong.log.warn("could not load keyset nil value was returned") - return nil, error("nil returned by key_sets:select_by_name for key_set ", conf.key_set) + return nil, "could not load keyset: " .. 
key_set_err end lookup.set = {id = key_set.id} @@ -250,13 +244,7 @@ local function get_account_key(conf) local key, key_err = kong.db.keys:select_by_cache_key(cache_key) if key_err then - kong.log.warn("error loading key ", kid, " : ", key_err) - return nil, key_err - end - - if not key then - kong.log.warn("could not load key nil value was returned") - return nil, error("nil returned by keys:select_by_cache_key for key ", conf.key_id) + return nil, "could not load keys: " .. key_err end return kong.db.keys:get_privkey(key) diff --git a/spec/01-unit/01-db/01-schema/03-typedefs_spec.lua b/spec/01-unit/01-db/01-schema/03-typedefs_spec.lua index cbd011d25597..1183e0858e04 100644 --- a/spec/01-unit/01-db/01-schema/03-typedefs_spec.lua +++ b/spec/01-unit/01-db/01-schema/03-typedefs_spec.lua @@ -203,4 +203,24 @@ describe("typedefs", function() assert.equal(false, uuid2.auto) end) + it("features pem", function() + local Test = Schema.new({ + fields = { + { f = typedefs.pem } + } + }) + local tmpkey = openssl_pkey.new { type = 'EC', curve = 'prime256v1' } + assert.truthy(Test:validate({ f = { public_key = tmpkey:to_PEM("public") }})) + assert.truthy(Test:validate({ f = { private_key = tmpkey:to_PEM("private") }})) + assert.falsy( Test:validate({ f = { private_key = tmpkey:to_PEM("public") }})) + assert.falsy(Test:validate({ f = { public_key = tmpkey:to_PEM("private") }})) + assert.truthy(Test:validate({ f = { public_key = tmpkey:to_PEM("public"), + private_key = tmpkey:to_PEM("private") }})) + local anotherkey = openssl_pkey.new { type = 'EC', curve = 'prime256v1' } + assert.falsy( Test:validate({ f = { public_key = anotherkey:to_PEM("public"), + private_key = tmpkey:to_PEM("private") }})) + assert.falsy( Test:validate({ f = { public_key = tmpkey:to_PEM("public"), + private_key = anotherkey:to_PEM("private") }})) +end) + end) diff --git a/spec/02-integration/03-db/18-keys_spec.lua b/spec/02-integration/03-db/18-keys_spec.lua index 737a25aaef56..5cac149a1e77 100644 --- a/spec/02-integration/03-db/18-keys_spec.lua +++ b/spec/02-integration/03-db/18-keys_spec.lua @@ -207,5 +207,47 @@ for _, strategy in helpers.all_strategies() do assert.is_not_nil(decoded_jwk.q) assert.is_not_nil(decoded_jwk.qi) end) + + it(":get_privkey errors if only got pubkey [pem]", function() + local pem_t, err = db.keys:insert { + name = "pem_key", + set = init_key_set, + kid = "999", + pem = { public_key = pem_pub } + } + assert.is_nil(err) + assert(pem_t) + + local pem_pub_t, g_err = db.keys:get_pubkey(pem_t) + assert.is_nil(g_err) + assert.matches("-----BEGIN PUBLIC KEY", pem_pub_t) + + local pem_priv, p_err = db.keys:get_privkey(pem_t) + assert.is_nil(pem_priv) + assert.matches("could not load a private key from public key material", p_err) + end) + + it(":get_privkey errors if only got pubkey [jwk]", function() + jwk.d = nil + local jwk_t, _ = db.keys:insert { + name = "jwk_key", + set = init_key_set, + kid = jwk.kid, + jwk = cjson.encode(jwk) + } + assert(jwk_t) + + local jwk_pub_t, g_err = db.keys:get_pubkey(jwk_t) + assert.is_nil(g_err) + local jwk_pub_o = cjson.decode(jwk_pub_t) + assert.is_not_nil(jwk_pub_o.e) + assert.is_not_nil(jwk_pub_o.kid) + assert.is_not_nil(jwk_pub_o.kty) + assert.is_not_nil(jwk_pub_o.n) + + local jwk_priv, p_err = db.keys:get_privkey(jwk_t) + assert.is_nil(jwk_priv) + assert.matches("could not load a private key from public key material", p_err) + end) end) end From 37bd9c2f94267538ecf518bc5ca8545302594290 Mon Sep 17 00:00:00 2001 From: Chrono Date: Wed, 8 Nov 2023 15:27:14 +0800 Subject: 
[PATCH 085/249] fix(router): `http` and `stream` subsystems no longer share the expressions router schema (#11914) KAG-2961 --------- Co-authored-by: Datong Sun --- ...subsystems_do_not_share_router_schemas.yml | 6 + kong/db/schema/entities/routes.lua | 37 +---- kong/router/atc.lua | 53 ++++++- .../01-db/01-schema/06-routes_spec.lua | 129 +++++++++++++++++- 4 files changed, 186 insertions(+), 39 deletions(-) create mode 100644 changelog/unreleased/kong/subsystems_do_not_share_router_schemas.yml diff --git a/changelog/unreleased/kong/subsystems_do_not_share_router_schemas.yml b/changelog/unreleased/kong/subsystems_do_not_share_router_schemas.yml new file mode 100644 index 000000000000..07a40e62f259 --- /dev/null +++ b/changelog/unreleased/kong/subsystems_do_not_share_router_schemas.yml @@ -0,0 +1,6 @@ +message: | + Expressions route in `http` and `stream` subsystem now have stricter validation. + Previously they share the same validation schema which means admin can configure expressions + route using fields like `http.path` even for stream routes. This is no longer allowed. +type: bugfix +scope: Core diff --git a/kong/db/schema/entities/routes.lua b/kong/db/schema/entities/routes.lua index 5c98e3931b3e..0ff3943ddced 100644 --- a/kong/db/schema/entities/routes.lua +++ b/kong/db/schema/entities/routes.lua @@ -3,30 +3,22 @@ local router = require("resty.router.router") local deprecation = require("kong.deprecation") local validate_route -local has_paths do - local isempty = require("table.isempty") - local CACHED_SCHEMA = require("kong.router.atc").schema + local get_schema = require("kong.router.atc").schema local get_expression = require("kong.router.compat").get_expression - local type = type - -- works with both `traditional_compatiable` and `expressions` routes` validate_route = function(entity) + local schema = get_schema(entity.protocols) local exp = entity.expression or get_expression(entity) - local ok, err = router.validate(CACHED_SCHEMA, exp) + local ok, err = router.validate(schema, exp) if not ok then return nil, "Router Expression failed validation: " .. 
err end return true end - - has_paths = function(entity) - local paths = entity.paths - return type(paths) == "table" and not isempty(paths) - end end local kong_router_flavor = kong and kong.configuration and kong.configuration.router_flavor @@ -73,15 +65,8 @@ if kong_router_flavor == "expressions" then entity_checks = { { custom_entity_check = { - field_sources = { "expression", "id", }, - fn = function(entity) - local ok, err = validate_route(entity) - if not ok then - return nil, err - end - - return true - end, + field_sources = { "expression", "id", "protocols", }, + fn = validate_route, } }, }, } @@ -126,17 +111,7 @@ else table.insert(entity_checks, { custom_entity_check = { run_with_missing_fields = true, - field_sources = { "id", "paths", }, - fn = function(entity) - if has_paths(entity) then - local ok, err = validate_route(entity) - if not ok then - return nil, err - end - end - - return true - end, + fn = validate_route, }} ) end diff --git a/kong/router/atc.lua b/kong/router/atc.lua index 17f9f48752b1..df8b7c636ce3 100644 --- a/kong/router/atc.lua +++ b/kong/router/atc.lua @@ -55,8 +55,10 @@ local values_buf = buffer.new(64) local CACHED_SCHEMA +local HTTP_SCHEMA +local STREAM_SCHEMA do - local FIELDS = { + local HTTP_FIELDS = { ["String"] = {"net.protocol", "tls.sni", "http.method", "http.host", @@ -66,21 +68,39 @@ do }, ["Int"] = {"net.port", - "net.src.port", "net.dst.port", + }, + } + + local STREAM_FIELDS = { + + ["String"] = {"net.protocol", "tls.sni", + }, + + ["Int"] = {"net.src.port", "net.dst.port", }, ["IpAddr"] = {"net.src.ip", "net.dst.ip", }, } - CACHED_SCHEMA = schema.new() + local function generate_schema(fields) + local s = schema.new() - for typ, fields in pairs(FIELDS) do - for _, v in ipairs(fields) do - assert(CACHED_SCHEMA:add_field(v, typ)) + for t, f in pairs(fields) do + for _, v in ipairs(f) do + assert(s:add_field(v, t)) + end end + + return s end + -- used by validation + HTTP_SCHEMA = generate_schema(HTTP_FIELDS) + STREAM_SCHEMA = generate_schema(STREAM_FIELDS) + + -- used by running router + CACHED_SCHEMA = is_http and HTTP_SCHEMA or STREAM_SCHEMA end @@ -871,7 +891,26 @@ function _M._set_ngx(mock_ngx) end -_M.schema = CACHED_SCHEMA +do + local protocol_to_schema = { + http = HTTP_SCHEMA, + https = HTTP_SCHEMA, + grpc = HTTP_SCHEMA, + grpcs = HTTP_SCHEMA, + + tcp = STREAM_SCHEMA, + udp = STREAM_SCHEMA, + tls = STREAM_SCHEMA, + + tls_passthrough = STREAM_SCHEMA, + } + + -- for db schema validation + function _M.schema(protocols) + return assert(protocol_to_schema[protocols[1]]) + end +end + _M.LOGICAL_OR = LOGICAL_OR _M.LOGICAL_AND = LOGICAL_AND diff --git a/spec/01-unit/01-db/01-schema/06-routes_spec.lua b/spec/01-unit/01-db/01-schema/06-routes_spec.lua index 7146043dbdbd..f4ef090ce0fe 100644 --- a/spec/01-unit/01-db/01-schema/06-routes_spec.lua +++ b/spec/01-unit/01-db/01-schema/06-routes_spec.lua @@ -1329,7 +1329,7 @@ describe("routes schema (flavor = traditional_compatible)", function() reload_flavor("traditional_compatible") setup_global_env() - it("validates a valid route", function() + it("validates a valid http route", function() local route = { id = a_valid_uuid, name = "my_route", @@ -1351,6 +1351,21 @@ describe("routes schema (flavor = traditional_compatible)", function() assert.falsy(route.strip_path) end) + it("validates a valid stream route", function() + local route = { + id = a_valid_uuid, + name = "my_route", + protocols = { "tcp" }, + sources = { { ip = "1.2.3.4", port = 80 } }, + service = { id = another_uuid }, + } + route = 
Routes:process_auto_fields(route, "insert") + assert.truthy(route.created_at) + assert.truthy(route.updated_at) + assert.same(route.created_at, route.updated_at) + assert.truthy(Routes:validate(route)) + end) + it("fails when path is invalid", function() local route = { id = a_valid_uuid, @@ -1370,6 +1385,23 @@ describe("routes schema (flavor = traditional_compatible)", function() assert.falsy(errs["@entity"]) end) + it("fails when ip address is invalid", function() + local route = { + id = a_valid_uuid, + name = "my_route", + protocols = { "tcp" }, + sources = { { ip = "x.x.x.x", port = 80 } }, + service = { id = another_uuid }, + } + route = Routes:process_auto_fields(route, "insert") + local ok, errs = Routes:validate_insert(route) + assert.falsy(ok) + assert.truthy(errs["sources"]) + + -- verified by `schema/typedefs.lua` + assert.falsy(errs["@entity"]) + end) + it("won't fail when rust.regex update to 1.8", function() local route = { id = a_valid_uuid, @@ -1384,3 +1416,98 @@ describe("routes schema (flavor = traditional_compatible)", function() assert.is_nil(errs) end) end) + + +describe("routes schema (flavor = expressions)", function() + local a_valid_uuid = "cbb297c0-a956-486d-ad1d-f9b42df9465a" + local another_uuid = "64a8670b-900f-44e7-a900-6ec7ef5aa4d3" + + reload_flavor("expressions") + setup_global_env() + + it("validates a valid http route", function() + local route = { + id = a_valid_uuid, + name = "my_route", + protocols = { "http" }, + expression = [[http.method == "GET" && http.host == "example.com" && http.path == "/ovo"]], + priority = 100, + strip_path = false, + preserve_host = true, + service = { id = another_uuid }, + } + route = Routes:process_auto_fields(route, "insert") + assert.truthy(route.created_at) + assert.truthy(route.updated_at) + assert.same(route.created_at, route.updated_at) + assert.truthy(Routes:validate(route)) + assert.falsy(route.strip_path) + end) + + it("validates a valid stream route", function() + local route = { + id = a_valid_uuid, + name = "my_route", + protocols = { "tcp" }, + expression = [[net.src.ip == 1.2.3.4 && net.src.port == 80]], + priority = 100, + service = { id = another_uuid }, + } + route = Routes:process_auto_fields(route, "insert") + assert.truthy(route.created_at) + assert.truthy(route.updated_at) + assert.same(route.created_at, route.updated_at) + assert.truthy(Routes:validate(route)) + end) + + it("fails when path is invalid", function() + local route = { + id = a_valid_uuid, + name = "my_route", + protocols = { "http" }, + expression = [[http.method == "GET" && http.path ~ "/[abc/*/user$"]], + priority = 100, + service = { id = another_uuid }, + } + route = Routes:process_auto_fields(route, "insert") + local ok, errs = Routes:validate_insert(route) + assert.falsy(ok) + + -- verified by `schema/typedefs.lua` + assert.truthy(errs["@entity"]) + end) + + it("fails when ip address is invalid", function() + local route = { + id = a_valid_uuid, + name = "my_route", + protocols = { "tcp" }, + expression = [[net.src.ip in 1.2.3.4/16 && net.src.port == 80]], + priority = 100, + service = { id = another_uuid }, + } + route = Routes:process_auto_fields(route, "insert") + local ok, errs = Routes:validate_insert(route) + assert.falsy(ok) + + -- verified by `schema/typedefs.lua` + assert.truthy(errs["@entity"]) + end) + + it("fails if http route's field appears in stream route", function() + local route = { + id = a_valid_uuid, + name = "my_route", + protocols = { "tcp" }, + expression = [[http.method == "GET" && net.src.ip == 1.2.3.4 
&& net.src.port == 80]], + priority = 100, + service = { id = another_uuid }, + } + route = Routes:process_auto_fields(route, "insert") + local ok, errs = Routes:validate_insert(route) + assert.falsy(ok) + + -- verified by `schema/typedefs.lua` + assert.truthy(errs["@entity"]) + end) +end) From 444b214c8b9bb1fe954a5516410ef630083c6c69 Mon Sep 17 00:00:00 2001 From: Keery Nie Date: Wed, 8 Nov 2023 17:24:21 +0800 Subject: [PATCH 086/249] fix(pdk): response send function should ignore transfer-encoding correctly (#11936) This PR is a follow-up fix for #8698 to ignore the transfer encoding header set by the user. The line removed in this PR seems to be conflict with the original fix and makes the original fix meaningless, so removed this line to get the expected behavior. We have related bug reports that when using the AWS-Lambda plugin in proxy_integration mode if the lamdba function returns an arbitrary transfer-encoding header, the response sent by Kong will both contain content-length and transfer-encoding, which is an unexpected result. Fix FTI-5028 --- ...response-send-remove-transfer-encoding.yml | 3 ++ kong/pdk/response.lua | 1 - .../27-aws-lambda/99-access_spec.lua | 37 +++++++++++++++++++ spec/fixtures/aws-lambda.lua | 3 ++ t/01-pdk/08-response/11-exit.t | 5 ++- 5 files changed, 46 insertions(+), 3 deletions(-) create mode 100644 changelog/unreleased/kong/pdk-response-send-remove-transfer-encoding.yml diff --git a/changelog/unreleased/kong/pdk-response-send-remove-transfer-encoding.yml b/changelog/unreleased/kong/pdk-response-send-remove-transfer-encoding.yml new file mode 100644 index 000000000000..f0bd4d19f65e --- /dev/null +++ b/changelog/unreleased/kong/pdk-response-send-remove-transfer-encoding.yml @@ -0,0 +1,3 @@ +message: Fix an issue that when using kong.response.exit, the Transfer-Encoding header set by user is not removed +type: bugfix +scope: PDK diff --git a/kong/pdk/response.lua b/kong/pdk/response.lua index b519ac12ef25..228626b62943 100644 --- a/kong/pdk/response.lua +++ b/kong/pdk/response.lua @@ -660,7 +660,6 @@ local function new(self, major_version) local has_content_length if headers ~= nil then for name, value in pairs(headers) do - ngx.header[name] = normalize_multi_header(value) local lower_name = lower(name) if lower_name == "transfer-encoding" or lower_name == "transfer_encoding" then self.log.warn("manually setting Transfer-Encoding. 
Ignored.") diff --git a/spec/03-plugins/27-aws-lambda/99-access_spec.lua b/spec/03-plugins/27-aws-lambda/99-access_spec.lua index dc9ec8205ebc..3ffb2d152149 100644 --- a/spec/03-plugins/27-aws-lambda/99-access_spec.lua +++ b/spec/03-plugins/27-aws-lambda/99-access_spec.lua @@ -150,6 +150,12 @@ for _, strategy in helpers.each_strategy() do service = null, } + local route24 = bp.routes:insert { + hosts = { "lambda24.com" }, + protocols = { "http", "https" }, + service = null, + } + bp.plugins:insert { name = "aws-lambda", route = { id = route1.id }, @@ -463,6 +469,19 @@ for _, strategy in helpers.each_strategy() do } } + bp.plugins:insert { + name = "aws-lambda", + route = { id = route24.id }, + config = { + port = 10001, + aws_key = "mock-key", + aws_secret = "mock-secret", + aws_region = "us-east-1", + function_name = "functionWithTransferEncodingHeader", + is_proxy_integration = true, + } + } + fixtures.dns_mock:A({ name = "custom.lambda.endpoint", address = "127.0.0.1", @@ -1148,6 +1167,24 @@ for _, strategy in helpers.each_strategy() do assert.equals("https", req.vars.scheme) end) + it("#test2 works normally by removing transfer encoding header when proxy integration mode", function () + proxy_client:set_timeout(3000) + assert.eventually(function () + local res = assert(proxy_client:send({ + method = "GET", + path = "/get", + headers = { + ["Host"] = "lambda24.com" + } + })) + + assert.res_status(200, res) + assert.is_nil(res.headers["Transfer-Encoding"]) + assert.is_nil(res.headers["transfer-encoding"]) + + return true + end).with_timeout(3).is_truthy() + end) end) describe("AWS_REGION environment is set", function() diff --git a/spec/fixtures/aws-lambda.lua b/spec/fixtures/aws-lambda.lua index 0fa0dec80964..1d99bad795c7 100644 --- a/spec/fixtures/aws-lambda.lua +++ b/spec/fixtures/aws-lambda.lua @@ -57,6 +57,9 @@ local fixtures = { elseif string.match(ngx.var.uri, "functionEcho") then require("spec.fixtures.mock_upstream").send_default_json_response() + elseif string.match(ngx.var.uri, "functionWithTransferEncodingHeader") then + ngx.say("{\"statusCode\": 200, \"headers\": { \"Transfer-Encoding\": \"chunked\", \"transfer-encoding\": \"chunked\"}}") + elseif type(res) == 'string' then ngx.header["Content-Length"] = #res + 1 ngx.say(res) diff --git a/t/01-pdk/08-response/11-exit.t b/t/01-pdk/08-response/11-exit.t index 79b659c6f686..f45564eed560 100644 --- a/t/01-pdk/08-response/11-exit.t +++ b/t/01-pdk/08-response/11-exit.t @@ -4,7 +4,7 @@ use Test::Nginx::Socket::Lua; use Test::Nginx::Socket::Lua::Stream; do "./t/Util.pm"; -plan tests => repeat_each() * (blocks() * 4) + 11; +plan tests => repeat_each() * (blocks() * 4) + 12; run_tests(); @@ -1128,7 +1128,7 @@ finalize stream session: 200 -=== TEST 18: response.exit() does not set transfer-encoding from headers +=== TEST 44: response.exit() does not set transfer-encoding from headers --- http_config eval: $t::Util::HttpConfig --- config location = /t { @@ -1148,6 +1148,7 @@ GET /t --- response_body test --- response_headers +! Transfer-Encoding Content-Length: 5 X-test: test --- error_log From 4b12b2394440ad8474fb16bde5081116da5983a3 Mon Sep 17 00:00:00 2001 From: Aapo Talvensaari Date: Wed, 8 Nov 2023 11:56:03 +0200 Subject: [PATCH 087/249] chore(deps): bump openresty from 1.21.4.2 to 1.21.4.3 (#11952) ### Summary - bugfix: applied the patch for security advisory to NGINX cores. (CVE-2023-44487). 
Kong already had the patch, but well, now that it is packaged, we can remove ours, and get to the latest OpenResty KAG-3033 Signed-off-by: Aapo Talvensaari --- .requirements | 2 +- ...pid-reset-ddos-attack-cve-2023-44487.patch | 53 ------------------- build/openresty/repositories.bzl | 2 +- .../kong/bump-openresty-1.21.4.3.yml | 3 ++ kong/meta.lua | 2 +- 5 files changed, 6 insertions(+), 56 deletions(-) delete mode 100644 build/openresty/patches/nginx-1.21.4_09-http2-rapid-reset-ddos-attack-cve-2023-44487.patch create mode 100644 changelog/unreleased/kong/bump-openresty-1.21.4.3.yml diff --git a/.requirements b/.requirements index a14eda9f2d08..7f7cae2e52f4 100644 --- a/.requirements +++ b/.requirements @@ -1,6 +1,6 @@ KONG_PACKAGE_NAME=kong -OPENRESTY=1.21.4.2 +OPENRESTY=1.21.4.3 LUAROCKS=3.9.2 OPENSSL=3.1.4 PCRE=8.45 diff --git a/build/openresty/patches/nginx-1.21.4_09-http2-rapid-reset-ddos-attack-cve-2023-44487.patch b/build/openresty/patches/nginx-1.21.4_09-http2-rapid-reset-ddos-attack-cve-2023-44487.patch deleted file mode 100644 index 1ab586cfcdcf..000000000000 --- a/build/openresty/patches/nginx-1.21.4_09-http2-rapid-reset-ddos-attack-cve-2023-44487.patch +++ /dev/null @@ -1,53 +0,0 @@ -diff --git a/bundle/nginx-1.21.4/src/http/v2/ngx_http_v2.c b/bundle/nginx-1.21.4/src/http/v2/ngx_http_v2.c -index 3afa8b6..228b060 100644 ---- a/bundle/nginx-1.21.4/src/http/v2/ngx_http_v2.c -+++ b/bundle/nginx-1.21.4/src/http/v2/ngx_http_v2.c -@@ -361,6 +361,7 @@ ngx_http_v2_read_handler(ngx_event_t *rev) - ngx_log_debug0(NGX_LOG_DEBUG_HTTP, c->log, 0, "http2 read handler"); - - h2c->blocked = 1; -+ h2c->new_streams = 0; - - if (c->close) { - c->close = 0; -@@ -1321,6 +1322,14 @@ ngx_http_v2_state_headers(ngx_http_v2_connection_t *h2c, u_char *pos, - goto rst_stream; - } - -+ if (h2c->new_streams++ >= 2 * h2scf->concurrent_streams) { -+ ngx_log_error(NGX_LOG_INFO, h2c->connection->log, 0, -+ "client sent too many streams at once"); -+ -+ status = NGX_HTTP_V2_REFUSED_STREAM; -+ goto rst_stream; -+ } -+ - if (!h2c->settings_ack - && !(h2c->state.flags & NGX_HTTP_V2_END_STREAM_FLAG) - && h2scf->preread_size < NGX_HTTP_V2_DEFAULT_WINDOW) -@@ -1386,6 +1395,12 @@ ngx_http_v2_state_headers(ngx_http_v2_connection_t *h2c, u_char *pos, - - rst_stream: - -+ if (h2c->refused_streams++ > ngx_max(h2scf->concurrent_streams, 100)) { -+ ngx_log_error(NGX_LOG_INFO, h2c->connection->log, 0, -+ "client sent too many refused streams"); -+ return ngx_http_v2_connection_error(h2c, NGX_HTTP_V2_NO_ERROR); -+ } -+ - if (ngx_http_v2_send_rst_stream(h2c, h2c->state.sid, status) != NGX_OK) { - return ngx_http_v2_connection_error(h2c, NGX_HTTP_V2_INTERNAL_ERROR); - } -diff --git a/bundle/nginx-1.21.4/src/http/v2/ngx_http_v2.h b/bundle/nginx-1.21.4/src/http/v2/ngx_http_v2.h -index 0eceae3..aef40bb 100644 ---- a/bundle/nginx-1.21.4/src/http/v2/ngx_http_v2.h -+++ b/bundle/nginx-1.21.4/src/http/v2/ngx_http_v2.h -@@ -124,6 +124,8 @@ struct ngx_http_v2_connection_s { - ngx_uint_t processing; - ngx_uint_t frames; - ngx_uint_t idle; -+ ngx_uint_t new_streams; -+ ngx_uint_t refused_streams; - ngx_uint_t priority_limit; - - ngx_uint_t pushing; diff --git a/build/openresty/repositories.bzl b/build/openresty/repositories.bzl index c2722ac50ee6..43ff3faa995f 100644 --- a/build/openresty/repositories.bzl +++ b/build/openresty/repositories.bzl @@ -30,7 +30,7 @@ def openresty_repositories(): openresty_http_archive_wrapper, name = "openresty", build_file = "//build/openresty:BUILD.openresty.bazel", - sha256 = 
"5b1eded25c1d4ed76c0336dfae50bd94d187af9c85ead244135dd5ae363b2e2a", + sha256 = "33a84c63cfd9e46b0e5c62eb2ddc7b8068bda2e1686314343b89fc3ffd24cdd3", strip_prefix = "openresty-" + openresty_version, urls = [ "https://openresty.org/download/openresty-" + openresty_version + ".tar.gz", diff --git a/changelog/unreleased/kong/bump-openresty-1.21.4.3.yml b/changelog/unreleased/kong/bump-openresty-1.21.4.3.yml new file mode 100644 index 000000000000..f44f1e9d1b78 --- /dev/null +++ b/changelog/unreleased/kong/bump-openresty-1.21.4.3.yml @@ -0,0 +1,3 @@ +message: "Bumped OpenResty from 1.21.4.2 to 1.21.4.3" +type: dependency +scope: Core diff --git a/kong/meta.lua b/kong/meta.lua index bc71d8a3f156..403d09d69bdf 100644 --- a/kong/meta.lua +++ b/kong/meta.lua @@ -24,6 +24,6 @@ return { -- third-party dependencies' required version, as they would be specified -- to lua-version's `set()` in the form {from, to} _DEPENDENCIES = { - nginx = { "1.21.4.2" }, + nginx = { "1.21.4.3" }, } } From 1c906a9b4282e9176f044642bd63b4b479db222f Mon Sep 17 00:00:00 2001 From: Aapo Talvensaari Date: Wed, 8 Nov 2023 17:58:12 +0200 Subject: [PATCH 088/249] chore(deps): bump resty-openssl from 0.8.25 to 1.0.1 (#11954) ### Summary #### [1.0.1] - 2023-11-07 ##### bug fixes - **jwk:** return error if exporting private key from public key ([#128](https://github.com/fffonion/lua-resty-openssl/issues/128)) [3a1bc27](https://github.com/fffonion/lua-resty-openssl/commit/3a1bc273e2a3f41faa7eb68f2939fd1fc25cdecb) #### [1.0.0] - 2023-11-03 ##### code refactoring - **\*:** remove unused cdefs [84abc0a](https://github.com/fffonion/lua-resty-openssl/commit/84abc0ab99b3d649c7fe4575cf13867cf96a94ef) - **\*:** BREAKING: drop OpenSSL 1.0.2, 1.1.0 and BoringSSL support [99b493e](https://github.com/fffonion/lua-resty-openssl/commit/99b493e671886e68c07b1b9c9472075c22ce38e9) ##### features - **fips:** add get_fips_version_text [935227b](https://github.com/fffonion/lua-resty-openssl/commit/935227b348ba4416f2f4d671dd94f7910cbf9e61) #### [0.8.26] - 2023-10-30 ##### bug fixes - **version:** add support for all 3.x versions [1516b4d](https://github.com/fffonion/lua-resty-openssl/commit/1516b4d94ac4621a1b243c14b5133ded81515d28) - **x509.csr:** remove extension before adding it [d6ed964](https://github.com/fffonion/lua-resty-openssl/commit/d6ed9648e39f46f7519413489baf021092ccbc49) Signed-off-by: Aapo Talvensaari --- changelog/unreleased/kong/bump-resty-openssl-1.0.1.yml | 3 +++ kong-3.6.0-0.rockspec | 2 +- 2 files changed, 4 insertions(+), 1 deletion(-) create mode 100644 changelog/unreleased/kong/bump-resty-openssl-1.0.1.yml diff --git a/changelog/unreleased/kong/bump-resty-openssl-1.0.1.yml b/changelog/unreleased/kong/bump-resty-openssl-1.0.1.yml new file mode 100644 index 000000000000..d90a6effd810 --- /dev/null +++ b/changelog/unreleased/kong/bump-resty-openssl-1.0.1.yml @@ -0,0 +1,3 @@ +message: Bump resty-openssl from 0.8.25 to 1.0.1 +type: dependency +scope: Core diff --git a/kong-3.6.0-0.rockspec b/kong-3.6.0-0.rockspec index a34044faeeb2..f24012848cb0 100644 --- a/kong-3.6.0-0.rockspec +++ b/kong-3.6.0-0.rockspec @@ -34,7 +34,7 @@ dependencies = { "lua-resty-healthcheck == 3.0.0", "lua-messagepack == 0.5.2", "lua-resty-aws == 1.3.5", - "lua-resty-openssl == 0.8.25", + "lua-resty-openssl == 1.0.1", "lua-resty-counter == 0.2.1", "lua-resty-ipmatcher == 0.6.1", "lua-resty-acme == 0.12.0", From 690bcf5607c3234d3e61f2142b209af1148209e3 Mon Sep 17 00:00:00 2001 From: Aapo Talvensaari Date: Wed, 8 Nov 2023 18:58:28 +0200 Subject: [PATCH 089/249] 
chore(deps): bump lpeg from 1.0.2 to 1.1.0 (#11955) ### Summary + accumulator capture + UTF-8 ranges + Larger limit for number of rules in a grammar + Larger limit for number of captures in a match + bug fixes + other small improvements Signed-off-by: Aapo Talvensaari --- changelog/unreleased/kong/bump-lpeg-1.1.0.yml | 3 +++ kong-3.6.0-0.rockspec | 2 +- 2 files changed, 4 insertions(+), 1 deletion(-) create mode 100644 changelog/unreleased/kong/bump-lpeg-1.1.0.yml diff --git a/changelog/unreleased/kong/bump-lpeg-1.1.0.yml b/changelog/unreleased/kong/bump-lpeg-1.1.0.yml new file mode 100644 index 000000000000..d6608d3a23e5 --- /dev/null +++ b/changelog/unreleased/kong/bump-lpeg-1.1.0.yml @@ -0,0 +1,3 @@ +message: "Bumped LPEG from 1.0.2 to 1.1.0" +type: dependency +scope: Core diff --git a/kong-3.6.0-0.rockspec b/kong-3.6.0-0.rockspec index f24012848cb0..cd53d78a7eb8 100644 --- a/kong-3.6.0-0.rockspec +++ b/kong-3.6.0-0.rockspec @@ -40,7 +40,7 @@ dependencies = { "lua-resty-acme == 0.12.0", "lua-resty-session == 4.0.5", "lua-resty-timer-ng == 0.2.5", - "lpeg == 1.0.2", + "lpeg == 1.1.0", "lua-resty-ljsonschema == 1.1.6-2", } build = { From 67200823e8b58c8afccdfb186a117f01b6d2cfa3 Mon Sep 17 00:00:00 2001 From: Aapo Talvensaari Date: Wed, 8 Nov 2023 18:58:46 +0200 Subject: [PATCH 090/249] chore(deps): bump lua-messagepack from 0.5.2 to 0.5.3 (#11956) ### Summary - support Lua 5.4 - testsuite with TestAssertion - minor refactors Signed-off-by: Aapo Talvensaari --- changelog/unreleased/kong/bump-lua-messagepack-0.5.3.yml | 3 +++ kong-3.6.0-0.rockspec | 2 +- 2 files changed, 4 insertions(+), 1 deletion(-) create mode 100644 changelog/unreleased/kong/bump-lua-messagepack-0.5.3.yml diff --git a/changelog/unreleased/kong/bump-lua-messagepack-0.5.3.yml b/changelog/unreleased/kong/bump-lua-messagepack-0.5.3.yml new file mode 100644 index 000000000000..5c9cc499e6dd --- /dev/null +++ b/changelog/unreleased/kong/bump-lua-messagepack-0.5.3.yml @@ -0,0 +1,3 @@ +message: "Bumped lua-messagepack from 0.5.2 to 0.5.3" +type: dependency +scope: Core diff --git a/kong-3.6.0-0.rockspec b/kong-3.6.0-0.rockspec index cd53d78a7eb8..bdc60a5ccdbb 100644 --- a/kong-3.6.0-0.rockspec +++ b/kong-3.6.0-0.rockspec @@ -32,7 +32,7 @@ dependencies = { "luaxxhash >= 1.0", "lua-protobuf == 0.5.0", "lua-resty-healthcheck == 3.0.0", - "lua-messagepack == 0.5.2", + "lua-messagepack == 0.5.3", "lua-resty-aws == 1.3.5", "lua-resty-openssl == 1.0.1", "lua-resty-counter == 0.2.1", From 1032e48a7fca9adad3d6b722ce4a3267b8ce0c52 Mon Sep 17 00:00:00 2001 From: Xumin <100666470+StarlightIbuki@users.noreply.github.com> Date: Wed, 8 Nov 2023 22:52:34 +0000 Subject: [PATCH 091/249] tests(*): improve http mock (#11902) Simplify most common use of http mock --- spec/helpers/http_mock.lua | 26 ++++++++++++++++++++++++++ 1 file changed, 26 insertions(+) diff --git a/spec/helpers/http_mock.lua b/spec/helpers/http_mock.lua index 91fc85c6121a..c1c998a864ae 100644 --- a/spec/helpers/http_mock.lua +++ b/spec/helpers/http_mock.lua @@ -25,6 +25,32 @@ for _, module in ipairs(modules) do end end +-- get a session from the logs with a timeout +-- throws error if no request is recieved within the timeout +-- @treturn table the session +function http_mock:get_session() + local ret + self.eventually:has_session_satisfy(function(s) + ret = s + return true + end) + return ret +end + +-- get a request from the logs with a timeout +-- throws error if no request is recieved within the timeout +-- @treturn table the request +function http_mock:get_request() + 
return self:get_session().req +end + +-- get a response from the logs with a timeout +-- throws error if no request is recieved within the timeout +-- @treturn table the response +function http_mock:get_response() + return self:get_session().resp +end + local http_mock_MT = { __index = http_mock, __gc = http_mock.stop } From 735d652aacf9274d769a4d24c52ad3d46183e879 Mon Sep 17 00:00:00 2001 From: Niklaus Schen <8458369+Water-Melon@users.noreply.github.com> Date: Thu, 9 Nov 2023 10:49:20 +0800 Subject: [PATCH 092/249] refactor(tools): separate system-related functions from tools.utils (#11949) KAG-2954 --- kong-3.6.0-0.rockspec | 1 + kong/tools/system.lua | 62 +++++++++++++++++++++++++++++++++++++++++++ kong/tools/utils.lua | 52 +----------------------------------- 3 files changed, 64 insertions(+), 51 deletions(-) create mode 100644 kong/tools/system.lua diff --git a/kong-3.6.0-0.rockspec b/kong-3.6.0-0.rockspec index bdc60a5ccdbb..375812703908 100644 --- a/kong-3.6.0-0.rockspec +++ b/kong-3.6.0-0.rockspec @@ -171,6 +171,7 @@ build = { ["kong.tools.yield"] = "kong/tools/yield.lua", ["kong.tools.uuid"] = "kong/tools/uuid.lua", ["kong.tools.rand"] = "kong/tools/rand.lua", + ["kong.tools.system"] = "kong/tools/system.lua", ["kong.runloop.handler"] = "kong/runloop/handler.lua", ["kong.runloop.events"] = "kong/runloop/events.lua", diff --git a/kong/tools/system.lua b/kong/tools/system.lua new file mode 100644 index 000000000000..38938688a3b9 --- /dev/null +++ b/kong/tools/system.lua @@ -0,0 +1,62 @@ +local pl_utils = require "pl.utils" +local pl_path = require "pl.path" + + +local _M = {} + + +do + local _system_infos + + + function _M.get_system_infos() + if _system_infos then + return _system_infos + end + + _system_infos = {} + + local ok, _, stdout = pl_utils.executeex("getconf _NPROCESSORS_ONLN") + if ok then + _system_infos.cores = tonumber(stdout:sub(1, -2)) + end + + ok, _, stdout = pl_utils.executeex("uname -ms") + if ok then + _system_infos.uname = stdout:gsub(";", ","):sub(1, -2) + end + + return _system_infos + end +end + + +do + local trusted_certs_paths = { + "/etc/ssl/certs/ca-certificates.crt", -- Debian/Ubuntu/Gentoo + "/etc/pki/tls/certs/ca-bundle.crt", -- Fedora/RHEL 6 + "/etc/ssl/ca-bundle.pem", -- OpenSUSE + "/etc/pki/tls/cacert.pem", -- OpenELEC + "/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem", -- CentOS/RHEL 7 + "/etc/ssl/cert.pem", -- OpenBSD, Alpine + } + + + function _M.get_system_trusted_certs_filepath() + for _, path in ipairs(trusted_certs_paths) do + if pl_path.exists(path) then + return path + end + end + + return nil, + "Could not find trusted certs file in " .. + "any of the `system`-predefined locations. " .. + "Please install a certs file there or set " .. + "lua_ssl_trusted_certificate to an " .. 
+ "specific filepath instead of `system`" + end +end + + +return _M diff --git a/kong/tools/utils.lua b/kong/tools/utils.lua index c823c3999521..6d9af9f60c09 100644 --- a/kong/tools/utils.lua +++ b/kong/tools/utils.lua @@ -10,7 +10,6 @@ local ffi = require "ffi" local pl_stringx = require "pl.stringx" -local pl_utils = require "pl.utils" local pl_path = require "pl.path" local pl_file = require "pl.file" @@ -48,56 +47,6 @@ int gethostname(char *name, size_t len); local _M = {} -do - local _system_infos - - function _M.get_system_infos() - if _system_infos then - return _system_infos - end - - _system_infos = {} - - local ok, _, stdout = pl_utils.executeex("getconf _NPROCESSORS_ONLN") - if ok then - _system_infos.cores = tonumber(stdout:sub(1, -2)) - end - - ok, _, stdout = pl_utils.executeex("uname -ms") - if ok then - _system_infos.uname = stdout:gsub(";", ","):sub(1, -2) - end - - return _system_infos - end -end - -do - local trusted_certs_paths = { - "/etc/ssl/certs/ca-certificates.crt", -- Debian/Ubuntu/Gentoo - "/etc/pki/tls/certs/ca-bundle.crt", -- Fedora/RHEL 6 - "/etc/ssl/ca-bundle.pem", -- OpenSUSE - "/etc/pki/tls/cacert.pem", -- OpenELEC - "/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem", -- CentOS/RHEL 7 - "/etc/ssl/cert.pem", -- OpenBSD, Alpine - } - - function _M.get_system_trusted_certs_filepath() - for _, path in ipairs(trusted_certs_paths) do - if pl_path.exists(path) then - return path - end - end - - return nil, - "Could not find trusted certs file in " .. - "any of the `system`-predefined locations. " .. - "Please install a certs file there or set " .. - "lua_ssl_trusted_certificate to an " .. - "specific filepath instead of `system`" - end -end - do local url = require "socket.url" @@ -1142,6 +1091,7 @@ do "kong.tools.string", "kong.tools.uuid", "kong.tools.rand", + "kong.tools.system", } for _, str in ipairs(modules) do From 0c1c94ce0cc964cb01f951af98a62dd6ad5c667e Mon Sep 17 00:00:00 2001 From: Joshua Schmid Date: Thu, 9 Nov 2023 05:00:23 +0100 Subject: [PATCH 093/249] chore(ci): improve backporting process (#11924) * now contains all the commits of a PR, not only the last one * now copies labels on the backport PRs * now copies milestones on the backport PRS * now copies requested reviewers to the backport PRS The action instructions for manually merging were mostly wrong and rarely worked. 
The actions are now more descriptive and separated (using worktrees) Signed-off-by: Joshua Schmid --- .github/workflows/backport.yml | 31 +++++++++++++++++-------------- 1 file changed, 17 insertions(+), 14 deletions(-) diff --git a/.github/workflows/backport.yml b/.github/workflows/backport.yml index 7cc4b9c134a3..c2cc8d2a5100 100644 --- a/.github/workflows/backport.yml +++ b/.github/workflows/backport.yml @@ -1,24 +1,27 @@ name: Backport on: pull_request_target: - types: - - closed - - labeled - + types: [closed] +permissions: + contents: write # so it can comment + pull-requests: write # so it can create pull requests jobs: backport: name: Backport runs-on: ubuntu-latest - if: > - github.event.pull_request.merged - && ( - github.event.action == 'closed' - || ( - github.event.action == 'labeled' - && contains(github.event.label.name, 'backport') - ) - ) + if: github.event.pull_request.merged steps: - - uses: tibdex/backport@9565281eda0731b1d20c4025c43339fb0a23812e # v2.0.4 + - uses: actions/checkout@v4 + - name: Create backport pull requests + uses: korthout/backport-action@cb79e4e5f46c7d7d653dd3d5fa8a9b0a945dfe4b # v2.1.0 with: github_token: ${{ secrets.PAT }} + pull_title: '[backport -> ${target_branch}] ${pull_title}' + merge_commits: 'skip' + copy_labels_pattern: ^(?!backport ).* # copies all labels except those starting with "backport " + label_pattern: ^backport (release\/[^ ]+)$ # filters for labels starting with "backport " and extracts the branch name + pull_description: |- + Automated backport to `${target_branch}`, triggered by a label in #${pull_number}. + copy_assignees: true + copy_milestone: true + copy_requested_reviewers: true From 12f45ad91b7ab696172ca2244bc96fec8304613d Mon Sep 17 00:00:00 2001 From: Chrono Date: Thu, 9 Nov 2023 12:01:39 +0800 Subject: [PATCH 094/249] refactor(router): simplify the functions of cache calculation (#11948) --- kong/router/atc.lua | 26 ++++++++++---------------- 1 file changed, 10 insertions(+), 16 deletions(-) diff --git a/kong/router/atc.lua b/kong/router/atc.lua index df8b7c636ce3..e67a207d1973 100644 --- a/kong/router/atc.lua +++ b/kong/router/atc.lua @@ -592,12 +592,14 @@ do local str_buf = buffer.new(64) - get_headers_key = function(headers) + local function get_headers_or_queries_key(values, lower_func) str_buf:reset() -- NOTE: DO NOT yield until str_buf:get() - for name, value in pairs(headers) do - local name = replace_dashes_lower(name) + for name, value in pairs(values) do + if lower_func then + name = lower_func(name) + end if type(value) == "table" then tb_sort(value) @@ -610,20 +612,12 @@ do return str_buf:get() end - get_queries_key = function(queries) - str_buf:reset() - - -- NOTE: DO NOT yield until str_buf:get() - for name, value in pairs(queries) do - if type(value) == "table" then - tb_sort(value) - value = tb_concat(value, ", ") - end - - str_buf:putf("|%s=%s", name, value) - end + get_headers_key = function(headers) + return get_headers_or_queries_key(headers, replace_dashes_lower) + end - return str_buf:get() + get_queries_key = function(queries) + return get_headers_or_queries_key(queries) end end From 1b2b2c0a6a1592a785e13d7b1950efbd64e377ee Mon Sep 17 00:00:00 2001 From: Niklaus Schen <8458369+Water-Melon@users.noreply.github.com> Date: Thu, 9 Nov 2023 17:16:18 +0800 Subject: [PATCH 095/249] refactor(tools): separate time functions from tools.utils (#11964) * refactor(tools): separate time functions from tools.utils * use ffi.new instead of ffi_new KAG-2955 --- kong-3.6.0-0.rockspec | 1 + kong/tools/time.lua | 
101 +++++++++++++++++++++++++++++++++++++++++ kong/tools/utils.lua | 102 ++---------------------------------------- 3 files changed, 105 insertions(+), 99 deletions(-) create mode 100644 kong/tools/time.lua diff --git a/kong-3.6.0-0.rockspec b/kong-3.6.0-0.rockspec index 375812703908..b787d85e6c93 100644 --- a/kong-3.6.0-0.rockspec +++ b/kong-3.6.0-0.rockspec @@ -172,6 +172,7 @@ build = { ["kong.tools.uuid"] = "kong/tools/uuid.lua", ["kong.tools.rand"] = "kong/tools/rand.lua", ["kong.tools.system"] = "kong/tools/system.lua", + ["kong.tools.time"] = "kong/tools/time.lua", ["kong.runloop.handler"] = "kong/runloop/handler.lua", ["kong.runloop.events"] = "kong/runloop/events.lua", diff --git a/kong/tools/time.lua b/kong/tools/time.lua new file mode 100644 index 000000000000..5f52e5ff3cdd --- /dev/null +++ b/kong/tools/time.lua @@ -0,0 +1,101 @@ +local ffi = require "ffi" + + +local C = ffi.C +local tonumber = tonumber + + +ffi.cdef[[ +typedef long time_t; +typedef int clockid_t; +typedef struct timespec { + time_t tv_sec; /* seconds */ + long tv_nsec; /* nanoseconds */ +} nanotime; + +int clock_gettime(clockid_t clk_id, struct timespec *tp); +]] + + +local _M = {} + + +do + local NGX_ERROR = ngx.ERROR + + if not pcall(ffi.typeof, "ngx_uint_t") then + ffi.cdef [[ + typedef uintptr_t ngx_uint_t; + ]] + end + + if not pcall(ffi.typeof, "ngx_int_t") then + ffi.cdef [[ + typedef intptr_t ngx_int_t; + ]] + end + + -- ngx_str_t defined by lua-resty-core + local s = ffi.new("ngx_str_t[1]") + s[0].data = "10" + s[0].len = 2 + + if not pcall(function() C.ngx_parse_time(s, 0) end) then + ffi.cdef [[ + ngx_int_t ngx_parse_time(ngx_str_t *line, ngx_uint_t is_sec); + ]] + end + + function _M.nginx_conf_time_to_seconds(str) + s[0].data = str + s[0].len = #str + + local ret = C.ngx_parse_time(s, 1) + if ret == NGX_ERROR then + error("bad argument #1 'str'", 2) + end + + return tonumber(ret, 10) + end +end + + +do + local nanop = ffi.new("nanotime[1]") + function _M.time_ns() + -- CLOCK_REALTIME -> 0 + C.clock_gettime(0, nanop) + local t = nanop[0] + + return tonumber(t.tv_sec) * 1e9 + tonumber(t.tv_nsec) + end +end + + +do + local now = ngx.now + local update_time = ngx.update_time + local start_time = ngx.req.start_time + local monotonic_msec = require("resty.core.time").monotonic_msec + + function _M.get_now_ms() + return now() * 1000 -- time is kept in seconds with millisecond resolution. + end + + function _M.get_updated_now_ms() + update_time() + return now() * 1000 -- time is kept in seconds with millisecond resolution. + end + + function _M.get_start_time_ms() + return start_time() * 1000 -- time is kept in seconds with millisecond resolution. 
+ end + + function _M.get_updated_monotonic_ms() + update_time() + return monotonic_msec() + end +end + + +return _M diff --git a/kong/tools/utils.lua b/kong/tools/utils.lua index 6d9af9f60c09..56bff1c95cee 100644 --- a/kong/tools/utils.lua +++ b/kong/tools/utils.lua @@ -13,8 +13,6 @@ local pl_stringx = require "pl.stringx" local pl_path = require "pl.path" local pl_file = require "pl.file" -local C = ffi.C -local ffi_new = ffi.new local type = type local pairs = pairs local ipairs = ipairs @@ -32,19 +30,12 @@ local split = pl_stringx.split local re_match = ngx.re.match local setmetatable = setmetatable -ffi.cdef[[ -typedef long time_t; -typedef int clockid_t; -typedef struct timespec { - time_t tv_sec; /* seconds */ - long tv_nsec; /* nanoseconds */ -} nanotime; - -int clock_gettime(clockid_t clk_id, struct timespec *tp); +ffi.cdef[[ int gethostname(char *name, size_t len); ]] + local _M = {} @@ -732,46 +723,6 @@ do end -do - local NGX_ERROR = ngx.ERROR - - if not pcall(ffi.typeof, "ngx_uint_t") then - ffi.cdef [[ - typedef uintptr_t ngx_uint_t; - ]] - end - - if not pcall(ffi.typeof, "ngx_int_t") then - ffi.cdef [[ - typedef intptr_t ngx_int_t; - ]] - end - - -- ngx_str_t defined by lua-resty-core - local s = ffi_new("ngx_str_t[1]") - s[0].data = "10" - s[0].len = 2 - - if not pcall(function() C.ngx_parse_time(s, 0) end) then - ffi.cdef [[ - ngx_int_t ngx_parse_time(ngx_str_t *line, ngx_uint_t is_sec); - ]] - end - - function _M.nginx_conf_time_to_seconds(str) - s[0].data = str - s[0].len = #str - - local ret = C.ngx_parse_time(s, 1) - if ret == NGX_ERROR then - error("bad argument #1 'str'", 2) - end - - return tonumber(ret, 10) - end -end - - local get_mime_type local get_response_type local get_error_template @@ -1034,54 +985,6 @@ function _M.sort_by_handler_priority(a, b) end -local time_ns -do - local nanop = ffi_new("nanotime[1]") - function time_ns() - -- CLOCK_REALTIME -> 0 - C.clock_gettime(0, nanop) - local t = nanop[0] - - return tonumber(t.tv_sec) * 1e9 + tonumber(t.tv_nsec) - end -end -_M.time_ns = time_ns - - -local get_now_ms -local get_updated_now_ms -local get_start_time_ms -local get_updated_monotonic_ms -do - local now = ngx.now - local update_time = ngx.update_time - local start_time = ngx.req.start_time - local monotonic_msec = require("resty.core.time").monotonic_msec - - function get_now_ms() - return now() * 1000 -- time is kept in seconds with millisecond resolution. - end - - function get_updated_now_ms() - update_time() - return now() * 1000 -- time is kept in seconds with millisecond resolution. - end - - function get_start_time_ms() - return start_time() * 1000 -- time is kept in seconds with millisecond resolution. 
- end - - function get_updated_monotonic_ms() - update_time() - return monotonic_msec() - end -end -_M.get_now_ms = get_now_ms -_M.get_updated_now_ms = get_updated_now_ms -_M.get_start_time_ms = get_start_time_ms -_M.get_updated_monotonic_ms = get_updated_monotonic_ms - - do local modules = { "kong.tools.gzip", @@ -1092,6 +995,7 @@ do "kong.tools.uuid", "kong.tools.rand", "kong.tools.system", + "kong.tools.time", } for _, str in ipairs(modules) do From 53ab40a02d607cc6c2f750e8aed84b3f45f0ceaf Mon Sep 17 00:00:00 2001 From: Niklaus Schen <8458369+Water-Melon@users.noreply.github.com> Date: Thu, 9 Nov 2023 17:20:24 +0800 Subject: [PATCH 096/249] refactor(tools): move sort_by_handler_priority to DAO (#11965) The function sort_by_handler_priority is only used in DAO and does not belong to any other category of functions in kong/tools/utils.lua, so it should be moved to DAO. KAG-2956 --- kong/db/dao/plugins.lua | 18 +++++++++++++++++- kong/tools/utils.lua | 16 ---------------- 2 files changed, 17 insertions(+), 17 deletions(-) diff --git a/kong/db/dao/plugins.lua b/kong/db/dao/plugins.lua index f05c31d677a2..8790de32c2ca 100644 --- a/kong/db/dao/plugins.lua +++ b/kong/db/dao/plugins.lua @@ -5,7 +5,6 @@ local plugin_loader = require "kong.db.schema.plugin_loader" local reports = require "kong.reports" local plugin_servers = require "kong.runloop.plugin_servers" local version = require "version" -local sort_by_handler_priority = utils.sort_by_handler_priority local Plugins = {} @@ -336,6 +335,23 @@ function Plugins:load_plugin_schemas(plugin_set) end +--- +-- Sort by handler priority and check for collisions. In case of a collision +-- sorting will be applied based on the plugin's name. +-- @tparam table plugin table containing `handler` table and a `name` string +-- @tparam table plugin table containing `handler` table and a `name` string +-- @treturn boolean outcome of sorting +local sort_by_handler_priority = function (a, b) + local prio_a = a.handler.PRIORITY or 0 + local prio_b = b.handler.PRIORITY or 0 + if prio_a == prio_b and not + (prio_a == 0 or prio_b == 0) then + return a.name > b.name + end + return prio_a > prio_b +end + + -- Requires Plugins:load_plugin_schemas to be loaded first -- @return an array where each element has the format { name = "keyauth", handler = function() .. end }. Or nil, error function Plugins:get_handlers() diff --git a/kong/tools/utils.lua b/kong/tools/utils.lua index 56bff1c95cee..3b0bda1540d4 100644 --- a/kong/tools/utils.lua +++ b/kong/tools/utils.lua @@ -968,22 +968,6 @@ local topological_sort do end _M.topological_sort = topological_sort ---- --- Sort by handler priority and check for collisions. In case of a collision --- sorting will be applied based on the plugin's name. 
--- @tparam table plugin table containing `handler` table and a `name` string --- @tparam table plugin table containing `handler` table and a `name` string --- @treturn boolean outcome of sorting -function _M.sort_by_handler_priority(a, b) - local prio_a = a.handler.PRIORITY or 0 - local prio_b = b.handler.PRIORITY or 0 - if prio_a == prio_b and not - (prio_a == 0 or prio_b == 0) then - return a.name > b.name - end - return prio_a > prio_b -end - do local modules = { From af4958e4e0452e210b51e7c36cadba11c730fdac Mon Sep 17 00:00:00 2001 From: Andy Zhang Date: Thu, 9 Nov 2023 22:15:07 +0800 Subject: [PATCH 097/249] docs(changelog): Post 3.5.0 changelog update (#11971) * docs(3.5.0): generate 3.5.0 changelog (#11801) * docs(3.5.0): generate 3.5.0 changelog --------- Co-authored-by: Douglas-Lee * docs(changelog): re-generate 3.5.0 changelog (#11870) * docs(CHANGELOG): update 3.5.0 changelog (#11872) * docs(changelog): update 3.5.0 changelog * docs(CHANGELOG): migrate changelogs from CHANGELOG.md to correct place (#11938) --------- Co-authored-by: Douglas-Lee --- CHANGELOG.md | 52 ---- changelog/3.5.0/3.5.0.md | 286 ++++++++++++++++++ changelog/3.5.0/kong/.gitkeep | 0 .../{unreleased => 3.5.0}/kong/10570.yml | 0 .../{unreleased => 3.5.0}/kong/11360-1.yml | 0 .../{unreleased => 3.5.0}/kong/11360-2.yml | 0 .../{unreleased => 3.5.0}/kong/11402.yml | 0 .../{unreleased => 3.5.0}/kong/11424.yml | 0 .../{unreleased => 3.5.0}/kong/11442.yml | 0 .../{unreleased => 3.5.0}/kong/11464.yml | 0 .../{unreleased => 3.5.0}/kong/11468.yml | 0 .../{unreleased => 3.5.0}/kong/11480.yml | 0 .../{unreleased => 3.5.0}/kong/11484.yml | 0 .../{unreleased => 3.5.0}/kong/11502.yml | 0 .../{unreleased => 3.5.0}/kong/11515.yml | 0 .../{unreleased => 3.5.0}/kong/11518.yml | 0 .../{unreleased => 3.5.0}/kong/11523.yml | 0 .../{unreleased => 3.5.0}/kong/11532.yml | 0 .../{unreleased => 3.5.0}/kong/11538.yml | 0 .../{unreleased => 3.5.0}/kong/11551-1.yml | 0 .../{unreleased => 3.5.0}/kong/11551-2.yml | 0 .../{unreleased => 3.5.0}/kong/11553.yml | 0 .../{unreleased => 3.5.0}/kong/11566.yml | 0 .../{unreleased => 3.5.0}/kong/11578.yml | 0 .../{unreleased => 3.5.0}/kong/11599.yml | 0 .../{unreleased => 3.5.0}/kong/11613.yml | 0 .../{unreleased => 3.5.0}/kong/11638.yml | 0 .../{unreleased => 3.5.0}/kong/11639.yml | 0 .../{unreleased => 3.5.0}/kong/11727.yml | 0 .../kong/aws_lambda_service_cache.yml | 0 .../kong/bump_openssl_3.1.4.yml | 0 .../kong/dedicated_config_processing.yml | 0 .../kong/fix-cve-2023-44487.yml | 0 .../kong/fix-opentelemetry-parent-id.yml | 0 .../kong/fix-tcp-log-sslhandshake.yml | 0 .../kong/fix_dns_enable_dns_no_sync.yml | 0 .../kong/fix_patch_order.yml | 0 .../kong/lapis_version_bump.yml | 0 .../kong/lua_kong_nginx_module_bump.yml | 0 .../kong/luajit_ldp_stp_fusion.yml | 0 .../kong/ngx_wasm_module_bump.yml | 0 .../kong/on_prem_dp_metadata.yml | 0 .../kong/per_reqeuest_deubgging.yml | 0 .../kong/plugin-configure-phase.yml | 0 .../kong/request-aware-table.yml | 0 .../{unreleased => 3.5.0}/kong/request_id.yml | 0 .../session_do_not_read_body_by_default.yml | 0 .../kong/vault-declarative.yml | 0 .../kong/vault-init-warmup.yml | 0 .../kong/vault-resurrect.yml | 0 .../kong/wasm-filter-config-schemas.yml | 0 .../kong/wasm-filter-json-config.yml | 0 .../kong/wasmtime_version_bump.yml | 0 changelog/unreleased/kong-manager/.gitkeep | 0 54 files changed, 286 insertions(+), 52 deletions(-) create mode 100644 changelog/3.5.0/3.5.0.md create mode 100644 changelog/3.5.0/kong/.gitkeep rename changelog/{unreleased => 
3.5.0}/kong/10570.yml (100%) rename changelog/{unreleased => 3.5.0}/kong/11360-1.yml (100%) rename changelog/{unreleased => 3.5.0}/kong/11360-2.yml (100%) rename changelog/{unreleased => 3.5.0}/kong/11402.yml (100%) rename changelog/{unreleased => 3.5.0}/kong/11424.yml (100%) rename changelog/{unreleased => 3.5.0}/kong/11442.yml (100%) rename changelog/{unreleased => 3.5.0}/kong/11464.yml (100%) rename changelog/{unreleased => 3.5.0}/kong/11468.yml (100%) rename changelog/{unreleased => 3.5.0}/kong/11480.yml (100%) rename changelog/{unreleased => 3.5.0}/kong/11484.yml (100%) rename changelog/{unreleased => 3.5.0}/kong/11502.yml (100%) rename changelog/{unreleased => 3.5.0}/kong/11515.yml (100%) rename changelog/{unreleased => 3.5.0}/kong/11518.yml (100%) rename changelog/{unreleased => 3.5.0}/kong/11523.yml (100%) rename changelog/{unreleased => 3.5.0}/kong/11532.yml (100%) rename changelog/{unreleased => 3.5.0}/kong/11538.yml (100%) rename changelog/{unreleased => 3.5.0}/kong/11551-1.yml (100%) rename changelog/{unreleased => 3.5.0}/kong/11551-2.yml (100%) rename changelog/{unreleased => 3.5.0}/kong/11553.yml (100%) rename changelog/{unreleased => 3.5.0}/kong/11566.yml (100%) rename changelog/{unreleased => 3.5.0}/kong/11578.yml (100%) rename changelog/{unreleased => 3.5.0}/kong/11599.yml (100%) rename changelog/{unreleased => 3.5.0}/kong/11613.yml (100%) rename changelog/{unreleased => 3.5.0}/kong/11638.yml (100%) rename changelog/{unreleased => 3.5.0}/kong/11639.yml (100%) rename changelog/{unreleased => 3.5.0}/kong/11727.yml (100%) rename changelog/{unreleased => 3.5.0}/kong/aws_lambda_service_cache.yml (100%) rename changelog/{unreleased => 3.5.0}/kong/bump_openssl_3.1.4.yml (100%) rename changelog/{unreleased => 3.5.0}/kong/dedicated_config_processing.yml (100%) rename changelog/{unreleased => 3.5.0}/kong/fix-cve-2023-44487.yml (100%) rename changelog/{unreleased => 3.5.0}/kong/fix-opentelemetry-parent-id.yml (100%) rename changelog/{unreleased => 3.5.0}/kong/fix-tcp-log-sslhandshake.yml (100%) rename changelog/{unreleased => 3.5.0}/kong/fix_dns_enable_dns_no_sync.yml (100%) rename changelog/{unreleased => 3.5.0}/kong/fix_patch_order.yml (100%) rename changelog/{unreleased => 3.5.0}/kong/lapis_version_bump.yml (100%) rename changelog/{unreleased => 3.5.0}/kong/lua_kong_nginx_module_bump.yml (100%) rename changelog/{unreleased => 3.5.0}/kong/luajit_ldp_stp_fusion.yml (100%) rename changelog/{unreleased => 3.5.0}/kong/ngx_wasm_module_bump.yml (100%) rename changelog/{unreleased => 3.5.0}/kong/on_prem_dp_metadata.yml (100%) rename changelog/{unreleased => 3.5.0}/kong/per_reqeuest_deubgging.yml (100%) rename changelog/{unreleased => 3.5.0}/kong/plugin-configure-phase.yml (100%) rename changelog/{unreleased => 3.5.0}/kong/request-aware-table.yml (100%) rename changelog/{unreleased => 3.5.0}/kong/request_id.yml (100%) rename changelog/{unreleased => 3.5.0}/kong/session_do_not_read_body_by_default.yml (100%) rename changelog/{unreleased => 3.5.0}/kong/vault-declarative.yml (100%) rename changelog/{unreleased => 3.5.0}/kong/vault-init-warmup.yml (100%) rename changelog/{unreleased => 3.5.0}/kong/vault-resurrect.yml (100%) rename changelog/{unreleased => 3.5.0}/kong/wasm-filter-config-schemas.yml (100%) rename changelog/{unreleased => 3.5.0}/kong/wasm-filter-json-config.yml (100%) rename changelog/{unreleased => 3.5.0}/kong/wasmtime_version_bump.yml (100%) create mode 100644 changelog/unreleased/kong-manager/.gitkeep diff --git a/CHANGELOG.md b/CHANGELOG.md index b37b96a03df8..dfb1ebfa07d9 
100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -14,68 +14,16 @@ #### Core -- Support HTTP query parameters in expression routes. - [#11348](https://github.com/Kong/kong/pull/11348) - #### Plugins -- **AWS-Lambda**: the AWS-Lambda plugin has been refactored by using `lua-resty-aws` as an - underlying AWS library. The refactor simplifies the AWS-Lambda plugin code base and - adding support for multiple IAM authenticating scenarios. - [#11350](https://github.com/Kong/kong/pull/11350) -- **OpenTelemetry** and **Zipkin**: Support GCP X-Cloud-Trace-Context header - The field `header_type` now accepts the value `gcp` to propagate the - Google Cloud trace header - [#11254](https://github.com/Kong/kong/pull/11254) - ### Fixes #### Core -- Fixed critical level logs when starting external plugin servers. Those logs cannot be suppressed due to the limitation of OpenResty. We choose to remove the socket availability detection feature. - [#11372](https://github.com/Kong/kong/pull/11372) -- Fix an issue where a crashing Go plugin server process would cause subsequent - requests proxied through Kong to execute Go plugins with inconsistent configurations. - The issue only affects scenarios where the same Go plugin is applied to different Route - or Service entities. - [#11306](https://github.com/Kong/kong/pull/11306) -- Fix an issue where cluster_cert or cluster_ca_cert is inserted into lua_ssl_trusted_certificate before being base64 decoded. - [#11385](https://github.com/Kong/kong/pull/11385) -- Fix cache warmup mechanism not working in `acls` plugin groups config entity scenario. - [#11414](https://github.com/Kong/kong/pull/11414) -- Fix an issue that queue stops processing when a hard error is encountered in the handler function. - [#11423](https://github.com/Kong/kong/pull/11423) -- Fix an issue that query parameters are not forwarded in proxied request. - Thanks [@chirag-manwani](https://github.com/chirag-manwani) for contributing this change. - [#11328](https://github.com/Kong/kong/pull/11328) -- Fix an issue that response status code is not real upstream status when using kong.response function. - [#11437](https://github.com/Kong/kong/pull/11437) -- Removed a hardcoded proxy-wasm isolation level setting that was preventing the - `nginx_http_proxy_wasm_isolation` configuration value from taking effect. - [#11407](https://github.com/Kong/kong/pull/11407) - #### Plugins -- **OAuth2**: For OAuth2 plugin, `scope` has been taken into account as a new criterion of the request validation. When refreshing token with `refresh_token`, the scopes associated with the `refresh_token` provided in the request must be same with or a subset of the scopes configured in the OAuth2 plugin instance hit by the request. - [#11342](https://github.com/Kong/kong/pull/11342) -- When the worker is in shutdown mode and more data is immediately available without waiting for `max_coalescing_delay`, queues are now cleared in batches. - Thanks [@JensErat](https://github.com/JensErat) for contributing this change. - [#11376](https://github.com/Kong/kong/pull/11376) -- A race condition in the plugin queue could potentially crash the worker when `max_entries` was set to `max_batch_size`. - [#11378](https://github.com/Kong/kong/pull/11378) -- **AWS-Lambda**: fix an issue that the AWS-Lambda plugin cannot extract a json encoded proxy integration response. 
- [#11413](https://github.com/Kong/kong/pull/11413) - ### Dependencies -- Bumped lua-resty-aws from 1.3.0 to 1.3.1 - [#11419](https://github.com/Kong/kong/pull/11419) -- Bumped lua-resty-session from 4.0.4 to 4.0.5 - [#11416](https://github.com/Kong/kong/pull/11416) -- Bumped OpenSSL from 3.1.1 to 3.1.2 - [#11361](https://github.com/Kong/kong/pull/11361) - - ## 3.4.0 ### Breaking Changes diff --git a/changelog/3.5.0/3.5.0.md b/changelog/3.5.0/3.5.0.md new file mode 100644 index 000000000000..c9b715739191 --- /dev/null +++ b/changelog/3.5.0/3.5.0.md @@ -0,0 +1,286 @@ +## Kong + + +### Performance +#### Configuration + +- Bumped the default value of `upstream_keepalive_pool_size` to `512` and `upstream_keepalive_max_requests` to `1000` + [#11515](https://github.com/Kong/kong/issues/11515) +#### Core + +- refactor workspace id and name retrieval + [#11442](https://github.com/Kong/kong/issues/11442) + +### Breaking Changes +#### Plugin + +- **Session**: a new configuration field `read_body_for_logout` was added with a default value of `false`, that changes behavior of `logout_post_arg` in a way that it is not anymore considered if the `read_body_for_logout` is not explicitly set to `true`. This is to avoid session plugin from reading request bodies by default on e.g. `POST` request for logout detection. + [#10333](https://github.com/Kong/kong/issues/10333) + + +### Dependencies +#### Core + +- Bumped resty.openssl from 0.8.23 to 0.8.25 + [#11518](https://github.com/Kong/kong/issues/11518) + +- Fix incorrect LuaJIT register allocation for IR_*LOAD on ARM64 + [#11638](https://github.com/Kong/kong/issues/11638) + +- Fix LDP/STP fusing for unaligned accesses on ARM64 + [#11639](https://github.com/Kong/kong/issues/11639) + + +- Bump lua-kong-nginx-module from 0.6.0 to 0.8.0 + [#11663](https://github.com/Kong/kong/issues/11663) + +- Fix incorrect LuaJIT LDP/STP fusion on ARM64 which may sometimes cause incorrect logic + [#11537](https://github.com/Kong/kong/issues/11537) + +#### Default + +- Bumped lua-resty-healthcheck from 1.6.2 to 1.6.3 + [#11360](https://github.com/Kong/kong/issues/11360) + +- Bumped OpenResty from 1.21.4.1 to 1.21.4.2 + [#11360](https://github.com/Kong/kong/issues/11360) + +- Bumped LuaSec from 1.3.1 to 1.3.2 + [#11553](https://github.com/Kong/kong/issues/11553) + + +- Bumped lua-resty-aws from 1.3.1 to 1.3.5 + [#11613](https://github.com/Kong/kong/issues/11613) + + +- bump OpenSSL from 3.1.1 to 3.1.4 + [#11844](https://github.com/Kong/kong/issues/11844) + + +- Bumped kong-lapis from 1.14.0.2 to 1.14.0.3 + [#11849](https://github.com/Kong/kong/issues/11849) + + +- Bumped ngx_wasm_module to latest rolling release version. + [#11678](https://github.com/Kong/kong/issues/11678) + +- Bump Wasmtime version to 12.0.2 + [#11738](https://github.com/Kong/kong/issues/11738) + +- Bumped lua-resty-aws from 1.3.0 to 1.3.1 + [#11419](https://github.com/Kong/kong/pull/11419) + +- Bumped lua-resty-session from 4.0.4 to 4.0.5 + [#11416](https://github.com/Kong/kong/pull/11416) + + +### Features +#### Core + +- Add a new endpoint `/schemas/vaults/:name` to retrieve the schema of a vault. + [#11727](https://github.com/Kong/kong/issues/11727) + +- rename `privileged_agent` to `dedicated_config_processing. Enable `dedicated_config_processing` by default + [#11784](https://github.com/Kong/kong/issues/11784) + +- Support observing the time consumed by some components in the given request. 
+ [#11627](https://github.com/Kong/kong/issues/11627) + +- Plugins can now implement `Plugin:configure(configs)` function that is called whenever there is a change in plugin entities. An array of current plugin configurations is passed to the function, or `nil` in case there are no active configurations for the plugin. + [#11703](https://github.com/Kong/kong/issues/11703) + +- Add a request-aware table able to detect accesses from different requests. + [#11017](https://github.com/Kong/kong/issues/11017) + +- A unique Request ID is now populated in the error log, access log, error templates, log serializer, and in a new X-Kong-Request-Id header (configurable for upstream/downstream using the `headers` and `headers_upstream` configuration options). + [#11663](https://github.com/Kong/kong/issues/11663) + +- Add support for optional Wasm filter configuration schemas + [#11568](https://github.com/Kong/kong/issues/11568) + +- Support JSON in Wasm filter configuration + [#11697](https://github.com/Kong/kong/issues/11697) + +- Support HTTP query parameters in expression routes. + [#11348](https://github.com/Kong/kong/pull/11348) + +#### Plugin + +- **response-ratelimiting**: add support for secret rotation with redis connection + [#10570](https://github.com/Kong/kong/issues/10570) + + +- **CORS**: Support the `Access-Control-Request-Private-Network` header in cross-origin preflight requests + [#11523](https://github.com/Kong/kong/issues/11523) + +- add scan_count to redis storage schema + [#11532](https://github.com/Kong/kong/issues/11532) + + +- **AWS-Lambda**: the AWS-Lambda plugin has been refactored by using `lua-resty-aws` as an + underlying AWS library. The refactor simplifies the AWS-Lambda plugin code base and + adds support for multiple IAM authentication scenarios. + [#11350](https://github.com/Kong/kong/pull/11350) + +- **OpenTelemetry** and **Zipkin**: Support GCP X-Cloud-Trace-Context header + The field `header_type` now accepts the value `gcp` to propagate the + Google Cloud trace header + [#11254](https://github.com/Kong/kong/pull/11254) + +#### Clustering + +- **Clustering**: Allow configuring DP metadata labels for on-premise CP Gateway + [#11625](https://github.com/Kong/kong/issues/11625) + +### Fixes #### Configuration + +- The default value of the `dns_no_sync` option has been changed to `on` + [#11871](https://github.com/Kong/kong/issues/11871) + +#### Core + +- Fix an issue where the TTL of the key-auth plugin didn't work in DB-less and Hybrid mode. + [#11464](https://github.com/Kong/kong/issues/11464) + +- Fix a problem where an abnormal socket connection could be reused when querying the Postgres database.
+ [#11480](https://github.com/Kong/kong/issues/11480) + +- Fix an upstream SSL failure when plugins use the response handler + [#11502](https://github.com/Kong/kong/issues/11502) + +- Fix an issue where the `tls_passthrough` protocol could not work with the expressions flavor + [#11538](https://github.com/Kong/kong/issues/11538) + +- Fix a bug that caused a failure when sending tracing data to Datadog if the value of the x-datadog-parent-id header in requests is a short decimal string + [#11599](https://github.com/Kong/kong/issues/11599) + +- Apply Nginx patch for detecting HTTP/2 stream reset attacks early (CVE-2023-44487) + [#11743](https://github.com/Kong/kong/issues/11743) + +- Fix the build failure when applying patches + [#11696](https://github.com/Kong/kong/issues/11696) + +- Vault references can be used in DB-less mode in declarative config + [#11845](https://github.com/Kong/kong/issues/11845) + + +- Properly warm up Vault caches on init + [#11827](https://github.com/Kong/kong/issues/11827) + + +- Vault resurrect time is respected in case a vault secret is deleted from a vault + [#11852](https://github.com/Kong/kong/issues/11852) + +- Fixed critical level logs when starting external plugin servers. Those logs cannot be suppressed due to the limitation of OpenResty. We choose to remove the socket availability detection feature. + [#11372](https://github.com/Kong/kong/pull/11372) + +- Fix an issue where a crashing Go plugin server process would cause subsequent + requests proxied through Kong to execute Go plugins with inconsistent configurations. + The issue only affects scenarios where the same Go plugin is applied to different Route + or Service entities. + [#11306](https://github.com/Kong/kong/pull/11306) + +- Fix an issue where cluster_cert or cluster_ca_cert is inserted into lua_ssl_trusted_certificate before being base64 decoded. + [#11385](https://github.com/Kong/kong/pull/11385) + +- Fix cache warmup mechanism not working in `acls` plugin groups config entity scenario. + [#11414](https://github.com/Kong/kong/pull/11414) + +- Fix an issue that queue stops processing when a hard error is encountered in the handler function. + [#11423](https://github.com/Kong/kong/pull/11423) + +- Fix an issue that query parameters are not forwarded in proxied request. + Thanks [@chirag-manwani](https://github.com/chirag-manwani) for contributing this change. + [#11328](https://github.com/Kong/kong/pull/11328) + +- Fix an issue that response status code is not real upstream status when using kong.response function. + [#11437](https://github.com/Kong/kong/pull/11437) + +- Removed a hardcoded proxy-wasm isolation level setting that was preventing the + `nginx_http_proxy_wasm_isolation` configuration value from taking effect.
+ [#11407](https://github.com/Kong/kong/pull/11407) + +#### PDK + +- Fix several issues in Vault and refactor the Vault code base: - Make DAOs fall back to an empty string when resolving Vault references fails - Use a node-level mutex when rotating references - Refresh references on config changes - Update plugin referenced values only once per request - Pass only the valid config options to vault implementations - Resolve multi-value secrets only once when rotating them - Do not start the vault secrets rotation timer on control planes - Re-enable negative caching - Reimplement the kong.vault.try function - Remove references from rotation in case their configuration has changed + [#11652](https://github.com/Kong/kong/issues/11652) + +- Fix an issue where the response body gets repeated when `kong.response.get_raw_body()` is called multiple times in a request lifecycle. + [#11424](https://github.com/Kong/kong/issues/11424) + +- Tracing: fix an issue that resulted in some parent spans ending before their children due to different precision of their timestamps + [#11484](https://github.com/Kong/kong/issues/11484) + +- Fix a bug related to data interference between requests in the kong.log.serialize function. + [#11566](https://github.com/Kong/kong/issues/11566) +#### Plugin + +- **Opentelemetry**: fix an issue that resulted in invalid parent IDs in the propagated tracing headers + [#11468](https://github.com/Kong/kong/issues/11468) + +- **AWS-Lambda**: let the plugin-level proxy take effect on the EKS IRSA credential provider + [#11551](https://github.com/Kong/kong/issues/11551) + +- Cache the AWS Lambda service by the Lambda-service-related fields + [#11821](https://github.com/Kong/kong/issues/11821) + +- **Opentelemetry**: fix an issue that resulted in traces with invalid parent IDs when `balancer` instrumentation was enabled + [#11830](https://github.com/Kong/kong/issues/11830) + + +- **tcp-log**: fix an issue of unnecessary handshakes when reusing a TLS connection + [#11848](https://github.com/Kong/kong/issues/11848) + +- **OAuth2**: For the OAuth2 plugin, `scope` has been taken into account as a new criterion of the request validation. When refreshing a token with `refresh_token`, the scopes associated with the `refresh_token` provided in the request must be the same as, or a subset of, the scopes configured in the OAuth2 plugin instance hit by the request. + [#11342](https://github.com/Kong/kong/pull/11342) + +- When the worker is in shutdown mode and more data is immediately available without waiting for `max_coalescing_delay`, queues are now cleared in batches. + Thanks [@JensErat](https://github.com/JensErat) for contributing this change. + [#11376](https://github.com/Kong/kong/pull/11376) + +- A race condition in the plugin queue could potentially crash the worker when `max_entries` was set to `max_batch_size`. + [#11378](https://github.com/Kong/kong/pull/11378) + +- **AWS-Lambda**: fix an issue that the AWS-Lambda plugin cannot extract a JSON encoded proxy integration response. + [#11413](https://github.com/Kong/kong/pull/11413) + +#### Default + +- Restore lapis & luarocks-admin bins + [#11578](https://github.com/Kong/kong/issues/11578) +## Kong-Manager + + + + + + +### Features #### Default + +- Add `JSON` and `YAML` formats in entity config cards. + [#111](https://github.com/Kong/kong-manager/issues/111) + + +- Plugin form fields now display descriptions from backend schema. + [#66](https://github.com/Kong/kong-manager/issues/66) + + +- Add the `protocols` field in plugin form.
+ [#93](https://github.com/Kong/kong-manager/issues/93) + + +- The upstream target list shows the `Mark Healthy` and `Mark Unhealthy` action items when certain conditions are met. + [#86](https://github.com/Kong/kong-manager/issues/86) + + +### Fixes +#### Default + +- Fix incorrect port number in Port Details. + [#103](https://github.com/Kong/kong-manager/issues/103) + + +- Fix a bug where the `proxy-cache` plugin cannot be installed. + [#104](https://github.com/Kong/kong-manager/issues/104) diff --git a/changelog/3.5.0/kong/.gitkeep b/changelog/3.5.0/kong/.gitkeep new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/changelog/unreleased/kong/10570.yml b/changelog/3.5.0/kong/10570.yml similarity index 100% rename from changelog/unreleased/kong/10570.yml rename to changelog/3.5.0/kong/10570.yml diff --git a/changelog/unreleased/kong/11360-1.yml b/changelog/3.5.0/kong/11360-1.yml similarity index 100% rename from changelog/unreleased/kong/11360-1.yml rename to changelog/3.5.0/kong/11360-1.yml diff --git a/changelog/unreleased/kong/11360-2.yml b/changelog/3.5.0/kong/11360-2.yml similarity index 100% rename from changelog/unreleased/kong/11360-2.yml rename to changelog/3.5.0/kong/11360-2.yml diff --git a/changelog/unreleased/kong/11402.yml b/changelog/3.5.0/kong/11402.yml similarity index 100% rename from changelog/unreleased/kong/11402.yml rename to changelog/3.5.0/kong/11402.yml diff --git a/changelog/unreleased/kong/11424.yml b/changelog/3.5.0/kong/11424.yml similarity index 100% rename from changelog/unreleased/kong/11424.yml rename to changelog/3.5.0/kong/11424.yml diff --git a/changelog/unreleased/kong/11442.yml b/changelog/3.5.0/kong/11442.yml similarity index 100% rename from changelog/unreleased/kong/11442.yml rename to changelog/3.5.0/kong/11442.yml diff --git a/changelog/unreleased/kong/11464.yml b/changelog/3.5.0/kong/11464.yml similarity index 100% rename from changelog/unreleased/kong/11464.yml rename to changelog/3.5.0/kong/11464.yml diff --git a/changelog/unreleased/kong/11468.yml b/changelog/3.5.0/kong/11468.yml similarity index 100% rename from changelog/unreleased/kong/11468.yml rename to changelog/3.5.0/kong/11468.yml diff --git a/changelog/unreleased/kong/11480.yml b/changelog/3.5.0/kong/11480.yml similarity index 100% rename from changelog/unreleased/kong/11480.yml rename to changelog/3.5.0/kong/11480.yml diff --git a/changelog/unreleased/kong/11484.yml b/changelog/3.5.0/kong/11484.yml similarity index 100% rename from changelog/unreleased/kong/11484.yml rename to changelog/3.5.0/kong/11484.yml diff --git a/changelog/unreleased/kong/11502.yml b/changelog/3.5.0/kong/11502.yml similarity index 100% rename from changelog/unreleased/kong/11502.yml rename to changelog/3.5.0/kong/11502.yml diff --git a/changelog/unreleased/kong/11515.yml b/changelog/3.5.0/kong/11515.yml similarity index 100% rename from changelog/unreleased/kong/11515.yml rename to changelog/3.5.0/kong/11515.yml diff --git a/changelog/unreleased/kong/11518.yml b/changelog/3.5.0/kong/11518.yml similarity index 100% rename from changelog/unreleased/kong/11518.yml rename to changelog/3.5.0/kong/11518.yml diff --git a/changelog/unreleased/kong/11523.yml b/changelog/3.5.0/kong/11523.yml similarity index 100% rename from changelog/unreleased/kong/11523.yml rename to changelog/3.5.0/kong/11523.yml diff --git a/changelog/unreleased/kong/11532.yml b/changelog/3.5.0/kong/11532.yml similarity index 100% rename from changelog/unreleased/kong/11532.yml rename to changelog/3.5.0/kong/11532.yml diff --git 
a/changelog/unreleased/kong/11538.yml b/changelog/3.5.0/kong/11538.yml similarity index 100% rename from changelog/unreleased/kong/11538.yml rename to changelog/3.5.0/kong/11538.yml diff --git a/changelog/unreleased/kong/11551-1.yml b/changelog/3.5.0/kong/11551-1.yml similarity index 100% rename from changelog/unreleased/kong/11551-1.yml rename to changelog/3.5.0/kong/11551-1.yml diff --git a/changelog/unreleased/kong/11551-2.yml b/changelog/3.5.0/kong/11551-2.yml similarity index 100% rename from changelog/unreleased/kong/11551-2.yml rename to changelog/3.5.0/kong/11551-2.yml diff --git a/changelog/unreleased/kong/11553.yml b/changelog/3.5.0/kong/11553.yml similarity index 100% rename from changelog/unreleased/kong/11553.yml rename to changelog/3.5.0/kong/11553.yml diff --git a/changelog/unreleased/kong/11566.yml b/changelog/3.5.0/kong/11566.yml similarity index 100% rename from changelog/unreleased/kong/11566.yml rename to changelog/3.5.0/kong/11566.yml diff --git a/changelog/unreleased/kong/11578.yml b/changelog/3.5.0/kong/11578.yml similarity index 100% rename from changelog/unreleased/kong/11578.yml rename to changelog/3.5.0/kong/11578.yml diff --git a/changelog/unreleased/kong/11599.yml b/changelog/3.5.0/kong/11599.yml similarity index 100% rename from changelog/unreleased/kong/11599.yml rename to changelog/3.5.0/kong/11599.yml diff --git a/changelog/unreleased/kong/11613.yml b/changelog/3.5.0/kong/11613.yml similarity index 100% rename from changelog/unreleased/kong/11613.yml rename to changelog/3.5.0/kong/11613.yml diff --git a/changelog/unreleased/kong/11638.yml b/changelog/3.5.0/kong/11638.yml similarity index 100% rename from changelog/unreleased/kong/11638.yml rename to changelog/3.5.0/kong/11638.yml diff --git a/changelog/unreleased/kong/11639.yml b/changelog/3.5.0/kong/11639.yml similarity index 100% rename from changelog/unreleased/kong/11639.yml rename to changelog/3.5.0/kong/11639.yml diff --git a/changelog/unreleased/kong/11727.yml b/changelog/3.5.0/kong/11727.yml similarity index 100% rename from changelog/unreleased/kong/11727.yml rename to changelog/3.5.0/kong/11727.yml diff --git a/changelog/unreleased/kong/aws_lambda_service_cache.yml b/changelog/3.5.0/kong/aws_lambda_service_cache.yml similarity index 100% rename from changelog/unreleased/kong/aws_lambda_service_cache.yml rename to changelog/3.5.0/kong/aws_lambda_service_cache.yml diff --git a/changelog/unreleased/kong/bump_openssl_3.1.4.yml b/changelog/3.5.0/kong/bump_openssl_3.1.4.yml similarity index 100% rename from changelog/unreleased/kong/bump_openssl_3.1.4.yml rename to changelog/3.5.0/kong/bump_openssl_3.1.4.yml diff --git a/changelog/unreleased/kong/dedicated_config_processing.yml b/changelog/3.5.0/kong/dedicated_config_processing.yml similarity index 100% rename from changelog/unreleased/kong/dedicated_config_processing.yml rename to changelog/3.5.0/kong/dedicated_config_processing.yml diff --git a/changelog/unreleased/kong/fix-cve-2023-44487.yml b/changelog/3.5.0/kong/fix-cve-2023-44487.yml similarity index 100% rename from changelog/unreleased/kong/fix-cve-2023-44487.yml rename to changelog/3.5.0/kong/fix-cve-2023-44487.yml diff --git a/changelog/unreleased/kong/fix-opentelemetry-parent-id.yml b/changelog/3.5.0/kong/fix-opentelemetry-parent-id.yml similarity index 100% rename from changelog/unreleased/kong/fix-opentelemetry-parent-id.yml rename to changelog/3.5.0/kong/fix-opentelemetry-parent-id.yml diff --git a/changelog/unreleased/kong/fix-tcp-log-sslhandshake.yml 
b/changelog/3.5.0/kong/fix-tcp-log-sslhandshake.yml similarity index 100% rename from changelog/unreleased/kong/fix-tcp-log-sslhandshake.yml rename to changelog/3.5.0/kong/fix-tcp-log-sslhandshake.yml diff --git a/changelog/unreleased/kong/fix_dns_enable_dns_no_sync.yml b/changelog/3.5.0/kong/fix_dns_enable_dns_no_sync.yml similarity index 100% rename from changelog/unreleased/kong/fix_dns_enable_dns_no_sync.yml rename to changelog/3.5.0/kong/fix_dns_enable_dns_no_sync.yml diff --git a/changelog/unreleased/kong/fix_patch_order.yml b/changelog/3.5.0/kong/fix_patch_order.yml similarity index 100% rename from changelog/unreleased/kong/fix_patch_order.yml rename to changelog/3.5.0/kong/fix_patch_order.yml diff --git a/changelog/unreleased/kong/lapis_version_bump.yml b/changelog/3.5.0/kong/lapis_version_bump.yml similarity index 100% rename from changelog/unreleased/kong/lapis_version_bump.yml rename to changelog/3.5.0/kong/lapis_version_bump.yml diff --git a/changelog/unreleased/kong/lua_kong_nginx_module_bump.yml b/changelog/3.5.0/kong/lua_kong_nginx_module_bump.yml similarity index 100% rename from changelog/unreleased/kong/lua_kong_nginx_module_bump.yml rename to changelog/3.5.0/kong/lua_kong_nginx_module_bump.yml diff --git a/changelog/unreleased/kong/luajit_ldp_stp_fusion.yml b/changelog/3.5.0/kong/luajit_ldp_stp_fusion.yml similarity index 100% rename from changelog/unreleased/kong/luajit_ldp_stp_fusion.yml rename to changelog/3.5.0/kong/luajit_ldp_stp_fusion.yml diff --git a/changelog/unreleased/kong/ngx_wasm_module_bump.yml b/changelog/3.5.0/kong/ngx_wasm_module_bump.yml similarity index 100% rename from changelog/unreleased/kong/ngx_wasm_module_bump.yml rename to changelog/3.5.0/kong/ngx_wasm_module_bump.yml diff --git a/changelog/unreleased/kong/on_prem_dp_metadata.yml b/changelog/3.5.0/kong/on_prem_dp_metadata.yml similarity index 100% rename from changelog/unreleased/kong/on_prem_dp_metadata.yml rename to changelog/3.5.0/kong/on_prem_dp_metadata.yml diff --git a/changelog/unreleased/kong/per_reqeuest_deubgging.yml b/changelog/3.5.0/kong/per_reqeuest_deubgging.yml similarity index 100% rename from changelog/unreleased/kong/per_reqeuest_deubgging.yml rename to changelog/3.5.0/kong/per_reqeuest_deubgging.yml diff --git a/changelog/unreleased/kong/plugin-configure-phase.yml b/changelog/3.5.0/kong/plugin-configure-phase.yml similarity index 100% rename from changelog/unreleased/kong/plugin-configure-phase.yml rename to changelog/3.5.0/kong/plugin-configure-phase.yml diff --git a/changelog/unreleased/kong/request-aware-table.yml b/changelog/3.5.0/kong/request-aware-table.yml similarity index 100% rename from changelog/unreleased/kong/request-aware-table.yml rename to changelog/3.5.0/kong/request-aware-table.yml diff --git a/changelog/unreleased/kong/request_id.yml b/changelog/3.5.0/kong/request_id.yml similarity index 100% rename from changelog/unreleased/kong/request_id.yml rename to changelog/3.5.0/kong/request_id.yml diff --git a/changelog/unreleased/kong/session_do_not_read_body_by_default.yml b/changelog/3.5.0/kong/session_do_not_read_body_by_default.yml similarity index 100% rename from changelog/unreleased/kong/session_do_not_read_body_by_default.yml rename to changelog/3.5.0/kong/session_do_not_read_body_by_default.yml diff --git a/changelog/unreleased/kong/vault-declarative.yml b/changelog/3.5.0/kong/vault-declarative.yml similarity index 100% rename from changelog/unreleased/kong/vault-declarative.yml rename to changelog/3.5.0/kong/vault-declarative.yml diff --git 
a/changelog/unreleased/kong/vault-init-warmup.yml b/changelog/3.5.0/kong/vault-init-warmup.yml similarity index 100% rename from changelog/unreleased/kong/vault-init-warmup.yml rename to changelog/3.5.0/kong/vault-init-warmup.yml diff --git a/changelog/unreleased/kong/vault-resurrect.yml b/changelog/3.5.0/kong/vault-resurrect.yml similarity index 100% rename from changelog/unreleased/kong/vault-resurrect.yml rename to changelog/3.5.0/kong/vault-resurrect.yml diff --git a/changelog/unreleased/kong/wasm-filter-config-schemas.yml b/changelog/3.5.0/kong/wasm-filter-config-schemas.yml similarity index 100% rename from changelog/unreleased/kong/wasm-filter-config-schemas.yml rename to changelog/3.5.0/kong/wasm-filter-config-schemas.yml diff --git a/changelog/unreleased/kong/wasm-filter-json-config.yml b/changelog/3.5.0/kong/wasm-filter-json-config.yml similarity index 100% rename from changelog/unreleased/kong/wasm-filter-json-config.yml rename to changelog/3.5.0/kong/wasm-filter-json-config.yml diff --git a/changelog/unreleased/kong/wasmtime_version_bump.yml b/changelog/3.5.0/kong/wasmtime_version_bump.yml similarity index 100% rename from changelog/unreleased/kong/wasmtime_version_bump.yml rename to changelog/3.5.0/kong/wasmtime_version_bump.yml diff --git a/changelog/unreleased/kong-manager/.gitkeep b/changelog/unreleased/kong-manager/.gitkeep new file mode 100644 index 000000000000..e69de29bb2d1 From ef957a6e3797b873679c7f0e152ec060a8f942d4 Mon Sep 17 00:00:00 2001 From: Aapo Talvensaari Date: Thu, 9 Nov 2023 17:23:39 +0200 Subject: [PATCH 098/249] feat(db): allow primary key passed as full entity to DAOs (#11695) ### Summary Previously you needed to write code like this: ```lua local route = kong.db.routes:select_by_name("my-route") kong.db.routes:update({ id = route.id }, { paths = { "/test" } }) kong.db.routes:delete({ id = route.id }) ``` With this change you can write it like this: ```lua local route = kong.db.routes:select_by_name("my-route") kong.db.routes:update(route, { paths = { "/test" } }) kong.db.routes:delete(route) ``` You can pass the full entity to all the places that previously required just the primary key.
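The same pattern applies to the other DAO entry points that take a primary key, such as `select` and `upsert`. The following is an illustrative sketch only, reusing the hypothetical "my-route" route from the examples above rather than code from this patch:

```lua
-- fetch the route once, then reuse the full entity wherever a
-- primary key table used to be required
local route = kong.db.routes:select_by_name("my-route")

-- select by passing the full entity instead of { id = route.id }
local fresh_route = kong.db.routes:select(route)

-- upsert also accepts the full entity as its primary-key argument
kong.db.routes:upsert(route, { paths = { "/test" } })
```

Internally the DAO extracts the primary key from the passed table via `schema:extract_pk_values()`, so any extra fields on the entity are ignored.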
Signed-off-by: Aapo Talvensaari --- .../unreleased/kong/dao-pk-as-entity.yml | 3 + kong/clustering/control_plane.lua | 2 +- kong/db/dao/certificates.lua | 4 +- kong/db/dao/init.lua | 20 ++- kong/db/dao/snis.lua | 1 + kong/db/dao/targets.lua | 2 +- kong/pdk/client.lua | 2 +- kong/plugins/acme/client.lua | 17 +- kong/plugins/acme/storage/kong.lua | 4 +- kong/plugins/oauth2/access.lua | 10 +- kong/plugins/proxy-cache/api.lua | 8 +- kong/plugins/proxy-cache/handler.lua | 4 +- kong/runloop/balancer/upstreams.lua | 2 +- .../03-db/02-db_core_entities_spec.lua | 166 ++++++------------ spec/02-integration/03-db/03-plugins_spec.lua | 8 +- .../03-db/08-declarative_spec.lua | 22 ++- .../03-db/10-db_unique_foreign_spec.lua | 52 ++---- .../03-db/11-db_transformations_spec.lua | 16 +- .../03-db/12-dao_hooks_spec.lua | 4 +- .../03-db/13-cluster_status_spec.lua | 2 +- spec/02-integration/03-db/18-keys_spec.lua | 4 +- .../02-integration/03-db/19-key-sets_spec.lua | 18 +- .../04-admin_api/03-consumers_routes_spec.lua | 19 +- .../04-admin_api/04-plugins_routes_spec.lua | 16 +- .../06-certificates_routes_spec.lua | 10 +- .../04-admin_api/09-routes_routes_spec.lua | 60 +++---- .../04-admin_api/10-services_routes_spec.lua | 26 +-- .../04-admin_api/17-foreign-entity_spec.lua | 32 ++-- .../13-vaults/01-vault_spec.lua | 8 +- spec/02-integration/20-wasm/02-db_spec.lua | 2 +- .../10-basic-auth/05-declarative_spec.lua | 8 +- spec/03-plugins/16-jwt/02-api_spec.lua | 2 +- spec/03-plugins/25-oauth2/01-schema_spec.lua | 12 +- spec/03-plugins/25-oauth2/03-access_spec.lua | 6 +- spec/03-plugins/29-acme/01-client_spec.lua | 6 +- 35 files changed, 234 insertions(+), 344 deletions(-) create mode 100644 changelog/unreleased/kong/dao-pk-as-entity.yml diff --git a/changelog/unreleased/kong/dao-pk-as-entity.yml b/changelog/unreleased/kong/dao-pk-as-entity.yml new file mode 100644 index 000000000000..7a741ed3a7c6 --- /dev/null +++ b/changelog/unreleased/kong/dao-pk-as-entity.yml @@ -0,0 +1,3 @@ +message: Allow primary key passed as a full entity to DAO functions. 
+type: feature +scope: Core diff --git a/kong/clustering/control_plane.lua b/kong/clustering/control_plane.lua index f4395979716b..220ba94a78d9 100644 --- a/kong/clustering/control_plane.lua +++ b/kong/clustering/control_plane.lua @@ -227,7 +227,7 @@ function _M:handle_cp_websocket() local purge_delay = self.conf.cluster_data_plane_purge_delay local update_sync_status = function() local ok - ok, err = kong.db.clustering_data_planes:upsert({ id = dp_id, }, { + ok, err = kong.db.clustering_data_planes:upsert({ id = dp_id }, { last_seen = last_seen, config_hash = config_hash ~= "" and config_hash diff --git a/kong/db/dao/certificates.lua b/kong/db/dao/certificates.lua index 80d23ae6ae4a..b6ca5b2b0998 100644 --- a/kong/db/dao/certificates.lua +++ b/kong/db/dao/certificates.lua @@ -69,7 +69,7 @@ function _Certificates:insert(cert, options) cert.snis = name_list or cjson.empty_array if name_list then - local ok, err, err_t = self.db.snis:insert_list({ id = cert.id }, name_list, options) + local ok, err, err_t = self.db.snis:insert_list(cert, name_list, options) if not ok then return nil, err, err_t end @@ -196,7 +196,7 @@ function _Certificates:page(size, offset, options) for i=1, #certs do local cert = certs[i] - local snis, err, err_t = self.db.snis:list_for_certificate({ id = cert.id }) + local snis, err, err_t = self.db.snis:list_for_certificate(cert) if not snis then return nil, err, err_t end diff --git a/kong/db/dao/init.lua b/kong/db/dao/init.lua index b6c28bf2795a..31f6414f65e6 100644 --- a/kong/db/dao/init.lua +++ b/kong/db/dao/init.lua @@ -973,13 +973,14 @@ function DAO:truncate() end -function DAO:select(primary_key, options) - validate_primary_key_type(primary_key) +function DAO:select(pk_or_entity, options) + validate_primary_key_type(pk_or_entity) if options ~= nil then validate_options_type(options) end + local primary_key = self.schema:extract_pk_values(pk_or_entity) local ok, errors = self.schema:validate_primary_key(primary_key) if not ok then local err_t = self.errors:invalid_primary_key(errors) @@ -1163,14 +1164,15 @@ function DAO:insert(entity, options) end -function DAO:update(primary_key, entity, options) - validate_primary_key_type(primary_key) +function DAO:update(pk_or_entity, entity, options) + validate_primary_key_type(pk_or_entity) validate_entity_type(entity) if options ~= nil then validate_options_type(options) end + local primary_key = self.schema:extract_pk_values(pk_or_entity) local ok, errors = self.schema:validate_primary_key(primary_key) if not ok then local err_t = self.errors:invalid_primary_key(errors) @@ -1215,14 +1217,15 @@ function DAO:update(primary_key, entity, options) end -function DAO:upsert(primary_key, entity, options) - validate_primary_key_type(primary_key) +function DAO:upsert(pk_or_entity, entity, options) + validate_primary_key_type(pk_or_entity) validate_entity_type(entity) if options ~= nil then validate_options_type(options) end + local primary_key = self.schema:extract_pk_values(pk_or_entity) local ok, errors = self.schema:validate_primary_key(primary_key) if not ok then local err_t = self.errors:invalid_primary_key(errors) @@ -1272,13 +1275,14 @@ function DAO:upsert(primary_key, entity, options) end -function DAO:delete(primary_key, options) - validate_primary_key_type(primary_key) +function DAO:delete(pk_or_entity, options) + validate_primary_key_type(pk_or_entity) if options ~= nil then validate_options_type(options) end + local primary_key = self.schema:extract_pk_values(pk_or_entity) local ok, errors = 
self.schema:validate_primary_key(primary_key) if not ok then local err_t = self.errors:invalid_primary_key(errors) diff --git a/kong/db/dao/snis.lua b/kong/db/dao/snis.lua index 947eff4c1ab1..e65e549dd9b9 100644 --- a/kong/db/dao/snis.lua +++ b/kong/db/dao/snis.lua @@ -47,6 +47,7 @@ end -- Creates one instance of SNI for each name in name_list -- All created instances will be associated to the given certificate function _SNIs:insert_list(cert_pk, name_list) + cert_pk = self.db.certificates.schema:extract_pk_values(cert_pk) for _, name in ipairs(name_list) do local _, err, err_t = self:insert({ name = name, diff --git a/kong/db/dao/targets.lua b/kong/db/dao/targets.lua index f6168919dc15..c4369be3f9ad 100644 --- a/kong/db/dao/targets.lua +++ b/kong/db/dao/targets.lua @@ -70,7 +70,7 @@ function _TARGETS:upsert(pk, entity, options) if existent.target == entity.target then -- if the upserting entity is newer, update if entity.created_at > existent.created_at then - local ok, err, err_t = self.super.delete(self, { id = existent.id }, opts) + local ok, err, err_t = self.super.delete(self, existent, opts) if ok then return self.super.insert(self, entity, options) end diff --git a/kong/pdk/client.lua b/kong/pdk/client.lua index dd4467131b49..9f74620c5a24 100644 --- a/kong/pdk/client.lua +++ b/kong/pdk/client.lua @@ -192,7 +192,7 @@ local function new(self) end if utils.is_valid_uuid(consumer_id) then - local result, err = kong.db.consumers:select { id = consumer_id } + local result, err = kong.db.consumers:select({ id = consumer_id }) if result then return result diff --git a/kong/plugins/acme/client.lua b/kong/plugins/acme/client.lua index 826f0a030502..8f3378377d5b 100644 --- a/kong/plugins/acme/client.lua +++ b/kong/plugins/acme/client.lua @@ -193,9 +193,7 @@ local function save_dao(host, key, cert) }) if err then - local ok, err_2 = kong.db.certificates:delete({ - id = cert_entity.id, - }) + local ok, err_2 = kong.db.certificates:delete(cert_entity) if not ok then kong.log.warn("error cleaning up certificate entity ", cert_entity.id, ": ", err_2) end @@ -203,12 +201,9 @@ local function save_dao(host, key, cert) end if old_sni_entity and old_sni_entity.certificate then - local id = old_sni_entity.certificate.id - local ok, err = kong.db.certificates:delete({ - id = id, - }) + local ok, err = kong.db.certificates:delete(old_sni_entity.certificate) if not ok then - kong.log.warn("error deleting expired certificate entity ", id, ": ", err) + kong.log.warn("error deleting expired certificate entity ", old_sni_entity.certificate.id, ": ", err) end end end @@ -228,7 +223,7 @@ end local function get_account_key(conf) local kid = conf.key_id - local lookup = {kid = kid} + local lookup = { kid = kid } if conf.key_set then local key_set, key_set_err = kong.db.key_sets:select_by_name(conf.key_set) @@ -237,7 +232,7 @@ local function get_account_key(conf) return nil, "could not load keyset: " .. 
key_set_err end - lookup.set = {id = key_set.id} + lookup.set = { id = key_set.id } end local cache_key = kong.db.keys:cache_key(lookup) @@ -393,7 +388,7 @@ local function load_certkey(conf, host) return nil, "DAO returns empty SNI entity or Certificte entity" end - local cert_entity, err = kong.db.certificates:select({ id = sni_entity.certificate.id }) + local cert_entity, err = kong.db.certificates:select(sni_entity.certificate) if err then kong.log.info("can't read certificate ", sni_entity.certificate.id, " from db", ", deleting renew config") diff --git a/kong/plugins/acme/storage/kong.lua b/kong/plugins/acme/storage/kong.lua index cf45fff1e7e9..42099f68f253 100644 --- a/kong/plugins/acme/storage/kong.lua +++ b/kong/plugins/acme/storage/kong.lua @@ -55,9 +55,7 @@ function _M:delete(k) return end - local _, err = self.dao:delete({ - id = v.id - }) + local _, err = self.dao:delete(v) return err end diff --git a/kong/plugins/oauth2/access.lua b/kong/plugins/oauth2/access.lua index 0a2ff97f8303..2acdc741ad10 100644 --- a/kong/plugins/oauth2/access.lua +++ b/kong/plugins/oauth2/access.lua @@ -107,9 +107,7 @@ local function generate_token(conf, service, credential, authenticated_userid, local refresh_token local token, err if existing_token and conf.reuse_refresh_token then - token, err = kong.db.oauth2_tokens:update({ - id = existing_token.id - }, { + token, err = kong.db.oauth2_tokens:update(existing_token, { access_token = random_string(), expires_in = token_expiration, created_at = timestamp.get_utc() / 1000 @@ -676,7 +674,7 @@ local function issue_token(conf) auth_code.scope, state) -- Delete authorization code so it cannot be reused - kong.db.oauth2_authorization_codes:delete({ id = auth_code.id }) + kong.db.oauth2_authorization_codes:delete(auth_code) end end @@ -785,7 +783,7 @@ local function issue_token(conf) token.scope, state, false, token) -- Delete old token if refresh token not persisted if not conf.reuse_refresh_token then - kong.db.oauth2_tokens:delete({ id = token.id }) + kong.db.oauth2_tokens:delete(token) end end end @@ -894,7 +892,7 @@ end local function load_oauth2_credential_into_memory(credential_id) - local result, err = kong.db.oauth2_credentials:select { id = credential_id } + local result, err = kong.db.oauth2_credentials:select({ id = credential_id }) if err then return nil, err end diff --git a/kong/plugins/proxy-cache/api.lua b/kong/plugins/proxy-cache/api.lua index aaf9aacafe80..cb1178424124 100644 --- a/kong/plugins/proxy-cache/api.lua +++ b/kong/plugins/proxy-cache/api.lua @@ -129,9 +129,7 @@ return { resource = "proxy-cache", GET = function(self) - local plugin, err = kong.db.plugins:select { - id = self.params.plugin_id, - } + local plugin, err = kong.db.plugins:select({ id = self.params.plugin_id }) if err then return kong.response.exit(500, err) end @@ -156,9 +154,7 @@ return { return kong.response.exit(200, cache_val) end, DELETE = function(self) - local plugin, err = kong.db.plugins:select { - id = self.params.plugin_id, - } + local plugin, err = kong.db.plugins:select({ id = self.params.plugin_id }) if err then return kong.response.exit(500, err) end diff --git a/kong/plugins/proxy-cache/handler.lua b/kong/plugins/proxy-cache/handler.lua index e6ff113b3f27..0ba89dc7ca02 100644 --- a/kong/plugins/proxy-cache/handler.lua +++ b/kong/plugins/proxy-cache/handler.lua @@ -261,9 +261,7 @@ function ProxyCacheHandler:init_worker() kong.log.err("handling purge of '", data, "'") local plugin_id, cache_key = unpack(utils.split(data, ":")) - local plugin, err = 
kong.db.plugins:select({ - id = plugin_id, - }) + local plugin, err = kong.db.plugins:select({ id = plugin_id }) if err then kong.log.err("error in retrieving plugins: ", err) return diff --git a/kong/runloop/balancer/upstreams.lua b/kong/runloop/balancer/upstreams.lua index 9c085675b327..b6606c7d1d66 100644 --- a/kong/runloop/balancer/upstreams.lua +++ b/kong/runloop/balancer/upstreams.lua @@ -62,7 +62,7 @@ end -- @param upstream_id string -- @return the upstream table, or nil+error local function load_upstream_into_memory(upstream_id) - local upstream, err = kong.db.upstreams:select({id = upstream_id}, GLOBAL_QUERY_OPTS) + local upstream, err = kong.db.upstreams:select({ id = upstream_id }, GLOBAL_QUERY_OPTS) if not upstream then return nil, err end diff --git a/spec/02-integration/03-db/02-db_core_entities_spec.lua b/spec/02-integration/03-db/02-db_core_entities_spec.lua index 08532e29be55..88a16896dbab 100644 --- a/spec/02-integration/03-db/02-db_core_entities_spec.lua +++ b/spec/02-integration/03-db/02-db_core_entities_spec.lua @@ -809,7 +809,7 @@ for _, strategy in helpers.each_strategy() do service = bp.services:insert(), })) - local route_in_db = assert(db.routes:select({ id = route.id })) + local route_in_db = assert(db.routes:select(route)) assert.truthy(now - route_in_db.created_at < 0.1) assert.truthy(now - route_in_db.updated_at < 0.1) end) @@ -995,7 +995,7 @@ for _, strategy in helpers.each_strategy() do local route_inserted = bp.routes:insert({ hosts = { "example.com" }, }) - local route, err, err_t = db.routes:select({ id = route_inserted.id }) + local route, err, err_t = db.routes:select(route_inserted) assert.is_nil(err_t) assert.is_nil(err) assert.same(route_inserted, route) @@ -1017,9 +1017,7 @@ for _, strategy in helpers.each_strategy() do service = bp.services:insert(), }) assert.is_nil(err) - local route, err, err_t = db.routes:select({ - id = route_inserted.id - }) + local route, err, err_t = db.routes:select(route_inserted) assert.is_nil(err_t) assert.is_nil(err) @@ -1043,8 +1041,7 @@ for _, strategy in helpers.each_strategy() do it("errors on invalid values", function() local route = bp.routes:insert({ hosts = { "example.com" } }) - local pk = { id = route.id } - local new_route, err, err_t = db.routes:update(pk, { + local new_route, err, err_t = db.routes:update(route, { protocols = { "http", 123 }, }) assert.is_nil(new_route) @@ -1092,7 +1089,7 @@ for _, strategy in helpers.each_strategy() do -- ngx.sleep(1) - local new_route, err, err_t = db.routes:update({ id = route.id }, { + local new_route, err, err_t = db.routes:update(route, { protocols = { "https" }, hosts = { "example.com" }, regex_priority = 5, @@ -1132,7 +1129,7 @@ for _, strategy in helpers.each_strategy() do path_handling = "v0", }) - local new_route, err, err_t = db.routes:update({ id = route.id }, { + local new_route, err, err_t = db.routes:update(route, { methods = ngx.null }) assert.is_nil(err_t) @@ -1161,7 +1158,7 @@ for _, strategy in helpers.each_strategy() do methods = { "GET" }, }) - local new_route, _, err_t = db.routes:update({ id = route.id }, { + local new_route, _, err_t = db.routes:update(route, { hosts = ngx.null, methods = ngx.null, }) @@ -1189,7 +1186,7 @@ for _, strategy in helpers.each_strategy() do snis = { "example.org" }, }) - local new_route, _, err_t = db.routes:update({ id = route.id }, { + local new_route, _, err_t = db.routes:update(route, { protocols = { "http" }, hosts = ngx.null, methods = ngx.null, @@ -1222,7 +1219,7 @@ for _, strategy in helpers.each_strategy() do 
paths = ngx.null, }, { nulls = true }) - local new_route, err, err_t = db.routes:update({ id = route.id }, { + local new_route, err, err_t = db.routes:update(route, { hosts = { "example2.com" }, }, { nulls = true }) assert.is_nil(err_t) @@ -1244,7 +1241,7 @@ for _, strategy in helpers.each_strategy() do methods = { "GET" }, }) - local new_route, _, err_t = db.routes:update({ id = route.id }, { + local new_route, _, err_t = db.routes:update(route, { hosts = ngx.null, methods = ngx.null, }) @@ -1291,16 +1288,12 @@ for _, strategy in helpers.each_strategy() do hosts = { "example.com" }, }) - local ok, err, err_t = db.routes:delete({ - id = route.id - }) + local ok, err, err_t = db.routes:delete(route) assert.is_nil(err_t) assert.is_nil(err) assert.is_true(ok) - local route_in_db, err, err_t = db.routes:select({ - id = route.id - }) + local route_in_db, err, err_t = db.routes:select(route) assert.is_nil(err_t) assert.is_nil(err) assert.is_nil(route_in_db) @@ -1620,9 +1613,7 @@ for _, strategy in helpers.each_strategy() do host = "example.com" })) - local service_in_db, err, err_t = db.services:select({ - id = service.id - }) + local service_in_db, err, err_t = db.services:select(service) assert.is_nil(err_t) assert.is_nil(err) assert.equal("example.com", service_in_db.host) @@ -1674,8 +1665,7 @@ for _, strategy in helpers.each_strategy() do it("errors on invalid values", function() local service = assert(db.services:insert({ host = "service.test" })) - local pk = { id = service.id } - local new_service, err, err_t = db.services:update(pk, { protocol = 123 }) + local new_service, err, err_t = db.services:update(service, { protocol = 123 }) assert.is_nil(new_service) local message = "schema violation (protocol: expected a string)" assert.equal(fmt("[%s] %s", strategy, message), err) @@ -1708,16 +1698,12 @@ for _, strategy in helpers.each_strategy() do host = "service.com" })) - local updated_service, err, err_t = db.services:update({ - id = service.id - }, { protocol = "https" }) + local updated_service, err, err_t = db.services:update(service, { protocol = "https" }) assert.is_nil(err_t) assert.is_nil(err) assert.equal("https", updated_service.protocol) - local service_in_db, err, err_t = db.services:select({ - id = service.id - }) + local service_in_db, err, err_t = db.services:select(service) assert.is_nil(err_t) assert.is_nil(err) assert.equal("https", service_in_db.protocol) @@ -1741,9 +1727,7 @@ for _, strategy in helpers.each_strategy() do assert.is_nil(err_t) -- update insert 2 with insert 1 name - local updated_service, _, err_t = db.services:update({ - id = service.id, - }, { name = "service" }) + local updated_service, _, err_t = db.services:update(service, { name = "service" }) assert.is_nil(updated_service) assert.same({ code = Errors.codes.UNIQUE_VIOLATION, @@ -1761,11 +1745,11 @@ for _, strategy in helpers.each_strategy() do local s1, s2 before_each(function() if s1 then - local ok, err = db.services:delete({ id = s1.id }) + local ok, err = db.services:delete(s1) assert(ok, tostring(err)) end if s2 then - local ok, err = db.services:delete({ id = s2.id }) + local ok, err = db.services:delete(s2) assert(ok, tostring(err)) end @@ -1820,16 +1804,12 @@ for _, strategy in helpers.each_strategy() do host = "service.com" })) - local updated_service, err, err_t = db.services:update({ - id = service.id - }, { protocol = "https" }) + local updated_service, err, err_t = db.services:update(service, { protocol = "https" }) assert.is_nil(err_t) assert.is_nil(err) assert.equal("https", 
updated_service.protocol) - local service_in_db, err, err_t = db.services:select({ - id = service.id - }) + local service_in_db, err, err_t = db.services:select(service) assert.is_nil(err_t) assert.is_nil(err) assert.equal("https", service_in_db.protocol) @@ -1843,9 +1823,7 @@ for _, strategy in helpers.each_strategy() do assert.is_nil(err) assert.equal("https", updated_service.protocol) - local service_in_db, err, err_t = db.services:select({ - id = updated_service.id - }) + local service_in_db, err, err_t = db.services:select(updated_service) assert.is_nil(err_t) assert.is_nil(err) assert.equal("https", service_in_db.protocol) @@ -1899,16 +1877,12 @@ for _, strategy in helpers.each_strategy() do host = "example.com" })) - local ok, err, err_t = db.services:delete({ - id = service.id - }) + local ok, err, err_t = db.services:delete(service) assert.is_nil(err_t) assert.is_nil(err) assert.is_true(ok) - local service_in_db, err, err_t = db.services:select({ - id = service.id - }) + local service_in_db, err, err_t = db.services:select(service) assert.is_nil(err_t) assert.is_nil(err) assert.is_nil(service_in_db) @@ -1946,9 +1920,7 @@ for _, strategy in helpers.each_strategy() do assert.is_nil(err) assert.is_true(ok) - local service_in_db, err, err_t = db.services:select({ - id = service.id - }) + local service_in_db, err, err_t = db.services:select(service) assert.is_nil(err_t) assert.is_nil(err) assert.is_nil(service_in_db) @@ -2002,7 +1974,7 @@ for _, strategy in helpers.each_strategy() do response_buffering = true, }, route) - local route_in_db, err, err_t = db.routes:select({ id = route.id }, { nulls = true }) + local route_in_db, err, err_t = db.routes:select(route, { nulls = true }) assert.is_nil(err_t) assert.is_nil(err) assert.same(route, route_in_db) @@ -2014,7 +1986,7 @@ for _, strategy in helpers.each_strategy() do local route = bp.routes:insert({ service = service1, methods = { "GET" } }) - local new_route, err, err_t = db.routes:update({ id = route.id }, { + local new_route, err, err_t = db.routes:update(route, { service = service2 }) assert.is_nil(err_t) @@ -2025,7 +1997,7 @@ for _, strategy in helpers.each_strategy() do it(":update() detaches a Route from an existing Service", function() local service1 = bp.services:insert({ host = "service1.com" }) local route = bp.routes:insert({ service = service1, methods = { "GET" } }) - local new_route, err, err_t = db.routes:update({ id = route.id }, { + local new_route, err, err_t = db.routes:update(route, { service = ngx.null }) assert.is_nil(err_t) @@ -2045,7 +2017,7 @@ for _, strategy in helpers.each_strategy() do hosts = { "example.com" }, }) - local new_route, err, err_t = db.routes:update({ id = route.id }, { + local new_route, err, err_t = db.routes:update(route, { service = service }) assert.is_nil(new_route) @@ -2075,7 +2047,7 @@ for _, strategy in helpers.each_strategy() do bp.routes:insert({ service = service, methods = { "GET" } }) - local ok, err, err_t = db.services:delete({ id = service.id }) + local ok, err, err_t = db.services:delete(service) assert.is_nil(ok) local message = "an existing 'routes' entity references this 'services' entity" assert.equal(fmt("[%s] %s", strategy, message), err) @@ -2097,14 +2069,12 @@ for _, strategy in helpers.each_strategy() do local route = bp.routes:insert({ service = service, methods = { "GET" } }) - local ok, err, err_t = db.routes:delete({ id = route.id }) + local ok, err, err_t = db.routes:delete(route) assert.is_nil(err_t) assert.is_nil(err) assert.is_true(ok) - local service_in_db, 
err, err_t = db.services:select({ - id = service.id - }) + local service_in_db, err, err_t = db.services:select(service) assert.is_nil(err_t) assert.is_nil(err) assert.same(service, service_in_db) @@ -2163,9 +2133,7 @@ for _, strategy in helpers.each_strategy() do -- different service } - local rows, err, err_t = db.routes:page_for_service { - id = service.id, - } + local rows, err, err_t = db.routes:page_for_service(service) assert.is_nil(err_t) assert.is_nil(err) assert.same({ route1 }, rows) @@ -2181,9 +2149,7 @@ for _, strategy in helpers.each_strategy() do methods = { "GET" }, } - local rows, err, err_t = db.routes:page_for_service { - id = service.id, - } + local rows, err, err_t = db.routes:page_for_service(service) assert.is_nil(err_t) assert.is_nil(err) @@ -2221,18 +2187,14 @@ for _, strategy in helpers.each_strategy() do end) it("= 100", function() - local rows, err, err_t = db.routes:page_for_service { - id = service.id, - } + local rows, err, err_t = db.routes:page_for_service(service) assert.is_nil(err_t) assert.is_nil(err) assert.equal(100, #rows) end) it("max page_size = 1000", function() - local _, _, err_t = db.routes:page_for_service({ - id = service.id, - }, 1002) + local _, _, err_t = db.routes:page_for_service(service, 1002) assert.same({ code = Errors.codes.INVALID_SIZE, message = "size must be an integer between 1 and 1000", @@ -2256,9 +2218,7 @@ for _, strategy in helpers.each_strategy() do end) it("fetches all rows in one page", function() - local rows, err, err_t, offset = db.routes:page_for_service { - id = service.id, - } + local rows, err, err_t, offset = db.routes:page_for_service(service) assert.is_nil(err_t) assert.is_nil(err) assert.is_nil(offset) @@ -2283,17 +2243,15 @@ for _, strategy in helpers.each_strategy() do end) it("fetches rows always in same order", function() - local rows1 = db.routes:page_for_service { id = service.id } - local rows2 = db.routes:page_for_service { id = service.id } + local rows1 = db.routes:page_for_service(service) + local rows2 = db.routes:page_for_service(service) assert.is_table(rows1) assert.is_table(rows2) assert.same(rows1, rows2) end) it("returns offset when page_size < total", function() - local rows, err, err_t, offset = db.routes:page_for_service({ - id = service.id, - }, 5) + local rows, err, err_t, offset = db.routes:page_for_service(service, 5) assert.is_nil(err_t) assert.is_nil(err) assert.is_table(rows) @@ -2302,9 +2260,7 @@ for _, strategy in helpers.each_strategy() do end) it("fetches subsequent pages with offset", function() - local rows_1, err, err_t, offset = db.routes:page_for_service({ - id = service.id, - }, 5) + local rows_1, err, err_t, offset = db.routes:page_for_service(service, 5) assert.is_nil(err_t) assert.is_nil(err) assert.is_table(rows_1) @@ -2313,9 +2269,7 @@ for _, strategy in helpers.each_strategy() do local page_size = 5 - local rows_2, err, err_t, offset = db.routes:page_for_service({ - id = service.id, - }, page_size, offset) + local rows_2, err, err_t, offset = db.routes:page_for_service(service, page_size, offset) assert.is_nil(err_t) assert.is_nil(err) @@ -2333,24 +2287,18 @@ for _, strategy in helpers.each_strategy() do end) it("fetches same page with same offset", function() - local _, err, err_t, offset = db.routes:page_for_service({ - id = service.id, - }, 3) + local _, err, err_t, offset = db.routes:page_for_service(service, 3) assert.is_nil(err_t) assert.is_nil(err) assert.is_string(offset) - local rows_a, err, err_t = db.routes:page_for_service({ - id = service.id, - }, 3, 
offset) + local rows_a, err, err_t = db.routes:page_for_service(service, 3, offset) assert.is_nil(err_t) assert.is_nil(err) assert.is_table(rows_a) assert.equal(3, #rows_a) - local rows_b, err, err_t = db.routes:page_for_service({ - id = service.id, - }, 3, offset) + local rows_b, err, err_t = db.routes:page_for_service(service, 3, offset) assert.is_nil(err_t) assert.is_nil(err) assert.is_table(rows_b) @@ -2367,9 +2315,7 @@ for _, strategy in helpers.each_strategy() do repeat local err, err_t - rows, err, err_t, offset = db.routes:page_for_service({ - id = service.id, - }, 3, offset) + rows, err, err_t, offset = db.routes:page_for_service(service, 3, offset) assert.is_nil(err_t) assert.is_nil(err) @@ -2382,9 +2328,7 @@ for _, strategy in helpers.each_strategy() do end) it("fetches first page with invalid offset", function() - local rows, err, err_t = db.routes:page_for_service({ - id = service.id, - }, 3, "hello") + local rows, err, err_t = db.routes:page_for_service(service, 3, "hello") assert.is_nil(rows) local message = "'hello' is not a valid offset: " .. "bad base64 encoding" @@ -2412,9 +2356,7 @@ for _, strategy in helpers.each_strategy() do end) it("overrides the defaults", function() - local rows, err, err_t, offset = db.routes:page_for_service({ - id = service.id, - }, nil, nil, { + local rows, err, err_t, offset = db.routes:page_for_service(service, nil, nil, { pagination = { page_size = 5, max_page_size = 5, @@ -2425,9 +2367,7 @@ for _, strategy in helpers.each_strategy() do assert.is_not_nil(offset) assert.equal(5, #rows) - rows, err, err_t, offset = db.routes:page_for_service({ - id = service.id, - }, nil, offset, { + rows, err, err_t, offset = db.routes:page_for_service(service, nil, offset, { pagination = { page_size = 6, max_page_size = 6, @@ -2465,17 +2405,13 @@ for _, strategy in helpers.each_strategy() do describe(":page_for_upstream()", function() it("return value 'offset' is a string", function() - local page, _, _, offset = db.targets:page_for_upstream({ - id = upstream.id, - }, 1) + local page, _, _, offset = db.targets:page_for_upstream(upstream, 1) assert.not_nil(page) assert.is_string(offset) end) it("respects nulls=true on targets too", function() - local page = db.targets:page_for_upstream({ - id = upstream.id, - }, 1, nil, { nulls = true }) + local page = db.targets:page_for_upstream(upstream, 1, nil, { nulls = true }) assert.not_nil(page) assert.equal(cjson.null, page[1].tags) end) diff --git a/spec/02-integration/03-db/03-plugins_spec.lua b/spec/02-integration/03-db/03-plugins_spec.lua index 474bfb15dfcd..b844835cac27 100644 --- a/spec/02-integration/03-db/03-plugins_spec.lua +++ b/spec/02-integration/03-db/03-plugins_spec.lua @@ -160,13 +160,13 @@ for _, strategy in helpers.each_strategy() do end) it("returns an error when updating mismatched plugins", function() - local p, _, err_t = db.plugins:update({ id = global_plugin.id }, + local p, _, err_t = db.plugins:update(global_plugin, { route = { id = route.id } }) assert.is_nil(p) assert.equals(err_t.fields.protocols, "must match the associated route's protocols") - local p, _, err_t = db.plugins:update({ id = global_plugin.id }, + local p, _, err_t = db.plugins:update(global_plugin, { service = { id = service.id } }) assert.is_nil(p) assert.equals(err_t.fields.protocols, @@ -176,13 +176,13 @@ for _, strategy in helpers.each_strategy() do describe(":upsert()", function() it("returns an error when upserting mismatched plugins", function() - local p, _, err_t = db.plugins:upsert({ id = global_plugin.id }, + 
local p, _, err_t = db.plugins:upsert(global_plugin, { route = { id = route.id }, protocols = { "http" } }) assert.is_nil(p) assert.equals(err_t.fields.protocols, "must match the associated route's protocols") - local p, _, err_t = db.plugins:upsert({ id = global_plugin.id }, + local p, _, err_t = db.plugins:upsert(global_plugin, { service = { id = service.id }, protocols = { "http" } }) assert.is_nil(p) assert.equals(err_t.fields.protocols, diff --git a/spec/02-integration/03-db/08-declarative_spec.lua b/spec/02-integration/03-db/08-declarative_spec.lua index 8e82da62ba3a..8e7480af5ef3 100644 --- a/spec/02-integration/03-db/08-declarative_spec.lua +++ b/spec/02-integration/03-db/08-declarative_spec.lua @@ -208,16 +208,16 @@ for _, strategy in helpers.each_strategy() do assert(declarative.load_into_db({ snis = { [sni_def.id] = sni_def }, certificates = { [certificate_def.id] = certificate_def }, - routes = { + routes = { [route_def.id] = route_def, [disabled_route_def.id] = disabled_route_def, }, - services = { + services = { [service_def.id] = service_def, [disabled_service_def.id] = disabled_service_def, }, consumers = { [consumer_def.id] = consumer_def }, - plugins = { + plugins = { [plugin_def.id] = plugin_def, [disabled_service_plugin_def.id] = disabled_service_plugin_def, [disabled_plugin_def.id] = disabled_plugin_def, @@ -239,7 +239,7 @@ for _, strategy in helpers.each_strategy() do assert.equals(sni_def.id, sni.id) assert.equals(certificate_def.id, sni.certificate.id) - local cert = assert(db.certificates:select({ id = certificate_def.id })) + local cert = assert(db.certificates:select(certificate_def)) assert.equals(certificate_def.id, cert.id) assert.same(ssl_fixtures.key, cert.key) assert.same(ssl_fixtures.cert, cert.cert) @@ -260,23 +260,23 @@ for _, strategy in helpers.each_strategy() do assert.equals("andru", consumer_def.username) assert.equals("donalds", consumer_def.custom_id) - local plugin = assert(db.plugins:select({ id = plugin_def.id }, { nulls = true })) + local plugin = assert(db.plugins:select(plugin_def, { nulls = true })) assert.equals(plugin_def.id, plugin.id) assert.equals(service.id, plugin.service.id) assert.equals("acl", plugin.name) assert.same(plugin_def.config, plugin.config) - local acl = assert(db.acls:select({ id = acl_def.id })) + local acl = assert(db.acls:select(acl_def)) assert.equals(consumer_def.id, acl.consumer.id) assert.equals("The A Team", acl.group) - local basicauth_credential = assert(db.basicauth_credentials:select({ id = basicauth_credential_def.id })) + local basicauth_credential = assert(db.basicauth_credentials:select(basicauth_credential_def)) assert.equals(basicauth_credential_def.id, basicauth_credential.id) assert.equals(consumer.id, basicauth_credential.consumer.id) assert.equals("james", basicauth_credential.username) assert.equals(crypto.hash(consumer.id, "secret"), basicauth_credential.password) - local basicauth_hashed_credential = assert(db.basicauth_credentials:select({ id = basicauth_hashed_credential_def.id })) + local basicauth_hashed_credential = assert(db.basicauth_credentials:select(basicauth_hashed_credential_def)) assert.equals(basicauth_hashed_credential_def.id, basicauth_hashed_credential.id) assert.equals(consumer.id, basicauth_hashed_credential.consumer.id) assert.equals("bond", basicauth_hashed_credential.username) @@ -392,7 +392,7 @@ for _, strategy in helpers.each_strategy() do assert.same(plugin_def.config, plugin.config) --[[ FIXME this case is known to cause an issue - local plugin_with_null = 
assert(db.plugins:select({ id = plugin_with_null_def.id }, { nulls = true })) + local plugin_with_null = assert(db.plugins:select(plugin_with_null_def, { nulls = true })) assert.equals(plugin_with_null_def.id, plugin_with_null.id) assert.equals(service.id, plugin_with_null.service.id) assert.equals("correlation-id", plugin_with_null.name) @@ -503,7 +503,7 @@ for _, strategy in helpers.each_strategy() do assert.same(plugin_def.config, plugin.config) --[[ FIXME this case is known to cause an issue - local plugin_with_null = assert(db.plugins:select({ id = plugin_with_null_def.id }, { nulls = true })) + local plugin_with_null = assert(db.plugins:select(plugin_with_null_def, { nulls = true })) assert.equals(plugin_with_null_def.id, plugin_with_null.id) assert.equals(service.id, plugin_with_null.service.id) assert.equals("correlation-id", plugin_with_null.name) @@ -533,5 +533,3 @@ for _, strategy in helpers.each_strategy() do end) end) end - - diff --git a/spec/02-integration/03-db/10-db_unique_foreign_spec.lua b/spec/02-integration/03-db/10-db_unique_foreign_spec.lua index 37f47fe5beeb..8a154b0b1e1c 100644 --- a/spec/02-integration/03-db/10-db_unique_foreign_spec.lua +++ b/spec/02-integration/03-db/10-db_unique_foreign_spec.lua @@ -66,9 +66,7 @@ for _, strategy in helpers.each_strategy() do -- I/O it("returns existing Unique Foreign", function() for i = 1, 5 do - local unique_reference, err, err_t = db.unique_references:select_by_unique_foreign({ - id = unique_foreigns[i].id, - }) + local unique_reference, err, err_t = db.unique_references:select_by_unique_foreign(unique_foreigns[i]) assert.is_nil(err) assert.is_nil(err_t) @@ -99,9 +97,7 @@ for _, strategy in helpers.each_strategy() do end) it("errors on invalid values", function() - local unique_reference, err, err_t = db.unique_references:update_by_unique_foreign({ - id = unique_foreigns[1].id, - }, { + local unique_reference, err, err_t = db.unique_references:update_by_unique_foreign(unique_foreigns[1], { note = 123, }) assert.is_nil(unique_reference) @@ -135,27 +131,21 @@ for _, strategy in helpers.each_strategy() do end) it("updates an existing Unique Reference", function() - local unique_reference, err, err_t = db.unique_references:update_by_unique_foreign({ - id = unique_foreigns[1].id, - }, { + local unique_reference, err, err_t = db.unique_references:update_by_unique_foreign(unique_foreigns[1], { note = "note updated", }) assert.is_nil(err_t) assert.is_nil(err) assert.equal("note updated", unique_reference.note) - local unique_reference_in_db, err, err_t = db.unique_references:select({ - id = unique_reference.id - }) + local unique_reference_in_db, err, err_t = db.unique_references:select(unique_reference) assert.is_nil(err_t) assert.is_nil(err) assert.equal("note updated", unique_reference_in_db.note) end) it("cannot update a Unique Reference to be an already existing Unique Foreign", function() - local updated_service, _, err_t = db.unique_references:update_by_unique_foreign({ - id = unique_foreigns[1].id, - }, { + local updated_service, _, err_t = db.unique_references:update_by_unique_foreign(unique_foreigns[1], { unique_foreign = { id = unique_foreigns[2].id, } @@ -184,9 +174,7 @@ for _, strategy in helpers.each_strategy() do end) it("errors on invalid values", function() - local unique_reference, err, err_t = db.unique_references:upsert_by_unique_foreign({ - id = unique_foreigns[1].id, - }, { + local unique_reference, err, err_t = db.unique_references:upsert_by_unique_foreign(unique_foreigns[1], { note = 123, }) 
assert.is_nil(unique_reference) @@ -220,18 +208,14 @@ for _, strategy in helpers.each_strategy() do end) it("upserts an existing Unique Reference", function() - local unique_reference, err, err_t = db.unique_references:upsert_by_unique_foreign({ - id = unique_foreigns[1].id, - }, { + local unique_reference, err, err_t = db.unique_references:upsert_by_unique_foreign(unique_foreigns[1], { note = "note updated", }) assert.is_nil(err_t) assert.is_nil(err) assert.equal("note updated", unique_reference.note) - local unique_reference_in_db, err, err_t = db.unique_references:select({ - id = unique_reference.id - }) + local unique_reference_in_db, err, err_t = db.unique_references:select(unique_reference) assert.is_nil(err_t) assert.is_nil(err) assert.equal("note updated", unique_reference_in_db.note) @@ -241,9 +225,7 @@ for _, strategy in helpers.each_strategy() do -- TODO: this is slightly unexpected, but it has its uses when thinking about idempotency -- of `PUT`. This has been like that with other DAO methods do, but perhaps we want -- to revisit this later. - local unique_reference, err, err_t = db.unique_references:upsert_by_unique_foreign({ - id = unique_foreigns[1].id, - }, { + local unique_reference, err, err_t = db.unique_references:upsert_by_unique_foreign(unique_foreigns[1], { unique_foreign = { id = unique_foreigns[2].id, } @@ -257,9 +239,7 @@ for _, strategy in helpers.each_strategy() do describe(":update()", function() it("cannot update a Unique Reference to be an already existing Unique Foreign", function() - local updated_unique_reference, _, err_t = db.unique_references:update({ - id = unique_references[1].id, - }, { + local updated_unique_reference, _, err_t = db.unique_references:update(unique_references[1], { unique_foreign = { id = unique_foreigns[2].id, } @@ -284,9 +264,7 @@ for _, strategy in helpers.each_strategy() do name = "new unique foreign", })) - local updated_unique_reference, err, err_t = db.unique_references:update({ - id = unique_references[1].id, - }, { + local updated_unique_reference, err, err_t = db.unique_references:update(unique_references[1], { note = "updated note", unique_foreign = { id = unique_foreign.id, @@ -335,16 +313,12 @@ for _, strategy in helpers.each_strategy() do end) it("deletes an existing Unique Reference", function() - local ok, err, err_t = db.unique_references:delete_by_unique_foreign({ - id = unique_foreign.id, - }) + local ok, err, err_t = db.unique_references:delete_by_unique_foreign(unique_foreign) assert.is_nil(err_t) assert.is_nil(err) assert.is_true(ok) - local unique_reference, err, err_t = db.unique_references:select({ - id = unique_reference.id - }) + local unique_reference, err, err_t = db.unique_references:select(unique_reference) assert.is_nil(err_t) assert.is_nil(err) assert.is_nil(unique_reference) diff --git a/spec/02-integration/03-db/11-db_transformations_spec.lua b/spec/02-integration/03-db/11-db_transformations_spec.lua index 6351d65b8afd..df47610a4eb5 100644 --- a/spec/02-integration/03-db/11-db_transformations_spec.lua +++ b/spec/02-integration/03-db/11-db_transformations_spec.lua @@ -40,14 +40,14 @@ for _, strategy in helpers.each_strategy() do name = "test" })) - local newdao, err = db.transformations:update({ id = dao.id }, { + local newdao, err = db.transformations:update(dao, { secret = "dog", }) assert.equal(nil, newdao) assert.equal(errmsg, err) - assert(db.transformations:delete({ id = dao.id })) + assert(db.transformations:delete(dao)) end) it("updating hash_secret requires secret", function() @@ -55,14 
+55,14 @@ for _, strategy in helpers.each_strategy() do name = "test" })) - local newdao, err = db.transformations:update({ id = dao.id }, { + local newdao, err = db.transformations:update(dao, { hash_secret = true, }) assert.equal(nil, newdao) assert.equal(errmsg, err) - assert(db.transformations:delete({ id = dao.id })) + assert(db.transformations:delete(dao)) end) end) @@ -74,12 +74,12 @@ for _, strategy in helpers.each_strategy() do assert.equal("abc", dao.case) - local newdao = assert(db.transformations:update({ id = dao.id }, { + local newdao = assert(db.transformations:update(dao, { case = "aBc", })) assert.equal("abc", newdao.case) - assert(db.transformations:delete({ id = dao.id })) + assert(db.transformations:delete(dao)) end) it("vault references are resolved after transformations", function() @@ -94,7 +94,7 @@ for _, strategy in helpers.each_strategy() do name = "test", })) - local newdao = assert(db.transformations:update({ id = dao.id }, { + local newdao = assert(db.transformations:update(dao, { meta = "{vault://env/meta-value}", })) @@ -102,7 +102,7 @@ for _, strategy in helpers.each_strategy() do assert.same({ meta = "{vault://env/meta-value}", }, newdao["$refs"]) - assert(db.transformations:delete({ id = dao.id })) + assert(db.transformations:delete(dao)) end) end) diff --git a/spec/02-integration/03-db/12-dao_hooks_spec.lua b/spec/02-integration/03-db/12-dao_hooks_spec.lua index 9ac341a0b286..df0745226214 100644 --- a/spec/02-integration/03-db/12-dao_hooks_spec.lua +++ b/spec/02-integration/03-db/12-dao_hooks_spec.lua @@ -183,7 +183,7 @@ for _, strategy in helpers.each_strategy() do hooks.clear_hooks() end) - assert(db.routes:select( {id = r1.id} )) + assert(db.routes:select(r1)) assert.spy(pre_hook).was_called(1) assert.spy(post_hook).was_called(1) end) @@ -266,7 +266,7 @@ for _, strategy in helpers.each_strategy() do hooks.clear_hooks() end) - assert(db.routes:update({ id = r1.id }, { + assert(db.routes:update(r1, { protocols = { "http" }, hosts = { "host1" }, service = s1, diff --git a/spec/02-integration/03-db/13-cluster_status_spec.lua b/spec/02-integration/03-db/13-cluster_status_spec.lua index f486b763ec3e..3734df8f8b0a 100644 --- a/spec/02-integration/03-db/13-cluster_status_spec.lua +++ b/spec/02-integration/03-db/13-cluster_status_spec.lua @@ -18,7 +18,7 @@ for _, strategy in helpers.each_strategy() do end) it("can update the row", function() - local p, err = db.clustering_data_planes:update({ id = cs.id, }, { config_hash = "a9a166c59873245db8f1a747ba9a80a7", }) + local p, err = db.clustering_data_planes:update(cs, { config_hash = "a9a166c59873245db8f1a747ba9a80a7", }) assert.is_truthy(p) assert.is_nil(err) end) diff --git a/spec/02-integration/03-db/18-keys_spec.lua b/spec/02-integration/03-db/18-keys_spec.lua index 5cac149a1e77..7ac214faa6db 100644 --- a/spec/02-integration/03-db/18-keys_spec.lua +++ b/spec/02-integration/03-db/18-keys_spec.lua @@ -45,7 +45,7 @@ for _, strategy in helpers.all_strategies() do }) assert(key) assert.is_nil(err) - local key_o, s_err = db.keys:select({ id = key.id }) + local key_o, s_err = db.keys:select(key) assert.is_nil(s_err) assert.same("string", type(key_o.jwk)) end) @@ -60,7 +60,7 @@ for _, strategy in helpers.all_strategies() do private_key = pem_priv } }) - local key_o, err = db.keys:select({ id = init_pem_key.id }) + local key_o, err = db.keys:select(init_pem_key) assert.is_nil(err) assert.same('456', key_o.kid) assert.same(pem_priv, key_o.pem.private_key) diff --git a/spec/02-integration/03-db/19-key-sets_spec.lua 
b/spec/02-integration/03-db/19-key-sets_spec.lua index 8c3dbc4e8237..60a8b658b08c 100644 --- a/spec/02-integration/03-db/19-key-sets_spec.lua +++ b/spec/02-integration/03-db/19-key-sets_spec.lua @@ -27,7 +27,7 @@ for _, strategy in helpers.all_strategies() do end) it(":select returns an item", function() - local key_set, err = kong.db.key_sets:select({ id = keyset.id }) + local key_set, err = kong.db.key_sets:select(keyset) assert.is_nil(err) assert(key_set.name == keyset.name) end) @@ -46,15 +46,13 @@ for _, strategy in helpers.all_strategies() do } assert.is_nil(err) assert(key_set.name == "that") - local ok, d_err = kong.db.key_sets:delete { - id = key_set.id - } + local ok, d_err = kong.db.key_sets:delete(key_set) assert.is_nil(d_err) assert.is_truthy(ok) end) it(":update updates a keyset's fields", function() - local key_set, err = kong.db.key_sets:update({ id = keyset.id }, { + local key_set, err = kong.db.key_sets:update(keyset, { name = "changed" }) assert.is_nil(err) @@ -75,17 +73,15 @@ for _, strategy in helpers.all_strategies() do } assert.is_nil(ins_err) -- verify creation - local key_select, select_err = kong.db.keys:select({ id = key.id }) + local key_select, select_err = kong.db.keys:select(key) assert.is_nil(select_err) assert.is_not_nil(key_select) -- delete the set - local ok, d_err = kong.db.key_sets:delete { - id = key_set.id - } + local ok, d_err = kong.db.key_sets:delete(key_set) assert.is_true(ok) assert.is_nil(d_err) -- verify if key is gone - local key_select_deleted, select_deleted_err = kong.db.keys:select({ id = key.id }) + local key_select_deleted, select_deleted_err = kong.db.keys:select(key) assert.is_nil(select_deleted_err) assert.is_nil(key_select_deleted) end) @@ -119,7 +115,7 @@ for _, strategy in helpers.all_strategies() do local rows = {} local i = 1 - for row, err_t in kong.db.keys:each_for_set({id = key_set.id}) do + for row, err_t in kong.db.keys:each_for_set(key_set) do assert.is_nil(err_t) rows[i] = row i = i + 1 diff --git a/spec/02-integration/04-admin_api/03-consumers_routes_spec.lua b/spec/02-integration/04-admin_api/03-consumers_routes_spec.lua index f7747bcf63ae..31d66bf29be4 100644 --- a/spec/02-integration/04-admin_api/03-consumers_routes_spec.lua +++ b/spec/02-integration/04-admin_api/03-consumers_routes_spec.lua @@ -373,7 +373,7 @@ describe("Admin API (#" .. strategy .. "): ", function() assert.equal(consumer.id, json.id) assert.truthy(consumer.updated_at < json.updated_at) - local in_db = assert(db.consumers:select({ id = consumer.id }, { nulls = true })) + local in_db = assert(db.consumers:select(consumer, { nulls = true })) assert.same(json, in_db) end end) @@ -394,7 +394,7 @@ describe("Admin API (#" .. strategy .. "): ", function() assert.equal(new_username, json.username) assert.equal(consumer.id, json.id) - local in_db = assert(db.consumers:select({ id = consumer.id }, { nulls = true })) + local in_db = assert(db.consumers:select(consumer, { nulls = true })) assert.same(json, in_db) end end) @@ -416,7 +416,7 @@ describe("Admin API (#" .. strategy .. "): ", function() assert.equal(consumer.custom_id, json.custom_id) assert.equal(consumer.id, json.id) - local in_db = assert(db.consumers:select({ id = consumer.id }, { nulls = true })) + local in_db = assert(db.consumers:select(consumer, { nulls = true })) assert.same(json, in_db) end end) @@ -511,7 +511,7 @@ describe("Admin API (#" .. strategy .. 
"): ", function() local json = cjson.decode(body) assert.equal(new_username, json.username) - local in_db = assert(db.consumers:select({ id = consumer.id }, { nulls = true })) + local in_db = assert(db.consumers:select(consumer, { nulls = true })) assert.same(json, in_db) end end) @@ -834,7 +834,7 @@ describe("Admin API (#" .. strategy .. "): ", function() assert.equal("updated", json.config.value) assert.equal(plugin.id, json.id) - local in_db = assert(db.plugins:select({ id = plugin.id }, { nulls = true })) + local in_db = assert(db.plugins:select(plugin, { nulls = true })) assert.same(json, in_db) end) @@ -844,8 +844,7 @@ describe("Admin API (#" .. strategy .. "): ", function() local plugin = bp.rewriter_plugins:insert({ consumer = { id = consumer.id }}) local err - plugin, err = db.plugins:update( - { id = plugin.id }, + plugin, err = db.plugins:update(plugin, { name = "rewriter", route = plugin.route, @@ -896,7 +895,7 @@ describe("Admin API (#" .. strategy .. "): ", function() local json = cjson.decode(body) assert.False(json.enabled) - plugin = assert(db.plugins:select{ id = plugin.id }) + plugin = assert(db.plugins:select(plugin)) assert.False(plugin.enabled) end end) @@ -989,9 +988,7 @@ describe("Admin API (#" .. strategy .. "): ", function() assert.equal("updated", json.config.value) assert.equal(plugin.id, json.id) - local in_db = assert(db.plugins:select({ - id = plugin.id, - }, { nulls = true })) + local in_db = assert(db.plugins:select(plugin, { nulls = true })) assert.same(json, in_db) end) end diff --git a/spec/02-integration/04-admin_api/04-plugins_routes_spec.lua b/spec/02-integration/04-admin_api/04-plugins_routes_spec.lua index 62905841d4a7..2cdd40ce1588 100644 --- a/spec/02-integration/04-admin_api/04-plugins_routes_spec.lua +++ b/spec/02-integration/04-admin_api/04-plugins_routes_spec.lua @@ -276,7 +276,7 @@ for _, strategy in helpers.each_strategy() do local json = cjson.decode(body) assert.False(json.enabled) - local in_db = assert(db.plugins:select({ id = plugins[1].id }, { nulls = true })) + local in_db = assert(db.plugins:select(plugins[1], { nulls = true })) assert.same(json, in_db) end) it("updates a plugin by instance_name", function() @@ -290,11 +290,11 @@ for _, strategy in helpers.each_strategy() do local json = cjson.decode(body) assert.False(json.enabled) - local in_db = assert(db.plugins:select({ id = plugins[2].id }, { nulls = true })) + local in_db = assert(db.plugins:select(plugins[2], { nulls = true })) assert.same(json, in_db) end) it("updates a plugin bis", function() - local plugin = assert(db.plugins:select({ id = plugins[2].id }, { nulls = true })) + local plugin = assert(db.plugins:select(plugins[2], { nulls = true })) plugin.enabled = not plugin.enabled plugin.created_at = nil @@ -325,7 +325,7 @@ for _, strategy in helpers.each_strategy() do local json = cjson.decode(body) assert.same(ngx.null, json.service) - local in_db = assert(db.plugins:select({ id = plugins[2].id }, { nulls = true })) + local in_db = assert(db.plugins:select(plugins[2], { nulls = true })) assert.same(json, in_db) end) it("does not infer json input", function() @@ -341,7 +341,7 @@ for _, strategy in helpers.each_strategy() do end) describe("errors", function() it("handles invalid input", function() - local before = assert(db.plugins:select({ id = plugins[1].id }, { nulls = true })) + local before = assert(db.plugins:select(plugins[1], { nulls = true })) local res = assert(client:send { method = "PATCH", path = "/plugins/" .. 
plugins[1].id, @@ -358,12 +358,12 @@ for _, strategy in helpers.each_strategy() do code = 2, }, body) - local after = assert(db.plugins:select({ id = plugins[1].id }, { nulls = true })) + local after = assert(db.plugins:select(plugins[1], { nulls = true })) assert.same(before, after) assert.same({"testkey"}, after.config.key_names) end) it("handles invalid config, see #9224", function() - local before = assert(db.plugins:select({ id = plugins[1].id }, { nulls = true })) + local before = assert(db.plugins:select(plugins[1], { nulls = true })) local res = assert(client:send { method = "PATCH", path = "/plugins/" .. plugins[1].id, @@ -380,7 +380,7 @@ for _, strategy in helpers.each_strategy() do code = 2, }, body) - local after = assert(db.plugins:select({ id = plugins[1].id }, { nulls = true })) + local after = assert(db.plugins:select(plugins[1], { nulls = true })) assert.same(before, after) assert.same({"testkey"}, after.config.key_names) end) diff --git a/spec/02-integration/04-admin_api/06-certificates_routes_spec.lua b/spec/02-integration/04-admin_api/06-certificates_routes_spec.lua index 7ae78b6a0c04..d8baf1aeae63 100644 --- a/spec/02-integration/04-admin_api/06-certificates_routes_spec.lua +++ b/spec/02-integration/04-admin_api/06-certificates_routes_spec.lua @@ -397,7 +397,7 @@ describe("Admin API: #" .. strategy, function() assert.same({ n1, n2 }, json.snis) json.snis = nil - local in_db = assert(db.certificates:select({ id = json.id }, { nulls = true })) + local in_db = assert(db.certificates:select(json, { nulls = true })) assert.same(json, in_db) end) @@ -422,7 +422,7 @@ describe("Admin API: #" .. strategy, function() assert.same({ n1, n2 }, json.snis) json.snis = nil - local in_db = assert(db.certificates:select({ id = json.id }, { nulls = true })) + local in_db = assert(db.certificates:select(json, { nulls = true })) assert.same(json, in_db) end) @@ -446,7 +446,7 @@ describe("Admin API: #" .. strategy, function() json.snis = nil - local in_db = assert(db.certificates:select({ id = certificate.id }, { nulls = true })) + local in_db = assert(db.certificates:select(certificate, { nulls = true })) assert.same(json, in_db) end) @@ -472,7 +472,7 @@ describe("Admin API: #" .. strategy, function() json.snis = nil - local in_db = assert(db.certificates:select({ id = certificate.id }, { nulls = true })) + local in_db = assert(db.certificates:select(certificate, { nulls = true })) assert.same(json, in_db) end) @@ -1244,7 +1244,7 @@ describe("Admin API: #" .. 
strategy, function() local json = cjson.decode(body) assert.same(n2, json.name) - local in_db = assert(db.snis:select({ id = sni.id }, { nulls = true })) + local in_db = assert(db.snis:select(sni, { nulls = true })) assert.same(json, in_db) assert.truthy(sni.updated_at < json.updated_at) end) diff --git a/spec/02-integration/04-admin_api/09-routes_routes_spec.lua b/spec/02-integration/04-admin_api/09-routes_routes_spec.lua index f8bc82090584..38d0c8969f04 100644 --- a/spec/02-integration/04-admin_api/09-routes_routes_spec.lua +++ b/spec/02-integration/04-admin_api/09-routes_routes_spec.lua @@ -814,7 +814,7 @@ for _, strategy in helpers.each_strategy() do assert.same(cjson.null, json.methods) assert.equal(route.id, json.id) - local in_db = assert(db.routes:select({ id = route.id }, { nulls = true })) + local in_db = assert(db.routes:select(route, { nulls = true })) assert.same(json, in_db) end end) @@ -850,7 +850,7 @@ for _, strategy in helpers.each_strategy() do local in_db = assert(db.routes:select_by_name(route.name, { nulls = true })) assert.same(json, in_db) - db.routes:delete({ id = route.id }) + db.routes:delete(route) end end) @@ -1058,7 +1058,7 @@ for _, strategy in helpers.each_strategy() do assert.same(cjson.null, json.methods) assert.equal(route.id, json.id) - local in_db = assert(db.routes:select({ id = route.id }, { nulls = true })) + local in_db = assert(db.routes:select(route, { nulls = true })) assert.same(json, in_db) end end) @@ -1091,10 +1091,10 @@ for _, strategy in helpers.each_strategy() do assert.same(cjson.null, json.methods) assert.equal(route.id, json.id) - local in_db = assert(db.routes:select({ id = route.id }, { nulls = true })) + local in_db = assert(db.routes:select(route, { nulls = true })) assert.same(json, in_db) - db.routes:delete({ id = route.id }) + db.routes:delete(route) end end) @@ -1114,7 +1114,7 @@ for _, strategy in helpers.each_strategy() do assert.True(json.strip_path) assert.equal(route.id, json.id) - local in_db = assert(db.routes:select({id = route.id}, { nulls = true })) + local in_db = assert(db.routes:select(route, { nulls = true })) assert.same(json, in_db) end end) @@ -1144,7 +1144,7 @@ for _, strategy in helpers.each_strategy() do assert.same(cjson.null, json.methods) assert.equal(route.id, json.id) - local in_db = assert(db.routes:select({id = route.id}, { nulls = true })) + local in_db = assert(db.routes:select(route, { nulls = true })) assert.same(json, in_db) end end) @@ -1168,7 +1168,7 @@ for _, strategy in helpers.each_strategy() do assert.same(cjson.null, json.methods) assert.equal(route.id, json.id) - local in_db = assert(db.routes:select({id = route.id}, { nulls = true })) + local in_db = assert(db.routes:select(route, { nulls = true })) assert.same(json, in_db) end) @@ -1227,10 +1227,10 @@ for _, strategy in helpers.each_strategy() do assert.same(cjson.null, json.service) assert.equal(route.id, json.id) - local in_db = assert(db.routes:select({ id = route.id }, { nulls = true })) + local in_db = assert(db.routes:select(route, { nulls = true })) assert.same(json, in_db) - db.routes:delete({ id = route.id }) + db.routes:delete(route) end end) @@ -1288,7 +1288,7 @@ for _, strategy in helpers.each_strategy() do local body = assert.res_status(204, res) assert.equal("", body) - local in_db, err = db.routes:select({id = route.id}, { nulls = true }) + local in_db, err = db.routes:select(route, { nulls = true }) assert.is_nil(err) assert.is_nil(in_db) end) @@ -1302,7 +1302,7 @@ for _, strategy in helpers.each_strategy() do local body = 
assert.res_status(204, res) assert.equal("", body) - local in_db, err = db.routes:select({id = route.id}, { nulls = true }) + local in_db, err = db.routes:select(route, { nulls = true }) assert.is_nil(err) assert.is_nil(in_db) end) @@ -1393,7 +1393,7 @@ for _, strategy in helpers.each_strategy() do assert.same(cjson.null, json.path) - local in_db = assert(db.services:select({ id = service.id }, { nulls = true })) + local in_db = assert(db.services:select(service, { nulls = true })) assert.same(json, in_db) end end) @@ -1426,11 +1426,11 @@ for _, strategy in helpers.each_strategy() do assert.same(cjson.null, json.path) - local in_db = assert(db.services:select({ id = service.id }, { nulls = true })) + local in_db = assert(db.services:select(service, { nulls = true })) assert.same(json, in_db) - db.routes:delete({ id = route.id }) - db.services:delete({ id = service.id }) + db.routes:delete(route) + db.services:delete(service) end end) @@ -1453,7 +1453,7 @@ for _, strategy in helpers.each_strategy() do assert.equal("/foo", json.path) - local in_db = assert(db.services:select({ id = service.id }, { nulls = true })) + local in_db = assert(db.services:select(service, { nulls = true })) assert.same(json, in_db) end end) @@ -1544,7 +1544,7 @@ for _, strategy in helpers.each_strategy() do local json = cjson.decode(body) assert.same("konghq.com", json.host) - local in_db = assert(db.services:select({ id = json.id }, { nulls = true })) + local in_db = assert(db.services:select(json, { nulls = true })) assert.same(json, in_db) end end) @@ -1577,7 +1577,7 @@ for _, strategy in helpers.each_strategy() do assert.same(cjson.null, json.path) - local in_db = assert(db.services:select({ id = service.id }, { nulls = true })) + local in_db = assert(db.services:select(service, { nulls = true })) assert.same(json, in_db) end end) @@ -1610,11 +1610,11 @@ for _, strategy in helpers.each_strategy() do assert.same(cjson.null, json.path) - local in_db = assert(db.services:select({ id = service.id }, { nulls = true })) + local in_db = assert(db.services:select(service, { nulls = true })) assert.same(json, in_db) - db.routes:delete({ id = route.id }) - db.services:delete({ id = service.id }) + db.routes:delete(route) + db.services:delete(service) end end) @@ -1637,7 +1637,7 @@ for _, strategy in helpers.each_strategy() do assert.equal("/foo", json.path) - local in_db = assert(db.services:select({ id = service.id }, { nulls = true })) + local in_db = assert(db.services:select(service, { nulls = true })) assert.same(json, in_db) end end) @@ -1835,7 +1835,7 @@ for _, strategy in helpers.each_strategy() do local route = bp.routes:insert({ paths = { "/my-route" } }) assert(db.plugins:insert { name = "key-auth", - route = { id = route.id }, + route = route, }) local res = assert(client:send { method = "GET", @@ -1850,7 +1850,7 @@ for _, strategy in helpers.each_strategy() do local route = bp.routes:insert({ name = "my-plugins-route", paths = { "/my-route" } }) assert(db.plugins:insert { name = "key-auth", - route = { id = route.id }, + route = route, }) local res = assert(client:send { method = "GET", @@ -1860,7 +1860,7 @@ for _, strategy in helpers.each_strategy() do local json = cjson.decode(body) assert.equal(1, #json.data) - db.routes:delete({ id = route.id }) + db.routes:delete(route) end) it("ignores an invalid body", function() @@ -1892,7 +1892,7 @@ for _, strategy in helpers.each_strategy() do local res = client:get("/routes/" .. route.id .. "/plugins/" .. 
plugin.id) local body = assert.res_status(200, res) local json = cjson.decode(body) - local in_db = assert(db.plugins:select({ id = plugin.id }, { nulls = true })) + local in_db = assert(db.plugins:select(plugin, { nulls = true })) assert.same(json, in_db) end) it("retrieves a plugin by instance_name", function() @@ -1908,7 +1908,7 @@ for _, strategy in helpers.each_strategy() do local res = client:get("/routes/" .. route.id .. "/plugins/" .. plugin.instance_name) local body = assert.res_status(200, res) local json = cjson.decode(body) - local in_db = assert(db.plugins:select({ id = plugin.id }, { nulls = true })) + local in_db = assert(db.plugins:select(plugin, { nulls = true })) assert.same(json, in_db) end) end) @@ -1922,7 +1922,7 @@ for _, strategy in helpers.each_strategy() do local res = assert(client:delete("/routes/" .. route.id .. "/plugins/" .. plugin.id)) assert.res_status(204, res) - local in_db, err = db.plugins:select({id = plugin.id}, { nulls = true }) + local in_db, err = db.plugins:select(plugin, { nulls = true }) assert.is_nil(err) assert.is_nil(in_db) end) @@ -1935,7 +1935,7 @@ for _, strategy in helpers.each_strategy() do local res = assert(client:delete("/routes/" .. route.id .. "/plugins/" .. plugin.instance_name)) assert.res_status(204, res) - local in_db, err = db.plugins:select({id = plugin.id}, { nulls = true }) + local in_db, err = db.plugins:select(plugin, { nulls = true }) assert.is_nil(err) assert.is_nil(in_db) end) diff --git a/spec/02-integration/04-admin_api/10-services_routes_spec.lua b/spec/02-integration/04-admin_api/10-services_routes_spec.lua index ed71fc38f7dd..644c92dc6f23 100644 --- a/spec/02-integration/04-admin_api/10-services_routes_spec.lua +++ b/spec/02-integration/04-admin_api/10-services_routes_spec.lua @@ -328,7 +328,7 @@ for _, strategy in helpers.each_strategy() do assert.equal("https", json.protocol) assert.equal(service.id, json.id) - local in_db = assert(db.services:select({ id = service.id }, { nulls = true })) + local in_db = assert(db.services:select(service, { nulls = true })) assert.same(json, in_db) end end) @@ -381,7 +381,7 @@ for _, strategy in helpers.each_strategy() do assert.equal(cjson.null, json.path) assert.equal(service.id, json.id) - local in_db = assert(db.services:select({ id = service.id }, { nulls = true })) + local in_db = assert(db.services:select(service, { nulls = true })) assert.same(json, in_db) @@ -402,7 +402,7 @@ for _, strategy in helpers.each_strategy() do assert.equal("/", json.path) assert.equal(service.id, json.id) - local in_db = assert(db.services:select({ id = service.id }, { nulls = true })) + local in_db = assert(db.services:select(service, { nulls = true })) assert.same(json, in_db) local res = client:patch("/services/" .. 
service.id, { @@ -422,7 +422,7 @@ for _, strategy in helpers.each_strategy() do assert.equal(cjson.null, json.path) assert.equal(service.id, json.id) - local in_db = assert(db.services:select({ id = service.id }, { nulls = true })) + local in_db = assert(db.services:select(service, { nulls = true })) assert.same(json, in_db) end end) @@ -436,7 +436,7 @@ for _, strategy in helpers.each_strategy() do local body = assert.res_status(204, res) assert.equal("", body) - local in_db, err = db.services:select({ id = service.id }, { nulls = true }) + local in_db, err = db.services:select(service, { nulls = true }) assert.is_nil(err) assert.is_nil(in_db) end) @@ -677,7 +677,7 @@ for _, strategy in helpers.each_strategy() do local json = cjson.decode(body) assert.False(json.enabled) - local in_db = assert(db.plugins:select({ id = plugin.id }, { nulls = true })) + local in_db = assert(db.plugins:select(plugin, { nulls = true })) assert.same(json, in_db) end) it("updates a plugin bis", function() @@ -718,7 +718,7 @@ for _, strategy in helpers.each_strategy() do local json = cjson.decode(body) assert.same(ngx.null, json.service) - local in_db = assert(db.plugins:select({ id = plugin.id }, { nulls = true })) + local in_db = assert(db.plugins:select(plugin, { nulls = true })) assert.same(json, in_db) end) @@ -734,7 +734,7 @@ for _, strategy in helpers.each_strategy() do config = { key_names = { "testkey" } }, }) - local before = assert(db.plugins:select({ id = plugin.id }, { nulls = true })) + local before = assert(db.plugins:select(plugin, { nulls = true })) local res = assert(client:send { method = "PATCH", path = "/services/" .. service.id .. "/plugins/" .. plugin.id, @@ -750,7 +750,7 @@ for _, strategy in helpers.each_strategy() do }, code = 2, }, body) - local after = assert(db.plugins:select({ id = plugin.id }, { nulls = true })) + local after = assert(db.plugins:select(plugin, { nulls = true })) assert.same(before, after) assert.same({"testkey"}, after.config.key_names) end) @@ -808,7 +808,7 @@ for _, strategy in helpers.each_strategy() do local res = client:get("/services/" .. service.id .. "/plugins/" .. plugin.id) local body = assert.res_status(200, res) local json = cjson.decode(body) - local in_db = assert(db.plugins:select({ id = plugin.id }, { nulls = true })) + local in_db = assert(db.plugins:select(plugin, { nulls = true })) assert.same(json, in_db) end) it("retrieves a plugin by instance_name", function() @@ -820,7 +820,7 @@ for _, strategy in helpers.each_strategy() do local res = client:get("/services/" .. service.id .. "/plugins/" .. plugin.instance_name) local body = assert.res_status(200, res) local json = cjson.decode(body) - local in_db = assert(db.plugins:select({ id = plugin.id }, { nulls = true })) + local in_db = assert(db.plugins:select(plugin, { nulls = true })) assert.same(json, in_db) end) end) @@ -834,7 +834,7 @@ for _, strategy in helpers.each_strategy() do local res = assert(client:delete("/services/" .. service.id .. "/plugins/" .. plugin.id)) assert.res_status(204, res) - local in_db, err = db.plugins:select({id = plugin.id}, { nulls = true }) + local in_db, err = db.plugins:select(plugin, { nulls = true }) assert.is_nil(err) assert.is_nil(in_db) end) @@ -847,7 +847,7 @@ for _, strategy in helpers.each_strategy() do local res = assert(client:delete("/services/" .. service.id .. "/plugins/" .. 
plugin.instance_name)) assert.res_status(204, res) - local in_db, err = db.plugins:select({id = plugin.id}, { nulls = true }) + local in_db, err = db.plugins:select(plugin, { nulls = true }) assert.is_nil(err) assert.is_nil(in_db) end) diff --git a/spec/02-integration/04-admin_api/17-foreign-entity_spec.lua b/spec/02-integration/04-admin_api/17-foreign-entity_spec.lua index 9fd19dea3755..0c588774f152 100644 --- a/spec/02-integration/04-admin_api/17-foreign-entity_spec.lua +++ b/spec/02-integration/04-admin_api/17-foreign-entity_spec.lua @@ -76,8 +76,8 @@ for _, strategy in helpers.each_strategy() do local json = cjson.decode(body) assert.same(foreign_entity, json) - assert(db.foreign_references:delete({ id = foreign_reference.id })) - assert(db.foreign_entities:delete({ id = foreign_entity.id })) + assert(db.foreign_references:delete(foreign_reference)) + assert(db.foreign_entities:delete(foreign_entity)) end) it("retrieves by name", function() @@ -90,8 +90,8 @@ for _, strategy in helpers.each_strategy() do local json = cjson.decode(body) assert.same(foreign_entity, json) - assert(db.foreign_references:delete({ id = foreign_reference.id })) - assert(db.foreign_entities:delete({ id = foreign_entity.id })) + assert(db.foreign_references:delete(foreign_reference)) + assert(db.foreign_entities:delete(foreign_entity)) end) it("returns 404 if not found", function() @@ -116,8 +116,8 @@ for _, strategy in helpers.each_strategy() do }) assert.res_status(200, res) - assert(db.foreign_references:delete({ id = foreign_reference.id })) - assert(db.foreign_entities:delete({ id = foreign_entity.id })) + assert(db.foreign_references:delete(foreign_reference)) + assert(db.foreign_entities:delete(foreign_entity)) end) end) @@ -145,11 +145,11 @@ for _, strategy in helpers.each_strategy() do local json = cjson.decode(body) assert.equal(edited_name, json.name) - local in_db = assert(db.foreign_entities:select({ id = foreign_entity.id }, { nulls = true })) + local in_db = assert(db.foreign_entities:select(foreign_entity, { nulls = true })) assert.same(json, in_db) - assert(db.foreign_references:delete({ id = foreign_reference.id })) - assert(db.foreign_entities:delete({ id = foreign_entity.id })) + assert(db.foreign_references:delete(foreign_reference)) + assert(db.foreign_entities:delete(foreign_entity)) end end) @@ -175,11 +175,11 @@ for _, strategy in helpers.each_strategy() do local json = cjson.decode(body) assert.equal(edited_name, json.name) - local in_db = assert(db.foreign_entities:select({ id = foreign_entity.id }, { nulls = true })) + local in_db = assert(db.foreign_entities:select(foreign_entity, { nulls = true })) assert.same(json, in_db) - assert(db.foreign_references:delete({ id = foreign_reference.id })) - assert(db.foreign_entities:delete({ id = foreign_entity.id })) + assert(db.foreign_references:delete(foreign_reference)) + assert(db.foreign_entities:delete(foreign_entity)) end end) @@ -220,8 +220,8 @@ for _, strategy in helpers.each_strategy() do }, }, cjson.decode(body)) - assert(db.foreign_references:delete({ id = foreign_reference.id })) - assert(db.foreign_entities:delete({ id = foreign_entity.id })) + assert(db.foreign_references:delete(foreign_reference)) + assert(db.foreign_entities:delete(foreign_entity)) end end) end) @@ -236,8 +236,8 @@ for _, strategy in helpers.each_strategy() do local body = assert.res_status(405, res) assert.same({ message = 'Method not allowed' }, cjson.decode(body)) - assert(db.foreign_references:delete({ id = foreign_reference.id })) - 
assert(db.foreign_entities:delete({ id = foreign_entity.id })) + assert(db.foreign_references:delete(foreign_reference)) + assert(db.foreign_entities:delete(foreign_entity)) end) it("returns HTTP 404 with non-existing foreign entity ", function() diff --git a/spec/02-integration/13-vaults/01-vault_spec.lua b/spec/02-integration/13-vaults/01-vault_spec.lua index 4277648e1e86..0457923e7c64 100644 --- a/spec/02-integration/13-vaults/01-vault_spec.lua +++ b/spec/02-integration/13-vaults/01-vault_spec.lua @@ -85,7 +85,7 @@ for _, strategy in helpers.each_strategy() do assert.equal("{vault://unknown/missing-key}", certificate.key_alt) assert.is_nil(certificate["$refs"]) - certificate, err = db.certificates:select({ id = certificate.id }) + certificate, err = db.certificates:select(certificate) assert.is_nil(err) assert.equal(ssl_fixtures.cert, certificate.cert) assert.equal(ssl_fixtures.key, certificate.key) @@ -103,7 +103,7 @@ for _, strategy in helpers.each_strategy() do -- TODO: this is unexpected but schema.process_auto_fields uses currently -- the `nulls` parameter to detect if the call comes from Admin API -- for performance reasons - certificate, err = db.certificates:select({ id = certificate.id }, { nulls = true }) + certificate, err = db.certificates:select(certificate, { nulls = true }) assert.is_nil(err) assert.equal("{vault://test-vault/cert}", certificate.cert) assert.equal("{vault://test-vault/key}", certificate.key) @@ -142,7 +142,7 @@ for _, strategy in helpers.each_strategy() do assert.equal("{vault://unknown/missing-key}", certificate.key_alt) assert.is_nil(certificate["$refs"]) - certificate, err = db.certificates:select({ id = certificate.id }) + certificate, err = db.certificates:select(certificate) assert.is_nil(err) assert.equal(ssl_fixtures.cert, certificate.cert) assert.equal(ssl_fixtures.key, certificate.key) @@ -156,7 +156,7 @@ for _, strategy in helpers.each_strategy() do -- TODO: this is unexpected but schema.process_auto_fields uses currently -- the `nulls` parameter to detect if the call comes from Admin API -- for performance reasons - certificate, err = db.certificates:select({ id = certificate.id }, { nulls = true }) + certificate, err = db.certificates:select(certificate, { nulls = true }) assert.is_nil(err) assert.equal("{vault://mock-vault/cert}", certificate.cert) assert.equal("{vault://mock-vault/key}", certificate.key) diff --git a/spec/02-integration/20-wasm/02-db_spec.lua b/spec/02-integration/20-wasm/02-db_spec.lua index b19b252ac6cb..be7e2ec7e2bd 100644 --- a/spec/02-integration/20-wasm/02-db_spec.lua +++ b/spec/02-integration/20-wasm/02-db_spec.lua @@ -264,7 +264,7 @@ describe("wasm DB entities [#" .. strategy .. 
"]", function() assert.is_nil(chain.tags) - chain = assert(dao:update({ id = chain.id }, { tags = { "foo" } })) + chain = assert(dao:update(chain, { tags = { "foo" } })) assert.same({ "foo" }, chain.tags) end) end) diff --git a/spec/03-plugins/10-basic-auth/05-declarative_spec.lua b/spec/03-plugins/10-basic-auth/05-declarative_spec.lua index e73d1eaf5037..c7a3de114857 100644 --- a/spec/03-plugins/10-basic-auth/05-declarative_spec.lua +++ b/spec/03-plugins/10-basic-auth/05-declarative_spec.lua @@ -127,19 +127,19 @@ for _, strategy in helpers.each_strategy() do assert.equals("andru", consumer_def.username) assert.equals("donalds", consumer_def.custom_id) - local plugin = assert(db.plugins:select({ id = plugin_def.id })) + local plugin = assert(db.plugins:select(plugin_def)) assert.equals(plugin_def.id, plugin.id) assert.equals(service.id, plugin.service.id) assert.equals("basic-auth", plugin.name) assert.same(plugin_def.config, plugin.config) - local basicauth_credential = assert(db.basicauth_credentials:select({ id = basicauth_credential_def.id })) + local basicauth_credential = assert(db.basicauth_credentials:select(basicauth_credential_def)) assert.equals(basicauth_credential_def.id, basicauth_credential.id) assert.equals(consumer.id, basicauth_credential.consumer.id) assert.equals("james", basicauth_credential.username) assert.equals(crypto.hash(consumer.id, "secret"), basicauth_credential.password) - local basicauth_hashed_credential = assert(db.basicauth_credentials:select({ id = basicauth_hashed_credential_def.id })) + local basicauth_hashed_credential = assert(db.basicauth_credentials:select(basicauth_hashed_credential_def)) assert.equals(basicauth_hashed_credential_def.id, basicauth_hashed_credential.id) assert.equals(consumer.id, basicauth_hashed_credential.consumer.id) assert.equals("bond", basicauth_hashed_credential.username) @@ -224,5 +224,3 @@ for _, strategy in helpers.each_strategy() do end) end) end - - diff --git a/spec/03-plugins/16-jwt/02-api_spec.lua b/spec/03-plugins/16-jwt/02-api_spec.lua index 2d1f016090c7..e7422a98ea9d 100644 --- a/spec/03-plugins/16-jwt/02-api_spec.lua +++ b/spec/03-plugins/16-jwt/02-api_spec.lua @@ -248,7 +248,7 @@ for _, strategy in helpers.each_strategy() do path = "/consumers/bob/jwt/", }) local body = cjson.decode(assert.res_status(200, res)) - assert.equal(7, #(body.data)) + assert.equal(6, #(body.data)) end) end) end) diff --git a/spec/03-plugins/25-oauth2/01-schema_spec.lua b/spec/03-plugins/25-oauth2/01-schema_spec.lua index 5d72c355a9de..f0de8317a158 100644 --- a/spec/03-plugins/25-oauth2/01-schema_spec.lua +++ b/spec/03-plugins/25-oauth2/01-schema_spec.lua @@ -189,31 +189,31 @@ for _, strategy in helpers.each_strategy() do service = { id = service.id }, }) - token, err = db.oauth2_tokens:select({ id = token.id }) + token, err = db.oauth2_tokens:select(token) assert.falsy(err) assert.truthy(token) - code, err = db.oauth2_authorization_codes:select({ id = code.id }) + code, err = db.oauth2_authorization_codes:select(code) assert.falsy(err) assert.truthy(code) - ok, err, err_t = db.services:delete({ id = service.id }) + ok, err, err_t = db.services:delete(service) assert.truthy(ok) assert.is_falsy(err_t) assert.is_falsy(err) -- no more service - service, err = db.services:select({ id = service.id }) + service, err = db.services:select(service) assert.falsy(err) assert.falsy(service) -- no more token - token, err = db.oauth2_tokens:select({ id = token.id }) + token, err = db.oauth2_tokens:select(token) assert.falsy(err) assert.falsy(token) 
-- no more code - code, err = db.oauth2_authorization_codes:select({ id = code.id }) + code, err = db.oauth2_authorization_codes:select(code) assert.falsy(err) assert.falsy(code) end) diff --git a/spec/03-plugins/25-oauth2/03-access_spec.lua b/spec/03-plugins/25-oauth2/03-access_spec.lua index fcb187319f4c..cde494c43060 100644 --- a/spec/03-plugins/25-oauth2/03-access_spec.lua +++ b/spec/03-plugins/25-oauth2/03-access_spec.lua @@ -2883,7 +2883,7 @@ describe("Plugin: oauth2 [#" .. strategy .. "]", function() local db_code, err = db.oauth2_authorization_codes:select_by_code(code) assert.is_nil(err) db_code.plugin = ngx.null - local _, _, err = db.oauth2_authorization_codes:update({ id = db_code.id }, db_code) + local _, _, err = db.oauth2_authorization_codes:update(db_code, db_code) assert.is_nil(err) local res = assert(proxy_ssl_client:send { method = "POST", @@ -3732,9 +3732,7 @@ describe("Plugin: oauth2 [#" .. strategy .. "]", function() -- check refreshing sets created_at so access token doesn't expire - db.oauth2_tokens:update({ - id = new_refresh_token.id - }, { + db.oauth2_tokens:update(new_refresh_token, { created_at = 123, -- set time as expired }) diff --git a/spec/03-plugins/29-acme/01-client_spec.lua b/spec/03-plugins/29-acme/01-client_spec.lua index f77b712201fa..e5ff149e15b5 100644 --- a/spec/03-plugins/29-acme/01-client_spec.lua +++ b/spec/03-plugins/29-acme/01-client_spec.lua @@ -299,7 +299,7 @@ for _, strategy in helpers.each_strategy() do end) it("create new certificate", function() - new_cert, err = db.certificates:select({ id = new_sni.certificate.id }) + new_cert, err = db.certificates:select(new_sni.certificate) assert.is_nil(err) assert.same(new_cert.key, key) assert.same(new_cert.cert, crt) @@ -324,14 +324,14 @@ for _, strategy in helpers.each_strategy() do end) it("creates new certificate", function() - new_cert, err = db.certificates:select({ id = new_sni.certificate.id }) + new_cert, err = db.certificates:select(new_sni.certificate) assert.is_nil(err) assert.same(new_cert.key, key) assert.same(new_cert.cert, crt) end) it("deletes old certificate", function() - new_cert, err = db.certificates:select({ id = cert.id }) + new_cert, err = db.certificates:select(cert) assert.is_nil(err) assert.is_nil(new_cert) end) From 13d3d57e21a5893e45cf0dbb812ea44ec5ce2ef1 Mon Sep 17 00:00:00 2001 From: Chrono Date: Fri, 10 Nov 2023 11:29:30 +0800 Subject: [PATCH 099/249] refactor(pdk): move ffi.cdef gethostname from tools into pdk (#11967) gethostname is only used by pdk, it should not be in utils. 
--- kong/pdk/node.lua | 5 +++++ kong/tools/utils.lua | 6 ------ 2 files changed, 5 insertions(+), 6 deletions(-) diff --git a/kong/pdk/node.lua b/kong/pdk/node.lua index fd9a5a7f9122..54e074b8f44d 100644 --- a/kong/pdk/node.lua +++ b/kong/pdk/node.lua @@ -27,6 +27,11 @@ local shms = {} local n_workers = ngx.worker.count() +ffi.cdef[[ +int gethostname(char *name, size_t len); +]] + + for shm_name, shm in pairs(shared) do insert(shms, { zone = shm, diff --git a/kong/tools/utils.lua b/kong/tools/utils.lua index 3b0bda1540d4..41adc2ae82a3 100644 --- a/kong/tools/utils.lua +++ b/kong/tools/utils.lua @@ -8,7 +8,6 @@ -- @license [Apache 2.0](https://opensource.org/licenses/Apache-2.0) -- @module kong.tools.utils -local ffi = require "ffi" local pl_stringx = require "pl.stringx" local pl_path = require "pl.path" local pl_file = require "pl.file" @@ -31,11 +30,6 @@ local re_match = ngx.re.match local setmetatable = setmetatable -ffi.cdef[[ -int gethostname(char *name, size_t len); -]] - - local _M = {} From 8211b9d563483f60285a147a5f16a96f6863fe59 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Hans=20H=C3=BCbner?= Date: Fri, 10 Nov 2023 13:54:00 +0100 Subject: [PATCH 100/249] feat(testing): add reconfiguration completion detection mechanism (#11941) This change adds a new response header Kong-Transaction-Id to the Admin API. It contains the (ever-incrementing) PostgreSQL transaction ID of the change that was made. The value can then be put into the If-Kong-Transaction-Id header of a request to the proxy path. The request will be rejected with a 503 error if the proxy path has not been reconfigured yet with this or a later transaction ID. The mechanism is useful in testing, when changes are made through the Admin API and the effects on the proxy path are then to be verified. Rather than waiting for a static period or retrying the proxy path request until the expected result is received, the proxy path client specifies the last transaction ID received from the Admin API in the If-Kong-Transaction-Id header and retries the request if a 503 error is received. Both the generation of the Kong-Transaction-Id header and the check for If-Kong-Transaction-Id are enabled only when Kong is running in debug mode.
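To make the flow concrete, here is a condensed, hypothetical sketch of how a test client would use these headers, based on the description above and on the `spec.helpers` clients used elsewhere in this series (the full spec added by this patch appears further below); the service definition and the `/` path are placeholders, and the mechanism assumes Kong runs with `log_level = debug`.

```lua
-- Hedged sketch of the test-side flow; not part of the patch itself.
local helpers = require "spec.helpers"

local admin = helpers.admin_client()
local proxy = helpers.proxy_client()

-- 1. make a change through the Admin API and remember its transaction ID
local res = admin:post("/services", {
  body    = { name = "example", url = "http://127.0.0.1" },
  headers = { ["Content-Type"] = "application/json" },
})
assert(res.status == 201)
local txid = res.headers["kong-transaction-id"]

-- 2. poll the proxy path until that transaction has been applied; a 503 with
--    X-Kong-Reconfiguration-Status: pending means the rebuild is still underway
helpers.wait_until(function()
  local r = proxy:get("/", {
    headers = { ["If-Kong-Transaction-Id"] = txid },
  })
  r:read_body()
  return r.status ~= 503
end, 10)
```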
--- .../reconfiguration-completion-detection.yml | 3 + kong/clustering/config_helper.lua | 13 +- kong/clustering/control_plane.lua | 11 ++ kong/clustering/data_plane.lua | 5 +- kong/db/declarative/import.lua | 7 +- kong/global.lua | 13 +- kong/init.lua | 4 + kong/runloop/handler.lua | 131 +++++++-------- .../03-db/15-connection_pool_spec.lua | 1 + .../04-admin_api/02-kong_routes_spec.lua | 2 + .../24-reconfiguration-completion_spec.lua | 156 ++++++++++++++++++ 11 files changed, 269 insertions(+), 77 deletions(-) create mode 100644 changelog/unreleased/reconfiguration-completion-detection.yml create mode 100644 spec/02-integration/04-admin_api/24-reconfiguration-completion_spec.lua diff --git a/changelog/unreleased/reconfiguration-completion-detection.yml b/changelog/unreleased/reconfiguration-completion-detection.yml new file mode 100644 index 000000000000..4389fd362a78 --- /dev/null +++ b/changelog/unreleased/reconfiguration-completion-detection.yml @@ -0,0 +1,3 @@ +message: Provide mechanism to detect completion of reconfiguration on the proxy path +type: feature +scope: Core diff --git a/kong/clustering/config_helper.lua b/kong/clustering/config_helper.lua index 790f3e72c15d..82e94b357023 100644 --- a/kong/clustering/config_helper.lua +++ b/kong/clustering/config_helper.lua @@ -202,7 +202,12 @@ local function fill_empty_hashes(hashes) end end -function _M.update(declarative_config, config_table, config_hash, hashes) +function _M.update(declarative_config, msg) + + local config_table = msg.config_table + local config_hash = msg.config_hash + local hashes = msg.hashes + assert(type(config_table) == "table") if not config_hash then @@ -236,11 +241,15 @@ function _M.update(declarative_config, config_table, config_hash, hashes) -- executed by worker 0 local res - res, err = declarative.load_into_cache_with_events(entities, meta, new_hash, hashes) + res, err = declarative.load_into_cache_with_events(entities, meta, new_hash, hashes, msg.current_transaction_id) if not res then return nil, err end + if kong.configuration.log_level == "debug" then + ngx_log(ngx.DEBUG, _log_prefix, "loaded configuration with transaction ID " .. msg.current_transaction_id) + end + return true end diff --git a/kong/clustering/control_plane.lua b/kong/clustering/control_plane.lua index 220ba94a78d9..b3af1142ac43 100644 --- a/kong/clustering/control_plane.lua +++ b/kong/clustering/control_plane.lua @@ -11,6 +11,7 @@ local compat = require("kong.clustering.compat") local constants = require("kong.constants") local events = require("kong.clustering.events") local calculate_config_hash = require("kong.clustering.config_helper").calculate_config_hash +local global = require("kong.global") local string = string @@ -123,6 +124,12 @@ function _M:export_deflated_reconfigure_payload() hashes = hashes, } + local current_transaction_id + if kong.configuration.log_level == "debug" then + current_transaction_id = global.get_current_transaction_id() + payload.current_transaction_id = current_transaction_id + end + self.reconfigure_payload = payload payload, err = cjson_encode(payload) @@ -143,6 +150,10 @@ function _M:export_deflated_reconfigure_payload() self.current_config_hash = config_hash self.deflated_reconfigure_payload = payload + if kong.configuration.log_level == "debug" then + ngx_log(ngx_DEBUG, _log_prefix, "exported configuration with transaction id " .. 
current_transaction_id) + end + return payload, nil, config_hash end diff --git a/kong/clustering/data_plane.lua b/kong/clustering/data_plane.lua index 93d7e8ef60eb..f82dda86bfc8 100644 --- a/kong/clustering/data_plane.lua +++ b/kong/clustering/data_plane.lua @@ -213,10 +213,7 @@ function _M:communicate(premature) msg.timestamp and " with timestamp: " .. msg.timestamp or "", log_suffix) - local config_table = assert(msg.config_table) - - local pok, res, err = pcall(config_helper.update, self.declarative_config, - config_table, msg.config_hash, msg.hashes) + local pok, res, err = pcall(config_helper.update, self.declarative_config, msg) if pok then ping_immediately = true end diff --git a/kong/db/declarative/import.lua b/kong/db/declarative/import.lua index 4908e3d6a8e3..68cf31d08704 100644 --- a/kong/db/declarative/import.lua +++ b/kong/db/declarative/import.lua @@ -507,7 +507,7 @@ do local DECLARATIVE_LOCK_KEY = "declarative:lock" -- make sure no matter which path it exits, we released the lock. - load_into_cache_with_events = function(entities, meta, hash, hashes) + load_into_cache_with_events = function(entities, meta, hash, hashes, transaction_id) local kong_shm = ngx.shared.kong local ok, err = kong_shm:add(DECLARATIVE_LOCK_KEY, 0, DECLARATIVE_LOCK_TTL) @@ -522,6 +522,11 @@ do end ok, err = load_into_cache_with_events_no_lock(entities, meta, hash, hashes) + + if ok and transaction_id then + ok, err = kong_shm:set("declarative:current_transaction_id", transaction_id) + end + kong_shm:delete(DECLARATIVE_LOCK_KEY) return ok, err diff --git a/kong/global.lua b/kong/global.lua index cdceaa7f58ef..0acfda1698ce 100644 --- a/kong/global.lua +++ b/kong/global.lua @@ -68,7 +68,8 @@ end local _GLOBAL = { - phases = phase_checker.phases, + phases = phase_checker.phases, + CURRENT_TRANSACTION_ID = 0, } @@ -294,4 +295,14 @@ function _GLOBAL.init_timing() end +function _GLOBAL.get_current_transaction_id() + local rows, err = kong.db.connector:query("select txid_current() as _pg_transaction_id") + if not rows then + return nil, "could not query postgres for current transaction id: " .. 
err + else + return tonumber(rows[1]._pg_transaction_id) + end +end + + return _GLOBAL diff --git a/kong/init.lua b/kong/init.lua index 8fb8f605be13..0f50cf353466 100644 --- a/kong/init.lua +++ b/kong/init.lua @@ -1831,6 +1831,10 @@ local function serve_content(module) ngx.header["Access-Control-Allow-Origin"] = ngx.req.get_headers()["Origin"] or "*" + if kong.configuration.log_level == "debug" then + ngx.header["Kong-Transaction-Id"] = kong_global.get_current_transaction_id() + end + lapis.serve(module) ctx.KONG_ADMIN_CONTENT_ENDED_AT = get_updated_now_ms() diff --git a/kong/runloop/handler.lua b/kong/runloop/handler.lua index 250d712f55b9..3cdbfa507fcb 100644 --- a/kong/runloop/handler.lua +++ b/kong/runloop/handler.lua @@ -13,8 +13,7 @@ local concurrency = require "kong.concurrency" local lrucache = require "resty.lrucache" local ktls = require "resty.kong.tls" local request_id = require "kong.tracing.request_id" - - +local global = require "kong.global" local PluginsIterator = require "kong.runloop.plugins_iterator" @@ -748,6 +747,8 @@ do wasm.set_state(wasm_state) end + global.CURRENT_TRANSACTION_ID = kong_shm:get("declarative:current_transaction_id") or 0 + return true end) -- concurrency.with_coroutine_mutex @@ -765,11 +766,6 @@ do end -local function register_events() - events.register_events(reconfigure_handler) -end - - local balancer_prepare do local function sleep_once_for_balancer_init() @@ -921,7 +917,7 @@ return { return end - register_events() + events.register_events(reconfigure_handler) -- initialize balancers for active healthchecks timer_at(0, function() @@ -967,84 +963,62 @@ return { if strategy ~= "off" then local worker_state_update_frequency = kong.configuration.worker_state_update_frequency or 1 - local router_async_opts = { - name = "router", - timeout = 0, - on_timeout = "return_true", - } - - local function rebuild_router_timer(premature) + local function rebuild_timer(premature) if premature then return end - -- Don't wait for the semaphore (timeout = 0) when updating via the - -- timer. - -- If the semaphore is locked, that means that the rebuild is - -- already ongoing. - local ok, err = rebuild_router(router_async_opts) - if not ok then - log(ERR, "could not rebuild router via timer: ", err) - end - end - - local _, err = kong.timer:named_every("router-rebuild", - worker_state_update_frequency, - rebuild_router_timer) - if err then - log(ERR, "could not schedule timer to rebuild router: ", err) - end - - local plugins_iterator_async_opts = { - name = "plugins_iterator", - timeout = 0, - on_timeout = "return_true", - } - - local function rebuild_plugins_iterator_timer(premature) - if premature then - return + -- Before rebuilding the internal structures, retrieve the current PostgreSQL transaction ID to make it the + -- current transaction ID after the rebuild has finished. 
+ local rebuild_transaction_id, err = global.get_current_transaction_id() + if not rebuild_transaction_id then + log(ERR, err) end - local _, err = rebuild_plugins_iterator(plugins_iterator_async_opts) - if err then - log(ERR, "could not rebuild plugins iterator via timer: ", err) + local router_update_status, err = rebuild_router({ + name = "router", + timeout = 0, + on_timeout = "return_true", + }) + if not router_update_status then + log(ERR, "could not rebuild router via timer: ", err) end - end - - local _, err = kong.timer:named_every("plugins-iterator-rebuild", - worker_state_update_frequency, - rebuild_plugins_iterator_timer) - if err then - log(ERR, "could not schedule timer to rebuild plugins iterator: ", err) - end - - if wasm.enabled() then - local wasm_async_opts = { - name = "wasm", + local plugins_iterator_update_status, err = rebuild_plugins_iterator({ + name = "plugins_iterator", timeout = 0, on_timeout = "return_true", - } - - local function rebuild_wasm_filter_chains_timer(premature) - if premature then - return - end + }) + if not plugins_iterator_update_status then + log(ERR, "could not rebuild plugins iterator via timer: ", err) + end - local _, err = rebuild_wasm_state(wasm_async_opts) - if err then + if wasm.enabled() then + local wasm_update_status, err = rebuild_wasm_state({ + name = "wasm", + timeout = 0, + on_timeout = "return_true", + }) + if not wasm_update_status then log(ERR, "could not rebuild wasm filter chains via timer: ", err) end end - local _, err = kong.timer:named_every("wasm-filter-chains-rebuild", - worker_state_update_frequency, - rebuild_wasm_filter_chains_timer) - if err then - log(ERR, "could not schedule timer to rebuild wasm filter chains: ", err) + if rebuild_transaction_id then + -- Yield to process any pending invalidations + utils.yield() + + log(DEBUG, "configuration processing completed for transaction ID " .. rebuild_transaction_id) + global.CURRENT_TRANSACTION_ID = rebuild_transaction_id end end + + local _, err = kong.timer:named_every("rebuild", + worker_state_update_frequency, + rebuild_timer) + if err then + log(ERR, "could not schedule timer to rebuild: ", err) + end end end, }, @@ -1134,6 +1108,25 @@ return { }, access = { before = function(ctx) + if kong.configuration.log_level == "debug" then + -- If this is a version-conditional request, abort it if this dataplane has not processed at least the + -- specified configuration version yet. 
+ local if_kong_transaction_id = kong.request and kong.request.get_header('if-kong-transaction-id') + if if_kong_transaction_id then + if_kong_transaction_id = tonumber(if_kong_transaction_id) + if if_kong_transaction_id and if_kong_transaction_id >= global.CURRENT_TRANSACTION_ID then + return kong.response.error( + 503, + "Service Unavailable", + { + ["X-Kong-Reconfiguration-Status"] = "pending", + ["Retry-After"] = tostring(kong.configuration.worker_state_update_frequency or 1), + } + ) + end + end + end + -- if there is a gRPC service in the context, don't re-execute the pre-access -- phase handler - it has been executed before the internal redirect if ctx.service and (ctx.service.protocol == "grpc" or diff --git a/spec/02-integration/03-db/15-connection_pool_spec.lua b/spec/02-integration/03-db/15-connection_pool_spec.lua index 9b247d801a6b..306e12ce21fa 100644 --- a/spec/02-integration/03-db/15-connection_pool_spec.lua +++ b/spec/02-integration/03-db/15-connection_pool_spec.lua @@ -22,6 +22,7 @@ for pool_size, backlog_size in ipairs({ 0, 3 }) do nginx_worker_processes = 1, pg_pool_size = pool_size, pg_backlog = backlog_size, + log_level = "info", })) client = helpers.admin_client() end) diff --git a/spec/02-integration/04-admin_api/02-kong_routes_spec.lua b/spec/02-integration/04-admin_api/02-kong_routes_spec.lua index dce6ce2d7a50..22736c6b953d 100644 --- a/spec/02-integration/04-admin_api/02-kong_routes_spec.lua +++ b/spec/02-integration/04-admin_api/02-kong_routes_spec.lua @@ -50,6 +50,8 @@ describe("Admin API - Kong routes with strategy #" .. strategy, function() res2.headers["Date"] = nil res1.headers["X-Kong-Admin-Latency"] = nil res2.headers["X-Kong-Admin-Latency"] = nil + res1.headers["Kong-Transaction-Id"] = nil + res2.headers["Kong-Transaction-Id"] = nil assert.same(res1.headers, res2.headers) end) diff --git a/spec/02-integration/04-admin_api/24-reconfiguration-completion_spec.lua b/spec/02-integration/04-admin_api/24-reconfiguration-completion_spec.lua new file mode 100644 index 000000000000..1b29eaca496a --- /dev/null +++ b/spec/02-integration/04-admin_api/24-reconfiguration-completion_spec.lua @@ -0,0 +1,156 @@ +local helpers = require "spec.helpers" +local cjson = require "cjson" + +describe("Admin API - Reconfiguration Completion -", function() + + local WORKER_STATE_UPDATE_FREQ = 1 + + local admin_client + local proxy_client + + local function run_tests() + + local res = admin_client:post("/plugins", { + body = { + name = "request-termination", + config = { + status_code = 200, + body = "kong terminated the request", + } + }, + headers = { ["Content-Type"] = "application/json" }, + }) + assert.res_status(201, res) + + res = admin_client:post("/services", { + body = { + name = "test-service", + url = "http://127.0.0.1", + }, + headers = { ["Content-Type"] = "application/json" }, + }) + local body = assert.res_status(201, res) + local service = cjson.decode(body) + + -- We're running the route setup in `eventually` to cover for the unlikely case that reconfiguration completes + -- between adding the route and requesting the path through the proxy path. + + local next_path do + local path_suffix = 0 + function next_path() + path_suffix = path_suffix + 1 + return "/" .. tostring(path_suffix) + end + end + + local service_path + local kong_transaction_id + + assert.eventually(function() + service_path = next_path() + + res = admin_client:post("/services/" .. service.id .. 
"/routes", { + body = { + paths = { service_path } + }, + headers = { ["Content-Type"] = "application/json" }, + }) + assert.res_status(201, res) + kong_transaction_id = res.headers['kong-transaction-id'] + assert.is_string(kong_transaction_id) + + res = proxy_client:get(service_path, + { + headers = { + ["If-Kong-Transaction-Id"] = kong_transaction_id + } + }) + assert.res_status(503, res) + assert.equals("pending", res.headers['x-kong-reconfiguration-status']) + local retry_after = tonumber(res.headers['retry-after']) + ngx.sleep(retry_after) + end) + .has_no_error() + + assert.eventually(function() + res = proxy_client:get(service_path, + { + headers = { + ["If-Kong-Transaction-Id"] = kong_transaction_id + } + }) + body = assert.res_status(200, res) + assert.equals("kong terminated the request", body) + end) + .has_no_error() + end + + describe("#traditional mode -", function() + lazy_setup(function() + helpers.get_db_utils() + assert(helpers.start_kong({ + worker_consistency = "eventual", + worker_state_update_frequency = WORKER_STATE_UPDATE_FREQ, + })) + admin_client = helpers.admin_client() + proxy_client = helpers.proxy_client() + end) + + teardown(function() + if admin_client then + admin_client:close() + end + if proxy_client then + proxy_client:close() + end + helpers.stop_kong() + end) + + it("rejects proxy requests if worker state has not been updated yet", run_tests) + end) + + describe("#hybrid mode -", function() + lazy_setup(function() + helpers.get_db_utils() + + assert(helpers.start_kong({ + role = "control_plane", + database = "postgres", + prefix = "cp", + cluster_cert = "spec/fixtures/kong_clustering.crt", + cluster_cert_key = "spec/fixtures/kong_clustering.key", + lua_ssl_trusted_certificate = "spec/fixtures/kong_clustering.crt", + cluster_listen = "127.0.0.1:9005", + cluster_telemetry_listen = "127.0.0.1:9006", + nginx_conf = "spec/fixtures/custom_nginx.template", + })) + + assert(helpers.start_kong({ + role = "data_plane", + database = "off", + prefix = "dp", + cluster_cert = "spec/fixtures/kong_clustering.crt", + cluster_cert_key = "spec/fixtures/kong_clustering.key", + lua_ssl_trusted_certificate = "spec/fixtures/kong_clustering.crt", + cluster_control_plane = "127.0.0.1:9005", + cluster_telemetry_endpoint = "127.0.0.1:9006", + proxy_listen = "0.0.0.0:9002", + })) + admin_client = helpers.admin_client() + proxy_client = helpers.proxy_client("127.0.0.1", 9002) + end) + + teardown(function() + if admin_client then + admin_client:close() + end + if proxy_client then + proxy_client:close() + end + helpers.stop_kong("dp") + helpers.stop_kong("cp") + end) + + it("rejects proxy requests if worker state has not been updated yet", run_tests) + end) +end) From c7c44a274f6fceb40551fce14be93da0945fe676 Mon Sep 17 00:00:00 2001 From: Zhongwei Yao Date: Thu, 9 Nov 2023 17:51:59 -0800 Subject: [PATCH 101/249] chore(patches): fix ldoc intermittent fail caused by LuaJIT --- ...uaJIT-2.1-20230410_08_ldoc_error_fix.patch | 22 +++++++++++++++++++ .../kong/fix-ldoc-intermittent-fail.yml | 3 +++ 2 files changed, 25 insertions(+) create mode 100644 build/openresty/patches/LuaJIT-2.1-20230410_08_ldoc_error_fix.patch create mode 100644 changelog/unreleased/kong/fix-ldoc-intermittent-fail.yml diff --git a/build/openresty/patches/LuaJIT-2.1-20230410_08_ldoc_error_fix.patch b/build/openresty/patches/LuaJIT-2.1-20230410_08_ldoc_error_fix.patch new file mode 100644 index 000000000000..b8d999c25b1a --- /dev/null +++ b/build/openresty/patches/LuaJIT-2.1-20230410_08_ldoc_error_fix.patch @@ -0,0 
+1,22 @@ +From 65c849390702b1150d52e64db86cbc6b3c98413e Mon Sep 17 00:00:00 2001 +From: Mike Pall +Date: Thu, 9 Nov 2023 11:02:36 +0100 +Subject: [PATCH] Invalidate SCEV entry when returning to lower frame. + +Thanks to Zhongwei Yao. #1115 +--- + src/lj_record.c | 1 + + 1 file changed, 1 insertion(+) + +diff --git a/bundle/LuaJIT-2.1-20230410/src/lj_record.c b/bundle/LuaJIT-2.1-20230410/src/lj_record.c +index a49f942a..0122105b 100644 +--- a/bundle/LuaJIT-2.1-20230410/src/lj_record.c ++++ b/bundle/LuaJIT-2.1-20230410/src/lj_record.c +@@ -975,6 +975,7 @@ + emitir(IRTG(IR_RETF, IRT_PGC), trpt, trpc); + J->retdepth++; + J->needsnap = 1; ++ J->scev.idx = REF_NIL; + lj_assertJ(J->baseslot == 1+LJ_FR2, "bad baseslot for return"); + /* Shift result slots up and clear the slots of the new frame below. */ + memmove(J->base + cbase, J->base-1-LJ_FR2, sizeof(TRef)*nresults); diff --git a/changelog/unreleased/kong/fix-ldoc-intermittent-fail.yml b/changelog/unreleased/kong/fix-ldoc-intermittent-fail.yml new file mode 100644 index 000000000000..125cad64cf90 --- /dev/null +++ b/changelog/unreleased/kong/fix-ldoc-intermittent-fail.yml @@ -0,0 +1,3 @@ +message: fix ldoc intermittent failure caused by LuaJIT error. +type: bugfix +scope: Core From 8d0f9d2d1b1b851eedba675484e4f4dc44aa0c03 Mon Sep 17 00:00:00 2001 From: Aapo Talvensaari Date: Mon, 13 Nov 2023 07:34:59 +0200 Subject: [PATCH 102/249] chore(deps): bump busted from 2.1.2 to 2.2.0 (#11986) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit ### Summary #### Features - Add Korean localization — @marocchino - Add --exclude-name-file and --log-success options — @hanshuebner (When combined can automate re-running only failed tests) - Add --name option to easily run single tests — @hanshuebner #### Bug Fixes - Remove unused luafilesystem dependency — @dundargoc - Correct installation and example documentation — @C3pa and @alerque - Use escape sequences to output UTF-8 characters in more environments — @Commandcracker - Output more standard tracing notation in gtest handler — @Tieske - Fix casting to string before encoding errors in JSON — @svermeulen - Correct TAP handler to not error on no test files — @notomo Signed-off-by: Aapo Talvensaari --- Makefile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Makefile b/Makefile index 451df447abb2..8f3cc3e11de3 100644 --- a/Makefile +++ b/Makefile @@ -1,7 +1,7 @@ OS := $(shell uname | awk '{print tolower($$0)}') MACHINE := $(shell uname -m) -DEV_ROCKS = "busted 2.1.2" "busted-htest 1.0.0" "luacheck 1.1.1" "lua-llthreads2 0.1.6" "ldoc 1.5.0" "luacov 0.15.0" +DEV_ROCKS = "busted 2.2.0" "busted-htest 1.0.0" "luacheck 1.1.1" "lua-llthreads2 0.1.6" "ldoc 1.5.0" "luacov 0.15.0" WIN_SCRIPTS = "bin/busted" "bin/kong" "bin/kong-health" BUSTED_ARGS ?= -v TEST_CMD ?= bin/busted $(BUSTED_ARGS) From 6a322168ea654ffd001e481c00d859f4f7d78026 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Hans=20H=C3=BCbner?= Date: Mon, 13 Nov 2023 09:54:33 +0100 Subject: [PATCH 103/249] fix(tests): rename `Kong-Transaction-Id` header to `Kong-Test-Transaction-Id` and localize the `IS_DEBUG` flag (#12001) This is a non-functional change, and that should be obviously clear in the name. 
KAG-2759 --------- Co-authored-by: Datong Sun --- .../unreleased/reconfiguration-completion-detection.yml | 2 +- kong/init.lua | 2 +- kong/runloop/handler.lua | 6 ++++-- spec/02-integration/04-admin_api/02-kong_routes_spec.lua | 4 ++-- .../04-admin_api/24-reconfiguration-completion_spec.lua | 6 +++--- 5 files changed, 11 insertions(+), 9 deletions(-) diff --git a/changelog/unreleased/reconfiguration-completion-detection.yml b/changelog/unreleased/reconfiguration-completion-detection.yml index 4389fd362a78..585195b81dcb 100644 --- a/changelog/unreleased/reconfiguration-completion-detection.yml +++ b/changelog/unreleased/reconfiguration-completion-detection.yml @@ -1,3 +1,3 @@ -message: Provide mechanism to detect completion of reconfiguration on the proxy path +message: Provide mechanism to detect completion of reconfiguration on the proxy path. This is for internal testing only. type: feature scope: Core diff --git a/kong/init.lua b/kong/init.lua index 0f50cf353466..22bd31688e0b 100644 --- a/kong/init.lua +++ b/kong/init.lua @@ -1832,7 +1832,7 @@ local function serve_content(module) ngx.header["Access-Control-Allow-Origin"] = ngx.req.get_headers()["Origin"] or "*" if kong.configuration.log_level == "debug" then - ngx.header["Kong-Transaction-Id"] = kong_global.get_current_transaction_id() + ngx.header["Kong-Test-Transaction-Id"] = kong_global.get_current_transaction_id() end lapis.serve(module) diff --git a/kong/runloop/handler.lua b/kong/runloop/handler.lua index 3cdbfa507fcb..8d8630d94fdb 100644 --- a/kong/runloop/handler.lua +++ b/kong/runloop/handler.lua @@ -86,6 +86,7 @@ local QUESTION_MARK = byte("?") local ARRAY_MT = require("cjson.safe").array_mt local HOST_PORTS = {} +local IS_DEBUG = false local SUBSYSTEMS = constants.PROTOCOLS_WITH_SUBSYSTEM @@ -893,6 +894,7 @@ return { init_worker = { before = function() + IS_DEBUG = (kong.configuration.log_level == "debug") -- TODO: PR #9337 may affect the following line local prefix = kong.configuration.prefix or ngx.config.prefix() @@ -1108,10 +1110,10 @@ return { }, access = { before = function(ctx) - if kong.configuration.log_level == "debug" then + if IS_DEBUG then -- If this is a version-conditional request, abort it if this dataplane has not processed at least the -- specified configuration version yet. - local if_kong_transaction_id = kong.request and kong.request.get_header('if-kong-transaction-id') + local if_kong_transaction_id = kong.request and kong.request.get_header('if-kong-test-transaction-id') if if_kong_transaction_id then if_kong_transaction_id = tonumber(if_kong_transaction_id) if if_kong_transaction_id and if_kong_transaction_id >= global.CURRENT_TRANSACTION_ID then diff --git a/spec/02-integration/04-admin_api/02-kong_routes_spec.lua b/spec/02-integration/04-admin_api/02-kong_routes_spec.lua index 22736c6b953d..66cc828503f6 100644 --- a/spec/02-integration/04-admin_api/02-kong_routes_spec.lua +++ b/spec/02-integration/04-admin_api/02-kong_routes_spec.lua @@ -50,8 +50,8 @@ describe("Admin API - Kong routes with strategy #" .. 
strategy, function() res2.headers["Date"] = nil res1.headers["X-Kong-Admin-Latency"] = nil res2.headers["X-Kong-Admin-Latency"] = nil - res1.headers["Kong-Transaction-Id"] = nil - res2.headers["Kong-Transaction-Id"] = nil + res1.headers["Kong-Test-Transaction-Id"] = nil + res2.headers["Kong-Test-Transaction-Id"] = nil assert.same(res1.headers, res2.headers) end) diff --git a/spec/02-integration/04-admin_api/24-reconfiguration-completion_spec.lua b/spec/02-integration/04-admin_api/24-reconfiguration-completion_spec.lua index 1b29eaca496a..8f89d9c1d721 100644 --- a/spec/02-integration/04-admin_api/24-reconfiguration-completion_spec.lua +++ b/spec/02-integration/04-admin_api/24-reconfiguration-completion_spec.lua @@ -56,13 +56,13 @@ describe("Admin API - Reconfiguration Completion -", function() headers = { ["Content-Type"] = "application/json" }, }) assert.res_status(201, res) - kong_transaction_id = res.headers['kong-transaction-id'] + kong_transaction_id = res.headers['kong-test-transaction-id'] assert.is_string(kong_transaction_id) res = proxy_client:get(service_path, { headers = { - ["If-Kong-Transaction-Id"] = kong_transaction_id + ["If-Kong-Test-Transaction-Id"] = kong_transaction_id } }) assert.res_status(503, res) @@ -76,7 +76,7 @@ describe("Admin API - Reconfiguration Completion -", function() res = proxy_client:get(service_path, { headers = { - ["If-Kong-Transaction-Id"] = kong_transaction_id + ["If-Kong-Test-Transaction-Id"] = kong_transaction_id } }) body = assert.res_status(200, res) From b5c02a6d0d957a5dd65e538a6a44476ef8121459 Mon Sep 17 00:00:00 2001 From: Chrono Date: Mon, 13 Nov 2023 17:00:19 +0800 Subject: [PATCH 104/249] style(pdk): remove outdated comments for `kong.singletons` (#11998) --- kong/pdk/init.lua | 4 ---- 1 file changed, 4 deletions(-) diff --git a/kong/pdk/init.lua b/kong/pdk/init.lua index 37187e23d5df..92d10c590291 100644 --- a/kong/pdk/init.lua +++ b/kong/pdk/init.lua @@ -103,10 +103,6 @@ -- @redirect kong.nginx ---- Singletons --- @section singletons - - --- -- Instance of Kong's DAO (the `kong.db` module). Contains accessor objects -- to various entities. From b1b5f949e67907876f0a062ac473fe1397b6dbd5 Mon Sep 17 00:00:00 2001 From: Vincenzo Vicaretti Date: Mon, 13 Nov 2023 17:42:13 +0100 Subject: [PATCH 105/249] feat(conf): inject nginx directives into kong's proxy location block (#11623) `nginx_location_*`: the new prefix allows for the dynamic injection of Nginx directives into the `/` location block within Kong's Proxy server block. --- .../unreleased/kong/inject-nginx-directives-location.yml | 3 +++ kong.conf.default | 2 ++ kong/conf_loader/init.lua | 5 +++++ kong/templates/nginx_kong.lua | 5 +++++ spec/01-unit/04-prefix_handler_spec.lua | 8 ++++++++ 5 files changed, 23 insertions(+) create mode 100644 changelog/unreleased/kong/inject-nginx-directives-location.yml diff --git a/changelog/unreleased/kong/inject-nginx-directives-location.yml b/changelog/unreleased/kong/inject-nginx-directives-location.yml new file mode 100644 index 000000000000..2e0a19e72c63 --- /dev/null +++ b/changelog/unreleased/kong/inject-nginx-directives-location.yml @@ -0,0 +1,3 @@ +message: Allow to inject Nginx directives into Kong's proxy location block +type: feature +scope: Configuration diff --git a/kong.conf.default b/kong.conf.default index c904d64a60d6..7bd463da33d5 100644 --- a/kong.conf.default +++ b/kong.conf.default @@ -1069,6 +1069,8 @@ # - `nginx_http_`: Injects `` in Kong's `http {}` block. # - `nginx_proxy_`: Injects `` in Kong's proxy # `server {}` block. 
+# - `nginx_location_`: Injects `` in Kong's proxy `/` +# location block (nested under Kong's proxy server {} block). # - `nginx_upstream_`: Injects `` in Kong's proxy # `upstream {}` block. # - `nginx_admin_`: Injects `` in Kong's Admin API diff --git a/kong/conf_loader/init.lua b/kong/conf_loader/init.lua index 9b04ed7a9fe2..29ac8d52a2f4 100644 --- a/kong/conf_loader/init.lua +++ b/kong/conf_loader/init.lua @@ -197,6 +197,11 @@ local DYNAMIC_KEY_NAMESPACES = { prefix = "nginx_proxy_", ignore = EMPTY, }, + { + injected_conf_name = "nginx_location_directives", + prefix = "nginx_location_", + ignore = EMPTY, + }, { injected_conf_name = "nginx_status_directives", prefix = "nginx_status_", diff --git a/kong/templates/nginx_kong.lua b/kong/templates/nginx_kong.lua index 7e9a04bb4f93..c12ba4b3f82e 100644 --- a/kong/templates/nginx_kong.lua +++ b/kong/templates/nginx_kong.lua @@ -157,6 +157,11 @@ server { proxy_buffering on; proxy_request_buffering on; + # injected nginx_location_* directives +> for _, el in ipairs(nginx_location_directives) do + $(el.name) $(el.value); +> end + proxy_set_header TE $upstream_te; proxy_set_header Host $upstream_host; proxy_set_header Upgrade $upstream_upgrade; diff --git a/spec/01-unit/04-prefix_handler_spec.lua b/spec/01-unit/04-prefix_handler_spec.lua index 0337917237a4..7cc4d9c56769 100644 --- a/spec/01-unit/04-prefix_handler_spec.lua +++ b/spec/01-unit/04-prefix_handler_spec.lua @@ -611,6 +611,14 @@ describe("NGINX conf compiler", function() assert.matches("large_client_header_buffers%s+16 24k;", nginx_conf) end) + it("injects nginx_location_* directives", function() + local conf = assert(conf_loader(nil, { + nginx_location_proxy_ignore_headers = "X-Accel-Redirect", + })) + local nginx_conf = prefix_handler.compile_kong_conf(conf) + assert.matches("proxy_ignore_headers%sX%-Accel%-Redirect;", nginx_conf) + end) + it("injects nginx_admin_* directives", function() local conf = assert(conf_loader(nil, { nginx_admin_large_client_header_buffers = "4 24k", From f9ff92e0840ecb9670d93801e948c92ca21a14d4 Mon Sep 17 00:00:00 2001 From: Michael Martin Date: Wed, 8 Nov 2023 12:00:06 -0800 Subject: [PATCH 106/249] chore(ci): add ngx_wasm_module bump workflow --- .github/workflows/update-ngx-wasm-module.yml | 136 +++++++++++++++++++ 1 file changed, 136 insertions(+) create mode 100644 .github/workflows/update-ngx-wasm-module.yml diff --git a/.github/workflows/update-ngx-wasm-module.yml b/.github/workflows/update-ngx-wasm-module.yml new file mode 100644 index 000000000000..d63714a4904b --- /dev/null +++ b/.github/workflows/update-ngx-wasm-module.yml @@ -0,0 +1,136 @@ +name: Update ngx_wasm_module dependency + +on: + workflow_dispatch: + schedule: + # run weekly + - cron: '0 0 * * 0' + +jobs: + update: + runs-on: ubuntu-22.04 + + permissions: + # required to create a branch and push commits + contents: write + # required to open a PR for updates + pull-requests: write + + steps: + - name: Checkout Kong source code + uses: actions/checkout@v4 + with: + ref: master + + - name: Detect current version of NGX_WASM_MODULE in .requirements + id: check-kong + run: | + SHA=$(sed -nre 's/^NGX_WASM_MODULE=([^ ]+) .*/\1/p' < .requirements) + echo "sha=$SHA" | tee -a "$GITHUB_OUTPUT" + + - name: Check Kong/ngx_wasm_module HEAD + id: check-repo + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + run: | + SHA=$(gh api repos/Kong/ngx_wasm_module/commits/main --jq '.sha') + echo "sha=$SHA" | tee -a "$GITHUB_OUTPUT" + + - name: Update .requirements and create a pull request + if: 
steps.check-kong.outputs.sha != steps.check-repo.outputs.sha + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + FROM: ${{ steps.check-kong.outputs.sha }} + TO: ${{ steps.check-repo.outputs.sha }} + run: | + set -x + gh auth status + gh auth setup-git + + # masquerade as dependabot for the purposes of this commit/PR + git config --global user.email \ + "49699333+dependabot[bot]@users.noreply.github.com" + git config --global user.name "dependabot[bot]" + + readonly BRANCH=chore/deps-bump-ngx-wasm-module + if gh api repos/Kong/kong/branches/"$BRANCH"; then + echo "branch ($BRANCH) already exists, exiting" + exit 1 + fi + + EXISTING_PRS=$( + gh pr list \ + --json id \ + --head "$BRANCH" \ + | jq '.[]' + ) + + if [[ -n ${EXISTING_PRS:-} ]]; then + echo "existing PR for $BRANCH already exists, exiting" + echo "$EXISTING_PRS" + exit 1 + fi + + git switch --create "$BRANCH" + + sed -i \ + -re "s/^NGX_WASM_MODULE=.*/NGX_WASM_MODULE=$TO/" \ + .requirements + + git add .requirements + + # create or update changelog file + readonly CHANGELOG_FILE=changelog/unreleased/kong/bump-ngx-wasm-module.yml + { + printf 'message: "Bump `ngx_wasm_module` to `%s`"\n' "$TO" + printf 'type: dependency\n' + } > "$CHANGELOG_FILE" + + git add "$CHANGELOG_FILE" + + gh api repos/Kong/ngx_wasm_module/compare/"$FROM...$TO" \ + --jq '.commits | reverse | .[] | { + sha: .sha[0:7], + url: .html_url, + message: ( .commit.message | split("\n") | .[0] ) + }' \ + > commits.json + + # craft commit message + readonly HEADER="chore(deps): bump ngx_wasm_module to $TO" + { + printf '%s\n\nChanges since %s:\n\n' \ + "$HEADER" "$FROM" + + jq -r '"* \(.sha) - \(.message)"' \ + < commits.json + } > commit.txt + + git commit --file commit.txt + git push origin HEAD + + # craft PR body + { + printf '## Changelog `%s...%s`\n\n' \ + "${FROM:0:7}" "${TO:0:7}" + + printf '[Compare on GitHub](%s/compare/%s...%s)\n\n' \ + "https://github.com/Kong/ngx_wasm_module" \ + "$FROM" "$TO" + + # turn the commits into links for the PR body + jq -r \ + '"* [`\(.sha)`](\(.url)) - \(.message)"' \ + < commits.json + + printf '\n\n' + printf '**IMPORTANT: Remember to scan this commit log for updates ' + printf 'to Wasmtime/V8/Wasmer and update `.requirements` manually ' + printf 'as needed**\n' + } > body.md + + gh pr create \ + --base master \ + --head "$BRANCH" \ + --title "$HEADER" \ + --body-file body.md From b90d50884ef983fc059b5c1897e82ac947f879b6 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Tue, 14 Nov 2023 14:26:05 +0800 Subject: [PATCH 107/249] chore(deps): bump ngx_wasm_module to ddb3fa8f7cacc81557144cf22706484eabd79a84 (#12011) * chore(deps): bump ngx_wasm_module to ddb3fa8f7cacc81557144cf22706484eabd79a84 Changes since 21732b18fc46f409962ae77ddf01c713b568d078: * ddb3fa8 - docs(*) add AssemblyScript filter example and SDK fork * ecd7896 - refactor(proxy-wasm) improve pwexec resurrection and instance lifecycle * 9d304a1 - fix(proxy-wasm) free trapped instances early * 34c23c6 - fix(proxy-wasm) improve instance recycling robustness * e3d25c7 - chore(release) install setuptools on macOS * 689a460 - tests(*) add suites for client/upstream connection aborts * fa7c59b - misc(tcp) disable a debugging assertion * d6d04b9 - chore(util) add a patch for tcp_listen in HUP mode * 67f295b - misc(wrt) add Wasmtime version checks * ddf8105 - chore(deps) bump Wasmtime to 14.0.3 * de9eb4c - chore(ci) ignore release Dockerfiles changes * 84fb42b - chore(release) use Python 3.8+ in older 
distributions * 9538ad8 - chore(valgrind.supp) add a new suppression for headers-more-nginx-module * 28e282c - chore(deps) cargo update * 651728c - chore(deps) bump OpenSSL to 3.1.4 * 3cf7537 - chore(deps) bump Nginx to 1.25.3 * chore(deps): bump Wasmtime to 14.0.3 --------- Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: Michael Martin --- .requirements | 4 ++-- build/openresty/wasmx/wasmx_repositories.bzl | 8 ++++---- changelog/unreleased/kong/bump-ngx-wasm-module.yml | 2 ++ changelog/unreleased/kong/bump-wasmtime.yml | 2 ++ 4 files changed, 10 insertions(+), 6 deletions(-) create mode 100644 changelog/unreleased/kong/bump-ngx-wasm-module.yml create mode 100644 changelog/unreleased/kong/bump-wasmtime.yml diff --git a/.requirements b/.requirements index 7f7cae2e52f4..42b0dbef5154 100644 --- a/.requirements +++ b/.requirements @@ -13,7 +13,7 @@ LUA_RESTY_WEBSOCKET=60eafc3d7153bceb16e6327074e0afc3d94b1316 # 0.4.0 ATC_ROUTER=7a2ad42d4246598ba1f753b6ae79cb1456040afa # 1.3.1 KONG_MANAGER=nightly -NGX_WASM_MODULE=21732b18fc46f409962ae77ddf01c713b568d078 # prerelease-0.1.1 +NGX_WASM_MODULE=ddb3fa8f7cacc81557144cf22706484eabd79a84 WASMER=3.1.1 -WASMTIME=12.0.2 +WASMTIME=14.0.3 V8=10.5.18 diff --git a/build/openresty/wasmx/wasmx_repositories.bzl b/build/openresty/wasmx/wasmx_repositories.bzl index 5996e6ebeb02..26314f2ebec4 100644 --- a/build/openresty/wasmx/wasmx_repositories.bzl +++ b/build/openresty/wasmx/wasmx_repositories.bzl @@ -42,12 +42,12 @@ wasm_runtimes = { }, "wasmtime": { "linux": { - "x86_64": "9e02cd4201d74c68a236664f883873335c7427e820ce4a44c47c1cc98ec9e553", - "aarch64": "daf6ca147b288cf915978f064853f403ca163b52806ae0a52ddd5bd91a5a2507", + "x86_64": "a1285b0e2e3c6edf9cb6c7f214a682780f01ca8746a5d03f162512169cdf1e50", + "aarch64": "ef527ed31c3f141b5949bfd2e766a908f44b66ee839d4f0f22e740186236fd48", }, "macos": { - "x86_64": "35a0d3590afb147f9b312820df87189a9a376cc5bddc2d90b8d7e57b412c7dc6", - "aarch64": "6b8a13fbe6c5440b30632a1f9178df1cdc07bbf34633a105666e506bc8db941d", + "x86_64": "c30ffb79f8097512fbe9ad02503dcdb0cd168eec2112b6951a013eed51050245", + "aarch64": "2834d667fc218925184db77fa91eca44d14f688a4972e2f365fe2b7c12e6d49f", }, }, } diff --git a/changelog/unreleased/kong/bump-ngx-wasm-module.yml b/changelog/unreleased/kong/bump-ngx-wasm-module.yml new file mode 100644 index 000000000000..1550fb88dd2f --- /dev/null +++ b/changelog/unreleased/kong/bump-ngx-wasm-module.yml @@ -0,0 +1,2 @@ +message: "Bump `ngx_wasm_module` to `ddb3fa8f7cacc81557144cf22706484eabd79a84`" +type: dependency diff --git a/changelog/unreleased/kong/bump-wasmtime.yml b/changelog/unreleased/kong/bump-wasmtime.yml new file mode 100644 index 000000000000..d525704cd423 --- /dev/null +++ b/changelog/unreleased/kong/bump-wasmtime.yml @@ -0,0 +1,2 @@ +message: "Bump `Wasmtime` version to `14.0.3`" +type: dependency From c3e09efd6e77711c9278b4321530ea632ca9bd9e Mon Sep 17 00:00:00 2001 From: Aapo Talvensaari Date: Tue, 14 Nov 2023 08:36:10 +0200 Subject: [PATCH 108/249] perf(router): cooperatively yield when building statistics of routes (#12008) ### Summary There is a tight loop when building Router phone home statistics that can introduce latency spikes on worker 0. This commit adds yield to that loop. 
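For context, cooperative yielding in OpenResty boils down to handing control back to the event loop every N iterations of a hot loop; the sketch below is an illustrative approximation only and is not the actual `kong.tools.yield` implementation used by the diff that follows.

```lua
-- Illustrative approximation of a cooperative yield helper (assumption; the
-- real code lives in kong.tools.yield).
local YIELD_ITERATIONS = 500
local counter = 0

local function yield(in_loop, phase)
  phase = phase or ngx.get_phase()
  if phase == "init" or phase == "init_worker" then
    return -- ngx.sleep(0) is not available in these phases
  end

  if in_loop then
    counter = (counter + 1) % YIELD_ITERATIONS
    if counter ~= 0 then
      return -- only yield on every YIELD_ITERATIONS-th call
    end
  end

  ngx.sleep(0) -- give other timers and requests on this worker a chance to run
end
```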
KAG-3062 Signed-off-by: Aapo Talvensaari --- kong/router/utils.lua | 16 +++++++++++----- 1 file changed, 11 insertions(+), 5 deletions(-) diff --git a/kong/router/utils.lua b/kong/router/utils.lua index c92c5814514a..e1b8d44381f4 100644 --- a/kong/router/utils.lua +++ b/kong/router/utils.lua @@ -1,13 +1,15 @@ local constants = require("kong.constants") local hostname_type = require("kong.tools.utils").hostname_type local normalize = require("kong.tools.uri").normalize +local yield = require("kong.tools.yield").yield -local type = type -local error = error -local find = string.find -local sub = string.sub -local byte = string.byte +local type = type +local error = error +local find = string.find +local sub = string.sub +local byte = string.byte +local get_phase = ngx.get_phase local SLASH = byte("/") @@ -291,7 +293,11 @@ do local v0 = 0 local v1 = 0 + local phase = get_phase() + for _, route in ipairs(routes) do + yield(true, phase) + local r = route.route local paths_t = r.paths or empty_table From 36f2abe5dae9d4b43c0320eb84b6fb859a945ef0 Mon Sep 17 00:00:00 2001 From: Chrono Date: Tue, 14 Nov 2023 15:00:41 +0800 Subject: [PATCH 109/249] refactor(tools): remove reference of `gzip` module from `utils.lua` (#11985) KAG-3060 --- kong/clustering/compat/init.lua | 2 +- kong/clustering/control_plane.lua | 5 ++--- kong/clustering/data_plane.lua | 5 ++--- kong/tools/utils.lua | 1 - spec/01-unit/05-utils_spec.lua | 2 ++ spec/01-unit/19-hybrid/03-compat_spec.lua | 2 +- spec/helpers.lua | 4 +++- 7 files changed, 11 insertions(+), 10 deletions(-) diff --git a/kong/clustering/compat/init.lua b/kong/clustering/compat/init.lua index 9ae08eadc317..cb4b4245ebf4 100644 --- a/kong/clustering/compat/init.lua +++ b/kong/clustering/compat/init.lua @@ -10,7 +10,7 @@ local table_insert = table.insert local table_sort = table.sort local gsub = string.gsub local split = utils.split -local deflate_gzip = utils.deflate_gzip +local deflate_gzip = require("kong.tools.gzip").deflate_gzip local cjson_encode = cjson.encode local ngx = ngx diff --git a/kong/clustering/control_plane.lua b/kong/clustering/control_plane.lua index b3af1142ac43..423e33d74c50 100644 --- a/kong/clustering/control_plane.lua +++ b/kong/clustering/control_plane.lua @@ -5,7 +5,6 @@ local _MT = { __index = _M, } local semaphore = require("ngx.semaphore") local cjson = require("cjson.safe") local declarative = require("kong.db.declarative") -local utils = require("kong.tools.utils") local clustering_utils = require("kong.clustering.utils") local compat = require("kong.clustering.compat") local constants = require("kong.constants") @@ -41,8 +40,8 @@ local sleep = ngx.sleep local plugins_list_to_map = compat.plugins_list_to_map local update_compatible_payload = compat.update_compatible_payload -local deflate_gzip = utils.deflate_gzip -local yield = utils.yield +local deflate_gzip = require("kong.tools.gzip").deflate_gzip +local yield = require("kong.tools.yield").yield local connect_dp = clustering_utils.connect_dp diff --git a/kong/clustering/data_plane.lua b/kong/clustering/data_plane.lua index f82dda86bfc8..74f33d3b2584 100644 --- a/kong/clustering/data_plane.lua +++ b/kong/clustering/data_plane.lua @@ -8,7 +8,6 @@ local config_helper = require("kong.clustering.config_helper") local clustering_utils = require("kong.clustering.utils") local declarative = require("kong.db.declarative") local constants = require("kong.constants") -local utils = require("kong.tools.utils") local pl_stringx = require("pl.stringx") @@ -25,8 +24,8 @@ local cjson_decode = 
cjson.decode local cjson_encode = cjson.encode local exiting = ngx.worker.exiting local ngx_time = ngx.time -local inflate_gzip = utils.inflate_gzip -local yield = utils.yield +local inflate_gzip = require("kong.tools.gzip").inflate_gzip +local yield = require("kong.tools.yield").yield local ngx_ERR = ngx.ERR diff --git a/kong/tools/utils.lua b/kong/tools/utils.lua index 41adc2ae82a3..397c498f9479 100644 --- a/kong/tools/utils.lua +++ b/kong/tools/utils.lua @@ -965,7 +965,6 @@ _M.topological_sort = topological_sort do local modules = { - "kong.tools.gzip", "kong.tools.table", "kong.tools.sha256", "kong.tools.yield", diff --git a/spec/01-unit/05-utils_spec.lua b/spec/01-unit/05-utils_spec.lua index 58af472e50eb..05deee5ab434 100644 --- a/spec/01-unit/05-utils_spec.lua +++ b/spec/01-unit/05-utils_spec.lua @@ -754,6 +754,8 @@ describe("Utils", function() end) describe("gzip_[de_in]flate()", function() + local utils = require "kong.tools.gzip" + it("empty string", function() local gz = assert(utils.deflate_gzip("")) assert.equal(utils.inflate_gzip(gz), "") diff --git a/spec/01-unit/19-hybrid/03-compat_spec.lua b/spec/01-unit/19-hybrid/03-compat_spec.lua index 11cc6e672783..48085ab24ecf 100644 --- a/spec/01-unit/19-hybrid/03-compat_spec.lua +++ b/spec/01-unit/19-hybrid/03-compat_spec.lua @@ -1,7 +1,7 @@ local compat = require("kong.clustering.compat") local helpers = require ("spec.helpers") local declarative = require("kong.db.declarative") -local inflate_gzip = require("kong.tools.utils").inflate_gzip +local inflate_gzip = require("kong.tools.gzip").inflate_gzip local cjson_decode = require("cjson.safe").decode local ssl_fixtures = require ("spec.fixtures.ssl") diff --git a/spec/helpers.lua b/spec/helpers.lua index 9b1e93672d3c..bfb71f98a069 100644 --- a/spec/helpers.lua +++ b/spec/helpers.lua @@ -3788,6 +3788,8 @@ local function clustering_client(opts) assert(opts.cert) assert(opts.cert_key) + local inflate_gzip = require("kong.tools.gzip").inflate_gzip + local c = assert(ws_client:new()) local uri = "wss://" .. opts.host .. ":" .. opts.port .. "/v1/outlet?node_id=" .. (opts.node_id or utils.uuid()) .. @@ -3820,7 +3822,7 @@ local function clustering_client(opts) c:close() if typ == "binary" then - local odata = assert(utils.inflate_gzip(data)) + local odata = assert(inflate_gzip(data)) local msg = assert(cjson.decode(odata)) return msg From c6b1900651224268a1f3c7d7ac4b59df23f9df0f Mon Sep 17 00:00:00 2001 From: Aapo Talvensaari Date: Tue, 14 Nov 2023 12:01:14 +0200 Subject: [PATCH 110/249] docs(changelog): cooperatively yield when building statistics of routes (#12013) * docs(changelog): cooperatively yield when building statistics of routes ### Summary Adds missing changelog requested here: https://github.com/Kong/kong/pull/12008#issuecomment-1809618955 KAG-3062 --------- Signed-off-by: Aapo Talvensaari Co-authored-by: Datong Sun --- changelog/unreleased/kong/router-report-yield.yml | 3 +++ 1 file changed, 3 insertions(+) create mode 100644 changelog/unreleased/kong/router-report-yield.yml diff --git a/changelog/unreleased/kong/router-report-yield.yml b/changelog/unreleased/kong/router-report-yield.yml new file mode 100644 index 000000000000..3718cdee275a --- /dev/null +++ b/changelog/unreleased/kong/router-report-yield.yml @@ -0,0 +1,3 @@ +message: Cooperatively yield when building statistics of routes to reduce the impact to proxy path latency. 
+type: performance +scope: Performance From 9ffc223671e92149e75a7980fcbec8bd030356c8 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 13 Nov 2023 08:39:36 +0000 Subject: [PATCH 111/249] chore(deps): bump korthout/backport-action from 2.1.0 to 2.1.1 Bumps [korthout/backport-action](https://github.com/korthout/backport-action) from 2.1.0 to 2.1.1. - [Release notes](https://github.com/korthout/backport-action/releases) - [Commits](https://github.com/korthout/backport-action/compare/cb79e4e5f46c7d7d653dd3d5fa8a9b0a945dfe4b...08bafb375e6e9a9a2b53a744b987e5d81a133191) --- updated-dependencies: - dependency-name: korthout/backport-action dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] --- .github/workflows/backport.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/backport.yml b/.github/workflows/backport.yml index c2cc8d2a5100..290eb67c8912 100644 --- a/.github/workflows/backport.yml +++ b/.github/workflows/backport.yml @@ -13,7 +13,7 @@ jobs: steps: - uses: actions/checkout@v4 - name: Create backport pull requests - uses: korthout/backport-action@cb79e4e5f46c7d7d653dd3d5fa8a9b0a945dfe4b # v2.1.0 + uses: korthout/backport-action@08bafb375e6e9a9a2b53a744b987e5d81a133191 # v2.1.1 with: github_token: ${{ secrets.PAT }} pull_title: '[backport -> ${target_branch}] ${pull_title}' From edbbc03dbcd173cc6d9057a1ddd5edccac181a69 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 14 Nov 2023 06:59:59 +0000 Subject: [PATCH 112/249] chore(deps): bump tj-actions/changed-files from 40.1.0 to 40.1.1 Bumps [tj-actions/changed-files](https://github.com/tj-actions/changed-files) from 40.1.0 to 40.1.1. - [Release notes](https://github.com/tj-actions/changed-files/releases) - [Changelog](https://github.com/tj-actions/changed-files/blob/main/HISTORY.md) - [Commits](https://github.com/tj-actions/changed-files/compare/18c8a4ecebe93d32ed8a88e1d0c098f5f68c221b...25ef3926d147cd02fc7e931c1ef50772bbb0d25d) --- updated-dependencies: - dependency-name: tj-actions/changed-files dependency-type: direct:production update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] --- .github/workflows/changelog-requirement.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/changelog-requirement.yml b/.github/workflows/changelog-requirement.yml index e735d0df2622..891f41451f55 100644 --- a/.github/workflows/changelog-requirement.yml +++ b/.github/workflows/changelog-requirement.yml @@ -21,7 +21,7 @@ jobs: - name: Find changelog files id: changelog-list - uses: tj-actions/changed-files@18c8a4ecebe93d32ed8a88e1d0c098f5f68c221b # v37 + uses: tj-actions/changed-files@25ef3926d147cd02fc7e931c1ef50772bbb0d25d # v37 with: files_yaml: | changelogs: From f6ceec1954b85cfb22168b45e8f1eb88c0137617 Mon Sep 17 00:00:00 2001 From: Water-Melon Date: Thu, 7 Sep 2023 21:40:45 -0700 Subject: [PATCH 113/249] fix(tests): bump some deps docker image to have arm64 support --- .github/workflows/build_and_test.yml | 5 ++--- kong/plugins/zipkin/README.md | 2 +- scripts/dependency_services/docker-compose-test-services.yml | 5 +++-- .../01-schema/11-declarative_config/03-flatten_spec.lua | 4 ++-- 4 files changed, 8 insertions(+), 8 deletions(-) diff --git a/.github/workflows/build_and_test.yml b/.github/workflows/build_and_test.yml index ae7a234da9cc..d6ae528399d1 100644 --- a/.github/workflows/build_and_test.yml +++ b/.github/workflows/build_and_test.yml @@ -123,7 +123,6 @@ jobs: name: Postgres ${{ matrix.suite }} - ${{ matrix.split }} tests runs-on: ubuntu-22.04 needs: build - strategy: fail-fast: false matrix: @@ -156,7 +155,7 @@ jobs: --name kong_redis zipkin: - image: openzipkin/zipkin:2.19 + image: openzipkin/zipkin:2 ports: - 9411:9411 @@ -263,7 +262,7 @@ jobs: services: grpcbin: - image: moul/grpcbin + image: kong/grpcbin ports: - 15002:9000 - 15003:9001 diff --git a/kong/plugins/zipkin/README.md b/kong/plugins/zipkin/README.md index 38f6efa2599e..4769c997f4ee 100644 --- a/kong/plugins/zipkin/README.md +++ b/kong/plugins/zipkin/README.md @@ -2,7 +2,7 @@ Run postgres locally. - docker run -it -p 15002:9000 -p 15003:9001 moul/grpcbin + docker run -it -p 15002:9000 -p 15003:9001 kong/grpcbin docker run -p 9411:9411 -it openzipkin/zipkin:2.19 KONG_SPEC_TEST_GRPCBIN_PORT=15002 \ diff --git a/scripts/dependency_services/docker-compose-test-services.yml b/scripts/dependency_services/docker-compose-test-services.yml index 5091a95eb84a..823b0c6e3f92 100644 --- a/scripts/dependency_services/docker-compose-test-services.yml +++ b/scripts/dependency_services/docker-compose-test-services.yml @@ -33,14 +33,15 @@ services: timeout: 10s retries: 10 grpcbin: - image: moul/grpcbin + image: kong/grpcbin ports: - 127.0.0.1::9000 - 127.0.0.1::9001 zipkin: - image: openzipkin/zipkin:2.19 + image: openzipkin/zipkin:2 ports: - 127.0.0.1::9411 + command: --logging.level.zipkin2=DEBUG volumes: postgres-data: diff --git a/spec/01-unit/01-db/01-schema/11-declarative_config/03-flatten_spec.lua b/spec/01-unit/01-db/01-schema/11-declarative_config/03-flatten_spec.lua index 632062e9960d..4883b76dca5c 100644 --- a/spec/01-unit/01-db/01-schema/11-declarative_config/03-flatten_spec.lua +++ b/spec/01-unit/01-db/01-schema/11-declarative_config/03-flatten_spec.lua @@ -1763,7 +1763,7 @@ describe("declarative config: flatten", function() - username: foo jwt_secrets: - consumer: foo - key: "https://keycloak/auth/realms/foo" + key: "https://keycloak/realms/foo" algorithm: RS256 rsa_public_key: "]] .. key .. 
[[" ]])) @@ -1786,7 +1786,7 @@ describe("declarative config: flatten", function() }, created_at = 1234567890, id = "UUID", - key = "https://keycloak/auth/realms/foo", + key = "https://keycloak/realms/foo", rsa_public_key = key:gsub("\\n", "\n"), tags = null, } } From a13b6cd7f628f8fdcb27949573c0d003829115ea Mon Sep 17 00:00:00 2001 From: Water-Melon Date: Fri, 8 Sep 2023 02:00:27 -0700 Subject: [PATCH 114/249] fix(tests): improve test robustness around postgres when testing on arm64 --- .github/workflows/build_and_test.yml | 5 ++++ .../02-cmd/10-migrations_spec.lua | 29 ++++++++++++++++++- .../03-db/15-connection_pool_spec.lua | 2 +- 3 files changed, 34 insertions(+), 2 deletions(-) diff --git a/.github/workflows/build_and_test.yml b/.github/workflows/build_and_test.yml index d6ae528399d1..a3e98af0eea8 100644 --- a/.github/workflows/build_and_test.yml +++ b/.github/workflows/build_and_test.yml @@ -226,6 +226,11 @@ jobs: luarocks --version luarocks config + - name: Tune up postgres max_connections + run: | + # arm64 runners may use more connections due to more worker cores + psql -hlocalhost -Ukong kong -tAc 'alter system set max_connections = 5000;' + - name: Tests env: KONG_TEST_PG_DATABASE: kong diff --git a/spec/02-integration/02-cmd/10-migrations_spec.lua b/spec/02-integration/02-cmd/10-migrations_spec.lua index 72d9d678c183..bb896f15507d 100644 --- a/spec/02-integration/02-cmd/10-migrations_spec.lua +++ b/spec/02-integration/02-cmd/10-migrations_spec.lua @@ -189,7 +189,17 @@ for _, strategy in helpers.each_strategy() do assert.match("Executed migrations:", stdout, 1, true) if strategy ~= "off" then - local db = init_db() + -- to avoid postgresql error: + -- [PostgreSQL error] failed to retrieve PostgreSQL server_version_num: receive_message: + -- failed to get type: timeout + -- when testing on ARM64 platform which has low single-core performance + + local pok, db + helpers.wait_until(function() + pok, db = pcall(init_db) + return pok + end, 10) + -- valid CQL and SQL; don't expect to go over one page in CQL here local rows = db.connector:query([[SELECT * FROM schema_meta;]]) local n = 0 @@ -418,4 +428,21 @@ for _, strategy in helpers.each_strategy() do end) end) end) + + describe("sanity: make sure postgres server is not overloaded", function() + local do_it = strategy == "off" and pending or it + + do_it("", function() + helpers.wait_until(function() + local ok, err = pcall(init_db) + if err then + print(err) + end + return ok + end, 30, 1) + end) + + end) + end + diff --git a/spec/02-integration/03-db/15-connection_pool_spec.lua b/spec/02-integration/03-db/15-connection_pool_spec.lua index 306e12ce21fa..76850df3574a 100644 --- a/spec/02-integration/03-db/15-connection_pool_spec.lua +++ b/spec/02-integration/03-db/15-connection_pool_spec.lua @@ -1,7 +1,7 @@ local helpers = require "spec.helpers" local cjson = require "cjson" -for pool_size, backlog_size in ipairs({ 0, 3 }) do +for pool_size, backlog_size in ipairs({ 2, 3 }) do describe("#postgres Postgres connection pool with pool=" .. pool_size .. "and backlog=" .. 
backlog_size, function() local client lazy_setup(function() From e6f32f491d46a17eafff24cb13accbf178ea70ef Mon Sep 17 00:00:00 2001 From: Water-Melon Date: Thu, 12 Oct 2023 01:31:07 -0700 Subject: [PATCH 115/249] fix(tests): fix wait_until for zipkin test --- spec/03-plugins/34-zipkin/zipkin_spec.lua | 20 +++++++++++++++++--- 1 file changed, 17 insertions(+), 3 deletions(-) diff --git a/spec/03-plugins/34-zipkin/zipkin_spec.lua b/spec/03-plugins/34-zipkin/zipkin_spec.lua index 5f4c5db2f1bd..12543bb70922 100644 --- a/spec/03-plugins/34-zipkin/zipkin_spec.lua +++ b/spec/03-plugins/34-zipkin/zipkin_spec.lua @@ -63,8 +63,17 @@ local function wait_for_spans(zipkin_client, number_of_spans, remoteServiceName, local spans = {} helpers.wait_until(function() if trace_id then - local res = assert(zipkin_client:get("/api/v2/trace/" .. trace_id)) - spans = cjson.decode(assert.response(res).has.status(200)) + local res, err = zipkin_client:get("/api/v2/trace/" .. trace_id) + if err then + return false, err + end + + local body = res:read_body() + if res.status ~= 200 then + return false + end + + spans = cjson.decode(body) return #spans == number_of_spans end @@ -75,7 +84,12 @@ local function wait_for_spans(zipkin_client, number_of_spans, remoteServiceName, } }) - local all_spans = cjson.decode(assert.response(res).has.status(200)) + local body = res:read_body() + if res.status ~= 200 then + return false + end + + local all_spans = cjson.decode(body) if #all_spans > 0 then spans = all_spans[1] return #spans == number_of_spans From 731cc82135770821adc4541b4daf87efa843f434 Mon Sep 17 00:00:00 2001 From: Wangchong Zhou Date: Fri, 13 Oct 2023 02:07:15 -0700 Subject: [PATCH 116/249] fix(build): correctly detect cpu and cross build for LuaJIT debug build --- build/openresty/BUILD.openresty.bazel | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/build/openresty/BUILD.openresty.bazel b/build/openresty/BUILD.openresty.bazel index 698a702b492f..ae79fb938671 100644 --- a/build/openresty/BUILD.openresty.bazel +++ b/build/openresty/BUILD.openresty.bazel @@ -51,6 +51,10 @@ genrule( echo "$$flags" >$@ """.format(luajit_version = LUAJIT_VERSION), + # make sure to include `toolchain` so that this rule executes in target configuration + toolchains = [ + "@bazel_tools//tools/cpp:current_cc_toolchain", + ], ) rpath_flags = "-Wl,-rpath,%s/kong/lib -Wl,-rpath,%s/openresty/lualib" % ( @@ -75,7 +79,7 @@ make( "//conditions:default": [ ], }), - build_data = [ + data = [ ":luajit_xcflags", ], lib_source = ":luajit_srcs", From 9393b96f4f435c6b67846b1f018f2b5c5b1702f8 Mon Sep 17 00:00:00 2001 From: Water-Melon Date: Fri, 27 Oct 2023 11:03:27 +0000 Subject: [PATCH 117/249] fix(test): fix pdk flaky tests --- t/05-mlcache/03-peek.t | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/t/05-mlcache/03-peek.t b/t/05-mlcache/03-peek.t index 0ad33e0ddcb3..c5f57626bfce 100644 --- a/t/05-mlcache/03-peek.t +++ b/t/05-mlcache/03-peek.t @@ -673,7 +673,7 @@ stale: nil return 123 end)) - ngx.sleep(0.3) + ngx.sleep(0.31) local ttl, err, data, stale = cache:peek("my_key", true) if err then @@ -720,7 +720,7 @@ stale: true return end - ngx.sleep(0.3) + ngx.sleep(0.31) local ttl, err, data, stale = cache:peek("my_key", true) if err then @@ -762,7 +762,7 @@ stale: true return end - ngx.sleep(0.3) + ngx.sleep(0.31) for i = 1, 3 do remaining_ttl, err, data = cache:peek("key", true) @@ -808,7 +808,7 @@ data: 123 return end - ngx.sleep(0.3) + ngx.sleep(0.31) for i = 1, 3 do remaining_ttl, err, data = 
cache:peek("key", true) From fbcec4565ede99ba2019aca90beb0abcae33744e Mon Sep 17 00:00:00 2001 From: Xumin <100666470+StarlightIbuki@users.noreply.github.com> Date: Tue, 14 Nov 2023 10:52:49 +0000 Subject: [PATCH 118/249] chore(pdk): doc a known issue of get_headers() (#12006) Adressing KAG-2602, #11546 Co-authored-by: Datong Sun --- kong/pdk/service/response.lua | 2 ++ 1 file changed, 2 insertions(+) diff --git a/kong/pdk/service/response.lua b/kong/pdk/service/response.lua index 7a47419f96fb..ec51fe4fac08 100644 --- a/kong/pdk/service/response.lua +++ b/kong/pdk/service/response.lua @@ -198,6 +198,8 @@ local function new(pdk, major_version) -- kong.log.inspect(headers.x_another[1]) -- "foo bar" -- kong.log.inspect(headers["X-Another"][2]) -- "baz" -- end + -- Note that this function returns a proxy table + -- which cannot be iterated with `pairs` or used as operand of `#`. function response.get_headers(max_headers) check_phase(header_body_log) From 2b8c69ed46fd3db631c425787f2d2270eeb45525 Mon Sep 17 00:00:00 2001 From: samugi Date: Sat, 11 Nov 2023 00:47:48 +0100 Subject: [PATCH 119/249] fix(tracing): move dns query patch to globalpatches The dns query lazy patch was only effective for cosockets, not for the upstream dns queries, because the patch happened too late when the `toip` function had already been cached in some modules (i.e. balancer) This change moves the patch to `globalpatches.lua` so that dns spans are correctly generated both for cosocket and upstream dns queries. --- .../kong/tracing-dns-query-patch.yml | 3 + kong/globalpatches.lua | 25 ++--- kong/tracing/instrumentation.lua | 45 ++++----- .../14-tracing/01-instrumentations_spec.lua | 92 ++++++++++--------- 4 files changed, 80 insertions(+), 85 deletions(-) create mode 100644 changelog/unreleased/kong/tracing-dns-query-patch.yml diff --git a/changelog/unreleased/kong/tracing-dns-query-patch.yml b/changelog/unreleased/kong/tracing-dns-query-patch.yml new file mode 100644 index 000000000000..46df1e7ba543 --- /dev/null +++ b/changelog/unreleased/kong/tracing-dns-query-patch.yml @@ -0,0 +1,3 @@ +message: "**Tracing**: dns spans are now correctly generated for upstream dns queries (in addition to cosocket ones)" +type: bugfix +scope: Core diff --git a/kong/globalpatches.lua b/kong/globalpatches.lua index 3fe131fcf550..812d3d74e4b8 100644 --- a/kong/globalpatches.lua +++ b/kong/globalpatches.lua @@ -511,13 +511,17 @@ return function(options) do -- cosockets connect patch for dns resolution for: cli, rbusted and OpenResty local sub = string.sub + local client = package.loaded["kong.resty.dns.client"] + if not client then + client = require("kong.tools.dns")() + end + --- Patch the TCP connect and UDP setpeername methods such that all -- connections will be resolved first by the internal DNS resolver. -- STEP 1: load code that should not be using the patched versions require "resty.dns.resolver" -- will cache TCP and UDP functions -- STEP 2: forward declaration of locals to hold stuff loaded AFTER patching - local toip -- STEP 3: store original unpatched versions local old_tcp = ngx.socket.tcp @@ -538,7 +542,7 @@ return function(options) local function resolve_connect(f, sock, host, port, opts) if sub(host, 1, 5) ~= "unix:" then local try_list - host, port, try_list = toip(host, port) + host, port, try_list = client.toip(host, port) if not host then return nil, "[cosocket] DNS resolution failed: " .. tostring(port) .. ". Tried: " .. 
tostring(try_list) @@ -588,21 +592,10 @@ return function(options) -- STEP 5: load code that should be using the patched versions, if any (because of dependency chain) do - local client = package.loaded["kong.resty.dns.client"] - if not client then - client = require("kong.tools.dns")() - end - - toip = client.toip - - -- DNS query is lazily patched, it will only be wrapped - -- when instrumentation module is initialized later and - -- `tracing_instrumentations` includes "dns_query" or set - -- to "all". + -- dns query patch local instrumentation = require "kong.tracing.instrumentation" - instrumentation.set_patch_dns_query_fn(toip, function(wrap) - toip = wrap - end) + client.toip = instrumentation.get_wrapped_dns_query(client.toip) + -- patch request_uri to record http_client spans instrumentation.http_client() end diff --git a/kong/tracing/instrumentation.lua b/kong/tracing/instrumentation.lua index ad352d0d8c6c..cbfbf25c9ad3 100644 --- a/kong/tracing/instrumentation.lua +++ b/kong/tracing/instrumentation.lua @@ -272,16 +272,18 @@ function _M.precreate_balancer_span(ctx) end -local patch_dns_query do local raw_func - local patch_callback - local function wrap(host, port) - local span = tracer.start_span("kong.dns", { - span_kind = 3, -- client - }) - local ip_addr, res_port, try_list = raw_func(host, port) + local function wrap(host, port, ...) + local span + if _M.dns_query ~= NOOP then + span = tracer.start_span("kong.dns", { + span_kind = 3, -- client + }) + end + + local ip_addr, res_port, try_list = raw_func(host, port, ...) if span then span:set_attribute("dns.record.domain", host) span:set_attribute("dns.record.port", port) @@ -292,23 +294,15 @@ do return ip_addr, res_port, try_list end - --- Patch DNS query - -- It will be called before Kong's config loader. - -- - -- `callback` is a function that accept a wrap function, - -- it could be used to replace the orignal func lazily. + --- Get Wrapped DNS Query + -- Called before Kong's config loader. -- - -- e.g. patch_dns_query(func, function(wrap) - -- toip = wrap - -- end) - function _M.set_patch_dns_query_fn(func, callback) - raw_func = func - patch_callback = callback - end - - -- patch lazily - patch_dns_query = function() - patch_callback(wrap) + -- returns a wrapper for the provided input function `f` + -- that stores dns info in the `kong.dns` span when the dns + -- instrumentation is enabled. 
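-- Illustrative sketch of the ordering problem this wrapper addresses (standalone
-- Lua, not Kong source; the stub resolver and print calls are assumptions). If a
-- module caches `client.toip` before the wrapper is installed, it keeps calling the
-- unwrapped function; installing the wrapper in globalpatches.lua runs early enough
-- that a later `local toip = client.toip` picks up the instrumented version.

local client = { toip = function(host) return "198.51.100.1", 80 end }  -- stub resolver

local function get_wrapped(f)
  return function(host, port, ...)
    print("kong.dns span start: " .. host)          -- stand-in for tracer.start_span()
    local ip, res_port, try_list = f(host, port, ...)
    print("kong.dns span finish: " .. tostring(ip)) -- stand-in for span:finish()
    return ip, res_port, try_list
  end
end

client.toip = get_wrapped(client.toip)  -- patch before anyone copies the reference

local toip = client.toip                -- what e.g. the balancer effectively does later
toip("example.test", 80)                -- now goes through the wrapper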
+ function _M.get_wrapped_dns_query(f) + raw_func = f + return wrap end -- append available_types @@ -425,11 +419,6 @@ function _M.init(config) sampling_rate = sampling_rate, }) tracer.set_global_tracer(tracer) - - -- global patch - if _M.dns_query ~= NOOP then - patch_dns_query() - end end end diff --git a/spec/02-integration/14-tracing/01-instrumentations_spec.lua b/spec/02-integration/14-tracing/01-instrumentations_spec.lua index 28a5ba4255a3..aab22792396e 100644 --- a/spec/02-integration/14-tracing/01-instrumentations_spec.lua +++ b/spec/02-integration/14-tracing/01-instrumentations_spec.lua @@ -4,23 +4,29 @@ local pretty = require "pl.pretty" local fmt = string.format -local function get_span(name, spans) +local function get_spans(name, spans) + local res = {} for _, span in ipairs(spans) do if span.name == name then - return span + res[#res+1] = span end end + return #res > 0 and res or nil end -local function assert_has_span(name, spans) - local span = get_span(name, spans) - assert.is_truthy(span, fmt("\nExpected to find %q span in:\n%s\n", +local function assert_has_spans(name, spans, count) + local res = get_spans(name, spans) + assert.is_truthy(res, fmt("\nExpected to find %q span in:\n%s\n", name, pretty.write(spans))) - return span + if count then + assert.equals(count, #res, fmt("\nExpected to find %d %q spans in:\n%s\n", + count, name, pretty.write(spans))) + end + return #res > 0 and res or nil end local function assert_has_no_span(name, spans) - local found = get_span(name, spans) + local found = get_spans(name, spans) assert.is_falsy(found, fmt("\nExpected not to find %q span in:\n%s\n", name, pretty.write(spans))) end @@ -152,8 +158,8 @@ for _, strategy in helpers.each_strategy() do assert.is_string(res) local spans = cjson.decode(res) - assert_has_span("kong", spans) - assert_has_span("kong.database.query", spans) + assert_has_spans("kong", spans, 1) + assert_has_spans("kong.database.query", spans) assert_has_no_span("kong.balancer", spans) assert_has_no_span("kong.dns", spans) @@ -186,8 +192,8 @@ for _, strategy in helpers.each_strategy() do assert.is_string(res) local spans = cjson.decode(res) - assert_has_span("kong", spans) - assert_has_span("kong.router", spans) + assert_has_spans("kong", spans, 1) + assert_has_spans("kong.router", spans, 1) assert_has_no_span("kong.balancer", spans) assert_has_no_span("kong.database.query", spans) @@ -220,8 +226,8 @@ for _, strategy in helpers.each_strategy() do assert.is_string(res) local spans = cjson.decode(res) - assert_has_span("kong", spans) - assert_has_span("kong.internal.request", spans) + assert_has_spans("kong", spans, 1) + assert_has_spans("kong.internal.request", spans, 1) assert_has_no_span("kong.balancer", spans) assert_has_no_span("kong.database.query", spans) @@ -254,8 +260,8 @@ for _, strategy in helpers.each_strategy() do assert.is_string(res) local spans = cjson.decode(res) - assert_has_span("kong", spans) - assert_has_span("kong.balancer", spans) + assert_has_spans("kong", spans, 1) + assert_has_spans("kong.balancer", spans, 1) assert_has_no_span("kong.database.query", spans) assert_has_no_span("kong.dns", spans) @@ -288,8 +294,8 @@ for _, strategy in helpers.each_strategy() do assert.is_string(res) local spans = cjson.decode(res) - assert_has_span("kong", spans) - assert_has_span("kong.rewrite.plugin." .. tcp_trace_plugin_name, spans) + assert_has_spans("kong", spans, 1) + assert_has_spans("kong.rewrite.plugin." .. 
tcp_trace_plugin_name, spans, 1) assert_has_no_span("kong.balancer", spans) assert_has_no_span("kong.database.query", spans) @@ -323,8 +329,8 @@ for _, strategy in helpers.each_strategy() do -- Making sure it's alright local spans = cjson.decode(res) - assert_has_span("kong", spans) - assert_has_span("kong.header_filter.plugin." .. tcp_trace_plugin_name, spans) + assert_has_spans("kong", spans, 1) + assert_has_spans("kong.header_filter.plugin." .. tcp_trace_plugin_name, spans, 1) assert_has_no_span("kong.balancer", spans) assert_has_no_span("kong.database.query", spans) @@ -348,7 +354,7 @@ for _, strategy in helpers.each_strategy() do -- Making sure it's alright local spans = cjson.decode(res) - local kong_span = assert_has_span("kong", spans) + local kong_span = assert_has_spans("kong", spans, 1)[1] assert_has_attributes(kong_span, { ["http.method"] = "GET", @@ -357,7 +363,7 @@ for _, strategy in helpers.each_strategy() do ["http.route"] = "/noproxy", ["http.url"] = "http://0.0.0.0/noproxy", }) - assert_has_span("kong.header_filter.plugin." .. tcp_trace_plugin_name, spans) + assert_has_spans("kong.header_filter.plugin." .. tcp_trace_plugin_name, spans, 1) assert_has_no_span("kong.balancer", spans) assert_has_no_span("kong.database.query", spans) assert_has_no_span("kong.router", spans) @@ -390,8 +396,8 @@ for _, strategy in helpers.each_strategy() do assert.is_string(res) local spans = cjson.decode(res) - assert_has_span("kong", spans) - assert_has_span("kong.dns", spans) + assert_has_spans("kong", spans, 1) + assert_has_spans("kong.dns", spans, 2) assert_has_no_span("kong.balancer", spans) assert_has_no_span("kong.database.query", spans) @@ -427,14 +433,14 @@ for _, strategy in helpers.each_strategy() do assert.is_string(res) local spans = cjson.decode(res) - local kong_span = assert_has_span("kong", spans) - local dns_span = assert_has_span("kong.dns", spans) - local balancer_span = assert_has_span("kong.balancer", spans) - local db_span = assert_has_span("kong.database.query", spans) - local int_req_span = assert_has_span("kong.internal.request", spans) - assert_has_span("kong.router", spans) - assert_has_span("kong.rewrite.plugin." .. tcp_trace_plugin_name, spans) - assert_has_span("kong.header_filter.plugin." .. tcp_trace_plugin_name, spans) + local kong_span = assert_has_spans("kong", spans, 1)[1] + local dns_spans = assert_has_spans("kong.dns", spans, 2) + local balancer_span = assert_has_spans("kong.balancer", spans, 1)[1] + local db_spans = assert_has_spans("kong.database.query", spans)[1] + local int_req_span = assert_has_spans("kong.internal.request", spans, 1)[1] + assert_has_spans("kong.router", spans, 1) + assert_has_spans("kong.rewrite.plugin." .. tcp_trace_plugin_name, spans, 1) + assert_has_spans("kong.header_filter.plugin." .. 
tcp_trace_plugin_name, spans, 1) -- span attributes check assert_has_attributes(kong_span, { @@ -449,11 +455,13 @@ for _, strategy in helpers.each_strategy() do ["kong.request.id"] = "^[0-9a-f]+$", }) - assert_has_attributes(dns_span, { - ["dns.record.domain"] = "[%w\\.]+", - ["dns.record.ip"] = "[%d\\.]+", - ["dns.record.port"] = "%d+" - }) + for _, dns_span in ipairs(dns_spans) do + assert_has_attributes(dns_span, { + ["dns.record.domain"] = "[%w\\.]+", + ["dns.record.ip"] = "[%d\\.]+", + ["dns.record.port"] = "%d+" + }) + end assert_has_attributes(balancer_span, { ["net.peer.ip"] = "127.0.0.1", @@ -461,10 +469,12 @@ for _, strategy in helpers.each_strategy() do ["net.peer.name"] = "127.0.0.1", }) - assert_has_attributes(db_span, { - ["db.statement"] = ".*", - ["db.system"] = "%w+", - }) + for _, db_span in ipairs(db_spans) do + assert_has_attributes(db_span, { + ["db.statement"] = ".*", + ["db.system"] = "%w+", + }) + end assert_has_attributes(int_req_span, { ["http.method"] = "GET", @@ -499,7 +509,7 @@ for _, strategy in helpers.each_strategy() do assert.is_string(res) local spans = cjson.decode(res) - assert_has_span("kong", spans) + assert_has_spans("kong", spans, 1) end) end) end) From 4d1fbbad21e5c04526f776886d702de8bc997332 Mon Sep 17 00:00:00 2001 From: Xumin <100666470+StarlightIbuki@users.noreply.github.com> Date: Tue, 14 Nov 2023 16:49:17 +0000 Subject: [PATCH 120/249] fix(tracing): handle error when DNS query fails (#11935) --- .../fix_dns_instrument_error_handling.yml | 3 + kong/tracing/instrumentation.lua | 7 ++- .../14-tracing/01-instrumentations_spec.lua | 61 ++++++++++++++++++- 3 files changed, 69 insertions(+), 2 deletions(-) create mode 100644 changelog/unreleased/kong/fix_dns_instrument_error_handling.yml diff --git a/changelog/unreleased/kong/fix_dns_instrument_error_handling.yml b/changelog/unreleased/kong/fix_dns_instrument_error_handling.yml new file mode 100644 index 000000000000..b5e4010c5029 --- /dev/null +++ b/changelog/unreleased/kong/fix_dns_instrument_error_handling.yml @@ -0,0 +1,3 @@ +message: "**tracing:** Fixed an issue where a DNS query failure would cause a tracing failure." +type: bugfix +scope: Core diff --git a/kong/tracing/instrumentation.lua b/kong/tracing/instrumentation.lua index cbfbf25c9ad3..717b9121445b 100644 --- a/kong/tracing/instrumentation.lua +++ b/kong/tracing/instrumentation.lua @@ -287,7 +287,12 @@ do if span then span:set_attribute("dns.record.domain", host) span:set_attribute("dns.record.port", port) - span:set_attribute("dns.record.ip", ip_addr) + if ip_addr then + span:set_attribute("dns.record.ip", ip_addr) + else + span:record_error(res_port) + span:set_status(2) + end span:finish() end diff --git a/spec/02-integration/14-tracing/01-instrumentations_spec.lua b/spec/02-integration/14-tracing/01-instrumentations_spec.lua index aab22792396e..781c85cd8fb2 100644 --- a/spec/02-integration/14-tracing/01-instrumentations_spec.lua +++ b/spec/02-integration/14-tracing/01-instrumentations_spec.lua @@ -48,7 +48,7 @@ for _, strategy in helpers.each_strategy() do describe("tracing instrumentations spec #" .. 
strategy, function() - local function setup_instrumentations(types, custom_spans) + local function setup_instrumentations(types, custom_spans, post_func) local bp, _ = assert(helpers.get_db_utils(strategy, { "services", "routes", @@ -96,6 +96,10 @@ for _, strategy in helpers.each_strategy() do } }) + if post_func then + post_func(bp) + end + assert(helpers.start_kong { database = strategy, nginx_conf = "spec/fixtures/custom_nginx.template", @@ -512,5 +516,60 @@ for _, strategy in helpers.each_strategy() do assert_has_spans("kong", spans, 1) end) end) + + describe("#regression", function () + describe("nil attribute for dns_query when fail to query", function () + lazy_setup(function() + setup_instrumentations("dns_query", true, function(bp) + -- intentionally trigger a DNS query error + local service = bp.services:insert({ + name = "inexist-host-service", + host = "really-inexist-host", + port = 80, + }) + + bp.routes:insert({ + service = service, + protocols = { "http" }, + paths = { "/test" }, + }) + end) + end) + + lazy_teardown(function() + helpers.stop_kong() + end) + + it("contains the expected kong.dns span", function () + local thread = helpers.tcp_server(TCP_PORT) + local r = assert(proxy_client:send { + method = "GET", + path = "/test", + }) + assert.res_status(503, r) + + -- Getting back the TCP server input + local ok, res = thread:join() + assert.True(ok) + assert.is_string(res) + + local spans = cjson.decode(res) + assert_has_spans("kong", spans) + local dns_spans = assert_has_spans("kong.dns", spans) + local upstream_dns + for _, dns_span in ipairs(dns_spans) do + if dns_span.attributes["dns.record.domain"] == "really-inexist-host" then + upstream_dns = dns_span + break + end + end + + assert.is_not_nil(upstream_dns) + assert.is_nil(upstream_dns.attributes["dns.record.ip"]) + -- has error reported + assert.is_not_nil(upstream_dns.events) + end) + end) + end) end) end From 31f0cc9ff21c2c73cedc6991b0a4976d204df9d2 Mon Sep 17 00:00:00 2001 From: Xumin <100666470+StarlightIbuki@users.noreply.github.com> Date: Wed, 15 Nov 2023 04:16:12 +0000 Subject: [PATCH 121/249] tests(key-auth): remove the use of `mockbin.com` during tests (#12017) mockbin.com redirects to insomnia official site and could trigger a security policy, which makes tests failing. KAG-3091 --- .../03-plugins/09-key-auth/02-access_spec.lua | 20 ++++++++++++------- 1 file changed, 13 insertions(+), 7 deletions(-) diff --git a/spec/03-plugins/09-key-auth/02-access_spec.lua b/spec/03-plugins/09-key-auth/02-access_spec.lua index 8135569a1f8c..f176e7f246ca 100644 --- a/spec/03-plugins/09-key-auth/02-access_spec.lua +++ b/spec/03-plugins/09-key-auth/02-access_spec.lua @@ -1,14 +1,19 @@ -local helpers = require "spec.helpers" -local cjson = require "cjson" -local meta = require "kong.meta" -local utils = require "kong.tools.utils" +local helpers = require "spec.helpers" +local cjson = require "cjson" +local meta = require "kong.meta" +local utils = require "kong.tools.utils" +local http_mock = require "spec.helpers.http_mock" + +local MOCK_PORT = helpers.get_available_port() for _, strategy in helpers.each_strategy() do describe("Plugin: key-auth (access) [#" .. strategy .. 
"]", function() - local proxy_client + local mock, proxy_client local kong_cred lazy_setup(function() + mock = http_mock.new(MOCK_PORT) + mock:start() local bp = helpers.get_db_utils(strategy, { "routes", "services", @@ -51,8 +56,8 @@ for _, strategy in helpers.each_strategy() do local service7 = bp.services:insert{ protocol = "http", - port = 80, - host = "mockbin.com", + port = MOCK_PORT, + host = "localhost", } local route7 = bp.routes:insert { @@ -183,6 +188,7 @@ for _, strategy in helpers.each_strategy() do end helpers.stop_kong() + mock:stop() end) describe("Unauthorized", function() From a6d647566991e339ea5126113df4bef21fe0115d Mon Sep 17 00:00:00 2001 From: Xiaoch Date: Wed, 15 Nov 2023 14:41:52 +0800 Subject: [PATCH 122/249] fix(dns): eliminate asynchronous timer in `syncQuery()` to prevent deadlock risk (#11900) * Revert "fix(conf): set default value of `dns_no_sync` to `on` (#11869)" This reverts commit 3be2513a60b9f5f0a89631ff17c202e6113981c0. * fix(dns): introduce the synchronous query in syncQuery() to prevent hang risk Originally the first request to `syncQuery()` will trigger an asynchronous timer event, which added the risk of thread pool hanging. With this patch, cold synchronously DNS query will always happen in the current thread if current phase supports yielding. Fix FTI-5348 --------- Co-authored-by: Datong Sun --- .../unreleased/kong/fix_dns_blocking.yml | 3 + .../kong/fix_dns_disable_dns_no_sync.yml | 3 + kong.conf.default | 2 +- kong/resty/dns/client.lua | 146 +++++++++--------- kong/templates/kong_defaults.lua | 2 +- spec/01-unit/21-dns-client/02-client_spec.lua | 22 ++- t/03-dns-client/01-phases.t | 7 +- t/03-dns-client/02-timer-usage.t | 76 +++++---- 8 files changed, 137 insertions(+), 124 deletions(-) create mode 100644 changelog/unreleased/kong/fix_dns_blocking.yml create mode 100644 changelog/unreleased/kong/fix_dns_disable_dns_no_sync.yml diff --git a/changelog/unreleased/kong/fix_dns_blocking.yml b/changelog/unreleased/kong/fix_dns_blocking.yml new file mode 100644 index 000000000000..a167c5fa1656 --- /dev/null +++ b/changelog/unreleased/kong/fix_dns_blocking.yml @@ -0,0 +1,3 @@ +message: Eliminate asynchronous timer in syncQuery() to prevent hang risk +type: bugfix +scope: Core diff --git a/changelog/unreleased/kong/fix_dns_disable_dns_no_sync.yml b/changelog/unreleased/kong/fix_dns_disable_dns_no_sync.yml new file mode 100644 index 000000000000..f489ab254481 --- /dev/null +++ b/changelog/unreleased/kong/fix_dns_disable_dns_no_sync.yml @@ -0,0 +1,3 @@ +message: The default value of `dns_no_sync` option has been changed to `off` +type: feature +scope: Configuration diff --git a/kong.conf.default b/kong.conf.default index 7bd463da33d5..14c2a3a09465 100644 --- a/kong.conf.default +++ b/kong.conf.default @@ -1550,7 +1550,7 @@ #dns_error_ttl = 1 # TTL in seconds for error responses. -#dns_no_sync = on # If enabled, then upon a cache-miss every +#dns_no_sync = off # If enabled, then upon a cache-miss every # request will trigger its own dns query. 
# When disabled multiple requests for the # same name/type will be synchronised to a diff --git a/kong/resty/dns/client.lua b/kong/resty/dns/client.lua index d3edd588cd8b..c3f460d4b892 100644 --- a/kong/resty/dns/client.lua +++ b/kong/resty/dns/client.lua @@ -31,13 +31,13 @@ local time = ngx.now local log = ngx.log local ERR = ngx.ERR local WARN = ngx.WARN +local ALERT = ngx.ALERT local DEBUG = ngx.DEBUG --[[ DEBUG = ngx.WARN --]] local PREFIX = "[dns-client] " local timer_at = ngx.timer.at -local get_phase = ngx.get_phase local math_min = math.min local math_max = math.max @@ -651,7 +651,9 @@ _M.init = function(options) config = options -- store it in our module level global - resolve_max_wait = options.timeout / 1000 * options.retrans -- maximum time to wait for the dns resolver to hit its timeouts + -- maximum time to wait for the dns resolver to hit its timeouts + -- + 1s to ensure some delay in timer execution and semaphore return are accounted for + resolve_max_wait = options.timeout / 1000 * options.retrans + 1 return true end @@ -742,46 +744,61 @@ local function individualQuery(qname, r_opts, try_list) end local queue = setmetatable({}, {__mode = "v"}) + +local function enqueue_query(key, qname, r_opts, try_list) + local item = { + key = key, + semaphore = semaphore(), + qname = qname, + r_opts = cycle_aware_deep_copy(r_opts), + try_list = try_list, + expire_time = time() + resolve_max_wait, + } + queue[key] = item + return item +end + + +local function dequeue_query(item) + if queue[item.key] == item then + -- query done, but by now many others might be waiting for our result. + -- 1) stop new ones from adding to our lock/semaphore + queue[item.key] = nil + -- 2) release all waiting threads + item.semaphore:post(math_max(item.semaphore:count() * -1, 1)) + item.semaphore = nil + end +end + + +local function queue_get_query(key, try_list) + local item = queue[key] + + if not item then + return nil + end + + -- bug checks: release it actively if the waiting query queue is blocked + if item.expire_time < time() then + local err = "stale query, key:" .. key + add_status_to_try_list(try_list, err) + log(ALERT, PREFIX, err) + dequeue_query(item) + return nil + end + + return item +end + + -- to be called as a timer-callback, performs a query and returns the results -- in the `item` table. local function executeQuery(premature, item) if premature then return end - local r, err = resolver:new(config) - if not r then - item.result, item.err = r, "failed to create a resolver: " .. err - else - --[[ - log(DEBUG, PREFIX, "Query executing: ", item.qname, ":", item.r_opts.qtype, " ", fquery(item)) - --]] - add_status_to_try_list(item.try_list, "querying") - item.result, item.err = r:query(item.qname, item.r_opts) - if item.result then - --[[ - log(DEBUG, PREFIX, "Query answer: ", item.qname, ":", item.r_opts.qtype, " ", fquery(item), - " ", frecord(item.result)) - --]] - parseAnswer(item.qname, item.r_opts.qtype, item.result, item.try_list) - --[[ - log(DEBUG, PREFIX, "Query parsed answer: ", item.qname, ":", item.r_opts.qtype, " ", fquery(item), - " ", frecord(item.result)) - else - log(DEBUG, PREFIX, "Query error: ", item.qname, ":", item.r_opts.qtype, " err=", tostring(err)) - --]] - end - end + item.result, item.err = individualQuery(item.qname, item.r_opts, item.try_list) - -- query done, but by now many others might be waiting for our result. 
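-- Illustrative sketch of the "single flight" shape used by the queue above
-- (standalone example, not Kong source; assumes an OpenResty yieldable phase
-- such as access or a timer). The first caller for a key performs the lookup
-- itself; concurrent callers park on a shared semaphore and reuse its result.

local semaphore = require "ngx.semaphore"

local inflight = {}

local function single_flight(key, timeout, lookup)
  local item = inflight[key]

  if not item then
    item = { sema = assert(semaphore.new()) }
    inflight[key] = item

    item.result, item.err = lookup(key)        -- resolve synchronously, in this thread

    inflight[key] = nil                        -- 1) stop new callers from attaching
    local waiting = -item.sema:count()         -- 2) wake every parked caller
    item.sema:post(waiting > 0 and waiting or 1)

    return item.result, item.err
  end

  local ok, err = item.sema:wait(timeout)      -- later callers block until woken
  if not ok then
    return nil, err
  end

  return item.result, item.err
end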
- -- 1) stop new ones from adding to our lock/semaphore - queue[item.key] = nil - -- 2) release all waiting threads - item.semaphore:post(math_max(item.semaphore:count() * -1, 1)) - item.semaphore = nil - ngx.sleep(0) - -- 3) destroy the resolver -- ditto in individualQuery - if r then - r:destroy() - end + dequeue_query(item) end @@ -795,7 +812,7 @@ end -- the `semaphore` field will be removed). Upon error it returns `nil+error`. local function asyncQuery(qname, r_opts, try_list) local key = qname..":"..r_opts.qtype - local item = queue[key] + local item = queue_get_query(key, try_list) if item then --[[ log(DEBUG, PREFIX, "Query async (exists): ", key, " ", fquery(item)) @@ -804,14 +821,7 @@ local function asyncQuery(qname, r_opts, try_list) return item -- already in progress, return existing query end - item = { - key = key, - semaphore = semaphore(), - qname = qname, - r_opts = cycle_aware_deep_copy(r_opts), - try_list = try_list, - } - queue[key] = item + item = enqueue_query(key, qname, r_opts, try_list) local ok, err = timer_at(0, executeQuery, item) if not ok then @@ -837,40 +847,24 @@ end -- @return `result + nil + try_list`, or `nil + err + try_list` in case of errors local function syncQuery(qname, r_opts, try_list) local key = qname..":"..r_opts.qtype - local item = queue[key] - -- if nothing is in progress, we start a new async query + local item = queue_get_query(key, try_list) + + -- If nothing is in progress, we start a new sync query if not item then - local err - item, err = asyncQuery(qname, r_opts, try_list) - if not item then - return item, err, try_list - end - else - add_status_to_try_list(try_list, "in progress (sync)") - end + item = enqueue_query(key, qname, r_opts, try_list) - local supported_semaphore_wait_phases = { - rewrite = true, - access = true, - content = true, - timer = true, - ssl_cert = true, - ssl_session_fetch = true, - } + item.result, item.err = individualQuery(qname, item.r_opts, try_list) - local ngx_phase = get_phase() + dequeue_query(item) - if not supported_semaphore_wait_phases[ngx_phase] then - -- phase not supported by `semaphore:wait` - -- return existing query (item) - -- - -- this will avoid: - -- "dns lookup pool exceeded retries" (second try and subsequent retries) - -- "API disabled in the context of init_worker_by_lua" (first try) - return item, nil, try_list + return item.result, item.err, try_list end + -- If the query is already in progress, we wait for it. 
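-- Worked example for the wait budget computed in _M.init() above, using
-- illustrative values rather than any particular deployment's settings:
-- with a 2000 ms resolver timeout and 5 retransmits,
--   resolve_max_wait = 2000 / 1000 * 5 + 1 = 11 seconds,
-- i.e. a caller parked in syncQuery() only gives up after the resolver has had
-- time to exhaust all of its retries, plus the one-second margin for timer
-- scheduling and the semaphore hand-off.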
+ + add_status_to_try_list(try_list, "in progress (sync)") + -- block and wait for the async query to complete local ok, err = item.semaphore:wait(resolve_max_wait) if ok and item.result then @@ -883,6 +877,14 @@ local function syncQuery(qname, r_opts, try_list) return item.result, item.err, try_list end + -- bug checks + if not ok and not item.err then + item.err = err -- only first expired wait() reports error + log(ALERT, PREFIX, "semaphore:wait(", resolve_max_wait, ") failed: ", err, + ", count: ", item.semaphore and item.semaphore:count(), + ", qname: ", qname) + end + err = err or item.err or "unknown" add_status_to_try_list(try_list, "error: "..err) diff --git a/kong/templates/kong_defaults.lua b/kong/templates/kong_defaults.lua index d1f685ae7df7..c28245192924 100644 --- a/kong/templates/kong_defaults.lua +++ b/kong/templates/kong_defaults.lua @@ -159,7 +159,7 @@ dns_stale_ttl = 4 dns_cache_size = 10000 dns_not_found_ttl = 30 dns_error_ttl = 1 -dns_no_sync = on +dns_no_sync = off dedicated_config_processing = on worker_consistency = eventual diff --git a/spec/01-unit/21-dns-client/02-client_spec.lua b/spec/01-unit/21-dns-client/02-client_spec.lua index 42e20a716bc7..a4285089ed83 100644 --- a/spec/01-unit/21-dns-client/02-client_spec.lua +++ b/spec/01-unit/21-dns-client/02-client_spec.lua @@ -584,7 +584,10 @@ describe("[DNS client]", function() } })) query_func = function(self, original_query_func, name, options) - ngx.sleep(5) + -- The first request uses syncQuery not waiting on the + -- aysncQuery timer, so the low-level r:query() could not sleep(5s), + -- it can only sleep(timeout). + ngx.sleep(math.min(timeout, 5)) return nil end local start_time = ngx.now() @@ -1742,9 +1745,12 @@ describe("[DNS client]", function() end) it("timeout while waiting", function() + + local timeout = 500 + local ip = "1.4.2.3" -- basically the local function _synchronized_query assert(client.init({ - timeout = 500, + timeout = timeout, retrans = 1, resolvConf = { -- resolv.conf without `search` and `domain` options @@ -1755,7 +1761,7 @@ describe("[DNS client]", function() -- insert a stub thats waits and returns a fixed record local name = TEST_DOMAIN query_func = function() - local ip = "1.4.2.3" + local ip = ip local entry = { { type = client.TYPE_A, @@ -1767,7 +1773,9 @@ describe("[DNS client]", function() touch = 0, expire = gettime() + 10, } - sleep(0.5) -- wait before we return the results + -- wait before we return the results + -- `+ 2` s ensures that the semaphore:wait() expires + sleep(timeout/1000 + 2) return entry end @@ -1797,10 +1805,12 @@ describe("[DNS client]", function() ngx.thread.wait(coros[i]) -- this wait will resume the scheduled ones end - -- all results are equal, as they all will wait for the first response - for i = 1, 10 do + -- results[1~9] are equal, as they all will wait for the first response + for i = 1, 9 do assert.equal("timeout", results[i]) end + -- results[10] comes from synchronous DNS access of the first request + assert.equal(ip, results[10][1]["address"]) end) end) diff --git a/t/03-dns-client/01-phases.t b/t/03-dns-client/01-phases.t index e12cfab420cd..7f10aa9f6197 100644 --- a/t/03-dns-client/01-phases.t +++ b/t/03-dns-client/01-phases.t @@ -1,6 +1,6 @@ use Test::Nginx::Socket; -plan tests => repeat_each() * (blocks() * 5); +plan tests => repeat_each() * (blocks() * 4 + 1); workers(6); @@ -59,8 +59,7 @@ qq { GET /t --- response_body answers: nil -err: dns client error: 101 empty record received ---- no_error_log +err: nil +--- error_log [error] -dns lookup 
pool exceeded retries API disabled in the context of init_worker_by_lua diff --git a/t/03-dns-client/02-timer-usage.t b/t/03-dns-client/02-timer-usage.t index c78f1a5da1f0..73c35ccb1c4e 100644 --- a/t/03-dns-client/02-timer-usage.t +++ b/t/03-dns-client/02-timer-usage.t @@ -2,76 +2,72 @@ use Test::Nginx::Socket; plan tests => repeat_each() * (blocks() * 5); -workers(6); +workers(1); no_shuffle(); run_tests(); __DATA__ - -=== TEST 1: reuse timers for queries of same name, independent on # of workers ---- http_config eval -qq { - init_worker_by_lua_block { - local client = require("kong.resty.dns.client") - assert(client.init({ - nameservers = { "127.0.0.53" }, - hosts = {}, -- empty tables to parse to prevent defaulting to /etc/hosts - resolvConf = {}, -- and resolv.conf files - order = { "A" }, - })) - local host = "konghq.com" - local typ = client.TYPE_A - for i = 1, 10 do - client.resolve(host, { qtype = typ }) - end - - local host = "mockbin.org" - for i = 1, 10 do - client.resolve(host, { qtype = typ }) - end - - workers = ngx.worker.count() - timers = ngx.timer.pending_count() - } -} +=== TEST 1: stale result triggers async timer --- config location = /t { access_by_lua_block { + -- init local client = require("kong.resty.dns.client") - assert(client.init()) + assert(client.init({ + nameservers = { "127.0.0.53" }, + hosts = {}, -- empty tables to parse to prevent defaulting to /etc/hosts + resolvConf = {}, -- and resolv.conf files + order = { "A" }, + validTtl = 1, + })) + local host = "konghq.com" local typ = client.TYPE_A - local answers, err = client.resolve(host, { qtype = typ }) + -- first time + + local answers, err, try_list = client.resolve(host, { qtype = typ }) if not answers then ngx.say("failed to resolve: ", err) + return end - ngx.say("first address name: ", answers[1].name) + ngx.say("first try_list: ", tostring(try_list)) + + -- sleep to wait for dns record to become stale + ngx.sleep(1.5) - host = "mockbin.org" - answers, err = client.resolve(host, { qtype = typ }) + -- second time: use stale result and trigger async timer + answers, err, try_list = client.resolve(host, { qtype = typ }) if not answers then ngx.say("failed to resolve: ", err) + return end - ngx.say("second address name: ", answers[1].name) + ngx.say("second try_list: ", tostring(try_list)) - ngx.say("workers: ", workers) + -- third time: use stale result and find triggered async timer - -- should be 2 timers maximum (1 for each hostname) - ngx.say("timers: ", timers) + answers, err, try_list = client.resolve(host, { qtype = typ }) + if not answers then + ngx.say("failed to resolve: ", err) + return + end + ngx.say("third address name: ", answers[1].name) + ngx.say("third try_list: ", tostring(try_list)) } } --- request GET /t --- response_body first address name: konghq.com -second address name: mockbin.org -workers: 6 -timers: 2 +first try_list: ["(short)konghq.com:1 - cache-miss","konghq.com:1 - cache-miss/querying"] +second address name: konghq.com +second try_list: ["(short)konghq.com:1 - cache-hit/stale","konghq.com:1 - cache-hit/stale/scheduled"] +third address name: konghq.com +third try_list: ["(short)konghq.com:1 - cache-hit/stale","konghq.com:1 - cache-hit/stale/in progress (async)"] --- no_error_log [error] dns lookup pool exceeded retries From 85101b83dd6e918d5e990ccb6d2b4ac4d349dbe6 Mon Sep 17 00:00:00 2001 From: Niklaus Schen <8458369+Water-Melon@users.noreply.github.com> Date: Wed, 15 Nov 2023 16:31:22 +0800 Subject: [PATCH 123/249] refactor(tools): separate IP-related functions from 
tool.utils (#12012) KAG-2958 --- kong-3.6.0-0.rockspec | 1 + kong/tools/ip.lua | 315 ++++++++++++++++++++++++++++++++++++++++++ kong/tools/utils.lua | 284 +------------------------------------ 3 files changed, 317 insertions(+), 283 deletions(-) create mode 100644 kong/tools/ip.lua diff --git a/kong-3.6.0-0.rockspec b/kong-3.6.0-0.rockspec index b787d85e6c93..0b7e0789f6a5 100644 --- a/kong-3.6.0-0.rockspec +++ b/kong-3.6.0-0.rockspec @@ -173,6 +173,7 @@ build = { ["kong.tools.rand"] = "kong/tools/rand.lua", ["kong.tools.system"] = "kong/tools/system.lua", ["kong.tools.time"] = "kong/tools/time.lua", + ["kong.tools.ip"] = "kong/tools/ip.lua", ["kong.runloop.handler"] = "kong/runloop/handler.lua", ["kong.runloop.events"] = "kong/runloop/events.lua", diff --git a/kong/tools/ip.lua b/kong/tools/ip.lua new file mode 100644 index 000000000000..c70108132597 --- /dev/null +++ b/kong/tools/ip.lua @@ -0,0 +1,315 @@ +local ipmatcher = require "resty.ipmatcher" +local pl_stringx = require "pl.stringx" + + +local type = type +local ipairs = ipairs +local tonumber = tonumber +local gsub = string.gsub +local sub = string.sub +local fmt = string.format +local lower = string.lower +local find = string.find +local split = pl_stringx.split + + +local _M = {} + + +local ipv4_prefixes = {} +for i = 0, 32 do + ipv4_prefixes[tostring(i)] = i +end + + +local ipv6_prefixes = {} +for i = 0, 128 do + ipv6_prefixes[tostring(i)] = i +end + + +local function split_cidr(cidr, prefixes) + local p = find(cidr, "/", 3, true) + if not p then + return + end + + return sub(cidr, 1, p - 1), prefixes[sub(cidr, p + 1)] +end + + +local function validate(input, f1, f2, prefixes) + if type(input) ~= "string" then + return false + end + + if prefixes then + local ip, prefix = split_cidr(input, prefixes) + if not ip or not prefix then + return false + end + + input = ip + end + + if f1(input) then + return true + end + + if f2 and f2(input) then + return true + end + + return false +end + + +function _M.is_valid_ipv4(ipv4) + return validate(ipv4, ipmatcher.parse_ipv4) +end + + +function _M.is_valid_ipv6(ipv6) + return validate(ipv6, ipmatcher.parse_ipv6) +end + + +function _M.is_valid_ip(ip) + return validate(ip, ipmatcher.parse_ipv4, ipmatcher.parse_ipv6) +end + + +function _M.is_valid_cidr_v4(cidr_v4) + return validate(cidr_v4, ipmatcher.parse_ipv4, nil, ipv4_prefixes) +end + + +function _M.is_valid_cidr_v6(cidr_v6) + return validate(cidr_v6, ipmatcher.parse_ipv6, nil, ipv6_prefixes) +end + + +function _M.is_valid_cidr(cidr) + return validate(cidr, _M.is_valid_cidr_v4, _M.is_valid_cidr_v6) +end + + +function _M.is_valid_ip_or_cidr_v4(ip_or_cidr_v4) + return validate(ip_or_cidr_v4, ipmatcher.parse_ipv4, _M.is_valid_cidr_v4) +end + + +function _M.is_valid_ip_or_cidr_v6(ip_or_cidr_v6) + return validate(ip_or_cidr_v6, ipmatcher.parse_ipv6, _M.is_valid_cidr_v6) +end + + +function _M.is_valid_ip_or_cidr(ip_or_cidr) + return validate(ip_or_cidr, _M.is_valid_ip, _M.is_valid_cidr) +end + + +--- checks the hostname type; ipv4, ipv6, or name. +-- Type is determined by exclusion, not by validation. So if it returns 'ipv6' then +-- it can only be an ipv6, but it is not necessarily a valid ipv6 address. +-- @param name the string to check (this may contain a portnumber) +-- @return string either; 'ipv4', 'ipv6', or 'name' +-- @usage hostname_type("123.123.123.123") --> "ipv4" +-- hostname_type("::1") --> "ipv6" +-- hostname_type("some::thing") --> "ipv6", but invalid... 
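-- Usage sketch for this new module (assumes it is reachable as "kong.tools.ip",
-- which is what the rockspec entry in this patch registers; the output comments
-- show expected values, not captured output):

local ip_tools = require "kong.tools.ip"

print(ip_tools.hostname_type("123.123.123.123"))  --> ipv4
print(ip_tools.hostname_type("::1"))              --> ipv6
print(ip_tools.hostname_type("example.com"))      --> name

print(ip_tools.is_valid_cidr("10.0.0.0/8"))       --> true
print(ip_tools.is_valid_cidr("10.0.0.0/33"))      --> false

print(ip_tools.normalize_ipv6("[::1]:8443"))
--> 0000:0000:0000:0000:0000:0000:0000:0001    8443

print(ip_tools.format_host(ip_tools.normalize_ip("001.002.003.004:123")))
--> 1.2.3.4:123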
+function _M.hostname_type(name) + local remainder, colons = gsub(name, ":", "") + if colons > 1 then + return "ipv6" + end + if remainder:match("^[%d%.]+$") then + return "ipv4" + end + return "name" +end + + +--- parses, validates and normalizes an ipv4 address. +-- @param address the string containing the address (formats; ipv4, ipv4:port) +-- @return normalized address (string) + port (number or nil), or alternatively nil+error +function _M.normalize_ipv4(address) + local a,b,c,d,port + if address:find(":", 1, true) then + -- has port number + a,b,c,d,port = address:match("^(%d%d?%d?)%.(%d%d?%d?)%.(%d%d?%d?)%.(%d%d?%d?):(%d+)$") + else + -- without port number + a,b,c,d,port = address:match("^(%d%d?%d?)%.(%d%d?%d?)%.(%d%d?%d?)%.(%d%d?%d?)$") + end + if not a then + return nil, "invalid ipv4 address: " .. address + end + a,b,c,d = tonumber(a), tonumber(b), tonumber(c), tonumber(d) + if a < 0 or a > 255 or b < 0 or b > 255 or c < 0 or + c > 255 or d < 0 or d > 255 then + return nil, "invalid ipv4 address: " .. address + end + if port then + port = tonumber(port) + if port > 65535 then + return nil, "invalid port number" + end + end + + return fmt("%d.%d.%d.%d",a,b,c,d), port +end + + +--- parses, validates and normalizes an ipv6 address. +-- @param address the string containing the address (formats; ipv6, [ipv6], [ipv6]:port) +-- @return normalized expanded address (string) + port (number or nil), or alternatively nil+error +function _M.normalize_ipv6(address) + local check, port = address:match("^(%b[])(.-)$") + if port == "" then + port = nil + end + if check then + check = check:sub(2, -2) -- drop the brackets + -- we have ipv6 in brackets, now get port if we got something left + if port then + port = port:match("^:(%d-)$") + if not port then + return nil, "invalid ipv6 address" + end + port = tonumber(port) + if port > 65535 then + return nil, "invalid port number" + end + end + else + -- no brackets, so full address only; no brackets, no port + check = address + port = nil + end + -- check ipv6 format and normalize + if check:sub(1,1) == ":" then + check = "0" .. check + end + if check:sub(-1,-1) == ":" then + check = check .. "0" + end + if check:find("::", 1, true) then + -- expand double colon + local _, count = gsub(check, ":", "") + local ins = ":" .. string.rep("0:", 8 - count) + check = gsub(check, "::", ins, 1) -- replace only 1 occurence! + end + local a,b,c,d,e,f,g,h = check:match("^(%x%x?%x?%x?):(%x%x?%x?%x?):(%x%x?%x?%x?):(%x%x?%x?%x?):(%x%x?%x?%x?):(%x%x?%x?%x?):(%x%x?%x?%x?):(%x%x?%x?%x?)$") + if not a then + -- not a valid IPv6 address + return nil, "invalid ipv6 address: " .. address + end + local zeros = "0000" + return lower(fmt("%s:%s:%s:%s:%s:%s:%s:%s", + zeros:sub(1, 4 - #a) .. a, + zeros:sub(1, 4 - #b) .. b, + zeros:sub(1, 4 - #c) .. c, + zeros:sub(1, 4 - #d) .. d, + zeros:sub(1, 4 - #e) .. e, + zeros:sub(1, 4 - #f) .. f, + zeros:sub(1, 4 - #g) .. g, + zeros:sub(1, 4 - #h) .. h)), port +end + + +--- parses and validates a hostname. +-- @param address the string containing the hostname (formats; name, name:port) +-- @return hostname (string) + port (number or nil), or alternatively nil+error +function _M.check_hostname(address) + local name = address + local port = address:match(":(%d+)$") + if port then + name = name:sub(1, -(#port+2)) + port = tonumber(port) + if port > 65535 then + return nil, "invalid port number" + end + end + local match = name:match("^[%d%a%-%.%_]+$") + if match == nil then + return nil, "invalid hostname: " .. 
address + end + + -- Reject prefix/trailing dashes and dots in each segment + -- notes: + -- - punycode allows prefixed dash, if the characters before the dash are escaped + -- - FQDN can end in dots + for index, segment in ipairs(split(name, ".")) do + if segment:match("-$") or segment:match("^%.") or segment:match("%.$") or + (segment == "" and index ~= #split(name, ".")) then + return nil, "invalid hostname: " .. address + end + end + return name, port +end + + +local verify_types = { + ipv4 = _M.normalize_ipv4, + ipv6 = _M.normalize_ipv6, + name = _M.check_hostname, +} + + +--- verifies and normalizes ip adresses and hostnames. Supports ipv4, ipv4:port, ipv6, [ipv6]:port, name, name:port. +-- Returned ipv4 addresses will have no leading zero's, ipv6 will be fully expanded without brackets. +-- Note: a name will not be normalized! +-- @param address string containing the address +-- @return table with the following fields: `host` (string; normalized address, or name), `type` (string; 'ipv4', 'ipv6', 'name'), and `port` (number or nil), or alternatively nil+error on invalid input +function _M.normalize_ip(address) + local atype = _M.hostname_type(address) + local addr, port = verify_types[atype](address) + if not addr then + return nil, port + end + return { + type = atype, + host = addr, + port = port, + } +end + + +--- Formats an ip address or hostname with an (optional) port for use in urls. +-- Supports ipv4, ipv6 and names. +-- +-- Explicitly accepts 'nil+error' as input, to pass through any errors from the normalizing and name checking functions. +-- @param p1 address to format, either string with name/ip, table returned from `normalize_ip`, or from the `socket.url` library. +-- @param p2 port (optional) if p1 is a table, then this port will be inserted if no port-field is in the table +-- @return formatted address or nil+error +-- @usage +-- local addr, err = format_ip(normalize_ip("001.002.003.004:123")) --> "1.2.3.4:123" +-- local addr, err = format_ip(normalize_ip("::1")) --> "[0000:0000:0000:0000:0000:0000:0000:0001]" +-- local addr, err = format_ip("::1", 80)) --> "[::1]:80" +-- local addr, err = format_ip(check_hostname("//bad .. name\\")) --> nil, "invalid hostname: ... " +function _M.format_host(p1, p2) + local t = type(p1) + if t == "nil" then + return p1, p2 -- just pass through any errors passed in + end + local host, port, typ + if t == "table" then + port = p1.port or p2 + host = p1.host + typ = p1.type or _M.hostname_type(host) + elseif t == "string" then + port = p2 + host = p1 + typ = _M.hostname_type(host) + else + return nil, "cannot format type '" .. t .. "'" + end + if typ == "ipv6" and not find(host, "[", nil, true) then + return "[" .. _M.normalize_ipv6(host) .. "]" .. (port and ":" .. port or "") + else + return host .. (port and ":" .. 
port or "") + end +end + + +return _M; diff --git a/kong/tools/utils.lua b/kong/tools/utils.lua index 397c498f9479..2a5ed9378acc 100644 --- a/kong/tools/utils.lua +++ b/kong/tools/utils.lua @@ -20,10 +20,8 @@ local tonumber = tonumber local sort = table.sort local concat = table.concat local insert = table.insert -local lower = string.lower local fmt = string.format local find = string.find -local gsub = string.gsub local join = pl_stringx.join local split = pl_stringx.split local re_match = ngx.re.match @@ -254,287 +252,6 @@ function _M.load_module_if_exists(module_name) end -do - local ipmatcher = require "resty.ipmatcher" - local sub = string.sub - - local ipv4_prefixes = {} - for i = 0, 32 do - ipv4_prefixes[tostring(i)] = i - end - - local ipv6_prefixes = {} - for i = 0, 128 do - ipv6_prefixes[tostring(i)] = i - end - - local function split_cidr(cidr, prefixes) - local p = find(cidr, "/", 3, true) - if not p then - return - end - - return sub(cidr, 1, p - 1), prefixes[sub(cidr, p + 1)] - end - - local validate = function(input, f1, f2, prefixes) - if type(input) ~= "string" then - return false - end - - if prefixes then - local ip, prefix = split_cidr(input, prefixes) - if not ip or not prefix then - return false - end - - input = ip - end - - if f1(input) then - return true - end - - if f2 and f2(input) then - return true - end - - return false - end - - _M.is_valid_ipv4 = function(ipv4) - return validate(ipv4, ipmatcher.parse_ipv4) - end - - _M.is_valid_ipv6 = function(ipv6) - return validate(ipv6, ipmatcher.parse_ipv6) - end - - _M.is_valid_ip = function(ip) - return validate(ip, ipmatcher.parse_ipv4, ipmatcher.parse_ipv6) - end - - _M.is_valid_cidr_v4 = function(cidr_v4) - return validate(cidr_v4, ipmatcher.parse_ipv4, nil, ipv4_prefixes) - end - - _M.is_valid_cidr_v6 = function(cidr_v6) - return validate(cidr_v6, ipmatcher.parse_ipv6, nil, ipv6_prefixes) - end - - _M.is_valid_cidr = function(cidr) - return validate(cidr, _M.is_valid_cidr_v4, _M.is_valid_cidr_v6) - end - - _M.is_valid_ip_or_cidr_v4 = function(ip_or_cidr_v4) - return validate(ip_or_cidr_v4, ipmatcher.parse_ipv4, _M.is_valid_cidr_v4) - end - - _M.is_valid_ip_or_cidr_v6 = function(ip_or_cidr_v6) - return validate(ip_or_cidr_v6, ipmatcher.parse_ipv6, _M.is_valid_cidr_v6) - end - - _M.is_valid_ip_or_cidr = function(ip_or_cidr) - return validate(ip_or_cidr, _M.is_valid_ip, _M.is_valid_cidr) - end -end - - ---- checks the hostname type; ipv4, ipv6, or name. --- Type is determined by exclusion, not by validation. So if it returns 'ipv6' then --- it can only be an ipv6, but it is not necessarily a valid ipv6 address. --- @param name the string to check (this may contain a portnumber) --- @return string either; 'ipv4', 'ipv6', or 'name' --- @usage hostname_type("123.123.123.123") --> "ipv4" --- hostname_type("::1") --> "ipv6" --- hostname_type("some::thing") --> "ipv6", but invalid... -_M.hostname_type = function(name) - local remainder, colons = gsub(name, ":", "") - if colons > 1 then - return "ipv6" - end - if remainder:match("^[%d%.]+$") then - return "ipv4" - end - return "name" -end - ---- parses, validates and normalizes an ipv4 address. 
--- @param address the string containing the address (formats; ipv4, ipv4:port) --- @return normalized address (string) + port (number or nil), or alternatively nil+error -_M.normalize_ipv4 = function(address) - local a,b,c,d,port - if address:find(":", 1, true) then - -- has port number - a,b,c,d,port = address:match("^(%d%d?%d?)%.(%d%d?%d?)%.(%d%d?%d?)%.(%d%d?%d?):(%d+)$") - else - -- without port number - a,b,c,d,port = address:match("^(%d%d?%d?)%.(%d%d?%d?)%.(%d%d?%d?)%.(%d%d?%d?)$") - end - if not a then - return nil, "invalid ipv4 address: " .. address - end - a,b,c,d = tonumber(a), tonumber(b), tonumber(c), tonumber(d) - if a < 0 or a > 255 or b < 0 or b > 255 or c < 0 or - c > 255 or d < 0 or d > 255 then - return nil, "invalid ipv4 address: " .. address - end - if port then - port = tonumber(port) - if port > 65535 then - return nil, "invalid port number" - end - end - - return fmt("%d.%d.%d.%d",a,b,c,d), port -end - ---- parses, validates and normalizes an ipv6 address. --- @param address the string containing the address (formats; ipv6, [ipv6], [ipv6]:port) --- @return normalized expanded address (string) + port (number or nil), or alternatively nil+error -_M.normalize_ipv6 = function(address) - local check, port = address:match("^(%b[])(.-)$") - if port == "" then - port = nil - end - if check then - check = check:sub(2, -2) -- drop the brackets - -- we have ipv6 in brackets, now get port if we got something left - if port then - port = port:match("^:(%d-)$") - if not port then - return nil, "invalid ipv6 address" - end - port = tonumber(port) - if port > 65535 then - return nil, "invalid port number" - end - end - else - -- no brackets, so full address only; no brackets, no port - check = address - port = nil - end - -- check ipv6 format and normalize - if check:sub(1,1) == ":" then - check = "0" .. check - end - if check:sub(-1,-1) == ":" then - check = check .. "0" - end - if check:find("::", 1, true) then - -- expand double colon - local _, count = gsub(check, ":", "") - local ins = ":" .. string.rep("0:", 8 - count) - check = gsub(check, "::", ins, 1) -- replace only 1 occurence! - end - local a,b,c,d,e,f,g,h = check:match("^(%x%x?%x?%x?):(%x%x?%x?%x?):(%x%x?%x?%x?):(%x%x?%x?%x?):(%x%x?%x?%x?):(%x%x?%x?%x?):(%x%x?%x?%x?):(%x%x?%x?%x?)$") - if not a then - -- not a valid IPv6 address - return nil, "invalid ipv6 address: " .. address - end - local zeros = "0000" - return lower(fmt("%s:%s:%s:%s:%s:%s:%s:%s", - zeros:sub(1, 4 - #a) .. a, - zeros:sub(1, 4 - #b) .. b, - zeros:sub(1, 4 - #c) .. c, - zeros:sub(1, 4 - #d) .. d, - zeros:sub(1, 4 - #e) .. e, - zeros:sub(1, 4 - #f) .. f, - zeros:sub(1, 4 - #g) .. g, - zeros:sub(1, 4 - #h) .. h)), port -end - ---- parses and validates a hostname. --- @param address the string containing the hostname (formats; name, name:port) --- @return hostname (string) + port (number or nil), or alternatively nil+error -_M.check_hostname = function(address) - local name = address - local port = address:match(":(%d+)$") - if port then - name = name:sub(1, -(#port+2)) - port = tonumber(port) - if port > 65535 then - return nil, "invalid port number" - end - end - local match = name:match("^[%d%a%-%.%_]+$") - if match == nil then - return nil, "invalid hostname: " .. 
address - end - - -- Reject prefix/trailing dashes and dots in each segment - -- notes: - -- - punycode allows prefixed dash, if the characters before the dash are escaped - -- - FQDN can end in dots - for index, segment in ipairs(split(name, ".")) do - if segment:match("-$") or segment:match("^%.") or segment:match("%.$") or - (segment == "" and index ~= #split(name, ".")) then - return nil, "invalid hostname: " .. address - end - end - return name, port -end - -local verify_types = { - ipv4 = _M.normalize_ipv4, - ipv6 = _M.normalize_ipv6, - name = _M.check_hostname, -} ---- verifies and normalizes ip adresses and hostnames. Supports ipv4, ipv4:port, ipv6, [ipv6]:port, name, name:port. --- Returned ipv4 addresses will have no leading zero's, ipv6 will be fully expanded without brackets. --- Note: a name will not be normalized! --- @param address string containing the address --- @return table with the following fields: `host` (string; normalized address, or name), `type` (string; 'ipv4', 'ipv6', 'name'), and `port` (number or nil), or alternatively nil+error on invalid input -_M.normalize_ip = function(address) - local atype = _M.hostname_type(address) - local addr, port = verify_types[atype](address) - if not addr then - return nil, port - end - return { - type = atype, - host = addr, - port = port - } -end - ---- Formats an ip address or hostname with an (optional) port for use in urls. --- Supports ipv4, ipv6 and names. --- --- Explicitly accepts 'nil+error' as input, to pass through any errors from the normalizing and name checking functions. --- @param p1 address to format, either string with name/ip, table returned from `normalize_ip`, or from the `socket.url` library. --- @param p2 port (optional) if p1 is a table, then this port will be inserted if no port-field is in the table --- @return formatted address or nil+error --- @usage --- local addr, err = format_ip(normalize_ip("001.002.003.004:123")) --> "1.2.3.4:123" --- local addr, err = format_ip(normalize_ip("::1")) --> "[0000:0000:0000:0000:0000:0000:0000:0001]" --- local addr, err = format_ip("::1", 80)) --> "[::1]:80" --- local addr, err = format_ip(check_hostname("//bad .. name\\")) --> nil, "invalid hostname: ... " -_M.format_host = function(p1, p2) - local t = type(p1) - if t == "nil" then - return p1, p2 -- just pass through any errors passed in - end - local host, port, typ - if t == "table" then - port = p1.port or p2 - host = p1.host - typ = p1.type or _M.hostname_type(host) - elseif t == "string" then - port = p2 - host = p1 - typ = _M.hostname_type(host) - else - return nil, "cannot format type '" .. t .. "'" - end - if typ == "ipv6" and not find(host, "[", nil, true) then - return "[" .. _M.normalize_ipv6(host) .. "]" .. (port and ":" .. port or "") - else - return host .. (port and ":" .. 
port or "") - end -end - local CONTROLS = [[\x00-\x1F\x7F]] local HIGHBIT = [[\x80-\xFF]] local SEPARATORS = [==[ \t()<>@,;:\\\"\/?={}\[\]]==] @@ -973,6 +690,7 @@ do "kong.tools.rand", "kong.tools.system", "kong.tools.time", + "kong.tools.ip", } for _, str in ipairs(modules) do From df2105d826ad121a33ac73e36c5b59efa4d64d0e Mon Sep 17 00:00:00 2001 From: Niklaus Schen <8458369+Water-Melon@users.noreply.github.com> Date: Wed, 15 Nov 2023 17:22:36 +0800 Subject: [PATCH 124/249] refactor(tools): separate module-related functions from tool.utils (#12018) KAG-2960 --- kong-3.6.0-0.rockspec | 1 + kong/tools/module.lua | 32 ++++++++++++++++++++++++++++++++ kong/tools/utils.lua | 24 ++---------------------- 3 files changed, 35 insertions(+), 22 deletions(-) create mode 100644 kong/tools/module.lua diff --git a/kong-3.6.0-0.rockspec b/kong-3.6.0-0.rockspec index 0b7e0789f6a5..0ec4c9516df7 100644 --- a/kong-3.6.0-0.rockspec +++ b/kong-3.6.0-0.rockspec @@ -173,6 +173,7 @@ build = { ["kong.tools.rand"] = "kong/tools/rand.lua", ["kong.tools.system"] = "kong/tools/system.lua", ["kong.tools.time"] = "kong/tools/time.lua", + ["kong.tools.module"] = "kong/tools/module.lua", ["kong.tools.ip"] = "kong/tools/ip.lua", ["kong.runloop.handler"] = "kong/runloop/handler.lua", diff --git a/kong/tools/module.lua b/kong/tools/module.lua new file mode 100644 index 000000000000..b41c8d038ee1 --- /dev/null +++ b/kong/tools/module.lua @@ -0,0 +1,32 @@ +local type = type +local xpcall = xpcall +local require = require +local error = error +local find = string.find + + +local _M = {} + + +--- Try to load a module. +-- Will not throw an error if the module was not found, but will throw an error if the +-- loading failed for another reason (eg: syntax error). +-- @param module_name Path of the module to load (ex: kong.plugins.keyauth.api). +-- @return success A boolean indicating whether the module was found. +-- @return module The retrieved module, or the error in case of a failure +function _M.load_module_if_exists(module_name) + local status, res = xpcall(function() + return require(module_name) + end, debug.traceback) + if status then + return true, res + -- Here we match any character because if a module has a dash '-' in its name, we would need to escape it. + elseif type(res) == "string" and find(res, "module '" .. module_name .. "' not found", nil, true) then + return false, res + else + error("error loading module '" .. module_name .. "':\n" .. res) + end +end + + +return _M diff --git a/kong/tools/utils.lua b/kong/tools/utils.lua index 2a5ed9378acc..b7d700b92df6 100644 --- a/kong/tools/utils.lua +++ b/kong/tools/utils.lua @@ -12,6 +12,7 @@ local pl_stringx = require "pl.stringx" local pl_path = require "pl.path" local pl_file = require "pl.file" + local type = type local pairs = pairs local ipairs = ipairs @@ -21,7 +22,6 @@ local sort = table.sort local concat = table.concat local insert = table.insert local fmt = string.format -local find = string.find local join = pl_stringx.join local split = pl_stringx.split local re_match = ngx.re.match @@ -231,27 +231,6 @@ _M.check_https = function(trusted_ip, allow_terminated) end ---- Try to load a module. --- Will not throw an error if the module was not found, but will throw an error if the --- loading failed for another reason (eg: syntax error). --- @param module_name Path of the module to load (ex: kong.plugins.keyauth.api). --- @return success A boolean indicating whether the module was found. 
--- @return module The retrieved module, or the error in case of a failure -function _M.load_module_if_exists(module_name) - local status, res = xpcall(function() - return require(module_name) - end, debug.traceback) - if status then - return true, res - -- Here we match any character because if a module has a dash '-' in its name, we would need to escape it. - elseif type(res) == "string" and find(res, "module '" .. module_name .. "' not found", nil, true) then - return false, res - else - error("error loading module '" .. module_name .. "':\n" .. res) - end -end - - local CONTROLS = [[\x00-\x1F\x7F]] local HIGHBIT = [[\x80-\xFF]] local SEPARATORS = [==[ \t()<>@,;:\\\"\/?={}\[\]]==] @@ -690,6 +669,7 @@ do "kong.tools.rand", "kong.tools.system", "kong.tools.time", + "kong.tools.module", "kong.tools.ip", } From 7e4c654aef13ef4137b6d33260ab7f50461e497b Mon Sep 17 00:00:00 2001 From: Joshua Schmid Date: Wed, 15 Nov 2023 09:36:37 +0100 Subject: [PATCH 125/249] chore: trigger backport on label addition Signed-off-by: Joshua Schmid --- .github/workflows/backport.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/backport.yml b/.github/workflows/backport.yml index 290eb67c8912..2d2d2c1d8f11 100644 --- a/.github/workflows/backport.yml +++ b/.github/workflows/backport.yml @@ -1,7 +1,7 @@ name: Backport on: pull_request_target: - types: [closed] + types: [closed, labeled] # runs when the pull request is closed/merged or labeled (to trigger a backport in hindsight) permissions: contents: write # so it can comment pull-requests: write # so it can create pull requests From 12504c9fad0620e90c3e778b2bcac032c7374a0f Mon Sep 17 00:00:00 2001 From: Niklaus Schen <8458369+Water-Melon@users.noreply.github.com> Date: Wed, 15 Nov 2023 18:22:44 +0800 Subject: [PATCH 126/249] refactor(tools): move topological_sort from tools.utils to db.sort (#12002) KAG-2959 --- kong-3.6.0-0.rockspec | 1 + kong/db/schema/topological_sort.lua | 2 +- kong/db/strategies/postgres/connector.lua | 3 +- kong/db/utils.lua | 73 +++++++++++++++++++++++ kong/tools/utils.lua | 70 ---------------------- spec/01-unit/05-utils_spec.lua | 2 +- 6 files changed, 78 insertions(+), 73 deletions(-) create mode 100644 kong/db/utils.lua diff --git a/kong-3.6.0-0.rockspec b/kong-3.6.0-0.rockspec index 0ec4c9516df7..0ce47bb66509 100644 --- a/kong-3.6.0-0.rockspec +++ b/kong-3.6.0-0.rockspec @@ -200,6 +200,7 @@ build = { ["kong.workspaces"] = "kong/workspaces/init.lua", ["kong.db"] = "kong/db/init.lua", + ["kong.db.utils"] = "kong/db/utils.lua", ["kong.db.errors"] = "kong/db/errors.lua", ["kong.db.iteration"] = "kong/db/iteration.lua", ["kong.db.dao"] = "kong/db/dao/init.lua", diff --git a/kong/db/schema/topological_sort.lua b/kong/db/schema/topological_sort.lua index ed74e8e3bc46..e968a9e7b9ba 100644 --- a/kong/db/schema/topological_sort.lua +++ b/kong/db/schema/topological_sort.lua @@ -1,5 +1,5 @@ local constants = require "kong.constants" -local utils = require "kong.tools.utils" +local utils = require "kong.db.utils" local utils_toposort = utils.topological_sort diff --git a/kong/db/strategies/postgres/connector.lua b/kong/db/strategies/postgres/connector.lua index 703a91bb889e..102259dc5beb 100644 --- a/kong/db/strategies/postgres/connector.lua +++ b/kong/db/strategies/postgres/connector.lua @@ -6,6 +6,7 @@ local stringx = require "pl.stringx" local semaphore = require "ngx.semaphore" local kong_global = require "kong.global" local constants = require "kong.constants" +local db_utils = require "kong.db.utils" 
local setmetatable = setmetatable @@ -28,7 +29,7 @@ local log = ngx.log local match = string.match local fmt = string.format local sub = string.sub -local utils_toposort = utils.topological_sort +local utils_toposort = db_utils.topological_sort local insert = table.insert local table_merge = utils.table_merge diff --git a/kong/db/utils.lua b/kong/db/utils.lua new file mode 100644 index 000000000000..9476c07c22eb --- /dev/null +++ b/kong/db/utils.lua @@ -0,0 +1,73 @@ +local insert = table.insert + + +local _M = {} + + +local function visit(current, neighbors_map, visited, marked, sorted) + if visited[current] then + return true + end + + if marked[current] then + return nil, "Cycle detected, cannot sort topologically" + end + + marked[current] = true + + local schemas_pointing_to_current = neighbors_map[current] + if schemas_pointing_to_current then + local neighbor, ok, err + for i = 1, #schemas_pointing_to_current do + neighbor = schemas_pointing_to_current[i] + ok, err = visit(neighbor, neighbors_map, visited, marked, sorted) + if not ok then + return nil, err + end + end + end + + marked[current] = false + + visited[current] = true + + insert(sorted, 1, current) + + return true +end + + +function _M.topological_sort(items, get_neighbors) + local neighbors_map = {} + local source, destination + local neighbors + for i = 1, #items do + source = items[i] -- services + neighbors = get_neighbors(source) + for j = 1, #neighbors do + destination = neighbors[j] --routes + neighbors_map[destination] = neighbors_map[destination] or {} + insert(neighbors_map[destination], source) + end + end + + local sorted = {} + local visited = {} + local marked = {} + + local current, ok, err + for i = 1, #items do + current = items[i] + if not visited[current] and not marked[current] then + ok, err = visit(current, neighbors_map, visited, marked, sorted) + if not ok then + return nil, err + end + end + end + + return sorted +end + + +return _M diff --git a/kong/tools/utils.lua b/kong/tools/utils.lua index b7d700b92df6..f8579fb8e0da 100644 --- a/kong/tools/utils.lua +++ b/kong/tools/utils.lua @@ -20,7 +20,6 @@ local tostring = tostring local tonumber = tonumber local sort = table.sort local concat = table.concat -local insert = table.insert local fmt = string.format local join = pl_stringx.join local split = pl_stringx.split @@ -590,75 +589,6 @@ _M.get_response_type = get_response_type _M.get_error_template = get_error_template -local topological_sort do - - local function visit(current, neighbors_map, visited, marked, sorted) - if visited[current] then - return true - end - - if marked[current] then - return nil, "Cycle detected, cannot sort topologically" - end - - marked[current] = true - - local schemas_pointing_to_current = neighbors_map[current] - if schemas_pointing_to_current then - local neighbor, ok, err - for i = 1, #schemas_pointing_to_current do - neighbor = schemas_pointing_to_current[i] - ok, err = visit(neighbor, neighbors_map, visited, marked, sorted) - if not ok then - return nil, err - end - end - end - - marked[current] = false - - visited[current] = true - - insert(sorted, 1, current) - - return true - end - - topological_sort = function(items, get_neighbors) - local neighbors_map = {} - local source, destination - local neighbors - for i = 1, #items do - source = items[i] -- services - neighbors = get_neighbors(source) - for j = 1, #neighbors do - destination = neighbors[j] --routes - neighbors_map[destination] = neighbors_map[destination] or {} - insert(neighbors_map[destination], 
source) - end - end - - local sorted = {} - local visited = {} - local marked = {} - - local current, ok, err - for i = 1, #items do - current = items[i] - if not visited[current] and not marked[current] then - ok, err = visit(current, neighbors_map, visited, marked, sorted) - if not ok then - return nil, err - end - end - end - - return sorted - end -end -_M.topological_sort = topological_sort - - do local modules = { "kong.tools.table", diff --git a/spec/01-unit/05-utils_spec.lua b/spec/01-unit/05-utils_spec.lua index 05deee5ab434..dbd9944cfd8f 100644 --- a/spec/01-unit/05-utils_spec.lua +++ b/spec/01-unit/05-utils_spec.lua @@ -831,7 +831,7 @@ describe("Utils", function() describe("topological_sort", function() local get_neighbors = function(x) return x end - local ts = utils.topological_sort + local ts = require("kong.db.utils").topological_sort it("it puts destinations first", function() local a = { id = "a" } From c468b77efae40c044031760120889af37fe8cb0d Mon Sep 17 00:00:00 2001 From: Joshua Schmid Date: Wed, 15 Nov 2023 10:58:47 +0100 Subject: [PATCH 127/249] chore: add write permission for backport action Signed-off-by: Joshua Schmid --- .github/workflows/backport.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/backport.yml b/.github/workflows/backport.yml index 2d2d2c1d8f11..901580fe073b 100644 --- a/.github/workflows/backport.yml +++ b/.github/workflows/backport.yml @@ -5,6 +5,7 @@ on: permissions: contents: write # so it can comment pull-requests: write # so it can create pull requests + actions: write jobs: backport: name: Backport From 13dbed38b62f8f092dcb4616aba929db693c2c4b Mon Sep 17 00:00:00 2001 From: oowl Date: Thu, 16 Nov 2023 11:11:51 +0800 Subject: [PATCH 128/249] feat(plugin/azure-function): clear upstream uri and request uri inject plugin logic (#11850) KAG-2841 --- ...fix-upstream-uri-azure-function-plugin.yml | 3 ++ kong/plugins/azure-functions/handler.lua | 27 ++--------- .../35-azure-functions/01-access_spec.lua | 45 ++++++++++++++++++- 3 files changed, 51 insertions(+), 24 deletions(-) create mode 100644 changelog/unreleased/kong/fix-upstream-uri-azure-function-plugin.yml diff --git a/changelog/unreleased/kong/fix-upstream-uri-azure-function-plugin.yml b/changelog/unreleased/kong/fix-upstream-uri-azure-function-plugin.yml new file mode 100644 index 000000000000..7598254143ce --- /dev/null +++ b/changelog/unreleased/kong/fix-upstream-uri-azure-function-plugin.yml @@ -0,0 +1,3 @@ +message: "**azure-functions**: azure-functions plugin now eliminates upstream/request URI and only use `routeprefix` configuration field to construct request path when requesting Azure API" +type: breaking_change +scope: Plugin diff --git a/kong/plugins/azure-functions/handler.lua b/kong/plugins/azure-functions/handler.lua index f523330c5ae9..1fdcb664330c 100644 --- a/kong/plugins/azure-functions/handler.lua +++ b/kong/plugins/azure-functions/handler.lua @@ -6,8 +6,6 @@ local kong_meta = require "kong.meta" local kong = kong local fmt = string.format -local sub = string.sub -local find = string.find local byte = string.byte local match = string.match local var = ngx.var @@ -26,10 +24,6 @@ local azure = { function azure:access(conf) local path do - -- strip any query args - local upstream_uri = var.upstream_uri or var.request_uri - local s = find(upstream_uri, "?", 1, true) - upstream_uri = s and sub(upstream_uri, 1, s - 1) or upstream_uri -- strip pre-/postfix slashes path = match(conf.routeprefix or "", STRIP_SLASHES_PATTERN) @@ -39,24 +33,11 @@ function 
azure:access(conf) path = "/" .. path end - path = path .. "/" .. func - - -- concatenate path with upstream uri - local upstream_uri_first_byte = byte(upstream_uri, 1) - local path_last_byte = byte(path, -1) - if path_last_byte == SLASH then - if upstream_uri_first_byte == SLASH then - path = path .. sub(upstream_uri, 2, -1) - else - path = path .. upstream_uri - end - + local functionname_first_byte = byte(func, 1) + if functionname_first_byte == SLASH then + path = path .. func else - if upstream_uri_first_byte == SLASH then - path = path .. upstream_uri - elseif upstream_uri ~= "" then - path = path .. "/" .. upstream_uri - end + path = path .. "/" .. func end end diff --git a/spec/03-plugins/35-azure-functions/01-access_spec.lua b/spec/03-plugins/35-azure-functions/01-access_spec.lua index 9907c7e0d0b3..7208cb9985bf 100644 --- a/spec/03-plugins/35-azure-functions/01-access_spec.lua +++ b/spec/03-plugins/35-azure-functions/01-access_spec.lua @@ -98,6 +98,36 @@ for _, strategy in helpers.each_strategy() do dns_mock = helpers.dns_mock.new() } + local route3 = db.routes:insert { + hosts = { "azure3.com" }, + protocols = { "http", "https" }, + service = db.services:insert( + { + name = "azure3", + host = "azure.example.com", -- just mock service, it will not be requested + port = 80, + path = "/request", + } + ), + } + + -- this plugin definition results in an upstream url to + -- http://mockbin.org/request + -- which will echo the request for inspection + db.plugins:insert { + name = "azure-functions", + route = { id = route3.id }, + config = { + https = false, + appname = "azure", + hostdomain = "example.com", + routeprefix = "request", + functionname = "test-func-name", + apikey = "anything_but_an_API_key", + clientid = "and_no_clientid", + }, + } + fixtures.dns_mock:A({ name = "azure.example.com", address = "127.0.0.1", @@ -169,7 +199,7 @@ for _, strategy in helpers.each_strategy() do assert.response(res).has.status(200) local json = assert.response(res).has.jsonbody() - assert.matches("/request/test%-func%-name/and/then/some", json.uri) + assert.matches("/request/test%-func%-name", json.uri) end) it("passes the method", function() @@ -243,5 +273,18 @@ for _, strategy in helpers.each_strategy() do assert(tonumber(res.headers["Content-Length"]) > 100) end) + it("service upstream uri and request uri can not influence azure function", function() + local res = assert(proxy_client:send { + method = "GET", + path = "/", + query = { hello = "world" }, + headers = { + ["Host"] = "azure3.com" + } + }) + + assert(tonumber(res.headers["Content-Length"]) > 100) + end) + end) -- describe end From 37417735d548d181ff3086e3241b18d1c0029dd1 Mon Sep 17 00:00:00 2001 From: Chrono Date: Thu, 16 Nov 2023 16:28:13 +0800 Subject: [PATCH 129/249] refactor(router): move some declarations into local scope (#12014) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit It is a small improvement of #12008,moving some declarations into do end block. 
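For readers unfamiliar with the pattern this commit applies, it amounts to confining helper locals to the `do ... end` block that uses them instead of declaring them at module top level. A minimal generic sketch of that scoping pattern (illustrative only, not the actual router code) is:

local _M = {}

do
  -- these locals are upvalues of report() only; they are not visible
  -- to the rest of the module, which keeps the top-level scope small
  local fmt = string.format
  local count = 0

  function _M.report(name)
    count = count + 1
    return fmt("%s seen %d time(s)", name, count)
  end
end

return _M
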
--- kong/router/utils.lua | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/kong/router/utils.lua b/kong/router/utils.lua index e1b8d44381f4..a70eb5077c96 100644 --- a/kong/router/utils.lua +++ b/kong/router/utils.lua @@ -1,15 +1,15 @@ local constants = require("kong.constants") local hostname_type = require("kong.tools.utils").hostname_type local normalize = require("kong.tools.uri").normalize -local yield = require("kong.tools.yield").yield -local type = type +local type = type local error = error +local ipairs = ipairs local find = string.find local sub = string.sub local byte = string.byte -local get_phase = ngx.get_phase + local SLASH = byte("/") @@ -251,7 +251,9 @@ local phonehome_statistics do local reports = require("kong.reports") local nkeys = require("table.nkeys") + local yield = require("kong.tools.yield").yield local worker_id = ngx.worker.id + local get_phase = ngx.get_phase local TILDE = byte("~") is_regex_magic = function(path) From 9d30e2b866f34dc64306331bd99e748a4386dc83 Mon Sep 17 00:00:00 2001 From: Niklaus Schen <8458369+Water-Melon@users.noreply.github.com> Date: Thu, 16 Nov 2023 17:06:49 +0800 Subject: [PATCH 130/249] refactor(tools): separate HTTP-related function from `kong.tools.utils` to `kong.tools.http` (#12027) KAG-2957 --- kong-3.6.0-0.rockspec | 1 + kong/tools/http.lua | 530 ++++++++++++++++++++++++++++++++++++++++++ kong/tools/utils.lua | 526 +---------------------------------------- 3 files changed, 537 insertions(+), 520 deletions(-) create mode 100644 kong/tools/http.lua diff --git a/kong-3.6.0-0.rockspec b/kong-3.6.0-0.rockspec index 0ce47bb66509..11fa1100bfaa 100644 --- a/kong-3.6.0-0.rockspec +++ b/kong-3.6.0-0.rockspec @@ -175,6 +175,7 @@ build = { ["kong.tools.time"] = "kong/tools/time.lua", ["kong.tools.module"] = "kong/tools/module.lua", ["kong.tools.ip"] = "kong/tools/ip.lua", + ["kong.tools.http"] = "kong/tools/http.lua", ["kong.runloop.handler"] = "kong/runloop/handler.lua", ["kong.runloop.events"] = "kong/runloop/events.lua", diff --git a/kong/tools/http.lua b/kong/tools/http.lua new file mode 100644 index 000000000000..621dd5f53d2f --- /dev/null +++ b/kong/tools/http.lua @@ -0,0 +1,530 @@ +local pl_stringx = require "pl.stringx" +local pl_path = require "pl.path" +local pl_file = require "pl.file" +local str = require "kong.tools.string" + + +local type = type +local pairs = pairs +local ipairs = ipairs +local tostring = tostring +local tonumber = tonumber +local setmetatable = setmetatable +local sort = table.sort +local concat = table.concat +local fmt = string.format +local join = pl_stringx.join +local split = pl_stringx.split +local re_match = ngx.re.match + + +local _M = {} + + +do + local url = require "socket.url" + + + --- URL escape and format key and value + -- values should be already decoded or the `raw` option should be passed to prevent double-encoding + local function encode_args_value(key, value, raw) + if not raw then + key = url.escape(key) + end + if value ~= nil then + if not raw then + value = url.escape(value) + end + return fmt("%s=%s", key, value) + else + return key + end + end + + + local function compare_keys(a, b) + local ta = type(a) + if ta == type(b) then + return a < b + end + return ta == "number" -- numbers go first, then the rest of keys (usually strings) + end + + + -- Recursively URL escape and format key and value + -- Handles nested arrays and tables + local function recursive_encode_args(parent_key, value, raw, no_array_indexes, query) + local sub_keys = {} + for sk in 
pairs(value) do + sub_keys[#sub_keys + 1] = sk + end + sort(sub_keys, compare_keys) + + local sub_value, next_sub_key + for _, sub_key in ipairs(sub_keys) do + sub_value = value[sub_key] + + if type(sub_key) == "number" then + if no_array_indexes then + next_sub_key = parent_key .. "[]" + else + next_sub_key = ("%s[%s]"):format(parent_key, tostring(sub_key)) + end + else + next_sub_key = ("%s.%s"):format(parent_key, tostring(sub_key)) + end + + if type(sub_value) == "table" then + recursive_encode_args(next_sub_key, sub_value, raw, no_array_indexes, query) + else + query[#query+1] = encode_args_value(next_sub_key, sub_value, raw) + end + end + end + + + local ngx_null = ngx.null + + + --- Encode a Lua table to a querystring + -- Tries to mimic ngx_lua's `ngx.encode_args`, but has differences: + -- * It percent-encodes querystring values. + -- * It also supports encoding for bodies (only because it is used in http_client for specs. + -- * It encodes arrays like Lapis instead of like ngx.encode_args to allow interacting with Lapis + -- * It encodes ngx.null as empty strings + -- * It encodes true and false as "true" and "false" + -- * It is capable of encoding nested data structures: + -- * An array access is encoded as `arr[1]` + -- * A struct access is encoded as `struct.field` + -- * Nested structures can use both: `arr[1].field[3]` + -- @see https://github.com/Mashape/kong/issues/749 + -- @param[type=table] args A key/value table containing the query args to encode. + -- @param[type=boolean] raw If true, will not percent-encode any key/value and will ignore special boolean rules. + -- @param[type=boolean] no_array_indexes If true, arrays/map elements will be + -- encoded without an index: 'my_array[]='. By default, + -- array elements will have an index: 'my_array[0]='. 
+ -- @treturn string A valid querystring (without the prefixing '?') + function _M.encode_args(args, raw, no_array_indexes) + local query = {} + local keys = {} + + for k in pairs(args) do + keys[#keys+1] = k + end + + sort(keys, compare_keys) + + for _, key in ipairs(keys) do + local value = args[key] + if type(value) == "table" then + recursive_encode_args(key, value, raw, no_array_indexes, query) + elseif value == ngx_null then + query[#query+1] = encode_args_value(key, "") + elseif value ~= nil or raw then + value = tostring(value) + if value ~= "" then + query[#query+1] = encode_args_value(key, value, raw) + elseif raw or value == "" then + query[#query+1] = key + end + end + end + + return concat(query, "&") + end + + + local function decode_array(t) + local keys = {} + local len = 0 + for k in pairs(t) do + len = len + 1 + local number = tonumber(k) + if not number then + return nil + end + keys[len] = number + end + + sort(keys) + local new_t = {} + + for i=1,len do + if keys[i] ~= i then + return nil + end + new_t[i] = t[tostring(i)] + end + + return new_t + end + + + -- Parses params in post requests + -- Transforms "string-like numbers" inside "array-like" tables into numbers + -- (needs a complete array with no holes starting on "1") + -- { x = {["1"] = "a", ["2"] = "b" } } becomes { x = {"a", "b"} } + -- Transforms empty strings into ngx.null: + -- { x = "" } becomes { x = ngx.null } + -- Transforms the strings "true" and "false" into booleans + -- { x = "true" } becomes { x = true } + function _M.decode_args(args) + local new_args = {} + + for k, v in pairs(args) do + if type(v) == "table" then + v = decode_array(v) or v + elseif v == "" then + v = ngx_null + elseif v == "true" then + v = true + elseif v == "false" then + v = false + end + new_args[k] = v + end + + return new_args + end + +end + + +--- Checks whether a request is https or was originally https (but already +-- terminated). It will check in the current request (global `ngx` table). If +-- the header `X-Forwarded-Proto` exists -- with value `https` then it will also +-- be considered as an https connection. +-- @param trusted_ip boolean indicating if the client is a trusted IP +-- @param allow_terminated if truthy, the `X-Forwarded-Proto` header will be checked as well. +-- @return boolean or nil+error in case the header exists multiple times +_M.check_https = function(trusted_ip, allow_terminated) + if ngx.var.scheme:lower() == "https" then + return true + end + + if not allow_terminated then + return false + end + + -- if we trust this IP, examine it's X-Forwarded-Proto header + -- otherwise, we fall back to relying on the client scheme + -- (which was either validated earlier, or we fall through this block) + if trusted_ip then + local scheme = ngx.req.get_headers()["x-forwarded-proto"] + + -- we could use the first entry (lower security), or check the contents of + -- each of them (slow). So for now defensive, and error + -- out on multiple entries for the x-forwarded-proto header. + if type(scheme) == "table" then + return nil, "Only one X-Forwarded-Proto header allowed" + end + + return tostring(scheme):lower() == "https" + end + + return false +end + + +local CONTROLS = [[\x00-\x1F\x7F]] +local HIGHBIT = [[\x80-\xFF]] +local SEPARATORS = [==[ \t()<>@,;:\\\"\/?={}\[\]]==] +local HTTP_TOKEN_FORBID_PATTERN = "[".. CONTROLS .. HIGHBIT .. SEPARATORS .. "]" + + +--- Validates a token defined by RFC 2616. 
+-- @param token (string) the string to verify +-- @return the valid token, or `nil+error` +function _M.validate_http_token(token) + if token == nil or token == "" then + return nil, "no token provided" + end + + if not re_match(token, HTTP_TOKEN_FORBID_PATTERN, "jo") then + return token + end + + return nil, "contains one or more invalid characters. ASCII " .. + "control characters (0-31;127), space, tab and the " .. + "characters ()<>@,;:\\\"/?={}[] are not allowed." +end + + +-- should we also use validate_http_token for this? +--- Validates a header name. +-- Checks characters used in a header name to be valid, as per nginx only +-- a-z, A-Z, 0-9 and '-' are allowed. +-- @param name (string) the header name to verify +-- @return the valid header name, or `nil+error` +function _M.validate_header_name(name) + if name == nil or name == "" then + return nil, "no header name provided" + end + + if re_match(name, "^[a-zA-Z0-9-_]+$", "jo") then + return name + end + + return nil, "bad header name '" .. name .. + "', allowed characters are A-Z, a-z, 0-9, '_', and '-'" +end + + +--- Validates a cookie name. +-- @param name (string) the cookie name to verify +-- @return the valid cookie name, or `nil+error` +_M.validate_cookie_name = _M.validate_http_token + + +--- +-- Given an http status and an optional message, this function will +-- return a body that could be used in `kong.response.exit`. +-- +-- * Status 204 will always return nil for the body +-- * 405, 500 and 502 always return a predefined message +-- * If there is a message, it will be used as a body +-- * Otherwise, there's a default body for 401, 404 & 503 responses +-- +-- If after applying those rules there's a body, and that body isn't a +-- table, it will be transformed into one of the form `{ message = ... }`, +-- where `...` is the untransformed body. +-- +-- This function throws an error on invalid inputs. 
+-- +-- @tparam number status The status to be used +-- @tparam[opt] table|string message The message to be used +-- @tparam[opt] table headers The headers to be used +-- @return table|nil a possible body which can be used in kong.response.exit +-- @usage +-- +-- --- 204 always returns nil +-- get_default_exit_body(204) --> nil +-- get_default_exit_body(204, "foo") --> nil +-- +-- --- 405, 500 & 502 always return predefined values +-- +-- get_default_exit_body(502, "ignored") --> { message = "Bad gateway" } +-- +-- --- If message is a table, it is returned +-- +-- get_default_exit_body(200, { ok = true }) --> { ok = true } +-- +-- --- If message is not a table, it is transformed into one +-- +-- get_default_exit_body(200, "ok") --> { message = "ok" } +-- +-- --- 401, 404 and 503 provide default values if none is defined +-- +-- get_default_exit_body(404) --> { message = "Not found" } +-- +do + local _overrides = { + [405] = "Method not allowed", + [500] = "An unexpected error occurred", + [502] = "Bad gateway", + } + + local _defaults = { + [401] = "Unauthorized", + [404] = "Not found", + [503] = "Service unavailable", + } + + local MIN_STATUS_CODE = 100 + local MAX_STATUS_CODE = 599 + + + function _M.get_default_exit_body(status, message) + if type(status) ~= "number" then + error("code must be a number", 2) + + elseif status < MIN_STATUS_CODE or status > MAX_STATUS_CODE then + error(fmt("code must be a number between %u and %u", MIN_STATUS_CODE, MAX_STATUS_CODE), 2) + end + + if status == 204 then + return nil + end + + local body = _overrides[status] or message or _defaults[status] + if body ~= nil and type(body) ~= "table" then + body = { message = body } + end + + return body + end +end + + +do + local CONTENT_TYPE_JSON = "application/json" + local CONTENT_TYPE_GRPC = "application/grpc" + local CONTENT_TYPE_HTML = "text/html" + local CONTENT_TYPE_XML = "application/xml" + local CONTENT_TYPE_PLAIN = "text/plain" + local CONTENT_TYPE_APP = "application" + local CONTENT_TYPE_TEXT = "text" + local CONTENT_TYPE_DEFAULT = "default" + local CONTENT_TYPE_ANY = "*" + + local MIME_TYPES = { + [CONTENT_TYPE_GRPC] = "", + [CONTENT_TYPE_HTML] = "text/html; charset=utf-8", + [CONTENT_TYPE_JSON] = "application/json; charset=utf-8", + [CONTENT_TYPE_PLAIN] = "text/plain; charset=utf-8", + [CONTENT_TYPE_XML] = "application/xml; charset=utf-8", + [CONTENT_TYPE_APP] = "application/json; charset=utf-8", + [CONTENT_TYPE_TEXT] = "text/plain; charset=utf-8", + [CONTENT_TYPE_DEFAULT] = "application/json; charset=utf-8", + } + + local ERROR_TEMPLATES = { + [CONTENT_TYPE_GRPC] = "", + [CONTENT_TYPE_HTML] = [[ + + + + + Error + + +

+    <h1>Error</h1>
+    <p>%s.</p>
+    <p>request_id: %s</p>
+ + +]], + [CONTENT_TYPE_JSON] = [[ +{ + "message":"%s", + "request_id":"%s" +}]], + [CONTENT_TYPE_PLAIN] = "%s\nrequest_id: %s\n", + [CONTENT_TYPE_XML] = [[ + + + %s + %s + +]], + } + + local ngx_log = ngx.log + local ERR = ngx.ERR + local custom_error_templates = setmetatable({}, { + __index = function(self, format) + local template_path = kong.configuration["error_template_" .. format] + if not template_path then + rawset(self, format, false) + return false + end + + local template, err + if pl_path.exists(template_path) then + template, err = pl_file.read(template_path) + else + err = "file not found" + end + + if template then + rawset(self, format, template) + return template + end + + ngx_log(ERR, fmt("failed reading the custom %s error template: %s", format, err)) + rawset(self, format, false) + return false + end + }) + + + function _M.get_response_type(accept_header) + local content_type = MIME_TYPES[CONTENT_TYPE_DEFAULT] + if type(accept_header) == "table" then + accept_header = join(",", accept_header) + end + + if accept_header ~= nil then + local pattern = [[ + ((?:[a-z0-9][a-z0-9-!#$&^_+.]+|\*) \/ (?:[a-z0-9][a-z0-9-!#$&^_+.]+|\*)) + (?: + \s*;\s* + q = ( 1(?:\.0{0,3}|) | 0(?:\.\d{0,3}|) ) + | \s*;\s* [a-z0-9][a-z0-9-!#$&^_+.]+ (?:=[^;]*|) + )* + ]] + local accept_values = split(accept_header, ",") + local max_quality = 0 + + for _, accept_value in ipairs(accept_values) do + accept_value = str.strip(accept_value) + local matches = ngx.re.match(accept_value, pattern, "ajoxi") + + if matches then + local media_type = matches[1] + local q = tonumber(matches[2]) or 1 + + if q > max_quality then + max_quality = q + content_type = _M.get_mime_type(media_type) or content_type + end + end + end + end + + return content_type + end + + + function _M.get_mime_type(content_header, use_default) + use_default = use_default == nil or use_default + content_header = str.strip(content_header) + content_header = str.split(content_header, ";")[1] + local mime_type + + local entries = split(content_header, "/") + if #entries > 1 then + if entries[2] == CONTENT_TYPE_ANY then + if entries[1] == CONTENT_TYPE_ANY then + mime_type = MIME_TYPES[CONTENT_TYPE_DEFAULT] + else + mime_type = MIME_TYPES[entries[1]] + end + else + mime_type = MIME_TYPES[content_header] + end + end + + if mime_type or use_default then + return mime_type or MIME_TYPES[CONTENT_TYPE_DEFAULT] + end + + return nil, "could not find MIME type" + end + + + function _M.get_error_template(mime_type) + if mime_type == CONTENT_TYPE_JSON or mime_type == MIME_TYPES[CONTENT_TYPE_JSON] then + return custom_error_templates.json or ERROR_TEMPLATES[CONTENT_TYPE_JSON] + + elseif mime_type == CONTENT_TYPE_HTML or mime_type == MIME_TYPES[CONTENT_TYPE_HTML] then + return custom_error_templates.html or ERROR_TEMPLATES[CONTENT_TYPE_HTML] + + elseif mime_type == CONTENT_TYPE_XML or mime_type == MIME_TYPES[CONTENT_TYPE_XML] then + return custom_error_templates.xml or ERROR_TEMPLATES[CONTENT_TYPE_XML] + + elseif mime_type == CONTENT_TYPE_PLAIN or mime_type == MIME_TYPES[CONTENT_TYPE_PLAIN] then + return custom_error_templates.plain or ERROR_TEMPLATES[CONTENT_TYPE_PLAIN] + + elseif mime_type == CONTENT_TYPE_GRPC or mime_type == MIME_TYPES[CONTENT_TYPE_GRPC] then + return ERROR_TEMPLATES[CONTENT_TYPE_GRPC] + + end + + return nil, "no template found for MIME type " .. 
(mime_type or "empty") + end + +end + + +return _M diff --git a/kong/tools/utils.lua b/kong/tools/utils.lua index f8579fb8e0da..0d67b241a420 100644 --- a/kong/tools/utils.lua +++ b/kong/tools/utils.lua @@ -8,275 +8,16 @@ -- @license [Apache 2.0](https://opensource.org/licenses/Apache-2.0) -- @module kong.tools.utils -local pl_stringx = require "pl.stringx" -local pl_path = require "pl.path" -local pl_file = require "pl.file" - - -local type = type -local pairs = pairs -local ipairs = ipairs -local tostring = tostring -local tonumber = tonumber -local sort = table.sort -local concat = table.concat -local fmt = string.format -local join = pl_stringx.join -local split = pl_stringx.split -local re_match = ngx.re.match -local setmetatable = setmetatable +local pairs = pairs +local ipairs = ipairs +local require = require +local fmt = string.format +local re_match = ngx.re.match local _M = {} -do - local url = require "socket.url" - - --- URL escape and format key and value - -- values should be already decoded or the `raw` option should be passed to prevent double-encoding - local function encode_args_value(key, value, raw) - if not raw then - key = url.escape(key) - end - if value ~= nil then - if not raw then - value = url.escape(value) - end - return fmt("%s=%s", key, value) - else - return key - end - end - - local function compare_keys(a, b) - local ta = type(a) - if ta == type(b) then - return a < b - end - return ta == "number" -- numbers go first, then the rest of keys (usually strings) - end - - - -- Recursively URL escape and format key and value - -- Handles nested arrays and tables - local function recursive_encode_args(parent_key, value, raw, no_array_indexes, query) - local sub_keys = {} - for sk in pairs(value) do - sub_keys[#sub_keys + 1] = sk - end - sort(sub_keys, compare_keys) - - local sub_value, next_sub_key - for _, sub_key in ipairs(sub_keys) do - sub_value = value[sub_key] - - if type(sub_key) == "number" then - if no_array_indexes then - next_sub_key = parent_key .. "[]" - else - next_sub_key = ("%s[%s]"):format(parent_key, tostring(sub_key)) - end - else - next_sub_key = ("%s.%s"):format(parent_key, tostring(sub_key)) - end - - if type(sub_value) == "table" then - recursive_encode_args(next_sub_key, sub_value, raw, no_array_indexes, query) - else - query[#query+1] = encode_args_value(next_sub_key, sub_value, raw) - end - end - end - - - local ngx_null = ngx.null - - --- Encode a Lua table to a querystring - -- Tries to mimic ngx_lua's `ngx.encode_args`, but has differences: - -- * It percent-encodes querystring values. - -- * It also supports encoding for bodies (only because it is used in http_client for specs. - -- * It encodes arrays like Lapis instead of like ngx.encode_args to allow interacting with Lapis - -- * It encodes ngx.null as empty strings - -- * It encodes true and false as "true" and "false" - -- * It is capable of encoding nested data structures: - -- * An array access is encoded as `arr[1]` - -- * A struct access is encoded as `struct.field` - -- * Nested structures can use both: `arr[1].field[3]` - -- @see https://github.com/Mashape/kong/issues/749 - -- @param[type=table] args A key/value table containing the query args to encode. - -- @param[type=boolean] raw If true, will not percent-encode any key/value and will ignore special boolean rules. - -- @param[type=boolean] no_array_indexes If true, arrays/map elements will be - -- encoded without an index: 'my_array[]='. By default, - -- array elements will have an index: 'my_array[0]='. 
- -- @treturn string A valid querystring (without the prefixing '?') - function _M.encode_args(args, raw, no_array_indexes) - local query = {} - local keys = {} - - for k in pairs(args) do - keys[#keys+1] = k - end - - sort(keys, compare_keys) - - for _, key in ipairs(keys) do - local value = args[key] - if type(value) == "table" then - recursive_encode_args(key, value, raw, no_array_indexes, query) - elseif value == ngx_null then - query[#query+1] = encode_args_value(key, "") - elseif value ~= nil or raw then - value = tostring(value) - if value ~= "" then - query[#query+1] = encode_args_value(key, value, raw) - elseif raw or value == "" then - query[#query+1] = key - end - end - end - - return concat(query, "&") - end - - local function decode_array(t) - local keys = {} - local len = 0 - for k in pairs(t) do - len = len + 1 - local number = tonumber(k) - if not number then - return nil - end - keys[len] = number - end - - sort(keys) - local new_t = {} - - for i=1,len do - if keys[i] ~= i then - return nil - end - new_t[i] = t[tostring(i)] - end - - return new_t - end - - -- Parses params in post requests - -- Transforms "string-like numbers" inside "array-like" tables into numbers - -- (needs a complete array with no holes starting on "1") - -- { x = {["1"] = "a", ["2"] = "b" } } becomes { x = {"a", "b"} } - -- Transforms empty strings into ngx.null: - -- { x = "" } becomes { x = ngx.null } - -- Transforms the strings "true" and "false" into booleans - -- { x = "true" } becomes { x = true } - function _M.decode_args(args) - local new_args = {} - - for k, v in pairs(args) do - if type(v) == "table" then - v = decode_array(v) or v - elseif v == "" then - v = ngx_null - elseif v == "true" then - v = true - elseif v == "false" then - v = false - end - new_args[k] = v - end - - return new_args - end - -end - - ---- Checks whether a request is https or was originally https (but already --- terminated). It will check in the current request (global `ngx` table). If --- the header `X-Forwarded-Proto` exists -- with value `https` then it will also --- be considered as an https connection. --- @param trusted_ip boolean indicating if the client is a trusted IP --- @param allow_terminated if truthy, the `X-Forwarded-Proto` header will be checked as well. --- @return boolean or nil+error in case the header exists multiple times -_M.check_https = function(trusted_ip, allow_terminated) - if ngx.var.scheme:lower() == "https" then - return true - end - - if not allow_terminated then - return false - end - - -- if we trust this IP, examine it's X-Forwarded-Proto header - -- otherwise, we fall back to relying on the client scheme - -- (which was either validated earlier, or we fall through this block) - if trusted_ip then - local scheme = ngx.req.get_headers()["x-forwarded-proto"] - - -- we could use the first entry (lower security), or check the contents of - -- each of them (slow). So for now defensive, and error - -- out on multiple entries for the x-forwarded-proto header. - if type(scheme) == "table" then - return nil, "Only one X-Forwarded-Proto header allowed" - end - - return tostring(scheme):lower() == "https" - end - - return false -end - - -local CONTROLS = [[\x00-\x1F\x7F]] -local HIGHBIT = [[\x80-\xFF]] -local SEPARATORS = [==[ \t()<>@,;:\\\"\/?={}\[\]]==] -local HTTP_TOKEN_FORBID_PATTERN = "[".. CONTROLS .. HIGHBIT .. SEPARATORS .. "]" - ---- Validates a token defined by RFC 2616. 
--- @param token (string) the string to verify --- @return the valid token, or `nil+error` -function _M.validate_http_token(token) - if token == nil or token == "" then - return nil, "no token provided" - end - - if not re_match(token, HTTP_TOKEN_FORBID_PATTERN, "jo") then - return token - end - - return nil, "contains one or more invalid characters. ASCII " .. - "control characters (0-31;127), space, tab and the " .. - "characters ()<>@,;:\\\"/?={}[] are not allowed." -end - --- should we also use validate_http_token for this? ---- Validates a header name. --- Checks characters used in a header name to be valid, as per nginx only --- a-z, A-Z, 0-9 and '-' are allowed. --- @param name (string) the header name to verify --- @return the valid header name, or `nil+error` -_M.validate_header_name = function(name) - if name == nil or name == "" then - return nil, "no header name provided" - end - - if re_match(name, "^[a-zA-Z0-9-_]+$", "jo") then - return name - end - - return nil, "bad header name '" .. name .. - "', allowed characters are A-Z, a-z, 0-9, '_', and '-'" -end - ---- Validates a cookie name. --- @param name (string) the cookie name to verify --- @return the valid cookie name, or `nil+error` -_M.validate_cookie_name = _M.validate_http_token - - local validate_labels do local nkeys = require "table.nkeys" @@ -333,262 +74,6 @@ end _M.validate_labels = validate_labels ---- --- Given an http status and an optional message, this function will --- return a body that could be used in `kong.response.exit`. --- --- * Status 204 will always return nil for the body --- * 405, 500 and 502 always return a predefined message --- * If there is a message, it will be used as a body --- * Otherwise, there's a default body for 401, 404 & 503 responses --- --- If after applying those rules there's a body, and that body isn't a --- table, it will be transformed into one of the form `{ message = ... }`, --- where `...` is the untransformed body. --- --- This function throws an error on invalid inputs. 
--- --- @tparam number status The status to be used --- @tparam[opt] table|string message The message to be used --- @tparam[opt] table headers The headers to be used --- @return table|nil a possible body which can be used in kong.response.exit --- @usage --- --- --- 204 always returns nil --- get_default_exit_body(204) --> nil --- get_default_exit_body(204, "foo") --> nil --- --- --- 405, 500 & 502 always return predefined values --- --- get_default_exit_body(502, "ignored") --> { message = "Bad gateway" } --- --- --- If message is a table, it is returned --- --- get_default_exit_body(200, { ok = true }) --> { ok = true } --- --- --- If message is not a table, it is transformed into one --- --- get_default_exit_body(200, "ok") --> { message = "ok" } --- --- --- 401, 404 and 503 provide default values if none is defined --- --- get_default_exit_body(404) --> { message = "Not found" } --- -do - local _overrides = { - [405] = "Method not allowed", - [500] = "An unexpected error occurred", - [502] = "Bad gateway", - } - - local _defaults = { - [401] = "Unauthorized", - [404] = "Not found", - [503] = "Service unavailable", - } - - local MIN_STATUS_CODE = 100 - local MAX_STATUS_CODE = 599 - - function _M.get_default_exit_body(status, message) - if type(status) ~= "number" then - error("code must be a number", 2) - - elseif status < MIN_STATUS_CODE or status > MAX_STATUS_CODE then - error(fmt("code must be a number between %u and %u", MIN_STATUS_CODE, MAX_STATUS_CODE), 2) - end - - if status == 204 then - return nil - end - - local body = _overrides[status] or message or _defaults[status] - if body ~= nil and type(body) ~= "table" then - body = { message = body } - end - - return body - end -end - - -local get_mime_type -local get_response_type -local get_error_template -do - local CONTENT_TYPE_JSON = "application/json" - local CONTENT_TYPE_GRPC = "application/grpc" - local CONTENT_TYPE_HTML = "text/html" - local CONTENT_TYPE_XML = "application/xml" - local CONTENT_TYPE_PLAIN = "text/plain" - local CONTENT_TYPE_APP = "application" - local CONTENT_TYPE_TEXT = "text" - local CONTENT_TYPE_DEFAULT = "default" - local CONTENT_TYPE_ANY = "*" - - local MIME_TYPES = { - [CONTENT_TYPE_GRPC] = "", - [CONTENT_TYPE_HTML] = "text/html; charset=utf-8", - [CONTENT_TYPE_JSON] = "application/json; charset=utf-8", - [CONTENT_TYPE_PLAIN] = "text/plain; charset=utf-8", - [CONTENT_TYPE_XML] = "application/xml; charset=utf-8", - [CONTENT_TYPE_APP] = "application/json; charset=utf-8", - [CONTENT_TYPE_TEXT] = "text/plain; charset=utf-8", - [CONTENT_TYPE_DEFAULT] = "application/json; charset=utf-8", - } - - local ERROR_TEMPLATES = { - [CONTENT_TYPE_GRPC] = "", - [CONTENT_TYPE_HTML] = [[ - - - - - Error - - -

-    <h1>Error</h1>
-    <p>%s.</p>
-    <p>request_id: %s</p>
- - -]], - [CONTENT_TYPE_JSON] = [[ -{ - "message":"%s", - "request_id":"%s" -}]], - [CONTENT_TYPE_PLAIN] = "%s\nrequest_id: %s\n", - [CONTENT_TYPE_XML] = [[ - - - %s - %s - -]], - } - - local ngx_log = ngx.log - local ERR = ngx.ERR - local custom_error_templates = setmetatable({}, { - __index = function(self, format) - local template_path = kong.configuration["error_template_" .. format] - if not template_path then - rawset(self, format, false) - return false - end - - local template, err - if pl_path.exists(template_path) then - template, err = pl_file.read(template_path) - else - err = "file not found" - end - - if template then - rawset(self, format, template) - return template - end - - ngx_log(ERR, fmt("failed reading the custom %s error template: %s", format, err)) - rawset(self, format, false) - return false - end - }) - - - get_response_type = function(accept_header) - local content_type = MIME_TYPES[CONTENT_TYPE_DEFAULT] - if type(accept_header) == "table" then - accept_header = join(",", accept_header) - end - - if accept_header ~= nil then - local pattern = [[ - ((?:[a-z0-9][a-z0-9-!#$&^_+.]+|\*) \/ (?:[a-z0-9][a-z0-9-!#$&^_+.]+|\*)) - (?: - \s*;\s* - q = ( 1(?:\.0{0,3}|) | 0(?:\.\d{0,3}|) ) - | \s*;\s* [a-z0-9][a-z0-9-!#$&^_+.]+ (?:=[^;]*|) - )* - ]] - local accept_values = split(accept_header, ",") - local max_quality = 0 - - for _, accept_value in ipairs(accept_values) do - accept_value = _M.strip(accept_value) - local matches = ngx.re.match(accept_value, pattern, "ajoxi") - - if matches then - local media_type = matches[1] - local q = tonumber(matches[2]) or 1 - - if q > max_quality then - max_quality = q - content_type = get_mime_type(media_type) or content_type - end - end - end - end - - return content_type - end - - - get_mime_type = function(content_header, use_default) - use_default = use_default == nil or use_default - content_header = _M.strip(content_header) - content_header = _M.split(content_header, ";")[1] - local mime_type - - local entries = split(content_header, "/") - if #entries > 1 then - if entries[2] == CONTENT_TYPE_ANY then - if entries[1] == CONTENT_TYPE_ANY then - mime_type = MIME_TYPES[CONTENT_TYPE_DEFAULT] - else - mime_type = MIME_TYPES[entries[1]] - end - else - mime_type = MIME_TYPES[content_header] - end - end - - if mime_type or use_default then - return mime_type or MIME_TYPES[CONTENT_TYPE_DEFAULT] - end - - return nil, "could not find MIME type" - end - - - get_error_template = function(mime_type) - if mime_type == CONTENT_TYPE_JSON or mime_type == MIME_TYPES[CONTENT_TYPE_JSON] then - return custom_error_templates.json or ERROR_TEMPLATES[CONTENT_TYPE_JSON] - - elseif mime_type == CONTENT_TYPE_HTML or mime_type == MIME_TYPES[CONTENT_TYPE_HTML] then - return custom_error_templates.html or ERROR_TEMPLATES[CONTENT_TYPE_HTML] - - elseif mime_type == CONTENT_TYPE_XML or mime_type == MIME_TYPES[CONTENT_TYPE_XML] then - return custom_error_templates.xml or ERROR_TEMPLATES[CONTENT_TYPE_XML] - - elseif mime_type == CONTENT_TYPE_PLAIN or mime_type == MIME_TYPES[CONTENT_TYPE_PLAIN] then - return custom_error_templates.plain or ERROR_TEMPLATES[CONTENT_TYPE_PLAIN] - - elseif mime_type == CONTENT_TYPE_GRPC or mime_type == MIME_TYPES[CONTENT_TYPE_GRPC] then - return ERROR_TEMPLATES[CONTENT_TYPE_GRPC] - - end - - return nil, "no template found for MIME type " .. 
(mime_type or "empty") - end - -end -_M.get_mime_type = get_mime_type -_M.get_response_type = get_response_type -_M.get_error_template = get_error_template - - do local modules = { "kong.tools.table", @@ -601,6 +86,7 @@ do "kong.tools.time", "kong.tools.module", "kong.tools.ip", + "kong.tools.http", } for _, str in ipairs(modules) do From dbdd3e92b830e6ae50b030579a6f7f67abbe3a31 Mon Sep 17 00:00:00 2001 From: Chrono Date: Thu, 16 Nov 2023 19:14:22 +0800 Subject: [PATCH 131/249] style(tools): small style fixes for ip module (#12046) --- kong/tools/ip.lua | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/kong/tools/ip.lua b/kong/tools/ip.lua index c70108132597..786bf8d6460e 100644 --- a/kong/tools/ip.lua +++ b/kong/tools/ip.lua @@ -5,6 +5,7 @@ local pl_stringx = require "pl.stringx" local type = type local ipairs = ipairs local tonumber = tonumber +local tostring = tostring local gsub = string.gsub local sub = string.sub local fmt = string.format @@ -312,4 +313,4 @@ function _M.format_host(p1, p2) end -return _M; +return _M From 21505656e6a41775400b0b9cce372b298e7d74af Mon Sep 17 00:00:00 2001 From: Chrono Date: Thu, 16 Nov 2023 21:08:57 +0800 Subject: [PATCH 132/249] style(tools): optimize string operations in http module (#12048) --- kong/tools/http.lua | 16 ++++++++-------- kong/tools/string.lua | 3 +++ 2 files changed, 11 insertions(+), 8 deletions(-) diff --git a/kong/tools/http.lua b/kong/tools/http.lua index 621dd5f53d2f..133678f35d18 100644 --- a/kong/tools/http.lua +++ b/kong/tools/http.lua @@ -1,7 +1,6 @@ -local pl_stringx = require "pl.stringx" local pl_path = require "pl.path" local pl_file = require "pl.file" -local str = require "kong.tools.string" +local tools_str = require "kong.tools.string" local type = type @@ -13,9 +12,10 @@ local setmetatable = setmetatable local sort = table.sort local concat = table.concat local fmt = string.format -local join = pl_stringx.join -local split = pl_stringx.split local re_match = ngx.re.match +local join = tools_str.join +local split = tools_str.split +local strip = tools_str.strip local _M = {} @@ -457,8 +457,8 @@ do local max_quality = 0 for _, accept_value in ipairs(accept_values) do - accept_value = str.strip(accept_value) - local matches = ngx.re.match(accept_value, pattern, "ajoxi") + accept_value = strip(accept_value) + local matches = re_match(accept_value, pattern, "ajoxi") if matches then local media_type = matches[1] @@ -478,8 +478,8 @@ do function _M.get_mime_type(content_header, use_default) use_default = use_default == nil or use_default - content_header = str.strip(content_header) - content_header = str.split(content_header, ";")[1] + content_header = strip(content_header) + content_header = split(content_header, ";")[1] local mime_type local entries = split(content_header, "/") diff --git a/kong/tools/string.lua b/kong/tools/string.lua index 53dfe3d233ba..1920d7e970b7 100644 --- a/kong/tools/string.lua +++ b/kong/tools/string.lua @@ -13,6 +13,9 @@ local gsub = string.gsub local _M = {} +_M.join = pl_stringx.join + + --- splits a string. -- just a placeholder to the penlight `pl.stringx.split` function -- @function split From a7e7cb44253ce2dfe285226a6be797e945abe49c Mon Sep 17 00:00:00 2001 From: xumin Date: Wed, 15 Nov 2023 15:27:37 +0800 Subject: [PATCH 133/249] fix(plugin server): an instance for every request As the __key__ changes its definition (cache key) it can never match a plugin's uuid. change to use __plugin_id. 
Fix KAG-2969 --- changelog/unreleased/kong/plugin-server-instance-leak.yml | 3 +++ kong/runloop/plugin_servers/init.lua | 2 +- 2 files changed, 4 insertions(+), 1 deletion(-) create mode 100644 changelog/unreleased/kong/plugin-server-instance-leak.yml diff --git a/changelog/unreleased/kong/plugin-server-instance-leak.yml b/changelog/unreleased/kong/plugin-server-instance-leak.yml new file mode 100644 index 000000000000..c00cbfc69e6c --- /dev/null +++ b/changelog/unreleased/kong/plugin-server-instance-leak.yml @@ -0,0 +1,3 @@ +message: "**Plugin Server**: fix an issue where every request causes a new plugin instance to be created" +type: bugfix +scope: PDK diff --git a/kong/runloop/plugin_servers/init.lua b/kong/runloop/plugin_servers/init.lua index c78913f4cf8b..6c3937efc8ec 100644 --- a/kong/runloop/plugin_servers/init.lua +++ b/kong/runloop/plugin_servers/init.lua @@ -213,7 +213,7 @@ function get_instance_id(plugin_name, conf) if instance_info and instance_info.id - and instance_info.conf and instance_info.conf.__key__ == key + and instance_info.conf and instance_info.conf.__plugin_id == key then -- exact match, return it return instance_info.id From a382576530b7ddd57898c9ce917343bddeaf93f4 Mon Sep 17 00:00:00 2001 From: tzssangglass Date: Fri, 17 Nov 2023 01:56:35 +0800 Subject: [PATCH 134/249] feat(cp): add dp cert details (#11921) * feat(cp): add dp cert details support for exposing dataplane certificate expiry date to `/clustering/data-planes` endpoint Fix: [FTI-5530](https://konghq.atlassian.net/browse/FTI-5530) Signed-off-by: tzssangglass --- .../kong/cp-expose-dp-cert-details.yml | 5 + kong-3.6.0-0.rockspec | 1 + kong/clustering/control_plane.lua | 15 ++- kong/clustering/init.lua | 6 +- kong/clustering/tls.lua | 4 +- kong/db/migrations/core/022_350_to_360.lua | 13 ++ kong/db/migrations/core/init.lua | 1 + .../entities/clustering_data_planes.lua | 8 ++ .../01-schema/13-cluster_status_spec.lua | 12 ++ spec/01-unit/19-hybrid/02-clustering_spec.lua | 1 - .../03-db/13-cluster_status_spec.lua | 41 +++++++ .../09-hybrid_mode/01-sync_spec.lua | 116 ++++++++++++++++++ .../migrations/core/022_350_to_360_spec.lua | 7 ++ 13 files changed, 224 insertions(+), 6 deletions(-) create mode 100644 changelog/unreleased/kong/cp-expose-dp-cert-details.yml create mode 100644 kong/db/migrations/core/022_350_to_360.lua create mode 100644 spec/05-migration/db/migrations/core/022_350_to_360_spec.lua diff --git a/changelog/unreleased/kong/cp-expose-dp-cert-details.yml b/changelog/unreleased/kong/cp-expose-dp-cert-details.yml new file mode 100644 index 000000000000..4863a932f1d9 --- /dev/null +++ b/changelog/unreleased/kong/cp-expose-dp-cert-details.yml @@ -0,0 +1,5 @@ +message: | + **Clustering**: Expose data plane certificate expiry date on the control plane API. 
+type: feature +scope: Clustering + diff --git a/kong-3.6.0-0.rockspec b/kong-3.6.0-0.rockspec index 11fa1100bfaa..1453b8b11479 100644 --- a/kong-3.6.0-0.rockspec +++ b/kong-3.6.0-0.rockspec @@ -278,6 +278,7 @@ build = { ["kong.db.migrations.core.019_320_to_330"] = "kong/db/migrations/core/019_320_to_330.lua", ["kong.db.migrations.core.020_330_to_340"] = "kong/db/migrations/core/020_330_to_340.lua", ["kong.db.migrations.core.021_340_to_350"] = "kong/db/migrations/core/021_340_to_350.lua", + ["kong.db.migrations.core.022_350_to_360"] = "kong/db/migrations/core/022_350_to_360.lua", ["kong.db.migrations.operations.200_to_210"] = "kong/db/migrations/operations/200_to_210.lua", ["kong.db.migrations.operations.212_to_213"] = "kong/db/migrations/operations/212_to_213.lua", ["kong.db.migrations.operations.280_to_300"] = "kong/db/migrations/operations/280_to_300.lua", diff --git a/kong/clustering/control_plane.lua b/kong/clustering/control_plane.lua index 423e33d74c50..fb66db3fbc9f 100644 --- a/kong/clustering/control_plane.lua +++ b/kong/clustering/control_plane.lua @@ -77,6 +77,17 @@ local function is_timeout(err) end +local function extract_dp_cert(cert) + local expiry_timestamp = cert:get_not_after() + -- values in cert_details must be strings + local cert_details = { + expiry_timestamp = expiry_timestamp, + } + + return cert_details +end + + function _M.new(clustering) assert(type(clustering) == "table", "kong.clustering is not instantiated") @@ -183,7 +194,7 @@ _M.check_version_compatibility = compat.check_version_compatibility _M.check_configuration_compatibility = compat.check_configuration_compatibility -function _M:handle_cp_websocket() +function _M:handle_cp_websocket(cert) local dp_id = ngx_var.arg_node_id local dp_hostname = ngx_var.arg_node_hostname local dp_ip = ngx_var.remote_addr @@ -230,6 +241,7 @@ function _M:handle_cp_websocket() return ngx_exit(ngx_CLOSE) end + local dp_cert_details = extract_dp_cert(cert) local dp_plugins_map = plugins_list_to_map(data.plugins) local config_hash = DECLARATIVE_EMPTY_CONFIG_HASH -- initial hash local last_seen = ngx_time() @@ -247,6 +259,7 @@ function _M:handle_cp_websocket() version = dp_version, sync_status = sync_status, -- TODO: import may have been failed though labels = data.labels, + cert_details = dp_cert_details, }, { ttl = purge_delay }) if not ok then ngx_log(ngx_ERR, _log_prefix, "unable to update clustering data plane status: ", err, log_suffix) diff --git a/kong/clustering/init.lua b/kong/clustering/init.lua index a661a8c4eeaf..0d5570badd52 100644 --- a/kong/clustering/init.lua +++ b/kong/clustering/init.lua @@ -63,13 +63,13 @@ end function _M:handle_cp_websocket() - local ok, err = self:validate_client_cert() - if not ok then + local cert, err = self:validate_client_cert() + if not cert then ngx_log(ngx_ERR, _log_prefix, err) return ngx_exit(444) end - return self.instance:handle_cp_websocket() + return self.instance:handle_cp_websocket(cert) end diff --git a/kong/clustering/tls.lua b/kong/clustering/tls.lua index 03e4f4205a9e..cc528ff24d14 100644 --- a/kong/clustering/tls.lua +++ b/kong/clustering/tls.lua @@ -13,6 +13,8 @@ local constants = require("kong.constants") local ngx_log = ngx.log local WARN = ngx.WARN +local tostring = tostring + local OCSP_TIMEOUT = constants.CLUSTERING_OCSP_TIMEOUT @@ -226,7 +228,7 @@ function tls.validate_client_cert(kong_config, cp_cert, dp_cert_pem) return nil, err end - return true + return cert, nil end diff --git a/kong/db/migrations/core/022_350_to_360.lua 
b/kong/db/migrations/core/022_350_to_360.lua new file mode 100644 index 000000000000..364632a1cd55 --- /dev/null +++ b/kong/db/migrations/core/022_350_to_360.lua @@ -0,0 +1,13 @@ +return { + postgres = { + up = [[ + DO $$ + BEGIN + ALTER TABLE IF EXISTS ONLY "clustering_data_planes" ADD "cert_details" JSONB; + EXCEPTION WHEN DUPLICATE_COLUMN THEN + -- Do nothing, accept existing state + END; + $$; + ]] + } +} diff --git a/kong/db/migrations/core/init.lua b/kong/db/migrations/core/init.lua index b61c1f698c74..b19a271ce7aa 100644 --- a/kong/db/migrations/core/init.lua +++ b/kong/db/migrations/core/init.lua @@ -19,4 +19,5 @@ return { "019_320_to_330", "020_330_to_340", "021_340_to_350", + "022_350_to_360", } diff --git a/kong/db/schema/entities/clustering_data_planes.lua b/kong/db/schema/entities/clustering_data_planes.lua index 7d85ecf9fec9..fb1f43db0990 100644 --- a/kong/db/schema/entities/clustering_data_planes.lua +++ b/kong/db/schema/entities/clustering_data_planes.lua @@ -38,5 +38,13 @@ return { description = "Custom key value pairs as meta-data for DPs.", }, }, + { cert_details = { + type = "record", + fields = { + { expiry_timestamp = { type = "number", timestamp = true, required = false } } + }, + description = "Certificate details of the DPs.", + }, + }, }, } diff --git a/spec/01-unit/01-db/01-schema/13-cluster_status_spec.lua b/spec/01-unit/01-db/01-schema/13-cluster_status_spec.lua index 81e621846eb1..b42f1ae5a8ce 100644 --- a/spec/01-unit/01-db/01-schema/13-cluster_status_spec.lua +++ b/spec/01-unit/01-db/01-schema/13-cluster_status_spec.lua @@ -66,4 +66,16 @@ describe("plugins", function() assert.is_true(ok) assert.is_nil(err) end) + + it("accepts cert details", function() + local ok, err = validate({ + ip = "127.0.0.1", + hostname = "dp.example.com", + cert_details = { + expiry_timestamp = 1897136778, + } + }) + assert.is_true(ok) + assert.is_nil(err) + end) end) diff --git a/spec/01-unit/19-hybrid/02-clustering_spec.lua b/spec/01-unit/19-hybrid/02-clustering_spec.lua index f134aeab5af0..d2d54f10d83e 100644 --- a/spec/01-unit/19-hybrid/02-clustering_spec.lua +++ b/spec/01-unit/19-hybrid/02-clustering_spec.lua @@ -1,7 +1,6 @@ local calculate_config_hash = require("kong.clustering.config_helper").calculate_config_hash local version = require("kong.clustering.compat.version") - describe("kong.clustering.compat.version", function() it("correctly parses 3 or 4 digit version numbers", function() assert.equal(3000000000, version.string_to_number("3.0.0")) diff --git a/spec/02-integration/03-db/13-cluster_status_spec.lua b/spec/02-integration/03-db/13-cluster_status_spec.lua index 3734df8f8b0a..34ffbed25606 100644 --- a/spec/02-integration/03-db/13-cluster_status_spec.lua +++ b/spec/02-integration/03-db/13-cluster_status_spec.lua @@ -71,5 +71,46 @@ for _, strategy in helpers.each_strategy() do assert.is_nil(err) end) end) + + describe("cert_details", function() + it(":upsert()", function() + local p, err = + db.clustering_data_planes:upsert( + { + id = "eb51145a-aaaa-bbbb-cccc-22087fb081db", + }, + { + config_hash = "a9a166c59873245db8f1a747ba9a80a7", + hostname = "localhost", + ip = "127.0.0.1", + cert_details = { + expiry_timestamp = 1897136778, + } + } + ) + + assert.is_truthy(p) + assert.is_nil(err) + end) + + it(":update()", function() + -- this time update instead of insert + local p, err = + db.clustering_data_planes:update( + { + id = "eb51145a-aaaa-bbbb-cccc-22087fb081db", + }, + { + config_hash = "a9a166c59873245db8f1a747ba9a80a7", + cert_details = { + expiry_timestamp = 
1888983905, + } + } + ) + + assert.is_truthy(p) + assert.is_nil(err) + end) + end) end) -- kong.db [strategy] end diff --git a/spec/02-integration/09-hybrid_mode/01-sync_spec.lua b/spec/02-integration/09-hybrid_mode/01-sync_spec.lua index d29f0fc614ec..a27d02faf785 100644 --- a/spec/02-integration/09-hybrid_mode/01-sync_spec.lua +++ b/spec/02-integration/09-hybrid_mode/01-sync_spec.lua @@ -784,4 +784,120 @@ describe("CP/DP labels #" .. strategy, function() end) end) +describe("CP/DP cert details(cluster_mtls = shared) #" .. strategy, function() + lazy_setup(function() + helpers.get_db_utils(strategy) -- runs migrations + + assert(helpers.start_kong({ + role = "control_plane", + cluster_cert = "spec/fixtures/kong_clustering.crt", + cluster_cert_key = "spec/fixtures/kong_clustering.key", + database = strategy, + db_update_frequency = 0.1, + cluster_listen = "127.0.0.1:9005", + nginx_conf = "spec/fixtures/custom_nginx.template", + })) + + assert(helpers.start_kong({ + role = "data_plane", + database = "off", + prefix = "servroot2", + cluster_cert = "spec/fixtures/kong_clustering.crt", + cluster_cert_key = "spec/fixtures/kong_clustering.key", + cluster_control_plane = "127.0.0.1:9005", + proxy_listen = "0.0.0.0:9002", + nginx_conf = "spec/fixtures/custom_nginx.template", + cluster_dp_labels="deployment:mycloud,region:us-east-1", + })) + end) + + lazy_teardown(function() + helpers.stop_kong("servroot2") + helpers.stop_kong() + end) + + describe("status API", function() + it("shows DP cert details", function() + helpers.wait_until(function() + local admin_client = helpers.admin_client() + finally(function() + admin_client:close() + end) + + local res = assert(admin_client:get("/clustering/data-planes")) + local body = assert.res_status(200, res) + local json = cjson.decode(body) + + for _, v in pairs(json.data) do + if v.ip == "127.0.0.1" then + assert.equal(1888983905, v.cert_details.expiry_timestamp) + return true + end + end + end, 3) + end) + end) +end) + +describe("CP/DP cert details(cluster_mtls = pki) #" .. 
strategy, function() + lazy_setup(function() + helpers.get_db_utils(strategy) -- runs migrations + + assert(helpers.start_kong({ + role = "control_plane", + cluster_cert = "spec/fixtures/kong_clustering.crt", + cluster_cert_key = "spec/fixtures/kong_clustering.key", + db_update_frequency = 0.1, + database = strategy, + cluster_listen = "127.0.0.1:9005", + nginx_conf = "spec/fixtures/custom_nginx.template", + -- additional attributes for PKI: + cluster_mtls = "pki", + cluster_ca_cert = "spec/fixtures/kong_clustering_ca.crt", + })) + + assert(helpers.start_kong({ + role = "data_plane", + nginx_conf = "spec/fixtures/custom_nginx.template", + database = "off", + prefix = "servroot2", + cluster_cert = "spec/fixtures/kong_clustering_client.crt", + cluster_cert_key = "spec/fixtures/kong_clustering_client.key", + cluster_control_plane = "127.0.0.1:9005", + proxy_listen = "0.0.0.0:9002", + -- additional attributes for PKI: + cluster_mtls = "pki", + cluster_server_name = "kong_clustering", + cluster_ca_cert = "spec/fixtures/kong_clustering.crt", + })) + end) + + lazy_teardown(function() + helpers.stop_kong("servroot2") + helpers.stop_kong() + end) + + describe("status API", function() + it("shows DP cert details", function() + helpers.wait_until(function() + local admin_client = helpers.admin_client() + finally(function() + admin_client:close() + end) + + local res = admin_client:get("/clustering/data-planes") + local body = assert.res_status(200, res) + local json = cjson.decode(body) + + for _, v in pairs(json.data) do + if v.ip == "127.0.0.1" then + assert.equal(1897136778, v.cert_details.expiry_timestamp) + return true + end + end + end, 3) + end) + end) +end) + + end diff --git a/spec/05-migration/db/migrations/core/022_350_to_360_spec.lua b/spec/05-migration/db/migrations/core/022_350_to_360_spec.lua new file mode 100644 index 000000000000..572d139140fb --- /dev/null +++ b/spec/05-migration/db/migrations/core/022_350_to_360_spec.lua @@ -0,0 +1,7 @@ +local uh = require "spec/upgrade_helpers" + +describe("database migration", function() + uh.old_after_up("has created the expected new columns", function() + assert.table_has_column("clustering_data_planes", "cert_details", "jsonb") + end) +end) From a355d01cfdab7ab98f74a0230d57184ffeb86d92 Mon Sep 17 00:00:00 2001 From: Xumin <100666470+StarlightIbuki@users.noreply.github.com> Date: Fri, 17 Nov 2023 03:43:13 +0000 Subject: [PATCH 135/249] fix(plugin): RL instances sync to the same DB at same rate (#12003) Previously, every rate-limiting plugin instance synced using the same plugin config (the very first config hit by a request), and they all synced at the same rate; even a config update would not change the DB being synced to. The timer now syncs not just the same instance's counters but all counters in the same DB. This is a compromise given the urgency: we prefer simplicity over correctness for this behavior.
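To make the timer bookkeeping easier to follow, here is a minimal, self-contained Lua sketch of the pending/running flag pattern described above. It is not the plugin code itself: `schedule_sync` and the immediate-fire `fake_timer_at` are illustrative stand-ins for the patched `rate_limited_sync` and `kong.timer:at`, and the sleep-and-retry loop the real patch runs while a previous sync is still in flight is omitted for brevity.

    -- One pending timer and at most one running sync per plugin instance key.
    local pending, running = {}, {}

    local function schedule_sync(key, delay, sync_fn, timer_at)
      if pending[key] then
        return true          -- the already-pending timer will pick up this change
      end
      pending[key] = true    -- at most one pending timer per key

      return timer_at(delay, function()
        -- the real patch first waits (ngx.sleep) while a previous sync still
        -- holds the running flag; that loop is omitted in this sketch
        running[key] = true
        pending[key] = nil   -- later changes may now schedule the next timer
        local ok, err = pcall(sync_fn)  -- errors must not leave the flags set forever
        if not ok then print("sync failed: " .. tostring(err)) end
        running[key] = nil
      end)
    end

    -- usage: a fake timer that fires immediately, so the sketch runs outside OpenResty
    local fake_timer_at = function(_, cb) cb() return true end
    schedule_sync("rate-limiting:instance-1", 0.1, function() print("synced") end, fake_timer_at)

With a real asynchronous timer, and together with the omitted wait loop, this is what gives the properties stated in the patch: at most two timers per key at any time (one running, one pending), started at least one sync_rate interval apart, with any change flushed by the pending timer within roughly one interval.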
Full changelog - The counter table is split by DB; - Timers are created when a request hits; - The sync_rate is guaranteed with a limited number of running timers and a timer delay; - Cover the case in the integration test via "with_sync_rate" Fix KAG-2904 Co-authored-by: samugi --- .../unreleased/kong/rl-shared-sync-timer.yml | 3 + kong/plugins/rate-limiting/policies/init.lua | 162 +++++-- .../23-rate-limiting/05-integration_spec.lua | 404 +++++++++--------- 3 files changed, 323 insertions(+), 246 deletions(-) create mode 100644 changelog/unreleased/kong/rl-shared-sync-timer.yml diff --git a/changelog/unreleased/kong/rl-shared-sync-timer.yml b/changelog/unreleased/kong/rl-shared-sync-timer.yml new file mode 100644 index 000000000000..e07b78236dab --- /dev/null +++ b/changelog/unreleased/kong/rl-shared-sync-timer.yml @@ -0,0 +1,3 @@ +message: "**Rate Limiting**: fix an issue where all counters are synced to the same DB at the same rate." +type: bugfix +scope: Plugin diff --git a/kong/plugins/rate-limiting/policies/init.lua b/kong/plugins/rate-limiting/policies/init.lua index f20a2ea5b4d4..f372d6310a7d 100644 --- a/kong/plugins/rate-limiting/policies/init.lua +++ b/kong/plugins/rate-limiting/policies/init.lua @@ -15,27 +15,32 @@ local SYNC_RATE_REALTIME = -1 local EMPTY_UUID = "00000000-0000-0000-0000-000000000000" --- for `conf.sync_rate > 0` -local auto_sync_timer +local EMPTY = {} local cur_usage = { --[[ - [cache_key] = + [db_key][cache_key] = --]] } local cur_usage_expire_at = { --[[ - [cache_key] = + [db_key][cache_key] = --]] } local cur_delta = { --[[ - [cache_key] = + [db_key][cache_key] = --]] } +local function init_tables(db_key) + cur_usage[db_key] = cur_usage[db_key] or {} + cur_usage_expire_at[db_key] = cur_usage_expire_at[db_key] or {} + cur_delta[db_key] = cur_delta[db_key] or {} +end + local function is_present(str) return str and str ~= "" and str ~= null @@ -73,6 +78,13 @@ local sock_opts = {} local EXPIRATION = require "kong.plugins.rate-limiting.expiration" +local function get_db_key(conf) + return fmt("%s:%d;%d", + conf.redis_host, + conf.redis_port, + conf.redis_database) +end + local function get_redis_connection(conf) local red = redis:new() @@ -82,26 +94,25 @@ local function get_redis_connection(conf) sock_opts.ssl_verify = conf.redis_ssl_verify sock_opts.server_name = conf.redis_server_name + local db_key = get_db_key(conf) + -- use a special pool name only if redis_database is set to non-zero -- otherwise use the default pool name host:port if conf.redis_database ~= 0 then - sock_opts.pool = fmt( "%s:%d;%d", - conf.redis_host, - conf.redis_port, - conf.redis_database) + sock_opts.pool = db_key end local ok, err = red:connect(conf.redis_host, conf.redis_port, sock_opts) if not ok then kong.log.err("failed to connect to Redis: ", err) - return nil, err + return nil, db_key, err end local times, err = red:get_reused_times() if err then kong.log.err("failed to get connect reused times: ", err) - return nil, err + return nil, db_key, err end if times == 0 then @@ -118,7 +129,7 @@ local function get_redis_connection(conf) end if not ok then kong.log.err("failed to auth Redis: ", err) - return nil, err + return nil, db_key, err end end @@ -129,18 +140,21 @@ local function get_redis_connection(conf) local ok, err = red:select(conf.redis_database) if not ok then kong.log.err("failed to change Redis database: ", err) - return nil, err + return nil, db_key, err end end end - return red + return red, db_key, err end -local function clear_local_counter() - table_clear(cur_usage) -
table_clear(cur_usage_expire_at) - table_clear(cur_delta) +local function clear_local_counter(db_key) + -- for config updates a db may no longer be used but this happens rarely + -- and unlikely there will be a lot of them. So we choose to not remove the table + -- but just clear it, as recreating the table will be more expensive + table_clear(cur_usage[db_key]) + table_clear(cur_usage_expire_at[db_key]) + table_clear(cur_delta[db_key]) end local function sync_to_redis(premature, conf) @@ -148,16 +162,16 @@ local function sync_to_redis(premature, conf) return end - local red, err = get_redis_connection(conf) + local red, db_key, err = get_redis_connection(conf) if not red then kong.log.err("[rate-limiting] failed to connect to Redis: ", err) - clear_local_counter() + clear_local_counter(db_key) return end red:init_pipeline() - for cache_key, delta in pairs(cur_delta) do + for cache_key, delta in pairs(cur_delta[db_key] or EMPTY) do red:eval([[ local key, value, expiration = KEYS[1], tonumber(ARGV[1]), ARGV[2] local exists = redis.call("exists", key) @@ -165,50 +179,104 @@ local function sync_to_redis(premature, conf) if not exists or exists == 0 then redis.call("expireat", key, expiration) end - ]], 1, cache_key, delta, cur_usage_expire_at[cache_key]) + ]], 1, cache_key, delta, cur_usage_expire_at[db_key][cache_key]) end local _, err = red:commit_pipeline() if err then kong.log.err("[rate-limiting] failed to commit increment pipeline in Redis: ", err) - clear_local_counter() + clear_local_counter(db_key) return end local ok, err = red:set_keepalive(10000, 100) if not ok then kong.log.err("[rate-limiting] failed to set Redis keepalive: ", err) - clear_local_counter() + clear_local_counter(db_key) return end -- just clear these tables and avoid creating three new tables - clear_local_counter() + clear_local_counter(db_key) end -local function periodical_sync(conf, sync_func) - if not auto_sync_timer then - local err - -- timer may be initialized after the module's loaded so we need to update the reference - auto_sync_timer, err = kong.timer:named_every("rate-limiting-auto-sync", conf.sync_rate, sync_func, conf) +local plugin_sync_pending = {} +local plugin_sync_running = {} + +-- It's called "rate_limited_sync" because the sync timer itself +-- is rate-limited by the sync_rate. +-- It should be easy to prove that: +-- 1. There will be at most 2 timers per worker for a plugin instance +-- at any given time, 1 syncing and 1 pending (guaranteed by the locks) +-- 2. 2 timers will at least start with a sync_rate interval apart +-- 3. A change is always picked up by a pending timer and +-- will be sync to Redis at most sync_rate interval +local function rate_limited_sync(conf, sync_func) + local cache_key = conf.__key__ or conf.__plugin_id or "rate-limiting" + -- a timer is pending. 
The change will be picked up by the pending timer + if plugin_sync_pending[cache_key] then + return true + end - if not auto_sync_timer then - kong.log.err("failed to create timer: ", err) - return nil, err + -- The change may or may not be picked up by a running timer + -- let's start a pending timer to make sure the change is picked up + plugin_sync_pending[cache_key] = true + return kong.timer:at(conf.sync_rate, function(premature) + if premature then + -- we do not clear the pending flag to prevent more timers to be started + -- as they will also exit prematurely + return end - end - return true + -- a "pending" state is never touched before the timer is started + assert(plugin_sync_pending[cache_key]) + + + local tries = 0 + -- a timer is already running. + -- the sleep time is picked to a seemingly reasonable value + while plugin_sync_running[cache_key] do + -- we should wait for at most 2 runs even if the connection times out + -- when this happens, we should not clear the "running" state as it would + -- cause a race condition; + -- we don't want to clear the "pending" state and exit the timer either as + -- it's equivalent to waiting for more runs + if tries > 4 then + kong.log.emerg("A Redis sync is blocked by a previous try. " .. + "The previous try should have timed out but it didn't for unknown reasons.") + end + + ngx.sleep(conf.redis_timeout / 2) + tries = tries + 1 + end + + plugin_sync_running[cache_key] = true + + plugin_sync_pending[cache_key] = nil + + -- given the condition, the counters will never be empty so no need to + -- check for empty tables and skip the sync + local ok, err = pcall(sync_func, premature, conf) + if not ok then + kong.log.err("[rate-limiting] error when syncing counters to Redis: ", err) + end + + plugin_sync_running[cache_key] = nil + end) end local function update_local_counters(conf, periods, limits, identifier, value) + local db_key = get_db_key(conf) + init_tables(db_key) + for period, period_date in pairs(periods) do if limits[period] then local cache_key = get_local_key(conf, identifier, period, period_date) - cur_delta[cache_key] = (cur_delta[cache_key] or 0) + value + cur_delta[db_key][cache_key] = (cur_delta[db_key][cache_key] or 0) + value end end + end return { @@ -286,23 +354,25 @@ return { else update_local_counters(conf, periods, limits, identifier, value) - return periodical_sync(conf, sync_to_redis) + return rate_limited_sync(conf, sync_to_redis) end end, usage = function(conf, identifier, period, current_timestamp) local periods = timestamp.get_timestamps(current_timestamp) local cache_key = get_local_key(conf, identifier, period, periods[period]) + local db_key = get_db_key(conf) + init_tables(db_key) -- use local cache to reduce the number of redis calls -- also by pass the logic of incrementing the counter - if conf.sync_rate ~= SYNC_RATE_REALTIME and cur_usage[cache_key] then - if cur_usage_expire_at[cache_key] > ngx_time() then - return cur_usage[cache_key] + (cur_delta[cache_key] or 0) + if conf.sync_rate ~= SYNC_RATE_REALTIME and cur_usage[db_key][cache_key] then + if cur_usage_expire_at[db_key][cache_key] > ngx_time() then + return cur_usage[db_key][cache_key] + (cur_delta[db_key][cache_key] or 0) end - cur_usage[cache_key] = 0 - cur_usage_expire_at[cache_key] = periods[period] + EXPIRATION[period] - cur_delta[cache_key] = 0 + cur_usage[db_key][cache_key] = 0 + cur_usage_expire_at[db_key][cache_key] = periods[period] + EXPIRATION[period] + cur_delta[db_key][cache_key] = 0 return 0 end @@ -339,11 +409,11 @@ return { end 
if conf.sync_rate ~= SYNC_RATE_REALTIME then - cur_usage[cache_key] = current_metric or 0 - cur_usage_expire_at[cache_key] = periods[period] + EXPIRATION[period] + cur_usage[db_key][cache_key] = current_metric or 0 + cur_usage_expire_at[db_key][cache_key] = periods[period] + EXPIRATION[period] -- The key was just read from Redis using `incr`, which incremented it -- by 1. Adjust the value to account for the prior increment. - cur_delta[cache_key] = -1 + cur_delta[db_key][cache_key] = -1 end return current_metric or 0 diff --git a/spec/03-plugins/23-rate-limiting/05-integration_spec.lua b/spec/03-plugins/23-rate-limiting/05-integration_spec.lua index d919c50f0eaf..8b00ea67e780 100644 --- a/spec/03-plugins/23-rate-limiting/05-integration_spec.lua +++ b/spec/03-plugins/23-rate-limiting/05-integration_spec.lua @@ -88,104 +88,63 @@ describe("Plugin: rate-limiting (integration)", function() }, } + -- it's set smaller than SLEEP_TIME in purpose + local SYNC_RATE = 0.1 for strategy, config in pairs(strategies) do - describe("config.policy = redis #" .. strategy, function() - -- Regression test for the following issue: - -- https://github.com/Kong/kong/issues/3292 - - lazy_setup(function() - flush_redis(red, REDIS_DB_1) - flush_redis(red, REDIS_DB_2) - flush_redis(red, REDIS_DB_3) - if red_version >= version("6.0.0") then - add_redis_user(red) - end - - bp = helpers.get_db_utils(nil, { - "routes", - "services", - "plugins", - }, { - "rate-limiting" - }) - - local route1 = assert(bp.routes:insert { - hosts = { "redistest1.com" }, - }) - assert(bp.plugins:insert { - name = "rate-limiting", - route = { id = route1.id }, - config = { - minute = 1, - policy = "redis", - redis_host = REDIS_HOST, - redis_port = config.redis_port, - redis_database = REDIS_DB_1, - redis_ssl = config.redis_ssl, - redis_ssl_verify = config.redis_ssl_verify, - redis_server_name = config.redis_server_name, - fault_tolerant = false, - redis_timeout = 10000, - }, - }) - - local route2 = assert(bp.routes:insert { - hosts = { "redistest2.com" }, - }) - assert(bp.plugins:insert { - name = "rate-limiting", - route = { id = route2.id }, - config = { - minute = 1, - policy = "redis", - redis_host = REDIS_HOST, - redis_port = config.redis_port, - redis_database = REDIS_DB_2, - redis_ssl = config.redis_ssl, - redis_ssl_verify = config.redis_ssl_verify, - redis_server_name = config.redis_server_name, - fault_tolerant = false, - redis_timeout = 10000, - }, - }) - - if red_version >= version("6.0.0") then - local route3 = assert(bp.routes:insert { - hosts = { "redistest3.com" }, + for with_sync_rate in pairs{false, true} do + describe("config.policy = redis #" .. 
strategy, function() + -- Regression test for the following issue: + -- https://github.com/Kong/kong/issues/3292 + + lazy_setup(function() + flush_redis(red, REDIS_DB_1) + flush_redis(red, REDIS_DB_2) + flush_redis(red, REDIS_DB_3) + if red_version >= version("6.0.0") then + add_redis_user(red) + end + + bp = helpers.get_db_utils(nil, { + "routes", + "services", + "plugins", + }, { + "rate-limiting" + }) + + local route1 = assert(bp.routes:insert { + hosts = { "redistest1.com" }, }) assert(bp.plugins:insert { name = "rate-limiting", - route = { id = route3.id }, + route = { id = route1.id }, config = { - minute = 2, -- Handle multiple tests + minute = 1, policy = "redis", redis_host = REDIS_HOST, redis_port = config.redis_port, - redis_username = REDIS_USER_VALID, - redis_password = REDIS_PASSWORD, - redis_database = REDIS_DB_3, -- ensure to not get a pooled authenticated connection by using a different db + redis_database = REDIS_DB_1, redis_ssl = config.redis_ssl, redis_ssl_verify = config.redis_ssl_verify, redis_server_name = config.redis_server_name, fault_tolerant = false, redis_timeout = 10000, + sync_rate = with_sync_rate and SYNC_RATE or nil, }, }) - local route4 = assert(bp.routes:insert { - hosts = { "redistest4.com" }, + local route2 = assert(bp.routes:insert { + hosts = { "redistest2.com" }, }) assert(bp.plugins:insert { name = "rate-limiting", - route = { id = route4.id }, + route = { id = route2.id }, config = { minute = 1, policy = "redis", redis_host = REDIS_HOST, redis_port = config.redis_port, - redis_username = REDIS_USER_INVALID, - redis_password = REDIS_PASSWORD, - redis_database = REDIS_DB_4, -- ensure to not get a pooled authenticated connection by using a different db + redis_database = REDIS_DB_2, redis_ssl = config.redis_ssl, redis_ssl_verify = config.redis_ssl_verify, redis_server_name = config.redis_server_name, @@ -193,104 +152,88 @@ describe("Plugin: rate-limiting (integration)", function() redis_timeout = 10000, }, }) - end + if red_version >= version("6.0.0") then + local route3 = assert(bp.routes:insert { + hosts = { "redistest3.com" }, + }) + assert(bp.plugins:insert { + name = "rate-limiting", + route = { id = route3.id }, + config = { + minute = 2, -- Handle multiple tests + policy = "redis", + redis_host = REDIS_HOST, + redis_port = config.redis_port, + redis_username = REDIS_USER_VALID, + redis_password = REDIS_PASSWORD, + redis_database = REDIS_DB_3, -- ensure to not get a pooled authenticated connection by using a different db + redis_ssl = config.redis_ssl, + redis_ssl_verify = config.redis_ssl_verify, + redis_server_name = config.redis_server_name, + fault_tolerant = false, + redis_timeout = 10000, + }, + }) + + local route4 = assert(bp.routes:insert { + hosts = { "redistest4.com" }, + }) + assert(bp.plugins:insert { + name = "rate-limiting", + route = { id = route4.id }, + config = { + minute = 1, + policy = "redis", + redis_host = REDIS_HOST, + redis_port = config.redis_port, + redis_username = REDIS_USER_INVALID, + redis_password = REDIS_PASSWORD, + redis_database = REDIS_DB_4, -- ensure to not get a pooled authenticated connection by using a different db + redis_ssl = config.redis_ssl, + redis_ssl_verify = config.redis_ssl_verify, + redis_server_name = config.redis_server_name, + fault_tolerant = false, + redis_timeout = 10000, + }, + }) + end + + + assert(helpers.start_kong({ + nginx_conf = "spec/fixtures/custom_nginx.template", + lua_ssl_trusted_certificate = config.lua_ssl_trusted_certificate, + })) + client = helpers.proxy_client() + end) + 
+ lazy_teardown(function() + helpers.stop_kong() + if red_version >= version("6.0.0") then + remove_redis_user(red) + end + end) + + it("connection pool respects database setting", function() + assert(red:select(REDIS_DB_1)) + local size_1 = assert(red:dbsize()) - assert(helpers.start_kong({ - nginx_conf = "spec/fixtures/custom_nginx.template", - lua_ssl_trusted_certificate = config.lua_ssl_trusted_certificate, - })) - client = helpers.proxy_client() - end) + assert(red:select(REDIS_DB_2)) + local size_2 = assert(red:dbsize()) - lazy_teardown(function() - helpers.stop_kong() - if red_version >= version("6.0.0") then - remove_redis_user(red) - end - end) + assert.equal(0, tonumber(size_1)) + assert.equal(0, tonumber(size_2)) + if red_version >= version("6.0.0") then + assert(red:select(REDIS_DB_3)) + local size_3 = assert(red:dbsize()) + assert.equal(0, tonumber(size_3)) + end - it("connection pool respects database setting", function() - assert(red:select(REDIS_DB_1)) - local size_1 = assert(red:dbsize()) - - assert(red:select(REDIS_DB_2)) - local size_2 = assert(red:dbsize()) - - assert.equal(0, tonumber(size_1)) - assert.equal(0, tonumber(size_2)) - if red_version >= version("6.0.0") then - assert(red:select(REDIS_DB_3)) - local size_3 = assert(red:dbsize()) - assert.equal(0, tonumber(size_3)) - end - - local res = assert(client:send { - method = "GET", - path = "/status/200", - headers = { - ["Host"] = "redistest1.com" - } - }) - assert.res_status(200, res) - - -- Wait for async timer to increment the limit - - ngx.sleep(SLEEP_TIME) - - assert(red:select(REDIS_DB_1)) - size_1 = assert(red:dbsize()) - - assert(red:select(REDIS_DB_2)) - size_2 = assert(red:dbsize()) - - -- TEST: DB 1 should now have one hit, DB 2 and 3 none - - assert.equal(1, tonumber(size_1)) - assert.equal(0, tonumber(size_2)) - if red_version >= version("6.0.0") then - assert(red:select(REDIS_DB_3)) - local size_3 = assert(red:dbsize()) - assert.equal(0, tonumber(size_3)) - end - - -- rate-limiting plugin will reuses the redis connection - local res = assert(client:send { - method = "GET", - path = "/status/200", - headers = { - ["Host"] = "redistest2.com" - } - }) - assert.res_status(200, res) - - -- Wait for async timer to increment the limit - - ngx.sleep(SLEEP_TIME) - - assert(red:select(REDIS_DB_1)) - size_1 = assert(red:dbsize()) - - assert(red:select(REDIS_DB_2)) - size_2 = assert(red:dbsize()) - - -- TEST: DB 1 and 2 should now have one hit, DB 3 none - - assert.equal(1, tonumber(size_1)) - assert.equal(1, tonumber(size_2)) - if red_version >= version("6.0.0") then - assert(red:select(REDIS_DB_3)) - local size_3 = assert(red:dbsize()) - assert.equal(0, tonumber(size_3)) - end - - if red_version >= version("6.0.0") then - -- rate-limiting plugin will reuses the redis connection local res = assert(client:send { method = "GET", path = "/status/200", headers = { - ["Host"] = "redistest3.com" + ["Host"] = "redistest1.com" } }) assert.res_status(200, res) @@ -305,52 +248,113 @@ describe("Plugin: rate-limiting (integration)", function() assert(red:select(REDIS_DB_2)) size_2 = assert(red:dbsize()) - assert(red:select(REDIS_DB_3)) - local size_3 = assert(red:dbsize()) + -- TEST: DB 1 should now have one hit, DB 2 and 3 none - -- TEST: All DBs should now have one hit, because the - -- plugin correctly chose to select the database it is - -- configured to hit + assert.equal(1, tonumber(size_1)) + assert.equal(0, tonumber(size_2)) + if red_version >= version("6.0.0") then + assert(red:select(REDIS_DB_3)) + local size_3 = 
assert(red:dbsize()) + assert.equal(0, tonumber(size_3)) + end - assert.is_true(tonumber(size_1) > 0) - assert.is_true(tonumber(size_2) > 0) - assert.is_true(tonumber(size_3) > 0) - end - end) - - it("authenticates and executes with a valid redis user having proper ACLs", function() - if red_version >= version("6.0.0") then + -- rate-limiting plugin will reuses the redis connection local res = assert(client:send { method = "GET", path = "/status/200", headers = { - ["Host"] = "redistest3.com" + ["Host"] = "redistest2.com" } }) assert.res_status(200, res) - else - ngx.log(ngx.WARN, "Redis v" .. tostring(red_version) .. " does not support ACL functions " .. - "'authenticates and executes with a valid redis user having proper ACLs' will be skipped") - end - end) - it("fails to rate-limit for a redis user with missing ACLs", function() - if red_version >= version("6.0.0") then - local res = assert(client:send { - method = "GET", - path = "/status/200", - headers = { - ["Host"] = "redistest4.com" - } - }) - assert.res_status(500, res) - else - ngx.log(ngx.WARN, "Redis v" .. tostring(red_version) .. " does not support ACL functions " .. - "'fails to rate-limit for a redis user with missing ACLs' will be skipped") - end - end) + -- Wait for async timer to increment the limit - end) - end + ngx.sleep(SLEEP_TIME) + + assert(red:select(REDIS_DB_1)) + size_1 = assert(red:dbsize()) + + assert(red:select(REDIS_DB_2)) + size_2 = assert(red:dbsize()) + -- TEST: DB 1 and 2 should now have one hit, DB 3 none + + assert.equal(1, tonumber(size_1)) + assert.equal(1, tonumber(size_2)) + if red_version >= version("6.0.0") then + assert(red:select(REDIS_DB_3)) + local size_3 = assert(red:dbsize()) + assert.equal(0, tonumber(size_3)) + end + + if red_version >= version("6.0.0") then + -- rate-limiting plugin will reuses the redis connection + local res = assert(client:send { + method = "GET", + path = "/status/200", + headers = { + ["Host"] = "redistest3.com" + } + }) + assert.res_status(200, res) + + -- Wait for async timer to increment the limit + + ngx.sleep(SLEEP_TIME) + + assert(red:select(REDIS_DB_1)) + size_1 = assert(red:dbsize()) + + assert(red:select(REDIS_DB_2)) + size_2 = assert(red:dbsize()) + + assert(red:select(REDIS_DB_3)) + local size_3 = assert(red:dbsize()) + + -- TEST: All DBs should now have one hit, because the + -- plugin correctly chose to select the database it is + -- configured to hit + + assert.is_true(tonumber(size_1) > 0) + assert.is_true(tonumber(size_2) > 0) + assert.is_true(tonumber(size_3) > 0) + end + end) + + it("authenticates and executes with a valid redis user having proper ACLs", function() + if red_version >= version("6.0.0") then + local res = assert(client:send { + method = "GET", + path = "/status/200", + headers = { + ["Host"] = "redistest3.com" + } + }) + assert.res_status(200, res) + else + ngx.log(ngx.WARN, "Redis v" .. tostring(red_version) .. " does not support ACL functions " .. + "'authenticates and executes with a valid redis user having proper ACLs' will be skipped") + end + end) + + it("fails to rate-limit for a redis user with missing ACLs", function() + if red_version >= version("6.0.0") then + local res = assert(client:send { + method = "GET", + path = "/status/200", + headers = { + ["Host"] = "redistest4.com" + } + }) + assert.res_status(500, res) + else + ngx.log(ngx.WARN, "Redis v" .. tostring(red_version) .. " does not support ACL functions " .. 
+ "'fails to rate-limit for a redis user with missing ACLs' will be skipped") + end + end) + + end) + end + end end) From cfc478bb5a2d054d1125fbe29263860b97f32f7f Mon Sep 17 00:00:00 2001 From: Chrono Date: Fri, 17 Nov 2023 18:00:59 +0800 Subject: [PATCH 136/249] chore(deps): bump lua-resty-lmdb to 1.4.0 (#12043) --- .requirements | 2 +- changelog/unreleased/kong/bump-lua-resty-lmdb-1.4.0.yml | 3 +++ 2 files changed, 4 insertions(+), 1 deletion(-) create mode 100644 changelog/unreleased/kong/bump-lua-resty-lmdb-1.4.0.yml diff --git a/.requirements b/.requirements index 42b0dbef5154..0c18973a4b66 100644 --- a/.requirements +++ b/.requirements @@ -7,7 +7,7 @@ PCRE=8.45 LIBEXPAT=2.5.0 LUA_KONG_NGINX_MODULE=4fbc3ddc7dcbc706ed286b95344f3cb6da17e637 # 0.8.0 -LUA_RESTY_LMDB=951926f20b674a0622236a0e331b359df1c02d9b # 1.3.0 +LUA_RESTY_LMDB=d236fc5ba339897e8f2c6ada1c1b4ab9311feee8 # 1.4.0 LUA_RESTY_EVENTS=8448a92cec36ac04ea522e78f6496ba03c9b1fd8 # 0.2.0 LUA_RESTY_WEBSOCKET=60eafc3d7153bceb16e6327074e0afc3d94b1316 # 0.4.0 ATC_ROUTER=7a2ad42d4246598ba1f753b6ae79cb1456040afa # 1.3.1 diff --git a/changelog/unreleased/kong/bump-lua-resty-lmdb-1.4.0.yml b/changelog/unreleased/kong/bump-lua-resty-lmdb-1.4.0.yml new file mode 100644 index 000000000000..ea9b62f3d999 --- /dev/null +++ b/changelog/unreleased/kong/bump-lua-resty-lmdb-1.4.0.yml @@ -0,0 +1,3 @@ +message: Bumped lua-resty-lmdb from 1.3.0 to 1.4.0 +type: dependency +scope: Core From f36bd0a12c5d384d06ac77346e8a85f8540c979b Mon Sep 17 00:00:00 2001 From: Niklaus Schen <8458369+Water-Melon@users.noreply.github.com> Date: Fri, 17 Nov 2023 19:29:00 +0800 Subject: [PATCH 137/249] refactor(tools): move function validate_labels from tools.utils to conf_loader (#12051) KAG-3094 --- kong/conf_loader/init.lua | 61 +++++++++++++++++++++++++++++++++++++-- kong/tools/utils.lua | 58 ------------------------------------- 2 files changed, 59 insertions(+), 60 deletions(-) diff --git a/kong/conf_loader/init.lua b/kong/conf_loader/init.lua index 29ac8d52a2f4..92a9f05e9464 100644 --- a/kong/conf_loader/init.lua +++ b/kong/conf_loader/init.lua @@ -22,6 +22,7 @@ local env = require "kong.cmd.utils.env" local ffi = require "ffi" +local re_match = ngx.re.match local fmt = string.format local sub = string.sub local type = type @@ -727,7 +728,7 @@ end local function check_dynamic_module(mod_name) local configure_line = ngx.config.nginx_configure() local mod_re = [[^.*--add-dynamic-module=(.+\/]] .. mod_name .. [[(\s|$)).*$]] - return ngx.re.match(configure_line, mod_re, "oi") ~= nil + return re_match(configure_line, mod_re, "oi") ~= nil end @@ -771,6 +772,62 @@ local function validate_wasm(conf) return true end +local validate_labels +do + local MAX_KEY_SIZE = 63 + local MAX_VALUE_SIZE = 63 + local MAX_KEYS_COUNT = 10 + + + -- validation rules based on Kong Labels AIP + -- https://kong-aip.netlify.app/aip/129/ + local BASE_PTRN = "[a-z0-9]([\\w\\.:-]*[a-z0-9]|)$" + local KEY_PTRN = "(?!kong)(?!konnect)(?!insomnia)(?!mesh)(?!kic)" .. BASE_PTRN + local VAL_PTRN = BASE_PTRN + + + local function validate_entry(str, max_size, pattern) + if str == "" or #str > max_size then + return nil, fmt( + "%s must have between 1 and %d characters", str, max_size) + end + if not re_match(str, pattern, "ajoi") then + return nil, fmt("%s is invalid. Must match pattern: %s", str, pattern) + end + return true + end + + + -- Validates a label array. 
+ -- Validates labels based on the kong Labels AIP + function validate_labels(raw_labels) + local nkeys = require "table.nkeys" + if nkeys(raw_labels) > MAX_KEYS_COUNT then + return nil, fmt( + "labels validation failed: count exceeded %d max elements", + MAX_KEYS_COUNT + ) + end + + for _, kv in ipairs(raw_labels) do + local del = kv:find(":", 1, true) + local k = del and kv:sub(1, del - 1) or "" + local v = del and kv:sub(del + 1) or "" + + local ok, err = validate_entry(k, MAX_KEY_SIZE, KEY_PTRN) + if not ok then + return nil, "label key validation failed: " .. err + end + ok, err = validate_entry(v, MAX_VALUE_SIZE, VAL_PTRN) + if not ok then + return nil, "label value validation failed: " .. err + end + end + + return true + end +end + -- Validate properties (type/enum/custom) and infer their type. -- @param[type=table] conf The configuration table to treat. @@ -1291,7 +1348,7 @@ local function check_and_parse(conf, opts) end if conf.cluster_dp_labels then - local _, err = utils.validate_labels(conf.cluster_dp_labels) + local _, err = validate_labels(conf.cluster_dp_labels) if err then errors[#errors + 1] = err end diff --git a/kong/tools/utils.lua b/kong/tools/utils.lua index 0d67b241a420..0b38d0dab5b7 100644 --- a/kong/tools/utils.lua +++ b/kong/tools/utils.lua @@ -11,69 +11,11 @@ local pairs = pairs local ipairs = ipairs local require = require -local fmt = string.format -local re_match = ngx.re.match local _M = {} -local validate_labels -do - local nkeys = require "table.nkeys" - - local MAX_KEY_SIZE = 63 - local MAX_VALUE_SIZE = 63 - local MAX_KEYS_COUNT = 10 - - -- validation rules based on Kong Labels AIP - -- https://kong-aip.netlify.app/aip/129/ - local BASE_PTRN = "[a-z0-9]([\\w\\.:-]*[a-z0-9]|)$" - local KEY_PTRN = "(?!kong)(?!konnect)(?!insomnia)(?!mesh)(?!kic)" .. BASE_PTRN - local VAL_PTRN = BASE_PTRN - - local function validate_entry(str, max_size, pattern) - if str == "" or #str > max_size then - return nil, fmt( - "%s must have between 1 and %d characters", str, max_size) - end - if not re_match(str, pattern, "ajoi") then - return nil, fmt("%s is invalid. Must match pattern: %s", str, pattern) - end - return true - end - - -- Validates a label array. - -- Validates labels based on the kong Labels AIP - function validate_labels(raw_labels) - if nkeys(raw_labels) > MAX_KEYS_COUNT then - return nil, fmt( - "labels validation failed: count exceeded %d max elements", - MAX_KEYS_COUNT - ) - end - - for _, kv in ipairs(raw_labels) do - local del = kv:find(":", 1, true) - local k = del and kv:sub(1, del - 1) or "" - local v = del and kv:sub(del + 1) or "" - - local ok, err = validate_entry(k, MAX_KEY_SIZE, KEY_PTRN) - if not ok then - return nil, "label key validation failed: " .. err - end - ok, err = validate_entry(v, MAX_VALUE_SIZE, VAL_PTRN) - if not ok then - return nil, "label value validation failed: " .. err - end - end - - return true - end -end -_M.validate_labels = validate_labels - - do local modules = { "kong.tools.table", From c75c7e0f03d7f2cdbc10d2f5d4862797b7d18fbe Mon Sep 17 00:00:00 2001 From: Wangchong Zhou Date: Mon, 20 Nov 2023 16:13:12 +0800 Subject: [PATCH 138/249] chore(cd): remove trigger of tags to avoid it overwriting release (#12042) Fix #11776 If tag is created after the release workflow_dispatch is finished, it may overwrite existing ubuntu docker image. 
--- .github/workflows/release.yml | 2 -- 1 file changed, 2 deletions(-) diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 64d03425bc52..39507c76f691 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -15,8 +15,6 @@ on: # yamllint disable-line rule:truthy schedule: - cron: '0 0 * * *' push: - tags: - - '**' branches: - master workflow_dispatch: From 67970ea2b03a8b1538c76b1ede0ace05bff294bf Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Hans=20H=C3=BCbner?= Date: Mon, 20 Nov 2023 09:44:36 +0100 Subject: [PATCH 139/249] feat(ci): only re-run failed tests (#11925) * fix(tests): only run failed tests when rerunning * fix(ci): when all tests pass, create empty 'failed' file * fix(ci): scope 'failed tests file' artifact to current workflow run * fix(tests): remove test batch balancing --- .ci/run_tests.sh | 54 +++++++++++++++++----------- .github/workflows/build_and_test.yml | 21 ++++++++++- spec/busted-log-failed.lua | 33 +++++++++++++++++ 3 files changed, 87 insertions(+), 21 deletions(-) create mode 100644 spec/busted-log-failed.lua diff --git a/.ci/run_tests.sh b/.ci/run_tests.sh index bf10d6243975..447936f73ff6 100755 --- a/.ci/run_tests.sh +++ b/.ci/run_tests.sh @@ -4,11 +4,25 @@ set -e function cyan() { echo -e "\033[1;36m$*\033[0m" } + function red() { echo -e "\033[1;31m$*\033[0m" } -export BUSTED_ARGS="--no-k -o htest -v --exclude-tags=flaky,ipv6" +function get_failed { + if [ ! -z "$FAILED_TEST_FILES_FILE" -a -f "$FAILED_TEST_FILES_FILE" ] + then + cat < $FAILED_TEST_FILES_FILE + else + echo "$@" + fi +} + +BUSTED_ARGS="--keep-going -o htest -v --exclude-tags=flaky,ipv6" +if [ ! -z "$FAILED_TEST_FILES_FILE" ] +then + BUSTED_ARGS="--helper=spec/busted-log-failed.lua $BUSTED_ARGS" +fi if [ "$KONG_TEST_DATABASE" == "postgres" ]; then export TEST_CMD="bin/busted $BUSTED_ARGS,off" @@ -29,37 +43,37 @@ else export TEST_CMD="bin/busted $BUSTED_ARGS,postgres,db" fi -if [[ "$KONG_TEST_COVERAGE" = true ]]; then - export TEST_CMD="$TEST_CMD --keep-going" -fi - if [ "$TEST_SUITE" == "integration" ]; then if [[ "$TEST_SPLIT" == first* ]]; then # GitHub Actions, run first batch of integration tests - eval "$TEST_CMD" $(ls -d spec/02-integration/* | sort | grep -v 05-proxy) + files=$(ls -d spec/02-integration/* | sort | grep -v 05-proxy) + files=$(get_failed $files) + eval "$TEST_CMD" $files elif [[ "$TEST_SPLIT" == second* ]]; then # GitHub Actions, run second batch of integration tests # Note that the split here is chosen carefully to result # in a similar run time between the two batches, and should # be adjusted if imbalance become significant in the future - eval "$TEST_CMD" $(ls -d spec/02-integration/* | sort | grep 05-proxy) + files=$(ls -d spec/02-integration/* | sort | grep 05-proxy) + files=$(get_failed $files) + eval "$TEST_CMD" $files else # Non GitHub Actions - eval "$TEST_CMD" spec/02-integration/ + eval "$TEST_CMD" $(get_failed spec/02-integration/) fi fi if [ "$TEST_SUITE" == "dbless" ]; then - eval "$TEST_CMD" spec/02-integration/02-cmd \ - spec/02-integration/05-proxy \ - spec/02-integration/04-admin_api/02-kong_routes_spec.lua \ - spec/02-integration/04-admin_api/15-off_spec.lua \ - spec/02-integration/08-status_api/01-core_routes_spec.lua \ - spec/02-integration/08-status_api/03-readiness_endpoint_spec.lua \ - spec/02-integration/11-dbless \ - spec/02-integration/20-wasm + eval "$TEST_CMD" $(get_failed spec/02-integration/02-cmd \ + spec/02-integration/05-proxy \ + spec/02-integration/04-admin_api/02-kong_routes_spec.lua \ + 
spec/02-integration/04-admin_api/15-off_spec.lua \ + spec/02-integration/08-status_api/01-core_routes_spec.lua \ + spec/02-integration/08-status_api/03-readiness_endpoint_spec.lua \ + spec/02-integration/11-dbless \ + spec/02-integration/20-wasm) fi if [ "$TEST_SUITE" == "plugins" ]; then set +ex @@ -67,18 +81,18 @@ if [ "$TEST_SUITE" == "plugins" ]; then if [[ "$TEST_SPLIT" == first* ]]; then # GitHub Actions, run first batch of plugin tests - PLUGINS=$(ls -d spec/03-plugins/* | head -n22) + PLUGINS=$(get_failed $(ls -d spec/03-plugins/* | head -n22)) elif [[ "$TEST_SPLIT" == second* ]]; then # GitHub Actions, run second batch of plugin tests # Note that the split here is chosen carefully to result # in a similar run time between the two batches, and should # be adjusted if imbalance become significant in the future - PLUGINS=$(ls -d spec/03-plugins/* | tail -n+23) + PLUGINS=$(get_failed $(ls -d spec/03-plugins/* | tail -n+23)) else # Non GitHub Actions - PLUGINS=$(ls -d spec/03-plugins/*) + PLUGINS=$(get_failed $(ls -d spec/03-plugins/*)) fi for p in $PLUGINS; do @@ -91,7 +105,7 @@ if [ "$TEST_SUITE" == "plugins" ]; then $TEST_CMD $p || echo "* $p" >> .failed done - if [[ "$TEST_SPLIT" == second* ]] || [[ "$TEST_SPLIT" != first* ]]; then + if [[ "$TEST_SPLIT" != first* ]]; then cat kong-*.rockspec | grep kong- | grep -v zipkin | grep -v sidecar | grep "~" | grep -v kong-prometheus-plugin | while read line ; do REPOSITORY=`echo $line | sed "s/\"/ /g" | awk -F" " '{print $1}'` VERSION=`luarocks show $REPOSITORY | grep $REPOSITORY | head -1 | awk -F" " '{print $2}' | cut -f1 -d"-"` diff --git a/.github/workflows/build_and_test.yml b/.github/workflows/build_and_test.yml index a3e98af0eea8..8b3c77ccf375 100644 --- a/.github/workflows/build_and_test.yml +++ b/.github/workflows/build_and_test.yml @@ -127,7 +127,7 @@ jobs: fail-fast: false matrix: suite: [integration, plugins] - split: [first (01-04), second (>= 05)] + split: [first, second] services: postgres: @@ -231,6 +231,17 @@ jobs: # arm64 runners may use more connections due to more worker cores psql -hlocalhost -Ukong kong -tAc 'alter system set max_connections = 5000;' + - name: Generate test rerun filename + run: | + echo FAILED_TEST_FILES_FILE=$(echo '${{ github.run_id }}-${{ matrix.suite }}-${{ matrix.split }}' | tr A-Z a-z | sed -Ee 's/[^a-z0-9]+/-/g').txt >> $GITHUB_ENV + + + - name: Download test rerun information + uses: actions/download-artifact@v3 + continue-on-error: true + with: + name: ${{ env.FAILED_TEST_FILES_FILE }} + - name: Tests env: KONG_TEST_PG_DATABASE: kong @@ -246,6 +257,14 @@ jobs: source ${{ env.BUILD_ROOT }}/kong-dev-venv.sh .ci/run_tests.sh + - name: Upload test rerun information + if: always() + uses: actions/upload-artifact@v3 + with: + name: ${{ env.FAILED_TEST_FILES_FILE }} + path: ${{ env.FAILED_TEST_FILES_FILE }} + retention-days: 2 + - name: Archive coverage stats file uses: actions/upload-artifact@v3 if: ${{ always() && (inputs.coverage == true || github.event_name == 'schedule') }} diff --git a/spec/busted-log-failed.lua b/spec/busted-log-failed.lua new file mode 100644 index 000000000000..7bfe6804b83f --- /dev/null +++ b/spec/busted-log-failed.lua @@ -0,0 +1,33 @@ +-- busted-log-failed.lua + +-- Log which test files run by busted had failures or errors in a +-- file. The file to use for logging is specified in the +-- FAILED_TEST_FILES_FILE environment variable. This is used to +-- reduce test rerun times for flaky tests. 
+ +local busted = require 'busted' +local failed_files_file = assert(os.getenv("FAILED_TEST_FILES_FILE"), + "FAILED_TEST_FILES_FILE environment variable not set") + +local FAILED_FILES = {} + +busted.subscribe({ 'failure' }, function(element, parent, message, debug) + FAILED_FILES[element.trace.source] = true +end) + +busted.subscribe({ 'error' }, function(element, parent, message, debug) + FAILED_FILES[element.trace.source] = true +end) + +busted.subscribe({ 'suite', 'end' }, function(suite, count, total) + local output = assert(io.open(failed_files_file, "w")) + if next(FAILED_FILES) then + for failed_file in pairs(FAILED_FILES) do + if failed_file:sub(1, 1) == '@' then + failed_file = failed_file:sub(2) + end + assert(output:write(failed_file .. "\n")) + end + end + output:close() +end) From aed8c0572b064e7f0e26879f8adff7b2c355cdac Mon Sep 17 00:00:00 2001 From: Xiaoyan Rao <270668624@qq.com> Date: Tue, 21 Nov 2023 11:25:26 +0800 Subject: [PATCH 140/249] fix(kconfig): remove kong version and edition from kconfig.js (#12045) --- kong/admin_gui/init.lua | 3 --- spec/01-unit/29-admin_gui/02-admin_gui_template_spec.lua | 4 ---- spec/02-integration/17-admin_gui/01-admin-gui-path_spec.lua | 2 -- 3 files changed, 9 deletions(-) diff --git a/kong/admin_gui/init.lua b/kong/admin_gui/init.lua index 02d3b038a3cc..4186f4f966b5 100644 --- a/kong/admin_gui/init.lua +++ b/kong/admin_gui/init.lua @@ -1,4 +1,3 @@ -local meta = require "kong.meta" local utils = require "kong.admin_gui.utils" local _M = {} @@ -15,8 +14,6 @@ function _M.generate_kconfig(kong_config) ADMIN_API_URL = utils.prepare_variable(kong_config.admin_gui_api_url), ADMIN_API_PORT = utils.prepare_variable(api_port), ADMIN_API_SSL_PORT = utils.prepare_variable(api_ssl_port), - KONG_VERSION = utils.prepare_variable(meta.version), - KONG_EDITION = meta._VERSION:match("enterprise") and "enterprise" or "community", ANONYMOUS_REPORTS = utils.prepare_variable(kong_config.anonymous_reports), } diff --git a/spec/01-unit/29-admin_gui/02-admin_gui_template_spec.lua b/spec/01-unit/29-admin_gui/02-admin_gui_template_spec.lua index 67c95bdbaa30..6a262eee2492 100644 --- a/spec/01-unit/29-admin_gui/02-admin_gui_template_spec.lua +++ b/spec/01-unit/29-admin_gui/02-admin_gui_template_spec.lua @@ -68,7 +68,6 @@ describe("admin_gui template", function() assert.matches("'ADMIN_API_URL': 'https://admin-reference.kong-cloud.com'", kconfig_content, nil, true) assert.matches("'ADMIN_API_PORT': '8001'", kconfig_content, nil, true) assert.matches("'ADMIN_API_SSL_PORT': '8444'", kconfig_content, nil, true) - assert.matches("'KONG_EDITION': 'community'", kconfig_content, nil, true) end) it("should regenerates the appropriate kconfig from another call", function() @@ -88,7 +87,6 @@ describe("admin_gui template", function() assert.matches("'ADMIN_API_URL': 'http://localhost:8001'", new_content, nil, true) assert.matches("'ADMIN_API_PORT': '8001'", new_content, nil, true) assert.matches("'ADMIN_API_SSL_PORT': '8444'", new_content, nil, true) - assert.matches("'KONG_EDITION': 'community'", new_content, nil, true) end) end) @@ -151,7 +149,6 @@ describe("admin_gui template", function() assert.matches("'ADMIN_API_PORT': '8001'", kconfig_content, nil, true) assert.matches("'ADMIN_API_SSL_PORT': '8444'", kconfig_content, nil, true) assert.matches("'ANONYMOUS_REPORTS': 'false'", kconfig_content, nil, true) - assert.matches("'KONG_EDITION': 'community'", kconfig_content, nil, true) end) it("should regenerates the appropriate kconfig from another call", function() @@ -170,7 
+167,6 @@ describe("admin_gui template", function() assert.matches("'ADMIN_API_PORT': '8001'", new_content, nil, true) assert.matches("'ADMIN_API_SSL_PORT': '8444'", new_content, nil, true) assert.matches("'ANONYMOUS_REPORTS': 'true'", new_content, nil, true) - assert.matches("'KONG_EDITION': 'community'", new_content, nil, true) end) end) diff --git a/spec/02-integration/17-admin_gui/01-admin-gui-path_spec.lua b/spec/02-integration/17-admin_gui/01-admin-gui-path_spec.lua index f6a458cd6b42..90a1096ff9e5 100644 --- a/spec/02-integration/17-admin_gui/01-admin-gui-path_spec.lua +++ b/spec/02-integration/17-admin_gui/01-admin-gui-path_spec.lua @@ -49,7 +49,6 @@ describe("Admin GUI - admin_gui_path", function() path = "/kconfig.js", }) res = assert.res_status(200, res) - assert.matches("'KONG_VERSION': '", res) assert.matches("'ADMIN_GUI_PATH': '/'", res, nil, true) end) @@ -116,7 +115,6 @@ describe("Admin GUI - admin_gui_path", function() path = "/manager/kconfig.js", }) res = assert.res_status(200, res) - assert.matches("'KONG_VERSION': '", res) assert.matches("'ADMIN_GUI_PATH': '/manager'", res, nil, true) end) end) From 25e0ee731c7b6cb3e5b1ab9b46d2d6f3cc7160a0 Mon Sep 17 00:00:00 2001 From: Aapo Talvensaari Date: Tue, 21 Nov 2023 12:42:28 +0200 Subject: [PATCH 141/249] chore(deps): bump kong-lapis from 1.14.0.3 to 1.16.0.1 (#12064) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit ### Summary #### v1.16.0 – November 2 2023 ##### Additions - lapis.validate.types — Add types.params_map validation type, the params compatible variant of types.map_of ##### Changes - model:update will now only assign the update object to the model instnance if the update completes successfully - model:update support the returns option to control the RETURNING clause of the generated query - model:update when timestamps are enabled, the generated updated_at value is assigned to the model instance ##### Fixes - lapis.validate.types — Fix bug where types.params_shape would not return the state object - model:update will avoid storing db.raw values on passed as update object to the model instance if the update does not complmete successfully #### v1.15.0 – October 6 2023 ##### Additions - Model:include_in can now use computed keys to dynamically calculate a foreign key value by applying a function to each passed in object to load. This can be done by specifying a function instead of a field name when defining the column mapping table - Relations can use compured keys where appropriate by passing a function instead of a field name when defining the column mapping table - lapis.validate.types — add types.params_array for validating an array of objects with a common shape - lapis.validate.types — add types.flatten_errors for error output compatibility with tableshape - lapis.validate.types — types.params_shape can now accept numerical names for fields for validating array like objects with a fixed number of entries - lapis generate — Rockspec generator can now specify --moonscript and --cqueues to automatically append dependencies - lapis migrate — Add the --dry-run flag to to run all pending migrations in a transaction that is never commited. 
(Note: in some databases, there are queries that can not be rolled back) ##### Misc - Various updates to documentation - Fix error message for types.truncated_text Signed-off-by: Aapo Talvensaari --- changelog/unreleased/kong/bump-lapis-1.16.0.1.yml | 3 +++ kong-3.6.0-0.rockspec | 2 +- 2 files changed, 4 insertions(+), 1 deletion(-) create mode 100644 changelog/unreleased/kong/bump-lapis-1.16.0.1.yml diff --git a/changelog/unreleased/kong/bump-lapis-1.16.0.1.yml b/changelog/unreleased/kong/bump-lapis-1.16.0.1.yml new file mode 100644 index 000000000000..51e94fe26879 --- /dev/null +++ b/changelog/unreleased/kong/bump-lapis-1.16.0.1.yml @@ -0,0 +1,3 @@ +message: "Bumped kong-lapis from 1.14.0.3 to 1.16.0.1" +type: dependency +scope: Core diff --git a/kong-3.6.0-0.rockspec b/kong-3.6.0-0.rockspec index 1453b8b11479..7e9aa4deac5f 100644 --- a/kong-3.6.0-0.rockspec +++ b/kong-3.6.0-0.rockspec @@ -21,7 +21,7 @@ dependencies = { "lua-ffi-zlib == 0.6", "multipart == 0.5.9", "version == 1.0.1", - "kong-lapis == 1.14.0.3", + "kong-lapis == 1.16.0.1", "kong-pgmoon == 1.16.2", "luatz == 0.4", "lua_system_constants == 0.1.4", From 0485a76276da23064c593326178c6b04fb6ee117 Mon Sep 17 00:00:00 2001 From: Joshua Schmid Date: Fri, 10 Nov 2023 15:11:29 +0100 Subject: [PATCH 142/249] chore: improve cherry-picking process Signed-off-by: Joshua Schmid --- .github/workflows/cherry-picks.yml | 41 ++++++++++++++++++++++++++++++ 1 file changed, 41 insertions(+) create mode 100644 .github/workflows/cherry-picks.yml diff --git a/.github/workflows/cherry-picks.yml b/.github/workflows/cherry-picks.yml new file mode 100644 index 000000000000..6383c1d5fd6a --- /dev/null +++ b/.github/workflows/cherry-picks.yml @@ -0,0 +1,41 @@ +name: Cherry Pick to remote repository +on: + pull_request_target: + types: [closed, labeled] + issue_comment: + types: [created] +jobs: + cross-repo-cherrypick: + name: Cherry pick to remote repository + runs-on: ubuntu-latest + # Only run when pull request is merged, or labeled + # or when a comment containing `/cherry-pick` is created + # and the author is a member, collaborator or owner + if: > + ( + github.event_name == 'pull_request_target' && + github.event.pull_request.merged + ) || ( + github.event_name == 'issue_comment' && + github.event.issue.pull_request && + contains(fromJSON('["MEMBER", "COLLABORATOR", "OWNER"]'), github.event.comment.author_association) && + contains(github.event.comment.body, '/cherry-pick') + ) + steps: + - uses: actions/checkout@v4 + with: + token: ${{ secrets.CHERRY_PICK_TOKEN }} + - name: Create backport pull requests + uses: jschmid1/cross-repo-cherrypick-action@2366f50fd85e8966aa024a4dd6fbf70e7019d7e1 + with: + token: ${{ secrets.CHERRY_PICK_TOKEN }} + pull_title: '[cherry-pick -> ${target_branch}] ${pull_title}' + merge_commits: 'skip' + trigger_label: 'cherry-pick kong-ee' # trigger based on this label + pull_description: |- + Automated cherry-pick to `${target_branch}`, triggered by a label in https://github.com/${owner}/${repo}/pull/${pull_number} :robot:. 
+ upstream_repo: 'kong/kong-ee' + branch_map: |- + { + "master": "master" + } From 516210b2176dbfcd240059ec670ffefa6f687067 Mon Sep 17 00:00:00 2001 From: Aapo Talvensaari Date: Wed, 22 Nov 2023 11:24:06 +0200 Subject: [PATCH 143/249] chore(deps): bump lua-messagepack from 0.5.3 to 0.5.4 (#12076) ### Summary - improve speed (map) Signed-off-by: Aapo Talvensaari --- changelog/unreleased/kong/bump-lua-messagepack-0.5.4.yml | 3 +++ kong-3.6.0-0.rockspec | 2 +- 2 files changed, 4 insertions(+), 1 deletion(-) create mode 100644 changelog/unreleased/kong/bump-lua-messagepack-0.5.4.yml diff --git a/changelog/unreleased/kong/bump-lua-messagepack-0.5.4.yml b/changelog/unreleased/kong/bump-lua-messagepack-0.5.4.yml new file mode 100644 index 000000000000..312351789cfc --- /dev/null +++ b/changelog/unreleased/kong/bump-lua-messagepack-0.5.4.yml @@ -0,0 +1,3 @@ +message: "Bumped lua-messagepack from 0.5.3 to 0.5.4" +type: dependency +scope: Core diff --git a/kong-3.6.0-0.rockspec b/kong-3.6.0-0.rockspec index 7e9aa4deac5f..f08b00d014e7 100644 --- a/kong-3.6.0-0.rockspec +++ b/kong-3.6.0-0.rockspec @@ -32,7 +32,7 @@ dependencies = { "luaxxhash >= 1.0", "lua-protobuf == 0.5.0", "lua-resty-healthcheck == 3.0.0", - "lua-messagepack == 0.5.3", + "lua-messagepack == 0.5.4", "lua-resty-aws == 1.3.5", "lua-resty-openssl == 1.0.1", "lua-resty-counter == 0.2.1", From 1c4bfb3ebb0d714edb0b00c74b58a819000f5921 Mon Sep 17 00:00:00 2001 From: Zhefeng C <38037704+catbro666@users.noreply.github.com> Date: Wed, 22 Nov 2023 20:52:42 +0800 Subject: [PATCH 144/249] fix(ca_certificates): invalidate ca store caches when a ca cert is updated and prevent ca_certificates that are still being referenced by other entities from being deleted (#11789) * fix(ca_certificates): invalidate ca store caches when a ca cert is updated and prevent ca_certificates that are still being referenced by other entities from being deleted. Fix [FTI-2060](https://konghq.atlassian.net/browse/FTI-2060) * apply comments * change plugin tables from maps to arrays * fix plugin_name double check * remove `search_fields` for now as it is EE-only * do the iteration and filtering in dao by adding `select_by_ca_certificate` * auto-detect the entities and plugins that reference ca certificates to make it more generic. 
create a custom ca_certificates dao and put the check_ca_reference logic into the `:delete()` method instead of a custom API route * update the schema of ca_certificates * fix: fields in schema is an array and cert_pk is a table * add services:select_by_ca_certificate() tests * fix lint * add custom plugin "reference-ca-cert" and plugins:select_by_ca_certificate() tests * add ca_certificates:delete() tests * Apply suggestions from code review Co-authored-by: Michael Martin * fix typo * remove plugins.lua and services.lua for `off` as they're not currently being used --------- Co-authored-by: Michael Martin --- .../kong/ca_certificates_reference_check.yml | 3 + kong-3.6.0-0.rockspec | 4 + kong/api/endpoints.lua | 1 + kong/db/dao/ca_certificates.lua | 55 ++++ kong/db/dao/plugins.lua | 18 ++ kong/db/dao/services.lua | 16 + kong/db/errors.lua | 11 + kong/db/schema/entities/ca_certificates.lua | 1 + kong/db/schema/entities/services.lua | 1 + kong/db/strategies/postgres/plugins.lua | 39 +++ kong/db/strategies/postgres/services.lua | 20 ++ kong/runloop/certificate.lua | 99 ++++++ kong/runloop/events.lua | 53 ++++ spec/02-integration/03-db/03-plugins_spec.lua | 296 +++++++++++++++++- .../02-integration/03-db/21-services_spec.lua | 215 +++++++++++++ .../03-db/22-ca_certificates_spec.lua | 145 +++++++++ .../16-ca_certificates_routes_spec.lua | 27 ++ .../05-proxy/18-upstream_tls_spec.lua | 178 ++++++++++- .../plugins/reference-ca-cert/handler.lua | 6 + .../kong/plugins/reference-ca-cert/schema.lua | 15 + 20 files changed, 1189 insertions(+), 14 deletions(-) create mode 100644 changelog/unreleased/kong/ca_certificates_reference_check.yml create mode 100644 kong/db/dao/ca_certificates.lua create mode 100644 kong/db/dao/services.lua create mode 100644 kong/db/strategies/postgres/plugins.lua create mode 100644 kong/db/strategies/postgres/services.lua create mode 100644 spec/02-integration/03-db/21-services_spec.lua create mode 100644 spec/02-integration/03-db/22-ca_certificates_spec.lua create mode 100644 spec/fixtures/custom_plugins/kong/plugins/reference-ca-cert/handler.lua create mode 100644 spec/fixtures/custom_plugins/kong/plugins/reference-ca-cert/schema.lua diff --git a/changelog/unreleased/kong/ca_certificates_reference_check.yml b/changelog/unreleased/kong/ca_certificates_reference_check.yml new file mode 100644 index 000000000000..3ac9d8a3aab5 --- /dev/null +++ b/changelog/unreleased/kong/ca_certificates_reference_check.yml @@ -0,0 +1,3 @@ +message: prevent ca to be deleted when it's still referenced by other entities and invalidate the related ca store caches when a ca cert is updated. 
+type: bugfix +scope: Core diff --git a/kong-3.6.0-0.rockspec b/kong-3.6.0-0.rockspec index f08b00d014e7..1617e7ff99e5 100644 --- a/kong-3.6.0-0.rockspec +++ b/kong-3.6.0-0.rockspec @@ -212,6 +212,8 @@ build = { ["kong.db.dao.tags"] = "kong/db/dao/tags.lua", ["kong.db.dao.vaults"] = "kong/db/dao/vaults.lua", ["kong.db.dao.workspaces"] = "kong/db/dao/workspaces.lua", + ["kong.db.dao.services"] = "kong/db/dao/services.lua", + ["kong.db.dao.ca_certificates"] = "kong/db/dao/ca_certificates.lua", ["kong.db.declarative"] = "kong/db/declarative/init.lua", ["kong.db.declarative.marshaller"] = "kong/db/declarative/marshaller.lua", ["kong.db.declarative.export"] = "kong/db/declarative/export.lua", @@ -251,6 +253,8 @@ build = { ["kong.db.strategies.postgres"] = "kong/db/strategies/postgres/init.lua", ["kong.db.strategies.postgres.connector"] = "kong/db/strategies/postgres/connector.lua", ["kong.db.strategies.postgres.tags"] = "kong/db/strategies/postgres/tags.lua", + ["kong.db.strategies.postgres.services"] = "kong/db/strategies/postgres/services.lua", + ["kong.db.strategies.postgres.plugins"] = "kong/db/strategies/postgres/plugins.lua", ["kong.db.strategies.off"] = "kong/db/strategies/off/init.lua", ["kong.db.strategies.off.connector"] = "kong/db/strategies/off/connector.lua", ["kong.db.strategies.off.tags"] = "kong/db/strategies/off/tags.lua", diff --git a/kong/api/endpoints.lua b/kong/api/endpoints.lua index 0ca7dbe8ccc1..eb995a357b76 100644 --- a/kong/api/endpoints.lua +++ b/kong/api/endpoints.lua @@ -35,6 +35,7 @@ local ERRORS_HTTP_CODES = { [Errors.codes.INVALID_OPTIONS] = 400, [Errors.codes.OPERATION_UNSUPPORTED] = 405, [Errors.codes.FOREIGN_KEYS_UNRESOLVED] = 400, + [Errors.codes.REFERENCED_BY_OTHERS] = 400, } local TAGS_AND_REGEX diff --git a/kong/db/dao/ca_certificates.lua b/kong/db/dao/ca_certificates.lua new file mode 100644 index 000000000000..4720b3881b37 --- /dev/null +++ b/kong/db/dao/ca_certificates.lua @@ -0,0 +1,55 @@ +local certificate = require "kong.runloop.certificate" +local fmt = string.format + +local Ca_certificates = {} + +-- returns the first encountered entity element that is referencing the ca cert +-- otherwise, returns nil, err +function Ca_certificates:check_ca_reference(ca_id) + for _, entity in ipairs(certificate.get_ca_certificate_reference_entities()) do + local elements, err = self.db[entity]:select_by_ca_certificate(ca_id, 1) + if err then + local msg = fmt("failed to select %s by ca certificate %s: %s", entity, ca_id, err) + return nil, msg + end + + if type(elements) == "table" and #elements > 0 then + return entity, elements[1] + end + end + + local reference_plugins = certificate.get_ca_certificate_reference_plugins() + if reference_plugins and next(reference_plugins) then + local plugins, err = self.db.plugins:select_by_ca_certificate(ca_id, 1, reference_plugins) + if err then + local msg = fmt("failed to select plugins by ca_certificate %s: %s", ca_id, err) + return nil, msg + end + + if type(plugins) == "table" and #plugins > 0 then + return "plugins", plugins[1] + end + end + + return nil, nil +end + +-- Overrides the default delete function to check the ca reference before deleting +function Ca_certificates:delete(cert_pk, options) + local entity, element_or_err = self:check_ca_reference(cert_pk.id) + if entity then + local msg = fmt("ca certificate %s is still referenced by %s (id = %s)", + cert_pk.id, entity, element_or_err.id) + local err_t = self.errors:referenced_by_others(msg) + return nil, tostring(err_t), err_t + + elseif element_or_err then + 
local err_t = self.errors:database_error(element_or_err) + return nil, tostring(err_t), err_t + end + + return self.super.delete(self, cert_pk, options) +end + + +return Ca_certificates diff --git a/kong/db/dao/plugins.lua b/kong/db/dao/plugins.lua index 8790de32c2ca..58521cc07f84 100644 --- a/kong/db/dao/plugins.lua +++ b/kong/db/dao/plugins.lua @@ -371,5 +371,23 @@ function Plugins:get_handlers() return list end +-- @ca_id: the id of ca certificate to be searched +-- @limit: the maximum number of entities to return (must >= 0) +-- @plugin_names: the plugin names to filter the entities (must be of type table, string or nil) +-- @return an array of the plugin entity +function Plugins:select_by_ca_certificate(ca_id, limit, plugin_names) + local param_type = type(plugin_names) + if param_type ~= "table" and param_type ~= "string" and param_type ~= "nil" then + return nil, "parameter `plugin_names` must be of type table, string, or nil" + end + + local plugins, err = self.strategy:select_by_ca_certificate(ca_id, limit, plugin_names) + if err then + return nil, err + end + + return self:rows_to_entities(plugins), nil +end + return Plugins diff --git a/kong/db/dao/services.lua b/kong/db/dao/services.lua new file mode 100644 index 000000000000..d79c1618e125 --- /dev/null +++ b/kong/db/dao/services.lua @@ -0,0 +1,16 @@ + +local Services = {} + +-- @ca_id: the id of ca certificate to be searched +-- @limit: the maximum number of entities to return (must >= 0) +-- @return an array of the service entity +function Services:select_by_ca_certificate(ca_id, limit) + local services, err = self.strategy:select_by_ca_certificate(ca_id, limit) + if err then + return nil, err + end + + return self:rows_to_entities(services), nil +end + +return Services diff --git a/kong/db/errors.lua b/kong/db/errors.lua index e5c01f3473f5..5a43911741a0 100644 --- a/kong/db/errors.lua +++ b/kong/db/errors.lua @@ -52,6 +52,7 @@ local ERRORS = { INVALID_FOREIGN_KEY = 16, -- foreign key is valid for matching a row INVALID_WORKSPACE = 17, -- strategy reports a workspace error INVALID_UNIQUE_GLOBAL = 18, -- unique field value is invalid for global query + REFERENCED_BY_OTHERS = 19, -- still referenced by other entities } @@ -77,6 +78,7 @@ local ERRORS_NAMES = { [ERRORS.INVALID_FOREIGN_KEY] = "invalid foreign key", [ERRORS.INVALID_WORKSPACE] = "invalid workspace", [ERRORS.INVALID_UNIQUE_GLOBAL] = "invalid global query", + [ERRORS.REFERENCED_BY_OTHERS] = "referenced by others", } @@ -517,6 +519,15 @@ function _M:invalid_unique_global(name) end +function _M:referenced_by_others(err) + if type(err) ~= "string" then + error("err must be a string", 2) + end + + return new_err_t(self, ERRORS.REFERENCED_BY_OTHERS, err) +end + + local flatten_errors do local function singular(noun) diff --git a/kong/db/schema/entities/ca_certificates.lua b/kong/db/schema/entities/ca_certificates.lua index f87cd35722be..212c79dd3cc7 100644 --- a/kong/db/schema/entities/ca_certificates.lua +++ b/kong/db/schema/entities/ca_certificates.lua @@ -11,6 +11,7 @@ local CERT_TAG_LEN = #CERT_TAG return { name = "ca_certificates", primary_key = { "id" }, + dao = "kong.db.dao.ca_certificates", fields = { { id = typedefs.uuid, }, diff --git a/kong/db/schema/entities/services.lua b/kong/db/schema/entities/services.lua index 030eb90c4389..cf2954a36770 100644 --- a/kong/db/schema/entities/services.lua +++ b/kong/db/schema/entities/services.lua @@ -23,6 +23,7 @@ return { primary_key = { "id" }, workspaceable = true, endpoint_key = "name", + dao = "kong.db.dao.services", 
fields = { { id = typedefs.uuid, }, diff --git a/kong/db/strategies/postgres/plugins.lua b/kong/db/strategies/postgres/plugins.lua new file mode 100644 index 000000000000..6a08a4a825fb --- /dev/null +++ b/kong/db/strategies/postgres/plugins.lua @@ -0,0 +1,39 @@ +local kong = kong +local fmt = string.format +local tb_insert = table.insert +local tb_concat = table.concat + +local Plugins = {} + +function Plugins:select_by_ca_certificate(ca_id, limit, plugin_names) + local connector = kong.db.connector + local escape_literal = connector.escape_literal + local limit_condition = "" + if limit then + limit_condition = "LIMIT " .. escape_literal(connector, limit) + end + + local name_condition = "" + local escaped_names = {} + if type(plugin_names) == "string" then + tb_insert(escaped_names, "name = " .. escape_literal(connector, plugin_names)) + elseif type(plugin_names) == "table" then + for name, _ in pairs(plugin_names) do + tb_insert(escaped_names, "name = " .. escape_literal(connector, name)) + end + end + + if #escaped_names > 0 then + name_condition = "AND (" .. tb_concat(escaped_names, " OR ") .. ")" + end + + local qs = fmt( + "SELECT * FROM plugins WHERE config->'ca_certificates' ? %s %s %s;", + escape_literal(connector, ca_id), + name_condition, + limit_condition) + + return connector:query(qs) +end + +return Plugins diff --git a/kong/db/strategies/postgres/services.lua b/kong/db/strategies/postgres/services.lua new file mode 100644 index 000000000000..02393a4249e9 --- /dev/null +++ b/kong/db/strategies/postgres/services.lua @@ -0,0 +1,20 @@ +local kong = kong +local fmt = string.format + +local Services = {} + +function Services:select_by_ca_certificate(ca_id, limit) + local limit_condition = "" + if limit then + limit_condition = "LIMIT " .. kong.db.connector:escape_literal(limit) + end + + local qs = fmt( + "SELECT * FROM services WHERE %s = ANY(ca_certificates) %s;", + kong.db.connector:escape_literal(ca_id), + limit_condition) + + return kong.db.connector:query(qs) +end + +return Services diff --git a/kong/runloop/certificate.lua b/kong/runloop/certificate.lua index 53da6b3d8d35..f52f338ac685 100644 --- a/kong/runloop/certificate.lua +++ b/kong/runloop/certificate.lua @@ -2,6 +2,9 @@ local ngx_ssl = require "ngx.ssl" local pl_utils = require "pl.utils" local mlcache = require "kong.resty.mlcache" local new_tab = require "table.new" +local constants = require "kong.constants" +local utils = require "kong.tools.utils" +local plugin_servers = require "kong.runloop.plugin_servers" local openssl_x509_store = require "resty.openssl.x509.store" local openssl_x509 = require "resty.openssl.x509" @@ -19,6 +22,7 @@ local set_cert = ngx_ssl.set_cert local set_priv_key = ngx_ssl.set_priv_key local tb_concat = table.concat local tb_sort = table.sort +local tb_insert = table.insert local kong = kong local type = type local error = error @@ -371,6 +375,97 @@ local function get_ca_certificate_store(ca_ids) end +local function get_ca_certificate_store_for_plugin(ca_ids) + return kong.cache:get(ca_ids_cache_key(ca_ids), + get_ca_store_opts, fetch_ca_certificates, + ca_ids) +end + + +-- here we assume the field name is always `ca_certificates` +local get_ca_certificate_reference_entities +do + local function is_entity_referencing_ca_certificates(name) + local entity_schema = require("kong.db.schema.entities." .. 
name) + for _, field in ipairs(entity_schema.fields) do + if field.ca_certificates then + return true + end + end + + return false + end + + -- ordinary entities that reference ca certificates + -- For example: services + local CA_CERT_REFERENCE_ENTITIES + get_ca_certificate_reference_entities = function() + if not CA_CERT_REFERENCE_ENTITIES then + CA_CERT_REFERENCE_ENTITIES = {} + for _, entity_name in ipairs(constants.CORE_ENTITIES) do + local res = is_entity_referencing_ca_certificates(entity_name) + if res then + tb_insert(CA_CERT_REFERENCE_ENTITIES, entity_name) + end + end + end + + return CA_CERT_REFERENCE_ENTITIES + end +end + + +-- here we assume the field name is always `ca_certificates` +local get_ca_certificate_reference_plugins +do + local function is_plugin_referencing_ca_certificates(name) + local plugin_schema = "kong.plugins." .. name .. ".schema" + local ok, schema = utils.load_module_if_exists(plugin_schema) + if not ok then + ok, schema = plugin_servers.load_schema(name) + end + + if not ok then + return nil, "no configuration schema found for plugin: " .. name + end + + for _, field in ipairs(schema.fields) do + if field.config then + for _, field in ipairs(field.config.fields) do + if field.ca_certificates then + return true + end + end + end + end + + return false + end + + -- loaded plugins that reference ca certificates + -- For example: mtls-auth + local CA_CERT_REFERENCE_PLUGINS + get_ca_certificate_reference_plugins = function() + if not CA_CERT_REFERENCE_PLUGINS then + CA_CERT_REFERENCE_PLUGINS = {} + local loaded_plugins = kong.configuration.loaded_plugins + for name, v in pairs(loaded_plugins) do + local res, err = is_plugin_referencing_ca_certificates(name) + if err then + return nil, err + end + + if res then + CA_CERT_REFERENCE_PLUGINS[name] = true + end + end + end + + return CA_CERT_REFERENCE_PLUGINS + end +end + + return { init = init, find_certificate = find_certificate, @@ -378,4 +473,8 @@ return { execute = execute, get_certificate = get_certificate, get_ca_certificate_store = get_ca_certificate_store, + get_ca_certificate_store_for_plugin = get_ca_certificate_store_for_plugin, + ca_ids_cache_key = ca_ids_cache_key, + get_ca_certificate_reference_entities = get_ca_certificate_reference_entities, + get_ca_certificate_reference_plugins = get_ca_certificate_reference_plugins, } diff --git a/kong/runloop/events.lua b/kong/runloop/events.lua index 6e6b42c0db37..1b0d177c0bcc 100644 --- a/kong/runloop/events.lua +++ b/kong/runloop/events.lua @@ -319,6 +319,56 @@ local function crud_wasm_handler(data, schema_name) end +local function crud_ca_certificates_handler(data) + if data.operation ~= "update" then + return + end + + log(DEBUG, "[events] CA certificate updated, invalidating ca certificate store caches") + + local ca_id = data.entity.id + + local done_keys = {} + for _, entity in ipairs(certificate.get_ca_certificate_reference_entities()) do + local elements, err = kong.db[entity]:select_by_ca_certificate(ca_id) + if err then + log(ERR, "[events] failed to select ", entity, " by ca certificate ", ca_id, ": ", err) + return + end + + if elements then + for _, e in ipairs(elements) do + local key = certificate.ca_ids_cache_key(e.ca_certificates) + + if not done_keys[key] then + done_keys[key] = true + kong.core_cache:invalidate(key) + end + end + end + end + + local plugin_done_keys = {} + local plugins, err = kong.db.plugins:select_by_ca_certificate(ca_id, nil, + certificate.get_ca_certificate_reference_plugins()) + if err then + log(ERR, "[events] 
failed to select plugins by ca certificate ", ca_id, ": ", err) + return + end + + if plugins then + for _, e in ipairs(plugins) do + local key = certificate.ca_ids_cache_key(e.config.ca_certificates) + + if not plugin_done_keys[key] then + plugin_done_keys[key] = true + kong.cache:invalidate(key) + end + end + end +end + + local LOCAL_HANDLERS = { { "dao:crud", nil , dao_crud_handler }, @@ -338,6 +388,9 @@ local LOCAL_HANDLERS = { { "crud" , "filter_chains" , crud_wasm_handler }, { "crud" , "services" , crud_wasm_handler }, { "crud" , "routes" , crud_wasm_handler }, + + -- ca certificate store caches invalidations + { "crud" , "ca_certificates" , crud_ca_certificates_handler }, } diff --git a/spec/02-integration/03-db/03-plugins_spec.lua b/spec/02-integration/03-db/03-plugins_spec.lua index b844835cac27..febe2e8519d4 100644 --- a/spec/02-integration/03-db/03-plugins_spec.lua +++ b/spec/02-integration/03-db/03-plugins_spec.lua @@ -1,5 +1,72 @@ local helpers = require "spec.helpers" - +local ssl_fixtures = require "spec.fixtures.ssl" + +local ca_cert2 = [[ +-----BEGIN CERTIFICATE----- +MIIEvjCCAqagAwIBAgIJALabx/Nup200MA0GCSqGSIb3DQEBCwUAMBMxETAPBgNV +BAMMCFlvbG80Mi4xMCAXDTE5MDkxNTE2Mjc1M1oYDzIxMTkwODIyMTYyNzUzWjAT +MREwDwYDVQQDDAhZb2xvNDIuMTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoC +ggIBANIW67Ay0AtTeBY2mORaGet/VPL5jnBRz0zkZ4Jt7fEq3lbxYaJBnFI8wtz3 +bHLtLsxkvOFujEMY7HVd+iTqbJ7hLBtK0AdgXDjf+HMmoWM7x0PkZO+3XSqyRBbI +YNoEaQvYBNIXrKKJbXIU6higQaXYszeN8r3+RIbcTIlZxy28msivEGfGTrNujQFc +r/eyf+TLHbRqh0yg4Dy/U/T6fqamGhFrjupRmOMugwF/BHMH2JHhBYkkzuZLgV2u +7Yh1S5FRlh11am5vWuRSbarnx72hkJ99rUb6szOWnJKKew8RSn3CyhXbS5cb0QRc +ugRc33p/fMucJ4mtCJ2Om1QQe83G1iV2IBn6XJuCvYlyWH8XU0gkRxWD7ZQsl0bB +8AFTkVsdzb94OM8Y6tWI5ybS8rwl8b3r3fjyToIWrwK4WDJQuIUx4nUHObDyw+KK ++MmqwpAXQWbNeuAc27FjuJm90yr/163aGuInNY5Wiz6CM8WhFNAi/nkEY2vcxKKx +irSdSTkbnrmLFAYrThaq0BWTbW2mwkOatzv4R2kZzBUOiSjRLPnbyiPhI8dHLeGs +wMxiTXwyPi8iQvaIGyN4DPaSEiZ1GbexyYFdP7sJJD8tG8iccbtJYquq3cDaPTf+ +qv5M6R/JuMqtUDheLSpBNK+8vIe5e3MtGFyrKqFXdynJtfHVAgMBAAGjEzARMA8G +A1UdEwQIMAYBAf8CAQAwDQYJKoZIhvcNAQELBQADggIBAK0BmL5B1fPSMbFy8Hbc +/ESEunt4HGaRWmZZSa/aOtTjhKyDXLLJZz3C4McugfOf9BvvmAOZU4uYjfHTnNH2 +Z3neBkdTpQuJDvrBPNoCtJns01X/nuqFaTK/Tt9ZjAcVeQmp51RwhyiD7nqOJ/7E +Hp2rC6gH2ABXeexws4BDoZPoJktS8fzGWdFBCHzf4mCJcb4XkI+7GTYpglR818L3 +dMNJwXeuUsmxxKScBVH6rgbgcEC/6YwepLMTHB9VcH3X5VCfkDIyPYLWmvE0gKV7 +6OU91E2Rs8PzbJ3EuyQpJLxFUQp8ohv5zaNBlnMb76UJOPR6hXfst5V+e7l5Dgwv +Dh4CeO46exmkEsB+6R3pQR8uOFtubH2snA0S3JA1ji6baP5Y9Wh9bJ5McQUgbAPE +sCRBFoDLXOj3EgzibohC5WrxN3KIMxlQnxPl3VdQvp4gF899mn0Z9V5dAsGPbxRd +quE+DwfXkm0Sa6Ylwqrzu2OvSVgbMliF3UnWbNsDD5KcHGIaFxVC1qkwK4cT3pyS +58i/HAB2+P+O+MltQUDiuw0OSUFDC0IIjkDfxLVffbF+27ef9C5NG81QlwTz7TuN +zeigcsBKooMJTszxCl6dtxSyWTj7hJWXhy9pXsm1C1QulG6uT4RwCa3m0QZoO7G+ +6Wu6lP/kodPuoNubstIuPdi2 +-----END CERTIFICATE----- +]] + +local other_ca_cert = [[ +-----BEGIN CERTIFICATE----- +MIIFrTCCA5WgAwIBAgIUFQe9z25yjw26iWzS+P7+hz1zx6AwDQYJKoZIhvcNAQEL +BQAwXjELMAkGA1UEBhMCVVMxCzAJBgNVBAgMAkNBMQswCQYDVQQHDAJTRjENMAsG +A1UECgwES29uZzEUMBIGA1UECwwLRW5naW5lZXJpbmcxEDAOBgNVBAMMB3Jvb3Rf +Y2EwHhcNMjEwMzA0MTEyMjM0WhcNNDEwMjI3MTEyMjM0WjBeMQswCQYDVQQGEwJV +UzELMAkGA1UECAwCQ0ExCzAJBgNVBAcMAlNGMQ0wCwYDVQQKDARLb25nMRQwEgYD +VQQLDAtFbmdpbmVlcmluZzEQMA4GA1UEAwwHcm9vdF9jYTCCAiIwDQYJKoZIhvcN +AQEBBQADggIPADCCAgoCggIBAKKjido39I5SEmPhme0Z+hG0buOylXg+jmqHpJ/K +rs+dSq/PsJCjSke81eOP2MFa5duyBxdnXmMJwZYxuQ91bKxdzWVE9ZgCJgNJYsB6 +y5+Fe7ypERwa2ebS/M99FFJ3EzpF017XdsgnSfVh1GEQOZkWQ1+7YrEUEgtwN5lO +MVUmj1EfoL+jQ/zwxwdxpLu3dh3Ica3szmx3YxqIPRnpyoYYqbktjL63gmFCjLeW 
+zEXdVZyoisdaA4iZ9e/wmuLR2/F4cbZ0SjU7QULZ2Zt/SCrs3CaJ3/ZAa6s84kjg +JBMav+GxbvATSuWQEajiVQrkW9HvXD/NUQBCzzZsOfpzn0044Ls7XvWDCCXs+xtG +Uhd5cJfmlcbHbZ9PU1xTBqdbwiRX+XlmX7CJRcfgnYnU/B3m5IheA1XKYhoXikgv +geRwq5uZ8Z2E/WONmFts46MLSmH43Ft+gIXA1u1g3eDHkU2bx9u592lZoluZtL3m +bmebyk+5bd0GdiHjBGvDSCf/fgaWROgGO9e0PBgdsngHEFmRspipaH39qveM1Cdh +83q4I96BRmjU5tvFXydFCvp8ABpZz9Gj0h8IRP+bK5ukU46YrEIxQxjBee1c1AAb +oatRJSJc2J6zSYXRnQfwf5OkhpmVYc+1TAyqPBfixa2TQ7OOhXxDYsJHAb7WySKP +lfonAgMBAAGjYzBhMB0GA1UdDgQWBBT00Tua7un0KobEs1aXuSZV8x4Q7TAfBgNV +HSMEGDAWgBT00Tua7un0KobEs1aXuSZV8x4Q7TAPBgNVHRMBAf8EBTADAQH/MA4G +A1UdDwEB/wQEAwIBhjANBgkqhkiG9w0BAQsFAAOCAgEAgI8CSmjvzQgmnzcNwqX5 +o+KBWEMHJEqQfowaZE7o6xkvEljb1YHRDE0hlwUtD1vbKUthoHD8Mqim3No5z4J0 +dEE+mXQ3zlJWKl5gqHs9KtcLhk51mf4VJ2TW8Z7AoE2OjWSnycLNdlpqUvxzCQOn +CIhvyDfs4OV1RYywbfiLLmzTCYT7Mt5ye1ZafoRNZ37DCnI/uqoOaMb+a6VaE+0F +ZXlDonXmy54QUmt6foSG/+kYaqdVLribsE6H+GpePmPTKKOvgE1RutR5+nvMJUB3 ++zMQSPVVYLzizwV+Tq9il81qNQB2hZGvM8iSRraBNn8mwpx7M6kcoJ4gvCA3kHCI +rmuuzlhkNcmZYh0uG378CzhdEOV+JMmuCh4xt2SbQIr5Luqm/+Xoq4tDplKoUVkC +DScxPoFNoi9bZYW/ppcaeX5KT3Gt0JBaCfD7d0CtbUp/iPS1HtgXTIL9XiYPipsV +oPLtqvfeORl6aUuqs1xX8HvZrSgcld51+r8X31YIs6feYTFvlbfP0/Jhf2Cs0K/j +jhC0sGVdWO1C0akDlEBfuE5YMrehjYrrOnEavtTi9+H0vNaB+BGAJHIAj+BGj5C7 +0EkbQdEyhB0pliy9qzbPtN5nt+y0I1lgN9VlFMub6r1u5novNzuVm+5ceBrxG+ga +T6nsr9aTE1yghO6GTWEPssw= +-----END CERTIFICATE----- +]] assert:set_parameter("TableFormatLevel", 10) @@ -11,12 +78,18 @@ for _, strategy in helpers.each_strategy() do describe("kong.db [#" .. strategy .. "]", function() local db, bp, service, route local global_plugin + local ca1, ca2, other_ca + local routes = {} + local p1, p2, p3, p4, p5, p6 lazy_setup(function() bp, db = helpers.get_db_utils(strategy, { "routes", "services", "plugins", + "ca_certificates", + }, { + "reference-ca-cert", }) global_plugin = db.plugins:insert({ name = "key-auth", @@ -24,6 +97,71 @@ for _, strategy in helpers.each_strategy() do }) assert.truthy(global_plugin) + ca1 = assert(bp.ca_certificates:insert({ + cert = ssl_fixtures.cert_ca, + })) + + ca2 = assert(bp.ca_certificates:insert({ + cert = ca_cert2, + })) + + other_ca = assert(bp.ca_certificates:insert({ + cert = other_ca_cert, + })) + + for i = 1, 6 do + routes[i] = assert(bp.routes:insert({ + paths = { "/foo" .. 
i, }, + })) + end + + p1 = assert(bp.plugins:insert({ + name = "reference-ca-cert", + route = routes[1], + config = { + ca_certificates = { ca1.id }, + } + })) + + p2 = assert(bp.plugins:insert({ + name = "reference-ca-cert", + route = routes[2], + config = { + ca_certificates = { ca1.id }, + } + })) + + p3 = assert(bp.plugins:insert({ + name = "reference-ca-cert", + route = routes[3], + config = { + ca_certificates = { ca2.id }, + } + })) + + p4 = assert(bp.plugins:insert({ + name = "reference-ca-cert", + route = routes[4], + config = { + ca_certificates = { ca2.id }, + } + })) + + p5 = assert(bp.plugins:insert({ + name = "reference-ca-cert", + route = routes[5], + config = { + ca_certificates = { ca1.id, ca2.id }, + } + })) + + p6 = assert(bp.plugins:insert({ + name = "reference-ca-cert", + route = routes[6], + config = { + ca_certificates = { ca1.id, ca2.id }, + } + })) end) describe("Plugins #plugins", function() @@ -303,6 +441,162 @@ for _, strategy in helpers.each_strategy() do end) + describe(":select_by_ca_certificate()", function() + it("selects the correct plugins", function() + local plugins, err = db.plugins:select_by_ca_certificate(ca1.id, nil, { + ["reference-ca-cert"] = true, + }) + local expected = { + [p1.id] = true, + [p2.id] = true, + [p5.id] = true, + [p6.id] = true, + } + local res = {} + assert.is_nil(err) + assert(plugins) + assert(#plugins == 4) + + for _, p in ipairs(plugins) do + res[p.id] = true + end + assert.are.same(expected, res) + + local plugins, err = db.plugins:select_by_ca_certificate(ca2.id, nil, { + ["reference-ca-cert"] = true, + }) + local expected = { + [p3.id] = true, + [p4.id] = true, + [p5.id] = true, + [p6.id] = true, + } + local res = {} + assert.is_nil(err) + assert(plugins) + assert(#plugins == 4) + + for _, p in ipairs(plugins) do + res[p.id] = true + end + assert.are.same(expected, res) + + -- unreferenced ca certificate + local plugins, err = db.plugins:select_by_ca_certificate(other_ca.id, nil, { + ["reference-ca-cert"] = true, + }) + assert.is_nil(err) + assert(plugins) + assert(#plugins == 0) + end) + + it("plugin_names default to all plugins", function() + local plugins, err = db.plugins:select_by_ca_certificate(ca1.id, nil) + local expected = { + [p1.id] = true, + [p2.id] = true, + [p5.id] = true, + [p6.id] = true, + } + local res = {} + assert.is_nil(err) + assert(plugins) + assert(#plugins == 4) + + for _, p in ipairs(plugins) do + res[p.id] = true + end + assert.are.same(expected, res) + + local plugins, err = db.plugins:select_by_ca_certificate(ca2.id, nil) + local expected = { + [p3.id] = true, + [p4.id] = true, + [p5.id] = true, + [p6.id] = true, + } + local res = {} + assert.is_nil(err) + assert(plugins) + assert(#plugins == 4) + + for _, p in ipairs(plugins) do + res[p.id] = true + end + assert.are.same(expected, res) + + -- unreferenced ca certificate + local plugins, err = db.plugins:select_by_ca_certificate(other_ca.id, nil) + assert.is_nil(err) + assert(plugins) + assert(#plugins == 0) + end) + + it("limits the number of returned plugins", function() + local plugins, err = db.plugins:select_by_ca_certificate(ca1.id, 1, { + ["reference-ca-cert"] = true, + }) + local expected = { + [p1.id] = true, + [p2.id] = true, + [p5.id] = true, + [p6.id] = true, + } + assert.is_nil(err) + assert(plugins) + assert(#plugins == 1) + assert(expected[plugins[1].id]) + + local plugins, err = db.plugins:select_by_ca_certificate(ca2.id, 1, { + ["reference-ca-cert"] = true, + }) + local expected = { + [p3.id] = true, + [p4.id] = true, + [p5.id] 
= true, + [p6.id] = true, + } + assert.is_nil(err) + assert(plugins) + assert(#plugins == 1) + assert(expected[plugins[1].id]) + + -- unreferenced ca certificate + local plugins, err = db.plugins:select_by_ca_certificate(other_ca.id, 1, { + ["reference-ca-cert"] = true, + }) + assert.is_nil(err) + assert(plugins) + assert(#plugins == 0) + end) + + it("plugin_names supports string type", function() + local plugins, err = db.plugins:select_by_ca_certificate(ca1.id, nil, "reference-ca-cert") + local expected = { + [p1.id] = true, + [p2.id] = true, + [p5.id] = true, + [p6.id] = true, + } + local res = {} + assert.is_nil(err) + assert(plugins) + assert(#plugins == 4) + + for _, p in ipairs(plugins) do + res[p.id] = true + end + assert.are.same(expected, res) + end) + + it("return empty table when plugin doesn't reference ca_certificates", function() + local plugins, err = db.plugins:select_by_ca_certificate(ca1.id, nil, "key-auth") + assert.is_nil(err) + assert(plugins) + assert(#plugins == 0) + end) + + end) end) -- kong.db [strategy] end diff --git a/spec/02-integration/03-db/21-services_spec.lua b/spec/02-integration/03-db/21-services_spec.lua new file mode 100644 index 000000000000..0eede2e3d44d --- /dev/null +++ b/spec/02-integration/03-db/21-services_spec.lua @@ -0,0 +1,215 @@ +local helpers = require "spec.helpers" +local ssl_fixtures = require "spec.fixtures.ssl" + +local ca_cert2 = [[ +-----BEGIN CERTIFICATE----- +MIIEvjCCAqagAwIBAgIJALabx/Nup200MA0GCSqGSIb3DQEBCwUAMBMxETAPBgNV +BAMMCFlvbG80Mi4xMCAXDTE5MDkxNTE2Mjc1M1oYDzIxMTkwODIyMTYyNzUzWjAT +MREwDwYDVQQDDAhZb2xvNDIuMTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoC +ggIBANIW67Ay0AtTeBY2mORaGet/VPL5jnBRz0zkZ4Jt7fEq3lbxYaJBnFI8wtz3 +bHLtLsxkvOFujEMY7HVd+iTqbJ7hLBtK0AdgXDjf+HMmoWM7x0PkZO+3XSqyRBbI +YNoEaQvYBNIXrKKJbXIU6higQaXYszeN8r3+RIbcTIlZxy28msivEGfGTrNujQFc +r/eyf+TLHbRqh0yg4Dy/U/T6fqamGhFrjupRmOMugwF/BHMH2JHhBYkkzuZLgV2u +7Yh1S5FRlh11am5vWuRSbarnx72hkJ99rUb6szOWnJKKew8RSn3CyhXbS5cb0QRc +ugRc33p/fMucJ4mtCJ2Om1QQe83G1iV2IBn6XJuCvYlyWH8XU0gkRxWD7ZQsl0bB +8AFTkVsdzb94OM8Y6tWI5ybS8rwl8b3r3fjyToIWrwK4WDJQuIUx4nUHObDyw+KK ++MmqwpAXQWbNeuAc27FjuJm90yr/163aGuInNY5Wiz6CM8WhFNAi/nkEY2vcxKKx +irSdSTkbnrmLFAYrThaq0BWTbW2mwkOatzv4R2kZzBUOiSjRLPnbyiPhI8dHLeGs +wMxiTXwyPi8iQvaIGyN4DPaSEiZ1GbexyYFdP7sJJD8tG8iccbtJYquq3cDaPTf+ +qv5M6R/JuMqtUDheLSpBNK+8vIe5e3MtGFyrKqFXdynJtfHVAgMBAAGjEzARMA8G +A1UdEwQIMAYBAf8CAQAwDQYJKoZIhvcNAQELBQADggIBAK0BmL5B1fPSMbFy8Hbc +/ESEunt4HGaRWmZZSa/aOtTjhKyDXLLJZz3C4McugfOf9BvvmAOZU4uYjfHTnNH2 +Z3neBkdTpQuJDvrBPNoCtJns01X/nuqFaTK/Tt9ZjAcVeQmp51RwhyiD7nqOJ/7E +Hp2rC6gH2ABXeexws4BDoZPoJktS8fzGWdFBCHzf4mCJcb4XkI+7GTYpglR818L3 +dMNJwXeuUsmxxKScBVH6rgbgcEC/6YwepLMTHB9VcH3X5VCfkDIyPYLWmvE0gKV7 +6OU91E2Rs8PzbJ3EuyQpJLxFUQp8ohv5zaNBlnMb76UJOPR6hXfst5V+e7l5Dgwv +Dh4CeO46exmkEsB+6R3pQR8uOFtubH2snA0S3JA1ji6baP5Y9Wh9bJ5McQUgbAPE +sCRBFoDLXOj3EgzibohC5WrxN3KIMxlQnxPl3VdQvp4gF899mn0Z9V5dAsGPbxRd +quE+DwfXkm0Sa6Ylwqrzu2OvSVgbMliF3UnWbNsDD5KcHGIaFxVC1qkwK4cT3pyS +58i/HAB2+P+O+MltQUDiuw0OSUFDC0IIjkDfxLVffbF+27ef9C5NG81QlwTz7TuN +zeigcsBKooMJTszxCl6dtxSyWTj7hJWXhy9pXsm1C1QulG6uT4RwCa3m0QZoO7G+ +6Wu6lP/kodPuoNubstIuPdi2 +-----END CERTIFICATE----- +]] + +local other_ca_cert = [[ +-----BEGIN CERTIFICATE----- +MIIFrTCCA5WgAwIBAgIUFQe9z25yjw26iWzS+P7+hz1zx6AwDQYJKoZIhvcNAQEL +BQAwXjELMAkGA1UEBhMCVVMxCzAJBgNVBAgMAkNBMQswCQYDVQQHDAJTRjENMAsG +A1UECgwES29uZzEUMBIGA1UECwwLRW5naW5lZXJpbmcxEDAOBgNVBAMMB3Jvb3Rf +Y2EwHhcNMjEwMzA0MTEyMjM0WhcNNDEwMjI3MTEyMjM0WjBeMQswCQYDVQQGEwJV +UzELMAkGA1UECAwCQ0ExCzAJBgNVBAcMAlNGMQ0wCwYDVQQKDARLb25nMRQwEgYD 
+VQQLDAtFbmdpbmVlcmluZzEQMA4GA1UEAwwHcm9vdF9jYTCCAiIwDQYJKoZIhvcN +AQEBBQADggIPADCCAgoCggIBAKKjido39I5SEmPhme0Z+hG0buOylXg+jmqHpJ/K +rs+dSq/PsJCjSke81eOP2MFa5duyBxdnXmMJwZYxuQ91bKxdzWVE9ZgCJgNJYsB6 +y5+Fe7ypERwa2ebS/M99FFJ3EzpF017XdsgnSfVh1GEQOZkWQ1+7YrEUEgtwN5lO +MVUmj1EfoL+jQ/zwxwdxpLu3dh3Ica3szmx3YxqIPRnpyoYYqbktjL63gmFCjLeW +zEXdVZyoisdaA4iZ9e/wmuLR2/F4cbZ0SjU7QULZ2Zt/SCrs3CaJ3/ZAa6s84kjg +JBMav+GxbvATSuWQEajiVQrkW9HvXD/NUQBCzzZsOfpzn0044Ls7XvWDCCXs+xtG +Uhd5cJfmlcbHbZ9PU1xTBqdbwiRX+XlmX7CJRcfgnYnU/B3m5IheA1XKYhoXikgv +geRwq5uZ8Z2E/WONmFts46MLSmH43Ft+gIXA1u1g3eDHkU2bx9u592lZoluZtL3m +bmebyk+5bd0GdiHjBGvDSCf/fgaWROgGO9e0PBgdsngHEFmRspipaH39qveM1Cdh +83q4I96BRmjU5tvFXydFCvp8ABpZz9Gj0h8IRP+bK5ukU46YrEIxQxjBee1c1AAb +oatRJSJc2J6zSYXRnQfwf5OkhpmVYc+1TAyqPBfixa2TQ7OOhXxDYsJHAb7WySKP +lfonAgMBAAGjYzBhMB0GA1UdDgQWBBT00Tua7un0KobEs1aXuSZV8x4Q7TAfBgNV +HSMEGDAWgBT00Tua7un0KobEs1aXuSZV8x4Q7TAPBgNVHRMBAf8EBTADAQH/MA4G +A1UdDwEB/wQEAwIBhjANBgkqhkiG9w0BAQsFAAOCAgEAgI8CSmjvzQgmnzcNwqX5 +o+KBWEMHJEqQfowaZE7o6xkvEljb1YHRDE0hlwUtD1vbKUthoHD8Mqim3No5z4J0 +dEE+mXQ3zlJWKl5gqHs9KtcLhk51mf4VJ2TW8Z7AoE2OjWSnycLNdlpqUvxzCQOn +CIhvyDfs4OV1RYywbfiLLmzTCYT7Mt5ye1ZafoRNZ37DCnI/uqoOaMb+a6VaE+0F +ZXlDonXmy54QUmt6foSG/+kYaqdVLribsE6H+GpePmPTKKOvgE1RutR5+nvMJUB3 ++zMQSPVVYLzizwV+Tq9il81qNQB2hZGvM8iSRraBNn8mwpx7M6kcoJ4gvCA3kHCI +rmuuzlhkNcmZYh0uG378CzhdEOV+JMmuCh4xt2SbQIr5Luqm/+Xoq4tDplKoUVkC +DScxPoFNoi9bZYW/ppcaeX5KT3Gt0JBaCfD7d0CtbUp/iPS1HtgXTIL9XiYPipsV +oPLtqvfeORl6aUuqs1xX8HvZrSgcld51+r8X31YIs6feYTFvlbfP0/Jhf2Cs0K/j +jhC0sGVdWO1C0akDlEBfuE5YMrehjYrrOnEavtTi9+H0vNaB+BGAJHIAj+BGj5C7 +0EkbQdEyhB0pliy9qzbPtN5nt+y0I1lgN9VlFMub6r1u5novNzuVm+5ceBrxG+ga +T6nsr9aTE1yghO6GTWEPssw= +-----END CERTIFICATE----- +]] + +for _, strategy in helpers.each_strategy() do + describe("db.services #" .. strategy, function() + local bp, db + local ca1, ca2, other_ca + local srv1, srv2, srv3, srv4, srv5, srv6 + + lazy_setup(function() + bp, db = helpers.get_db_utils(strategy, { + "services", + "ca_certificates", + }) + + ca1 = assert(bp.ca_certificates:insert({ + cert = ssl_fixtures.cert_ca, + })) + + ca2 = assert(bp.ca_certificates:insert({ + cert = ca_cert2, + })) + + other_ca = assert(bp.ca_certificates:insert({ + cert = other_ca_cert, + })) + + local url = "https://" .. helpers.mock_upstream_host .. ":" .. 
helpers.mock_upstream_port + + srv1 = assert(bp.services:insert { + url = url, + protocol = "https", + ca_certificates = { ca1.id }, + }) + + srv2 = assert(bp.services:insert { + url = url, + protocol = "https", + ca_certificates = { ca1.id }, + }) + + srv3 = assert(bp.services:insert { + url = url, + protocol = "https", + ca_certificates = { ca2.id }, + }) + + srv4 = assert(bp.services:insert { + url = url, + protocol = "https", + ca_certificates = { ca2.id }, + }) + + srv5 = assert(bp.services:insert { + url = url, + protocol = "https", + ca_certificates = { ca1.id, ca2.id }, + }) + + srv6 = assert(bp.services:insert { + url = url, + protocol = "https", + ca_certificates = { ca1.id, ca2.id }, + }) + end) + + lazy_teardown(function() + db.services:truncate() + db.ca_certificates:truncate() + end) + + describe("services:select_by_ca_certificate()", function() + it("selects the correct services", function() + local services, err = db.services:select_by_ca_certificate(ca1.id) + local expected = { + [srv1.id] = true, + [srv2.id] = true, + [srv5.id] = true, + [srv6.id] = true, + } + local res = {} + assert.is_nil(err) + assert(services) + assert(#services == 4) + + for _, s in ipairs(services) do + res[s.id] = true + end + assert.are.same(expected, res) + + local services, err = db.services:select_by_ca_certificate(ca2.id) + local expected = { + [srv3.id] = true, + [srv4.id] = true, + [srv5.id] = true, + [srv6.id] = true, + } + local res = {} + assert.is_nil(err) + assert(services) + assert(#services == 4) + + for _, s in ipairs(services) do + res[s.id] = true + end + assert.are.same(expected, res) + + -- unreferenced ca certificate + local services, err = db.services:select_by_ca_certificate(other_ca.id) + assert.is_nil(err) + assert(services) + assert(#services == 0) + end) + + it("limits the number of returned services", function() + local services, err = db.services:select_by_ca_certificate(ca1.id, 1) + local expected = { + [srv1.id] = true, + [srv2.id] = true, + [srv5.id] = true, + [srv6.id] = true, + } + assert.is_nil(err) + assert(services) + assert(#services == 1) + assert(expected[services[1].id]) + + local services, err = db.services:select_by_ca_certificate(ca2.id, 1) + local expected = { + [srv3.id] = true, + [srv4.id] = true, + [srv5.id] = true, + [srv6.id] = true, + } + assert.is_nil(err) + assert(services) + assert(#services == 1) + assert(expected[services[1].id]) + + -- unreferenced ca certificate + local services, err = db.services:select_by_ca_certificate(other_ca.id, 1) + assert.is_nil(err) + assert(services) + assert(#services == 0) + end) + end) + end) +end diff --git a/spec/02-integration/03-db/22-ca_certificates_spec.lua b/spec/02-integration/03-db/22-ca_certificates_spec.lua new file mode 100644 index 000000000000..6fd94a4c5153 --- /dev/null +++ b/spec/02-integration/03-db/22-ca_certificates_spec.lua @@ -0,0 +1,145 @@ +local helpers = require "spec.helpers" +local ssl_fixtures = require "spec.fixtures.ssl" +local fmt = string.format + +local ca_cert2 = [[ +-----BEGIN CERTIFICATE----- +MIIEvjCCAqagAwIBAgIJALabx/Nup200MA0GCSqGSIb3DQEBCwUAMBMxETAPBgNV +BAMMCFlvbG80Mi4xMCAXDTE5MDkxNTE2Mjc1M1oYDzIxMTkwODIyMTYyNzUzWjAT +MREwDwYDVQQDDAhZb2xvNDIuMTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoC +ggIBANIW67Ay0AtTeBY2mORaGet/VPL5jnBRz0zkZ4Jt7fEq3lbxYaJBnFI8wtz3 +bHLtLsxkvOFujEMY7HVd+iTqbJ7hLBtK0AdgXDjf+HMmoWM7x0PkZO+3XSqyRBbI +YNoEaQvYBNIXrKKJbXIU6higQaXYszeN8r3+RIbcTIlZxy28msivEGfGTrNujQFc +r/eyf+TLHbRqh0yg4Dy/U/T6fqamGhFrjupRmOMugwF/BHMH2JHhBYkkzuZLgV2u 
+7Yh1S5FRlh11am5vWuRSbarnx72hkJ99rUb6szOWnJKKew8RSn3CyhXbS5cb0QRc +ugRc33p/fMucJ4mtCJ2Om1QQe83G1iV2IBn6XJuCvYlyWH8XU0gkRxWD7ZQsl0bB +8AFTkVsdzb94OM8Y6tWI5ybS8rwl8b3r3fjyToIWrwK4WDJQuIUx4nUHObDyw+KK ++MmqwpAXQWbNeuAc27FjuJm90yr/163aGuInNY5Wiz6CM8WhFNAi/nkEY2vcxKKx +irSdSTkbnrmLFAYrThaq0BWTbW2mwkOatzv4R2kZzBUOiSjRLPnbyiPhI8dHLeGs +wMxiTXwyPi8iQvaIGyN4DPaSEiZ1GbexyYFdP7sJJD8tG8iccbtJYquq3cDaPTf+ +qv5M6R/JuMqtUDheLSpBNK+8vIe5e3MtGFyrKqFXdynJtfHVAgMBAAGjEzARMA8G +A1UdEwQIMAYBAf8CAQAwDQYJKoZIhvcNAQELBQADggIBAK0BmL5B1fPSMbFy8Hbc +/ESEunt4HGaRWmZZSa/aOtTjhKyDXLLJZz3C4McugfOf9BvvmAOZU4uYjfHTnNH2 +Z3neBkdTpQuJDvrBPNoCtJns01X/nuqFaTK/Tt9ZjAcVeQmp51RwhyiD7nqOJ/7E +Hp2rC6gH2ABXeexws4BDoZPoJktS8fzGWdFBCHzf4mCJcb4XkI+7GTYpglR818L3 +dMNJwXeuUsmxxKScBVH6rgbgcEC/6YwepLMTHB9VcH3X5VCfkDIyPYLWmvE0gKV7 +6OU91E2Rs8PzbJ3EuyQpJLxFUQp8ohv5zaNBlnMb76UJOPR6hXfst5V+e7l5Dgwv +Dh4CeO46exmkEsB+6R3pQR8uOFtubH2snA0S3JA1ji6baP5Y9Wh9bJ5McQUgbAPE +sCRBFoDLXOj3EgzibohC5WrxN3KIMxlQnxPl3VdQvp4gF899mn0Z9V5dAsGPbxRd +quE+DwfXkm0Sa6Ylwqrzu2OvSVgbMliF3UnWbNsDD5KcHGIaFxVC1qkwK4cT3pyS +58i/HAB2+P+O+MltQUDiuw0OSUFDC0IIjkDfxLVffbF+27ef9C5NG81QlwTz7TuN +zeigcsBKooMJTszxCl6dtxSyWTj7hJWXhy9pXsm1C1QulG6uT4RwCa3m0QZoO7G+ +6Wu6lP/kodPuoNubstIuPdi2 +-----END CERTIFICATE----- +]] + +local other_ca_cert = [[ +-----BEGIN CERTIFICATE----- +MIIFrTCCA5WgAwIBAgIUFQe9z25yjw26iWzS+P7+hz1zx6AwDQYJKoZIhvcNAQEL +BQAwXjELMAkGA1UEBhMCVVMxCzAJBgNVBAgMAkNBMQswCQYDVQQHDAJTRjENMAsG +A1UECgwES29uZzEUMBIGA1UECwwLRW5naW5lZXJpbmcxEDAOBgNVBAMMB3Jvb3Rf +Y2EwHhcNMjEwMzA0MTEyMjM0WhcNNDEwMjI3MTEyMjM0WjBeMQswCQYDVQQGEwJV +UzELMAkGA1UECAwCQ0ExCzAJBgNVBAcMAlNGMQ0wCwYDVQQKDARLb25nMRQwEgYD +VQQLDAtFbmdpbmVlcmluZzEQMA4GA1UEAwwHcm9vdF9jYTCCAiIwDQYJKoZIhvcN +AQEBBQADggIPADCCAgoCggIBAKKjido39I5SEmPhme0Z+hG0buOylXg+jmqHpJ/K +rs+dSq/PsJCjSke81eOP2MFa5duyBxdnXmMJwZYxuQ91bKxdzWVE9ZgCJgNJYsB6 +y5+Fe7ypERwa2ebS/M99FFJ3EzpF017XdsgnSfVh1GEQOZkWQ1+7YrEUEgtwN5lO +MVUmj1EfoL+jQ/zwxwdxpLu3dh3Ica3szmx3YxqIPRnpyoYYqbktjL63gmFCjLeW +zEXdVZyoisdaA4iZ9e/wmuLR2/F4cbZ0SjU7QULZ2Zt/SCrs3CaJ3/ZAa6s84kjg +JBMav+GxbvATSuWQEajiVQrkW9HvXD/NUQBCzzZsOfpzn0044Ls7XvWDCCXs+xtG +Uhd5cJfmlcbHbZ9PU1xTBqdbwiRX+XlmX7CJRcfgnYnU/B3m5IheA1XKYhoXikgv +geRwq5uZ8Z2E/WONmFts46MLSmH43Ft+gIXA1u1g3eDHkU2bx9u592lZoluZtL3m +bmebyk+5bd0GdiHjBGvDSCf/fgaWROgGO9e0PBgdsngHEFmRspipaH39qveM1Cdh +83q4I96BRmjU5tvFXydFCvp8ABpZz9Gj0h8IRP+bK5ukU46YrEIxQxjBee1c1AAb +oatRJSJc2J6zSYXRnQfwf5OkhpmVYc+1TAyqPBfixa2TQ7OOhXxDYsJHAb7WySKP +lfonAgMBAAGjYzBhMB0GA1UdDgQWBBT00Tua7un0KobEs1aXuSZV8x4Q7TAfBgNV +HSMEGDAWgBT00Tua7un0KobEs1aXuSZV8x4Q7TAPBgNVHRMBAf8EBTADAQH/MA4G +A1UdDwEB/wQEAwIBhjANBgkqhkiG9w0BAQsFAAOCAgEAgI8CSmjvzQgmnzcNwqX5 +o+KBWEMHJEqQfowaZE7o6xkvEljb1YHRDE0hlwUtD1vbKUthoHD8Mqim3No5z4J0 +dEE+mXQ3zlJWKl5gqHs9KtcLhk51mf4VJ2TW8Z7AoE2OjWSnycLNdlpqUvxzCQOn +CIhvyDfs4OV1RYywbfiLLmzTCYT7Mt5ye1ZafoRNZ37DCnI/uqoOaMb+a6VaE+0F +ZXlDonXmy54QUmt6foSG/+kYaqdVLribsE6H+GpePmPTKKOvgE1RutR5+nvMJUB3 ++zMQSPVVYLzizwV+Tq9il81qNQB2hZGvM8iSRraBNn8mwpx7M6kcoJ4gvCA3kHCI +rmuuzlhkNcmZYh0uG378CzhdEOV+JMmuCh4xt2SbQIr5Luqm/+Xoq4tDplKoUVkC +DScxPoFNoi9bZYW/ppcaeX5KT3Gt0JBaCfD7d0CtbUp/iPS1HtgXTIL9XiYPipsV +oPLtqvfeORl6aUuqs1xX8HvZrSgcld51+r8X31YIs6feYTFvlbfP0/Jhf2Cs0K/j +jhC0sGVdWO1C0akDlEBfuE5YMrehjYrrOnEavtTi9+H0vNaB+BGAJHIAj+BGj5C7 +0EkbQdEyhB0pliy9qzbPtN5nt+y0I1lgN9VlFMub6r1u5novNzuVm+5ceBrxG+ga +T6nsr9aTE1yghO6GTWEPssw= +-----END CERTIFICATE----- +]] + +for _, strategy in helpers.each_strategy() do + describe("db.services #" .. 
strategy, function() + local bp, db + local ca1, ca2, other_ca + local service, plugin + + lazy_setup(function() + bp, db = helpers.get_db_utils(strategy, { + "services", + "plugins", + "ca_certificates", + }, { + "reference-ca-cert", + }) + + ca1 = assert(bp.ca_certificates:insert({ + cert = ssl_fixtures.cert_ca, + })) + + ca2 = assert(bp.ca_certificates:insert({ + cert = ca_cert2, + })) + + other_ca = assert(bp.ca_certificates:insert({ + cert = other_ca_cert, + })) + + local url = "https://" .. helpers.mock_upstream_host .. ":" .. helpers.mock_upstream_port + + service = assert(bp.services:insert { + url = url, + protocol = "https", + ca_certificates = { ca1.id }, + }) + + plugin = assert(bp.plugins:insert({ + name = "reference-ca-cert", + service = service, + config = { + ca_certificates = { ca2.id }, + } + })) + end) + + lazy_teardown(function() + db.services:truncate() + db.plugins:truncate() + db.ca_certificates:truncate() + end) + + describe("ca_certificates:delete()", function() + it("can delete ca certificate that is not being referenced", function() + local ok, err, err_t = db.ca_certificates:delete({ id = other_ca.id }) + assert.is_nil(err) + assert.is_nil(err_t) + assert(ok) + end) + + it("can't delete ca certificate that is referenced by services", function() + local ok, err = db.ca_certificates:delete({ id = ca1.id }) + assert.matches(fmt("ca certificate %s is still referenced by services (id = %s)", ca1.id, service.id), + err, nil, true) + assert.is_nil(ok) + end) + + it("can't delete ca certificate that is referenced by plugins", function() + local ok, err = db.ca_certificates:delete({ id = ca2.id }) + assert.matches(fmt("ca certificate %s is still referenced by plugins (id = %s)", ca2.id, plugin.id), + err, nil, true) + assert.is_nil(ok) + end) + end) + end) +end diff --git a/spec/02-integration/04-admin_api/16-ca_certificates_routes_spec.lua b/spec/02-integration/04-admin_api/16-ca_certificates_routes_spec.lua index 10d81b88a3b3..fc837000895b 100644 --- a/spec/02-integration/04-admin_api/16-ca_certificates_routes_spec.lua +++ b/spec/02-integration/04-admin_api/16-ca_certificates_routes_spec.lua @@ -42,6 +42,7 @@ for _, strategy in helpers.each_strategy() do lazy_setup(function() bp, db = helpers.get_db_utils(strategy, { "ca_certificates", + "services", }) assert(helpers.start_kong { @@ -148,6 +149,32 @@ for _, strategy in helpers.each_strategy() do ca = assert(bp.ca_certificates:insert()) end) + it("not allowed to delete if it is referenced by other entities", function() + -- add a service that references the ca + local res = client:post("/services/", { + body = { + url = "https://" .. helpers.mock_upstream_host .. ":" .. helpers.mock_upstream_port, + protocol = "https", + ca_certificates = { ca.id }, + }, + headers = { ["Content-Type"] = "application/json" }, + }) + local body = assert.res_status(201, res) + local service = cjson.decode(body) + + helpers.wait_for_all_config_update() + + local res = client:delete("/ca_certificates/" .. ca.id) + + local body = assert.res_status(400, res) + local json = cjson.decode(body) + + assert.equal("ca certificate " .. ca.id .. " is still referenced by services (id = " .. service.id .. ")", json.message) + + local res = client:delete("/services/" .. service.id) + assert.res_status(204, res) + end) + it("works", function() local res = client:delete("/ca_certificates/" .. 
ca.id) assert.res_status(204, res) diff --git a/spec/02-integration/05-proxy/18-upstream_tls_spec.lua b/spec/02-integration/05-proxy/18-upstream_tls_spec.lua index ec1723d9a71e..df51053ffb0f 100644 --- a/spec/02-integration/05-proxy/18-upstream_tls_spec.lua +++ b/spec/02-integration/05-proxy/18-upstream_tls_spec.lua @@ -3,6 +3,37 @@ local ssl_fixtures = require "spec.fixtures.ssl" local atc_compat = require "kong.router.compat" +local other_ca_cert = [[ +-----BEGIN CERTIFICATE----- +MIIEvjCCAqagAwIBAgIJALabx/Nup200MA0GCSqGSIb3DQEBCwUAMBMxETAPBgNV +BAMMCFlvbG80Mi4xMCAXDTE5MDkxNTE2Mjc1M1oYDzIxMTkwODIyMTYyNzUzWjAT +MREwDwYDVQQDDAhZb2xvNDIuMTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoC +ggIBANIW67Ay0AtTeBY2mORaGet/VPL5jnBRz0zkZ4Jt7fEq3lbxYaJBnFI8wtz3 +bHLtLsxkvOFujEMY7HVd+iTqbJ7hLBtK0AdgXDjf+HMmoWM7x0PkZO+3XSqyRBbI +YNoEaQvYBNIXrKKJbXIU6higQaXYszeN8r3+RIbcTIlZxy28msivEGfGTrNujQFc +r/eyf+TLHbRqh0yg4Dy/U/T6fqamGhFrjupRmOMugwF/BHMH2JHhBYkkzuZLgV2u +7Yh1S5FRlh11am5vWuRSbarnx72hkJ99rUb6szOWnJKKew8RSn3CyhXbS5cb0QRc +ugRc33p/fMucJ4mtCJ2Om1QQe83G1iV2IBn6XJuCvYlyWH8XU0gkRxWD7ZQsl0bB +8AFTkVsdzb94OM8Y6tWI5ybS8rwl8b3r3fjyToIWrwK4WDJQuIUx4nUHObDyw+KK ++MmqwpAXQWbNeuAc27FjuJm90yr/163aGuInNY5Wiz6CM8WhFNAi/nkEY2vcxKKx +irSdSTkbnrmLFAYrThaq0BWTbW2mwkOatzv4R2kZzBUOiSjRLPnbyiPhI8dHLeGs +wMxiTXwyPi8iQvaIGyN4DPaSEiZ1GbexyYFdP7sJJD8tG8iccbtJYquq3cDaPTf+ +qv5M6R/JuMqtUDheLSpBNK+8vIe5e3MtGFyrKqFXdynJtfHVAgMBAAGjEzARMA8G +A1UdEwQIMAYBAf8CAQAwDQYJKoZIhvcNAQELBQADggIBAK0BmL5B1fPSMbFy8Hbc +/ESEunt4HGaRWmZZSa/aOtTjhKyDXLLJZz3C4McugfOf9BvvmAOZU4uYjfHTnNH2 +Z3neBkdTpQuJDvrBPNoCtJns01X/nuqFaTK/Tt9ZjAcVeQmp51RwhyiD7nqOJ/7E +Hp2rC6gH2ABXeexws4BDoZPoJktS8fzGWdFBCHzf4mCJcb4XkI+7GTYpglR818L3 +dMNJwXeuUsmxxKScBVH6rgbgcEC/6YwepLMTHB9VcH3X5VCfkDIyPYLWmvE0gKV7 +6OU91E2Rs8PzbJ3EuyQpJLxFUQp8ohv5zaNBlnMb76UJOPR6hXfst5V+e7l5Dgwv +Dh4CeO46exmkEsB+6R3pQR8uOFtubH2snA0S3JA1ji6baP5Y9Wh9bJ5McQUgbAPE +sCRBFoDLXOj3EgzibohC5WrxN3KIMxlQnxPl3VdQvp4gF899mn0Z9V5dAsGPbxRd +quE+DwfXkm0Sa6Ylwqrzu2OvSVgbMliF3UnWbNsDD5KcHGIaFxVC1qkwK4cT3pyS +58i/HAB2+P+O+MltQUDiuw0OSUFDC0IIjkDfxLVffbF+27ef9C5NG81QlwTz7TuN +zeigcsBKooMJTszxCl6dtxSyWTj7hJWXhy9pXsm1C1QulG6uT4RwCa3m0QZoO7G+ +6Wu6lP/kodPuoNubstIuPdi2 +-----END CERTIFICATE----- +]] + local fixtures = { http_mock = { upstream_mtls = [[ @@ -952,6 +983,129 @@ for _, strategy in helpers.each_strategy() do assert.equals("it works", body) end end) + + it("#db request is not allowed through once the CA certificate is updated to other ca", function() + local res = assert(admin_client:patch("/ca_certificates/" .. 
ca_certificate.id, { + body = { + cert = other_ca_cert, + }, + headers = { ["Content-Type"] = "application/json" }, + })) + + assert.res_status(200, res) + + wait_for_all_config_update(subsystems) + + local body + helpers.wait_until(function() + local proxy_client = get_proxy_client(subsystems, 19001) + local path + if subsystems == "http" then + path = "/tls" + else + path = "/" + end + local res, err = proxy_client:send { + path = path, + headers = { + ["Host"] = "example.com", + } + } + + if subsystems == "http" then + return pcall(function() + body = assert.res_status(502, res) + assert(proxy_client:close()) + end) + else + return pcall(function() + assert.equals("connection reset by peer", err) + assert(proxy_client:close()) + end) + end + end, 10) + + if subsystems == "http" then + assert.matches("An invalid response was received from the upstream server", body) + end + + -- buffered_proxying + if subsystems == "http" then + helpers.wait_until(function() + local proxy_client = get_proxy_client(subsystems, 19001) + local path = "/tls-buffered-proxying" + local res = proxy_client:send { + path = path, + headers = { + ["Host"] = "example.com", + } + } + + return pcall(function() + body = assert.res_status(502, res) + assert(proxy_client:close()) + end) + end, 10) + assert.matches("An invalid response was received from the upstream server", body) + end + end) + + it("#db request is allowed through once the CA certificate is updated back to the correct ca", function() + local res = assert(admin_client:patch("/ca_certificates/" .. ca_certificate.id, { + body = { + cert = ssl_fixtures.cert_ca, + }, + headers = { ["Content-Type"] = "application/json" }, + })) + + assert.res_status(200, res) + + wait_for_all_config_update(subsystems) + + local body + helpers.wait_until(function() + local proxy_client = get_proxy_client(subsystems, 19001) + local path + if subsystems == "http" then + path = "/tls" + else + path = "/" + end + local res = proxy_client:send { + path = path, + headers = { + ["Host"] = "example.com", + } + } + + return pcall(function() + body = assert.res_status(200, res) + assert(proxy_client:close()) + end) + end, 10) + + assert.equals("it works", body) + + -- buffered_proxying + if subsystems == "http" then + helpers.wait_until(function() + local proxy_client = get_proxy_client(subsystems, 19001) + local path = "/tls-buffered-proxying" + local res = proxy_client:send { + path = path, + headers = { + ["Host"] = "example.com", + } + } + + return pcall(function() + body = assert.res_status(200, res) + assert(proxy_client:close()) + end) + end, 10) + assert.equals("it works", body) + end + end) end) describe("#db tls_verify_depth", function() @@ -1004,19 +1158,17 @@ for _, strategy in helpers.each_strategy() do } } - return pcall(function() - if subsystems == "http" then - return pcall(function() - body = assert.res_status(502, res) - assert(proxy_client:close()) - end) - else - return pcall(function() - assert.equals("connection reset by peer", err) - assert(proxy_client:close()) - end) - end - end) + if subsystems == "http" then + return pcall(function() + body = assert.res_status(502, res) + assert(proxy_client:close()) + end) + else + return pcall(function() + assert.equals("connection reset by peer", err) + assert(proxy_client:close()) + end) + end end, 10) if subsystems == "http" then assert.matches("An invalid response was received from the upstream server", body) diff --git a/spec/fixtures/custom_plugins/kong/plugins/reference-ca-cert/handler.lua 
b/spec/fixtures/custom_plugins/kong/plugins/reference-ca-cert/handler.lua new file mode 100644 index 000000000000..dfff3ebcbd08 --- /dev/null +++ b/spec/fixtures/custom_plugins/kong/plugins/reference-ca-cert/handler.lua @@ -0,0 +1,6 @@ +local ReferenceCaCertHandler = { + VERSION = "1.0.0", + PRIORITY = 1, +} + +return ReferenceCaCertHandler diff --git a/spec/fixtures/custom_plugins/kong/plugins/reference-ca-cert/schema.lua b/spec/fixtures/custom_plugins/kong/plugins/reference-ca-cert/schema.lua new file mode 100644 index 000000000000..8e388fe650a8 --- /dev/null +++ b/spec/fixtures/custom_plugins/kong/plugins/reference-ca-cert/schema.lua @@ -0,0 +1,15 @@ +return { + name = "reference-ca-cert", + fields = { + { + config = { + type = "record", + fields = { + { pre_key = { type = "string", }, }, + { ca_certificates = { type = "array", required = true, elements = { type = "string", uuid = true, }, }, }, + { post_key = { type = "string", }, }, + }, + }, + }, + }, +} From ef13d3949762c13afc5e9e68a625c5d23719a907 Mon Sep 17 00:00:00 2001 From: Chrono Date: Thu, 23 Nov 2023 15:18:05 +0800 Subject: [PATCH 145/249] refactor(plugins/datadog): use tools.string.replace_dashes (#12081) tools.string.replace_dashes has better performance. --- kong/plugins/datadog/handler.lua | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/kong/plugins/datadog/handler.lua b/kong/plugins/datadog/handler.lua index b0d387f52236..3158a9e45e88 100644 --- a/kong/plugins/datadog/handler.lua +++ b/kong/plugins/datadog/handler.lua @@ -3,6 +3,9 @@ local statsd_logger = require "kong.plugins.datadog.statsd_logger" local kong_meta = require "kong.meta" +local replace_dashes = require("kong.tools.string").replace_dashes + + local kong = kong local ngx = ngx local null = ngx.null @@ -14,7 +17,7 @@ local ipairs = ipairs local get_consumer_id = { consumer_id = function(consumer) - return consumer and gsub(consumer.id, "-", "_") + return consumer and replace_dashes(consumer.id) end, custom_id = function(consumer) return consumer and consumer.custom_id From f75482f522ea83080737309df4a1746864797413 Mon Sep 17 00:00:00 2001 From: Chrono Date: Thu, 23 Nov 2023 15:18:53 +0800 Subject: [PATCH 146/249] perf(plugins/jwt): use string.buffer to replace table.concat (#12075) As other PRs did, string.buffer can replace table.concat to get more performance. 
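For illustration, here is a minimal standalone sketch of the pattern being adopted (assuming LuaJIT 2.1's built-in `string.buffer` module, as shipped with OpenResty; the helper names and token values are invented for the example and are not the actual `jwt_parser` code):

```lua
-- Sketch only: join three pre-encoded JWT segments with "." separators.
local buffer = require "string.buffer"

-- table.concat style: allocates an intermediate table per call
local function join_concat(header, payload, signature)
  return table.concat({ header, payload, signature }, ".")
end

-- string.buffer style: appends directly into a growable buffer
local function join_buffer(header, payload, signature)
  local buf = buffer.new()
  buf:put(header):put(".")
     :put(payload):put(".")
     :put(signature)
  return buf:get()
end

assert(join_concat("h", "p", "s") == join_buffer("h", "p", "s"))  --> both yield "h.p.s"
```
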
Reference: https://github.com/Kong/kong/pull/11304#issuecomment-1671212708 --- kong/plugins/jwt/jwt_parser.lua | 18 +++++++++--------- 1 file changed, 9 insertions(+), 9 deletions(-) diff --git a/kong/plugins/jwt/jwt_parser.lua b/kong/plugins/jwt/jwt_parser.lua index e22b6b11f621..5bad71635915 100644 --- a/kong/plugins/jwt/jwt_parser.lua +++ b/kong/plugins/jwt/jwt_parser.lua @@ -7,6 +7,7 @@ local json = require "cjson" local b64 = require "ngx.base64" +local buffer = require "string.buffer" local openssl_digest = require "resty.openssl.digest" local openssl_hmac = require "resty.openssl.hmac" local openssl_pkey = require "resty.openssl.pkey" @@ -20,7 +21,6 @@ local time = ngx.time local pairs = pairs local error = error local pcall = pcall -local concat = table.concat local insert = table.insert local unpack = unpack local assert = assert @@ -237,17 +237,17 @@ local function encode_token(data, key, alg, header) end local header = header or { typ = "JWT", alg = alg } - local segments = { - base64_encode(json.encode(header)), - base64_encode(json.encode(data)) - } + local buf = buffer.new() + + buf:put(base64_encode(json.encode(header))):put(".") + :put(base64_encode(json.encode(data))) - local signing_input = concat(segments, ".") - local signature = alg_sign[alg](signing_input, key) + local signature = alg_sign[alg](buf:tostring(), key) - segments[#segments+1] = base64_encode(signature) + buf:put(".") + :put(base64_encode(signature)) - return concat(segments, ".") + return buf:get() end From 6191cda8d3c11a3a6ca90c8918ee78bf9de81c8a Mon Sep 17 00:00:00 2001 From: Chrono Date: Thu, 23 Nov 2023 15:19:39 +0800 Subject: [PATCH 147/249] refactor(plugins/oauth2): use build-in functions to replace sha256 (#12067) Use build-in functions oftools.sha256 to simplify code. KAG-3156 --- kong/plugins/oauth2/access.lua | 10 ++++------ 1 file changed, 4 insertions(+), 6 deletions(-) diff --git a/kong/plugins/oauth2/access.lua b/kong/plugins/oauth2/access.lua index 2acdc741ad10..263317509e90 100644 --- a/kong/plugins/oauth2/access.lua +++ b/kong/plugins/oauth2/access.lua @@ -1,11 +1,13 @@ local url = require "socket.url" local utils = require "kong.tools.utils" local constants = require "kong.constants" -local sha256 = require "resty.sha256" local timestamp = require "kong.tools.timestamp" local secret = require "kong.plugins.oauth2.secret" +local sha256_base64url = require "kong.tools.sha256".sha256_base64url + + local kong = kong local type = type local next = next @@ -485,11 +487,7 @@ local function validate_pkce_verifier(parameters, auth_code) } end - local s256 = sha256:new() - s256:update(verifier) - local digest = s256:final() - - local challenge = base64url_encode(digest) + local challenge = sha256_base64url(verifier) if not challenge or not auth_code.challenge From c976cbee745db655e6c35155cbdb4dd7a100a00d Mon Sep 17 00:00:00 2001 From: Robin Xiang Date: Thu, 23 Nov 2023 15:29:18 +0800 Subject: [PATCH 148/249] fix(datadog): fix a bug that datalog plugin is not triggered for serviceless routes (#12068) fix a bug that datadog plugin is not triggered for serviceless routes. In this PR, the datadog plugin can be triggered even if the serviceless routes are hit, and the value of tag name for the metric data is set to an empty string which is still a valid tag for datadog. 
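For illustration, a minimal standalone sketch of the nil-safe name lookup this change introduces (plain Lua; `NULL` stands in for `ngx.null`, and the helper name is invented for the example rather than taken from the plugin):

```lua
-- Sketch only: derive the `name` tag for a logged request.
local NULL = {}  -- placeholder sentinel standing in for ngx.null

local function service_name_tag(message)
  if not message.service then
    -- serviceless route: metrics are still emitted, with an empty name tag
    return ""
  end
  local name = message.service.name ~= NULL and message.service.name
                                             or message.service.host
  return (name:gsub("%.", "_"))
end

print(service_name_tag({ service = { name = "my.service", host = "up.example" } }))  --> my_service
print(service_name_tag({ service = { name = NULL, host = "up.example" } }))          --> up_example
print(service_name_tag({}))                                                          --> "" (empty name tag)
```
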
FTI-5576 --- ...ss-routes-still-trigger-datalog-plugin.yml | 3 + kong/plugins/datadog/handler.lua | 15 +-- spec/03-plugins/08-datadog/01-log_spec.lua | 95 +++++++++++++++---- 3 files changed, 83 insertions(+), 30 deletions(-) create mode 100644 changelog/unreleased/kong/serviceless-routes-still-trigger-datalog-plugin.yml diff --git a/changelog/unreleased/kong/serviceless-routes-still-trigger-datalog-plugin.yml b/changelog/unreleased/kong/serviceless-routes-still-trigger-datalog-plugin.yml new file mode 100644 index 000000000000..71df7dd33bcc --- /dev/null +++ b/changelog/unreleased/kong/serviceless-routes-still-trigger-datalog-plugin.yml @@ -0,0 +1,3 @@ +message: "**Datadog**: Fix a bug that datadog plugin is not triggered for serviceless routes. In this fix, datadog plugin is always triggered, and the value of tag `name`(service_name) is set as an empty value." +type: bugfix +scope: Plugin \ No newline at end of file diff --git a/kong/plugins/datadog/handler.lua b/kong/plugins/datadog/handler.lua index 3158a9e45e88..4b68e8487a47 100644 --- a/kong/plugins/datadog/handler.lua +++ b/kong/plugins/datadog/handler.lua @@ -56,10 +56,6 @@ local function send_entries_to_datadog(conf, messages) end for _, message in ipairs(messages) do - local name = gsub(message.service.name ~= null and - message.service.name or message.service.host, - "%.", "_") - local stat_name = { request_size = "request.size", response_size = "response.size", @@ -87,8 +83,10 @@ local function send_entries_to_datadog(conf, messages) local get_consumer_id = get_consumer_id[metric_config.consumer_identifier] local consumer_id = get_consumer_id and get_consumer_id(message.consumer) or nil local tags = compose_tags( - name, message.response and message.response.status or "-", - consumer_id, metric_config.tags, conf) + message.service and gsub(message.service.name ~= null and + message.service.name or message.service.host, "%.", "_") or "", + message.response and message.response.status or "-", + consumer_id, metric_config.tags, conf) logger:send_statsd(stat_name, stat_value, logger.stat_types[metric_config.stat_type], @@ -107,12 +105,7 @@ local DatadogHandler = { VERSION = kong_meta.version, } - function DatadogHandler:log(conf) - if not ngx.ctx.service then - return - end - local ok, err = Queue.enqueue( Queue.get_plugin_params("datadog", conf), send_entries_to_datadog, diff --git a/spec/03-plugins/08-datadog/01-log_spec.lua b/spec/03-plugins/08-datadog/01-log_spec.lua index 8ec13a9a7c83..90b9e2f9f266 100644 --- a/spec/03-plugins/08-datadog/01-log_spec.lua +++ b/spec/03-plugins/08-datadog/01-log_spec.lua @@ -1,8 +1,10 @@ local helpers = require "spec.helpers" local cjson = require "cjson" +local stringx = require "pl.stringx" describe("Plugin: datadog (log)", function() + local DEFAULT_METRICS_COUNT = 6 lazy_setup(function() helpers.setenv('KONG_DATADOG_AGENT_HOST', 'localhost') @@ -93,6 +95,11 @@ describe("Plugin: datadog (log)", function() } } + local route9 = bp.routes:insert { + paths = { "/serviceless" }, + no_service = true, + } + bp.plugins:insert { name = "key-auth", route = { id = route1.id }, @@ -237,6 +244,25 @@ describe("Plugin: datadog (log)", function() }, } + bp.plugins:insert { + name = "datadog", + route = { id = route9.id }, + config = { + host = "127.0.0.1", + port = 9999, + queue_size = 2, + }, + } + + bp.plugins:insert { + name = "request-termination", + route = { id = route9.id }, + config = { + status_code = 200, + message = "OK", + } + } + assert(helpers.start_kong({ database = strategy, nginx_conf = 
"spec/fixtures/custom_nginx.template", @@ -245,17 +271,23 @@ describe("Plugin: datadog (log)", function() proxy_client = helpers.proxy_client() end) + lazy_teardown(function() + helpers.stop_kong() + end) + + before_each(function() + proxy_client = helpers.proxy_client() + end) + + after_each(function() if proxy_client then proxy_client:close() end - - - helpers.stop_kong() end) it("logs metrics over UDP", function() - local thread = helpers.udp_server(9999, 6) + local thread = helpers.udp_server(9999, DEFAULT_METRICS_COUNT) local res = assert(proxy_client:send { method = "GET", @@ -268,7 +300,7 @@ describe("Plugin: datadog (log)", function() local ok, gauges = thread:join() assert.True(ok) - assert.equal(6, #gauges) + assert.equal(DEFAULT_METRICS_COUNT, #gauges) assert.contains("kong.request.count:1|c|#name:dd1,status:200,consumer:bar,app:kong" , gauges) assert.contains("kong.latency:%d+|ms|#name:dd1,status:200,consumer:bar,app:kong", gauges, true) assert.contains("kong.request.size:%d+|ms|#name:dd1,status:200,consumer:bar,app:kong", gauges, true) @@ -278,7 +310,7 @@ describe("Plugin: datadog (log)", function() end) it("logs metrics over UDP #grpc", function() - local thread = helpers.udp_server(9999, 6) + local thread = helpers.udp_server(9999, DEFAULT_METRICS_COUNT) local grpc_cleint = assert(helpers.proxy_client_grpc()) @@ -293,7 +325,7 @@ describe("Plugin: datadog (log)", function() local ok, gauges = thread:join() assert.True(ok) - assert.equal(6, #gauges) + assert.equal(DEFAULT_METRICS_COUNT, #gauges) assert.contains("kong.request.count:1|c|#name:grpc,status:200,consumer:bar,app:kong" , gauges) assert.contains("kong.latency:%d+|ms|#name:grpc,status:200,consumer:bar,app:kong", gauges, true) assert.contains("kong.request.size:%d+|ms|#name:grpc,status:200,consumer:bar,app:kong", gauges, true) @@ -303,7 +335,7 @@ describe("Plugin: datadog (log)", function() end) it("logs metrics over UDP with custom prefix", function() - local thread = helpers.udp_server(9999, 6) + local thread = helpers.udp_server(9999, DEFAULT_METRICS_COUNT) local res = assert(proxy_client:send { method = "GET", @@ -316,7 +348,7 @@ describe("Plugin: datadog (log)", function() local ok, gauges = thread:join() assert.True(ok) - assert.equal(6, #gauges) + assert.equal(DEFAULT_METRICS_COUNT, #gauges) assert.contains("prefix.request.count:1|c|#name:dd4,status:200,consumer:bar,app:kong",gauges) assert.contains("prefix.latency:%d+|ms|#name:dd4,status:200,consumer:bar,app:kong", gauges, true) assert.contains("prefix.request.size:%d+|ms|#name:dd4,status:200,consumer:bar,app:kong", gauges, true) @@ -326,7 +358,7 @@ describe("Plugin: datadog (log)", function() end) it("logs metrics over UDP with custom tag names", function() - local thread = helpers.udp_server(9999, 6) + local thread = helpers.udp_server(9999, DEFAULT_METRICS_COUNT) local res = assert(proxy_client:send { method = "GET", @@ -339,7 +371,7 @@ describe("Plugin: datadog (log)", function() local ok, gauges = thread:join() assert.True(ok) - assert.equal(6, #gauges) + assert.equal(DEFAULT_METRICS_COUNT, #gauges) assert.contains("kong.request.count:1|c|#upstream:dd6,http_status:200,user:bar,app:kong",gauges) assert.contains("kong.latency:%d+|ms|#upstream:dd6,http_status:200,user:bar,app:kong", gauges, true) assert.contains("kong.request.size:%d+|ms|#upstream:dd6,http_status:200,user:bar,app:kong", gauges, true) @@ -387,7 +419,7 @@ describe("Plugin: datadog (log)", function() end) it("logs metrics to host/port defined via environment variables", function() - local thread 
= helpers.udp_server(9999, 6) + local thread = helpers.udp_server(9999, DEFAULT_METRICS_COUNT) local res = assert(proxy_client:send { method = "GET", @@ -400,7 +432,7 @@ describe("Plugin: datadog (log)", function() local ok, gauges = thread:join() assert.True(ok) - assert.equal(6, #gauges) + assert.equal(DEFAULT_METRICS_COUNT, #gauges) assert.contains("kong.request.count:1|c|#name:dd5,status:200,consumer:bar,app:kong" , gauges) assert.contains("kong.latency:%d+|ms|#name:dd5,status:200,consumer:bar,app:kong", gauges, true) assert.contains("kong.request.size:%d+|ms|#name:dd5,status:200,consumer:bar,app:kong", gauges, true) @@ -410,7 +442,7 @@ describe("Plugin: datadog (log)", function() end) it("logs metrics in several batches", function() - local thread = helpers.udp_server(9999, 6) + local thread = helpers.udp_server(9999, DEFAULT_METRICS_COUNT) local res = assert(proxy_client:send { method = "GET", @@ -423,7 +455,7 @@ describe("Plugin: datadog (log)", function() local ok, gauges = thread:join() assert.True(ok) - assert.equal(6, #gauges) + assert.equal(DEFAULT_METRICS_COUNT, #gauges) assert.contains("kong.request.count:1|c|#name:dd7,status:200,consumer:bar,app:kong" , gauges) assert.contains("kong.latency:%d+|ms|#name:dd7,status:200,consumer:bar,app:kong", gauges, true) assert.contains("kong.request.size:%d+|ms|#name:dd7,status:200,consumer:bar,app:kong", gauges, true) @@ -448,7 +480,7 @@ describe("Plugin: datadog (log)", function() local ok, gauges = thread:join() assert.True(ok) - assert.equal(6, #gauges) + assert.equal(DEFAULT_METRICS_COUNT, #gauges) end) it("should not return a runtime error (regression)", function() @@ -476,9 +508,9 @@ describe("Plugin: datadog (log)", function() thread:join() end) - + it("referenceable fields works", function() - local thread = helpers.udp_server(9999, 6, 6) + local thread = helpers.udp_server(9999, DEFAULT_METRICS_COUNT, 6) local another_proxy_client = helpers.proxy_client() local res = assert(another_proxy_client:send { @@ -493,7 +525,32 @@ describe("Plugin: datadog (log)", function() local ok, gauges = thread:join() assert.True(ok) - assert.equal(6, #gauges) + assert.equal(DEFAULT_METRICS_COUNT, #gauges) + end) + + it("datadog plugin is triggered for serviceless routes", function() + local thread = helpers.udp_server(9999, DEFAULT_METRICS_COUNT) + local res = assert(proxy_client:send { + method = "GET", + path = "/serviceless", + }) + + local body = assert.res_status(200, res) + assert.equals(body, '{"message":"OK"}') + + local ok, gauges = thread:join() + assert.True(ok) + assert.equals(DEFAULT_METRICS_COUNT, #gauges) + + for _, g in ipairs(gauges) do + -- tags start with `#` + local tmp = stringx.split(g, '#') + local tag_idx = #tmp + assert(tag_idx == 2, "Error: missing tags") + local tags = tmp[tag_idx] + assert(tags, "Error: missing tags") + assert(string.match(tags, "name:,"), "Error: the value of `name` must be an empty string for serviceless routes") + end end) end) end From cc6f139f5428c7e47786f7be283d53a4c6394b8a Mon Sep 17 00:00:00 2001 From: Wangchong Zhou Date: Thu, 23 Nov 2023 16:30:53 +0800 Subject: [PATCH 149/249] hotfix(cd): skip comment on commit step (#12090) The token seems to be changed/expired and no longer working. Allow the step to fail to unblock the workflow. 
--- .github/workflows/release.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 39507c76f691..198f34c6ad07 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -396,6 +396,7 @@ jobs: - name: Comment on commit if: github.event_name == 'push' && matrix.label == 'ubuntu' uses: peter-evans/commit-comment@5a6f8285b8f2e8376e41fe1b563db48e6cf78c09 # v3.0.0 + continue-on-error: true # TODO: temporary fix until the token is back with: token: ${{ secrets.GHA_COMMENT_TOKEN }} body: | From aa7074f620b7c56b8037d24c391ef97f9ecde7d7 Mon Sep 17 00:00:00 2001 From: samugi Date: Tue, 21 Nov 2023 18:08:11 +0100 Subject: [PATCH 150/249] perf(tracing): do not create spans in timer phase Before this change timers would generate spans, which means DB and DNS spans in recurring timers would be continuously generated and garbage-collected. This commit checks the exact ngx phase and runs it against a whitelist to ensure `timer` phase does not generate spans. --- .../unreleased/kong/perf-tracing-from-timers.yml | 3 +++ kong/pdk/tracing.lua | 16 ++++++++++++---- spec/01-unit/26-tracing/01-tracer_pdk_spec.lua | 10 ++++++++-- .../03-plugins/37-opentelemetry/01-otlp_spec.lua | 6 ++++++ 4 files changed, 29 insertions(+), 6 deletions(-) create mode 100644 changelog/unreleased/kong/perf-tracing-from-timers.yml diff --git a/changelog/unreleased/kong/perf-tracing-from-timers.yml b/changelog/unreleased/kong/perf-tracing-from-timers.yml new file mode 100644 index 000000000000..bc081ed674b8 --- /dev/null +++ b/changelog/unreleased/kong/perf-tracing-from-timers.yml @@ -0,0 +1,3 @@ +message: "Performance optimization to avoid unnecessary creations and garbage-collections of spans" +type: "performance" +scope: "PDK" diff --git a/kong/pdk/tracing.lua b/kong/pdk/tracing.lua index ef9d81e0db94..6337e1fddc03 100644 --- a/kong/pdk/tracing.lua +++ b/kong/pdk/tracing.lua @@ -9,7 +9,6 @@ local require = require local ffi = require "ffi" local tablepool = require "tablepool" local new_tab = require "table.new" -local base = require "resty.core.base" local utils = require "kong.tools.utils" local phase_checker = require "kong.pdk.private.phases" @@ -421,6 +420,15 @@ noop_tracer.set_active_span = NOOP noop_tracer.process_span = NOOP noop_tracer.set_should_sample = NOOP +local VALID_TRACING_PHASES = { + rewrite = true, + access = true, + header_filter = true, + body_filter = true, + log = true, + content = true, +} + --- New Tracer local function new_tracer(name, options) name = name or "default" @@ -450,7 +458,7 @@ local function new_tracer(name, options) -- @phases rewrite, access, header_filter, response, body_filter, log, admin_api -- @treturn table span function self.active_span() - if not base.get_request() then + if not VALID_TRACING_PHASES[ngx.get_phase()] then return end @@ -463,7 +471,7 @@ local function new_tracer(name, options) -- @phases rewrite, access, header_filter, response, body_filter, log, admin_api -- @tparam table span function self.set_active_span(span) - if not base.get_request() then + if not VALID_TRACING_PHASES[ngx.get_phase()] then return end @@ -482,7 +490,7 @@ local function new_tracer(name, options) -- @tparam table options TODO(mayo) -- @treturn table span function self.start_span(...) 
- if not base.get_request() then + if not VALID_TRACING_PHASES[ngx.get_phase()] then return noop_span end diff --git a/spec/01-unit/26-tracing/01-tracer_pdk_spec.lua b/spec/01-unit/26-tracing/01-tracer_pdk_spec.lua index 285c980adf8e..2cd05a72a0f0 100644 --- a/spec/01-unit/26-tracing/01-tracer_pdk_spec.lua +++ b/spec/01-unit/26-tracing/01-tracer_pdk_spec.lua @@ -49,7 +49,7 @@ end local unhook_log_spy = debug.sethook describe("Tracer PDK", function() - local ok, err, _ + local ok, err, old_ngx_get_phase, _ local log_spy lazy_setup(function() @@ -57,9 +57,15 @@ describe("Tracer PDK", function() _G.kong = kong_global.new() kong_global.init_pdk(kong) log_spy = hook_log_spy() + old_ngx_get_phase = ngx.get_phase + -- trick the pdk into thinking we are not in the timer context + _G.ngx.get_phase = function() return "access" end -- luacheck: ignore end) - lazy_teardown(unhook_log_spy) + lazy_teardown(function() + unhook_log_spy() + _G.ngx.get_phase = old_ngx_get_phase -- luacheck: ignore + end) describe("initialize tracer", function() diff --git a/spec/03-plugins/37-opentelemetry/01-otlp_spec.lua b/spec/03-plugins/37-opentelemetry/01-otlp_spec.lua index eead16142b2e..754743ffe60e 100644 --- a/spec/03-plugins/37-opentelemetry/01-otlp_spec.lua +++ b/spec/03-plugins/37-opentelemetry/01-otlp_spec.lua @@ -44,16 +44,22 @@ local pb_decode_span = function(data) end describe("Plugin: opentelemetry (otlp)", function() + local old_ngx_get_phase + lazy_setup(function () -- overwrite for testing pb.option("enum_as_value") pb.option("auto_default_values") + old_ngx_get_phase = ngx.get_phase + -- trick the pdk into thinking we are not in the timer context + _G.ngx.get_phase = function() return "access" end -- luacheck: ignore end) lazy_teardown(function() -- revert it back pb.option("enum_as_name") pb.option("no_default_values") + _G.ngx.get_phase = old_ngx_get_phase -- luacheck: ignore end) after_each(function () From 0ddde040539064e78a623956a41d0aae2ad64bb7 Mon Sep 17 00:00:00 2001 From: Wangchong Zhou Date: Thu, 23 Nov 2023 17:52:33 +0800 Subject: [PATCH 151/249] chore(deps): bump lua-resty-openssl to 1.0.2 (#12088) --- changelog/unreleased/kong/bump-resty-openssl-1.0.1.yml | 3 --- changelog/unreleased/kong/bump-resty-openssl-1.0.2.yml | 3 +++ kong-3.6.0-0.rockspec | 2 +- 3 files changed, 4 insertions(+), 4 deletions(-) delete mode 100644 changelog/unreleased/kong/bump-resty-openssl-1.0.1.yml create mode 100644 changelog/unreleased/kong/bump-resty-openssl-1.0.2.yml diff --git a/changelog/unreleased/kong/bump-resty-openssl-1.0.1.yml b/changelog/unreleased/kong/bump-resty-openssl-1.0.1.yml deleted file mode 100644 index d90a6effd810..000000000000 --- a/changelog/unreleased/kong/bump-resty-openssl-1.0.1.yml +++ /dev/null @@ -1,3 +0,0 @@ -message: Bump resty-openssl from 0.8.25 to 1.0.1 -type: dependency -scope: Core diff --git a/changelog/unreleased/kong/bump-resty-openssl-1.0.2.yml b/changelog/unreleased/kong/bump-resty-openssl-1.0.2.yml new file mode 100644 index 000000000000..05ba386d7076 --- /dev/null +++ b/changelog/unreleased/kong/bump-resty-openssl-1.0.2.yml @@ -0,0 +1,3 @@ +message: Bump resty-openssl from 0.8.25 to 1.0.2 +type: dependency +scope: Core diff --git a/kong-3.6.0-0.rockspec b/kong-3.6.0-0.rockspec index 1617e7ff99e5..06a3ec366454 100644 --- a/kong-3.6.0-0.rockspec +++ b/kong-3.6.0-0.rockspec @@ -34,7 +34,7 @@ dependencies = { "lua-resty-healthcheck == 3.0.0", "lua-messagepack == 0.5.4", "lua-resty-aws == 1.3.5", - "lua-resty-openssl == 1.0.1", + "lua-resty-openssl == 1.0.2", 
"lua-resty-counter == 0.2.1", "lua-resty-ipmatcher == 0.6.1", "lua-resty-acme == 0.12.0", From fc259b4ded41ea304f2489ecfbbd3bdc3a7803b3 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 20 Nov 2023 08:23:15 +0000 Subject: [PATCH 152/249] chore(deps): bump actions/github-script from 6 to 7 Bumps [actions/github-script](https://github.com/actions/github-script) from 6 to 7. - [Release notes](https://github.com/actions/github-script/releases) - [Commits](https://github.com/actions/github-script/compare/v6...v7) --- updated-dependencies: - dependency-name: actions/github-script dependency-type: direct:production update-type: version-update:semver-major ... Signed-off-by: dependabot[bot] --- .github/workflows/backport-fail-bot.yml | 2 +- .github/workflows/release-and-tests-fail-bot.yml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/backport-fail-bot.yml b/.github/workflows/backport-fail-bot.yml index a11015622cb6..f8393da03522 100644 --- a/.github/workflows/backport-fail-bot.yml +++ b/.github/workflows/backport-fail-bot.yml @@ -11,7 +11,7 @@ jobs: steps: - name: Generate Slack Payload id: generate-payload - uses: actions/github-script@v6 + uses: actions/github-script@v7 with: script: | const slack_mapping = JSON.parse(process.env.SLACK_MAPPING); diff --git a/.github/workflows/release-and-tests-fail-bot.yml b/.github/workflows/release-and-tests-fail-bot.yml index 0f504f7cbab9..d651bef52903 100644 --- a/.github/workflows/release-and-tests-fail-bot.yml +++ b/.github/workflows/release-and-tests-fail-bot.yml @@ -20,7 +20,7 @@ jobs: env: SLACK_CHANNEL: gateway-notifications SLACK_MAPPING: "${{ vars.GH_ID_2_SLACK_ID_MAPPING }}" - uses: actions/github-script@v6 + uses: actions/github-script@v7 with: script: | const slack_mapping = JSON.parse(process.env.SLACK_MAPPING); From 286867da94cbc6b81010b106c37487ac589820a1 Mon Sep 17 00:00:00 2001 From: windmgc Date: Thu, 23 Nov 2023 16:20:12 +0800 Subject: [PATCH 153/249] Revert "chore: add write permission for backport action" This reverts commit c468b77efae40c044031760120889af37fe8cb0d. --- .github/workflows/backport.yml | 1 - 1 file changed, 1 deletion(-) diff --git a/.github/workflows/backport.yml b/.github/workflows/backport.yml index 901580fe073b..2d2d2c1d8f11 100644 --- a/.github/workflows/backport.yml +++ b/.github/workflows/backport.yml @@ -5,7 +5,6 @@ on: permissions: contents: write # so it can comment pull-requests: write # so it can create pull requests - actions: write jobs: backport: name: Backport From ecee51fe7b51565f5ceb5f50fdc3df90809d22ef Mon Sep 17 00:00:00 2001 From: windmgc Date: Thu, 23 Nov 2023 16:20:21 +0800 Subject: [PATCH 154/249] Revert "chore: trigger backport on label addition" This reverts commit 7e4c654aef13ef4137b6d33260ab7f50461e497b. 
--- .github/workflows/backport.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/backport.yml b/.github/workflows/backport.yml index 2d2d2c1d8f11..290eb67c8912 100644 --- a/.github/workflows/backport.yml +++ b/.github/workflows/backport.yml @@ -1,7 +1,7 @@ name: Backport on: pull_request_target: - types: [closed, labeled] # runs when the pull request is closed/merged or labeled (to trigger a backport in hindsight) + types: [closed] permissions: contents: write # so it can comment pull-requests: write # so it can create pull requests From 4c70cfd3544d8639516c6e07495bea5ffe775f6d Mon Sep 17 00:00:00 2001 From: windmgc Date: Thu, 23 Nov 2023 16:22:59 +0800 Subject: [PATCH 155/249] Revert "chore(deps): bump korthout/backport-action from 2.1.0 to 2.1.1" This reverts commit 9ffc223671e92149e75a7980fcbec8bd030356c8. --- .github/workflows/backport.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/backport.yml b/.github/workflows/backport.yml index 290eb67c8912..c2cc8d2a5100 100644 --- a/.github/workflows/backport.yml +++ b/.github/workflows/backport.yml @@ -13,7 +13,7 @@ jobs: steps: - uses: actions/checkout@v4 - name: Create backport pull requests - uses: korthout/backport-action@08bafb375e6e9a9a2b53a744b987e5d81a133191 # v2.1.1 + uses: korthout/backport-action@cb79e4e5f46c7d7d653dd3d5fa8a9b0a945dfe4b # v2.1.0 with: github_token: ${{ secrets.PAT }} pull_title: '[backport -> ${target_branch}] ${pull_title}' From 6077171c2ca697322ed562335a6aff10a390ac52 Mon Sep 17 00:00:00 2001 From: windmgc Date: Thu, 23 Nov 2023 16:23:09 +0800 Subject: [PATCH 156/249] Revert "chore(ci): improve backporting process (#11924)" This reverts commit 0c1c94ce0cc964cb01f951af98a62dd6ad5c667e. --- .github/workflows/backport.yml | 31 ++++++++++++++----------------- 1 file changed, 14 insertions(+), 17 deletions(-) diff --git a/.github/workflows/backport.yml b/.github/workflows/backport.yml index c2cc8d2a5100..7cc4b9c134a3 100644 --- a/.github/workflows/backport.yml +++ b/.github/workflows/backport.yml @@ -1,27 +1,24 @@ name: Backport on: pull_request_target: - types: [closed] -permissions: - contents: write # so it can comment - pull-requests: write # so it can create pull requests + types: + - closed + - labeled + jobs: backport: name: Backport runs-on: ubuntu-latest - if: github.event.pull_request.merged + if: > + github.event.pull_request.merged + && ( + github.event.action == 'closed' + || ( + github.event.action == 'labeled' + && contains(github.event.label.name, 'backport') + ) + ) steps: - - uses: actions/checkout@v4 - - name: Create backport pull requests - uses: korthout/backport-action@cb79e4e5f46c7d7d653dd3d5fa8a9b0a945dfe4b # v2.1.0 + - uses: tibdex/backport@9565281eda0731b1d20c4025c43339fb0a23812e # v2.0.4 with: github_token: ${{ secrets.PAT }} - pull_title: '[backport -> ${target_branch}] ${pull_title}' - merge_commits: 'skip' - copy_labels_pattern: ^(?!backport ).* # copies all labels except those starting with "backport " - label_pattern: ^backport (release\/[^ ]+)$ # filters for labels starting with "backport " and extracts the branch name - pull_description: |- - Automated backport to `${target_branch}`, triggered by a label in #${pull_number}. 
- copy_assignees: true - copy_milestone: true - copy_requested_reviewers: true From c160360bf3c1aaad3e26217de95a4f120abc4fe1 Mon Sep 17 00:00:00 2001 From: Joshua Schmid Date: Thu, 23 Nov 2023 15:36:15 +0100 Subject: [PATCH 157/249] fix(cherry-picks): prevent comment flood in case of errors Signed-off-by: Joshua Schmid --- .github/workflows/cherry-picks.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/cherry-picks.yml b/.github/workflows/cherry-picks.yml index 6383c1d5fd6a..82c1a0df4130 100644 --- a/.github/workflows/cherry-picks.yml +++ b/.github/workflows/cherry-picks.yml @@ -19,7 +19,7 @@ jobs: github.event_name == 'issue_comment' && github.event.issue.pull_request && contains(fromJSON('["MEMBER", "COLLABORATOR", "OWNER"]'), github.event.comment.author_association) && - contains(github.event.comment.body, '/cherry-pick') + startsWith(github.event.comment.body, '/cherry-pick') ) steps: - uses: actions/checkout@v4 From 796af06b3b9f747dd1e89f01ac6375f25b266030 Mon Sep 17 00:00:00 2001 From: tzssangglass Date: Fri, 24 Nov 2023 15:46:52 +0800 Subject: [PATCH 158/249] chore(*): revise the comment of the tls.validate_client_cert (#12070) --- kong/clustering/tls.lua | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/kong/clustering/tls.lua b/kong/clustering/tls.lua index cc528ff24d14..0f3098b055bd 100644 --- a/kong/clustering/tls.lua +++ b/kong/clustering/tls.lua @@ -189,8 +189,8 @@ end ---@param cp_cert kong.clustering.certinfo # clustering certinfo table ---@param dp_cert_pem string # data plane cert text --- ----@return boolean? success ----@return string? error +---@return table|nil x509 instance +---@return string? error function tls.validate_client_cert(kong_config, cp_cert, dp_cert_pem) if not dp_cert_pem then return nil, "data plane failed to present client certificate during handshake" From ea6a73c5d42bf3cbdc474d0e9a142929d8f823be Mon Sep 17 00:00:00 2001 From: Chrono Date: Mon, 27 Nov 2023 13:58:09 +0800 Subject: [PATCH 159/249] docs(changelog): tune the message of atc-router version bump (#12035) --- changelog/unreleased/kong/bump-atc-router-1.3.1.yml | 3 +++ changelog/unreleased/kong/bump_atc_router.yml | 2 -- 2 files changed, 3 insertions(+), 2 deletions(-) create mode 100644 changelog/unreleased/kong/bump-atc-router-1.3.1.yml delete mode 100644 changelog/unreleased/kong/bump_atc_router.yml diff --git a/changelog/unreleased/kong/bump-atc-router-1.3.1.yml b/changelog/unreleased/kong/bump-atc-router-1.3.1.yml new file mode 100644 index 000000000000..b1cbe7fa8949 --- /dev/null +++ b/changelog/unreleased/kong/bump-atc-router-1.3.1.yml @@ -0,0 +1,3 @@ +message: Bumped atc-router from 1.2.0 to 1.3.1 +type: dependency +scope: Core diff --git a/changelog/unreleased/kong/bump_atc_router.yml b/changelog/unreleased/kong/bump_atc_router.yml deleted file mode 100644 index a0013d1e64db..000000000000 --- a/changelog/unreleased/kong/bump_atc_router.yml +++ /dev/null @@ -1,2 +0,0 @@ -message: Bump `atc-router` to `v1.3.1` -type: "dependency" From 53d50e740badb59caa67ee002edfddb8396fbc24 Mon Sep 17 00:00:00 2001 From: Chrono Date: Mon, 27 Nov 2023 14:01:33 +0800 Subject: [PATCH 160/249] refactor(router): only load configured flavor module (#11997) KAG-3135 --- kong/router/init.lua | 33 ++++++++++++++++----------------- spec/01-unit/08-router_spec.lua | 4 +++- 2 files changed, 19 insertions(+), 18 deletions(-) diff --git a/kong/router/init.lua b/kong/router/init.lua index ebd065c18bdb..abec995a5091 100644 --- a/kong/router/init.lua +++ 
b/kong/router/init.lua @@ -5,9 +5,6 @@ local _MT = { __index = _M, } local kong = kong -local traditional = require("kong.router.traditional") -local expressions = require("kong.router.expressions") -local compat = require("kong.router.compat") local utils = require("kong.router.utils") @@ -17,6 +14,13 @@ local phonehome_statistics = utils.phonehome_statistics _M.DEFAULT_MATCH_LRUCACHE_SIZE = utils.DEFAULT_MATCH_LRUCACHE_SIZE +local FLAVOR_TO_MODULE = { + traditional = "kong.router.traditional", + expressions = "kong.router.expressions", + traditional_compatible = "kong.router.compat", +} + + function _M:exec(ctx) return self.trad.exec(ctx) end @@ -36,33 +40,28 @@ end function _M.new(routes, cache, cache_neg, old_router) local flavor = kong and kong.configuration and - kong.configuration.router_flavor + kong.configuration.router_flavor or + "traditional" - phonehome_statistics(routes) + local router = require(FLAVOR_TO_MODULE[flavor]) - if not flavor or flavor == "traditional" then + phonehome_statistics(routes) - local trad, err = traditional.new(routes, cache, cache_neg) + if flavor == "traditional" then + local trad, err = router.new(routes, cache, cache_neg) if not trad then return nil, err end return setmetatable({ trad = trad, + _set_ngx = trad._set_ngx, -- for unit-testing only }, _MT) end - if flavor == "expressions" then - return expressions.new(routes, cache, cache_neg, old_router) - end - - -- flavor == "traditional_compatible" - return compat.new(routes, cache, cache_neg, old_router) + -- flavor == "expressions" or "traditional_compatible" + return router.new(routes, cache, cache_neg, old_router) end -_M._set_ngx = traditional._set_ngx -_M.split_port = traditional.split_port - - return _M diff --git a/spec/01-unit/08-router_spec.lua b/spec/01-unit/08-router_spec.lua index 4ab4539d48ff..fa7af30c1a33 100644 --- a/spec/01-unit/08-router_spec.lua +++ b/spec/01-unit/08-router_spec.lua @@ -92,6 +92,8 @@ for _, flavor in ipairs({ "traditional", "traditional_compatible", "expressions" local it_trad_only = (flavor == "traditional") and it or pending describe("split_port()", function() + local split_port = require("kong.router.traditional").split_port + it("splits port number", function() for _, case in ipairs({ { { "" }, { "", "", false } }, @@ -120,7 +122,7 @@ for _, flavor in ipairs({ "traditional", "traditional_compatible", "expressions" { { "[::1]:80b", 88 }, { "[::1]:80b", "[::1]:80b:88", false } }, { { "[::1]/96", 88 }, { "[::1]/96", "[::1]/96:88", false } }, }) do - assert.same(case[2], { Router.split_port(unpack(case[1])) }) + assert.same(case[2], { split_port(unpack(case[1])) }) end end) end) From c0147273942d7d482b70788855f16adf86a69313 Mon Sep 17 00:00:00 2001 From: Yi S Date: Mon, 27 Nov 2023 15:08:02 +0800 Subject: [PATCH 161/249] feat(admin-api): add gateway edition info to the endpoint `/` (#12097) This commit is the follow-up change to the PR https://github.com/Kong/kong/pull/12045, since the the edition info is still useful to the kong manager, we choose to introduce the gateway edition information in the response of the `/` endpoint. 
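A rough sketch of the derivation this adds; the helper below and its sample version strings are illustrative only, while the `match("enterprise")` rule itself comes from the patch.

local function edition_from_version(version)
  -- any "enterprise" marker in the version string maps to "enterprise",
  -- everything else is reported as "community"
  return version:match("enterprise") and "enterprise" or "community"
end

assert(edition_from_version("3.5.0") == "community")
assert(edition_from_version("3.5.0.0-enterprise-edition") == "enterprise")

-- clients can then branch on the new field returned by `GET /`, e.g.
--   { "version": "...", "edition": "community", "tagline": "Welcome to kong", ... }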
Fix FTI-5557 --- .../kong/add-gateway-edition-to-root-endpoint-admin-api.yml | 3 +++ kong/api/routes/kong.lua | 1 + spec/02-integration/04-admin_api/02-kong_routes_spec.lua | 3 ++- 3 files changed, 6 insertions(+), 1 deletion(-) create mode 100644 changelog/unreleased/kong/add-gateway-edition-to-root-endpoint-admin-api.yml diff --git a/changelog/unreleased/kong/add-gateway-edition-to-root-endpoint-admin-api.yml b/changelog/unreleased/kong/add-gateway-edition-to-root-endpoint-admin-api.yml new file mode 100644 index 000000000000..a332be2ecced --- /dev/null +++ b/changelog/unreleased/kong/add-gateway-edition-to-root-endpoint-admin-api.yml @@ -0,0 +1,3 @@ +message: add gateway edition to the root endpoint of the admin api +type: feature +scope: Admin API diff --git a/kong/api/routes/kong.lua b/kong/api/routes/kong.lua index 212ddf64a826..16a2d4c7dcd6 100644 --- a/kong/api/routes/kong.lua +++ b/kong/api/routes/kong.lua @@ -130,6 +130,7 @@ return { return kong.response.exit(200, { tagline = tagline, version = version, + edition = meta._VERSION:match("enterprise") and "enterprise" or "community", hostname = knode.get_hostname(), node_id = node_id, timers = { diff --git a/spec/02-integration/04-admin_api/02-kong_routes_spec.lua b/spec/02-integration/04-admin_api/02-kong_routes_spec.lua index 66cc828503f6..06e5ae656958 100644 --- a/spec/02-integration/04-admin_api/02-kong_routes_spec.lua +++ b/spec/02-integration/04-admin_api/02-kong_routes_spec.lua @@ -71,7 +71,7 @@ describe("Admin API - Kong routes with strategy #" .. strategy, function() assert.not_nil(res.headers["X-Kong-Admin-Latency"]) end) - it("returns Kong's version number and tagline", function() + it("returns Kong's version number, edition info and tagline", function() local res = assert(client:send { method = "GET", path = "/" @@ -79,6 +79,7 @@ describe("Admin API - Kong routes with strategy #" .. strategy, function() local body = assert.res_status(200, res) local json = cjson.decode(body) assert.equal(meta._VERSION, json.version) + assert.equal(meta._VERSION:match("enterprise") and "enterprise" or "community", json.edition) assert.equal("Welcome to kong", json.tagline) end) it("returns a UUID as the node_id", function() From f920f1f26ffe44dd873621eca50a03a721d608d5 Mon Sep 17 00:00:00 2001 From: Yi S Date: Mon, 27 Nov 2023 15:09:12 +0800 Subject: [PATCH 162/249] feat(configuration): display a warning message when Kong Manager is enabled but the Admin API is not enabled (#12071) Feedback from issue Kong/kong#11995 highlighted potential user confusion due to the internal connection between Kong Manager and the Admin API. To address this, a warning message will now be displayed to notify users that the current configuration combination will not function as expected. 
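A reduced sketch of the check this introduces, written against plain listener lists rather than the parsed Kong configuration; the helper names are mine, and the actual check lives in `kong/conf_loader/init.lua` as shown in the diff below.

local function strip(s)
  return (s:gsub("^%s+", ""):gsub("%s+$", ""))
end

local function admin_api_off(admin_listen)
  return #admin_listen < 1 or strip(admin_listen[1]) == "off"
end

local function gui_on(admin_gui_listen)
  return #admin_gui_listen > 0 and strip(admin_gui_listen[1]) ~= "off"
end

local function should_warn(conf)
  -- Kong Manager is served, but there is no Admin API for it to talk to
  return admin_api_off(conf.admin_listen) and gui_on(conf.admin_gui_listen)
end

assert(should_warn({ admin_listen = { "off" }, admin_gui_listen = { "0.0.0.0:8002" } }))
assert(not should_warn({ admin_listen = { "127.0.0.1:8001" }, admin_gui_listen = { "0.0.0.0:8002" } }))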
This resolves KAG-3158 --- ...splay-warning-message-for-km-misconfig.yml | 3 ++ kong/conf_loader/init.lua | 6 +++ spec/01-unit/03-conf_loader_spec.lua | 49 +++++++++++++++++++ 3 files changed, 58 insertions(+) create mode 100644 changelog/unreleased/kong/display-warning-message-for-km-misconfig.yml diff --git a/changelog/unreleased/kong/display-warning-message-for-km-misconfig.yml b/changelog/unreleased/kong/display-warning-message-for-km-misconfig.yml new file mode 100644 index 000000000000..682716a5bc51 --- /dev/null +++ b/changelog/unreleased/kong/display-warning-message-for-km-misconfig.yml @@ -0,0 +1,3 @@ +message: display a warning message when Kong Manager is enabled but the Admin API is not enabled +type: feature +scope: Configuration diff --git a/kong/conf_loader/init.lua b/kong/conf_loader/init.lua index 92a9f05e9464..7d8fb7a3f8c9 100644 --- a/kong/conf_loader/init.lua +++ b/kong/conf_loader/init.lua @@ -1449,6 +1449,12 @@ local function check_and_parse(conf, opts) end end + if #conf.admin_listen < 1 or strip(conf.admin_listen[1]) == "off" then + if #conf.admin_gui_listen > 0 and strip(conf.admin_gui_listen[1]) ~= "off" then + log.warn("Kong Manager won't be functional because the Admin API is not listened on any interface") + end + end + return #errors == 0, errors[1], errors end diff --git a/spec/01-unit/03-conf_loader_spec.lua b/spec/01-unit/03-conf_loader_spec.lua index ad41d52ea8bd..9a79256e3fa3 100644 --- a/spec/01-unit/03-conf_loader_spec.lua +++ b/spec/01-unit/03-conf_loader_spec.lua @@ -1,5 +1,6 @@ local conf_loader = require "kong.conf_loader" local utils = require "kong.tools.utils" +local log = require "kong.cmd.utils.log" local helpers = require "spec.helpers" local tablex = require "pl.tablex" local pl_path = require "pl.path" @@ -1630,6 +1631,54 @@ describe("Configuration loader", function() local conf = assert(conf_loader(helpers.test_conf_path)) assert.equal(DATABASE, conf.database) end) + it("should warns user if kong manager is enabled but admin API is not enabled", function () + local spy_log = spy.on(log, "warn") + + finally(function() + log.warn:revert() + assert:unregister("matcher", "str_match") + end) + + assert:register("matcher", "str_match", function (_state, arguments) + local expected = arguments[1] + return function(value) + return string.match(value, expected) ~= nil + end + end) + + local conf, err = conf_loader(nil, { + admin_listen = "off", + admin_gui_listen = "off", + }) + assert.is_nil(err) + assert.is_table(conf) + assert.spy(spy_log).was_called(0) + + conf, err = conf_loader(nil, { + admin_listen = "localhost:8001", + admin_gui_listen = "off", + }) + assert.is_nil(err) + assert.is_table(conf) + assert.spy(spy_log).was_called(0) + + conf, err = conf_loader(nil, { + admin_listen = "localhost:8001", + admin_gui_listen = "localhost:8002", + }) + assert.is_nil(err) + assert.is_table(conf) + assert.spy(spy_log).was_called(0) + + conf, err = conf_loader(nil, { + admin_listen = "off", + admin_gui_listen = "localhost:8002", + }) + assert.is_nil(err) + assert.is_table(conf) + assert.spy(spy_log).was_called(1) + assert.spy(spy_log).was_called_with("Kong Manager won't be functional because the Admin API is not listened on any interface") + end) end) describe("pg_semaphore options", function() From 9ec3494cb558ee03223218c7c74003f8bce3b267 Mon Sep 17 00:00:00 2001 From: Zachary Hu <6426329+outsinre@users.noreply.github.com> Date: Tue, 28 Nov 2023 17:27:18 +0800 Subject: [PATCH 163/249] fix(core): respect custom proxy_access_log (#12073) * fix(core): 
respect custom proxy_access_log Kong now has a fixed access log format `kong_log_format` that prevents customization and error on `kong start`. Related to #11663. If the `proxy_access_log` is not a valid pathname, then replace `kong_log_format` with the custom value. * fix(config): cover log_format name with hyphen * fix(config): early error when access log format is not defined * fix(config): discard warning or return nil * chore(config): style and comments * chore(*): comments --- .../kong/respect-custom-proxy_access_log.yml | 3 + kong/cmd/utils/prefix_handler.lua | 13 ++++- kong/templates/nginx_kong.lua | 4 ++ spec/01-unit/04-prefix_handler_spec.lua | 56 ++++++++++++++++--- 4 files changed, 66 insertions(+), 10 deletions(-) create mode 100644 changelog/unreleased/kong/respect-custom-proxy_access_log.yml diff --git a/changelog/unreleased/kong/respect-custom-proxy_access_log.yml b/changelog/unreleased/kong/respect-custom-proxy_access_log.yml new file mode 100644 index 000000000000..92b77e6d0680 --- /dev/null +++ b/changelog/unreleased/kong/respect-custom-proxy_access_log.yml @@ -0,0 +1,3 @@ +message: "respect custom `proxy_access_log`" +type: bugfix +scope: Configuration diff --git a/kong/cmd/utils/prefix_handler.lua b/kong/cmd/utils/prefix_handler.lua index ea661fbf4ca0..189c3a03981c 100644 --- a/kong/cmd/utils/prefix_handler.lua +++ b/kong/cmd/utils/prefix_handler.lua @@ -239,7 +239,6 @@ local function compile_conf(kong_config, conf_template, template_env_inject) -- computed config properties for templating local compile_env = { _escape = ">", - proxy_access_log_enabled = kong_config.proxy_access_log ~= "off", pairs = pairs, ipairs = ipairs, tostring = tostring, @@ -248,6 +247,18 @@ local function compile_conf(kong_config, conf_template, template_env_inject) } } + local kong_proxy_access_log = kong_config.proxy_access_log + if kong_proxy_access_log ~= "off" then + compile_env.proxy_access_log_enabled = true + end + if kong_proxy_access_log then + -- example: proxy_access_log = 'logs/some-file.log apigw_json' + local _, custom_format_name = string.match(kong_proxy_access_log, "^(%S+)%s(%S+)") + if custom_format_name then + compile_env.custom_proxy_access_log = true + end + end + compile_env = pl_tablex.merge(compile_env, template_env_inject or {}, true) do diff --git a/kong/templates/nginx_kong.lua b/kong/templates/nginx_kong.lua index c12ba4b3f82e..3375dcf14572 100644 --- a/kong/templates/nginx_kong.lua +++ b/kong/templates/nginx_kong.lua @@ -89,7 +89,11 @@ server { lua_kong_error_log_request_id $kong_request_id; > if proxy_access_log_enabled then +> if custom_proxy_access_log then + access_log ${{PROXY_ACCESS_LOG}}; +> else access_log ${{PROXY_ACCESS_LOG}} kong_log_format; +> end > else access_log off; > end diff --git a/spec/01-unit/04-prefix_handler_spec.lua b/spec/01-unit/04-prefix_handler_spec.lua index 7cc4d9c56769..35c1d703e767 100644 --- a/spec/01-unit/04-prefix_handler_spec.lua +++ b/spec/01-unit/04-prefix_handler_spec.lua @@ -486,34 +486,72 @@ describe("NGINX conf compiler", function() describe("injected NGINX directives", function() it("injects proxy_access_log directive", function() - local conf = assert(conf_loader(nil, { + local conf, nginx_conf + conf = assert(conf_loader(nil, { proxy_access_log = "/dev/stdout", stream_listen = "0.0.0.0:9100", nginx_stream_tcp_nodelay = "on", })) - local nginx_conf = prefix_handler.compile_kong_conf(conf) + nginx_conf = prefix_handler.compile_kong_conf(conf) assert.matches("access_log%s/dev/stdout%skong_log_format;", nginx_conf) - local 
nginx_conf = prefix_handler.compile_kong_stream_conf(conf) + nginx_conf = prefix_handler.compile_kong_stream_conf(conf) assert.matches("access_log%slogs/access.log%sbasic;", nginx_conf) - local conf = assert(conf_loader(nil, { + conf = assert(conf_loader(nil, { proxy_access_log = "off", stream_listen = "0.0.0.0:9100", nginx_stream_tcp_nodelay = "on", })) - local nginx_conf = prefix_handler.compile_kong_conf(conf) + nginx_conf = prefix_handler.compile_kong_conf(conf) assert.matches("access_log%soff;", nginx_conf) - local nginx_conf = prefix_handler.compile_kong_stream_conf(conf) + nginx_conf = prefix_handler.compile_kong_stream_conf(conf) assert.matches("access_log%slogs/access.log%sbasic;", nginx_conf) - local conf = assert(conf_loader(nil, { + conf = assert(conf_loader(nil, { + proxy_access_log = "/dev/stdout apigw-json", + nginx_http_log_format = 'apigw-json "$kong_request_id"', + stream_listen = "0.0.0.0:9100", + nginx_stream_tcp_nodelay = "on", + })) + nginx_conf = prefix_handler.compile_kong_conf(conf) + assert.matches("access_log%s/dev/stdout%sapigw%-json;", nginx_conf) + nginx_conf = prefix_handler.compile_kong_stream_conf(conf) + assert.matches("access_log%slogs/access.log%sbasic;", nginx_conf) + + -- configure an undefined log format will error + -- on kong start. This is expected + conf = assert(conf_loader(nil, { + proxy_access_log = "/dev/stdout not-exist", + nginx_http_log_format = 'apigw-json "$kong_request_id"', + stream_listen = "0.0.0.0:9100", + nginx_stream_tcp_nodelay = "on", + })) + nginx_conf = prefix_handler.compile_kong_conf(conf) + assert.matches("access_log%s/dev/stdout%snot%-exist;", nginx_conf) + nginx_conf = prefix_handler.compile_kong_stream_conf(conf) + assert.matches("access_log%slogs/access.log%sbasic;", nginx_conf) + + conf = assert(conf_loader(nil, { + proxy_access_log = "/tmp/not-exist.log", + stream_listen = "0.0.0.0:9100", + nginx_stream_tcp_nodelay = "on", + })) + nginx_conf = prefix_handler.compile_kong_conf(conf) + assert.matches("access_log%s/tmp/not%-exist.log%skong_log_format;", nginx_conf) + nginx_conf = prefix_handler.compile_kong_stream_conf(conf) + assert.matches("access_log%slogs/access.log%sbasic;", nginx_conf) + + conf = assert(conf_loader(nil, { + prefix = "servroot_tmp", + nginx_stream_log_format = "custom '$protocol $status'", proxy_stream_access_log = "/dev/stdout custom", stream_listen = "0.0.0.0:9100", nginx_stream_tcp_nodelay = "on", })) - local nginx_conf = prefix_handler.compile_kong_conf(conf) + assert(prefix_handler.prepare_prefix(conf)) + nginx_conf = prefix_handler.compile_kong_conf(conf) assert.matches("access_log%slogs/access.log%skong_log_format;", nginx_conf) - local nginx_conf = prefix_handler.compile_kong_stream_conf(conf) + nginx_conf = prefix_handler.compile_kong_stream_conf(conf) assert.matches("access_log%s/dev/stdout%scustom;", nginx_conf) end) From 3b530391512798a23b89eb762e9ac060509c5d24 Mon Sep 17 00:00:00 2001 From: Samuele Date: Tue, 28 Nov 2023 12:01:41 +0100 Subject: [PATCH 164/249] refactor(tracing): add tracing context (#12062) Add a Tracing Context module for managing request-scoped tracing-related information. This provides an interface with ngx.ctx.TRACING_CONTEXT for plugins and core to read/update tracing information through. 
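As an illustrative usage sketch of that interface: the accessor names are taken from the new module in the diff further down, but the placeholder values are mine and the snippet is only meaningful inside a Kong/OpenResty request context (it relies on `ngx.ctx`).

local tracing_context = require "kong.tracing.tracing_context"

-- placeholders standing in for values produced during header propagation
local raw_trace_id = "0123456789abcdef"      -- normally a binary trace ID
local balancer_span = { name = "balancer" }  -- normally a span object

-- written while parsing/propagating tracing headers
tracing_context.set_raw_trace_id(raw_trace_id)
tracing_context.set_unlinked_span("balancer", balancer_span)

-- read back in later phases of the same request
assert(tracing_context.get_raw_trace_id() == raw_trace_id)
assert(tracing_context.get_unlinked_span("balancer") == balancer_span)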
This commit adds support to read/write: * Trace ID (raw and all formats) * Unlinked spans Follow ups will likely include: * Incoming/outgoing tracing headers information --- kong-3.6.0-0.rockspec | 1 + kong/plugins/opentelemetry/handler.lua | 11 +- kong/tracing/instrumentation.lua | 24 ++-- kong/tracing/propagation.lua | 49 +------- kong/tracing/tracing_context.lua | 111 ++++++++++++++++++ .../kong/plugins/trace-propagator/handler.lua | 4 +- 6 files changed, 135 insertions(+), 65 deletions(-) create mode 100644 kong/tracing/tracing_context.lua diff --git a/kong-3.6.0-0.rockspec b/kong-3.6.0-0.rockspec index 06a3ec366454..c311b824f5ff 100644 --- a/kong-3.6.0-0.rockspec +++ b/kong-3.6.0-0.rockspec @@ -553,6 +553,7 @@ build = { ["kong.tracing.instrumentation"] = "kong/tracing/instrumentation.lua", ["kong.tracing.propagation"] = "kong/tracing/propagation.lua", ["kong.tracing.request_id"] = "kong/tracing/request_id.lua", + ["kong.tracing.tracing_context"] = "kong/tracing/tracing_context.lua", ["kong.timing"] = "kong/timing/init.lua", ["kong.timing.context"] = "kong/timing/context.lua", diff --git a/kong/plugins/opentelemetry/handler.lua b/kong/plugins/opentelemetry/handler.lua index b0a4bfa67d35..db296fe045b0 100644 --- a/kong/plugins/opentelemetry/handler.lua +++ b/kong/plugins/opentelemetry/handler.lua @@ -3,6 +3,7 @@ local http = require "resty.http" local clone = require "table.clone" local otlp = require "kong.plugins.opentelemetry.otlp" local propagation = require "kong.tracing.propagation" +local tracing_context = require "kong.tracing.tracing_context" local ngx = ngx @@ -103,8 +104,7 @@ function OpenTelemetryHandler:access(conf) kong.ctx.plugin.should_sample = false end - local injected_parent_span = ngx.ctx.tracing and - ngx.ctx.tracing.injected.balancer_span or root_span + local injected_parent_span = tracing_context.get_unlinked_span("balancer") or root_span local header_type, trace_id, span_id, parent_id, should_sample, _ = propagation_parse(headers, conf.header_type) if should_sample == false then @@ -118,7 +118,8 @@ function OpenTelemetryHandler:access(conf) -- to propagate the correct trace ID we have to set it here -- before passing this span to propagation.set() injected_parent_span.trace_id = trace_id - kong.ctx.plugin.trace_id = trace_id + -- update the Tracing Context with the trace ID extracted from headers + tracing_context.set_raw_trace_id(trace_id) end -- overwrite root span's parent_id @@ -135,7 +136,7 @@ end function OpenTelemetryHandler:header_filter(conf) if conf.http_response_header_for_traceid then - local trace_id = kong.ctx.plugin.trace_id + local trace_id = tracing_context.get_raw_trace_id() if not trace_id then local root_span = ngx.ctx.KONG_SPANS and ngx.ctx.KONG_SPANS[1] trace_id = root_span and root_span.trace_id @@ -156,7 +157,7 @@ function OpenTelemetryHandler:log(conf) end -- overwrite - local trace_id = kong.ctx.plugin.trace_id + local trace_id = tracing_context.get_raw_trace_id() if trace_id then span.trace_id = trace_id end diff --git a/kong/tracing/instrumentation.lua b/kong/tracing/instrumentation.lua index 717b9121445b..b98099351714 100644 --- a/kong/tracing/instrumentation.lua +++ b/kong/tracing/instrumentation.lua @@ -6,6 +6,7 @@ local tablex = require "pl.tablex" local base = require "resty.core.base" local cjson = require "cjson" local ngx_re = require "ngx.re" +local tracing_context = require "kong.tracing.tracing_context" local ngx = ngx local var = ngx.var @@ -83,7 +84,7 @@ function _M.balancer(ctx) local last_try_balancer_span do - local 
balancer_span = ctx.tracing and ctx.tracing.injected.balancer_span + local balancer_span = tracing_context.get_unlinked_span("balancer", ctx) -- pre-created balancer span was not linked yet if balancer_span and not balancer_span.linked then last_try_balancer_span = balancer_span @@ -216,10 +217,6 @@ _M.available_types = available_types -- Record inbound request function _M.request(ctx) - ctx.tracing = { - injected = {}, - } - local client = kong.client local method = get_method() @@ -252,6 +249,9 @@ function _M.request(ctx) }, }) + -- update the tracing context with the request span trace ID + tracing_context.set_raw_trace_id(active_span.trace_id, ctx) + tracer.set_active_span(active_span) end @@ -263,12 +263,14 @@ function _M.precreate_balancer_span(ctx) end local root_span = ctx.KONG_SPANS and ctx.KONG_SPANS[1] - if ctx.tracing then - ctx.tracing.injected.balancer_span = tracer.create_span(nil, { - span_kind = 3, - parent = root_span, - }) - end + local balancer_span = tracer.create_span(nil, { + span_kind = 3, + parent = root_span, + }) + -- The balancer span is created during headers propagation, but is + -- linked later when the balancer data is available, so we add it + -- to the unlinked spans table to keep track of it. + tracing_context.set_unlinked_span("balancer", balancer_span, ctx) end diff --git a/kong/tracing/propagation.lua b/kong/tracing/propagation.lua index dbd7fa70d9a6..606fcfa5b871 100644 --- a/kong/tracing/propagation.lua +++ b/kong/tracing/propagation.lua @@ -3,6 +3,7 @@ local openssl_bignum = require "resty.openssl.bn" local table_merge = require "kong.tools.utils".table_merge local split = require "kong.tools.utils".split local strip = require "kong.tools.utils".strip +local tracing_context = require "kong.tracing.tracing_context" local unescape_uri = ngx.unescape_uri local char = string.char local match = string.match @@ -520,52 +521,6 @@ local function find_header_type(headers) end --- Performs a table merge to add trace ID formats to the current request's --- trace ID and returns a table containing all the formats. --- --- Plugins can handle different formats of trace ids depending on their headers --- configuration, multiple plugins executions may result in additional formats --- of the current request's trace id. --- --- The `propagation_trace_id_all_fmt` table is stored in `ngx.ctx` to keep the --- list of formats updated for the current request. --- --- Each item in the resulting `propagation_trace_id_all_fmt` table represents a --- format associated with the trace ID for the current request. 
--- --- @param trace_id_new_fmt table containing the trace ID formats to be added --- @returns propagation_trace_id_all_fmt table contains all the formats for --- the current request --- --- @example --- --- propagation_trace_id_all_fmt = { datadog = "1234", --- w3c = "abcd" } --- --- trace_id_new_fmt = { ot = "abcd", --- w3c = "abcd" } --- --- propagation_trace_id_all_fmt = { datadog = "1234", --- ot = "abcd", --- w3c = "abcd" } --- -local function add_trace_id_formats(trace_id_new_fmt) - -- TODO: @samugi - move trace ID table in the unified tracing context - local trace_id_all_fmt = ngx.ctx.propagation_trace_id_all_fmt - if not trace_id_all_fmt then - ngx.ctx.propagation_trace_id_all_fmt = trace_id_new_fmt - return trace_id_new_fmt - end - - -- add new formats to trace ID formats table - for format, value in pairs(trace_id_new_fmt) do - trace_id_all_fmt[format] = value - end - - return trace_id_all_fmt -end - - local function parse(headers, conf_header_type) if conf_header_type == "ignore" then return nil @@ -738,7 +693,7 @@ local function set(conf_header_type, found_header_type, proxy_span, conf_default ) end - trace_id_formats = add_trace_id_formats(trace_id_formats) + trace_id_formats = tracing_context.add_trace_id_formats(trace_id_formats) -- add trace IDs to log serializer output kong.log.set_serialize_value("trace_id", trace_id_formats) end diff --git a/kong/tracing/tracing_context.lua b/kong/tracing/tracing_context.lua new file mode 100644 index 000000000000..ebf42ec4bceb --- /dev/null +++ b/kong/tracing/tracing_context.lua @@ -0,0 +1,111 @@ +local table_new = require "table.new" + +local ngx = ngx + + +local function init_tracing_context(ctx) + ctx.TRACING_CONTEXT = { + -- trace ID information which includes its raw value (binary) and all the + -- available formats set during headers propagation + trace_id = { + raw = nil, + formatted = table_new(0, 6), + }, + -- Unlinked spans are spans that were created (to generate their ID) + -- but not added to `KONG_SPANS` (because their execution details were not + -- yet available). + unlinked_spans = table_new(0, 1) + } + + return ctx.TRACING_CONTEXT +end + + +local function get_tracing_context(ctx) + ctx = ctx or ngx.ctx + + if not ctx.TRACING_CONTEXT then + return init_tracing_context(ctx) + end + + return ctx.TRACING_CONTEXT +end + + +-- Performs a table merge to add trace ID formats to the current request's +-- trace ID and returns a table containing all the formats. +-- +-- Plugins can handle different formats of trace ids depending on their headers +-- configuration, multiple plugins executions may result in additional formats +-- of the current request's trace id. +-- +-- Each item in the resulting table represents a format associated with the +-- trace ID for the current request. 
+-- +-- @param trace_id_new_fmt table containing the trace ID formats to be added +-- @param ctx table the current ctx, if available +-- @returns propagation_trace_id_all_fmt table contains all the formats for +-- the current request +-- +-- @example +-- +-- propagation_trace_id_all_fmt = { datadog = "1234", +-- w3c = "abcd" } +-- +-- trace_id_new_fmt = { ot = "abcd", +-- w3c = "abcd" } +-- +-- propagation_trace_id_all_fmt = { datadog = "1234", +-- ot = "abcd", +-- w3c = "abcd" } +-- +local function add_trace_id_formats(trace_id_new_fmt, ctx) + local tracing_context = get_tracing_context(ctx) + local trace_id_all_fmt = tracing_context.trace_id.formatted + + if next(trace_id_all_fmt) == nil then + tracing_context.trace_id.formatted = trace_id_new_fmt + return trace_id_new_fmt + end + + -- add new formats to existing trace ID formats table + for format, value in pairs(trace_id_new_fmt) do + trace_id_all_fmt[format] = value + end + + return trace_id_all_fmt +end + + +local function get_raw_trace_id(ctx) + local tracing_context = get_tracing_context(ctx) + return tracing_context.trace_id.raw +end + + +local function set_raw_trace_id(trace_id, ctx) + local tracing_context = get_tracing_context(ctx) + tracing_context.trace_id.raw = trace_id +end + + +local function get_unlinked_span(name, ctx) + local tracing_context = get_tracing_context(ctx) + return tracing_context.unlinked_spans[name] +end + + +local function set_unlinked_span(name, span, ctx) + local tracing_context = get_tracing_context(ctx) + tracing_context.unlinked_spans[name] = span +end + + + +return { + add_trace_id_formats = add_trace_id_formats, + get_raw_trace_id = get_raw_trace_id, + set_raw_trace_id = set_raw_trace_id, + get_unlinked_span = get_unlinked_span, + set_unlinked_span = set_unlinked_span, +} diff --git a/spec/fixtures/custom_plugins/kong/plugins/trace-propagator/handler.lua b/spec/fixtures/custom_plugins/kong/plugins/trace-propagator/handler.lua index daf8a36c3581..909a11f093ba 100644 --- a/spec/fixtures/custom_plugins/kong/plugins/trace-propagator/handler.lua +++ b/spec/fixtures/custom_plugins/kong/plugins/trace-propagator/handler.lua @@ -1,4 +1,5 @@ local propagation = require "kong.tracing.propagation" +local tracing_context = require "kong.tracing.tracing_context" local ngx = ngx local kong = kong @@ -18,8 +19,7 @@ function _M:access(conf) if not root_span then root_span = tracer.start_span("root") end - local injected_parent_span = ngx.ctx.tracing and - ngx.ctx.tracing.injected.balancer_span or root_span + local injected_parent_span = tracing_context.get_unlinked_span("balancer") or root_span local header_type, trace_id, span_id, parent_id, should_sample = propagation_parse(headers) From 6d44e81235738a0466ec158dccfb73cb78af3f5a Mon Sep 17 00:00:00 2001 From: Chrono Date: Wed, 29 Nov 2023 11:15:13 +0800 Subject: [PATCH 165/249] feat(templates): add LMDB validation tag directive (#12026) This PR adds validation of LMDB cache by Kong's version (major + minor), wiping the content if tag mismatch to avoid compatibility issues during minor version upgrade. 
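The tag itself is just the running version truncated to major.minor; a minimal sketch of that derivation follows, where the function name and the sample version table are placeholders rather than patch code.

local function lmdb_validation_tag(version_table)
  -- only major.minor participate, so patch releases keep the cache intact
  return string.format("%d.%d", version_table.major, version_table.minor)
end

assert(lmdb_validation_tag({ major = 3, minor = 6, patch = 0 }) == "3.6")

-- the value is rendered into the generated nginx config roughly as
--   lmdb_validation_tag 3.6;
-- and lua-resty-lmdb wipes the store when the persisted tag no longer matches.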
KAG-3093 --- .requirements | 2 +- .../unreleased/kong/bump-lua-resty-lmdb-1.4.0.yml | 3 --- .../unreleased/kong/bump-lua-resty-lmdb-1.4.1.yml | 3 +++ .../unreleased/kong/introduce_lmdb_validation_tag.yml | 6 ++++++ kong/conf_loader/init.lua | 10 ++++++++++ kong/templates/nginx_inject.lua | 5 +++++ spec/01-unit/03-conf_loader_spec.lua | 8 ++++++++ spec/01-unit/04-prefix_handler_spec.lua | 6 ++++++ spec/fixtures/custom_nginx.template | 5 +++++ 9 files changed, 44 insertions(+), 4 deletions(-) delete mode 100644 changelog/unreleased/kong/bump-lua-resty-lmdb-1.4.0.yml create mode 100644 changelog/unreleased/kong/bump-lua-resty-lmdb-1.4.1.yml create mode 100644 changelog/unreleased/kong/introduce_lmdb_validation_tag.yml diff --git a/.requirements b/.requirements index 0c18973a4b66..d3543e59b819 100644 --- a/.requirements +++ b/.requirements @@ -7,7 +7,7 @@ PCRE=8.45 LIBEXPAT=2.5.0 LUA_KONG_NGINX_MODULE=4fbc3ddc7dcbc706ed286b95344f3cb6da17e637 # 0.8.0 -LUA_RESTY_LMDB=d236fc5ba339897e8f2c6ada1c1b4ab9311feee8 # 1.4.0 +LUA_RESTY_LMDB=19a6da0616db43baf8197dace59e64244430b3c4 # 1.4.1 LUA_RESTY_EVENTS=8448a92cec36ac04ea522e78f6496ba03c9b1fd8 # 0.2.0 LUA_RESTY_WEBSOCKET=60eafc3d7153bceb16e6327074e0afc3d94b1316 # 0.4.0 ATC_ROUTER=7a2ad42d4246598ba1f753b6ae79cb1456040afa # 1.3.1 diff --git a/changelog/unreleased/kong/bump-lua-resty-lmdb-1.4.0.yml b/changelog/unreleased/kong/bump-lua-resty-lmdb-1.4.0.yml deleted file mode 100644 index ea9b62f3d999..000000000000 --- a/changelog/unreleased/kong/bump-lua-resty-lmdb-1.4.0.yml +++ /dev/null @@ -1,3 +0,0 @@ -message: Bumped lua-resty-lmdb from 1.3.0 to 1.4.0 -type: dependency -scope: Core diff --git a/changelog/unreleased/kong/bump-lua-resty-lmdb-1.4.1.yml b/changelog/unreleased/kong/bump-lua-resty-lmdb-1.4.1.yml new file mode 100644 index 000000000000..c355f59c9722 --- /dev/null +++ b/changelog/unreleased/kong/bump-lua-resty-lmdb-1.4.1.yml @@ -0,0 +1,3 @@ +message: Bumped lua-resty-lmdb from 1.3.0 to 1.4.1 +type: dependency +scope: Core diff --git a/changelog/unreleased/kong/introduce_lmdb_validation_tag.yml b/changelog/unreleased/kong/introduce_lmdb_validation_tag.yml new file mode 100644 index 000000000000..6fd2ea4357a2 --- /dev/null +++ b/changelog/unreleased/kong/introduce_lmdb_validation_tag.yml @@ -0,0 +1,6 @@ +message: | + Validate LMDB cache by Kong's version (major + minor), + wiping the content if tag mismatch to avoid compatibility issues + during minor version upgrade. 
+type: feature +scope: Configuration diff --git a/kong/conf_loader/init.lua b/kong/conf_loader/init.lua index 7d8fb7a3f8c9..b9823e7f2601 100644 --- a/kong/conf_loader/init.lua +++ b/kong/conf_loader/init.lua @@ -1,6 +1,7 @@ local require = require +local kong_meta = require "kong.meta" local kong_default_conf = require "kong.templates.kong_defaults" local process_secrets = require "kong.cmd.utils.process_secrets" local nginx_signals = require "kong.cmd.utils.nginx_signals" @@ -683,6 +684,12 @@ local _nop_tostring_mt = { } +-- using kong version, "major.minor" +local LMDB_VALIDATION_TAG = string.format("%d.%d", + kong_meta._VERSION_TABLE.major, + kong_meta._VERSION_TABLE.minor) + + local function parse_value(value, typ) if type(value) == "string" then value = strip(value) @@ -2008,6 +2015,9 @@ local function load(path, custom_conf, opts) end end + -- lmdb validation tag + conf.lmdb_validation_tag = LMDB_VALIDATION_TAG + -- Wasm module support if conf.wasm then local wasm_filters = get_wasm_filters(conf.wasm_filters_path) diff --git a/kong/templates/nginx_inject.lua b/kong/templates/nginx_inject.lua index 37164044ad5b..06a0912e009a 100644 --- a/kong/templates/nginx_inject.lua +++ b/kong/templates/nginx_inject.lua @@ -2,5 +2,10 @@ return [[ > if database == "off" then lmdb_environment_path ${{LMDB_ENVIRONMENT_PATH}}; lmdb_map_size ${{LMDB_MAP_SIZE}}; + +> if lmdb_validation_tag then +lmdb_validation_tag $(lmdb_validation_tag); +> end + > end ]] diff --git a/spec/01-unit/03-conf_loader_spec.lua b/spec/01-unit/03-conf_loader_spec.lua index 9a79256e3fa3..10743b25eff3 100644 --- a/spec/01-unit/03-conf_loader_spec.lua +++ b/spec/01-unit/03-conf_loader_spec.lua @@ -1,3 +1,4 @@ +local kong_meta = require "kong.meta" local conf_loader = require "kong.conf_loader" local utils = require "kong.tools.utils" local log = require "kong.cmd.utils.log" @@ -16,6 +17,11 @@ ffi.cdef([[ ]]) +local KONG_VERSION = string.format("%d.%d", + kong_meta._VERSION_TABLE.major, + kong_meta._VERSION_TABLE.minor) + + local function kong_user_group_exists() if C.getpwnam("kong") == nil or C.getgrnam("kong") == nil then return false @@ -68,6 +74,7 @@ describe("Configuration loader", function() assert.same(nil, conf.privileged_agent) assert.same(true, conf.dedicated_config_processing) assert.same(false, conf.allow_debug_header) + assert.same(KONG_VERSION, conf.lmdb_validation_tag) assert.is_nil(getmetatable(conf)) end) it("loads a given file, with higher precedence", function() @@ -85,6 +92,7 @@ describe("Configuration loader", function() assert.same({"127.0.0.1:9001"}, conf.admin_listen) assert.same({"0.0.0.0:9000", "0.0.0.0:9443 http2 ssl", "0.0.0.0:9002 http2"}, conf.proxy_listen) + assert.same(KONG_VERSION, conf.lmdb_validation_tag) assert.is_nil(getmetatable(conf)) end) it("preserves default properties if not in given file", function() diff --git a/spec/01-unit/04-prefix_handler_spec.lua b/spec/01-unit/04-prefix_handler_spec.lua index 35c1d703e767..63052c965c06 100644 --- a/spec/01-unit/04-prefix_handler_spec.lua +++ b/spec/01-unit/04-prefix_handler_spec.lua @@ -1453,6 +1453,7 @@ describe("NGINX conf compiler", function() local main_inject_conf = prefix_handler.compile_nginx_main_inject_conf(helpers.test_conf) assert.not_matches("lmdb_environment_path", main_inject_conf, nil, true) assert.not_matches("lmdb_map_size", main_inject_conf, nil, true) + assert.not_matches("lmdb_validation_tag", main_inject_conf, nil, true) end) it("compiles a main NGINX inject conf #database=off", function() @@ -1462,6 +1463,11 @@ 
describe("NGINX conf compiler", function() local main_inject_conf = prefix_handler.compile_nginx_main_inject_conf(conf) assert.matches("lmdb_environment_path%s+dbless.lmdb;", main_inject_conf) assert.matches("lmdb_map_size%s+2048m;", main_inject_conf) + + local kong_meta = require "kong.meta" + local major = kong_meta._VERSION_TABLE.major + local minor = kong_meta._VERSION_TABLE.minor + assert.matches("lmdb_validation_tag%s+" .. major .. "%." .. minor .. ";", main_inject_conf) end) end) diff --git a/spec/fixtures/custom_nginx.template b/spec/fixtures/custom_nginx.template index abee4616d9bb..e6498c1ef196 100644 --- a/spec/fixtures/custom_nginx.template +++ b/spec/fixtures/custom_nginx.template @@ -16,6 +16,11 @@ $(el.name) $(el.value); > if database == "off" then lmdb_environment_path ${{LMDB_ENVIRONMENT_PATH}}; lmdb_map_size ${{LMDB_MAP_SIZE}}; + +> if lmdb_validation_tag then +lmdb_validation_tag $(lmdb_validation_tag); +> end + > end events { From 2441e792f184a070531c0f4984037312abe7fe2d Mon Sep 17 00:00:00 2001 From: Chrono Date: Wed, 29 Nov 2023 14:07:19 +0800 Subject: [PATCH 166/249] refactor(admin_gui): simplify code with table.concat (#12092) --- kong/admin_gui/init.lua | 33 +++++++++++++++++++-------------- 1 file changed, 19 insertions(+), 14 deletions(-) diff --git a/kong/admin_gui/init.lua b/kong/admin_gui/init.lua index 4186f4f966b5..f1c32500b620 100644 --- a/kong/admin_gui/init.lua +++ b/kong/admin_gui/init.lua @@ -1,31 +1,36 @@ local utils = require "kong.admin_gui.utils" +local fmt = string.format +local insert = table.insert +local concat = table.concat + +local select_listener = utils.select_listener +local prepare_variable = utils.prepare_variable + local _M = {} function _M.generate_kconfig(kong_config) - local api_listen = utils.select_listener(kong_config.admin_listeners, {ssl = false}) + local api_listen = select_listener(kong_config.admin_listeners, {ssl = false}) local api_port = api_listen and api_listen.port - local api_ssl_listen = utils.select_listener(kong_config.admin_listeners, {ssl = true}) + + local api_ssl_listen = select_listener(kong_config.admin_listeners, {ssl = true}) local api_ssl_port = api_ssl_listen and api_ssl_listen.port local configs = { - ADMIN_GUI_URL = utils.prepare_variable(kong_config.admin_gui_url), - ADMIN_GUI_PATH = utils.prepare_variable(kong_config.admin_gui_path), - ADMIN_API_URL = utils.prepare_variable(kong_config.admin_gui_api_url), - ADMIN_API_PORT = utils.prepare_variable(api_port), - ADMIN_API_SSL_PORT = utils.prepare_variable(api_ssl_port), - ANONYMOUS_REPORTS = utils.prepare_variable(kong_config.anonymous_reports), + ADMIN_GUI_URL = prepare_variable(kong_config.admin_gui_url), + ADMIN_GUI_PATH = prepare_variable(kong_config.admin_gui_path), + ADMIN_API_URL = prepare_variable(kong_config.admin_gui_api_url), + ADMIN_API_PORT = prepare_variable(api_port), + ADMIN_API_SSL_PORT = prepare_variable(api_ssl_port), + ANONYMOUS_REPORTS = prepare_variable(kong_config.anonymous_reports), } - local kconfig_str = "window.K_CONFIG = {\n" + local out = {} for config, value in pairs(configs) do - kconfig_str = kconfig_str .. " '" .. config .. "': '" .. value .. "',\n" + insert(out, fmt(" '%s': '%s'", config, value)) end - -- remove trailing comma - kconfig_str = kconfig_str:sub(1, -3) - - return kconfig_str .. "\n}\n" + return "window.K_CONFIG = {\n" .. concat(out, ",\n") .. 
"\n}\n" end return _M From 524fbdfa3aa367bfe968f561a0e0bfc64e7336a8 Mon Sep 17 00:00:00 2001 From: Zachary Hu <6426329+outsinre@users.noreply.github.com> Date: Wed, 29 Nov 2023 15:54:02 +0800 Subject: [PATCH 167/249] chore(ci): fix workflow webhook notification and use "Kong/github-slack-mapping" file based mapping instead of variables for easier update (#12021) FTI-5564 --- .github/workflows/backport-fail-bot.yml | 64 +++++++++++-------- .../workflows/release-and-tests-fail-bot.yml | 24 +++++-- 2 files changed, 57 insertions(+), 31 deletions(-) diff --git a/.github/workflows/backport-fail-bot.yml b/.github/workflows/backport-fail-bot.yml index f8393da03522..90004154abae 100644 --- a/.github/workflows/backport-fail-bot.yml +++ b/.github/workflows/backport-fail-bot.yml @@ -8,30 +8,44 @@ jobs: check_comment: runs-on: ubuntu-latest if: github.event.issue.pull_request != null && contains(github.event.comment.body, 'To backport manually, run these commands in your terminal') + steps: - - name: Generate Slack Payload - id: generate-payload - uses: actions/github-script@v7 - with: - script: | - const slack_mapping = JSON.parse(process.env.SLACK_MAPPING); - const pr_url = "${{ github.event.issue.pull_request.html_url}}"; - const pr_author_github_id = "${{ github.event.issue.user.login }}" - const pr_author_slack_id = slack_mapping[pr_author_github_id]; - const author = (pr_author_slack_id ? `<@${pr_author_slack_id}>` : pr_author_github_id); - const payload = { - text: `Backport failed in PR: ${pr_url}. Please check it ${author}.`, - channel: process.env.SLACK_CHANNEL, - }; - return JSON.stringify(payload); - result-encoding: string - env: - SLACK_CHANNEL: gateway-notifications - SLACK_MAPPING: "${{ vars.GH_ID_2_SLACK_ID_MAPPING }}" + - name: Fetch mapping file + id: fetch_mapping + uses: actions/github-script@v6 + env: + ACCESS_TOKEN: ${{ secrets.PAT }} + with: + script: | + const url = 'https://raw.githubusercontent.com/Kong/github-slack-mapping/main/mapping.json'; + const headers = {Authorization: `token ${process.env.ACCESS_TOKEN}`}; + const response = await fetch(url, {headers}); + const mapping = await response.json(); + return mapping; + + - name: Generate Slack Payload + id: generate-payload + uses: actions/github-script@v6 + env: + SLACK_CHANNEL: gateway-notifications + SLACK_MAPPING: "${{ steps.fetch_mapping.outputs.result }}" + with: + script: | + const pr_url = ${{ github.event.issue.pull_request.html_url }}; + const slack_mapping = JSON.parse(process.env.SLACK_MAPPING); + const pr_author_github_id = ${{ github.event.issue.user.login }}; + const pr_author_slack_id = slack_mapping[pr_author_github_id]; + const author = pr_author_slack_id ? 
`<@${pr_author_slack_id}>` : pr_author_github_id; + const payload = { + text: `${pr_url} from ${author} failed to backport.`, + channel: process.env.SLACK_CHANNEL, + }; + return JSON.stringify(payload); + result-encoding: string - - name: Send Slack Message - uses: slackapi/slack-github-action@e28cf165c92ffef168d23c5c9000cffc8a25e117 # v1.24.0 - with: - payload: ${{ steps.generate-payload.outputs.result }} - env: - SLACK_WEBHOOK_URL: ${{ secrets.SLACK_GATEWAY_NOTIFICATIONS_WEBHOOK }} + - name: Send Slack Message + uses: slackapi/slack-github-action@e28cf165c92ffef168d23c5c9000cffc8a25e117 # v1.24.0 + with: + payload: ${{ steps.generate-payload.outputs.result }} + env: + SLACK_WEBHOOK_URL: ${{ secrets.SLACK_GATEWAY_NOTIFICATIONS_WEBHOOK }} diff --git a/.github/workflows/release-and-tests-fail-bot.yml b/.github/workflows/release-and-tests-fail-bot.yml index d651bef52903..44796c755bff 100644 --- a/.github/workflows/release-and-tests-fail-bot.yml +++ b/.github/workflows/release-and-tests-fail-bot.yml @@ -15,25 +15,37 @@ jobs: runs-on: ubuntu-latest if: ${{ github.event.workflow_run.conclusion == 'failure' && github.event.workflow_run.event != 'schedule' }} steps: + - name: Fetch mapping file + id: fetch_mapping + uses: actions/github-script@v6 + env: + ACCESS_TOKEN: ${{ secrets.PAT }} + with: + script: | + const url = 'https://raw.githubusercontent.com/Kong/github-slack-mapping/main/mapping.json'; + const headers = {Authorization: `token ${process.env.ACCESS_TOKEN}`}; + const response = await fetch(url, {headers}); + const mapping = await response.json(); + return mapping; + - name: Generate Slack Payload id: generate-payload env: SLACK_CHANNEL: gateway-notifications - SLACK_MAPPING: "${{ vars.GH_ID_2_SLACK_ID_MAPPING }}" + SLACK_MAPPING: "${{ steps.fetch_mapping.outputs.result }}" uses: actions/github-script@v7 with: script: | - const slack_mapping = JSON.parse(process.env.SLACK_MAPPING); - const repo_name = "${{ github.event.workflow_run.repository.full_name }}"; - const run_id = ${{ github.event.workflow_run.id }}; - const run_url = `https://github.com/${repo_name}/actions/runs/${run_id}`; const workflow_name = "${{ github.event.workflow_run.name }}"; + const repo_name = "${{ github.event.workflow_run.repository.full_name }}"; const branch_name = "${{ github.event.workflow_run.head_branch }}"; + const run_url = "${{ github.event.workflow_run.html_url }}"; + const slack_mapping = JSON.parse(process.env.SLACK_MAPPING); const actor_github_id = "${{ github.event.workflow_run.actor.login }}"; const actor_slack_id = slack_mapping[actor_github_id]; const actor = actor_slack_id ? `<@${actor_slack_id}>` : actor_github_id; const payload = { - text: `Workflow “${workflow_name}” failed in repo: "${repo_name}", branch: "${branch_name}". Run URL: ${run_url}. Please check it ${actor} .`, + text: `Hello ${actor} , workflow “${workflow_name}” failed in repo: "${repo_name}", branch: "${branch_name}". Please check it: ${run_url}.`, channel: process.env.SLACK_CHANNEL, }; return JSON.stringify(payload); From 7e5a1138302508e8213d10e874fd5095c397d0db Mon Sep 17 00:00:00 2001 From: Wangchong Zhou Date: Wed, 29 Nov 2023 16:24:32 +0800 Subject: [PATCH 168/249] fix(cd): use correct sha for PR based docker build (#12115) use github.event.pull_request.head.sha instead of github.sha on a PR, as github.sha on PR is the merged commit (temporary commit). also correctly set the KONG_VERSION env var. 
* fix(cd): use correct sha for PR based docker build * fix(cd): set correct KONG_VERSION in docker image KAG-3251 --- .github/workflows/release.yml | 27 ++++++++++++++++----------- 1 file changed, 16 insertions(+), 11 deletions(-) diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 198f34c6ad07..e81e4e5c3e23 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -55,6 +55,8 @@ jobs: deploy-environment: ${{ steps.build-info.outputs.deploy-environment }} matrix: ${{ steps.build-info.outputs.matrix }} arch: ${{ steps.build-info.outputs.arch }} + # use github.event.pull_request.head.sha instead of github.sha on a PR, as github.sha on PR is the merged commit (temporary commit) + commit-sha: ${{ github.event.pull_request.head.sha || github.sha }} steps: - uses: actions/checkout@v3 @@ -342,11 +344,13 @@ jobs: - name: Docker meta id: meta uses: docker/metadata-action@v5 + env: + DOCKER_METADATA_PR_HEAD_SHA: true with: images: ${{ needs.metadata.outputs.prerelease-docker-repository }} tags: | - type=raw,${{ github.sha }}-${{ matrix.label }} - type=raw,enable=${{ matrix.label == 'ubuntu' }},${{ github.sha }} + type=raw,${{ needs.metadata.outputs.commit-sha }}-${{ matrix.label }} + type=raw,enable=${{ matrix.label == 'ubuntu' }},${{ needs.metadata.outputs.commit-sha }} - name: Set up QEMU if: matrix.docker-platforms != '' @@ -390,6 +394,7 @@ jobs: build-args: | KONG_BASE_IMAGE=${{ matrix.base-image }} KONG_ARTIFACT_PATH=bazel-bin/pkg/ + KONG_VERSION=${{ needs.metadata.outputs.kong-version }} RPM_PLATFORM=${{ steps.docker_rpm_platform_arg.outputs.rpm_platform }} EE_PORTS=8002 8445 8003 8446 8004 8447 @@ -401,7 +406,7 @@ jobs: token: ${{ secrets.GHA_COMMENT_TOKEN }} body: | ### Bazel Build - Docker image available `${{ needs.metadata.outputs.prerelease-docker-repository }}:${{ github.sha }}` + Docker image available `${{ needs.metadata.outputs.prerelease-docker-repository }}:${{ needs.metadata.outputs.commit-sha }}` Artifacts available https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }} verify-manifest-images: @@ -430,7 +435,7 @@ jobs: # docker image verify requires sudo to set correct permissions, so we # also install deps for root sudo -E pip install -r requirements.txt - IMAGE=${{ env.PRERELEASE_DOCKER_REPOSITORY }}:${{ github.sha }}-${{ matrix.label }} + IMAGE=${{ env.PRERELEASE_DOCKER_REPOSITORY }}:${{ needs.metadata.outputs.commit-sha }}-${{ matrix.label }} sudo -E python ./main.py --image $IMAGE -f docker_image_filelist.txt -s docker-image @@ -452,7 +457,7 @@ jobs: matrix: include: "${{ fromJSON(needs.metadata.outputs.matrix)['scan-vulnerabilities'] }}" env: - IMAGE: ${{ needs.metadata.outputs.prerelease-docker-repository }}:${{ github.sha }}-${{ matrix.label }} + IMAGE: ${{ needs.metadata.outputs.prerelease-docker-repository }}:${{ needs.metadata.outputs.commit-sha }}-${{ matrix.label }} steps: - name: Install regctl uses: regclient/actions/regctl-installer@main @@ -491,16 +496,16 @@ jobs: if: steps.image_manifest_metadata.outputs.amd64_sha != '' uses: Kong/public-shared-actions/security-actions/scan-docker-image@v1 with: - asset_prefix: kong-${{ github.sha }}-${{ matrix.label }}-linux-amd64 - image: ${{ needs.metadata.outputs.prerelease-docker-repository }}:${{ github.sha }}-${{ matrix.label }} + asset_prefix: kong-${{ needs.metadata.outputs.commit-sha }}-${{ matrix.label }}-linux-amd64 + image: ${{ needs.metadata.outputs.prerelease-docker-repository }}:${{ needs.metadata.outputs.commit-sha }}-${{ matrix.label }} 
- name: Scan ARM64 Image digest if: steps.image_manifest_metadata.outputs.manifest_list_exists == 'true' && steps.image_manifest_metadata.outputs.arm64_sha != '' id: sbom_action_arm64 uses: Kong/public-shared-actions/security-actions/scan-docker-image@v1 with: - asset_prefix: kong-${{ github.sha }}-${{ matrix.label }}-linux-arm64 - image: ${{ needs.metadata.outputs.prerelease-docker-repository }}:${{ github.sha }}-${{ matrix.label }} + asset_prefix: kong-${{ needs.metadata.outputs.commit-sha }}-${{ matrix.label }}-linux-arm64 + image: ${{ needs.metadata.outputs.prerelease-docker-repository }}:${{ needs.metadata.outputs.commit-sha }}-${{ matrix.label }} smoke-tests: name: Smoke Tests - ${{ matrix.label }} @@ -553,7 +558,7 @@ jobs: --restart always \ --network=host -d \ --pull always \ - ${{ env.PRERELEASE_DOCKER_REPOSITORY }}:${{ github.sha }}-${{ matrix.label }} \ + ${{ env.PRERELEASE_DOCKER_REPOSITORY }}:${{ needs.metadata.outputs.commit-sha }}-${{ matrix.label }} \ sh -c "kong migrations bootstrap && kong start" sleep 3 docker logs kong @@ -698,7 +703,7 @@ jobs: env: TAGS: "${{ steps.meta.outputs.tags }}" run: | - PRERELEASE_IMAGE=${{ env.PRERELEASE_DOCKER_REPOSITORY }}:${{ github.sha }}-${{ matrix.label }} + PRERELEASE_IMAGE=${{ env.PRERELEASE_DOCKER_REPOSITORY }}:${{ needs.metadata.outputs.commit-sha }}-${{ matrix.label }} docker pull $PRERELEASE_IMAGE for tag in $TAGS; do regctl -v debug image copy $PRERELEASE_IMAGE $tag From a4369e7e85bd5d984af4f5f0f8362835513d486a Mon Sep 17 00:00:00 2001 From: Chrono Date: Thu, 30 Nov 2023 00:46:46 +0800 Subject: [PATCH 169/249] refactor(conf_loader): separate constants from conf_loader core (#12055) --- kong-3.6.0-0.rockspec | 1 + kong/conf_loader/constants.lua | 641 ++++++++++++++++++++++++ kong/conf_loader/init.lua | 702 ++------------------------- spec/01-unit/03-conf_loader_spec.lua | 2 + 4 files changed, 692 insertions(+), 654 deletions(-) create mode 100644 kong/conf_loader/constants.lua diff --git a/kong-3.6.0-0.rockspec b/kong-3.6.0-0.rockspec index c311b824f5ff..b722cafb7507 100644 --- a/kong-3.6.0-0.rockspec +++ b/kong-3.6.0-0.rockspec @@ -66,6 +66,7 @@ build = { ["kong.hooks"] = "kong/hooks.lua", ["kong.conf_loader"] = "kong/conf_loader/init.lua", + ["kong.conf_loader.constants"] = "kong/conf_loader/constants.lua", ["kong.conf_loader.listeners"] = "kong/conf_loader/listeners.lua", ["kong.clustering"] = "kong/clustering/init.lua", diff --git a/kong/conf_loader/constants.lua b/kong/conf_loader/constants.lua new file mode 100644 index 000000000000..4cd4d2519991 --- /dev/null +++ b/kong/conf_loader/constants.lua @@ -0,0 +1,641 @@ +local kong_meta = require "kong.meta" +local constants = require "kong.constants" + + +local type = type +local lower = string.lower + + +local HEADERS = constants.HEADERS +local BUNDLED_VAULTS = constants.BUNDLED_VAULTS +local BUNDLED_PLUGINS = constants.BUNDLED_PLUGINS + + +-- Version 5: https://wiki.mozilla.org/Security/Server_Side_TLS +local CIPHER_SUITES = { + modern = { + protocols = "TLSv1.3", + ciphers = nil, -- all TLSv1.3 ciphers are considered safe + prefer_server_ciphers = "off", -- as all are safe, let client choose + }, + intermediate = { + protocols = "TLSv1.2 TLSv1.3", + ciphers = "ECDHE-ECDSA-AES128-GCM-SHA256:" + .. "ECDHE-RSA-AES128-GCM-SHA256:" + .. "ECDHE-ECDSA-AES256-GCM-SHA384:" + .. "ECDHE-RSA-AES256-GCM-SHA384:" + .. "ECDHE-ECDSA-CHACHA20-POLY1305:" + .. "ECDHE-RSA-CHACHA20-POLY1305:" + .. "DHE-RSA-AES128-GCM-SHA256:" + .. 
"DHE-RSA-AES256-GCM-SHA384", + dhparams = "ffdhe2048", + prefer_server_ciphers = "off", + }, + old = { + protocols = "TLSv1 TLSv1.1 TLSv1.2 TLSv1.3", + ciphers = "ECDHE-ECDSA-AES128-GCM-SHA256:" + .. "ECDHE-RSA-AES128-GCM-SHA256:" + .. "ECDHE-ECDSA-AES256-GCM-SHA384:" + .. "ECDHE-RSA-AES256-GCM-SHA384:" + .. "ECDHE-ECDSA-CHACHA20-POLY1305:" + .. "ECDHE-RSA-CHACHA20-POLY1305:" + .. "DHE-RSA-AES128-GCM-SHA256:" + .. "DHE-RSA-AES256-GCM-SHA384:" + .. "DHE-RSA-CHACHA20-POLY1305:" + .. "ECDHE-ECDSA-AES128-SHA256:" + .. "ECDHE-RSA-AES128-SHA256:" + .. "ECDHE-ECDSA-AES128-SHA:" + .. "ECDHE-RSA-AES128-SHA:" + .. "ECDHE-ECDSA-AES256-SHA384:" + .. "ECDHE-RSA-AES256-SHA384:" + .. "ECDHE-ECDSA-AES256-SHA:" + .. "ECDHE-RSA-AES256-SHA:" + .. "DHE-RSA-AES128-SHA256:" + .. "DHE-RSA-AES256-SHA256:" + .. "AES128-GCM-SHA256:" + .. "AES256-GCM-SHA384:" + .. "AES128-SHA256:" + .. "AES256-SHA256:" + .. "AES128-SHA:" + .. "AES256-SHA:" + .. "DES-CBC3-SHA", + prefer_server_ciphers = "on", + }, + fips = { -- https://wiki.openssl.org/index.php/FIPS_mode_and_TLS + -- TLSv1.0 and TLSv1.1 is not completely not FIPS compliant, + -- but must be used under certain condititions like key sizes, + -- signatures in the full chain that Kong can't control. + -- In that case, we disables TLSv1.0 and TLSv1.1 and user + -- can optionally turn them on if they are aware of the caveats. + -- No FIPS compliant predefined DH group available prior to + -- OpenSSL 3.0. + protocols = "TLSv1.2", + ciphers = "TLSv1.2+FIPS:kRSA+FIPS:!eNULL:!aNULL", + prefer_server_ciphers = "on", + } +} + + +local DEFAULT_PATHS = { + "/etc/kong/kong.conf", + "/etc/kong.conf", +} + + +local HEADER_KEY_TO_NAME = { + ["server_tokens"] = "server_tokens", + ["latency_tokens"] = "latency_tokens", + [lower(HEADERS.VIA)] = HEADERS.VIA, + [lower(HEADERS.SERVER)] = HEADERS.SERVER, + [lower(HEADERS.PROXY_LATENCY)] = HEADERS.PROXY_LATENCY, + [lower(HEADERS.RESPONSE_LATENCY)] = HEADERS.RESPONSE_LATENCY, + [lower(HEADERS.ADMIN_LATENCY)] = HEADERS.ADMIN_LATENCY, + [lower(HEADERS.UPSTREAM_LATENCY)] = HEADERS.UPSTREAM_LATENCY, + [lower(HEADERS.UPSTREAM_STATUS)] = HEADERS.UPSTREAM_STATUS, + [lower(HEADERS.REQUEST_ID)] = HEADERS.REQUEST_ID, +} + + +local UPSTREAM_HEADER_KEY_TO_NAME = { + [lower(HEADERS.REQUEST_ID)] = HEADERS.REQUEST_ID, +} + + +local EMPTY = {} + + +-- NOTE! Prefixes should always follow `nginx_[a-z]+_`. 
+local DYNAMIC_KEY_NAMESPACES = { + { + injected_conf_name = "nginx_main_directives", + prefix = "nginx_main_", + ignore = EMPTY, + }, + { + injected_conf_name = "nginx_events_directives", + prefix = "nginx_events_", + ignore = EMPTY, + }, + { + injected_conf_name = "nginx_http_directives", + prefix = "nginx_http_", + ignore = { + upstream_keepalive = true, + upstream_keepalive_timeout = true, + upstream_keepalive_requests = true, + -- we already add it to nginx_kong_inject.lua explicitly + lua_ssl_protocols = true, + }, + }, + { + injected_conf_name = "nginx_upstream_directives", + prefix = "nginx_upstream_", + ignore = EMPTY, + }, + { + injected_conf_name = "nginx_proxy_directives", + prefix = "nginx_proxy_", + ignore = EMPTY, + }, + { + injected_conf_name = "nginx_location_directives", + prefix = "nginx_location_", + ignore = EMPTY, + }, + { + injected_conf_name = "nginx_status_directives", + prefix = "nginx_status_", + ignore = EMPTY, + }, + { + injected_conf_name = "nginx_admin_directives", + prefix = "nginx_admin_", + ignore = EMPTY, + }, + { + injected_conf_name = "nginx_stream_directives", + prefix = "nginx_stream_", + ignore = { + -- we already add it to nginx_kong_stream_inject.lua explicitly + lua_ssl_protocols = true, + }, + }, + { + injected_conf_name = "nginx_supstream_directives", + prefix = "nginx_supstream_", + ignore = EMPTY, + }, + { + injected_conf_name = "nginx_sproxy_directives", + prefix = "nginx_sproxy_", + ignore = EMPTY, + }, + { + prefix = "pluginserver_", + ignore = EMPTY, + }, + { + prefix = "vault_", + ignore = EMPTY, + }, + { + injected_conf_name = "nginx_wasm_wasmtime_directives", + prefix = "nginx_wasm_wasmtime_", + ignore = EMPTY, + }, + { + injected_conf_name = "nginx_wasm_v8_directives", + prefix = "nginx_wasm_v8_", + ignore = EMPTY, + }, + { + injected_conf_name = "nginx_wasm_wasmer_directives", + prefix = "nginx_wasm_wasmer_", + ignore = EMPTY, + }, + { + injected_conf_name = "nginx_wasm_main_shm_kv_directives", + prefix = "nginx_wasm_shm_kv_", + ignore = EMPTY, + }, + { + injected_conf_name = "nginx_wasm_main_directives", + prefix = "nginx_wasm_", + ignore = EMPTY, + }, +} + + +local DEPRECATED_DYNAMIC_KEY_NAMESPACES = {} + + +local PREFIX_PATHS = { + nginx_pid = {"pids", "nginx.pid"}, + nginx_err_logs = {"logs", "error.log"}, + nginx_acc_logs = {"logs", "access.log"}, + admin_acc_logs = {"logs", "admin_access.log"}, + nginx_conf = {"nginx.conf"}, + nginx_kong_gui_include_conf = {"nginx-kong-gui-include.conf"}, + nginx_kong_conf = {"nginx-kong.conf"}, + nginx_kong_stream_conf = {"nginx-kong-stream.conf"}, + nginx_inject_conf = {"nginx-inject.conf"}, + nginx_kong_inject_conf = {"nginx-kong-inject.conf"}, + nginx_kong_stream_inject_conf = {"nginx-kong-stream-inject.conf"}, + + kong_env = {".kong_env"}, + kong_process_secrets = {".kong_process_secrets"}, + + ssl_cert_csr_default = {"ssl", "kong-default.csr"}, + ssl_cert_default = {"ssl", "kong-default.crt"}, + ssl_cert_key_default = {"ssl", "kong-default.key"}, + ssl_cert_default_ecdsa = {"ssl", "kong-default-ecdsa.crt"}, + ssl_cert_key_default_ecdsa = {"ssl", "kong-default-ecdsa.key"}, + + client_ssl_cert_default = {"ssl", "kong-default.crt"}, + client_ssl_cert_key_default = {"ssl", "kong-default.key"}, + + admin_ssl_cert_default = {"ssl", "admin-kong-default.crt"}, + admin_ssl_cert_key_default = {"ssl", "admin-kong-default.key"}, + admin_ssl_cert_default_ecdsa = {"ssl", "admin-kong-default-ecdsa.crt"}, + admin_ssl_cert_key_default_ecdsa = {"ssl", "admin-kong-default-ecdsa.key"}, + + 
admin_gui_ssl_cert_default = {"ssl", "admin-gui-kong-default.crt"}, + admin_gui_ssl_cert_key_default = {"ssl", "admin-gui-kong-default.key"}, + admin_gui_ssl_cert_default_ecdsa = {"ssl", "admin-gui-kong-default-ecdsa.crt"}, + admin_gui_ssl_cert_key_default_ecdsa = {"ssl", "admin-gui-kong-default-ecdsa.key"}, + + status_ssl_cert_default = {"ssl", "status-kong-default.crt"}, + status_ssl_cert_key_default = {"ssl", "status-kong-default.key"}, + status_ssl_cert_default_ecdsa = {"ssl", "status-kong-default-ecdsa.crt"}, + status_ssl_cert_key_default_ecdsa = {"ssl", "status-kong-default-ecdsa.key"}, +} + + +-- By default, all properties in the configuration are considered to +-- be strings/numbers, but if we want to forcefully infer their type, specify it +-- in this table. +-- Also holds "enums" which are lists of valid configuration values for some +-- settings. +-- See `typ_checks` for the validation function of each type. +-- +-- Types: +-- `boolean`: can be "on"/"off"/"true"/"false", will be inferred to a boolean +-- `ngx_boolean`: can be "on"/"off", will be inferred to a string +-- `array`: a comma-separated list +local CONF_PARSERS = { + -- forced string inferences (or else are retrieved as numbers) + port_maps = { typ = "array" }, + proxy_listen = { typ = "array" }, + admin_listen = { typ = "array" }, + admin_gui_listen = {typ = "array"}, + status_listen = { typ = "array" }, + stream_listen = { typ = "array" }, + cluster_listen = { typ = "array" }, + ssl_cert = { typ = "array" }, + ssl_cert_key = { typ = "array" }, + admin_ssl_cert = { typ = "array" }, + admin_ssl_cert_key = { typ = "array" }, + admin_gui_ssl_cert = { typ = "array" }, + admin_gui_ssl_cert_key = { typ = "array" }, + status_ssl_cert = { typ = "array" }, + status_ssl_cert_key = { typ = "array" }, + db_update_frequency = { typ = "number" }, + db_update_propagation = { typ = "number" }, + db_cache_ttl = { typ = "number" }, + db_cache_neg_ttl = { typ = "number" }, + db_resurrect_ttl = { typ = "number" }, + db_cache_warmup_entities = { typ = "array" }, + nginx_user = { + typ = "string", + alias = { + replacement = "nginx_main_user", + } + }, + nginx_daemon = { + typ = "ngx_boolean", + alias = { + replacement = "nginx_main_daemon", + } + }, + nginx_worker_processes = { + typ = "string", + alias = { + replacement = "nginx_main_worker_processes", + }, + }, + + worker_events_max_payload = { typ = "number" }, + + upstream_keepalive_pool_size = { typ = "number" }, + upstream_keepalive_max_requests = { typ = "number" }, + upstream_keepalive_idle_timeout = { typ = "number" }, + allow_debug_header = { typ = "boolean" }, + + headers = { typ = "array" }, + headers_upstream = { typ = "array" }, + trusted_ips = { typ = "array" }, + real_ip_header = { + typ = "string", + alias = { + replacement = "nginx_proxy_real_ip_header", + } + }, + real_ip_recursive = { + typ = "ngx_boolean", + alias = { + replacement = "nginx_proxy_real_ip_recursive", + } + }, + error_default_type = { enum = { + "application/json", + "application/xml", + "text/html", + "text/plain", + } + }, + + database = { enum = { "postgres", "cassandra", "off" } }, + pg_port = { typ = "number" }, + pg_timeout = { typ = "number" }, + pg_password = { typ = "string" }, + pg_ssl = { typ = "boolean" }, + pg_ssl_verify = { typ = "boolean" }, + pg_max_concurrent_queries = { typ = "number" }, + pg_semaphore_timeout = { typ = "number" }, + pg_keepalive_timeout = { typ = "number" }, + pg_pool_size = { typ = "number" }, + pg_backlog = { typ = "number" }, + _debug_pg_ttl_cleanup_interval = { 
typ = "number" }, + + pg_ro_port = { typ = "number" }, + pg_ro_timeout = { typ = "number" }, + pg_ro_password = { typ = "string" }, + pg_ro_ssl = { typ = "boolean" }, + pg_ro_ssl_verify = { typ = "boolean" }, + pg_ro_max_concurrent_queries = { typ = "number" }, + pg_ro_semaphore_timeout = { typ = "number" }, + pg_ro_keepalive_timeout = { typ = "number" }, + pg_ro_pool_size = { typ = "number" }, + pg_ro_backlog = { typ = "number" }, + + dns_resolver = { typ = "array" }, + dns_hostsfile = { typ = "string" }, + dns_order = { typ = "array" }, + dns_valid_ttl = { typ = "number" }, + dns_stale_ttl = { typ = "number" }, + dns_cache_size = { typ = "number" }, + dns_not_found_ttl = { typ = "number" }, + dns_error_ttl = { typ = "number" }, + dns_no_sync = { typ = "boolean" }, + privileged_worker = { + typ = "boolean", + deprecated = { + replacement = "dedicated_config_processing", + alias = function(conf) + if conf.dedicated_config_processing == nil and + conf.privileged_worker ~= nil then + conf.dedicated_config_processing = conf.privileged_worker + end + end, + }}, + dedicated_config_processing = { typ = "boolean" }, + worker_consistency = { enum = { "strict", "eventual" }, + -- deprecating values for enums + deprecated = { + value = "strict", + } + }, + router_consistency = { + enum = { "strict", "eventual" }, + deprecated = { + replacement = "worker_consistency", + alias = function(conf) + if conf.worker_consistency == nil and + conf.router_consistency ~= nil then + conf.worker_consistency = conf.router_consistency + end + end, + } + }, + router_flavor = { + enum = { "traditional", "traditional_compatible", "expressions" }, + }, + worker_state_update_frequency = { typ = "number" }, + + lua_max_req_headers = { typ = "number" }, + lua_max_resp_headers = { typ = "number" }, + lua_max_uri_args = { typ = "number" }, + lua_max_post_args = { typ = "number" }, + + ssl_protocols = { + typ = "string", + directives = { + "nginx_http_ssl_protocols", + "nginx_stream_ssl_protocols", + }, + }, + ssl_prefer_server_ciphers = { + typ = "ngx_boolean", + directives = { + "nginx_http_ssl_prefer_server_ciphers", + "nginx_stream_ssl_prefer_server_ciphers", + }, + }, + ssl_dhparam = { + typ = "string", + directives = { + "nginx_http_ssl_dhparam", + "nginx_stream_ssl_dhparam", + }, + }, + ssl_session_tickets = { + typ = "ngx_boolean", + directives = { + "nginx_http_ssl_session_tickets", + "nginx_stream_ssl_session_tickets", + }, + }, + ssl_session_timeout = { + typ = "string", + directives = { + "nginx_http_ssl_session_timeout", + "nginx_stream_ssl_session_timeout", + }, + }, + ssl_session_cache_size = { typ = "string" }, + + client_ssl = { typ = "boolean" }, + + proxy_access_log = { typ = "string" }, + proxy_error_log = { typ = "string" }, + proxy_stream_access_log = { typ = "string" }, + proxy_stream_error_log = { typ = "string" }, + admin_access_log = { typ = "string" }, + admin_error_log = { typ = "string" }, + admin_gui_access_log = {typ = "string"}, + admin_gui_error_log = {typ = "string"}, + status_access_log = { typ = "string" }, + status_error_log = { typ = "string" }, + log_level = { enum = { + "debug", + "info", + "notice", + "warn", + "error", + "crit", + "alert", + "emerg", + } + }, + vaults = { typ = "array" }, + plugins = { typ = "array" }, + anonymous_reports = { typ = "boolean" }, + + lua_ssl_trusted_certificate = { typ = "array" }, + lua_ssl_verify_depth = { typ = "number" }, + lua_ssl_protocols = { + typ = "string", + directives = { + "nginx_http_lua_ssl_protocols", + "nginx_stream_lua_ssl_protocols", 
+ }, + }, + lua_socket_pool_size = { typ = "number" }, + + role = { enum = { "data_plane", "control_plane", "traditional", }, }, + cluster_control_plane = { typ = "string", }, + cluster_cert = { typ = "string" }, + cluster_cert_key = { typ = "string" }, + cluster_mtls = { enum = { "shared", "pki" } }, + cluster_ca_cert = { typ = "string" }, + cluster_server_name = { typ = "string" }, + cluster_data_plane_purge_delay = { typ = "number" }, + cluster_ocsp = { enum = { "on", "off", "optional" } }, + cluster_max_payload = { typ = "number" }, + cluster_use_proxy = { typ = "boolean" }, + cluster_dp_labels = { typ = "array" }, + + kic = { typ = "boolean" }, + pluginserver_names = { typ = "array" }, + + untrusted_lua = { enum = { "on", "off", "sandbox" } }, + untrusted_lua_sandbox_requires = { typ = "array" }, + untrusted_lua_sandbox_environment = { typ = "array" }, + + lmdb_environment_path = { typ = "string" }, + lmdb_map_size = { typ = "string" }, + + opentelemetry_tracing = { + typ = "array", + alias = { + replacement = "tracing_instrumentations", + }, + deprecated = { + replacement = "tracing_instrumentations", + }, + }, + + tracing_instrumentations = { + typ = "array", + }, + + opentelemetry_tracing_sampling_rate = { + typ = "number", + deprecated = { + replacement = "tracing_sampling_rate", + }, + alias = { + replacement = "tracing_sampling_rate", + }, + }, + + tracing_sampling_rate = { + typ = "number", + }, + + proxy_server = { typ = "string" }, + proxy_server_ssl_verify = { typ = "boolean" }, + + wasm = { typ = "boolean" }, + wasm_filters_path = { typ = "string" }, + + error_template_html = { typ = "string" }, + error_template_json = { typ = "string" }, + error_template_xml = { typ = "string" }, + error_template_plain = { typ = "string" }, + + admin_gui_url = {typ = "string"}, + admin_gui_path = {typ = "string"}, + admin_gui_api_url = {typ = "string"}, + + request_debug = { typ = "boolean" }, + request_debug_token = { typ = "string" }, +} + + +-- List of settings whose values must not be printed when +-- using the CLI in debug mode (which prints all settings). +local CONF_SENSITIVE_PLACEHOLDER = "******" +local CONF_SENSITIVE = { + pg_password = true, + pg_ro_password = true, + proxy_server = true, -- hide proxy server URL as it may contain credentials + declarative_config_string = true, -- config may contain sensitive info + -- may contain absolute or base64 value of the the key + cluster_cert_key = true, + ssl_cert_key = true, + client_ssl_cert_key = true, + admin_ssl_cert_key = true, + admin_gui_ssl_cert_key = true, + status_ssl_cert_key = true, + debug_ssl_cert_key = true, +} + + +-- List of confs necessary for compiling injected nginx conf +local CONF_BASIC = { + prefix = true, + vaults = true, + database = true, + lmdb_environment_path = true, + lmdb_map_size = true, + lua_ssl_trusted_certificate = true, + lua_ssl_verify_depth = true, + lua_ssl_protocols = true, + nginx_http_lua_ssl_protocols = true, + nginx_stream_lua_ssl_protocols = true, + vault_env_prefix = true, +} + + +local TYP_CHECKS = { + array = function(v) return type(v) == "table" end, + string = function(v) return type(v) == "string" end, + number = function(v) return type(v) == "number" end, + boolean = function(v) return type(v) == "boolean" end, + ngx_boolean = function(v) return v == "on" or v == "off" end, +} + + +-- This meta table will prevent the parsed table to be passed on in the +-- intermediate Kong config file in the prefix directory. 
+-- We thus avoid 'table: 0x41c3fa58' from appearing into the prefix +-- hidden configuration file. +-- This is only to be applied to values that are injected into the +-- configuration object, and not configuration properties themselves, +-- otherwise we would prevent such properties from being specifiable +-- via environment variables. +local _NOP_TOSTRING_MT = { + __tostring = function() return "" end, +} + + +-- using kong version, "major.minor" +local LMDB_VALIDATION_TAG = string.format("%d.%d", + kong_meta._VERSION_TABLE.major, + kong_meta._VERSION_TABLE.minor) + + +return { + HEADERS = HEADERS, + BUNDLED_VAULTS = BUNDLED_VAULTS, + BUNDLED_PLUGINS = BUNDLED_PLUGINS, + + CIPHER_SUITES = CIPHER_SUITES, + DEFAULT_PATHS = DEFAULT_PATHS, + HEADER_KEY_TO_NAME = HEADER_KEY_TO_NAME, + UPSTREAM_HEADER_KEY_TO_NAME = UPSTREAM_HEADER_KEY_TO_NAME, + DYNAMIC_KEY_NAMESPACES = DYNAMIC_KEY_NAMESPACES, + DEPRECATED_DYNAMIC_KEY_NAMESPACES = DEPRECATED_DYNAMIC_KEY_NAMESPACES, + PREFIX_PATHS = PREFIX_PATHS, + CONF_PARSERS = CONF_PARSERS, + CONF_SENSITIVE_PLACEHOLDER = CONF_SENSITIVE_PLACEHOLDER, + CONF_SENSITIVE = CONF_SENSITIVE, + CONF_BASIC = CONF_BASIC, + TYP_CHECKS = TYP_CHECKS, + + _NOP_TOSTRING_MT = _NOP_TOSTRING_MT, + + LMDB_VALIDATION_TAG = LMDB_VALIDATION_TAG, +} + diff --git a/kong/conf_loader/init.lua b/kong/conf_loader/init.lua index b9823e7f2601..71e863892c59 100644 --- a/kong/conf_loader/init.lua +++ b/kong/conf_loader/init.lua @@ -1,7 +1,6 @@ local require = require -local kong_meta = require "kong.meta" local kong_default_conf = require "kong.templates.kong_defaults" local process_secrets = require "kong.cmd.utils.process_secrets" local nginx_signals = require "kong.cmd.utils.nginx_signals" @@ -10,7 +9,7 @@ local openssl_x509 = require "resty.openssl.x509" local pl_stringio = require "pl.stringio" local pl_stringx = require "pl.stringx" local socket_url = require "socket.url" -local constants = require "kong.constants" +local conf_constants = require "kong.conf_loader.constants" local listeners = require "kong.conf_loader.listeners" local pl_pretty = require "pl.pretty" local pl_config = require "pl.config" @@ -73,246 +72,6 @@ ffi.cdef([[ ]]) --- Version 5: https://wiki.mozilla.org/Security/Server_Side_TLS -local cipher_suites = { - modern = { - protocols = "TLSv1.3", - ciphers = nil, -- all TLSv1.3 ciphers are considered safe - prefer_server_ciphers = "off", -- as all are safe, let client choose - }, - intermediate = { - protocols = "TLSv1.2 TLSv1.3", - ciphers = "ECDHE-ECDSA-AES128-GCM-SHA256:" - .. "ECDHE-RSA-AES128-GCM-SHA256:" - .. "ECDHE-ECDSA-AES256-GCM-SHA384:" - .. "ECDHE-RSA-AES256-GCM-SHA384:" - .. "ECDHE-ECDSA-CHACHA20-POLY1305:" - .. "ECDHE-RSA-CHACHA20-POLY1305:" - .. "DHE-RSA-AES128-GCM-SHA256:" - .. "DHE-RSA-AES256-GCM-SHA384", - dhparams = "ffdhe2048", - prefer_server_ciphers = "off", - }, - old = { - protocols = "TLSv1 TLSv1.1 TLSv1.2 TLSv1.3", - ciphers = "ECDHE-ECDSA-AES128-GCM-SHA256:" - .. "ECDHE-RSA-AES128-GCM-SHA256:" - .. "ECDHE-ECDSA-AES256-GCM-SHA384:" - .. "ECDHE-RSA-AES256-GCM-SHA384:" - .. "ECDHE-ECDSA-CHACHA20-POLY1305:" - .. "ECDHE-RSA-CHACHA20-POLY1305:" - .. "DHE-RSA-AES128-GCM-SHA256:" - .. "DHE-RSA-AES256-GCM-SHA384:" - .. "DHE-RSA-CHACHA20-POLY1305:" - .. "ECDHE-ECDSA-AES128-SHA256:" - .. "ECDHE-RSA-AES128-SHA256:" - .. "ECDHE-ECDSA-AES128-SHA:" - .. "ECDHE-RSA-AES128-SHA:" - .. "ECDHE-ECDSA-AES256-SHA384:" - .. "ECDHE-RSA-AES256-SHA384:" - .. "ECDHE-ECDSA-AES256-SHA:" - .. "ECDHE-RSA-AES256-SHA:" - .. "DHE-RSA-AES128-SHA256:" - .. 
"DHE-RSA-AES256-SHA256:" - .. "AES128-GCM-SHA256:" - .. "AES256-GCM-SHA384:" - .. "AES128-SHA256:" - .. "AES256-SHA256:" - .. "AES128-SHA:" - .. "AES256-SHA:" - .. "DES-CBC3-SHA", - prefer_server_ciphers = "on", - }, - fips = { -- https://wiki.openssl.org/index.php/FIPS_mode_and_TLS - -- TLSv1.0 and TLSv1.1 is not completely not FIPS compliant, - -- but must be used under certain condititions like key sizes, - -- signatures in the full chain that Kong can't control. - -- In that case, we disables TLSv1.0 and TLSv1.1 and user - -- can optionally turn them on if they are aware of the caveats. - -- No FIPS compliant predefined DH group available prior to - -- OpenSSL 3.0. - protocols = "TLSv1.2", - ciphers = "TLSv1.2+FIPS:kRSA+FIPS:!eNULL:!aNULL", - prefer_server_ciphers = "on", - } -} - - -local DEFAULT_PATHS = { - "/etc/kong/kong.conf", - "/etc/kong.conf", -} - - -local HEADERS = constants.HEADERS -local HEADER_KEY_TO_NAME = { - ["server_tokens"] = "server_tokens", - ["latency_tokens"] = "latency_tokens", - [lower(HEADERS.VIA)] = HEADERS.VIA, - [lower(HEADERS.SERVER)] = HEADERS.SERVER, - [lower(HEADERS.PROXY_LATENCY)] = HEADERS.PROXY_LATENCY, - [lower(HEADERS.RESPONSE_LATENCY)] = HEADERS.RESPONSE_LATENCY, - [lower(HEADERS.ADMIN_LATENCY)] = HEADERS.ADMIN_LATENCY, - [lower(HEADERS.UPSTREAM_LATENCY)] = HEADERS.UPSTREAM_LATENCY, - [lower(HEADERS.UPSTREAM_STATUS)] = HEADERS.UPSTREAM_STATUS, - [lower(HEADERS.REQUEST_ID)] = HEADERS.REQUEST_ID, -} - -local UPSTREAM_HEADER_KEY_TO_NAME = { - [lower(HEADERS.REQUEST_ID)] = HEADERS.REQUEST_ID, -} - - -local EMPTY = {} - - --- NOTE! Prefixes should always follow `nginx_[a-z]+_`. -local DYNAMIC_KEY_NAMESPACES = { - { - injected_conf_name = "nginx_main_directives", - prefix = "nginx_main_", - ignore = EMPTY, - }, - { - injected_conf_name = "nginx_events_directives", - prefix = "nginx_events_", - ignore = EMPTY, - }, - { - injected_conf_name = "nginx_http_directives", - prefix = "nginx_http_", - ignore = { - upstream_keepalive = true, - upstream_keepalive_timeout = true, - upstream_keepalive_requests = true, - -- we already add it to nginx_kong_inject.lua explicitly - lua_ssl_protocols = true, - }, - }, - { - injected_conf_name = "nginx_upstream_directives", - prefix = "nginx_upstream_", - ignore = EMPTY, - }, - { - injected_conf_name = "nginx_proxy_directives", - prefix = "nginx_proxy_", - ignore = EMPTY, - }, - { - injected_conf_name = "nginx_location_directives", - prefix = "nginx_location_", - ignore = EMPTY, - }, - { - injected_conf_name = "nginx_status_directives", - prefix = "nginx_status_", - ignore = EMPTY, - }, - { - injected_conf_name = "nginx_admin_directives", - prefix = "nginx_admin_", - ignore = EMPTY, - }, - { - injected_conf_name = "nginx_stream_directives", - prefix = "nginx_stream_", - ignore = { - -- we already add it to nginx_kong_stream_inject.lua explicitly - lua_ssl_protocols = true, - }, - }, - { - injected_conf_name = "nginx_supstream_directives", - prefix = "nginx_supstream_", - ignore = EMPTY, - }, - { - injected_conf_name = "nginx_sproxy_directives", - prefix = "nginx_sproxy_", - ignore = EMPTY, - }, - { - prefix = "pluginserver_", - ignore = EMPTY, - }, - { - prefix = "vault_", - ignore = EMPTY, - }, - { - injected_conf_name = "nginx_wasm_wasmtime_directives", - prefix = "nginx_wasm_wasmtime_", - ignore = EMPTY, - }, - { - injected_conf_name = "nginx_wasm_v8_directives", - prefix = "nginx_wasm_v8_", - ignore = EMPTY, - }, - { - injected_conf_name = "nginx_wasm_wasmer_directives", - prefix = "nginx_wasm_wasmer_", - ignore = 
EMPTY, - }, - { - injected_conf_name = "nginx_wasm_main_shm_kv_directives", - prefix = "nginx_wasm_shm_kv_", - ignore = EMPTY, - }, - { - injected_conf_name = "nginx_wasm_main_directives", - prefix = "nginx_wasm_", - ignore = EMPTY, - }, -} - - -local DEPRECATED_DYNAMIC_KEY_NAMESPACES = {} - - -local PREFIX_PATHS = { - nginx_pid = {"pids", "nginx.pid"}, - nginx_err_logs = {"logs", "error.log"}, - nginx_acc_logs = {"logs", "access.log"}, - admin_acc_logs = {"logs", "admin_access.log"}, - nginx_conf = {"nginx.conf"}, - nginx_kong_gui_include_conf = {"nginx-kong-gui-include.conf"}, - nginx_kong_conf = {"nginx-kong.conf"}, - nginx_kong_stream_conf = {"nginx-kong-stream.conf"}, - nginx_inject_conf = {"nginx-inject.conf"}, - nginx_kong_inject_conf = {"nginx-kong-inject.conf"}, - nginx_kong_stream_inject_conf = {"nginx-kong-stream-inject.conf"}, - - kong_env = {".kong_env"}, - kong_process_secrets = {".kong_process_secrets"}, - - ssl_cert_csr_default = {"ssl", "kong-default.csr"}, - ssl_cert_default = {"ssl", "kong-default.crt"}, - ssl_cert_key_default = {"ssl", "kong-default.key"}, - ssl_cert_default_ecdsa = {"ssl", "kong-default-ecdsa.crt"}, - ssl_cert_key_default_ecdsa = {"ssl", "kong-default-ecdsa.key"}, - - client_ssl_cert_default = {"ssl", "kong-default.crt"}, - client_ssl_cert_key_default = {"ssl", "kong-default.key"}, - - admin_ssl_cert_default = {"ssl", "admin-kong-default.crt"}, - admin_ssl_cert_key_default = {"ssl", "admin-kong-default.key"}, - admin_ssl_cert_default_ecdsa = {"ssl", "admin-kong-default-ecdsa.crt"}, - admin_ssl_cert_key_default_ecdsa = {"ssl", "admin-kong-default-ecdsa.key"}, - - admin_gui_ssl_cert_default = {"ssl", "admin-gui-kong-default.crt"}, - admin_gui_ssl_cert_key_default = {"ssl", "admin-gui-kong-default.key"}, - admin_gui_ssl_cert_default_ecdsa = {"ssl", "admin-gui-kong-default-ecdsa.crt"}, - admin_gui_ssl_cert_key_default_ecdsa = {"ssl", "admin-gui-kong-default-ecdsa.key"}, - - status_ssl_cert_default = {"ssl", "status-kong-default.crt"}, - status_ssl_cert_key_default = {"ssl", "status-kong-default.key"}, - status_ssl_cert_default_ecdsa = {"ssl", "status-kong-default-ecdsa.crt"}, - status_ssl_cert_key_default_ecdsa = {"ssl", "status-kong-default-ecdsa.key"}, -} - - local function is_predefined_dhgroup(group) if type(group) ~= "string" then return false @@ -325,371 +84,6 @@ local function is_predefined_dhgroup(group) end --- By default, all properties in the configuration are considered to --- be strings/numbers, but if we want to forcefully infer their type, specify it --- in this table. --- Also holds "enums" which are lists of valid configuration values for some --- settings. --- See `typ_checks` for the validation function of each type. 
--- --- Types: --- `boolean`: can be "on"/"off"/"true"/"false", will be inferred to a boolean --- `ngx_boolean`: can be "on"/"off", will be inferred to a string --- `array`: a comma-separated list -local CONF_PARSERS = { - -- forced string inferences (or else are retrieved as numbers) - port_maps = { typ = "array" }, - proxy_listen = { typ = "array" }, - admin_listen = { typ = "array" }, - admin_gui_listen = {typ = "array"}, - status_listen = { typ = "array" }, - stream_listen = { typ = "array" }, - cluster_listen = { typ = "array" }, - ssl_cert = { typ = "array" }, - ssl_cert_key = { typ = "array" }, - admin_ssl_cert = { typ = "array" }, - admin_ssl_cert_key = { typ = "array" }, - admin_gui_ssl_cert = { typ = "array" }, - admin_gui_ssl_cert_key = { typ = "array" }, - status_ssl_cert = { typ = "array" }, - status_ssl_cert_key = { typ = "array" }, - db_update_frequency = { typ = "number" }, - db_update_propagation = { typ = "number" }, - db_cache_ttl = { typ = "number" }, - db_cache_neg_ttl = { typ = "number" }, - db_resurrect_ttl = { typ = "number" }, - db_cache_warmup_entities = { typ = "array" }, - nginx_user = { - typ = "string", - alias = { - replacement = "nginx_main_user", - } - }, - nginx_daemon = { - typ = "ngx_boolean", - alias = { - replacement = "nginx_main_daemon", - } - }, - nginx_worker_processes = { - typ = "string", - alias = { - replacement = "nginx_main_worker_processes", - }, - }, - - worker_events_max_payload = { typ = "number" }, - - upstream_keepalive_pool_size = { typ = "number" }, - upstream_keepalive_max_requests = { typ = "number" }, - upstream_keepalive_idle_timeout = { typ = "number" }, - allow_debug_header = { typ = "boolean" }, - - headers = { typ = "array" }, - headers_upstream = { typ = "array" }, - trusted_ips = { typ = "array" }, - real_ip_header = { - typ = "string", - alias = { - replacement = "nginx_proxy_real_ip_header", - } - }, - real_ip_recursive = { - typ = "ngx_boolean", - alias = { - replacement = "nginx_proxy_real_ip_recursive", - } - }, - error_default_type = { enum = { - "application/json", - "application/xml", - "text/html", - "text/plain", - } - }, - - database = { enum = { "postgres", "cassandra", "off" } }, - pg_port = { typ = "number" }, - pg_timeout = { typ = "number" }, - pg_password = { typ = "string" }, - pg_ssl = { typ = "boolean" }, - pg_ssl_verify = { typ = "boolean" }, - pg_max_concurrent_queries = { typ = "number" }, - pg_semaphore_timeout = { typ = "number" }, - pg_keepalive_timeout = { typ = "number" }, - pg_pool_size = { typ = "number" }, - pg_backlog = { typ = "number" }, - _debug_pg_ttl_cleanup_interval = { typ = "number" }, - - pg_ro_port = { typ = "number" }, - pg_ro_timeout = { typ = "number" }, - pg_ro_password = { typ = "string" }, - pg_ro_ssl = { typ = "boolean" }, - pg_ro_ssl_verify = { typ = "boolean" }, - pg_ro_max_concurrent_queries = { typ = "number" }, - pg_ro_semaphore_timeout = { typ = "number" }, - pg_ro_keepalive_timeout = { typ = "number" }, - pg_ro_pool_size = { typ = "number" }, - pg_ro_backlog = { typ = "number" }, - - dns_resolver = { typ = "array" }, - dns_hostsfile = { typ = "string" }, - dns_order = { typ = "array" }, - dns_valid_ttl = { typ = "number" }, - dns_stale_ttl = { typ = "number" }, - dns_cache_size = { typ = "number" }, - dns_not_found_ttl = { typ = "number" }, - dns_error_ttl = { typ = "number" }, - dns_no_sync = { typ = "boolean" }, - privileged_worker = { - typ = "boolean", - deprecated = { - replacement = "dedicated_config_processing", - alias = function(conf) - if 
conf.dedicated_config_processing == nil and - conf.privileged_worker ~= nil then - conf.dedicated_config_processing = conf.privileged_worker - end - end, - }}, - dedicated_config_processing = { typ = "boolean" }, - worker_consistency = { enum = { "strict", "eventual" }, - -- deprecating values for enums - deprecated = { - value = "strict", - } - }, - router_consistency = { - enum = { "strict", "eventual" }, - deprecated = { - replacement = "worker_consistency", - alias = function(conf) - if conf.worker_consistency == nil and - conf.router_consistency ~= nil then - conf.worker_consistency = conf.router_consistency - end - end, - } - }, - router_flavor = { - enum = { "traditional", "traditional_compatible", "expressions" }, - }, - worker_state_update_frequency = { typ = "number" }, - - lua_max_req_headers = { typ = "number" }, - lua_max_resp_headers = { typ = "number" }, - lua_max_uri_args = { typ = "number" }, - lua_max_post_args = { typ = "number" }, - - ssl_protocols = { - typ = "string", - directives = { - "nginx_http_ssl_protocols", - "nginx_stream_ssl_protocols", - }, - }, - ssl_prefer_server_ciphers = { - typ = "ngx_boolean", - directives = { - "nginx_http_ssl_prefer_server_ciphers", - "nginx_stream_ssl_prefer_server_ciphers", - }, - }, - ssl_dhparam = { - typ = "string", - directives = { - "nginx_http_ssl_dhparam", - "nginx_stream_ssl_dhparam", - }, - }, - ssl_session_tickets = { - typ = "ngx_boolean", - directives = { - "nginx_http_ssl_session_tickets", - "nginx_stream_ssl_session_tickets", - }, - }, - ssl_session_timeout = { - typ = "string", - directives = { - "nginx_http_ssl_session_timeout", - "nginx_stream_ssl_session_timeout", - }, - }, - ssl_session_cache_size = { typ = "string" }, - - client_ssl = { typ = "boolean" }, - - proxy_access_log = { typ = "string" }, - proxy_error_log = { typ = "string" }, - proxy_stream_access_log = { typ = "string" }, - proxy_stream_error_log = { typ = "string" }, - admin_access_log = { typ = "string" }, - admin_error_log = { typ = "string" }, - admin_gui_access_log = {typ = "string"}, - admin_gui_error_log = {typ = "string"}, - status_access_log = { typ = "string" }, - status_error_log = { typ = "string" }, - log_level = { enum = { - "debug", - "info", - "notice", - "warn", - "error", - "crit", - "alert", - "emerg", - } - }, - vaults = { typ = "array" }, - plugins = { typ = "array" }, - anonymous_reports = { typ = "boolean" }, - - lua_ssl_trusted_certificate = { typ = "array" }, - lua_ssl_verify_depth = { typ = "number" }, - lua_ssl_protocols = { - typ = "string", - directives = { - "nginx_http_lua_ssl_protocols", - "nginx_stream_lua_ssl_protocols", - }, - }, - lua_socket_pool_size = { typ = "number" }, - - role = { enum = { "data_plane", "control_plane", "traditional", }, }, - cluster_control_plane = { typ = "string", }, - cluster_cert = { typ = "string" }, - cluster_cert_key = { typ = "string" }, - cluster_mtls = { enum = { "shared", "pki" } }, - cluster_ca_cert = { typ = "string" }, - cluster_server_name = { typ = "string" }, - cluster_data_plane_purge_delay = { typ = "number" }, - cluster_ocsp = { enum = { "on", "off", "optional" } }, - cluster_max_payload = { typ = "number" }, - cluster_use_proxy = { typ = "boolean" }, - cluster_dp_labels = { typ = "array" }, - - kic = { typ = "boolean" }, - pluginserver_names = { typ = "array" }, - - untrusted_lua = { enum = { "on", "off", "sandbox" } }, - untrusted_lua_sandbox_requires = { typ = "array" }, - untrusted_lua_sandbox_environment = { typ = "array" }, - - lmdb_environment_path = { typ = 
"string" }, - lmdb_map_size = { typ = "string" }, - - opentelemetry_tracing = { - typ = "array", - alias = { - replacement = "tracing_instrumentations", - }, - deprecated = { - replacement = "tracing_instrumentations", - }, - }, - - tracing_instrumentations = { - typ = "array", - }, - - opentelemetry_tracing_sampling_rate = { - typ = "number", - deprecated = { - replacement = "tracing_sampling_rate", - }, - alias = { - replacement = "tracing_sampling_rate", - }, - }, - - tracing_sampling_rate = { - typ = "number", - }, - - proxy_server = { typ = "string" }, - proxy_server_ssl_verify = { typ = "boolean" }, - - wasm = { typ = "boolean" }, - wasm_filters_path = { typ = "string" }, - - error_template_html = { typ = "string" }, - error_template_json = { typ = "string" }, - error_template_xml = { typ = "string" }, - error_template_plain = { typ = "string" }, - - admin_gui_url = {typ = "string"}, - admin_gui_path = {typ = "string"}, - admin_gui_api_url = {typ = "string"}, - - request_debug = { typ = "boolean" }, - request_debug_token = { typ = "string" }, -} - - --- List of settings whose values must not be printed when --- using the CLI in debug mode (which prints all settings). -local CONF_SENSITIVE_PLACEHOLDER = "******" -local CONF_SENSITIVE = { - pg_password = true, - pg_ro_password = true, - proxy_server = true, -- hide proxy server URL as it may contain credentials - declarative_config_string = true, -- config may contain sensitive info - -- may contain absolute or base64 value of the the key - cluster_cert_key = true, - ssl_cert_key = true, - client_ssl_cert_key = true, - admin_ssl_cert_key = true, - admin_gui_ssl_cert_key = true, - status_ssl_cert_key = true, - debug_ssl_cert_key = true, -} - - --- List of confs necessary for compiling injected nginx conf -local CONF_BASIC = { - prefix = true, - vaults = true, - database = true, - lmdb_environment_path = true, - lmdb_map_size = true, - lua_ssl_trusted_certificate = true, - lua_ssl_verify_depth = true, - lua_ssl_protocols = true, - nginx_http_lua_ssl_protocols = true, - nginx_stream_lua_ssl_protocols = true, - vault_env_prefix = true, -} - - -local typ_checks = { - array = function(v) return type(v) == "table" end, - string = function(v) return type(v) == "string" end, - number = function(v) return type(v) == "number" end, - boolean = function(v) return type(v) == "boolean" end, - ngx_boolean = function(v) return v == "on" or v == "off" end, -} - - --- This meta table will prevent the parsed table to be passed on in the --- intermediate Kong config file in the prefix directory. --- We thus avoid 'table: 0x41c3fa58' from appearing into the prefix --- hidden configuration file. --- This is only to be applied to values that are injected into the --- configuration object, and not configuration properties themselves, --- otherwise we would prevent such properties from being specifiable --- via environment variables. 
-local _nop_tostring_mt = { - __tostring = function() return "" end, -} - - --- using kong version, "major.minor" -local LMDB_VALIDATION_TAG = string.format("%d.%d", - kong_meta._VERSION_TABLE.major, - kong_meta._VERSION_TABLE.minor) - - local function parse_value(value, typ) if type(value) == "string" then value = strip(value) @@ -842,12 +236,12 @@ local function check_and_parse(conf, opts) local errors = {} for k, value in pairs(conf) do - local v_schema = CONF_PARSERS[k] or {} + local v_schema = conf_constants.CONF_PARSERS[k] or {} value = parse_value(value, v_schema.typ) local typ = v_schema.typ or "string" - if value and not typ_checks[typ](value) then + if value and not conf_constants.TYP_CHECKS[typ](value) then errors[#errors + 1] = fmt("%s is not a %s: '%s'", k, typ, tostring(value)) @@ -1038,7 +432,7 @@ local function check_and_parse(conf, opts) end if conf.ssl_cipher_suite ~= "custom" then - local suite = cipher_suites[conf.ssl_cipher_suite] + local suite = conf_constants.CIPHER_SUITES[conf.ssl_cipher_suite] if suite then conf.ssl_ciphers = suite.ciphers conf.nginx_http_ssl_protocols = suite.protocols @@ -1087,7 +481,7 @@ local function check_and_parse(conf, opts) if conf.headers then for _, token in ipairs(conf.headers) do - if token ~= "off" and not HEADER_KEY_TO_NAME[lower(token)] then + if token ~= "off" and not conf_constants.HEADER_KEY_TO_NAME[lower(token)] then errors[#errors + 1] = fmt("headers: invalid entry '%s'", tostring(token)) end @@ -1096,7 +490,7 @@ local function check_and_parse(conf, opts) if conf.headers_upstream then for _, token in ipairs(conf.headers_upstream) do - if token ~= "off" and not UPSTREAM_HEADER_KEY_TO_NAME[lower(token)] then + if token ~= "off" and not conf_constants.UPSTREAM_HEADER_KEY_TO_NAME[lower(token)] then errors[#errors + 1] = fmt("headers_upstream: invalid entry '%s'", tostring(token)) end @@ -1493,8 +887,8 @@ local function overrides(k, default_v, opts, file_conf, arg_conf) if env ~= nil then local to_print = env - if CONF_SENSITIVE[k] then - to_print = CONF_SENSITIVE_PLACEHOLDER + if conf_constants.CONF_SENSITIVE[k] then + to_print = conf_constants.CONF_SENSITIVE_PLACEHOLDER end log.debug('%s ENV found with "%s"', env_name, to_print) @@ -1534,7 +928,7 @@ end local function aliased_properties(conf) - for property_name, v_schema in pairs(CONF_PARSERS) do + for property_name, v_schema in pairs(conf_constants.CONF_PARSERS) do local alias = v_schema.alias if alias and conf[property_name] ~= nil and conf[alias.replacement] == nil then @@ -1553,7 +947,7 @@ end local function deprecated_properties(conf, opts) - for property_name, v_schema in pairs(CONF_PARSERS) do + for property_name, v_schema in pairs(conf_constants.CONF_PARSERS) do local deprecated = v_schema.deprecated if deprecated and conf[property_name] ~= nil then @@ -1579,7 +973,7 @@ end local function dynamic_properties(conf) - for property_name, v_schema in pairs(CONF_PARSERS) do + for property_name, v_schema in pairs(conf_constants.CONF_PARSERS) do local value = conf[property_name] if value ~= nil then local directives = v_schema.directives @@ -1707,7 +1101,7 @@ local function load(path, custom_conf, opts) if not path then -- try to look for a conf in default locations, but no big -- deal if none is found: we will use our defaults. 
- for _, default_path in ipairs(DEFAULT_PATHS) do + for _, default_path in ipairs(conf_constants.DEFAULT_PATHS) do if exists(default_path) then path = default_path break @@ -1741,7 +1135,7 @@ local function load(path, custom_conf, opts) local function add_dynamic_keys(t) t = t or {} - for property_name, v_schema in pairs(CONF_PARSERS) do + for property_name, v_schema in pairs(conf_constants.CONF_PARSERS) do local directives = v_schema.directives if directives then local v = t[property_name] @@ -1801,7 +1195,7 @@ local function load(path, custom_conf, opts) add_dynamic_keys(kong_env_vars) add_dynamic_keys(from_file_conf) - for _, dyn_namespace in ipairs(DYNAMIC_KEY_NAMESPACES) do + for _, dyn_namespace in ipairs(conf_constants.DYNAMIC_KEY_NAMESPACES) do find_dynamic_keys(dyn_namespace.prefix, defaults) -- tostring() defaults find_dynamic_keys(dyn_namespace.prefix, custom_conf) find_dynamic_keys(dyn_namespace.prefix, kong_env_vars) @@ -1835,7 +1229,7 @@ local function load(path, custom_conf, opts) -- before executing the main `resty` cmd, i.e. still in `bin/kong` if opts.pre_cmd then for k, v in pairs(conf) do - if not CONF_BASIC[k] then + if not conf_constants.CONF_BASIC[k] then conf[k] = nil end end @@ -1849,7 +1243,7 @@ local function load(path, custom_conf, opts) local refs do -- validation - local vaults_array = parse_value(conf.vaults, CONF_PARSERS["vaults"].typ) + local vaults_array = parse_value(conf.vaults, conf_constants.CONF_PARSERS["vaults"].typ) -- merge vaults local vaults = {} @@ -1859,7 +1253,7 @@ local function load(path, custom_conf, opts) local vault_name = strip(vaults_array[i]) if vault_name ~= "off" then if vault_name == "bundled" then - vaults = tablex.merge(constants.BUNDLED_VAULTS, vaults, true) + vaults = tablex.merge(conf_constants.BUNDLED_VAULTS, vaults, true) else vaults[vault_name] = true @@ -1868,7 +1262,7 @@ local function load(path, custom_conf, opts) end end - loaded_vaults = setmetatable(vaults, _nop_tostring_mt) + loaded_vaults = setmetatable(vaults, conf_constants._NOP_TOSTRING_MT) if get_phase() == "init" then local secrets = getenv("KONG_PROCESS_SECRETS") @@ -1876,7 +1270,7 @@ local function load(path, custom_conf, opts) C.unsetenv("KONG_PROCESS_SECRETS") else - local path = pl_path.join(abspath(ngx.config.prefix()), unpack(PREFIX_PATHS.kong_process_secrets)) + local path = pl_path.join(abspath(ngx.config.prefix()), unpack(conf_constants.PREFIX_PATHS.kong_process_secrets)) if exists(path) then secrets, err = pl_file.read(path, true) pl_file.delete(path) @@ -1897,7 +1291,7 @@ local function load(path, custom_conf, opts) if refs then refs[k] = v else - refs = setmetatable({ [k] = v }, _nop_tostring_mt) + refs = setmetatable({ [k] = v }, conf_constants._NOP_TOSTRING_MT) end conf[k] = deref @@ -1920,7 +1314,7 @@ local function load(path, custom_conf, opts) if refs then refs[k] = v else - refs = setmetatable({ [k] = v }, _nop_tostring_mt) + refs = setmetatable({ [k] = v }, conf_constants._NOP_TOSTRING_MT) end local deref, deref_err = vault.get(v) @@ -1974,7 +1368,7 @@ local function load(path, custom_conf, opts) end -- attach prefix files paths - for property, t_path in pairs(PREFIX_PATHS) do + for property, t_path in pairs(conf_constants.PREFIX_PATHS) do conf[property] = pl_path.join(conf.prefix, unpack(t_path)) end @@ -2016,12 +1410,12 @@ local function load(path, custom_conf, opts) end -- lmdb validation tag - conf.lmdb_validation_tag = LMDB_VALIDATION_TAG + conf.lmdb_validation_tag = conf_constants.LMDB_VALIDATION_TAG -- Wasm module support if conf.wasm 
then local wasm_filters = get_wasm_filters(conf.wasm_filters_path) - conf.wasm_modules_parsed = setmetatable(wasm_filters, _nop_tostring_mt) + conf.wasm_modules_parsed = setmetatable(wasm_filters, conf_constants._NOP_TOSTRING_MT) local function add_wasm_directive(directive, value, prefix) local directive_name = (prefix or "") .. directive @@ -2071,19 +1465,19 @@ local function load(path, custom_conf, opts) local injected_in_namespace = {} -- nginx directives from conf - for _, dyn_namespace in ipairs(DYNAMIC_KEY_NAMESPACES) do + for _, dyn_namespace in ipairs(conf_constants.DYNAMIC_KEY_NAMESPACES) do if dyn_namespace.injected_conf_name then injected_in_namespace[dyn_namespace.injected_conf_name] = true local directives = parse_nginx_directives(dyn_namespace, conf, injected_in_namespace) conf[dyn_namespace.injected_conf_name] = setmetatable(directives, - _nop_tostring_mt) + conf_constants._NOP_TOSTRING_MT) end end -- TODO: Deprecated, but kept for backward compatibility. - for _, dyn_namespace in ipairs(DEPRECATED_DYNAMIC_KEY_NAMESPACES) do + for _, dyn_namespace in ipairs(conf_constants.DEPRECATED_DYNAMIC_KEY_NAMESPACES) do if conf[dyn_namespace.injected_conf_name] then conf[dyn_namespace.previous_conf_name] = conf[dyn_namespace.injected_conf_name] end @@ -2096,8 +1490,8 @@ local function load(path, custom_conf, opts) for k, v in pairs(conf) do local to_print = v - if CONF_SENSITIVE[k] then - to_print = "******" + if conf_constants.CONF_SENSITIVE[k] then + to_print = conf_constants.CONF_SENSITIVE_PLACEHOLDER end conf_arr[#conf_arr+1] = k .. " = " .. pl_pretty.write(to_print, "") @@ -2123,7 +1517,7 @@ local function load(path, custom_conf, opts) local plugin_name = strip(conf.plugins[i]) if plugin_name ~= "off" then if plugin_name == "bundled" then - plugins = tablex.merge(constants.BUNDLED_PLUGINS, plugins, true) + plugins = tablex.merge(conf_constants.BUNDLED_PLUGINS, plugins, true) else plugins[plugin_name] = true @@ -2132,7 +1526,7 @@ local function load(path, custom_conf, opts) end end - conf.loaded_plugins = setmetatable(plugins, _nop_tostring_mt) + conf.loaded_plugins = setmetatable(plugins, conf_constants._NOP_TOSTRING_MT) end -- temporary workaround: inject an shm for prometheus plugin if needed @@ -2178,7 +1572,7 @@ local function load(path, custom_conf, opts) end end - for _, dyn_namespace in ipairs(DYNAMIC_KEY_NAMESPACES) do + for _, dyn_namespace in ipairs(conf_constants.DYNAMIC_KEY_NAMESPACES) do if dyn_namespace.injected_conf_name then sort(conf[dyn_namespace.injected_conf_name], function(a, b) return a.name < b.name @@ -2203,48 +1597,48 @@ local function load(path, custom_conf, opts) -- (downstream) local enabled_headers = {} - for _, v in pairs(HEADER_KEY_TO_NAME) do + for _, v in pairs(conf_constants.HEADER_KEY_TO_NAME) do enabled_headers[v] = false end if #conf.headers > 0 and conf.headers[1] ~= "off" then for _, token in ipairs(conf.headers) do if token ~= "off" then - enabled_headers[HEADER_KEY_TO_NAME[lower(token)]] = true + enabled_headers[conf_constants.HEADER_KEY_TO_NAME[lower(token)]] = true end end end if enabled_headers.server_tokens then - enabled_headers[HEADERS.VIA] = true - enabled_headers[HEADERS.SERVER] = true + enabled_headers[conf_constants.HEADERS.VIA] = true + enabled_headers[conf_constants.HEADERS.SERVER] = true end if enabled_headers.latency_tokens then - enabled_headers[HEADERS.PROXY_LATENCY] = true - enabled_headers[HEADERS.RESPONSE_LATENCY] = true - enabled_headers[HEADERS.ADMIN_LATENCY] = true - enabled_headers[HEADERS.UPSTREAM_LATENCY] = true + 
enabled_headers[conf_constants.HEADERS.PROXY_LATENCY] = true + enabled_headers[conf_constants.HEADERS.RESPONSE_LATENCY] = true + enabled_headers[conf_constants.HEADERS.ADMIN_LATENCY] = true + enabled_headers[conf_constants.HEADERS.UPSTREAM_LATENCY] = true end - conf.enabled_headers = setmetatable(enabled_headers, _nop_tostring_mt) + conf.enabled_headers = setmetatable(enabled_headers, conf_constants._NOP_TOSTRING_MT) -- (upstream) local enabled_headers_upstream = {} - for _, v in pairs(UPSTREAM_HEADER_KEY_TO_NAME) do + for _, v in pairs(conf_constants.UPSTREAM_HEADER_KEY_TO_NAME) do enabled_headers_upstream[v] = false end if #conf.headers_upstream > 0 and conf.headers_upstream[1] ~= "off" then for _, token in ipairs(conf.headers_upstream) do if token ~= "off" then - enabled_headers_upstream[UPSTREAM_HEADER_KEY_TO_NAME[lower(token)]] = true + enabled_headers_upstream[conf_constants.UPSTREAM_HEADER_KEY_TO_NAME[lower(token)]] = true end end end - conf.enabled_headers_upstream = setmetatable(enabled_headers_upstream, _nop_tostring_mt) + conf.enabled_headers_upstream = setmetatable(enabled_headers_upstream, conf_constants._NOP_TOSTRING_MT) end for _, prefix in ipairs({ "ssl", "admin_ssl", "admin_gui_ssl", "status_ssl", "client_ssl", "cluster" }) do @@ -2340,7 +1734,7 @@ return setmetatable({ load_config_file = load_config_file, add_default_path = function(path) - DEFAULT_PATHS[#DEFAULT_PATHS+1] = path + table.insert(conf_constants.DEFAULT_PATHS, path) end, remove_sensitive = function(conf) @@ -2349,16 +1743,16 @@ return setmetatable({ local refs = purged_conf["$refs"] if type(refs) == "table" then for k, v in pairs(refs) do - if not CONF_SENSITIVE[k] then + if not conf_constants.CONF_SENSITIVE[k] then purged_conf[k] = v end end purged_conf["$refs"] = nil end - for k in pairs(CONF_SENSITIVE) do + for k in pairs(conf_constants.CONF_SENSITIVE) do if purged_conf[k] then - purged_conf[k] = CONF_SENSITIVE_PLACEHOLDER + purged_conf[k] = conf_constants.CONF_SENSITIVE_PLACEHOLDER end end diff --git a/spec/01-unit/03-conf_loader_spec.lua b/spec/01-unit/03-conf_loader_spec.lua index 10743b25eff3..f8e1446f856b 100644 --- a/spec/01-unit/03-conf_loader_spec.lua +++ b/spec/01-unit/03-conf_loader_spec.lua @@ -1618,6 +1618,7 @@ describe("Configuration loader", function() finally(function() os.getenv = _os_getenv -- luacheck: ignore package.loaded["kong.conf_loader"] = nil + package.loaded["kong.conf_loader.constants"] = nil conf_loader = require "kong.conf_loader" end) os.getenv = function() end -- luacheck: ignore @@ -1632,6 +1633,7 @@ describe("Configuration loader", function() finally(function() os.getenv = _os_getenv -- luacheck: ignore package.loaded["kong.conf_loader"] = nil + package.loaded["kong.conf_loader.constants"] = nil conf_loader = require "kong.conf_loader" end) os.getenv = function() end -- luacheck: ignore From 2784bf54d8cbf3dbffe743837c1cbac2338c69f3 Mon Sep 17 00:00:00 2001 From: Yusheng Li Date: Thu, 30 Nov 2023 01:27:34 +0800 Subject: [PATCH 170/249] feat(log-serializer): add `source` property to log-serializer (#12052) --- .../kong/log-serializer-source-property.yml | 3 +++ kong/constants.lua | 13 +++++++++++ kong/pdk/log.lua | 6 +++++ kong/pdk/response.lua | 8 ++++--- spec/01-unit/10-log_serializer_spec.lua | 23 ++++++++++++++++++- t/01-pdk/02-log/00-phase_checks.t | 3 +++ 6 files changed, 52 insertions(+), 4 deletions(-) create mode 100644 changelog/unreleased/kong/log-serializer-source-property.yml diff --git a/changelog/unreleased/kong/log-serializer-source-property.yml 
b/changelog/unreleased/kong/log-serializer-source-property.yml new file mode 100644 index 000000000000..326950c22ab4 --- /dev/null +++ b/changelog/unreleased/kong/log-serializer-source-property.yml @@ -0,0 +1,3 @@ +message: 'Add `source` property to log serializer, indicating the response is generated by `kong` or `upstream`.' +type: feature +scope: Core diff --git a/kong/constants.lua b/kong/constants.lua index 46a16fcac2a1..fc3b8a18a3b2 100644 --- a/kong/constants.lua +++ b/kong/constants.lua @@ -253,6 +253,19 @@ local constants = { SCHEMA_NAMESPACES = { PROXY_WASM_FILTERS = "proxy-wasm-filters", }, + + RESPONSE_SOURCE = { + TYPES = { + ERROR = "error", + EXIT = "exit", + SERVICE = "service", + }, + NAMES = { + error = "kong", + exit = "kong", + service = "upstream", + } + } } for _, v in ipairs(constants.CLUSTERING_SYNC_STATUS) do diff --git a/kong/pdk/log.lua b/kong/pdk/log.lua index e1cf4892cd8d..7fbaf168f7c1 100644 --- a/kong/pdk/log.lua +++ b/kong/pdk/log.lua @@ -18,6 +18,7 @@ local ngx_ssl = require "ngx.ssl" local phase_checker = require "kong.pdk.private.phases" local utils = require "kong.tools.utils" local cycle_aware_deep_copy = utils.cycle_aware_deep_copy +local constants = require "kong.constants" local sub = string.sub local type = type @@ -46,6 +47,7 @@ local _DEFAULT_NAMESPACED_FORMAT = "%file_src:%line_src [%namespace] %message" local PHASES = phase_checker.phases local PHASES_LOG = PHASES.log local QUESTION_MARK = byte("?") +local TYPE_NAMES = constants.RESPONSE_SOURCE.NAMES local phases_with_ctx = phase_checker.new(PHASES.rewrite, @@ -817,6 +819,9 @@ do -- the nginx doc: http://nginx.org/en/docs/http/ngx_http_upstream_module.html#upstream_status local upstream_status = var.upstream_status or "" + local response_source = okong.response.get_source(ongx.ctx) + local response_source_name = TYPE_NAMES[response_source] + local root = { request = { id = request_id_get() or "", @@ -848,6 +853,7 @@ do consumer = cycle_aware_deep_copy(ctx.authenticated_consumer), client_ip = var.remote_addr, started_at = okong.request.get_start_time(), + source = response_source_name, } return edit_result(ctx, root) diff --git a/kong/pdk/response.lua b/kong/pdk/response.lua index 228626b62943..dd83b2a8270a 100644 --- a/kong/pdk/response.lua +++ b/kong/pdk/response.lua @@ -18,6 +18,7 @@ local checks = require "kong.pdk.private.checks" local phase_checker = require "kong.pdk.private.phases" local utils = require "kong.tools.utils" local request_id = require "kong.tracing.request_id" +local constants = require "kong.constants" local ngx = ngx @@ -40,6 +41,7 @@ local is_http_subsystem = ngx and ngx.config.subsystem == "http" if is_http_subsystem then add_header = require("ngx.resp").add_header end +local RESPONSE_SOURCE_TYPES = constants.RESPONSE_SOURCE.TYPES local PHASES = phase_checker.phases @@ -349,15 +351,15 @@ local function new(self, major_version) end if ctx.KONG_UNEXPECTED then - return "error" + return RESPONSE_SOURCE_TYPES.ERROR end if ctx.KONG_EXITED then - return "exit" + return RESPONSE_SOURCE_TYPES.EXIT end if ctx.KONG_PROXIED then - return "service" + return RESPONSE_SOURCE_TYPES.SERVICE end return "error" diff --git a/spec/01-unit/10-log_serializer_spec.lua b/spec/01-unit/10-log_serializer_spec.lua index bd465d22805e..005772ca8b01 100644 --- a/spec/01-unit/10-log_serializer_spec.lua +++ b/spec/01-unit/10-log_serializer_spec.lua @@ -20,6 +20,7 @@ describe("kong.log.serialize", function() }, }, }, + KONG_PROXIED = true, }, var = { kong_request_id = "1234", @@ -43,7 +44,7 @@ 
describe("kong.log.serialize", function() get_uri_args = function() return {"arg1", "arg2"} end, get_method = function() return "POST" end, get_headers = function() return {header1 = "header1", header2 = "header2", authorization = "authorization"} end, - start_time = function() return 3 end + start_time = function() return 3 end, }, resp = { get_headers = function() return {header1 = "respheader1", header2 = "respheader2", ["set-cookie"] = "delicious=delicacy"} end @@ -99,6 +100,8 @@ describe("kong.log.serialize", function() -- Tries assert.is_table(res.tries) + + assert.equal("upstream", res.source) end) it("uses port map (ngx.ctx.host_port) for request url ", function() @@ -173,6 +176,24 @@ describe("kong.log.serialize", function() }, res.tries) end) + it("serializes the response.source", function() + ngx.ctx.KONG_EXITED = true + ngx.ctx.KONG_PROXIED = nil + ngx.ctx.KONG_UNEXPECTED = nil + + local res = kong.log.serialize({ngx = ngx, kong = kong, }) + assert.is_table(res) + assert.same("kong", res.source) + + ngx.ctx.KONG_UNEXPECTED = nil + ngx.ctx.KONG_EXITED = nil + ngx.ctx.KONG_PROXIED = nil + + local res = kong.log.serialize({ngx = ngx, kong = kong, }) + assert.is_table(res) + assert.same("kong", res.source) + end) + it("does not fail when the 'balancer_data' structure is missing", function() ngx.ctx.balancer_data = nil diff --git a/t/01-pdk/02-log/00-phase_checks.t b/t/01-pdk/02-log/00-phase_checks.t index 2bc16e1d3444..ecea2458341d 100644 --- a/t/01-pdk/02-log/00-phase_checks.t +++ b/t/01-pdk/02-log/00-phase_checks.t @@ -64,6 +64,9 @@ qq{ get_headers = function() return {} end, get_start_time = function() return 1 end, }, + response = { + get_source = function() return "service" end, + }, } } }, From 25149497a5f8f71ef8693e46b8e183c0d08e46eb Mon Sep 17 00:00:00 2001 From: Wangchong Zhou Date: Wed, 15 Nov 2023 16:01:30 +0800 Subject: [PATCH 171/249] fix(scripts): fix update-copyright in venv and remove unused repos --- Makefile | 3 +++ build/templates/venv-commons | 1 - scripts/update-copyright | 27 +++++++++++++++------------ 3 files changed, 18 insertions(+), 13 deletions(-) diff --git a/Makefile b/Makefile index 8f3cc3e11de3..5d860bcf7264 100644 --- a/Makefile +++ b/Makefile @@ -138,6 +138,9 @@ lint: dev @!(grep -R -E -I -n -w '#only|#o' spec && echo "#only or #o tag detected") >&2 @!(grep -R -E -I -n -- '---\s+ONLY' t && echo "--- ONLY block detected") >&2 +update-copyright: build-venv + bash -c 'OPENSSL_DIR=$(OPENSSL_DIR) EXPAT_DIR=$(EXPAT_DIR) $(VENV) luajit $(KONG_SOURCE_LOCATION)/scripts/update-copyright' + test: dev @$(VENV) $(TEST_CMD) spec/01-unit diff --git a/build/templates/venv-commons b/build/templates/venv-commons index 7fcf2b932d4e..f13613ca71d4 100644 --- a/build/templates/venv-commons +++ b/build/templates/venv-commons @@ -60,6 +60,5 @@ export LUA_PATH="$LUA_PATH" export LUA_CPATH="$KONG_VENV/openresty/site/lualib/?.so;$KONG_VENV/openresty/lualib/?.so;./?.so;$KONG_VENV/lib/lua/5.1/?.so;$KONG_VENV/openresty/luajit/lib/lua/5.1/?.so;$ROCKS_ROOT/lib/lua/5.1/?.so;;" export KONG_PREFIX="$KONG_VENV/kong/servroot" export LIBRARY_PREFIX="$KONG_VENV/kong" # let "make dev" happy -export OPENSSL_DIR="$KONG_VENV/kong" # let "make dev" happy EOF diff --git a/scripts/update-copyright b/scripts/update-copyright index 1a63f07c8393..afcfd29ae9f8 100755 --- a/scripts/update-copyright +++ b/scripts/update-copyright @@ -3,6 +3,8 @@ --[[ Usage: ./scripts/update-copyright +Use `make update-copyright` is recommended without least setup. 
+ The COPYRIGHT file should be updated after running this. Changes are not added to git, visual review is recommended. @@ -20,11 +22,12 @@ including installing rocks inside said folder. Requires internet connection in order to download luarocks and license files. -On Macs, you might need to set up OPENSSL_DIR and CRYPTO_DIR. +On Macs, you might need to set up OPENSSL_DIR and EXPAT_DIR. The default for mac is: -OPENSSL_DIR=/usr/local/opt/openssl/ CRYPTO_DIR=/usr/local/opt/openssl/ ./scripts/update-copyright +OPENSSL_DIR=/usr/local/opt/openssl/ EXPAT_DIR=/usr/local/opt/expat ./scripts/update-copyright + ]] setmetatable(_G, nil) @@ -34,10 +37,10 @@ local url = require "socket.url" local fmt = string.format local OPENSSL_DIR = os.getenv("OPENSSL_DIR") -assert(OPENSSL_DIR, "please set the OPENSSL_DIR env variable (needed for installing luaOSSL)") +assert(OPENSSL_DIR, "please set the OPENSSL_DIR env variable (needed for installing luasocket)") -local CRYPTO_DIR = os.getenv("CRYPTO_DIR") -assert(CRYPTO_DIR, "please set the CRYPTO_DIR env variable (needed for installing luaOSSL)") +local EXPAT_DIR = os.getenv("EXPAT_DIR") +assert(EXPAT_DIR, "please set the EXPAT_DIR env variable (needed for installing luaexpat)") local work_folder = os.tmpname() .. "-update-copyright" @@ -72,9 +75,8 @@ local HARDCODED_DEPENDENCIES = { url = "https://luarocks.org", repo_url = "https://github.com/luarocks/luarocks", }, - ["luaossl"] = { -- the rockspec information is not up to date - url = "http://25thandclement.com/~william/projects/luaossl.html", - repo_url = "https://github.com/wahern/luaossl", + ["OpenSSL"] = { + url = "https://github.com/openssl/openssl", }, -- go-pdk dependencies: ["go-codec"] = { @@ -330,7 +332,7 @@ local function find_and_download_license(main_url, alt_url) local attempt_url = url.build(parsed_url) local text = download_file(attempt_url) - if text then + if text and #text > 0 then parsed_url.host = "github.com" parsed_url.path = fmt("/%s/%s/blob/master/%s", user, reponame, attempt) local url_for_humans = url.build(parsed_url) @@ -344,7 +346,7 @@ local function find_and_download_license(main_url, alt_url) local readme_markdown = download_file(readme_url) if readme_markdown then local header, text = extract_license_from_markdown(readme_markdown) - if header then + if header and #header > 0 then parsed_url.host = "github.com" parsed_url.path = fmt("/%s/%s", user, reponame) parsed_url.fragment = to_anchor(header) @@ -383,8 +385,8 @@ print("") print(fmt("Installing rocks in work folder. 
(Install log: %s/luarocks.log) ...", work_folder))) assert(os.execute(fmt("cp kong*.rockspec %s", work_folder))) -assert(os.execute(fmt("luarocks --lua-version=5.1 --tree %s make %s/kong*.rockspec OPENSSL_DIR=%s CRYPTO_DIR=%s 2>&1 > %s/luarocks.log", - work_folder, work_folder, OPENSSL_DIR, CRYPTO_DIR, work_folder))) +assert(os.execute(fmt("luarocks --lua-version=5.1 --tree %s make %s/kong*.rockspec OPENSSL_DIR=%s EXPAT_DIR=%s 2>&1 > %s/luarocks.log", + work_folder, work_folder, OPENSSL_DIR, EXPAT_DIR, work_folder))) local rocklist_path = fmt("%s/rocklist.txt", work_folder) assert(os.execute(fmt("find %s/lib | grep rockspec > %s", work_folder, rocklist_path))) @@ -420,6 +422,7 @@ table.sort(rocks, function(a, b) return a.package:lower() < b.package:lower() en print("Searching and downloading license texts from rock repos") for _, rock in ipairs(rocks) do + break -- if it was in HARDCODED_DEPENDENCIES, it is already in licenses at this point if not HARDCODED_DEPENDENCIES[rock.package] then local homepage = get_rock_homepage(rock) From 3b09d87aa78799f55b2b6624a7e4820085e16142 Mon Sep 17 00:00:00 2001 From: Zachary Hu <6426329+outsinre@users.noreply.github.com> Date: Thu, 30 Nov 2023 13:30:58 +0800 Subject: [PATCH 172/249] chore(deps): bump `actions/github-script` from `6` to `7` (#12119) --- .github/workflows/backport-fail-bot.yml | 4 ++-- .github/workflows/release-and-tests-fail-bot.yml | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/backport-fail-bot.yml b/.github/workflows/backport-fail-bot.yml index 90004154abae..94eff6defd80 100644 --- a/.github/workflows/backport-fail-bot.yml +++ b/.github/workflows/backport-fail-bot.yml @@ -12,7 +12,7 @@ jobs: steps: - name: Fetch mapping file id: fetch_mapping - uses: actions/github-script@v6 + uses: actions/github-script@v7 env: ACCESS_TOKEN: ${{ secrets.PAT }} with: @@ -25,7 +25,7 @@ jobs: - name: Generate Slack Payload id: generate-payload - uses: actions/github-script@v6 + uses: actions/github-script@v7 env: SLACK_CHANNEL: gateway-notifications SLACK_MAPPING: "${{ steps.fetch_mapping.outputs.result }}" diff --git a/.github/workflows/release-and-tests-fail-bot.yml b/.github/workflows/release-and-tests-fail-bot.yml index 44796c755bff..1e9adaf073a9 100644 --- a/.github/workflows/release-and-tests-fail-bot.yml +++ b/.github/workflows/release-and-tests-fail-bot.yml @@ -17,7 +17,7 @@ jobs: steps: - name: Fetch mapping file id: fetch_mapping - uses: actions/github-script@v6 + uses: actions/github-script@v7 env: ACCESS_TOKEN: ${{ secrets.PAT }} with: From cfb56a74825eb053321ad399b97ab089030172bc Mon Sep 17 00:00:00 2001 From: Robin Xiang Date: Thu, 30 Nov 2023 16:16:06 +0800 Subject: [PATCH 173/249] fix(error_handler): fix the bug that error handler can't recognize status code 494. (#12114) * fix(error_handler): fix the bug that the error handler can't recognize status code 494. There is a dedicated response body for 494 defined in error_handler. However, with the current `error_page` configuration in nginx-kong.conf, 494 is not handled correctly unless its code is preserved via the `=response` option of the `error_page` directive. In this PR, a separate `error_page` configuration is added for 494 so that the error handler can recognize it, and the status is then converted to 400 in the response.
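In short: 494 is nginx's internal code for oversized or invalid request headers; the dedicated `error_page 494 =494` entry added below keeps that code visible to `/kong_error_handler`, whose Lua side then reports it to the client as a 400. A minimal standalone sketch of that last step (illustrative only; the real logic lives in `kong/error_handlers.lua` and uses the PDK rather than a plain helper function):

```lua
-- Illustrative only: mirrors the status normalization this patch adds to
-- kong/error_handlers.lua, written as a plain function so it runs outside Kong.
local function normalize_error_status(status)
  -- nginx's internal 494 ("Request Header Or Cookie Too Large") must not be
  -- sent to clients as-is; surface it as a regular 400 Bad Request instead.
  if status == 494 then
    return 400
  end
  return status
end

assert(normalize_error_status(494) == 400)
assert(normalize_error_status(404) == 404)
```

The spec change below asserts the visible effect: a 400 response whose body matches "Request header or cookie too large".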
FTI-5374 --- changelog/unreleased/kong/error_handler_494.yml | 3 +++ kong/error_handlers.lua | 7 +++++++ kong/templates/nginx_kong.lua | 3 ++- spec/02-integration/05-proxy/13-error_handlers_spec.lua | 2 +- 4 files changed, 13 insertions(+), 2 deletions(-) create mode 100644 changelog/unreleased/kong/error_handler_494.yml diff --git a/changelog/unreleased/kong/error_handler_494.yml b/changelog/unreleased/kong/error_handler_494.yml new file mode 100644 index 000000000000..dabcfd0cdc88 --- /dev/null +++ b/changelog/unreleased/kong/error_handler_494.yml @@ -0,0 +1,3 @@ +message: Fix a bug that the error_handler can not provide the meaningful response body when the internal error code 494 is triggered. +type: bugfix +scope: Core \ No newline at end of file diff --git a/kong/error_handlers.lua b/kong/error_handlers.lua index e4e8e17d0020..8fd83cf55aaf 100644 --- a/kong/error_handlers.lua +++ b/kong/error_handlers.lua @@ -59,6 +59,13 @@ return function(ctx) local status = kong.response.get_status() local message = get_body(status) + -- Nginx 494 status code is used internally when the client sends + -- too large or invalid HTTP headers. Kong is obliged to convert + -- it back to `400 Bad Request`. + if status == 494 then + status = 400 + end + local headers if find(accept_header, TYPE_GRPC, nil, true) == 1 then message = { message = message } diff --git a/kong/templates/nginx_kong.lua b/kong/templates/nginx_kong.lua index 3375dcf14572..cc2e8c167298 100644 --- a/kong/templates/nginx_kong.lua +++ b/kong/templates/nginx_kong.lua @@ -81,7 +81,8 @@ server { listen $(entry.listener); > end - error_page 400 404 405 408 411 412 413 414 417 494 /kong_error_handler; + error_page 400 404 405 408 411 412 413 414 417 /kong_error_handler; + error_page 494 =494 /kong_error_handler; error_page 500 502 503 504 /kong_error_handler; # Append the kong request id to the error log diff --git a/spec/02-integration/05-proxy/13-error_handlers_spec.lua b/spec/02-integration/05-proxy/13-error_handlers_spec.lua index 3c864e5d653e..a755d515bedc 100644 --- a/spec/02-integration/05-proxy/13-error_handlers_spec.lua +++ b/spec/02-integration/05-proxy/13-error_handlers_spec.lua @@ -36,7 +36,7 @@ describe("Proxy error handlers", function() assert.res_status(400, res) local body = res:read_body() assert.matches("kong/", res.headers.server, nil, true) - assert.matches("Bad request\nrequest_id: %x+\n", body) + assert.matches("Request header or cookie too large", body) end) it("Request For Routers With Trace Method Not Allowed", function () From ab8691a7072deb12d45aff21e76b89cb8476b5a6 Mon Sep 17 00:00:00 2001 From: Wangchong Zhou Date: Thu, 30 Nov 2023 16:11:34 +0800 Subject: [PATCH 174/249] fix(scripts): remove debug code in update-copyright --- scripts/update-copyright | 1 - 1 file changed, 1 deletion(-) diff --git a/scripts/update-copyright b/scripts/update-copyright index afcfd29ae9f8..ccf343ccc0a0 100755 --- a/scripts/update-copyright +++ b/scripts/update-copyright @@ -422,7 +422,6 @@ table.sort(rocks, function(a, b) return a.package:lower() < b.package:lower() en print("Searching and downloading license texts from rock repos") for _, rock in ipairs(rocks) do - break -- if it was in HARDCODED_DEPENDENCIES, it is already in licenses at this point if not HARDCODED_DEPENDENCIES[rock.package] then local homepage = get_rock_homepage(rock) From ddc81de54d277d19d4fa6e0c4221bb5795858196 Mon Sep 17 00:00:00 2001 From: Brent Yarger Date: Fri, 1 Dec 2023 01:11:04 -0800 Subject: [PATCH 175/249] Add ngx globals to 01-header_transformer_spec 
setup (#12136) * Add ngx globals to 01-header_transformer_spec setup * Remove print statements --- .../01-header_transformer_spec.lua | 12 +++++++++++- 1 file changed, 11 insertions(+), 1 deletion(-) diff --git a/spec/03-plugins/15-response-transformer/01-header_transformer_spec.lua b/spec/03-plugins/15-response-transformer/01-header_transformer_spec.lua index 4ec31a7832bd..ca15b1a562a8 100644 --- a/spec/03-plugins/15-response-transformer/01-header_transformer_spec.lua +++ b/spec/03-plugins/15-response-transformer/01-header_transformer_spec.lua @@ -20,6 +20,7 @@ describe("Plugin: response-transformer", function() local header_transformer setup(function() + _G.ngx = { headers_sent = false, resp = { @@ -31,10 +32,19 @@ describe("Plugin: response-transformer", function() KONG_PHASE = 0x00000200, }, } + + _G.ngx.DEBUG = 8 + _G.ngx.INFO = 7 + _G.ngx.NOTICE = 6 + _G.ngx.WARN = 5 + _G.ngx.ERR = 4 + _G.ngx.CRIT = 3 + _G.ngx.ALERT = 2 + _G.ngx.EMERG = 1 + _G.kong = { response = require "kong.pdk.response".new(), } - -- mock since FFI based ngx.resp.add_header won't work in this setup _G.kong.response.add_header = function(name, value) local new_value = _G.kong.response.get_headers()[name] From f75b10d2c9a511f96216e2dae377a55da995fb6c Mon Sep 17 00:00:00 2001 From: Joshua Schmid Date: Fri, 1 Dec 2023 13:06:11 +0100 Subject: [PATCH 176/249] chore: remove changelog from PR template (#12140) --- .github/PULL_REQUEST_TEMPLATE.md | 4 ---- 1 file changed, 4 deletions(-) diff --git a/.github/PULL_REQUEST_TEMPLATE.md b/.github/PULL_REQUEST_TEMPLATE.md index ba036d070436..808639120f3b 100644 --- a/.github/PULL_REQUEST_TEMPLATE.md +++ b/.github/PULL_REQUEST_TEMPLATE.md @@ -18,10 +18,6 @@ https://github.com/Kong/kong/blob/master/COMMUNITY_PLEDGE.md - [ ] A changelog file has been created under `changelog/unreleased/kong` or `skip-changelog` label added on PR if changelog is unnecessary. [README.md](https://github.com/Kong/gateway-changelog/README.md) - [ ] There is a user-facing docs PR against https://github.com/Kong/docs.konghq.com - PUT DOCS PR HERE -### Full changelog - -* [Implement ...] - ### Issue reference From 9eb21f74a8d36bb689af4fea6245dc6462c534ba Mon Sep 17 00:00:00 2001 From: Wangchong Zhou Date: Mon, 4 Dec 2023 16:08:06 +0800 Subject: [PATCH 177/249] chore(helpers): ignore mount points for helpers.clean_prefix (#12139) KAG-5588 --- spec/helpers.lua | 41 +++++++++++++++++++++++++++++++++-------- 1 file changed, 33 insertions(+), 8 deletions(-) diff --git a/spec/helpers.lua b/spec/helpers.lua index bfb71f98a069..e6100913b09b 100644 --- a/spec/helpers.lua +++ b/spec/helpers.lua @@ -3234,17 +3234,42 @@ end -- configuration will be used -- @function clean_prefix local function clean_prefix(prefix) + + -- like pl_dir.rmtree, but ignore mount points + local function rmtree(fullpath) + if pl_path.islink(fullpath) then return false,'will not follow symlink' end + for root,dirs,files in pl_dir.walk(fullpath,true) do + if pl_path.islink(root) then + -- sub dir is a link, remove link, do not follow + local res, err = os.remove(root) + if not res then + return nil, err .. ": " .. root + end + + else + for i,f in ipairs(files) do + f = pl_path.join(root,f) + local res, err = os.remove(f) + if not res then + return nil,err .. ": " .. f + end + end + + local res, err = pl_path.rmdir(root) + -- skip errors when trying to remove mount points + if not res and os.execute("findmnt " .. root .. " 2>&1 >/dev/null") == 0 then + return nil, err .. ": " .. 
root + end + end + end + return true + end + prefix = prefix or conf.prefix if pl_path.exists(prefix) then - local _, err = pl_dir.rmtree(prefix) - -- Note: gojira mount default kong prefix as a volume so itself can't - -- be removed; only throw error if the prefix is indeed not empty + local _, err = rmtree(prefix) if err then - local fcnt = #assert(pl_dir.getfiles(prefix)) - local dcnt = #assert(pl_dir.getdirectories(prefix)) - if fcnt + dcnt > 0 then - error(err) - end + error(err) end end end From 22e0b13f49e4fbd7ed0ad1125cc090f2a3102cad Mon Sep 17 00:00:00 2001 From: Aapo Talvensaari Date: Mon, 4 Dec 2023 19:31:33 +0200 Subject: [PATCH 178/249] chore(tests): re-enable disabled mlcache tests (#12102) ### Summary Some mlcache tests were disabled because of flakiness. This commit re-enables them (hopefully this time without flakiness). Signed-off-by: Aapo Talvensaari --- t/05-mlcache/02-get.t | 6 ------ t/05-mlcache/03-peek.t | 30 ++++++++++++++++-------------- t/05-mlcache/15-renew.t | 6 ------ 3 files changed, 16 insertions(+), 26 deletions(-) diff --git a/t/05-mlcache/02-get.t b/t/05-mlcache/02-get.t index b2403547ede9..dea312ca673a 100644 --- a/t/05-mlcache/02-get.t +++ b/t/05-mlcache/02-get.t @@ -2379,7 +2379,6 @@ is stale: true === TEST 50: get() does not cache value in LRU indefinitely when retrieved from shm on last ms (see GH PR #58) ---- SKIP --- http_config eval: $::HttpConfig --- config location = /t { @@ -2419,8 +2418,6 @@ is stale: true assert(data == 42, err or "invalid data value: " .. data) ngx.say("hit_lvl: ", hit_lvl) - ngx.update_time() - local start = ngx.now() * 1000 while true do lru:delete("key") data, err, hit_lvl = cache:get("key", nil, cb) @@ -2431,9 +2428,6 @@ is stale: true end ngx.sleep(0) end - ngx.update_time() - local took = ngx.now() * 1000 - start - assert(took > 198 and took < 202) data, err, hit_lvl = cache:get("key", nil, cb) assert(data == 42, err or "invalid data value: " .. 
data) diff --git a/t/05-mlcache/03-peek.t b/t/05-mlcache/03-peek.t index c5f57626bfce..9a5b3978daf5 100644 --- a/t/05-mlcache/03-peek.t +++ b/t/05-mlcache/03-peek.t @@ -100,7 +100,6 @@ ttl: nil === TEST 3: peek() returns the remaining ttl if a key has been fetched before ---- SKIP --- http_config eval: $::HttpConfig --- config location = /t { @@ -117,21 +116,23 @@ ttl: nil return nil end - local val, err = cache:get("my_key", { neg_ttl = 19 }, cb) + local val, err = cache:get("my_key", { neg_ttl = 20 }, cb) if err then ngx.log(ngx.ERR, err) return end + ngx.sleep(1.1) + local ttl, err = cache:peek("my_key") if err then ngx.log(ngx.ERR, err) return end - ngx.say("ttl: ", math.ceil(ttl)) + ngx.say("ttl < 19: ", tostring(math.floor(ttl) < 19)) - ngx.sleep(1) + ngx.sleep(1.1) local ttl, err = cache:peek("my_key") if err then @@ -139,14 +140,14 @@ ttl: nil return end - ngx.say("ttl: ", math.ceil(ttl)) + ngx.say("ttl < 18: ", tostring(math.floor(ttl) < 18)) } } --- request GET /t --- response_body -ttl: 19 -ttl: 18 +ttl < 19: true +ttl < 18: true --- no_error_log [error] @@ -359,7 +360,6 @@ no ttl: false === TEST 8: peek() returns remaining ttl if shm_miss is specified ---- SKIP --- http_config eval: $::HttpConfig --- config location = /t { @@ -374,21 +374,23 @@ no ttl: false return nil end - local val, err = cache:get("my_key", { neg_ttl = 19 }, cb) + local val, err = cache:get("my_key", { neg_ttl = 20 }, cb) if err then ngx.log(ngx.ERR, err) return end + ngx.sleep(1.1) + local ttl, err = cache:peek("my_key") if err then ngx.log(ngx.ERR, err) return end - ngx.say("ttl: ", math.ceil(ttl)) + ngx.say("ttl < 19: ", tostring(math.floor(ttl) < 19)) - ngx.sleep(1) + ngx.sleep(1.1) local ttl, err = cache:peek("my_key") if err then @@ -396,14 +398,14 @@ no ttl: false return end - ngx.say("ttl: ", math.ceil(ttl)) + ngx.say("ttl < 18: ", tostring(math.floor(ttl) < 18)) } } --- request GET /t --- response_body -ttl: 19 -ttl: 18 +ttl < 19: true +ttl < 18: true --- no_error_log [error] diff --git a/t/05-mlcache/15-renew.t b/t/05-mlcache/15-renew.t index 34887a469bf0..074375dbfc5f 100644 --- a/t/05-mlcache/15-renew.t +++ b/t/05-mlcache/15-renew.t @@ -2378,7 +2378,6 @@ is stale: true === TEST 48: renew() does not cache value in LRU indefinitely when retrieved from shm on last ms (see GH PR #58) ---- SKIP --- http_config eval: $::HttpConfig --- config location = /t { @@ -2419,8 +2418,6 @@ is stale: true assert(data == 42, err or "invalid data value: " .. data) ngx.say("hit_lvl: ", hit_lvl) - ngx.update_time() - local start = ngx.now() * 1000 while true do lru:delete("key") data, err, hit_lvl = cache:get("key", nil, cb) @@ -2431,9 +2428,6 @@ is stale: true end ngx.sleep(0) end - ngx.update_time() - local took = ngx.now() * 1000 - start - assert(took > 198 and took < 202) data, err, hit_lvl = cache:get("key", nil, cb) assert(data == 42, err or "invalid data value: " .. 
data) From 08d989cf17e2bfdb68be8137b083f671ac78ca33 Mon Sep 17 00:00:00 2001 From: Aapo Talvensaari Date: Mon, 4 Dec 2023 19:32:59 +0200 Subject: [PATCH 179/249] feat(conf): add DHE-RSA-CHACHA20-POLY1305 cipher to the intermediate configuration (#12133) ### Summary Mozilla TLS recommendations added `DHE-RSA-CHACHA20-POLY1305` cipher to intermediate in their version 5.7, see: https://wiki.mozilla.org/Security/Server_Side_TLS Signed-off-by: Aapo Talvensaari --- .../kong/feat-add-cipher-to-the-intermediate.yml | 3 +++ kong/conf_loader/constants.lua | 8 ++++---- spec/01-unit/03-conf_loader_spec.lua | 2 +- 3 files changed, 8 insertions(+), 5 deletions(-) create mode 100644 changelog/unreleased/kong/feat-add-cipher-to-the-intermediate.yml diff --git a/changelog/unreleased/kong/feat-add-cipher-to-the-intermediate.yml b/changelog/unreleased/kong/feat-add-cipher-to-the-intermediate.yml new file mode 100644 index 000000000000..eac454bc5447 --- /dev/null +++ b/changelog/unreleased/kong/feat-add-cipher-to-the-intermediate.yml @@ -0,0 +1,3 @@ +message: add DHE-RSA-CHACHA20-POLY1305 cipher to the intermediate configuration +type: feature +scope: Configuration diff --git a/kong/conf_loader/constants.lua b/kong/conf_loader/constants.lua index 4cd4d2519991..17a4a9dfaab5 100644 --- a/kong/conf_loader/constants.lua +++ b/kong/conf_loader/constants.lua @@ -11,7 +11,7 @@ local BUNDLED_VAULTS = constants.BUNDLED_VAULTS local BUNDLED_PLUGINS = constants.BUNDLED_PLUGINS --- Version 5: https://wiki.mozilla.org/Security/Server_Side_TLS +-- Version 5.7: https://wiki.mozilla.org/Security/Server_Side_TLS local CIPHER_SUITES = { modern = { protocols = "TLSv1.3", @@ -27,7 +27,8 @@ local CIPHER_SUITES = { .. "ECDHE-ECDSA-CHACHA20-POLY1305:" .. "ECDHE-RSA-CHACHA20-POLY1305:" .. "DHE-RSA-AES128-GCM-SHA256:" - .. "DHE-RSA-AES256-GCM-SHA384", + .. "DHE-RSA-AES256-GCM-SHA384:" + .. "DHE-RSA-CHACHA20-POLY1305", dhparams = "ffdhe2048", prefer_server_ciphers = "off", }, @@ -63,7 +64,7 @@ local CIPHER_SUITES = { }, fips = { -- https://wiki.openssl.org/index.php/FIPS_mode_and_TLS -- TLSv1.0 and TLSv1.1 is not completely not FIPS compliant, - -- but must be used under certain condititions like key sizes, + -- but must be used under certain conditions like key sizes, -- signatures in the full chain that Kong can't control. -- In that case, we disables TLSv1.0 and TLSv1.1 and user -- can optionally turn them on if they are aware of the caveats. 
@@ -638,4 +639,3 @@ return { LMDB_VALIDATION_TAG = LMDB_VALIDATION_TAG, } - diff --git a/spec/01-unit/03-conf_loader_spec.lua b/spec/01-unit/03-conf_loader_spec.lua index f8e1446f856b..20de7423595b 100644 --- a/spec/01-unit/03-conf_loader_spec.lua +++ b/spec/01-unit/03-conf_loader_spec.lua @@ -1241,7 +1241,7 @@ describe("Configuration loader", function() it("defines ssl_ciphers by default", function() local conf, err = conf_loader(nil, {}) assert.is_nil(err) - assert.equal("ECDHE-ECDSA-AES128-GCM-SHA256:ECDHE-RSA-AES128-GCM-SHA256:ECDHE-ECDSA-AES256-GCM-SHA384:ECDHE-RSA-AES256-GCM-SHA384:ECDHE-ECDSA-CHACHA20-POLY1305:ECDHE-RSA-CHACHA20-POLY1305:DHE-RSA-AES128-GCM-SHA256:DHE-RSA-AES256-GCM-SHA384", conf.ssl_ciphers) + assert.equal("ECDHE-ECDSA-AES128-GCM-SHA256:ECDHE-RSA-AES128-GCM-SHA256:ECDHE-ECDSA-AES256-GCM-SHA384:ECDHE-RSA-AES256-GCM-SHA384:ECDHE-ECDSA-CHACHA20-POLY1305:ECDHE-RSA-CHACHA20-POLY1305:DHE-RSA-AES128-GCM-SHA256:DHE-RSA-AES256-GCM-SHA384:DHE-RSA-CHACHA20-POLY1305", conf.ssl_ciphers) end) it("explicitly defines ssl_ciphers", function() local conf, err = conf_loader(nil, { From 8d10cc07a24513fa9b0fb15cafd13cfabcbcc7a0 Mon Sep 17 00:00:00 2001 From: Qi Date: Tue, 5 Dec 2023 17:25:45 +0800 Subject: [PATCH 180/249] tests(request-debugging): fix flaky tests (#11892) --- .../21-request-debug/01-request-debug_spec.lua | 7 +++---- .../kong/plugins/muti-external-http-calls/handler.lua | 4 +++- 2 files changed, 6 insertions(+), 5 deletions(-) diff --git a/spec/02-integration/21-request-debug/01-request-debug_spec.lua b/spec/02-integration/21-request-debug/01-request-debug_spec.lua index 74ae4344c026..a507e4a80a00 100644 --- a/spec/02-integration/21-request-debug/01-request-debug_spec.lua +++ b/spec/02-integration/21-request-debug/01-request-debug_spec.lua @@ -144,8 +144,7 @@ local function get_output_header(_deployment, path, filter, fake_ip, token) ["X-Real-IP"] = fake_ip or "127.0.0.1", } }) - assert.not_same(500, res.status) - res:read_body() -- discard body + assert.not_same(500, res.status, res:read_body()) proxy_client:close() if not res.headers["X-Kong-Request-Debug-Output"] then @@ -512,7 +511,7 @@ describe(desc, function() local total_log = assert(tonumber(log_output.child.upstream.total_time)) local tfb_log = assert(tonumber(log_output.child.upstream.child.time_to_first_byte.total_time)) local streaming = assert(tonumber(log_output.child.upstream.child.streaming.total_time)) - assert.near(tfb_header, tfb_log, 10) + assert.near(tfb_header, tfb_log, 50) assert.same(total_log, tfb_log + streaming) assert.near(TIME_TO_FIRST_BYTE, tfb_log, 50) @@ -656,7 +655,7 @@ describe(desc, function() it("truncate/split too large debug output", function() local route_id = setup_route("/large_debug_output", upstream) - local plugin_id = setup_plugin(route_id, "muti-external-http-calls", { calls = 50 }) + local plugin_id = setup_plugin(route_id, "muti-external-http-calls", { calls = 10 }) finally(function() if plugin_id then diff --git a/spec/fixtures/custom_plugins/kong/plugins/muti-external-http-calls/handler.lua b/spec/fixtures/custom_plugins/kong/plugins/muti-external-http-calls/handler.lua index f27650bb83d1..b89845c9512d 100644 --- a/spec/fixtures/custom_plugins/kong/plugins/muti-external-http-calls/handler.lua +++ b/spec/fixtures/custom_plugins/kong/plugins/muti-external-http-calls/handler.lua @@ -12,7 +12,9 @@ function EnableBuffering:access(conf) for suffix = 0, conf.calls - 1 do local uri = "http://really.really.really.really.really.really.not.exists." .. 
suffix - httpc:request_uri(uri) + pcall(function() + httpc:request_uri(uri) + end) end end From 533d3f76177596dcb9b5911dec52eb2cfff9fdf7 Mon Sep 17 00:00:00 2001 From: Xiaoch Date: Tue, 5 Dec 2023 17:51:05 +0800 Subject: [PATCH 181/249] feat(templates): bump `dns_stale_ttl` default to 1 hour (#12087) A longer stale TTL can help reduce the load on less performant/reliable DNS servers, reducing proxy latency and availability impact to Kong's proxy path. KAG-3080 Co-authored-by: Datong Sun --------- Co-authored-by: Datong Sun --- changelog/unreleased/kong/bump_dns_stale_ttl.yml | 3 +++ kong.conf.default | 4 +++- kong/templates/kong_defaults.lua | 2 +- 3 files changed, 7 insertions(+), 2 deletions(-) create mode 100644 changelog/unreleased/kong/bump_dns_stale_ttl.yml diff --git a/changelog/unreleased/kong/bump_dns_stale_ttl.yml b/changelog/unreleased/kong/bump_dns_stale_ttl.yml new file mode 100644 index 000000000000..43ed55cb0795 --- /dev/null +++ b/changelog/unreleased/kong/bump_dns_stale_ttl.yml @@ -0,0 +1,3 @@ +message: Bump `dns_stale_ttl` default to 1 hour so stale DNS record can be used for longer time in case of resolver downtime. +type: performance +scope: Configuration diff --git a/kong.conf.default b/kong.conf.default index 14c2a3a09465..5e0b3bdc5e97 100644 --- a/kong.conf.default +++ b/kong.conf.default @@ -1529,7 +1529,7 @@ # property receives a value (in seconds), it # will override the TTL for all records. -#dns_stale_ttl = 4 # Defines, in seconds, how long a record will +#dns_stale_ttl = 3600 # Defines, in seconds, how long a record will # remain in cache past its TTL. This value # will be used while the new DNS record is # fetched in the background. @@ -1537,6 +1537,8 @@ # record until either the refresh query # completes, or the `dns_stale_ttl` number of # seconds have passed. + # This configuration enables Kong to be more + # resilient during resolver downtime. #dns_cache_size = 10000 # Defines the maximum allowed number of # DNS records stored in memory cache. 
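For readers unfamiliar with the stale-serving behaviour this default tunes: once a record's TTL has expired, Kong keeps answering from the expired record for up to `dns_stale_ttl` seconds while the refresh query runs in the background, so a slow or briefly unavailable resolver does not stall proxying. A rough, self-contained sketch of that idea (purely illustrative; this is a toy cache with a stale window, not Kong's actual DNS client):

```lua
-- Purely illustrative: a tiny cache that keeps serving expired entries for up
-- to `stale_ttl` extra seconds, which is the window dns_stale_ttl controls.
local DnsCache = {}
DnsCache.__index = DnsCache

function DnsCache.new(stale_ttl)
  return setmetatable({ stale_ttl = stale_ttl, entries = {} }, DnsCache)
end

function DnsCache:set(name, address, ttl)
  self.entries[name] = { address = address, expires_at = os.time() + ttl }
end

function DnsCache:get(name)
  local entry = self.entries[name]
  if not entry then
    return nil, "miss"
  end
  local now = os.time()
  if now <= entry.expires_at then
    return entry.address, "fresh"
  end
  if now <= entry.expires_at + self.stale_ttl then
    -- a real implementation would kick off an asynchronous refresh here
    return entry.address, "stale"
  end
  return nil, "expired"
end

local cache = DnsCache.new(3600)                 -- the new dns_stale_ttl default
cache:set("upstream.internal.example", "10.0.0.1", 30)  -- hypothetical record
print(cache:get("upstream.internal.example"))    --> 10.0.0.1   fresh
```

With the bumped default, that stale window is an hour rather than four seconds, which is what the `kong_defaults.lua` change below encodes.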
diff --git a/kong/templates/kong_defaults.lua b/kong/templates/kong_defaults.lua index c28245192924..eb6db07ae275 100644 --- a/kong/templates/kong_defaults.lua +++ b/kong/templates/kong_defaults.lua @@ -155,7 +155,7 @@ dns_resolver = NONE dns_hostsfile = /etc/hosts dns_order = LAST,SRV,A,CNAME dns_valid_ttl = NONE -dns_stale_ttl = 4 +dns_stale_ttl = 3600 dns_cache_size = 10000 dns_not_found_ttl = 30 dns_error_ttl = 1 From e9ac7c198c2d34d0798d3235edf21a65e6d8e901 Mon Sep 17 00:00:00 2001 From: samugi Date: Tue, 21 Nov 2023 16:35:04 +0100 Subject: [PATCH 182/249] fix(tracing): allow passing nil to span:set_attribute * passing a nil value to `span:set_attribute` is a NOOP if the attribute does not already exists, else it means unsetting that attribute * considered a fix because previously this was causing a stack trace when the DNS spans were created without a port --- kong/pdk/tracing.lua | 13 ++++++++++--- spec/01-unit/26-tracing/01-tracer_pdk_spec.lua | 13 +++++++++++-- 2 files changed, 21 insertions(+), 5 deletions(-) diff --git a/kong/pdk/tracing.lua b/kong/pdk/tracing.lua index 6337e1fddc03..c41500d50196 100644 --- a/kong/pdk/tracing.lua +++ b/kong/pdk/tracing.lua @@ -295,11 +295,12 @@ end -- -- @function span:set_attribute -- @tparam string key --- @tparam string|number|boolean value +-- @tparam string|number|boolean|nil value -- @usage -- span:set_attribute("net.transport", "ip_tcp") -- span:set_attribute("net.peer.port", 443) -- span:set_attribute("exception.escaped", true) +-- span:set_attribute("unset.this", nil) function span_mt:set_attribute(key, value) -- key is decided by the programmer, so if it is not a string, we should -- error out. @@ -307,8 +308,14 @@ function span_mt:set_attribute(key, value) error("invalid key", 2) end - local vtyp = type(value) - if vtyp ~= "string" and vtyp ~= "number" and vtyp ~= "boolean" then + local vtyp + if value == nil then + vtyp = value + else + vtyp = type(value) + end + + if vtyp ~= "string" and vtyp ~= "number" and vtyp ~= "boolean" and vtyp ~= nil then -- we should not error out here, as most of the caller does not catch -- errors, and they are hooking to core facilities, which may cause -- unexpected behavior. 
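The behaviour this enables is easiest to read as a usage pattern: `span:set_attribute("key")` (value omitted or nil) now unsets the attribute, or does nothing if it was never set, instead of logging a validation error. A small self-contained sketch of those semantics (it models the attribute table directly rather than requiring `kong.pdk.tracing`, which only loads inside Kong):

```lua
-- Models the semantics this patch gives span:set_attribute(key, nil):
-- a nil value removes the attribute (or is a no-op if it is absent),
-- and only non-nil values of unsupported types are rejected.
local function set_attribute(span, key, value)
  assert(type(key) == "string", "invalid key")

  local vtyp
  if value ~= nil then
    vtyp = type(value)
  end

  if vtyp ~= "string" and vtyp ~= "number" and vtyp ~= "boolean" and vtyp ~= nil then
    return nil, "invalid value type: " .. vtyp
  end

  span.attributes = span.attributes or {}
  span.attributes[key] = value   -- assigning nil removes the key
  return true
end

local span = { attributes = {} }
assert(set_attribute(span, "net.peer.port", 443))
assert(span.attributes["net.peer.port"] == 443)

assert(set_attribute(span, "net.peer.port", nil))  -- unsets the attribute
assert(span.attributes["net.peer.port"] == nil)

assert(set_attribute(span, "never.set", nil))      -- no-op, not an error
```

The spec update below exercises exactly this sequence: a nil set on a missing key is a no-op, a value can be set, and a subsequent nil set removes it.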
diff --git a/spec/01-unit/26-tracing/01-tracer_pdk_spec.lua b/spec/01-unit/26-tracing/01-tracer_pdk_spec.lua index 2cd05a72a0f0..cef90a327dda 100644 --- a/spec/01-unit/26-tracing/01-tracer_pdk_spec.lua +++ b/spec/01-unit/26-tracing/01-tracer_pdk_spec.lua @@ -195,11 +195,20 @@ describe("Tracer PDK", function() assert.has_no.error(function () span:finish() end) end) - it("fails set_attribute", function () + it("set_attribute validation", function () local span = c_tracer.start_span("meow") + -- nil value is allowed as a noop span:set_attribute("key1") - assert.spy(log_spy).was_called_with(ngx.ERR, match.is_string()) + assert.spy(log_spy).was_not_called_with(ngx.ERR, match.is_string()) + assert.is_nil(span.attributes["key1"]) + + span:set_attribute("key1", "value1") + assert.equal("value1", span.attributes["key1"]) + + -- nil value unsets the attribute + span:set_attribute("key1") + assert.is_nil(span.attributes["key1"]) span:set_attribute("key1", function() end) assert.spy(log_spy).was_called_with(ngx.ERR, match.is_string()) From 8e5cb497b73512afa972c18eda07232c8a94ead6 Mon Sep 17 00:00:00 2001 From: Zhefeng C <38037704+catbro666@users.noreply.github.com> Date: Wed, 6 Dec 2023 03:20:22 +0800 Subject: [PATCH 183/249] tests(*): use `.test` domain in most of the tests (#12152) * tests(*): use `.test` domain in most of the tests Following Vinicius's [advocation](https://github.com/Kong/kong-ee/pull/7467#discussion_r1410706385), change most of url the tests to use `.test` domain to avoid external influencies. [Ref](https://www.iana.org/assignments/special-use-domain-names/special-use-domain-names.xhtml) Except: - `example`, `example.com`, `example.net`, `example.org` - tests that are used to verify the format, validity or wildcard of the url itself (sni/router related tests), to prevent inadvertent change of the test logic - true external urls that may be accessed (service host, dns/balancer related tests) - some urls that are not sure if they will be accessed or not --- .../01-db/01-schema/06-routes_spec.lua | 6 +- spec/01-unit/03-conf_loader_spec.lua | 36 +- spec/01-unit/05-utils_spec.lua | 12 +- spec/01-unit/09-balancer/01-generic_spec.lua | 186 +++--- spec/01-unit/10-log_serializer_spec.lua | 6 +- spec/01-unit/24-runloop_certificate_spec.lua | 14 +- .../02-admin_gui_template_spec.lua | 4 +- .../01-helpers/02-blueprints_spec.lua | 6 +- .../03-db/02-db_core_entities_spec.lua | 46 +- spec/02-integration/03-db/07-tags_spec.lua | 14 +- .../04-admin_api/05-cache_routes_spec.lua | 12 +- .../06-certificates_routes_spec.lua | 8 +- .../04-admin_api/09-routes_routes_spec.lua | 52 +- .../04-admin_api/10-services_routes_spec.lua | 20 +- .../04-admin_api/15-off_spec.lua | 4 +- .../04-admin_api/22-debug_spec.lua | 18 +- .../05-proxy/02-router_spec.lua | 12 +- .../05-proxy/03-upstream_headers_spec.lua | 164 ++--- .../05-proxy/04-plugins_triggering_spec.lua | 28 +- spec/02-integration/05-proxy/05-dns_spec.lua | 8 +- .../10-balancer/01-healthchecks_spec.lua | 8 +- .../10-balancer/04-round-robin_spec.lua | 6 +- .../10-balancer/05-recreate-request_spec.lua | 6 +- .../05-proxy/14-server_tokens_spec.lua | 64 +- .../05-proxy/25-upstream_keepalive_spec.lua | 70 +-- .../05-proxy/31-stream_tls_spec.lua | 14 +- .../05-proxy/33-request-id-header_spec.lua | 2 +- .../02-core_entities_invalidations_spec.lua | 72 +-- .../03-plugins_iterator_invalidation_spec.lua | 6 +- spec/02-integration/07-sdk/01-ctx_spec.lua | 8 +- spec/02-integration/07-sdk/02-log_spec.lua | 4 +- .../07-sdk/04-plugin-config_spec.lua | 4 +- 
spec/02-integration/07-sdk/05-pdk_spec.lua | 4 +- .../16-queues/01-shutdown_spec.lua | 2 +- .../03-plugins/01-tcp-log/01-tcp-log_spec.lua | 24 +- .../03-plugins/02-udp-log/01-udp-log_spec.lua | 12 +- .../03-plugins/03-http-log/02-schema_spec.lua | 20 +- spec/03-plugins/04-file-log/01-log_spec.lua | 36 +- spec/03-plugins/05-syslog/01-log_spec.lua | 28 +- spec/03-plugins/06-statsd/01-log_spec.lua | 116 ++-- spec/03-plugins/07-loggly/01-log_spec.lua | 40 +- spec/03-plugins/08-datadog/01-log_spec.lua | 38 +- .../03-plugins/09-key-auth/02-access_spec.lua | 118 ++-- .../09-key-auth/03-invalidations_spec.lua | 16 +- .../09-key-auth/04-hybrid_mode_spec.lua | 6 +- .../10-basic-auth/03-access_spec.lua | 72 +-- .../10-basic-auth/04-invalidations_spec.lua | 16 +- .../11-correlation-id/01-access_spec.lua | 50 +- .../01-access_spec.lua | 32 +- spec/03-plugins/13-cors/01-access_spec.lua | 186 +++--- .../14-request-termination/02-access_spec.lua | 64 +- .../03-integration_spec.lua | 4 +- .../04-filter_spec.lua | 14 +- .../05-big_response_body_spec.lua | 6 +- spec/03-plugins/16-jwt/03-access_spec.lua | 116 ++-- .../16-jwt/04-invalidations_spec.lua | 18 +- .../17-ip-restriction/02-access_spec.lua | 140 ++--- spec/03-plugins/18-acl/02-access_spec.lua | 144 ++--- .../18-acl/03-invalidations_spec.lua | 20 +- .../19-hmac-auth/03-access_spec.lua | 170 +++--- .../19-hmac-auth/04-invalidations_spec.lua | 16 +- .../20-ldap-auth/01-access_spec.lua | 82 +-- .../20-ldap-auth/02-invalidations_spec.lua | 6 +- .../21-bot-detection/01-access_spec.lua | 54 +- .../02-invalidations_spec.lua | 10 +- .../21-bot-detection/03-api_spec.lua | 4 +- .../23-rate-limiting/03-api_spec.lua | 4 +- .../23-rate-limiting/05-integration_spec.lua | 18 +- .../04-access_spec.lua | 94 +-- .../05-integration_spec.lua | 18 +- spec/03-plugins/25-oauth2/02-api_spec.lua | 50 +- spec/03-plugins/25-oauth2/03-access_spec.lua | 574 +++++++++--------- .../27-aws-lambda/05-aws-serializer_spec.lua | 12 +- .../27-aws-lambda/06-request-util_spec.lua | 28 +- .../27-aws-lambda/08-sam-integration_spec.lua | 8 +- .../27-aws-lambda/99-access_spec.lua | 142 ++--- spec/03-plugins/29-acme/01-client_spec.lua | 8 +- .../29-acme/05-redis_storage_spec.lua | 2 +- .../29-acme/06-hybrid_mode_spec.lua | 2 +- spec/03-plugins/30-session/01-access_spec.lua | 20 +- .../02-kong_storage_adapter_spec.lua | 16 +- .../31-proxy-cache/02-access_spec.lua | 192 +++--- .../03-plugins/31-proxy-cache/03-api_spec.lua | 24 +- .../31-proxy-cache/04-invalidations_spec.lua | 20 +- .../02-access_spec.lua | 56 +- .../04-phases_spec.lua | 4 +- .../35-azure-functions/01-access_spec.lua | 30 +- spec/helpers.lua | 4 +- 88 files changed, 1965 insertions(+), 1965 deletions(-) diff --git a/spec/01-unit/01-db/01-schema/06-routes_spec.lua b/spec/01-unit/01-db/01-schema/06-routes_spec.lua index f4ef090ce0fe..551aecc0fa58 100644 --- a/spec/01-unit/01-db/01-schema/06-routes_spec.lua +++ b/spec/01-unit/01-db/01-schema/06-routes_spec.lua @@ -1188,7 +1188,7 @@ describe("routes schema (flavor = traditional/traditional_compatible)", function it("errors if strip_path is set on grpc/grpcs", function() local s = { id = "a4fbd24e-6a52-4937-bd78-2536713072d2" } local route = Routes:process_auto_fields({ - hosts = { "foo.grpc.com" }, + hosts = { "foo.grpc.test" }, protocols = { "grpc" }, strip_path = true, service = s, @@ -1200,7 +1200,7 @@ describe("routes schema (flavor = traditional/traditional_compatible)", function }, errs) route = Routes:process_auto_fields({ - hosts = { "foo.grpc.com" }, + hosts = { 
"foo.grpc.test" }, protocols = { "grpcs" }, strip_path = true, service = s, @@ -1215,7 +1215,7 @@ describe("routes schema (flavor = traditional/traditional_compatible)", function it("errors if tls and tls_passthrough set on a same route", function() local s = { id = "a4fbd24e-6a52-4937-bd78-2536713072d2" } local route = Routes:process_auto_fields({ - snis = { "foo.grpc.com" }, + snis = { "foo.grpc.test" }, protocols = { "tls", "tls_passthrough" }, service = s, }, "insert") diff --git a/spec/01-unit/03-conf_loader_spec.lua b/spec/01-unit/03-conf_loader_spec.lua index 20de7423595b..c2d0df449682 100644 --- a/spec/01-unit/03-conf_loader_spec.lua +++ b/spec/01-unit/03-conf_loader_spec.lua @@ -241,27 +241,27 @@ describe("Configuration loader", function() it("extracts ssl flags properly when hostnames contain them", function() local conf conf = assert(conf_loader(nil, { - proxy_listen = "ssl.myname.com:8000", - admin_listen = "ssl.myname.com:8001", - admin_gui_listen = "ssl.myname.com:8002", + proxy_listen = "ssl.myname.test:8000", + admin_listen = "ssl.myname.test:8001", + admin_gui_listen = "ssl.myname.test:8002", })) - assert.equal("ssl.myname.com", conf.proxy_listeners[1].ip) + assert.equal("ssl.myname.test", conf.proxy_listeners[1].ip) assert.equal(false, conf.proxy_listeners[1].ssl) - assert.equal("ssl.myname.com", conf.admin_listeners[1].ip) + assert.equal("ssl.myname.test", conf.admin_listeners[1].ip) assert.equal(false, conf.admin_listeners[1].ssl) - assert.equal("ssl.myname.com", conf.admin_gui_listeners[1].ip) + assert.equal("ssl.myname.test", conf.admin_gui_listeners[1].ip) assert.equal(false, conf.admin_gui_listeners[1].ssl) conf = assert(conf_loader(nil, { - proxy_listen = "ssl_myname.com:8000 ssl", - admin_listen = "ssl_myname.com:8001 ssl", - admin_gui_listen = "ssl_myname.com:8002 ssl", + proxy_listen = "ssl_myname.test:8000 ssl", + admin_listen = "ssl_myname.test:8001 ssl", + admin_gui_listen = "ssl_myname.test:8002 ssl", })) - assert.equal("ssl_myname.com", conf.proxy_listeners[1].ip) + assert.equal("ssl_myname.test", conf.proxy_listeners[1].ip) assert.equal(true, conf.proxy_listeners[1].ssl) - assert.equal("ssl_myname.com", conf.admin_listeners[1].ip) + assert.equal("ssl_myname.test", conf.admin_listeners[1].ip) assert.equal(true, conf.admin_listeners[1].ssl) - assert.equal("ssl_myname.com", conf.admin_gui_listeners[1].ip) + assert.equal("ssl_myname.test", conf.admin_gui_listeners[1].ip) assert.equal(true, conf.admin_gui_listeners[1].ssl) end) it("extracts 'off' from proxy_listen/admin_listen/admin_gui_listen", function() @@ -285,13 +285,13 @@ describe("Configuration loader", function() assert.same({}, conf.admin_gui_listeners) -- not off with names containing 'off' conf = assert(conf_loader(nil, { - proxy_listen = "offshore.com:9000", - admin_listen = "offshore.com:9001", - admin_gui_listen = "offshore.com:9002", + proxy_listen = "offshore.test:9000", + admin_listen = "offshore.test:9001", + admin_gui_listen = "offshore.test:9002", })) - assert.same("offshore.com", conf.proxy_listeners[1].ip) - assert.same("offshore.com", conf.admin_listeners[1].ip) - assert.same("offshore.com", conf.admin_gui_listeners[1].ip) + assert.same("offshore.test", conf.proxy_listeners[1].ip) + assert.same("offshore.test", conf.admin_listeners[1].ip) + assert.same("offshore.test", conf.admin_gui_listeners[1].ip) end) it("attaches prefix paths", function() local conf = assert(conf_loader()) diff --git a/spec/01-unit/05-utils_spec.lua b/spec/01-unit/05-utils_spec.lua index dbd9944cfd8f..d358954f1205 
100644 --- a/spec/01-unit/05-utils_spec.lua +++ b/spec/01-unit/05-utils_spec.lua @@ -595,14 +595,14 @@ describe("Utils", function() assert.are.same({host = "0000:0000:0000:0000:0000:0000:0000:0001", type = "ipv6", port = 80}, utils.normalize_ip("[::1]:80")) assert.are.same({host = "0000:0000:0000:0000:0000:0000:0000:0001", type = "ipv6", port = nil}, utils.normalize_ip("::1")) assert.are.same({host = "localhost", type = "name", port = 80}, utils.normalize_ip("localhost:80")) - assert.are.same({host = "mashape.com", type = "name", port = nil}, utils.normalize_ip("mashape.com")) + assert.are.same({host = "mashape.test", type = "name", port = nil}, utils.normalize_ip("mashape.test")) assert.is_nil((utils.normalize_ip("1.2.3.4:8x0"))) assert.is_nil((utils.normalize_ip("1.2.3.400"))) assert.is_nil((utils.normalize_ip("[::1]:8x0"))) assert.is_nil((utils.normalize_ip(":x:1"))) assert.is_nil((utils.normalize_ip("localhost:8x0"))) - assert.is_nil((utils.normalize_ip("mashape..com"))) + assert.is_nil((utils.normalize_ip("mashape..test"))) end) end) describe("formatting", function() @@ -612,21 +612,21 @@ describe("Utils", function() assert.are.equal("[0000:0000:0000:0000:0000:0000:0000:0001]", utils.format_host("::1")) assert.are.equal("[0000:0000:0000:0000:0000:0000:0000:0001]:80", utils.format_host("::1", 80)) assert.are.equal("localhost", utils.format_host("localhost")) - assert.are.equal("mashape.com:80", utils.format_host("mashape.com", 80)) + assert.are.equal("mashape.test:80", utils.format_host("mashape.test", 80)) -- passthrough (string) assert.are.equal("1.2.3.4", utils.format_host(utils.normalize_ipv4("1.2.3.4"))) assert.are.equal("1.2.3.4:80", utils.format_host(utils.normalize_ipv4("1.2.3.4:80"))) assert.are.equal("[0000:0000:0000:0000:0000:0000:0000:0001]", utils.format_host(utils.normalize_ipv6("::1"))) assert.are.equal("[0000:0000:0000:0000:0000:0000:0000:0001]:80", utils.format_host(utils.normalize_ipv6("[::1]:80"))) assert.are.equal("localhost", utils.format_host(utils.check_hostname("localhost"))) - assert.are.equal("mashape.com:80", utils.format_host(utils.check_hostname("mashape.com:80"))) + assert.are.equal("mashape.test:80", utils.format_host(utils.check_hostname("mashape.test:80"))) -- passthrough general (table) assert.are.equal("1.2.3.4", utils.format_host(utils.normalize_ip("1.2.3.4"))) assert.are.equal("1.2.3.4:80", utils.format_host(utils.normalize_ip("1.2.3.4:80"))) assert.are.equal("[0000:0000:0000:0000:0000:0000:0000:0001]", utils.format_host(utils.normalize_ip("::1"))) assert.are.equal("[0000:0000:0000:0000:0000:0000:0000:0001]:80", utils.format_host(utils.normalize_ip("[::1]:80"))) assert.are.equal("localhost", utils.format_host(utils.normalize_ip("localhost"))) - assert.are.equal("mashape.com:80", utils.format_host(utils.normalize_ip("mashape.com:80"))) + assert.are.equal("mashape.test:80", utils.format_host(utils.normalize_ip("mashape.test:80"))) -- passthrough errors local one, two = utils.format_host(utils.normalize_ipv4("1.2.3.4.5")) assert.are.equal("nilstring", type(one) .. type(two)) @@ -634,7 +634,7 @@ describe("Utils", function() assert.are.equal("nilstring", type(one) .. type(two)) local one, two = utils.format_host(utils.check_hostname("//bad..name\\:123")) assert.are.equal("nilstring", type(one) .. type(two)) - local one, two = utils.format_host(utils.normalize_ip("m a s h a p e.com:80")) + local one, two = utils.format_host(utils.normalize_ip("m a s h a p e.test:80")) assert.are.equal("nilstring", type(one) .. 
type(two)) end) end) diff --git a/spec/01-unit/09-balancer/01-generic_spec.lua b/spec/01-unit/09-balancer/01-generic_spec.lua index 7ce28f6bc5cb..6d2c6c1f38ca 100644 --- a/spec/01-unit/09-balancer/01-generic_spec.lua +++ b/spec/01-unit/09-balancer/01-generic_spec.lua @@ -298,8 +298,8 @@ for _, algorithm in ipairs{ "consistent-hashing", "least-connections", "round-ro it("adding a host",function() dnsA({ - { name = "arecord.tst", address = "1.2.3.4" }, - { name = "arecord.tst", address = "5.6.7.8" }, + { name = "arecord.test", address = "1.2.3.4" }, + { name = "arecord.test", address = "5.6.7.8" }, }) assert.same({ @@ -332,7 +332,7 @@ for _, algorithm in ipairs{ "consistent-hashing", "least-connections", "round-ro }, }, b:getStatus()) - add_target(b, "arecord.tst", 8001, 25) + add_target(b, "arecord.test", 8001, 25) assert.same({ healthy = true, weight = { @@ -361,7 +361,7 @@ for _, algorithm in ipairs{ "consistent-hashing", "least-connections", "round-ro }, }, { - host = "arecord.tst", + host = "arecord.test", port = 8001, dns = "A", nodeWeight = 25, @@ -391,8 +391,8 @@ for _, algorithm in ipairs{ "consistent-hashing", "least-connections", "round-ro it("switching address availability",function() dnsA({ - { name = "arecord.tst", address = "1.2.3.4" }, - { name = "arecord.tst", address = "5.6.7.8" }, + { name = "arecord.test", address = "1.2.3.4" }, + { name = "arecord.test", address = "5.6.7.8" }, }) assert.same({ @@ -425,7 +425,7 @@ for _, algorithm in ipairs{ "consistent-hashing", "least-connections", "round-ro }, }, b:getStatus()) - add_target(b, "arecord.tst", 8001, 25) + add_target(b, "arecord.test", 8001, 25) assert.same({ healthy = true, weight = { @@ -454,7 +454,7 @@ for _, algorithm in ipairs{ "consistent-hashing", "least-connections", "round-ro }, }, { - host = "arecord.tst", + host = "arecord.test", port = 8001, dns = "A", nodeWeight = 25, @@ -482,8 +482,8 @@ for _, algorithm in ipairs{ "consistent-hashing", "least-connections", "round-ro }, b:getStatus()) -- switch to unavailable - assert(b:setAddressStatus(b:findAddress("1.2.3.4", 8001, "arecord.tst"), false)) - add_target(b, "arecord.tst", 8001, 25) + assert(b:setAddressStatus(b:findAddress("1.2.3.4", 8001, "arecord.test"), false)) + add_target(b, "arecord.test", 8001, 25) assert.same({ healthy = true, weight = { @@ -512,7 +512,7 @@ for _, algorithm in ipairs{ "consistent-hashing", "least-connections", "round-ro }, }, { - host = "arecord.tst", + host = "arecord.test", port = 8001, dns = "A", nodeWeight = 25, @@ -540,7 +540,7 @@ for _, algorithm in ipairs{ "consistent-hashing", "least-connections", "round-ro }, b:getStatus()) -- switch to available - assert(b:setAddressStatus(b:findAddress("1.2.3.4", 8001, "arecord.tst"), true)) + assert(b:setAddressStatus(b:findAddress("1.2.3.4", 8001, "arecord.test"), true)) assert.same({ healthy = true, weight = { @@ -569,7 +569,7 @@ for _, algorithm in ipairs{ "consistent-hashing", "least-connections", "round-ro }, }, { - host = "arecord.tst", + host = "arecord.test", port = 8001, dns = "A", nodeWeight = 25, @@ -599,11 +599,11 @@ for _, algorithm in ipairs{ "consistent-hashing", "least-connections", "round-ro it("changing weight of an available address",function() dnsA({ - { name = "arecord.tst", address = "1.2.3.4" }, - { name = "arecord.tst", address = "5.6.7.8" }, + { name = "arecord.test", address = "1.2.3.4" }, + { name = "arecord.test", address = "5.6.7.8" }, }) - add_target(b, "arecord.tst", 8001, 25) + add_target(b, "arecord.test", 8001, 25) assert.same({ healthy = true, weight = { 
@@ -632,7 +632,7 @@ for _, algorithm in ipairs{ "consistent-hashing", "least-connections", "round-ro }, }, { - host = "arecord.tst", + host = "arecord.test", port = 8001, dns = "A", nodeWeight = 25, @@ -659,7 +659,7 @@ for _, algorithm in ipairs{ "consistent-hashing", "least-connections", "round-ro }, }, b:getStatus()) - add_target(b, "arecord.tst", 8001, 50) -- adding again changes weight + add_target(b, "arecord.test", 8001, 50) -- adding again changes weight assert.same({ healthy = true, weight = { @@ -688,7 +688,7 @@ for _, algorithm in ipairs{ "consistent-hashing", "least-connections", "round-ro }, }, { - host = "arecord.tst", + host = "arecord.test", port = 8001, dns = "A", nodeWeight = 50, @@ -718,11 +718,11 @@ for _, algorithm in ipairs{ "consistent-hashing", "least-connections", "round-ro it("changing weight of an unavailable address",function() dnsA({ - { name = "arecord.tst", address = "1.2.3.4" }, - { name = "arecord.tst", address = "5.6.7.8" }, + { name = "arecord.test", address = "1.2.3.4" }, + { name = "arecord.test", address = "5.6.7.8" }, }) - add_target(b, "arecord.tst", 8001, 25) + add_target(b, "arecord.test", 8001, 25) assert.same({ healthy = true, weight = { @@ -751,7 +751,7 @@ for _, algorithm in ipairs{ "consistent-hashing", "least-connections", "round-ro }, }, { - host = "arecord.tst", + host = "arecord.test", port = 8001, dns = "A", nodeWeight = 25, @@ -779,7 +779,7 @@ for _, algorithm in ipairs{ "consistent-hashing", "least-connections", "round-ro }, b:getStatus()) -- switch to unavailable - assert(b:setAddressStatus(b:findAddress("1.2.3.4", 8001, "arecord.tst"), false)) + assert(b:setAddressStatus(b:findAddress("1.2.3.4", 8001, "arecord.test"), false)) assert.same({ healthy = true, weight = { @@ -808,7 +808,7 @@ for _, algorithm in ipairs{ "consistent-hashing", "least-connections", "round-ro }, }, { - host = "arecord.tst", + host = "arecord.test", port = 8001, dns = "A", nodeWeight = 25, @@ -835,7 +835,7 @@ for _, algorithm in ipairs{ "consistent-hashing", "least-connections", "round-ro }, }, b:getStatus()) - add_target(b, "arecord.tst", 8001, 50) -- adding again changes weight + add_target(b, "arecord.test", 8001, 50) -- adding again changes weight assert.same({ healthy = true, weight = { @@ -864,7 +864,7 @@ for _, algorithm in ipairs{ "consistent-hashing", "least-connections", "round-ro }, }, { - host = "arecord.tst", + host = "arecord.test", port = 8001, dns = "A", nodeWeight = 50, @@ -898,11 +898,11 @@ for _, algorithm in ipairs{ "consistent-hashing", "least-connections", "round-ro it("adding a host",function() dnsSRV({ - { name = "srvrecord.tst", target = "1.1.1.1", port = 9000, weight = 10 }, - { name = "srvrecord.tst", target = "2.2.2.2", port = 9001, weight = 10 }, + { name = "srvrecord.test", target = "1.1.1.1", port = 9000, weight = 10 }, + { name = "srvrecord.test", target = "2.2.2.2", port = 9001, weight = 10 }, }) - add_target(b, "srvrecord.tst", 8001, 25) + add_target(b, "srvrecord.test", 8001, 25) assert.same({ healthy = true, weight = { @@ -931,7 +931,7 @@ for _, algorithm in ipairs{ "consistent-hashing", "least-connections", "round-ro }, }, { - host = "srvrecord.tst", + host = "srvrecord.test", port = 8001, dns = "SRV", nodeWeight = 25, @@ -961,11 +961,11 @@ for _, algorithm in ipairs{ "consistent-hashing", "least-connections", "round-ro it("switching address availability",function() dnsSRV({ - { name = "srvrecord.tst", target = "1.1.1.1", port = 9000, weight = 10 }, - { name = "srvrecord.tst", target = "2.2.2.2", port = 9001, weight = 10 }, + { 
name = "srvrecord.test", target = "1.1.1.1", port = 9000, weight = 10 }, + { name = "srvrecord.test", target = "2.2.2.2", port = 9001, weight = 10 }, }) - add_target(b, "srvrecord.tst", 8001, 25) + add_target(b, "srvrecord.test", 8001, 25) assert.same({ healthy = true, weight = { @@ -994,7 +994,7 @@ for _, algorithm in ipairs{ "consistent-hashing", "least-connections", "round-ro }, }, { - host = "srvrecord.tst", + host = "srvrecord.test", port = 8001, dns = "SRV", nodeWeight = 25, @@ -1022,7 +1022,7 @@ for _, algorithm in ipairs{ "consistent-hashing", "least-connections", "round-ro }, b:getStatus()) -- switch to unavailable - assert(b:setAddressStatus(b:findAddress("1.1.1.1", 9000, "srvrecord.tst"), false)) + assert(b:setAddressStatus(b:findAddress("1.1.1.1", 9000, "srvrecord.test"), false)) assert.same({ healthy = true, weight = { @@ -1051,7 +1051,7 @@ for _, algorithm in ipairs{ "consistent-hashing", "least-connections", "round-ro }, }, { - host = "srvrecord.tst", + host = "srvrecord.test", port = 8001, dns = "SRV", nodeWeight = 25, @@ -1079,7 +1079,7 @@ for _, algorithm in ipairs{ "consistent-hashing", "least-connections", "round-ro }, b:getStatus()) -- switch to available - assert(b:setAddressStatus(b:findAddress("1.1.1.1", 9000, "srvrecord.tst"), true)) + assert(b:setAddressStatus(b:findAddress("1.1.1.1", 9000, "srvrecord.test"), true)) assert.same({ healthy = true, weight = { @@ -1108,7 +1108,7 @@ for _, algorithm in ipairs{ "consistent-hashing", "least-connections", "round-ro }, }, { - host = "srvrecord.tst", + host = "srvrecord.test", port = 8001, dns = "SRV", nodeWeight = 25, @@ -1138,11 +1138,11 @@ for _, algorithm in ipairs{ "consistent-hashing", "least-connections", "round-ro it("changing weight of an available address (dns update)",function() local record = dnsSRV({ - { name = "srvrecord.tst", target = "1.1.1.1", port = 9000, weight = 10 }, - { name = "srvrecord.tst", target = "2.2.2.2", port = 9001, weight = 10 }, + { name = "srvrecord.test", target = "1.1.1.1", port = 9000, weight = 10 }, + { name = "srvrecord.test", target = "2.2.2.2", port = 9001, weight = 10 }, }) - add_target(b, "srvrecord.tst", 8001, 10) + add_target(b, "srvrecord.test", 8001, 10) assert.same({ healthy = true, weight = { @@ -1171,7 +1171,7 @@ for _, algorithm in ipairs{ "consistent-hashing", "least-connections", "round-ro }, }, { - host = "srvrecord.tst", + host = "srvrecord.test", port = 8001, dns = "SRV", nodeWeight = 10, @@ -1200,11 +1200,11 @@ for _, algorithm in ipairs{ "consistent-hashing", "least-connections", "round-ro dnsExpire(record) dnsSRV({ - { name = "srvrecord.tst", target = "1.1.1.1", port = 9000, weight = 20 }, - { name = "srvrecord.tst", target = "2.2.2.2", port = 9001, weight = 20 }, + { name = "srvrecord.test", target = "1.1.1.1", port = 9000, weight = 20 }, + { name = "srvrecord.test", target = "2.2.2.2", port = 9001, weight = 20 }, }) targets.resolve_targets(b.targets) -- touch all addresses to force dns renewal - add_target(b, "srvrecord.tst", 8001, 99) -- add again to update nodeWeight + add_target(b, "srvrecord.test", 8001, 99) -- add again to update nodeWeight assert.same({ healthy = true, @@ -1234,7 +1234,7 @@ for _, algorithm in ipairs{ "consistent-hashing", "least-connections", "round-ro }, }, { - host = "srvrecord.tst", + host = "srvrecord.test", port = 8001, dns = "SRV", nodeWeight = 99, @@ -1264,11 +1264,11 @@ for _, algorithm in ipairs{ "consistent-hashing", "least-connections", "round-ro it("changing weight of an unavailable address (dns update)",function() local record 
= dnsSRV({ - { name = "srvrecord.tst", target = "1.1.1.1", port = 9000, weight = 10 }, - { name = "srvrecord.tst", target = "2.2.2.2", port = 9001, weight = 10 }, + { name = "srvrecord.test", target = "1.1.1.1", port = 9000, weight = 10 }, + { name = "srvrecord.test", target = "2.2.2.2", port = 9001, weight = 10 }, }) - add_target(b, "srvrecord.tst", 8001, 25) + add_target(b, "srvrecord.test", 8001, 25) assert.same({ healthy = true, weight = { @@ -1297,7 +1297,7 @@ for _, algorithm in ipairs{ "consistent-hashing", "least-connections", "round-ro }, }, { - host = "srvrecord.tst", + host = "srvrecord.test", port = 8001, dns = "SRV", nodeWeight = 25, @@ -1325,7 +1325,7 @@ for _, algorithm in ipairs{ "consistent-hashing", "least-connections", "round-ro }, b:getStatus()) -- switch to unavailable - assert(b:setAddressStatus(b:findAddress("2.2.2.2", 9001, "srvrecord.tst"), false)) + assert(b:setAddressStatus(b:findAddress("2.2.2.2", 9001, "srvrecord.test"), false)) assert.same({ healthy = true, weight = { @@ -1354,7 +1354,7 @@ for _, algorithm in ipairs{ "consistent-hashing", "least-connections", "round-ro }, }, { - host = "srvrecord.tst", + host = "srvrecord.test", port = 8001, dns = "SRV", nodeWeight = 25, @@ -1384,11 +1384,11 @@ for _, algorithm in ipairs{ "consistent-hashing", "least-connections", "round-ro -- update weight, through dns renewal dnsExpire(record) dnsSRV({ - { name = "srvrecord.tst", target = "1.1.1.1", port = 9000, weight = 20 }, - { name = "srvrecord.tst", target = "2.2.2.2", port = 9001, weight = 20 }, + { name = "srvrecord.test", target = "1.1.1.1", port = 9000, weight = 20 }, + { name = "srvrecord.test", target = "2.2.2.2", port = 9001, weight = 20 }, }) targets.resolve_targets(b.targets) -- touch all addresses to force dns renewal - add_target(b, "srvrecord.tst", 8001, 99) -- add again to update nodeWeight + add_target(b, "srvrecord.test", 8001, 99) -- add again to update nodeWeight assert.same({ healthy = true, @@ -1418,7 +1418,7 @@ for _, algorithm in ipairs{ "consistent-hashing", "least-connections", "round-ro }, }, { - host = "srvrecord.tst", + host = "srvrecord.test", port = 8001, dns = "SRV", nodeWeight = 99, @@ -1470,16 +1470,16 @@ for _, algorithm in ipairs{ "consistent-hashing", "least-connections", "round-ro it("returns expected results/types when using SRV with name ('useSRVname=false')", function() dnsA({ - { name = "getkong.org", address = "1.2.3.4" }, + { name = "getkong.test", address = "1.2.3.4" }, }) dnsSRV({ - { name = "konghq.com", target = "getkong.org", port = 2, weight = 3 }, + { name = "konghq.test", target = "getkong.test", port = 2, weight = 3 }, }) - add_target(b, "konghq.com", 8000, 50) + add_target(b, "konghq.test", 8000, 50) local ip, port, hostname, handle = b:getPeer(true, nil, "a string") assert.equal("1.2.3.4", ip) assert.equal(2, port) - assert.equal("konghq.com", hostname) + assert.equal("konghq.test", hostname) assert.not_nil(handle) end) end) @@ -1503,16 +1503,16 @@ for _, algorithm in ipairs{ "consistent-hashing", "least-connections", "round-ro it("returns expected results/types when using SRV with name ('useSRVname=true')", function() dnsA({ - { name = "getkong.org", address = "1.2.3.4" }, + { name = "getkong.test", address = "1.2.3.4" }, }) dnsSRV({ - { name = "konghq.com", target = "getkong.org", port = 2, weight = 3 }, + { name = "konghq.test", target = "getkong.test", port = 2, weight = 3 }, }) - add_target(b, "konghq.com", 8000, 50) + add_target(b, "konghq.test", 8000, 50) local ip, port, hostname, handle = b:getPeer(true, nil, 
"a string") assert.equal("1.2.3.4", ip) assert.equal(2, port) - assert.equal("getkong.org", hostname) + assert.equal("getkong.test", hostname) assert.not_nil(handle) end) end) @@ -1535,29 +1535,29 @@ for _, algorithm in ipairs{ "consistent-hashing", "least-connections", "round-ro it("returns expected results/types when using SRV with IP", function() dnsSRV({ - { name = "konghq.com", target = "1.1.1.1", port = 2, weight = 3 }, + { name = "konghq.test", target = "1.1.1.1", port = 2, weight = 3 }, }) - add_target(b, "konghq.com", 8000, 50) + add_target(b, "konghq.test", 8000, 50) local ip, port, hostname, handle = b:getPeer(true, nil, "a string") assert.equal("1.1.1.1", ip) assert.equal(2, port) - assert.equal("konghq.com", hostname) + assert.equal("konghq.test", hostname) assert.not_nil(handle) end) it("returns expected results/types when using SRV with name ('useSRVname=false')", function() dnsA({ - { name = "getkong.org", address = "1.2.3.4" }, + { name = "getkong.test", address = "1.2.3.4" }, }) dnsSRV({ - { name = "konghq.com", target = "getkong.org", port = 2, weight = 3 }, + { name = "konghq.test", target = "getkong.test", port = 2, weight = 3 }, }) - add_target(b, "konghq.com", 8000, 50) + add_target(b, "konghq.test", 8000, 50) local ip, port, hostname, handle = b:getPeer(true, nil, "a string") assert.equal("1.2.3.4", ip) assert.equal(2, port) - assert.equal("konghq.com", hostname) + assert.equal("konghq.test", hostname) assert.not_nil(handle) end) @@ -1566,29 +1566,29 @@ for _, algorithm in ipairs{ "consistent-hashing", "least-connections", "round-ro b.useSRVname = true -- override setting specified when creating dnsA({ - { name = "getkong.org", address = "1.2.3.4" }, + { name = "getkong.test", address = "1.2.3.4" }, }) dnsSRV({ - { name = "konghq.com", target = "getkong.org", port = 2, weight = 3 }, + { name = "konghq.test", target = "getkong.test", port = 2, weight = 3 }, }) - add_target(b, "konghq.com", 8000, 50) + add_target(b, "konghq.test", 8000, 50) local ip, port, hostname, handle = b:getPeer(true, nil, "a string") assert.equal("1.2.3.4", ip) assert.equal(2, port) - assert.equal("getkong.org", hostname) + assert.equal("getkong.test", hostname) assert.not_nil(handle) end) it("returns expected results/types when using A", function() dnsA({ - { name = "getkong.org", address = "1.2.3.4" }, + { name = "getkong.test", address = "1.2.3.4" }, }) - add_target(b, "getkong.org", 8000, 50) + add_target(b, "getkong.test", 8000, 50) local ip, port, hostname, handle = b:getPeer(true, nil, "another string") assert.equal("1.2.3.4", ip) assert.equal(8000, port) - assert.equal("getkong.org", hostname) + assert.equal("getkong.test", hostname) assert.not_nil(handle) end) @@ -1678,13 +1678,13 @@ for _, algorithm in ipairs{ "consistent-hashing", "least-connections", "round-ro it("recovers when dns entries are replaced by healthy ones", function() local record = dnsA({ - { name = "getkong.org", address = "1.2.3.4", ttl = 2 }, + { name = "getkong.test", address = "1.2.3.4", ttl = 2 }, }) - add_target(b, "getkong.org", 8000, 50) + add_target(b, "getkong.test", 8000, 50) assert.not_nil(b:getPeer(true, nil, "from the client")) -- mark it as unhealthy - assert(b:setAddressStatus(b:findAddress("1.2.3.4", 8000, "getkong.org", false))) + assert(b:setAddressStatus(b:findAddress("1.2.3.4", 8000, "getkong.test", false))) assert.same({ nil, "Balancer is unhealthy", nil, nil, }, { @@ -1696,7 +1696,7 @@ for _, algorithm in ipairs{ "consistent-hashing", "least-connections", "round-ro -- balancer should now recover 
since a new healthy backend is available record.expire = 0 dnsA({ - { name = "getkong.org", address = "5.6.7.8", ttl = 60 }, + { name = "getkong.test", address = "5.6.7.8", ttl = 60 }, }) targets.resolve_targets(b.targets) @@ -1739,15 +1739,15 @@ for _, algorithm in ipairs{ "consistent-hashing", "least-connections", "round-ro add_target(b, "127.0.0.1", 8000, 100) add_target(b, "0::1", 8080, 50) dnsSRV({ - { name = "srvrecord.tst", target = "1.1.1.1", port = 9000, weight = 10 }, - { name = "srvrecord.tst", target = "2.2.2.2", port = 9001, weight = 10 }, + { name = "srvrecord.test", target = "1.1.1.1", port = 9000, weight = 10 }, + { name = "srvrecord.test", target = "2.2.2.2", port = 9001, weight = 10 }, }) - add_target(b, "srvrecord.tst", 1234, 9999) + add_target(b, "srvrecord.test", 1234, 9999) dnsA({ - { name = "getkong.org", address = "5.6.7.8", ttl = 0 }, + { name = "getkong.test", address = "5.6.7.8", ttl = 0 }, }) - add_target(b, "getkong.org", 5678, 1000) - add_target(b, "notachanceinhell.this.name.exists.konghq.com", 4321, 100) + add_target(b, "getkong.test", 5678, 1000) + add_target(b, "notachanceinhell.this.name.exists.konghq.test", 4321, 100) local status = b:getStatus() table.sort(status.hosts, function(hostA, hostB) return hostA.host < hostB.host end) @@ -1799,7 +1799,7 @@ for _, algorithm in ipairs{ "consistent-hashing", "least-connections", "round-ro }, }, { - host = "getkong.org", + host = "getkong.test", port = 5678, dns = "ttl=0, virtual SRV", nodeWeight = 1000, @@ -1811,14 +1811,14 @@ for _, algorithm in ipairs{ "consistent-hashing", "least-connections", "round-ro addresses = { { healthy = true, - ip = "getkong.org", + ip = "getkong.test", port = 5678, weight = 1000 }, }, }, { - host = "notachanceinhell.this.name.exists.konghq.com", + host = "notachanceinhell.this.name.exists.konghq.test", port = 4321, dns = "dns server error: 3 name error", nodeWeight = 100, @@ -1830,7 +1830,7 @@ for _, algorithm in ipairs{ "consistent-hashing", "least-connections", "round-ro addresses = {}, }, { - host = "srvrecord.tst", + host = "srvrecord.test", port = 1234, dns = "SRV", nodeWeight = 9999, diff --git a/spec/01-unit/10-log_serializer_spec.lua b/spec/01-unit/10-log_serializer_spec.lua index 005772ca8b01..daa4489d9fb8 100644 --- a/spec/01-unit/10-log_serializer_spec.lua +++ b/spec/01-unit/10-log_serializer_spec.lua @@ -27,7 +27,7 @@ describe("kong.log.serialize", function() request_uri = "/request_uri", upstream_uri = "/upstream_uri", scheme = "http", - host = "test.com", + host = "test.test", server_port = "80", request_length = "200", bytes_sent = "99", @@ -82,7 +82,7 @@ describe("kong.log.serialize", function() assert.same({header1 = "header1", header2 = "header2", authorization = "REDACTED"}, res.request.headers) assert.equal("POST", res.request.method) assert.same({"arg1", "arg2"}, res.request.querystring) - assert.equal("http://test.com:80/request_uri", res.request.url) + assert.equal("http://test.test:80/request_uri", res.request.url) assert.equal("/upstream_uri", res.upstream_uri) assert.equal("500, 200 : 200, 200", res.upstream_status) assert.equal(200, res.request.size) @@ -109,7 +109,7 @@ describe("kong.log.serialize", function() local res = kong.log.serialize({ngx = ngx, kong = kong, }) assert.is_table(res) assert.is_table(res.request) - assert.equal("http://test.com:5000/request_uri", res.request.url) + assert.equal("http://test.test:5000/request_uri", res.request.url) end) it("serializes the matching Route and Services", function() diff --git 
a/spec/01-unit/24-runloop_certificate_spec.lua b/spec/01-unit/24-runloop_certificate_spec.lua index c20584113e26..9ccd70dacc43 100644 --- a/spec/01-unit/24-runloop_certificate_spec.lua +++ b/spec/01-unit/24-runloop_certificate_spec.lua @@ -11,26 +11,26 @@ describe("kong.runloop.certificate", function() end) it("produces suffix wildcard SNI", function() - local prefix, suffix = produce_wild_snis("domain.com") + local prefix, suffix = produce_wild_snis("domain.test") assert.is_nil(prefix) assert.equal("domain.*", suffix) end) it("produces prefix and suffix wildcard SNIs", function() - local prefix, suffix = produce_wild_snis("www.domain.com") - assert.equal("*.domain.com", prefix) + local prefix, suffix = produce_wild_snis("www.domain.test") + assert.equal("*.domain.test", prefix) assert.equal("www.domain.*", suffix) end) it("produces prefix and suffix wildcard SNIs on sub-subnames", function() - local prefix, suffix = produce_wild_snis("foo.www.domain.com") - assert.equal("*.www.domain.com", prefix) + local prefix, suffix = produce_wild_snis("foo.www.domain.test") + assert.equal("*.www.domain.test", prefix) assert.equal("foo.www.domain.*", suffix) end) it("does not produce wildcard SNIs when input is wildcard", function() - local prefix, suffix = produce_wild_snis("*.domain.com") - assert.equal("*.domain.com", prefix) + local prefix, suffix = produce_wild_snis("*.domain.test") + assert.equal("*.domain.test", prefix) assert.is_nil(suffix) prefix, suffix = produce_wild_snis("domain.*") diff --git a/spec/01-unit/29-admin_gui/02-admin_gui_template_spec.lua b/spec/01-unit/29-admin_gui/02-admin_gui_template_spec.lua index 6a262eee2492..9a3df93ab523 100644 --- a/spec/01-unit/29-admin_gui/02-admin_gui_template_spec.lua +++ b/spec/01-unit/29-admin_gui/02-admin_gui_template_spec.lua @@ -15,7 +15,7 @@ describe("admin_gui template", function() local conf = { prefix = mock_prefix, admin_gui_url = "http://0.0.0.0:8002", - admin_gui_api_url = "https://admin-reference.kong-cloud.com", + admin_gui_api_url = "https://admin-reference.kong-cloud.test", admin_gui_path = '/manager', admin_gui_listeners = { { @@ -65,7 +65,7 @@ describe("admin_gui template", function() assert.matches("'ADMIN_GUI_URL': 'http://0.0.0.0:8002'", kconfig_content, nil, true) assert.matches("'ADMIN_GUI_PATH': '/manager'", kconfig_content, nil, true) - assert.matches("'ADMIN_API_URL': 'https://admin-reference.kong-cloud.com'", kconfig_content, nil, true) + assert.matches("'ADMIN_API_URL': 'https://admin-reference.kong-cloud.test'", kconfig_content, nil, true) assert.matches("'ADMIN_API_PORT': '8001'", kconfig_content, nil, true) assert.matches("'ADMIN_API_SSL_PORT': '8444'", kconfig_content, nil, true) end) diff --git a/spec/02-integration/01-helpers/02-blueprints_spec.lua b/spec/02-integration/01-helpers/02-blueprints_spec.lua index 58f222d45afd..798d3ee02077 100644 --- a/spec/02-integration/01-helpers/02-blueprints_spec.lua +++ b/spec/02-integration/01-helpers/02-blueprints_spec.lua @@ -178,7 +178,7 @@ for _, strategy in helpers.each_strategy() do local co = bp.consumers:insert() local c = bp.oauth2_credentials:insert({ consumer = { id = co.id }, - redirect_uris = { "http://foo.com" }, + redirect_uris = { "http://foo.test" }, }) assert.equals("oauth2 credential", c.name) assert.equals("secret", c.client_secret) @@ -189,7 +189,7 @@ for _, strategy in helpers.each_strategy() do local co = bp.consumers:insert() local cr = bp.oauth2_credentials:insert({ consumer = { id = co.id }, - redirect_uris = { "http://foo.com" }, + redirect_uris = { 
"http://foo.test" }, }) local c = bp.oauth2_authorization_codes:insert({ credential = { id = cr.id } }) assert.is_string(c.code) @@ -201,7 +201,7 @@ for _, strategy in helpers.each_strategy() do local co = bp.consumers:insert() local cr = bp.oauth2_credentials:insert({ consumer = { id = co.id }, - redirect_uris = { "http://foo.com" }, + redirect_uris = { "http://foo.test" }, }) local t = bp.oauth2_tokens:insert({ credential = { id = cr.id } }) assert.equals("bearer", t.token_type) diff --git a/spec/02-integration/03-db/02-db_core_entities_spec.lua b/spec/02-integration/03-db/02-db_core_entities_spec.lua index 88a16896dbab..0dedb916f6b0 100644 --- a/spec/02-integration/03-db/02-db_core_entities_spec.lua +++ b/spec/02-integration/03-db/02-db_core_entities_spec.lua @@ -242,7 +242,7 @@ for _, strategy in helpers.each_strategy() do lazy_setup(function() for i = 1, 10 do bp.routes:insert({ - hosts = { "example-" .. i .. ".com" }, + hosts = { "example-" .. i .. ".test" }, methods = { "GET" }, }) end @@ -392,7 +392,7 @@ for _, strategy in helpers.each_strategy() do lazy_setup(function() for i = 1, 101 do bp.routes:insert({ - hosts = { "example-" .. i .. ".com" }, + hosts = { "example-" .. i .. ".test" }, methods = { "GET" }, }) end @@ -443,7 +443,7 @@ for _, strategy in helpers.each_strategy() do lazy_setup(function() for i = 1, 50 do bp.routes:insert({ - hosts = { "example-" .. i .. ".com" }, + hosts = { "example-" .. i .. ".test" }, methods = { "GET" } }) end @@ -513,7 +513,7 @@ for _, strategy in helpers.each_strategy() do local route, err, err_t = db.routes:insert({ protocols = { "http" }, hosts = { "example.com" }, - service = assert(db.services:insert({ host = "service.com" })), + service = assert(db.services:insert({ host = "service.test" })), path_handling = "v0", }, { nulls = true, workspace = "8a139c70-49a1-4ba2-98a6-bb36f534269d", }) assert.is_nil(route) @@ -654,7 +654,7 @@ for _, strategy in helpers.each_strategy() do local route, err, err_t = db.routes:insert({ protocols = { "http" }, hosts = { "example.com" }, - service = assert(db.services:insert({ host = "service.com" })), + service = assert(db.services:insert({ host = "service.test" })), }, { ttl = 100, }) @@ -678,7 +678,7 @@ for _, strategy in helpers.each_strategy() do local route, err, err_t = db.routes:insert({ protocols = { "http" }, hosts = { "example.com" }, - service = assert(db.services:insert({ host = "service.com" })), + service = assert(db.services:insert({ host = "service.test" })), path_handling = "v0", }, { nulls = true }) assert.is_nil(err_t) @@ -1442,7 +1442,7 @@ for _, strategy in helpers.each_strategy() do id = a_blank_uuid, name = "my_other_service", protocol = "http", - host = "other-example.com", + host = "other-example.test", } assert.is_nil(service) assert.same({ @@ -1469,7 +1469,7 @@ for _, strategy in helpers.each_strategy() do local service, _, err_t = db.services:insert { name = "my_service_name", protocol = "http", - host = "other-example.com", + host = "other-example.test", } assert.is_nil(service) assert.same({ @@ -1625,7 +1625,7 @@ for _, strategy in helpers.each_strategy() do for i = 1, 5 do assert(db.services:insert({ name = "service_" .. i, - host = "service" .. i .. ".com", + host = "service" .. i .. 
".test", })) end end) @@ -1640,7 +1640,7 @@ for _, strategy in helpers.each_strategy() do -- I/O it("returns existing Service", function() local service = assert(db.services:select_by_name("service_1")) - assert.equal("service1.com", service.host) + assert.equal("service1.test", service.host) end) it("returns nothing on non-existing Service", function() @@ -1695,7 +1695,7 @@ for _, strategy in helpers.each_strategy() do it("updates an existing Service", function() local service = assert(db.services:insert({ - host = "service.com" + host = "service.test" })) local updated_service, err, err_t = db.services:update(service, { protocol = "https" }) @@ -1722,7 +1722,7 @@ for _, strategy in helpers.each_strategy() do local service, _, err_t = db.services:insert { name = "service_bis", protocol = "http", - host = "other-example.com", + host = "other-example.test", } assert.is_nil(err_t) @@ -1755,11 +1755,11 @@ for _, strategy in helpers.each_strategy() do s1 = assert(db.services:insert({ name = "update-by-name-service", - host = "update-by-name-service.com", + host = "update-by-name-service.test", })) s2 = assert(db.services:insert({ name = "existing-service", - host = "existing-service.com", + host = "existing-service.test", })) end) @@ -1801,7 +1801,7 @@ for _, strategy in helpers.each_strategy() do it("updates an existing Service", function() local service = assert(db.services:insert({ - host = "service.com" + host = "service.test" })) local updated_service, err, err_t = db.services:update(service, { protocol = "https" }) @@ -1895,7 +1895,7 @@ for _, strategy in helpers.each_strategy() do lazy_setup(function() service = assert(db.services:insert({ name = "delete-by-name-service", - host = "service1.com", + host = "service1.test", })) end) @@ -1937,7 +1937,7 @@ for _, strategy in helpers.each_strategy() do it(":insert() a Route with a relation to a Service", function() local service = assert(db.services:insert({ protocol = "http", - host = "service.com" + host = "service.test" })) local route, err, err_t = db.routes:insert({ @@ -1981,8 +1981,8 @@ for _, strategy in helpers.each_strategy() do end) it(":update() attaches a Route to an existing Service", function() - local service1 = bp.services:insert({ host = "service1.com" }) - local service2 = bp.services:insert({ host = "service2.com" }) + local service1 = bp.services:insert({ host = "service1.test" }) + local service2 = bp.services:insert({ host = "service2.test" }) local route = bp.routes:insert({ service = service1, methods = { "GET" } }) @@ -1995,7 +1995,7 @@ for _, strategy in helpers.each_strategy() do end) it(":update() detaches a Route from an existing Service", function() - local service1 = bp.services:insert({ host = "service1.com" }) + local service1 = bp.services:insert({ host = "service1.test" }) local route = bp.routes:insert({ service = service1, methods = { "GET" } }) local new_route, err, err_t = db.routes:update(route, { service = ngx.null @@ -2172,7 +2172,7 @@ for _, strategy in helpers.each_strategy() do for i = 1, 102 do bp.routes:insert { - hosts = { "paginate-" .. i .. ".com" }, + hosts = { "paginate-" .. i .. ".test" }, service = service, } end @@ -2211,7 +2211,7 @@ for _, strategy in helpers.each_strategy() do for i = 1, 10 do bp.routes:insert { - hosts = { "paginate-" .. i .. ".com" }, + hosts = { "paginate-" .. i .. ".test" }, service = service, } end @@ -2349,7 +2349,7 @@ for _, strategy in helpers.each_strategy() do for i = 1, 10 do bp.routes:insert { - hosts = { "paginate-" .. i .. 
".com" }, + hosts = { "paginate-" .. i .. ".test" }, service = service, } end diff --git a/spec/02-integration/03-db/07-tags_spec.lua b/spec/02-integration/03-db/07-tags_spec.lua index ac826ba019a2..2327a15bce7b 100644 --- a/spec/02-integration/03-db/07-tags_spec.lua +++ b/spec/02-integration/03-db/07-tags_spec.lua @@ -32,7 +32,7 @@ for _, strategy in helpers.each_strategy() do for i = 1, test_entity_count do local service = { - host = "example-" .. i .. ".com", + host = "example-" .. i .. ".test", name = "service" .. i, tags = { "team_ a", "level "..fmod(i, 5), "service"..i } } @@ -109,7 +109,7 @@ for _, strategy in helpers.each_strategy() do it(func, function() local tags = { "team_b_" .. func, "team_ a" } local row, err, err_t = db.services[func](db.services, - key, { tags = tags, host = 'whatever.com' }) + key, { tags = tags, host = 'whatever.test' }) assert.is_nil(err) assert.is_nil(err_t) @@ -198,7 +198,7 @@ for _, strategy in helpers.each_strategy() do it(func, function() local row, err, err_t = db.services[func](db.services, - key, { tags = tags, host = 'whatever.com' }) + key, { tags = tags, host = 'whatever.test' }) assert.is_nil(err) assert.is_nil(err_t) @@ -221,7 +221,7 @@ for _, strategy in helpers.each_strategy() do local total_entities_count = 100 for i = 1, total_entities_count do local service = { - host = "anotherexample-" .. i .. ".org", + host = "anotherexample-" .. i .. ".test", name = "service-paging" .. i, tags = { "paging", "team_paging_" .. fmod(i, 5), "irrelevant_tag" } } @@ -351,7 +351,7 @@ for _, strategy in helpers.each_strategy() do it("#db errors if tag value is invalid", function() local ok, err = pcall(bp.services.insert, bp.services, { - host = "invalid-tag.com", + host = "invalid-tag.test", name = "service-invalid-tag", tags = { "tag,with,commas" } }) @@ -359,7 +359,7 @@ for _, strategy in helpers.each_strategy() do assert.matches("invalid tag", err) local ok, err = pcall(bp.services.insert, bp.services, { - host = "invalid-tag.com", + host = "invalid-tag.test", name = "service-invalid-tag", tags = { "tag/with/slashes" } }) @@ -367,7 +367,7 @@ for _, strategy in helpers.each_strategy() do assert.matches("invalid tag", err) local ok, err = pcall(bp.services.insert, bp.services, { - host = "invalid-tag.com", + host = "invalid-tag.test", name = "service-invalid-tag", tags = { "tag-with-invalid-utf8" .. string.char(255) } }) diff --git a/spec/02-integration/04-admin_api/05-cache_routes_spec.lua b/spec/02-integration/04-admin_api/05-cache_routes_spec.lua index 1f5dcfaf33aa..b8bef46ae889 100644 --- a/spec/02-integration/04-admin_api/05-cache_routes_spec.lua +++ b/spec/02-integration/04-admin_api/05-cache_routes_spec.lua @@ -18,12 +18,12 @@ describe("Admin API /cache [#" .. strategy .. "]", function() local service = bp.services:insert() bp.routes:insert { - hosts = { "cache.com" }, + hosts = { "cache.test" }, service = service, } bp.routes:insert { - hosts = { "cache.com" }, + hosts = { "cache.test" }, methods = { "POST" }, service = service, } @@ -76,7 +76,7 @@ describe("Admin API /cache [#" .. strategy .. "]", function() cache_value = "my_value", }, headers = { - ["Host"] = "cache.com", + ["Host"] = "cache.test", ["Content-Type"] = "application/x-www-form-urlencoded", }, }) @@ -105,7 +105,7 @@ describe("Admin API /cache [#" .. strategy .. 
"]", function() cache_value = "value_to_purge", }, headers = { - ["Host"] = "cache.com", + ["Host"] = "cache.test", ["Content-Type"] = "application/x-www-form-urlencoded", }, }) @@ -139,7 +139,7 @@ describe("Admin API /cache [#" .. strategy .. "]", function() cache_value = "value_to_purge", }, headers = { - ["Host"] = "cache.com", + ["Host"] = "cache.test", ["Content-Type"] = "application/x-www-form-urlencoded", }, }) @@ -153,7 +153,7 @@ describe("Admin API /cache [#" .. strategy .. "]", function() cache_value = "value_to_purge", }, headers = { - ["Host"] = "cache.com", + ["Host"] = "cache.test", ["Content-Type"] = "application/x-www-form-urlencoded", }, }) diff --git a/spec/02-integration/04-admin_api/06-certificates_routes_spec.lua b/spec/02-integration/04-admin_api/06-certificates_routes_spec.lua index d8baf1aeae63..848885c81670 100644 --- a/spec/02-integration/04-admin_api/06-certificates_routes_spec.lua +++ b/spec/02-integration/04-admin_api/06-certificates_routes_spec.lua @@ -346,7 +346,7 @@ describe("Admin API: #" .. strategy, function() end) it("returns 404 for a random non-existing sni", function() - local res = client:get("/certificates/doesntexist.com") + local res = client:get("/certificates/doesntexist.test") assert.res_status(404, res) end) end) @@ -1165,14 +1165,14 @@ describe("Admin API: #" .. strategy, function() local certificate = add_certificate() bp.snis:insert({ - name = "*.wildcard.com", + name = "*.wildcard.test", certificate = { id = certificate.id }, }) - local res = client:get("/snis/%2A.wildcard.com") + local res = client:get("/snis/%2A.wildcard.test") local body = assert.res_status(200, res) local json = cjson.decode(body) - assert.equal("*.wildcard.com", json.name) + assert.equal("*.wildcard.test", json.name) end) end) end) diff --git a/spec/02-integration/04-admin_api/09-routes_routes_spec.lua b/spec/02-integration/04-admin_api/09-routes_routes_spec.lua index 38d0c8969f04..20ab5d8a5734 100644 --- a/spec/02-integration/04-admin_api/09-routes_routes_spec.lua +++ b/spec/02-integration/04-admin_api/09-routes_routes_spec.lua @@ -76,7 +76,7 @@ for _, strategy in helpers.each_strategy() do local res = client:post("/routes", { body = { protocols = { "http" }, - hosts = { "my.route.com" }, + hosts = { "my.route.test" }, headers = { location = { "my-location" } }, service = bp.services:insert(), }, @@ -84,7 +84,7 @@ for _, strategy in helpers.each_strategy() do }) local body = assert.res_status(201, res) local json = cjson.decode(body) - assert.same({ "my.route.com" }, json.hosts) + assert.same({ "my.route.test" }, json.hosts) assert.same({ location = { "my-location" } }, json.headers) assert.is_number(json.created_at) assert.is_number(json.regex_priority) @@ -106,14 +106,14 @@ for _, strategy in helpers.each_strategy() do local res = client:post("/routes", { body = { protocols = { "grpc", "grpcs" }, - hosts = { "my.route.com" }, + hosts = { "my.route.test" }, service = bp.services:insert(), }, headers = { ["Content-Type"] = content_type } }) local body = assert.res_status(201, res) local json = cjson.decode(body) - assert.same({ "my.route.com" }, json.hosts) + assert.same({ "my.route.test" }, json.hosts) assert.is_number(json.created_at) assert.is_number(json.regex_priority) assert.is_string(json.id) @@ -135,13 +135,13 @@ for _, strategy in helpers.each_strategy() do local res = client:post("/routes", { body = { protocols = { "http" }, - hosts = { "my.route.com" }, + hosts = { "my.route.test" }, }, headers = { ["Content-Type"] = content_type } }) local body = 
assert.res_status(201, res) local json = cjson.decode(body) - assert.same({ "my.route.com" }, json.hosts) + assert.same({ "my.route.test" }, json.hosts) assert.is_number(json.created_at) assert.is_number(json.regex_priority) assert.is_string(json.id) @@ -163,13 +163,13 @@ for _, strategy in helpers.each_strategy() do local res = client:post("/routes", { body = { protocols = { "grpc", "grpcs" }, - hosts = { "my.route.com" }, + hosts = { "my.route.test" }, }, headers = { ["Content-Type"] = content_type } }) local body = assert.res_status(201, res) local json = cjson.decode(body) - assert.same({ "my.route.com" }, json.hosts) + assert.same({ "my.route.test" }, json.hosts) assert.is_number(json.created_at) assert.is_number(json.regex_priority) assert.is_string(json.id) @@ -194,7 +194,7 @@ for _, strategy in helpers.each_strategy() do body = { protocols = { "http" }, methods = { "GET", "POST", "PATCH" }, - hosts = { "foo.api.com", "bar.api.com" }, + hosts = { "foo.api.test", "bar.api.test" }, paths = { "/foo", "/bar" }, service = { id = s.id }, }, @@ -203,7 +203,7 @@ for _, strategy in helpers.each_strategy() do local body = assert.res_status(201, res) local json = cjson.decode(body) - assert.same({ "foo.api.com", "bar.api.com" }, json.hosts) + assert.same({ "foo.api.test", "bar.api.test" }, json.hosts) assert.same({ "/foo","/bar" }, json.paths) assert.same({ "GET", "POST", "PATCH" }, json.methods) assert.same(s.id, json.service.id) @@ -221,7 +221,7 @@ for _, strategy in helpers.each_strategy() do local res = client:post("/routes", { body = { protocols = { "grpc", "grpcs" }, - hosts = { "foo.api.com", "bar.api.com" }, + hosts = { "foo.api.test", "bar.api.test" }, paths = { "/foo", "/bar" }, service = { id = s.id }, }, @@ -230,7 +230,7 @@ for _, strategy in helpers.each_strategy() do local body = assert.res_status(201, res) local json = cjson.decode(body) - assert.same({ "foo.api.com", "bar.api.com" }, json.hosts) + assert.same({ "foo.api.test", "bar.api.test" }, json.hosts) assert.same({ "/foo","/bar" }, json.paths) assert.same(s.id, json.service.id) assert.same({ "grpc", "grpcs"}, json.protocols) @@ -249,7 +249,7 @@ for _, strategy in helpers.each_strategy() do body = { protocols = { "http" }, methods = { "GET", "POST", "PATCH" }, - hosts = { "foo.api.com", "bar.api.com" }, + hosts = { "foo.api.test", "bar.api.test" }, paths = { "/foo", "/bar" }, service = { name = s.name }, }, @@ -258,7 +258,7 @@ for _, strategy in helpers.each_strategy() do local body = assert.res_status(201, res) local json = cjson.decode(body) - assert.same({ "foo.api.com", "bar.api.com" }, json.hosts) + assert.same({ "foo.api.test", "bar.api.test" }, json.hosts) assert.same({ "/foo","/bar" }, json.paths) assert.same({ "GET", "POST", "PATCH" }, json.methods) assert.same(s.id, json.service.id) @@ -276,7 +276,7 @@ for _, strategy in helpers.each_strategy() do local res = client:post("/routes", { body = { protocols = { "grpc", "grpcs" }, - hosts = { "foo.api.com", "bar.api.com" }, + hosts = { "foo.api.test", "bar.api.test" }, paths = { "/foo", "/bar" }, service = { name = s.name }, }, @@ -285,7 +285,7 @@ for _, strategy in helpers.each_strategy() do local body = assert.res_status(201, res) local json = cjson.decode(body) - assert.same({ "foo.api.com", "bar.api.com" }, json.hosts) + assert.same({ "foo.api.test", "bar.api.test" }, json.hosts) assert.same({ "/foo","/bar" }, json.paths) assert.same(s.id, json.service.id) assert.same({ "grpc", "grpcs"}, json.protocols) @@ -1443,12 +1443,12 @@ for _, strategy in 
helpers.each_strategy() do ["Content-Type"] = content_type }, body = { - url = "http://edited2.com:1234/foo", + url = "http://edited2.test:1234/foo", }, }) local body = assert.res_status(200, res) local json = cjson.decode(body) - assert.equal("edited2.com", json.host) + assert.equal("edited2.test", json.host) assert.equal(1234, json.port) assert.equal("/foo", json.path) @@ -1467,7 +1467,7 @@ for _, strategy in helpers.each_strategy() do }, body = { name = "edited", - host = "edited.com", + host = "edited.test", path = cjson.null, }, }) @@ -1537,12 +1537,12 @@ for _, strategy in helpers.each_strategy() do ["Content-Type"] = content_type }, body = { - url = "http://konghq.com", + url = "http://konghq.test", }, }) local body = assert.res_status(200, res) local json = cjson.decode(body) - assert.same("konghq.com", json.host) + assert.same("konghq.test", json.host) local in_db = assert(db.services:select(json, { nulls = true })) assert.same(json, in_db) @@ -1627,12 +1627,12 @@ for _, strategy in helpers.each_strategy() do ["Content-Type"] = content_type }, body = { - url = "http://edited2.com:1234/foo", + url = "http://edited2.test:1234/foo", }, }) local body = assert.res_status(200, res) local json = cjson.decode(body) - assert.equal("edited2.com", json.host) + assert.equal("edited2.test", json.host) assert.equal(1234, json.port) assert.equal("/foo", json.path) @@ -1651,7 +1651,7 @@ for _, strategy in helpers.each_strategy() do }, body = { name = "edited", - host = "edited.com", + host = "edited.test", path = cjson.null, }, }) @@ -1990,7 +1990,7 @@ for _, strategy in helpers.each_strategy() do local res = client:post("/routes", { body = { protocols = { "http" }, - hosts = { "my.route.com" }, + hosts = { "my.route.test" }, headers = { location = { "my-location" } }, service = bp.services:insert(), }, @@ -1998,7 +1998,7 @@ for _, strategy in helpers.each_strategy() do }) local body = assert.res_status(201, res) local json = cjson.decode(body) - assert.same({ "my.route.com" }, json.hosts) + assert.same({ "my.route.test" }, json.hosts) assert.same({ location = { "my-location" } }, json.headers) assert.is_number(json.created_at) assert.is_number(json.regex_priority) diff --git a/spec/02-integration/04-admin_api/10-services_routes_spec.lua b/spec/02-integration/04-admin_api/10-services_routes_spec.lua index 644c92dc6f23..b1fe3be1cc70 100644 --- a/spec/02-integration/04-admin_api/10-services_routes_spec.lua +++ b/spec/02-integration/04-admin_api/10-services_routes_spec.lua @@ -55,7 +55,7 @@ for _, strategy in helpers.each_strategy() do local res = client:post("/services", { body = { protocol = "http", - host = "service.com", + host = "service.test", }, headers = { ["Content-Type"] = content_type }, }) @@ -67,7 +67,7 @@ for _, strategy in helpers.each_strategy() do assert.is_number(json.updated_at) assert.equals(cjson.null, json.name) assert.equals("http", json.protocol) - assert.equals("service.com", json.host) + assert.equals("service.test", json.host) assert.equals(80, json.port) assert.equals(60000, json.connect_timeout) assert.equals(60000, json.write_timeout) @@ -79,7 +79,7 @@ for _, strategy in helpers.each_strategy() do return function() local res = client:post("/services", { body = { - url = "http://service.com/", + url = "http://service.test/", }, headers = { ["Content-Type"] = content_type }, }) @@ -91,7 +91,7 @@ for _, strategy in helpers.each_strategy() do assert.is_number(json.updated_at) assert.equals(cjson.null, json.name) assert.equals("http", json.protocol) - 
assert.equals("service.com", json.host) + assert.equals("service.test", json.host) assert.equals("/", json.path) assert.equals(80, json.port) assert.equals(60000, json.connect_timeout) @@ -104,7 +104,7 @@ for _, strategy in helpers.each_strategy() do return function() local res = client:post("/services", { body = { - url = "https://service.com/", + url = "https://service.test/", }, headers = { ["Content-Type"] = content_type }, }) @@ -116,7 +116,7 @@ for _, strategy in helpers.each_strategy() do assert.is_number(json.updated_at) assert.equals(cjson.null, json.name) assert.equals("https", json.protocol) - assert.equals("service.com", json.host) + assert.equals("service.test", json.host) assert.equals("/", json.path) assert.equals(443, json.port) assert.equals(60000, json.connect_timeout) @@ -472,18 +472,18 @@ for _, strategy in helpers.each_strategy() do return function() local service = db.services:insert({ protocol = "http", - host = "service.com", + host = "service.test", }) local route = db.routes:insert({ protocol = "http", - hosts = { "service.com" }, + hosts = { "service.test" }, service = service, }) local _ = db.routes:insert({ protocol = "http", - hosts = { "service.com" }, + hosts = { "service.test" }, }) local res = client:get("/services/" .. service.id .. "/routes", { @@ -880,7 +880,7 @@ for _, strategy in helpers.each_strategy() do -- Invalid parameter res = client:post("/services", { body = { - host = "example.com", + host = "example.test", protocol = "foo", }, headers = { ["Content-Type"] = content_type } diff --git a/spec/02-integration/04-admin_api/15-off_spec.lua b/spec/02-integration/04-admin_api/15-off_spec.lua index 61ec17c8fe28..7373a82b3564 100644 --- a/spec/02-integration/04-admin_api/15-off_spec.lua +++ b/spec/02-integration/04-admin_api/15-off_spec.lua @@ -82,7 +82,7 @@ describe("Admin API #off", function() local res = client:post("/routes", { body = { protocols = { "http" }, - hosts = { "my.route.com" }, + hosts = { "my.route.test" }, service = { id = utils.uuid() }, }, headers = { ["Content-Type"] = content_type } @@ -108,7 +108,7 @@ describe("Admin API #off", function() body = { protocols = { "http" }, methods = { "GET", "POST", "PATCH" }, - hosts = { "foo.api.com", "bar.api.com" }, + hosts = { "foo.api.test", "bar.api.test" }, paths = { "/foo", "/bar" }, service = { id = utils.uuid() }, }, diff --git a/spec/02-integration/04-admin_api/22-debug_spec.lua b/spec/02-integration/04-admin_api/22-debug_spec.lua index 9ab63b6696be..620702bfe64c 100644 --- a/spec/02-integration/04-admin_api/22-debug_spec.lua +++ b/spec/02-integration/04-admin_api/22-debug_spec.lua @@ -22,7 +22,7 @@ describe("Admin API - Kong debug route with strategy #" .. strategy, function() assert(bp.routes:insert { protocols = { "http" }, - hosts = { "mockbin.com" }, + hosts = { "mockbin.test" }, paths = { "/" }, service = service_mockbin, }) @@ -148,7 +148,7 @@ describe("Admin API - Kong debug route with strategy #" .. strategy, function() method = "GET", path = "/", headers = { - Host = "mockbin.com", + Host = "mockbin.test", }, }) body = assert.res_status(502, res) @@ -162,7 +162,7 @@ describe("Admin API - Kong debug route with strategy #" .. strategy, function() method = "GET", path = "/", headers = { - Host = "mockbin.com", + Host = "mockbin.test", }, }) assert.res_status(502, res) @@ -199,7 +199,7 @@ describe("Admin API - Kong debug route with strategy #" .. 
strategy, function() method = "GET", path = "/", headers = { - Host = "mockbin.com", + Host = "mockbin.test", }, }) body = assert.res_status(502, res) @@ -213,7 +213,7 @@ describe("Admin API - Kong debug route with strategy #" .. strategy, function() method = "GET", path = "/", headers = { - Host = "mockbin.com", + Host = "mockbin.test", }, }) assert.res_status(502, res) @@ -578,7 +578,7 @@ describe("Admin API - Kong debug route with strategy #" .. strategy, function() method = "GET", path = "/", headers = { - Host = "mockbin.com", + Host = "mockbin.test", }, }) body = assert.res_status(502, res) @@ -592,7 +592,7 @@ describe("Admin API - Kong debug route with strategy #" .. strategy, function() method = "GET", path = "/", headers = { - Host = "mockbin.com", + Host = "mockbin.test", }, }) assert.res_status(502, res) @@ -617,7 +617,7 @@ describe("Admin API - Kong debug route with strategy #" .. strategy, function() method = "GET", path = "/", headers = { - Host = "mockbin.com", + Host = "mockbin.test", }, }) body = assert.res_status(502, res) @@ -631,7 +631,7 @@ describe("Admin API - Kong debug route with strategy #" .. strategy, function() method = "GET", path = "/", headers = { - Host = "mockbin.com", + Host = "mockbin.test", }, }) assert.res_status(502, res) diff --git a/spec/02-integration/05-proxy/02-router_spec.lua b/spec/02-integration/05-proxy/02-router_spec.lua index d8c1ad223291..74d4f491bee3 100644 --- a/spec/02-integration/05-proxy/02-router_spec.lua +++ b/spec/02-integration/05-proxy/02-router_spec.lua @@ -1484,7 +1484,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/", headers = { - ["Host"] = "domain.org", + ["Host"] = "domain.test", ["version"] = "v1", ["kong-debug"] = 1, } @@ -1502,7 +1502,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/", headers = { - ["Host"] = "domain.org", + ["Host"] = "domain.test", ["version"] = "v3", ["kong-debug"] = 1, } @@ -1531,7 +1531,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/", headers = { - ["Host"] = "domain.org", + ["Host"] = "domain.test", ["version"] = "v1", ["kong-debug"] = 1, } @@ -1553,7 +1553,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/", headers = { - ["Host"] = "domain.org", + ["Host"] = "domain.test", ["Version"] = "v3", ["kong-debug"] = 1, } @@ -1592,7 +1592,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/", headers = { - ["Host"] = "domain.org", + ["Host"] = "domain.test", ["version"] = "v3", ["location"] = "us-east", ["kong-debug"] = 1, @@ -1611,7 +1611,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/", headers = { - ["Host"] = "domain.org", + ["Host"] = "domain.test", ["version"] = "v3", ["kong-debug"] = 1, } diff --git a/spec/02-integration/05-proxy/03-upstream_headers_spec.lua b/spec/02-integration/05-proxy/03-upstream_headers_spec.lua index de794afe7ebf..3132d0a6bfd0 100644 --- a/spec/02-integration/05-proxy/03-upstream_headers_spec.lua +++ b/spec/02-integration/05-proxy/03-upstream_headers_spec.lua @@ -45,11 +45,11 @@ for _, strategy in helpers.each_strategy() do insert_routes { { protocols = { "http" }, - hosts = { "headers-inspect.com" }, + hosts = { "headers-inspect.test" }, }, { protocols = { "http" }, - hosts = { "preserved.com" }, + hosts = { "preserved.test" }, preserve_host = true, }, { @@ -127,7 +127,7 @@ for _, strategy in helpers.each_strategy() do it("are removed from request", function() local headers = request_headers({ ["Connection"] = 
"X-Foo, X-Bar", - ["Host"] = "headers-inspect.com", + ["Host"] = "headers-inspect.test", ["Keep-Alive"] = "timeout=5, max=1000", ["Proxy"] = "Remove-Me", -- See: https://httpoxy.org/ ["Proxy-Connection"] = "close", @@ -164,7 +164,7 @@ for _, strategy in helpers.each_strategy() do local res = assert(proxy_client:send { method = "GET", headers = { - ["Host"] = "headers-inspect.com", + ["Host"] = "headers-inspect.test", }, path = "/hop-by-hop", }) @@ -193,7 +193,7 @@ for _, strategy in helpers.each_strategy() do local res = assert(proxy_client:send { method = "GET", headers = { - ["Host"] = "headers-inspect.com", + ["Host"] = "headers-inspect.test", ["TE"] = "trailers" }, path = "/hop-by-hop", @@ -210,7 +210,7 @@ for _, strategy in helpers.each_strategy() do local res = assert(proxy_client:send { method = "GET", headers = { - ["Host"] = "headers-inspect.com", + ["Host"] = "headers-inspect.test", ["Connection"] = "keep-alive, Upgrade", ["Upgrade"] = "websocket" }, @@ -277,7 +277,7 @@ for _, strategy in helpers.each_strategy() do }) assert(bp.routes:insert { - hosts = { "headers-charset.com" }, + hosts = { "headers-charset.test" }, service = service, }) @@ -298,7 +298,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/nocharset", headers = { - ["Host"] = "headers-charset.com", + ["Host"] = "headers-charset.test", } }) @@ -311,7 +311,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/charset", headers = { - ["Host"] = "headers-charset.com", + ["Host"] = "headers-charset.test", } }) @@ -333,7 +333,7 @@ for _, strategy in helpers.each_strategy() do describe("X-Real-IP", function() it("should be added if not present in request", function() local headers = request_headers { - ["Host"] = "headers-inspect.com", + ["Host"] = "headers-inspect.test", } assert.equal("127.0.0.1", headers["x-real-ip"]) @@ -341,7 +341,7 @@ for _, strategy in helpers.each_strategy() do it("should be replaced if present in request", function() local headers = request_headers { - ["Host"] = "headers-inspect.com", + ["Host"] = "headers-inspect.test", ["X-Real-IP"] = "10.0.0.1", } @@ -352,7 +352,7 @@ for _, strategy in helpers.each_strategy() do describe("X-Forwarded-For", function() it("should be added if not present in request", function() local headers = request_headers { - ["Host"] = "headers-inspect.com", + ["Host"] = "headers-inspect.test", } assert.equal("127.0.0.1", headers["x-forwarded-for"]) @@ -360,7 +360,7 @@ for _, strategy in helpers.each_strategy() do it("should be appended if present in request", function() local headers = request_headers { - ["Host"] = "headers-inspect.com", + ["Host"] = "headers-inspect.test", ["X-Forwarded-For"] = "10.0.0.1", } @@ -371,7 +371,7 @@ for _, strategy in helpers.each_strategy() do describe("X-Forwarded-Proto", function() it("should be added if not present in request", function() local headers = request_headers { - ["Host"] = "headers-inspect.com", + ["Host"] = "headers-inspect.test", } assert.equal("http", headers["x-forwarded-proto"]) @@ -379,7 +379,7 @@ for _, strategy in helpers.each_strategy() do it("should be replaced if present in request", function() local headers = request_headers { - ["Host"] = "headers-inspect.com", + ["Host"] = "headers-inspect.test", ["X-Forwarded-Proto"] = "https", } @@ -390,26 +390,26 @@ for _, strategy in helpers.each_strategy() do describe("X-Forwarded-Host", function() it("should be added if not present in request", function() local headers = request_headers { - ["Host"] = "headers-inspect.com", + 
["Host"] = "headers-inspect.test", } - assert.equal("headers-inspect.com", headers["x-forwarded-host"]) + assert.equal("headers-inspect.test", headers["x-forwarded-host"]) end) it("should be replaced if present in request", function() local headers = request_headers { - ["Host"] = "headers-inspect.com", - ["X-Forwarded-Host"] = "example.com", + ["Host"] = "headers-inspect.test", + ["X-Forwarded-Host"] = "example.test", } - assert.equal("headers-inspect.com", headers["x-forwarded-host"]) + assert.equal("headers-inspect.test", headers["x-forwarded-host"]) end) end) describe("X-Forwarded-Port", function() it("should be added if not present in request", function() local headers = request_headers { - ["Host"] = "headers-inspect.com", + ["Host"] = "headers-inspect.test", } assert.equal(helpers.get_proxy_port(false), tonumber(headers["x-forwarded-port"])) @@ -417,7 +417,7 @@ for _, strategy in helpers.each_strategy() do it("should be replaced if present in request", function() local headers = request_headers { - ["Host"] = "headers-inspect.com", + ["Host"] = "headers-inspect.test", ["X-Forwarded-Port"] = "80", } @@ -428,7 +428,7 @@ for _, strategy in helpers.each_strategy() do describe("X-Forwarded-Path", function() it("should be added if not present in request", function() local headers = request_headers { - ["Host"] = "headers-inspect.com", + ["Host"] = "headers-inspect.test", } assert.equal("/", headers["x-forwarded-path"]) @@ -436,7 +436,7 @@ for _, strategy in helpers.each_strategy() do it("should be replaced if present in request", function() local headers = request_headers { - ["Host"] = "headers-inspect.com", + ["Host"] = "headers-inspect.test", ["X-Forwarded-Path"] = "/replaced", } @@ -475,33 +475,33 @@ for _, strategy in helpers.each_strategy() do describe("with the downstream host preserved", function() it("should be added if not present in request while preserving the downstream host", function() local headers = request_headers { - ["Host"] = "preserved.com", + ["Host"] = "preserved.test", } - assert.equal("preserved.com", headers["host"]) + assert.equal("preserved.test", headers["host"]) assert.equal("127.0.0.1", headers["x-real-ip"]) assert.equal("127.0.0.1", headers["x-forwarded-for"]) assert.equal("http", headers["x-forwarded-proto"]) - assert.equal("preserved.com", headers["x-forwarded-host"]) + assert.equal("preserved.test", headers["x-forwarded-host"]) assert.equal("/", headers["x-forwarded-path"]) assert.equal(helpers.get_proxy_port(false), tonumber(headers["x-forwarded-port"])) end) it("should be added if present in request while preserving the downstream host", function() local headers = request_headers { - ["Host"] = "preserved.com", + ["Host"] = "preserved.test", ["X-Real-IP"] = "10.0.0.1", ["X-Forwarded-For"] = "10.0.0.1", ["X-Forwarded-Proto"] = "https", - ["X-Forwarded-Host"] = "example.com", + ["X-Forwarded-Host"] = "example.test", ["X-Forwarded-Port"] = "80", } - assert.equal("preserved.com", headers["host"]) + assert.equal("preserved.test", headers["host"]) assert.equal("127.0.0.1", headers["x-real-ip"]) assert.equal("10.0.0.1, 127.0.0.1", headers["x-forwarded-for"]) assert.equal("http", headers["x-forwarded-proto"]) - assert.equal("preserved.com", headers["x-forwarded-host"]) + assert.equal("preserved.test", headers["x-forwarded-host"]) assert.equal(helpers.get_proxy_port(false), tonumber(headers["x-forwarded-port"])) assert.equal("/", headers["x-forwarded-path"]) end) @@ -510,7 +510,7 @@ for _, strategy in helpers.each_strategy() do describe("with the downstream 
host discarded", function() it("should be added if not present in request while discarding the downstream host", function() local headers = request_headers { - ["Host"] = "headers-inspect.com", + ["Host"] = "headers-inspect.test", } assert.equal(helpers.mock_upstream_host .. ":" .. @@ -519,18 +519,18 @@ for _, strategy in helpers.each_strategy() do assert.equal(helpers.mock_upstream_host, headers["x-real-ip"]) assert.equal(helpers.mock_upstream_host, headers["x-forwarded-for"]) assert.equal("http", headers["x-forwarded-proto"]) - assert.equal("headers-inspect.com", headers["x-forwarded-host"]) + assert.equal("headers-inspect.test", headers["x-forwarded-host"]) assert.equal(helpers.get_proxy_port(false), tonumber(headers["x-forwarded-port"])) assert.equal("/", headers["x-forwarded-path"]) end) it("if present in request while discarding the downstream host", function() local headers = request_headers { - ["Host"] = "headers-inspect.com", + ["Host"] = "headers-inspect.test", ["X-Real-IP"] = "10.0.0.1", ["X-Forwarded-For"] = "10.0.0.1", ["X-Forwarded-Proto"] = "https", - ["X-Forwarded-Host"] = "example.com", + ["X-Forwarded-Host"] = "example.test", ["X-Forwarded-Port"] = "80", } @@ -540,7 +540,7 @@ for _, strategy in helpers.each_strategy() do assert.equal("127.0.0.1", headers["x-real-ip"]) assert.equal("10.0.0.1, 127.0.0.1", headers["x-forwarded-for"]) assert.equal("http", headers["x-forwarded-proto"]) - assert.equal("headers-inspect.com", headers["x-forwarded-host"]) + assert.equal("headers-inspect.test", headers["x-forwarded-host"]) assert.equal(helpers.get_proxy_port(false), tonumber(headers["x-forwarded-port"])) assert.equal("/", headers["x-forwarded-path"]) end) @@ -561,7 +561,7 @@ for _, strategy in helpers.each_strategy() do describe("X-Real-IP", function() it("should be added if not present in request", function() local headers = request_headers { - ["Host"] = "headers-inspect.com", + ["Host"] = "headers-inspect.test", } assert.equal("127.0.0.1", headers["x-real-ip"]) @@ -569,7 +569,7 @@ for _, strategy in helpers.each_strategy() do it("should be forwarded if present in request", function() local headers = request_headers { - ["Host"] = "headers-inspect.com", + ["Host"] = "headers-inspect.test", ["X-Real-IP"] = "10.0.0.1", } @@ -581,7 +581,7 @@ for _, strategy in helpers.each_strategy() do it("should be added if not present in request", function() local headers = request_headers { - ["Host"] = "headers-inspect.com", + ["Host"] = "headers-inspect.test", } assert.equal("127.0.0.1", headers["x-forwarded-for"]) @@ -589,7 +589,7 @@ for _, strategy in helpers.each_strategy() do it("should be appended if present in request", function() local headers = request_headers { - ["Host"] = "headers-inspect.com", + ["Host"] = "headers-inspect.test", ["X-Forwarded-For"] = "10.0.0.1", } @@ -601,7 +601,7 @@ for _, strategy in helpers.each_strategy() do describe("X-Forwarded-Proto", function() it("should be added if not present in request", function() local headers = request_headers { - ["Host"] = "headers-inspect.com", + ["Host"] = "headers-inspect.test", } assert.equal("http", headers["x-forwarded-proto"]) @@ -609,7 +609,7 @@ for _, strategy in helpers.each_strategy() do it("should be forwarded if present in request", function() local headers = request_headers { - ["Host"] = "headers-inspect.com", + ["Host"] = "headers-inspect.test", ["X-Forwarded-Proto"] = "https", } @@ -620,26 +620,26 @@ for _, strategy in helpers.each_strategy() do describe("X-Forwarded-Host", function() it("should be added if not 
present in request", function() local headers = request_headers { - ["Host"] = "headers-inspect.com", + ["Host"] = "headers-inspect.test", } - assert.equal("headers-inspect.com", headers["x-forwarded-host"]) + assert.equal("headers-inspect.test", headers["x-forwarded-host"]) end) it("should be forwarded if present in request", function() local headers = request_headers { - ["Host"] = "headers-inspect.com", - ["X-Forwarded-Host"] = "example.com", + ["Host"] = "headers-inspect.test", + ["X-Forwarded-Host"] = "example.test", } - assert.equal("example.com", headers["x-forwarded-host"]) + assert.equal("example.test", headers["x-forwarded-host"]) end) end) describe("X-Forwarded-Port", function() it("should be added if not present in request", function() local headers = request_headers { - ["Host"] = "headers-inspect.com", + ["Host"] = "headers-inspect.test", } assert.equal(helpers.get_proxy_port(false), tonumber(headers["x-forwarded-port"])) @@ -647,7 +647,7 @@ for _, strategy in helpers.each_strategy() do it("should be forwarded if present in request", function() local headers = request_headers { - ["Host"] = "headers-inspect.com", + ["Host"] = "headers-inspect.test", ["X-Forwarded-Port"] = "80", } @@ -658,7 +658,7 @@ for _, strategy in helpers.each_strategy() do describe("X-Forwarded-Path", function() it("should be added if not present in request", function() local headers = request_headers { - ["Host"] = "headers-inspect.com", + ["Host"] = "headers-inspect.test", } assert.equal("/", headers["x-forwarded-path"]) @@ -666,7 +666,7 @@ for _, strategy in helpers.each_strategy() do it("should be forwarded if present in request", function() local headers = request_headers { - ["Host"] = "headers-inspect.com", + ["Host"] = "headers-inspect.test", ["X-Forwarded-Path"] = "/original-path", } @@ -706,7 +706,7 @@ for _, strategy in helpers.each_strategy() do describe("X-Real-IP", function() it("should be added if not present in request", function() local headers = request_headers { - ["Host"] = "headers-inspect.com", + ["Host"] = "headers-inspect.test", } assert.equal("127.0.0.1", headers["x-real-ip"]) @@ -714,7 +714,7 @@ for _, strategy in helpers.each_strategy() do it("should be replaced if present in request", function() local headers = request_headers { - ["Host"] = "headers-inspect.com", + ["Host"] = "headers-inspect.test", ["X-Real-IP"] = "10.0.0.1", } @@ -726,7 +726,7 @@ for _, strategy in helpers.each_strategy() do it("should be added if not present in request", function() local headers = request_headers { - ["Host"] = "headers-inspect.com", + ["Host"] = "headers-inspect.test", } assert.equal("127.0.0.1", headers["x-forwarded-for"]) @@ -734,7 +734,7 @@ for _, strategy in helpers.each_strategy() do it("should be appended if present in request", function() local headers = request_headers { - ["Host"] = "headers-inspect.com", + ["Host"] = "headers-inspect.test", ["X-Forwarded-For"] = "10.0.0.1", } @@ -746,7 +746,7 @@ for _, strategy in helpers.each_strategy() do describe("X-Forwarded-Proto", function() it("should be added if not present in request", function() local headers = request_headers { - ["Host"] = "headers-inspect.com", + ["Host"] = "headers-inspect.test", } assert.equal("http", headers["x-forwarded-proto"]) @@ -754,7 +754,7 @@ for _, strategy in helpers.each_strategy() do it("should be replaced if present in request", function() local headers = request_headers { - ["Host"] = "headers-inspect.com", + ["Host"] = "headers-inspect.test", ["X-Forwarded-Proto"] = "https", } @@ -765,26 +765,26 @@ 
for _, strategy in helpers.each_strategy() do describe("X-Forwarded-Host", function() it("should be added if not present in request", function() local headers = request_headers { - ["Host"] = "headers-inspect.com", + ["Host"] = "headers-inspect.test", } - assert.equal("headers-inspect.com", headers["x-forwarded-host"]) + assert.equal("headers-inspect.test", headers["x-forwarded-host"]) end) it("should be replaced if present in request", function() local headers = request_headers { - ["Host"] = "headers-inspect.com", - ["X-Forwarded-Host"] = "example.com", + ["Host"] = "headers-inspect.test", + ["X-Forwarded-Host"] = "example.test", } - assert.equal("headers-inspect.com", headers["x-forwarded-host"]) + assert.equal("headers-inspect.test", headers["x-forwarded-host"]) end) end) describe("X-Forwarded-Port", function() it("should be added if not present in request", function() local headers = request_headers { - ["Host"] = "headers-inspect.com", + ["Host"] = "headers-inspect.test", } assert.equal(helpers.get_proxy_port(false), tonumber(headers["x-forwarded-port"])) @@ -792,7 +792,7 @@ for _, strategy in helpers.each_strategy() do it("should be replaced if present in request", function() local headers = request_headers { - ["Host"] = "headers-inspect.com", + ["Host"] = "headers-inspect.test", ["X-Forwarded-Port"] = "80", } @@ -803,7 +803,7 @@ for _, strategy in helpers.each_strategy() do describe("X-Forwarded-Path", function() it("should be added if not present in request", function() local headers = request_headers { - ["Host"] = "headers-inspect.com", + ["Host"] = "headers-inspect.test", } assert.equal("/", headers["x-forwarded-path"]) @@ -811,7 +811,7 @@ for _, strategy in helpers.each_strategy() do it("should be replaced if present in request", function() local headers = request_headers { - ["Host"] = "headers-inspect.com", + ["Host"] = "headers-inspect.test", ["X-Forwarded-Path"] = "/untrusted", } @@ -863,7 +863,7 @@ for _, strategy in helpers.each_strategy() do describe("X-Real-IP and X-Forwarded-For", function() it("should be added if not present in request", function() local headers = request_headers { - ["Host"] = "headers-inspect.com", + ["Host"] = "headers-inspect.test", } assert.equal("127.0.0.1", headers["x-real-ip"]) @@ -872,7 +872,7 @@ for _, strategy in helpers.each_strategy() do it("should be changed according to rules if present in request", function() local headers = request_headers { - ["Host"] = "headers-inspect.com", + ["Host"] = "headers-inspect.test", ["X-Forwarded-For"] = "127.0.0.1, 10.0.0.1, 192.168.0.1, 127.0.0.1, 172.16.0.1", ["X-Real-IP"] = "10.0.0.2", } @@ -885,7 +885,7 @@ for _, strategy in helpers.each_strategy() do describe("X-Forwarded-Port", function() it("should be forwarded even if X-Forwarded-For header has a port in it", function() local headers = request_headers { - ["Host"] = "headers-inspect.com", + ["Host"] = "headers-inspect.test", ["X-Forwarded-For"] = "127.0.0.1:14, 10.0.0.1:15, 192.168.0.1:16, 127.0.0.1:17, 172.16.0.1:18", ["X-Real-IP"] = "10.0.0.2", ["X-Forwarded-Port"] = "14", @@ -898,7 +898,7 @@ for _, strategy in helpers.each_strategy() do pending("should take a port from X-Forwarded-For header if it has a port in it", function() -- local headers = request_headers { - -- ["Host"] = "headers-inspect.com", + -- ["Host"] = "headers-inspect.test", -- ["X-Forwarded-For"] = "127.0.0.1:14, 10.0.0.1:15, 192.168.0.1:16, 127.0.0.1:17, 172.16.0.1:18", -- ["X-Real-IP"] = "10.0.0.2", -- } @@ -925,7 +925,7 @@ for _, strategy in helpers.each_strategy() do 
describe("X-Real-IP and X-Forwarded-For", function() it("should be added if not present in request", function() local headers = request_headers { - ["Host"] = "headers-inspect.com", + ["Host"] = "headers-inspect.test", } assert.equal("127.0.0.1", headers["x-real-ip"]) @@ -934,7 +934,7 @@ for _, strategy in helpers.each_strategy() do it("should be changed according to rules if present in request", function() local headers = request_headers { - ["Host"] = "headers-inspect.com", + ["Host"] = "headers-inspect.test", ["X-Forwarded-For"] = "10.0.0.1, 127.0.0.2, 10.0.0.1, 192.168.0.1, 172.16.0.1", ["X-Real-IP"] = "10.0.0.2", } @@ -947,7 +947,7 @@ for _, strategy in helpers.each_strategy() do describe("X-Forwarded-Port", function() it("should be replaced even if X-Forwarded-Port and X-Forwarded-For headers have a port in it", function() local headers = request_headers { - ["Host"] = "headers-inspect.com", + ["Host"] = "headers-inspect.test", ["X-Forwarded-For"] = "127.0.0.1:14, 10.0.0.1:15, 192.168.0.1:16, 127.0.0.1:17, 172.16.0.1:18", ["X-Real-IP"] = "10.0.0.2", ["X-Forwarded-Port"] = "14", @@ -960,7 +960,7 @@ for _, strategy in helpers.each_strategy() do it("should not take a port from X-Forwarded-For header if it has a port in it", function() local headers = request_headers { - ["Host"] = "headers-inspect.com", + ["Host"] = "headers-inspect.test", ["X-Forwarded-For"] = "127.0.0.1:14, 10.0.0.1:15, 192.168.0.1:16, 127.0.0.1:17, 172.16.0.1:18", ["X-Real-IP"] = "10.0.0.2", } @@ -994,7 +994,7 @@ for _, strategy in helpers.each_strategy() do local sock = ngx.socket.tcp() local request = "PROXY TCP4 192.168.0.1 " .. helpers.get_proxy_ip(false) .. " 56324 " .. helpers.get_proxy_port(false) .. "\r\n" .. "GET / HTTP/1.1\r\n" .. - "Host: headers-inspect.com\r\n" .. + "Host: headers-inspect.test\r\n" .. "Connection: close\r\n" .. "\r\n" @@ -1021,7 +1021,7 @@ for _, strategy in helpers.each_strategy() do local sock = ngx.socket.tcp() local request = "PROXY TCP4 192.168.0.1 " .. helpers.get_proxy_ip(false) .. " 56324 " .. helpers.get_proxy_port(false) .. "\r\n" .. "GET / HTTP/1.1\r\n" .. - "Host: headers-inspect.com\r\n" .. + "Host: headers-inspect.test\r\n" .. "Connection: close\r\n" .. "X-Real-IP: 10.0.0.2\r\n" .. "X-Forwarded-For: 10.0.0.1, 127.0.0.2, 10.0.0.1, 192.168.0.1, 172.16.0.1\r\n" .. @@ -1051,7 +1051,7 @@ for _, strategy in helpers.each_strategy() do local sock = ngx.socket.tcp() local request = "PROXY TCP4 192.168.0.1 " .. helpers.get_proxy_ip(false) .. " 56324 " .. helpers.get_proxy_port(false) .. "\r\n" .. "GET / HTTP/1.1\r\n" .. - "Host: headers-inspect.com\r\n" .. + "Host: headers-inspect.test\r\n" .. "Connection: close\r\n" .. "X-Real-IP: 10.0.0.2\r\n" .. "X-Forwarded-For: 127.0.0.1:14, 10.0.0.1:15, 192.168.0.1:16, 127.0.0.1:17, 172.16.0.1:18\r\n" .. @@ -1100,7 +1100,7 @@ for _, strategy in helpers.each_strategy() do local sock = ngx.socket.tcp() local request = "PROXY TCP4 192.168.0.1 " .. proxy_ip .. " 56324 " .. proxy_port .. "\r\n" .. "GET / HTTP/1.1\r\n" .. - "Host: headers-inspect.com\r\n" .. + "Host: headers-inspect.test\r\n" .. "Connection: close\r\n" .. "\r\n" @@ -1127,7 +1127,7 @@ for _, strategy in helpers.each_strategy() do local sock = ngx.socket.tcp() local request = "PROXY TCP4 192.168.0.1 " .. proxy_ip .. " 56324 " .. proxy_port .. "\r\n" .. "GET / HTTP/1.1\r\n" .. - "Host: headers-inspect.com\r\n" .. + "Host: headers-inspect.test\r\n" .. "Connection: close\r\n" .. "X-Real-IP: 10.0.0.2\r\n" .. "X-Forwarded-For: 10.0.0.1, 127.0.0.2, 10.0.0.1, 192.168.0.1, 172.16.0.1\r\n" .. 
@@ -1157,7 +1157,7 @@ for _, strategy in helpers.each_strategy() do local sock = ngx.socket.tcp() local request = "PROXY TCP4 192.168.0.1 " .. proxy_ip .. " 56324 " .. proxy_port .. "\r\n" .. "GET / HTTP/1.1\r\n" .. - "Host: headers-inspect.com\r\n" .. + "Host: headers-inspect.test\r\n" .. "Connection: close\r\n" .. "X-Real-IP: 10.0.0.2\r\n" .. "X-Forwarded-For: 127.0.0.1:14, 10.0.0.1:15, 192.168.0.1:16, 127.0.0.1:17, 172.16.0.1:18\r\n" .. @@ -1200,7 +1200,7 @@ for _, strategy in helpers.each_strategy() do describe("X-Forwarded-Port", function() it("should be added if not present in request", function() local headers = request_headers { - ["Host"] = "headers-inspect.com", + ["Host"] = "headers-inspect.test", } assert.equal(80, tonumber(headers["x-forwarded-port"])) @@ -1208,7 +1208,7 @@ for _, strategy in helpers.each_strategy() do it("should be replaced if present in request", function() local headers = request_headers { - ["Host"] = "headers-inspect.com", + ["Host"] = "headers-inspect.test", ["X-Forwarded-Port"] = "81", } diff --git a/spec/02-integration/05-proxy/04-plugins_triggering_spec.lua b/spec/02-integration/05-proxy/04-plugins_triggering_spec.lua index 781abf4fea9d..6eb231eecc11 100644 --- a/spec/02-integration/05-proxy/04-plugins_triggering_spec.lua +++ b/spec/02-integration/05-proxy/04-plugins_triggering_spec.lua @@ -98,7 +98,7 @@ for _, strategy in helpers.each_strategy() do } bp.routes:insert { - hosts = { "global1.com" }, + hosts = { "global1.test" }, protocols = { "http" }, service = service1, } @@ -120,7 +120,7 @@ for _, strategy in helpers.each_strategy() do } local route1 = bp.routes:insert { - hosts = { "api1.com" }, + hosts = { "api1.test" }, protocols = { "http" }, service = service2, } @@ -151,7 +151,7 @@ for _, strategy in helpers.each_strategy() do } local route2 = bp.routes:insert { - hosts = { "api2.com" }, + hosts = { "api2.test" }, protocols = { "http" }, service = service3, } @@ -172,7 +172,7 @@ for _, strategy in helpers.each_strategy() do } local route3 = bp.routes:insert { - hosts = { "api3.com" }, + hosts = { "api3.test" }, protocols = { "http" }, service = service4, } @@ -238,7 +238,7 @@ for _, strategy in helpers.each_strategy() do local res = assert(proxy_client:send { method = "GET", path = "/status/200", - headers = { Host = "global1.com" } + headers = { Host = "global1.test" } }) assert.res_status(401, res) end) @@ -247,7 +247,7 @@ for _, strategy in helpers.each_strategy() do local res = assert(proxy_client:send { method = "GET", path = "/status/200?apikey=secret1", - headers = { Host = "global1.com" } + headers = { Host = "global1.test" } }) assert.res_status(200, res) assert.equal("1", res.headers["x-ratelimit-limit-hour"]) @@ -257,7 +257,7 @@ for _, strategy in helpers.each_strategy() do local res = assert(proxy_client:send { method = "GET", path = "/status/200?apikey=secret1", - headers = { Host = "api1.com" } + headers = { Host = "api1.test" } }) assert.res_status(200, res) assert.equal("2", res.headers["x-ratelimit-limit-hour"]) @@ -267,7 +267,7 @@ for _, strategy in helpers.each_strategy() do local res = assert(proxy_client:send { method = "GET", path = "/status/200?apikey=secret2", - headers = { Host = "global1.com" } + headers = { Host = "global1.test" } }) assert.res_status(200, res) assert.equal("3", res.headers["x-ratelimit-limit-hour"]) @@ -277,7 +277,7 @@ for _, strategy in helpers.each_strategy() do local res = assert(proxy_client:send { method = "GET", path = "/status/200?apikey=secret2", - headers = { Host = "api2.com" } + headers = 
{ Host = "api2.test" } }) assert.res_status(200, res) assert.equal("4", res.headers["x-ratelimit-limit-hour"]) @@ -287,7 +287,7 @@ for _, strategy in helpers.each_strategy() do local res = assert(proxy_client:send { method = "GET", path = "/status/200", - headers = { Host = "api3.com" } + headers = { Host = "api3.test" } }) assert.res_status(200, res) assert.equal("5", res.headers["x-ratelimit-limit-hour"]) @@ -1089,7 +1089,7 @@ for _, strategy in helpers.each_strategy() do }) local route = assert(bp.routes:insert { - hosts = { "runs-init-worker.org" }, + hosts = { "runs-init-worker.test" }, protocols = { "http" }, service = service, }) @@ -1123,7 +1123,7 @@ for _, strategy in helpers.each_strategy() do it("is executed", function() local res = assert(proxy_client:get("/status/400", { headers = { - ["Host"] = "runs-init-worker.org", + ["Host"] = "runs-init-worker.test", } })) @@ -1168,7 +1168,7 @@ for _, strategy in helpers.each_strategy() do }) route = assert(bp.routes:insert { - hosts = { "runs-init-worker.org" }, + hosts = { "runs-init-worker.test" }, protocols = { "http" }, service = service, }) @@ -1215,7 +1215,7 @@ for _, strategy in helpers.each_strategy() do helpers.wait_until(function() res = assert(proxy_client:get("/status/400", { headers = { - ["Host"] = "runs-init-worker.org", + ["Host"] = "runs-init-worker.test", } })) diff --git a/spec/02-integration/05-proxy/05-dns_spec.lua b/spec/02-integration/05-proxy/05-dns_spec.lua index d3ce2d0f266a..9607352a26ce 100644 --- a/spec/02-integration/05-proxy/05-dns_spec.lua +++ b/spec/02-integration/05-proxy/05-dns_spec.lua @@ -58,7 +58,7 @@ for _, strategy in helpers.each_strategy() do } bp.routes:insert { - hosts = { "retries.com" }, + hosts = { "retries.test" }, service = service } @@ -86,7 +86,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/", headers = { - host = "retries.com" + host = "retries.test" } } assert.response(r).has.status(502) @@ -115,7 +115,7 @@ for _, strategy in helpers.each_strategy() do } bp.routes:insert { - hosts = { "retries.com" }, + hosts = { "retries.test" }, protocols = { "http" }, service = service } @@ -139,7 +139,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/", headers = { - host = "retries.com" + host = "retries.test" } } assert.response(r).has.status(503) diff --git a/spec/02-integration/05-proxy/10-balancer/01-healthchecks_spec.lua b/spec/02-integration/05-proxy/10-balancer/01-healthchecks_spec.lua index d8db30429383..9de7aacc4f18 100644 --- a/spec/02-integration/05-proxy/10-balancer/01-healthchecks_spec.lua +++ b/spec/02-integration/05-proxy/10-balancer/01-healthchecks_spec.lua @@ -38,12 +38,12 @@ for _, strategy in helpers.each_strategy() do } fixtures.dns_mock:SRV { - name = "my.srv.test.com", - target = "a.my.srv.test.com", + name = "my.srv.test.test", + target = "a.my.srv.test.test", port = 80, -- port should fail to connect } fixtures.dns_mock:A { - name = "a.my.srv.test.com", + name = "a.my.srv.test.test", address = "127.0.0.1", } @@ -114,7 +114,7 @@ for _, strategy in helpers.each_strategy() do }) -- the following port will not be used, will be overwritten by -- the mocked SRV record. 
- bu.add_target(bp, upstream_id, "my.srv.test.com", 80) + bu.add_target(bp, upstream_id, "my.srv.test.test", 80) local api_host = bu.add_api(bp, upstream_name) bu.end_testcase_setup(strategy, bp) diff --git a/spec/02-integration/05-proxy/10-balancer/04-round-robin_spec.lua b/spec/02-integration/05-proxy/10-balancer/04-round-robin_spec.lua index 5e375132733c..070fab7da5a2 100644 --- a/spec/02-integration/05-proxy/10-balancer/04-round-robin_spec.lua +++ b/spec/02-integration/05-proxy/10-balancer/04-round-robin_spec.lua @@ -24,12 +24,12 @@ for _, consistency in ipairs(bu.consistencies) do } fixtures.dns_mock:SRV { - name = "my.srv.test.com", - target = "a.my.srv.test.com", + name = "my.srv.test.test", + target = "a.my.srv.test.test", port = 80, -- port should fail to connect } fixtures.dns_mock:A { - name = "a.my.srv.test.com", + name = "a.my.srv.test.test", address = "127.0.0.1", } diff --git a/spec/02-integration/05-proxy/10-balancer/05-recreate-request_spec.lua b/spec/02-integration/05-proxy/10-balancer/05-recreate-request_spec.lua index a2207de82e0b..540c6b0dfcfa 100644 --- a/spec/02-integration/05-proxy/10-balancer/05-recreate-request_spec.lua +++ b/spec/02-integration/05-proxy/10-balancer/05-recreate-request_spec.lua @@ -118,7 +118,7 @@ for _, strategy in helpers.each_strategy() do service = { id = service.id }, preserve_host = true, paths = { "/", }, - hosts = { "test.com" } + hosts = { "test.test" } }) bu.end_testcase_setup(strategy, bp, "strict") @@ -126,12 +126,12 @@ for _, strategy in helpers.each_strategy() do local res = assert(proxy_client:send { method = "GET", path = "/recreate_test", - headers = { ["Host"] = "test.com" }, + headers = { ["Host"] = "test.test" }, }) return pcall(function() local body = assert.response(res).has_status(200) - assert.equal("host is: test.com", body) + assert.equal("host is: test.test", body) end) end, 10) end) diff --git a/spec/02-integration/05-proxy/14-server_tokens_spec.lua b/spec/02-integration/05-proxy/14-server_tokens_spec.lua index b75ed2db205e..6cee745a1354 100644 --- a/spec/02-integration/05-proxy/14-server_tokens_spec.lua +++ b/spec/02-integration/05-proxy/14-server_tokens_spec.lua @@ -22,7 +22,7 @@ describe("headers [#" .. strategy .. "]", function() local function start(config) return function() bp.routes:insert { - hosts = { "headers-inspect.com" }, + hosts = { "headers-inspect.test" }, } local route = bp.routes:insert { @@ -89,7 +89,7 @@ describe("headers [#" .. strategy .. "]", function() method = "GET", path = "/get", headers = { - host = "headers-inspect.com", + host = "headers-inspect.test", } }) @@ -103,7 +103,7 @@ describe("headers [#" .. strategy .. "]", function() method = "GET", path = "/get", headers = { - host = "404.com", + host = "404.test", } }) @@ -141,7 +141,7 @@ describe("headers [#" .. strategy .. "]", function() method = "GET", path = "/get", headers = { - host = "headers-inspect.com", + host = "headers-inspect.test", } }) @@ -155,7 +155,7 @@ describe("headers [#" .. strategy .. "]", function() method = "GET", path = "/get", headers = { - host = "404.com", + host = "404.test", } }) @@ -179,7 +179,7 @@ describe("headers [#" .. strategy .. "]", function() method = "GET", path = "/get", headers = { - host = "headers-inspect.com", + host = "headers-inspect.test", } }) @@ -193,7 +193,7 @@ describe("headers [#" .. strategy .. "]", function() method = "GET", path = "/get", headers = { - host = "404.com", + host = "404.test", } }) @@ -217,7 +217,7 @@ describe("headers [#" .. strategy .. 
"]", function() method = "GET", path = "/get", headers = { - host = "headers-inspect.com", + host = "headers-inspect.test", } }) @@ -231,7 +231,7 @@ describe("headers [#" .. strategy .. "]", function() method = "GET", path = "/get", headers = { - host = "404.com", + host = "404.test", } }) @@ -255,7 +255,7 @@ describe("headers [#" .. strategy .. "]", function() method = "GET", path = "/get", headers = { - host = "headers-inspect.com", + host = "headers-inspect.test", } }) @@ -270,7 +270,7 @@ describe("headers [#" .. strategy .. "]", function() method = "GET", path = "/get", headers = { - host = "404.com", + host = "404.test", } }) @@ -290,7 +290,7 @@ describe("headers [#" .. strategy .. "]", function() local function start(config) return function() bp.routes:insert { - hosts = { "headers-inspect.com" }, + hosts = { "headers-inspect.test" }, } local service = bp.services:insert({ @@ -379,7 +379,7 @@ describe("headers [#" .. strategy .. "]", function() method = "GET", path = "/get", headers = { - host = "headers-inspect.com", + host = "headers-inspect.test", } }) @@ -394,7 +394,7 @@ describe("headers [#" .. strategy .. "]", function() method = "GET", path = "/get", headers = { - host = "404.com", + host = "404.test", } }) @@ -425,7 +425,7 @@ describe("headers [#" .. strategy .. "]", function() method = "GET", path = "/status/" .. code, headers = { - host = "headers-inspect.com", + host = "headers-inspect.test", } }) @@ -540,7 +540,7 @@ describe("headers [#" .. strategy .. "]", function() method = "GET", path = "/get", headers = { - host = "headers-inspect.com" + host = "headers-inspect.test" } }) @@ -555,7 +555,7 @@ describe("headers [#" .. strategy .. "]", function() method = "GET", path = "/get", headers = { - host = "404.com", + host = "404.test", } }) @@ -580,7 +580,7 @@ describe("headers [#" .. strategy .. "]", function() method = "GET", path = "/get", headers = { - host = "headers-inspect.com" + host = "headers-inspect.test" } }) @@ -595,7 +595,7 @@ describe("headers [#" .. strategy .. "]", function() method = "GET", path = "/get", headers = { - host = "404.com", + host = "404.test", } }) @@ -620,7 +620,7 @@ describe("headers [#" .. strategy .. "]", function() method = "GET", path = "/get", headers = { - host = "headers-inspect.com" + host = "headers-inspect.test" } }) @@ -635,7 +635,7 @@ describe("headers [#" .. strategy .. "]", function() method = "GET", path = "/get", headers = { - host = "404.com", + host = "404.test", } }) @@ -660,7 +660,7 @@ describe("headers [#" .. strategy .. "]", function() method = "GET", path = "/get", headers = { - host = "headers-inspect.com" + host = "headers-inspect.test" } }) @@ -675,7 +675,7 @@ describe("headers [#" .. strategy .. "]", function() method = "GET", path = "/get", headers = { - host = "404.com", + host = "404.test", } }) @@ -700,7 +700,7 @@ describe("headers [#" .. strategy .. "]", function() method = "GET", path = "/get", headers = { - host = "headers-inspect.com", + host = "headers-inspect.test", } }) @@ -715,7 +715,7 @@ describe("headers [#" .. strategy .. "]", function() method = "GET", path = "/get", headers = { - host = "404.com", + host = "404.test", } }) @@ -740,7 +740,7 @@ describe("headers [#" .. strategy .. "]", function() method = "GET", path = "/get", headers = { - host = "headers-inspect.com", + host = "headers-inspect.test", } }) @@ -756,7 +756,7 @@ describe("headers [#" .. strategy .. 
"]", function() method = "GET", path = "/get", headers = { - host = "404.com", + host = "404.test", } }) @@ -801,7 +801,7 @@ describe("headers [#" .. strategy .. "]", function() method = "GET", path = "/get", headers = { - host = "headers-inspect.com", + host = "headers-inspect.test", } }) @@ -817,7 +817,7 @@ describe("headers [#" .. strategy .. "]", function() method = "GET", path = "/get", headers = { - host = "404.com", + host = "404.test", } }) @@ -838,7 +838,7 @@ describe("headers [#" .. strategy .. "]", function() local function start(config) return function() bp.routes:insert { - hosts = { "headers-inspect.com" }, + hosts = { "headers-inspect.test" }, } config = config or {} @@ -879,7 +879,7 @@ describe("headers [#" .. strategy .. "]", function() method = "GET", path = "/get", headers = { - host = "headers-inspect.com", + host = "headers-inspect.test", } }) @@ -895,7 +895,7 @@ describe("headers [#" .. strategy .. "]", function() method = "GET", path = "/get", headers = { - host = "404.com", + host = "404.test", } }) diff --git a/spec/02-integration/05-proxy/25-upstream_keepalive_spec.lua b/spec/02-integration/05-proxy/25-upstream_keepalive_spec.lua index 7982c74f6c64..91ee0e436df8 100644 --- a/spec/02-integration/05-proxy/25-upstream_keepalive_spec.lua +++ b/spec/02-integration/05-proxy/25-upstream_keepalive_spec.lua @@ -55,7 +55,7 @@ describe("#postgres upstream keepalive", function() -- upstream TLS bp.routes:insert { - hosts = { "one.com" }, + hosts = { "one.test" }, preserve_host = true, service = bp.services:insert { protocol = helpers.mock_upstream_ssl_protocol, @@ -65,7 +65,7 @@ describe("#postgres upstream keepalive", function() } bp.routes:insert { - hosts = { "two.com" }, + hosts = { "two.test" }, preserve_host = true, service = bp.services:insert { protocol = helpers.mock_upstream_ssl_protocol, @@ -97,7 +97,7 @@ describe("#postgres upstream keepalive", function() -- upstream mTLS bp.routes:insert { - hosts = { "example.com", }, + hosts = { "example.test", }, service = bp.services:insert { url = "https://127.0.0.1:16798/", client_certificate = bp.certificates:insert { @@ -108,7 +108,7 @@ describe("#postgres upstream keepalive", function() } bp.routes:insert { - hosts = { "example2.com", }, + hosts = { "example2.test", }, service = bp.services:insert { url = "https://127.0.0.1:16798/", client_certificate = bp.certificates:insert { @@ -136,19 +136,19 @@ describe("#postgres upstream keepalive", function() method = "GET", path = "/echo_sni", headers = { - Host = "one.com", + Host = "one.test", } }) local body = assert.res_status(200, res) - assert.equal("SNI=one.com", body) + assert.equal("SNI=one.test", body) assert.errlog() .has - .line([[enabled connection keepalive \(pool=[A-F0-9.:]+\|\d+\|one.com]]) + .line([[enabled connection keepalive \(pool=[A-F0-9.:]+\|\d+\|one.test]]) assert.errlog() - .has.line([[keepalive get pool, name: [A-F0-9.:]+\|\d+\|one.com, cpool: 0+]]) + .has.line([[keepalive get pool, name: [A-F0-9.:]+\|\d+\|one.test, cpool: 0+]]) assert.errlog() - .has.line([[keepalive create pool, name: [A-F0-9.:]+\|\d+\|one.com, size: \d+]]) + .has.line([[keepalive create pool, name: [A-F0-9.:]+\|\d+\|one.test, size: \d+]]) assert.errlog() .has.line([[lua balancer: keepalive no free connection, cpool: [A-F0-9]+]]) assert.errlog() @@ -160,19 +160,19 @@ describe("#postgres upstream keepalive", function() method = "GET", path = "/echo_sni", headers = { - Host = "two.com", + Host = "two.test", } }) local body = assert.res_status(200, res) - assert.equal("SNI=two.com", 
body) + assert.equal("SNI=two.test", body) assert.errlog() .has - .line([[enabled connection keepalive \(pool=[A-F0-9.:]+\|\d+\|two.com]]) + .line([[enabled connection keepalive \(pool=[A-F0-9.:]+\|\d+\|two.test]]) assert.errlog() - .has.line([[keepalive get pool, name: [A-F0-9.:]+\|\d+\|two.com, cpool: 0+]]) + .has.line([[keepalive get pool, name: [A-F0-9.:]+\|\d+\|two.test, cpool: 0+]]) assert.errlog() - .has.line([[keepalive create pool, name: [A-F0-9.:]+\|\d+\|two.com, size: \d+]]) + .has.line([[keepalive create pool, name: [A-F0-9.:]+\|\d+\|two.test, size: \d+]]) assert.errlog() .has.line([[lua balancer: keepalive no free connection, cpool: [A-F0-9]+]]) assert.errlog() @@ -206,7 +206,7 @@ describe("#postgres upstream keepalive", function() method = "GET", path = "/", headers = { - Host = "example.com", + Host = "example.test", } }) local fingerprint_1 = assert.res_status(200, res) @@ -216,7 +216,7 @@ describe("#postgres upstream keepalive", function() method = "GET", path = "/", headers = { - Host = "example2.com", + Host = "example2.test", } }) local fingerprint_2 = assert.res_status(200, res) @@ -249,11 +249,11 @@ describe("#postgres upstream keepalive", function() method = "GET", path = "/echo_sni", headers = { - Host = "one.com", + Host = "one.test", } }) local body = assert.res_status(200, res) - assert.equal("SNI=one.com", body) + assert.equal("SNI=one.test", body) assert.errlog() .not_has .line("enabled connection keepalive", true) @@ -267,11 +267,11 @@ describe("#postgres upstream keepalive", function() method = "GET", path = "/echo_sni", headers = { - Host = "two.com", + Host = "two.test", } }) local body = assert.res_status(200, res) - assert.equal("SNI=two.com", body) + assert.equal("SNI=two.test", body) assert.errlog() .not_has .line("enabled connection keepalive", true) @@ -292,19 +292,19 @@ describe("#postgres upstream keepalive", function() method = "GET", path = "/echo_sni", headers = { - Host = "one.com", + Host = "one.test", } }) local body = assert.res_status(200, res) - assert.equal("SNI=one.com", body) + assert.equal("SNI=one.test", body) assert.errlog() .has - .line([[enabled connection keepalive \(pool=[A-F0-9.:]+\|\d+\|one.com]]) + .line([[enabled connection keepalive \(pool=[A-F0-9.:]+\|\d+\|one.test]]) assert.errlog() - .has.line([[keepalive get pool, name: [A-F0-9.:]+\|\d+\|one.com, cpool: 0+]]) + .has.line([[keepalive get pool, name: [A-F0-9.:]+\|\d+\|one.test, cpool: 0+]]) assert.errlog() - .has.line([[keepalive create pool, name: [A-F0-9.:]+\|\d+\|one.com, size: \d+]]) + .has.line([[keepalive create pool, name: [A-F0-9.:]+\|\d+\|one.test, size: \d+]]) assert.errlog() .has.line([[keepalive no free connection, cpool: [A-F0-9]+]]) assert.errlog() @@ -323,17 +323,17 @@ describe("#postgres upstream keepalive", function() method = "GET", path = "/echo_sni", headers = { - Host = "one.com", + Host = "one.test", } }) local body = assert.res_status(200, res) - assert.equal("SNI=one.com", body) + assert.equal("SNI=one.test", body) assert.errlog() .has - .line([[enabled connection keepalive \(pool=[A-F0-9.:]+\|\d+\|one.com]]) + .line([[enabled connection keepalive \(pool=[A-F0-9.:]+\|\d+\|one.test]]) assert.errlog() - .has.line([[keepalive get pool, name: [A-F0-9.:]+\|\d+\|one.com, ]] .. upool_ptr) + .has.line([[keepalive get pool, name: [A-F0-9.:]+\|\d+\|one.test, ]] .. upool_ptr) assert.errlog() .has.line([[keepalive reusing connection [A-F0-9]+, requests: \d+, ]] .. 
upool_ptr) assert.errlog() @@ -350,25 +350,25 @@ describe("#postgres upstream keepalive", function() method = "GET", path = "/echo_sni", headers = { - Host = "one.com", + Host = "one.test", } }) local body = assert.res_status(200, res) - assert.equal("SNI=one.com", body) + assert.equal("SNI=one.test", body) assert.errlog() .has - .line([[enabled connection keepalive \(pool=[A-F0-9.:]+\|\d+\|one.com]]) + .line([[enabled connection keepalive \(pool=[A-F0-9.:]+\|\d+\|one.test]]) assert.errlog() - .has.line([[keepalive get pool, name: [A-F0-9.:]+\|\d+\|one.com, cpool: 0+]]) + .has.line([[keepalive get pool, name: [A-F0-9.:]+\|\d+\|one.test, cpool: 0+]]) assert.errlog() - .has.line([[keepalive create pool, name: [A-F0-9.:]+\|\d+\|one.com, size: \d+]]) + .has.line([[keepalive create pool, name: [A-F0-9.:]+\|\d+\|one.test, size: \d+]]) assert.errlog() .has.line([[keepalive no free connection, cpool: [A-F0-9]+]]) assert.errlog() .has.line([[keepalive not saving connection [A-F0-9]+, cpool: [A-F0-9]+]]) assert.errlog() - .has.line([[keepalive free pool, name: [A-F0-9.:]+\|\d+\|one.com, cpool: [A-F0-9]+]]) + .has.line([[keepalive free pool, name: [A-F0-9.:]+\|\d+\|one.test, cpool: [A-F0-9]+]]) assert.errlog() .not_has.line([[keepalive saving connection]], true) diff --git a/spec/02-integration/05-proxy/31-stream_tls_spec.lua b/spec/02-integration/05-proxy/31-stream_tls_spec.lua index d47b10eef494..17a2897e68cc 100644 --- a/spec/02-integration/05-proxy/31-stream_tls_spec.lua +++ b/spec/02-integration/05-proxy/31-stream_tls_spec.lua @@ -71,7 +71,7 @@ for _, strategy in helpers.each_strategy({"postgres"}) do it("tls not set host_header", function() local tcp = ngx.socket.tcp() assert(tcp:connect(helpers.get_proxy_ip(true), 19443)) - assert(tcp:sslhandshake(nil, "ssl-hello.com", false)) + assert(tcp:sslhandshake(nil, "ssl-hello.test", false)) assert(tcp:send("get_sni\n")) local body = assert(tcp:receive("*a")) assert.equal("nil\n", body) @@ -100,10 +100,10 @@ for _, strategy in helpers.each_strategy({"postgres"}) do local tcp = ngx.socket.tcp() assert(tcp:connect(helpers.get_proxy_ip(true), 19443)) - assert(tcp:sslhandshake(nil, "ssl-hello.com", false)) + assert(tcp:sslhandshake(nil, "ssl-hello.test", false)) assert(tcp:send("get_sni\n")) local body = assert(tcp:receive("*a")) - assert.equal("ssl-hello.com\n", body) + assert.equal("ssl-hello.test\n", body) tcp:close() end) @@ -129,7 +129,7 @@ for _, strategy in helpers.each_strategy({"postgres"}) do local tcp = ngx.socket.tcp() assert(tcp:connect(helpers.get_proxy_ip(true), 19443)) - assert(tcp:sslhandshake(nil, "ssl-hello.com", false)) + assert(tcp:sslhandshake(nil, "ssl-hello.test", false)) assert(tcp:send("get_sni\n")) local body = assert(tcp:receive("*a")) assert.equal("nil\n", body) @@ -139,7 +139,7 @@ for _, strategy in helpers.each_strategy({"postgres"}) do method = "PATCH", path = "/upstreams/upstream_srv", body = { - host_header = "ssl-hello.com" + host_header = "ssl-hello.test" }, headers = { ["Content-Type"] = "application/json" @@ -150,10 +150,10 @@ for _, strategy in helpers.each_strategy({"postgres"}) do local tcp = ngx.socket.tcp() assert(tcp:connect(helpers.get_proxy_ip(true), 19443)) - assert(tcp:sslhandshake(nil, "ssl-hello.com", false)) + assert(tcp:sslhandshake(nil, "ssl-hello.test", false)) assert(tcp:send("get_sni\n")) local body = assert(tcp:receive("*a")) - assert.equal("ssl-hello.com\n", body) + assert.equal("ssl-hello.test\n", body) tcp:close() end) end) diff --git a/spec/02-integration/05-proxy/33-request-id-header_spec.lua 
b/spec/02-integration/05-proxy/33-request-id-header_spec.lua index f8e0f2224255..cd773594f6de 100644 --- a/spec/02-integration/05-proxy/33-request-id-header_spec.lua +++ b/spec/02-integration/05-proxy/33-request-id-header_spec.lua @@ -94,7 +94,7 @@ describe(constants.HEADERS.REQUEST_ID .. " header", function() method = "GET", path = "/", headers = { - host = "404.com", + host = "404.test", } }) local body = assert.res_status(404, res) diff --git a/spec/02-integration/06-invalidations/02-core_entities_invalidations_spec.lua b/spec/02-integration/06-invalidations/02-core_entities_invalidations_spec.lua index 860b6b961edc..c6552713f16e 100644 --- a/spec/02-integration/06-invalidations/02-core_entities_invalidations_spec.lua +++ b/spec/02-integration/06-invalidations/02-core_entities_invalidations_spec.lua @@ -114,7 +114,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/status/200", headers = { - host = "example.com", + host = "example.test", } }) assert.res_status(404, res_1) @@ -123,7 +123,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/status/200", headers = { - host = "example.com", + host = "example.test", } }) assert.res_status(404, res) @@ -137,7 +137,7 @@ for _, strategy in helpers.each_strategy() do path = "/routes", body = { protocols = { "http" }, - hosts = { "example.com" }, + hosts = { "example.test" }, service = { id = service_fixture.id, } @@ -162,7 +162,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/status/200", headers = { - host = "example.com", + host = "example.test", } }) assert.res_status(200, res) @@ -171,7 +171,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/status/200", headers = { - host = "example.com", + host = "example.test", } }, 200) end) @@ -182,7 +182,7 @@ for _, strategy in helpers.each_strategy() do path = "/routes/" .. 
route_fixture_id, body = { methods = cjson.null, - hosts = { "updated-example.com" }, + hosts = { "updated-example.test" }, paths = cjson.null, }, headers = { @@ -205,7 +205,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/", headers = { - host = "updated-example.com", + host = "updated-example.test", } }) assert.res_status(200, res_1) @@ -216,7 +216,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/", headers = { - host = "example.com", + host = "example.test", } }) assert.res_status(404, res_1_old) @@ -227,7 +227,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/", headers = { - host = "updated-example.com", + host = "updated-example.test", } }, 200) @@ -237,7 +237,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/", headers = { - host = "example.com", + host = "example.test", } }, 404) end) @@ -261,7 +261,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/", headers = { - host = "updated-example.com", + host = "updated-example.test", } }) assert.res_status(404, res_1) @@ -270,7 +270,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/", headers = { - host = "updated-example.com", + host = "updated-example.test", } }, 404) end) @@ -289,7 +289,7 @@ for _, strategy in helpers.each_strategy() do path = "/routes", body = { protocols = { "http" }, - hosts = { "service.com" }, + hosts = { "service.test" }, service = { id = service_fixture.id, } @@ -311,7 +311,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/status/200", headers = { - host = "service.com", + host = "service.test", } }) assert.res_status(200, res_1) @@ -320,7 +320,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/status/200", headers = { - host = "service.com", + host = "service.test", } }, 200) @@ -350,7 +350,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/", headers = { - host = "service.com", + host = "service.test", } }) assert.res_status(418, res_1) @@ -359,7 +359,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/", headers = { - host = "service.com", + host = "service.test", } }, 418) end) @@ -387,7 +387,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/", headers = { - host = "service.com", + host = "service.test", } }) assert.res_status(404, res_1) @@ -396,7 +396,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/", headers = { - host = "service.com", + host = "service.test", } }, 404) end) @@ -857,7 +857,7 @@ for _, strategy in helpers.each_strategy() do path = "/routes", body = { protocols = { "http" }, - hosts = { "dummy.com" }, + hosts = { "dummy.test" }, service = { id = service_fixture.id, } @@ -883,7 +883,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/status/200", headers = { - host = "dummy.com", + host = "dummy.test", } }) assert.res_status(200, res_1) @@ -894,7 +894,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/status/200", headers = { - host = "dummy.com", + host = "dummy.test", } }, 200, { ["Dummy-Plugin"] = ngx.null }) @@ -902,7 +902,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/status/200", headers = { - host = "dummy.com", + host = "dummy.test", } }, 200, { ["Dummy-Plugin"] = ngx.null }) @@ -935,7 +935,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/status/200", headers = { - host = 
"dummy.com", + host = "dummy.test", } }) assert.res_status(200, res_1) @@ -945,7 +945,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/status/200", headers = { - host = "dummy.com", + host = "dummy.test", } }, 200, { ["Dummy-Plugin"] = "1" }) end) @@ -977,7 +977,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/status/200", headers = { - host = "dummy.com", + host = "dummy.test", } }) assert.res_status(200, res_1) @@ -987,7 +987,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/status/200", headers = { - host = "dummy.com", + host = "dummy.test", } }, 200, { ["Dummy-Plugin"] = "2" }) end) @@ -1011,7 +1011,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/status/200", headers = { - host = "dummy.com", + host = "dummy.test", } }) assert.res_status(200, res_1) @@ -1021,7 +1021,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/status/200", headers = { - host = "dummy.com", + host = "dummy.test", } }, 200, { ["Dummy-Plugin"] = ngx.null }) end) @@ -1039,7 +1039,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/status/200", headers = { - host = "dummy.com", + host = "dummy.test", } }) assert.res_status(200, res_1) @@ -1050,7 +1050,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/status/200", headers = { - host = "dummy.com", + host = "dummy.test", } }) assert.res_status(200, res) @@ -1083,7 +1083,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/status/200", headers = { - host = "dummy.com", + host = "dummy.test", } }) assert.res_status(200, res_1) @@ -1093,7 +1093,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/status/200", headers = { - host = "dummy.com", + host = "dummy.test", } }, 200, { ["Dummy-Plugin"] = "1" }) end) @@ -1103,7 +1103,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/status/200", headers = { - host = "dummy.com", + host = "dummy.test", } }) assert.res_status(200, res_1) @@ -1127,7 +1127,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/status/200", headers = { - host = "dummy.com", + host = "dummy.test", } }) assert.res_status(200, res_1) @@ -1137,7 +1137,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/status/200", headers = { - host = "dummy.com", + host = "dummy.test", } }, 200, { ["Dummy-Plugin"] = ngx.null }) end) diff --git a/spec/02-integration/06-invalidations/03-plugins_iterator_invalidation_spec.lua b/spec/02-integration/06-invalidations/03-plugins_iterator_invalidation_spec.lua index 6b2321957344..e0ab5ccba749 100644 --- a/spec/02-integration/06-invalidations/03-plugins_iterator_invalidation_spec.lua +++ b/spec/02-integration/06-invalidations/03-plugins_iterator_invalidation_spec.lua @@ -88,7 +88,7 @@ for _, strategy in helpers.each_strategy() do path = "/routes", body = { protocols = { "http" }, - hosts = { "dummy.com" }, + hosts = { "dummy.test" }, service = { id = service_fixture.id, } @@ -176,7 +176,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/status/200", headers = { - host = "dummy.com", + host = "dummy.test", } }) assert.res_status(200, res_1) @@ -201,7 +201,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/status/200", headers = { - host = "dummy.com", + host = "dummy.test", } }) assert.res_status(200, res_2) diff --git a/spec/02-integration/07-sdk/01-ctx_spec.lua 
b/spec/02-integration/07-sdk/01-ctx_spec.lua index 501c70b3089c..3d882e4f8f8f 100644 --- a/spec/02-integration/07-sdk/01-ctx_spec.lua +++ b/spec/02-integration/07-sdk/01-ctx_spec.lua @@ -28,7 +28,7 @@ describe("PDK: kong.ctx", function() it("isolates kong.ctx.plugin per-plugin", function() local route = bp.routes:insert({ - hosts = { "ctx-plugin.com" } + hosts = { "ctx-plugin.test" } }) bp.plugins:insert({ @@ -65,7 +65,7 @@ describe("PDK: kong.ctx", function() proxy_client = helpers.proxy_client() local res = proxy_client:get("/request", { - headers = { Host = "ctx-plugin.com" } + headers = { Host = "ctx-plugin.test" } }) assert.status(200, res) @@ -77,7 +77,7 @@ describe("PDK: kong.ctx", function() it("can share values using kong.ctx.shared", function() local route = bp.routes:insert({ - hosts = { "ctx-shared.com" } + hosts = { "ctx-shared.test" } }) bp.plugins:insert({ @@ -108,7 +108,7 @@ describe("PDK: kong.ctx", function() proxy_client = helpers.proxy_client() local res = proxy_client:get("/request", { - headers = { Host = "ctx-shared.com" } + headers = { Host = "ctx-shared.test" } }) assert.status(200, res) diff --git a/spec/02-integration/07-sdk/02-log_spec.lua b/spec/02-integration/07-sdk/02-log_spec.lua index a60a01d72284..f440014a64d8 100644 --- a/spec/02-integration/07-sdk/02-log_spec.lua +++ b/spec/02-integration/07-sdk/02-log_spec.lua @@ -55,7 +55,7 @@ describe("PDK: kong.log", function() bp.routes:insert({ service = service, protocols = { "https" }, - hosts = { "logger-plugin.com" } + hosts = { "logger-plugin.test" } }) bp.plugins:insert({ @@ -76,7 +76,7 @@ describe("PDK: kong.log", function() -- Do two requests for i = 1, 2 do local res = proxy_client:get("/request", { - headers = { Host = "logger-plugin.com" } + headers = { Host = "logger-plugin.test" } }) assert.status(200, res) end diff --git a/spec/02-integration/07-sdk/04-plugin-config_spec.lua b/spec/02-integration/07-sdk/04-plugin-config_spec.lua index 551dab5da34a..b56e98e73112 100644 --- a/spec/02-integration/07-sdk/04-plugin-config_spec.lua +++ b/spec/02-integration/07-sdk/04-plugin-config_spec.lua @@ -12,7 +12,7 @@ describe("Plugin configuration", function() "plugin-config-dump", }) - local route = bp.routes:insert({ hosts = { "test.com" } }) + local route = bp.routes:insert({ hosts = { "test.test" } }) bp.plugins:insert({ name = "plugin-config-dump", @@ -43,7 +43,7 @@ describe("Plugin configuration", function() it("conf", function() local res = proxy_client:get("/request", { - headers = { Host = "test.com" } + headers = { Host = "test.test" } }) local body = assert.status(200, res) diff --git a/spec/02-integration/07-sdk/05-pdk_spec.lua b/spec/02-integration/07-sdk/05-pdk_spec.lua index 0eb286c1fb8f..9e460427435d 100644 --- a/spec/02-integration/07-sdk/05-pdk_spec.lua +++ b/spec/02-integration/07-sdk/05-pdk_spec.lua @@ -17,7 +17,7 @@ describe("kong.plugin.get_id()", function() "get-plugin-id", }) - local route = assert(bp.routes:insert({ hosts = { "test.com" } })) + local route = assert(bp.routes:insert({ hosts = { "test.test" } })) assert(bp.plugins:insert({ name = "get-plugin-id", @@ -48,7 +48,7 @@ describe("kong.plugin.get_id()", function() it("conf", function() local res = proxy_client:get("/request", { - headers = { Host = "test.com" } + headers = { Host = "test.test" } }) local body = assert.status(200, res) diff --git a/spec/02-integration/16-queues/01-shutdown_spec.lua b/spec/02-integration/16-queues/01-shutdown_spec.lua index 3b970643e67b..0934f05b7d74 100644 --- 
a/spec/02-integration/16-queues/01-shutdown_spec.lua +++ b/spec/02-integration/16-queues/01-shutdown_spec.lua @@ -55,7 +55,7 @@ for _, strategy in helpers.each_strategy() do route = { id = route2.id }, name = "http-log", config = { - http_endpoint = "http://this-does-not-exist.example.com:80/this-does-not-exist", + http_endpoint = "http://this-does-not-exist.example.test:80/this-does-not-exist", queue = { max_batch_size = 10, max_coalescing_delay = 10, diff --git a/spec/03-plugins/01-tcp-log/01-tcp-log_spec.lua b/spec/03-plugins/01-tcp-log/01-tcp-log_spec.lua index b9eaa23c9592..a2751611fd5c 100644 --- a/spec/03-plugins/01-tcp-log/01-tcp-log_spec.lua +++ b/spec/03-plugins/01-tcp-log/01-tcp-log_spec.lua @@ -19,7 +19,7 @@ for _, strategy in helpers.each_strategy() do }) local route = bp.routes:insert { - hosts = { "tcp_logging.com" }, + hosts = { "tcp_logging.test" }, } bp.plugins:insert { @@ -45,7 +45,7 @@ for _, strategy in helpers.each_strategy() do local route2 = bp.routes:insert { - hosts = { "tcp_logging_tls.com" }, + hosts = { "tcp_logging_tls.test" }, } bp.plugins:insert { @@ -99,7 +99,7 @@ for _, strategy in helpers.each_strategy() do } local route5 = bp.routes:insert { - hosts = { "early_termination.example.com" }, + hosts = { "early_termination.example.test" }, } bp.plugins:insert { @@ -174,7 +174,7 @@ for _, strategy in helpers.each_strategy() do } local route6 = bp.routes:insert { - hosts = { "custom_tcp_logging.com" }, + hosts = { "custom_tcp_logging.test" }, } bp.plugins:insert { @@ -219,7 +219,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/request", headers = { - host = "tcp_logging.com", + host = "tcp_logging.test", }, }) assert.response(r).has.status(200) @@ -246,7 +246,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/request", headers = { - host = "custom_tcp_logging.com", + host = "custom_tcp_logging.test", }, }) assert.response(r).has.status(200) @@ -272,7 +272,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/request", headers = { - host = "custom_tcp_logging.com", + host = "custom_tcp_logging.test", }, }) assert.response(r).has.status(200) @@ -329,7 +329,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/delay/1", headers = { - host = "tcp_logging.com", + host = "tcp_logging.test", }, }) @@ -437,7 +437,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/request", headers = { - host = "tcp_logging_tls.com", + host = "tcp_logging_tls.test", }, }) assert.response(r).has.status(200) @@ -460,7 +460,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/request", headers = { - host = "tcp_logging.com", + host = "tcp_logging.test", }, }) @@ -486,7 +486,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/request", headers = { - host = "tcp_logging.com", + host = "tcp_logging.test", ["x-ssl-client-verify"] = "SUCCESS", }, }) @@ -543,7 +543,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/request", headers = { - host = "early_termination.example.com", + host = "early_termination.example.test", }, }) assert.response(r).has.status(200) diff --git a/spec/03-plugins/02-udp-log/01-udp-log_spec.lua b/spec/03-plugins/02-udp-log/01-udp-log_spec.lua index 4ed5472f2abe..bc1082573215 100644 --- a/spec/03-plugins/02-udp-log/01-udp-log_spec.lua +++ b/spec/03-plugins/02-udp-log/01-udp-log_spec.lua @@ -18,7 +18,7 @@ for _, strategy in helpers.each_strategy() do }) local route = bp.routes:insert 
{ - hosts = { "udp_logging.com" }, + hosts = { "udp_logging.test" }, } bp.plugins:insert { @@ -31,7 +31,7 @@ for _, strategy in helpers.each_strategy() do } local route2 = bp.routes:insert { - hosts = { "custom_udp_logging.com" }, + hosts = { "custom_udp_logging.test" }, } bp.plugins:insert { @@ -113,7 +113,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/delay/2", headers = { - host = "udp_logging.com", + host = "udp_logging.test", }, }) @@ -147,7 +147,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/delay/2", headers = { - host = "custom_udp_logging.com", + host = "custom_udp_logging.test", }, }) @@ -170,7 +170,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/delay/2", headers = { - host = "custom_udp_logging.com", + host = "custom_udp_logging.test", }, }) @@ -268,7 +268,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/request", headers = { - host = "udp_logging.com", + host = "udp_logging.test", }, }) assert.response(res).has.status(200) diff --git a/spec/03-plugins/03-http-log/02-schema_spec.lua b/spec/03-plugins/03-http-log/02-schema_spec.lua index 737a2e51017b..f96b4eadb0ac 100644 --- a/spec/03-plugins/03-http-log/02-schema_spec.lua +++ b/spec/03-plugins/03-http-log/02-schema_spec.lua @@ -54,7 +54,7 @@ describe(PLUGIN_NAME .. ": (schema)", function() it("accepts minimal config with defaults", function() local ok, err = validate({ - http_endpoint = "http://myservice.com/path", + http_endpoint = "http://myservice.test/path", }) assert.is_nil(err) assert.is_truthy(ok) @@ -62,7 +62,7 @@ describe(PLUGIN_NAME .. ": (schema)", function() it("accepts empty headers with username/password in the http_endpoint", function() local ok, err = validate({ - http_endpoint = "http://bob:password@myservice.com/path", + http_endpoint = "http://bob:password@myservice.test/path", }) assert.is_nil(err) assert.is_truthy(ok) @@ -70,7 +70,7 @@ describe(PLUGIN_NAME .. ": (schema)", function() it("accepts custom fields by lua", function() local ok, err = validate({ - http_endpoint = "http://myservice.com/path", + http_endpoint = "http://myservice.test/path", custom_fields_by_lua = { foo = "return 'bar'", } @@ -81,7 +81,7 @@ describe(PLUGIN_NAME .. ": (schema)", function() it("does accept allowed headers", function() local ok, err = validate({ - http_endpoint = "http://myservice.com/path", + http_endpoint = "http://myservice.test/path", headers = { ["X-My-Header"] = "123", ["X-Your-Header"] = "abc", @@ -93,7 +93,7 @@ describe(PLUGIN_NAME .. ": (schema)", function() it("does not accept empty header values", function() local ok, err = validate({ - http_endpoint = "http://myservice.com/path", + http_endpoint = "http://myservice.test/path", headers = { ["X-My-Header"] = "", } @@ -107,7 +107,7 @@ describe(PLUGIN_NAME .. ": (schema)", function() it("does not accept Host header", function() local ok, err = validate({ - http_endpoint = "http://myservice.com/path", + http_endpoint = "http://myservice.test/path", headers = { ["X-My-Header"] = "123", Host = "MyHost", @@ -123,7 +123,7 @@ describe(PLUGIN_NAME .. ": (schema)", function() it("does not accept Content-Length header", function() local ok, err = validate({ - http_endpoint = "http://myservice.com/path", + http_endpoint = "http://myservice.test/path", headers = { ["coNTEnt-Length"] = "123", -- also validate casing } @@ -138,7 +138,7 @@ describe(PLUGIN_NAME .. 
": (schema)", function() it("does not accept Content-Type header", function() local ok, err = validate({ - http_endpoint = "http://myservice.com/path", + http_endpoint = "http://myservice.test/path", headers = { ["coNTEnt-Type"] = "bad" -- also validate casing } @@ -153,7 +153,7 @@ describe(PLUGIN_NAME .. ": (schema)", function() it("does not accept userinfo in URL and 'Authorization' header", function() local ok, err = validate({ - http_endpoint = "http://hi:there@myservice.com/path", + http_endpoint = "http://hi:there@myservice.test/path", headers = { ["AuthoRIZATion"] = "bad" -- also validate casing } @@ -166,7 +166,7 @@ describe(PLUGIN_NAME .. ": (schema)", function() it("converts legacy queue parameters", function() local entity = validate({ - http_endpoint = "http://hi:there@myservice.com/path", + http_endpoint = "http://hi:there@myservice.test/path", retry_count = 23, queue_size = 46, flush_timeout = 92, diff --git a/spec/03-plugins/04-file-log/01-log_spec.lua b/spec/03-plugins/04-file-log/01-log_spec.lua index fc8344523068..3f50bce497e0 100644 --- a/spec/03-plugins/04-file-log/01-log_spec.lua +++ b/spec/03-plugins/04-file-log/01-log_spec.lua @@ -112,7 +112,7 @@ for _, strategy in helpers.each_strategy() do }) local route = bp.routes:insert { - hosts = { "file_logging.com" }, + hosts = { "file_logging.test" }, } bp.plugins:insert { @@ -165,7 +165,7 @@ for _, strategy in helpers.each_strategy() do } local route4 = bp.routes:insert { - hosts = { "file_logging_by_lua.com" }, + hosts = { "file_logging_by_lua.test" }, } bp.plugins:insert { @@ -182,7 +182,7 @@ for _, strategy in helpers.each_strategy() do } local route5 = bp.routes:insert { - hosts = { "file_logging2.com" }, + hosts = { "file_logging2.test" }, } bp.plugins:insert { @@ -195,7 +195,7 @@ for _, strategy in helpers.each_strategy() do } local route6 = bp.routes:insert { - hosts = { "file_logging3.com" }, + hosts = { "file_logging3.test" }, } bp.plugins:insert { @@ -208,7 +208,7 @@ for _, strategy in helpers.each_strategy() do } local route7 = bp.routes:insert { - hosts = { "file_logging4.com" }, + hosts = { "file_logging4.test" }, } bp.plugins:insert { @@ -221,7 +221,7 @@ for _, strategy in helpers.each_strategy() do } local route8 = bp.routes:insert { - hosts = { "file_logging5.com" }, + hosts = { "file_logging5.test" }, } bp.plugins:insert { @@ -234,7 +234,7 @@ for _, strategy in helpers.each_strategy() do } local route9 = bp.routes:insert { - hosts = { "file_logging6.com" }, + hosts = { "file_logging6.test" }, } bp.plugins:insert { @@ -280,7 +280,7 @@ for _, strategy in helpers.each_strategy() do path = "/status/200", headers = { ["file-log-uuid"] = uuid, - ["Host"] = "file_logging.com" + ["Host"] = "file_logging.test" } })) assert.res_status(200, res) @@ -302,7 +302,7 @@ for _, strategy in helpers.each_strategy() do path = "/status/200", headers = { ["file-log-uuid"] = uuid, - ["Host"] = "file_logging_by_lua.com" + ["Host"] = "file_logging_by_lua.test" } })) assert.res_status(200, res) @@ -324,7 +324,7 @@ for _, strategy in helpers.each_strategy() do path = "/status/200", headers = { ["file-log-uuid"] = uuid, - ["Host"] = "file_logging_by_lua.com" + ["Host"] = "file_logging_by_lua.test" } })) assert.res_status(200, res) @@ -391,7 +391,7 @@ for _, strategy in helpers.each_strategy() do path = "/status/200", headers = { ["file-log-uuid"] = uuid1, - ["Host"] = "file_logging.com" + ["Host"] = "file_logging.test" } })) assert.res_status(200, res) @@ -408,7 +408,7 @@ for _, strategy in helpers.each_strategy() do path = 
"/status/200", headers = { ["file-log-uuid"] = uuid2, - ["Host"] = "file_logging.com" + ["Host"] = "file_logging.test" } })) assert.res_status(200, res) @@ -419,7 +419,7 @@ for _, strategy in helpers.each_strategy() do path = "/status/200", headers = { ["file-log-uuid"] = uuid3, - ["Host"] = "file_logging.com" + ["Host"] = "file_logging.test" } })) assert.res_status(200, res) @@ -442,7 +442,7 @@ for _, strategy in helpers.each_strategy() do path = "/status/200", headers = { ["file-log-uuid"] = uuid, - ["Host"] = "file_logging2.com" + ["Host"] = "file_logging2.test" } })) assert.res_status(200, res) @@ -462,7 +462,7 @@ for _, strategy in helpers.each_strategy() do path = "/status/200", headers = { ["file-log-uuid"] = uuid, - ["Host"] = "file_logging3.com" + ["Host"] = "file_logging3.test" } })) assert.res_status(200, res) @@ -482,7 +482,7 @@ for _, strategy in helpers.each_strategy() do path = "/status/200", headers = { ["file-log-uuid"] = uuid1, - ["Host"] = "file_logging4.com" + ["Host"] = "file_logging4.test" } })) assert.res_status(200, res) @@ -501,7 +501,7 @@ for _, strategy in helpers.each_strategy() do path = "/status/200", headers = { ["file-log-uuid"] = uuid, - ["Host"] = "file_logging5.com" + ["Host"] = "file_logging5.test" } })) assert.res_status(200, res) @@ -521,7 +521,7 @@ for _, strategy in helpers.each_strategy() do path = "/status/200", headers = { ["file-log-uuid"] = uuid, - ["Host"] = "file_logging6.com" + ["Host"] = "file_logging6.test" } })) assert.res_status(200, res) diff --git a/spec/03-plugins/05-syslog/01-log_spec.lua b/spec/03-plugins/05-syslog/01-log_spec.lua index 4e5c9d13e514..c84c55213c68 100644 --- a/spec/03-plugins/05-syslog/01-log_spec.lua +++ b/spec/03-plugins/05-syslog/01-log_spec.lua @@ -18,19 +18,19 @@ for _, strategy in helpers.each_strategy() do }) local route1 = bp.routes:insert { - hosts = { "logging.com" }, + hosts = { "logging.test" }, } local route2 = bp.routes:insert { - hosts = { "logging2.com" }, + hosts = { "logging2.test" }, } local route3 = bp.routes:insert { - hosts = { "logging3.com" }, + hosts = { "logging3.test" }, } local route4 = bp.routes:insert { - hosts = { "logging4.com" }, + hosts = { "logging4.test" }, } bp.plugins:insert { @@ -89,17 +89,17 @@ for _, strategy in helpers.each_strategy() do local grpc_route1 = bp.routes:insert { service = grpc_service, - hosts = { "grpc_logging.com" }, + hosts = { "grpc_logging.test" }, } local grpc_route2 = bp.routes:insert { service = grpc_service, - hosts = { "grpc_logging2.com" }, + hosts = { "grpc_logging2.test" }, } local grpc_route3 = bp.routes:insert { service = grpc_service, - hosts = { "grpc_logging3.com" }, + hosts = { "grpc_logging3.test" }, } bp.plugins:insert { @@ -259,28 +259,28 @@ for _, strategy in helpers.each_strategy() do end it("logs to syslog if log_level is lower", function() - do_test("logging.com", true) + do_test("logging.test", true) end) it("does not log to syslog if log_level is higher", function() - do_test("logging2.com", false) + do_test("logging2.test", false) end) it("logs to syslog if log_level is the same", function() - do_test("logging3.com", true) + do_test("logging3.test", true) end) it("logs custom values", function() - local resp = do_test("logging4.com", true) + local resp = do_test("logging4.test", true) assert.matches("\"new_field\".*123", resp) assert.not_matches("\"route\"", resp) end) it("logs to syslog if log_level is lower #grpc", function() - do_test("grpc_logging.com", true, true) + do_test("grpc_logging.test", true, true) end) it("does not log 
to syslog if log_level is higher #grpc", function() - do_test("grpc_logging2.com", false, true) + do_test("grpc_logging2.test", false, true) end) it("logs to syslog if log_level is the same #grpc", function() - do_test("grpc_logging3.com", true, true) + do_test("grpc_logging3.test", true, true) end) end) end diff --git a/spec/03-plugins/06-statsd/01-log_spec.lua b/spec/03-plugins/06-statsd/01-log_spec.lua index a43a5a5e92c5..4df2c3633044 100644 --- a/spec/03-plugins/06-statsd/01-log_spec.lua +++ b/spec/03-plugins/06-statsd/01-log_spec.lua @@ -83,7 +83,7 @@ for _, strategy in helpers.each_strategy() do name = fmt("statsd%s", i) } routes[i] = bp.routes:insert { - hosts = { fmt("logging%d.com", i) }, + hosts = { fmt("logging%d.test", i) }, service = service } end @@ -692,7 +692,7 @@ for _, strategy in helpers.each_strategy() do port = helpers.mock_upstream_port, } routes[i] = bp.routes:insert { - hosts = { fmt("logging%d.com", i) }, + hosts = { fmt("logging%d.test", i) }, service = service } end @@ -846,7 +846,7 @@ for _, strategy in helpers.each_strategy() do name = fmt("grpc_statsd%s", i) } grpc_routes[i] = bp.routes:insert { - hosts = { fmt("grpc_logging%d.com", i) }, + hosts = { fmt("grpc_logging%d.test", i) }, service = service } end @@ -924,7 +924,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/request?apikey=kong", headers = { - host = "logging1.com" + host = "logging1.test" } }) assert.res_status(200, response) @@ -954,7 +954,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/request?apikey=kong", headers = { - host = "logging25.com" + host = "logging25.test" } }) assert.res_status(200, response) @@ -977,7 +977,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/request?apikey=kong", headers = { - host = "logging26.com" + host = "logging26.test" } }) assert.res_status(200, response) @@ -1000,7 +1000,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/request?apikey=kong", headers = { - host = "logging27.com" + host = "logging27.test" } }) assert.res_status(200, response) @@ -1023,7 +1023,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/request?apikey=kong", headers = { - host = "logging28.com" + host = "logging28.test" } }) assert.res_status(200, response) @@ -1050,7 +1050,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/request?apikey=kong", headers = { - host = "logging13.com" + host = "logging13.test" } }) assert.res_status(200, response) @@ -1080,7 +1080,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/request?apikey=kong", headers = { - host = "logging31.com" + host = "logging31.test" } }) assert.res_status(200, response) @@ -1103,7 +1103,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/request?apikey=kong", headers = { - host = "logging32.com" + host = "logging32.test" } }) assert.res_status(200, response) @@ -1126,7 +1126,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/request?apikey=kong", headers = { - host = "logging33.com" + host = "logging33.test" } }) assert.res_status(200, response) @@ -1149,7 +1149,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/request?apikey=kong", headers = { - host = "logging34.com" + host = "logging34.test" } }) assert.res_status(200, response) @@ -1172,7 +1172,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/request?apikey=kong", headers = { - host = 
"logging35.com" + host = "logging35.test" } }) assert.res_status(200, response) @@ -1192,7 +1192,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/request?apikey=kong", headers = { - host = "logging36.com" + host = "logging36.test" } }) assert.res_status(200, response) @@ -1212,7 +1212,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/request?apikey=kong", headers = { - host = "logging37.com" + host = "logging37.test" } }) assert.res_status(200, response) @@ -1232,7 +1232,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/request?apikey=kong", headers = { - host = "logging38.com" + host = "logging38.test" } }) assert.res_status(200, response) @@ -1252,7 +1252,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/request", headers = { - host = "logging5.com" + host = "logging5.test" } }) assert.res_status(200, response) @@ -1270,7 +1270,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/request", headers = { - host = "logging3.com" + host = "logging3.test" } }) assert.res_status(200, response) @@ -1287,7 +1287,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/request", headers = { - host = "logging4.com" + host = "logging4.test" } }) assert.res_status(200, response) @@ -1304,7 +1304,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/request", headers = { - host = "logging2.com" + host = "logging2.test" } }) assert.res_status(200, response) @@ -1321,7 +1321,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/request", headers = { - host = "logging6.com" + host = "logging6.test" } }) assert.res_status(200, response) @@ -1338,7 +1338,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/request", headers = { - host = "logging7.com" + host = "logging7.test" } }) assert.res_status(200, response) @@ -1355,7 +1355,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/request", headers = { - host = "logging8.com" + host = "logging8.test" } }) assert.res_status(200, response) @@ -1372,7 +1372,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/request?apikey=kong", headers = { - host = "logging9.com" + host = "logging9.test" } }) assert.res_status(200, response) @@ -1388,7 +1388,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/request?apikey=kong", headers = { - host = "logging10.com" + host = "logging10.test" } }) assert.res_status(200, response) @@ -1406,7 +1406,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/request?apikey=kong", headers = { - host = "logging11.com" + host = "logging11.test" } }) assert.res_status(200, response) @@ -1424,7 +1424,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/request?apikey=kong", headers = { - host = "logging12.com" + host = "logging12.test" } }) assert.res_status(200, response) @@ -1441,7 +1441,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/request?apikey=kong", headers = { - host = "logging14.com" + host = "logging14.test" } }) assert.res_status(200, response) @@ -1459,7 +1459,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/request?apikey=kong", headers = { - host = "logging15.com" + host = "logging15.test" } }) assert.res_status(200, response) @@ -1477,7 +1477,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/request?apikey=kong", 
headers = { - host = "logging16.com" + host = "logging16.test" } }) assert.res_status(200, response) @@ -1495,7 +1495,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/request?apikey=kong", headers = { - host = "logging17.com" + host = "logging17.test" } }) assert.res_status(200, response) @@ -1513,7 +1513,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/request?apikey=kong", headers = { - host = "logging18.com" + host = "logging18.test" } }) assert.res_status(200, response) @@ -1531,7 +1531,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/request?apikey=kong", headers = { - host = "logging19.com" + host = "logging19.test" } }) assert.res_status(200, response) @@ -1556,7 +1556,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/request?apikey=kong", headers = { - host = "logging20.com" + host = "logging20.test" } }) assert.res_status(200, response) @@ -1583,7 +1583,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/request?apikey=kong", headers = { - host = "logging21.com" + host = "logging21.test" } }) assert.res_status(200, response) @@ -1607,7 +1607,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/request?apikey=kong", headers = { - host = "logging22.com" + host = "logging22.test" } }) assert.res_status(200, response) @@ -1626,7 +1626,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/request?apikey=kong", headers = { - host = "logging23.com" + host = "logging23.test" } }) assert.res_status(200, response) @@ -1645,7 +1645,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/request?apikey=kong", headers = { - host = "logging24.com" + host = "logging24.test" } }) assert.res_status(200, response) @@ -1666,7 +1666,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/request?apikey=kong", headers = { - host = "logging100.com" + host = "logging100.test" } }) assert.res_status(200, response) @@ -1687,7 +1687,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/request?apikey=kong", headers = { - host = "logging101.com" + host = "logging101.test" } }) assert.res_status(200, response) @@ -1722,7 +1722,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/request?apikey=kong", headers = { - host = "logging1.com" + host = "logging1.test" } }) assert.res_status(200, response) @@ -1758,7 +1758,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/request?apikey=kong", headers = { - host = "logging25.com" + host = "logging25.test" } }) assert.res_status(200, response) @@ -1794,7 +1794,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/request?apikey=kong", headers = { - host = "logging26.com" + host = "logging26.test" } }) assert.res_status(200, response) @@ -1830,7 +1830,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/request?apikey=kong", headers = { - host = "logging27.com" + host = "logging27.test" } }) assert.res_status(200, response) @@ -1866,7 +1866,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/request?apikey=kong", headers = { - host = "logging28.com" + host = "logging28.test" } }) assert.res_status(200, response) @@ -1893,7 +1893,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/request?apikey=kong", headers = { - host = "logging102.com" + host = "logging102.test" } }) assert.res_status(200, 
response) @@ -1922,7 +1922,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/request?apikey=kong", headers = { - host = "logging103.com" + host = "logging103.test" } }) assert.res_status(200, response) @@ -1954,7 +1954,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/request?apikey=kong", headers = { - host = "logging104.com" + host = "logging104.test" } }) assert.res_status(200, response) @@ -1986,7 +1986,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/request?apikey=kong", headers = { - host = "logging105.com" + host = "logging105.test" } }) assert.res_status(200, response) @@ -2018,7 +2018,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/request?apikey=kong", headers = { - host = "logging106.com" + host = "logging106.test" } }) assert.res_status(200, response) @@ -2050,7 +2050,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/request?apikey=kong", headers = { - host = "logging107.com" + host = "logging107.test" } }) assert.res_status(200, response) @@ -2084,7 +2084,7 @@ for _, strategy in helpers.each_strategy() do greeting = "world!" }, opts = { - ["-authority"] = "grpc_logging1.com", + ["-authority"] = "grpc_logging1.test", } }) assert.truthy(ok) @@ -2109,7 +2109,7 @@ for _, strategy in helpers.each_strategy() do greeting = "world!" }, opts = { - ["-authority"] = "grpc_logging2.com", + ["-authority"] = "grpc_logging2.test", } }) assert.truthy(ok) @@ -2177,7 +2177,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/request?apikey=kong", headers = { - host = "logging1.com" + host = "logging1.test" } }) assert.res_status(404, response) @@ -2259,7 +2259,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/request?apikey=kong", headers = { - host = "logging1.com" + host = "logging1.test" } }) assert.res_status(404, response) @@ -2314,7 +2314,7 @@ for _, strategy in helpers.each_strategy() do name = "statsd" } local route = bp.routes:insert { - hosts = { "logging.com" }, + hosts = { "logging.test" }, service = service } bp.key_auth_plugins:insert { route = { id = route.id } } @@ -2351,7 +2351,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/request?apikey=kong", headers = { - host = "logging.com" + host = "logging.test" } }) assert.res_status(200, response) diff --git a/spec/03-plugins/07-loggly/01-log_spec.lua b/spec/03-plugins/07-loggly/01-log_spec.lua index ef415c5fb1ef..dd5e35a0199d 100644 --- a/spec/03-plugins/07-loggly/01-log_spec.lua +++ b/spec/03-plugins/07-loggly/01-log_spec.lua @@ -18,23 +18,23 @@ for _, strategy in helpers.each_strategy() do }) local route1 = bp.routes:insert { - hosts = { "logging.com" }, + hosts = { "logging.test" }, } local route2 = bp.routes:insert { - hosts = { "logging1.com" }, + hosts = { "logging1.test" }, } local route3 = bp.routes:insert { - hosts = { "logging2.com" }, + hosts = { "logging2.test" }, } local route4 = bp.routes:insert { - hosts = { "logging3.com" }, + hosts = { "logging3.test" }, } local route5 = bp.routes:insert { - hosts = { "logging4.com" }, + hosts = { "logging4.test" }, } bp.plugins:insert { @@ -107,17 +107,17 @@ for _, strategy in helpers.each_strategy() do local grpc_route1 = bp.routes:insert { service = grpc_service, - hosts = { "grpc_logging.com" }, + hosts = { "grpc_logging.test" }, } local grpc_route2 = bp.routes:insert { service = grpc_service, - hosts = { "grpc_logging1.com" }, + hosts = { "grpc_logging1.test" }, } local 
grpc_route3 = bp.routes:insert { service = grpc_service, - hosts = { "grpc_logging2.com" }, + hosts = { "grpc_logging2.test" }, } bp.plugins:insert { @@ -231,7 +231,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/request", headers = { - host = "logging.com" + host = "logging.test" } }) assert.equal("12", pri) @@ -239,7 +239,7 @@ for _, strategy in helpers.each_strategy() do end) it("logs to UDP when severity is warning and log level info #grpc", function() - local pri, message = run_grpc("grpc_logging.com") + local pri, message = run_grpc("grpc_logging.test") assert.equal("12", pri) assert.equal("127.0.0.1", message.client_ip) end) @@ -249,7 +249,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/request", headers = { - host = "logging1.com" + host = "logging1.test" } }) assert.equal("14", pri) @@ -257,7 +257,7 @@ for _, strategy in helpers.each_strategy() do end) it("logs to UDP when severity is info and log level debug #grpc", function() - local pri, message = run_grpc("grpc_logging1.com") + local pri, message = run_grpc("grpc_logging1.test") assert.equal("14", pri) assert.equal("127.0.0.1", message.client_ip) end) @@ -267,7 +267,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/request", headers = { - host = "logging2.com" + host = "logging2.test" } }) assert.equal("10", pri) @@ -275,7 +275,7 @@ for _, strategy in helpers.each_strategy() do end) it("logs to UDP when severity is critical and log level critical #grpc", function() - local pri, message = run_grpc("grpc_logging2.com") + local pri, message = run_grpc("grpc_logging2.test") assert.equal("10", pri) assert.equal("127.0.0.1", message.client_ip) end) @@ -285,7 +285,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/", headers = { - host = "logging3.com" + host = "logging3.test" } }) assert.equal("14", pri) @@ -297,7 +297,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/", headers = { - host = "logging3.com" + host = "logging3.test" } }) assert.equal("14", pri) @@ -309,7 +309,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/status/401", headers = { - host = "logging3.com" + host = "logging3.test" } }, 401) assert.equal("14", pri) @@ -321,7 +321,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/status/500", headers = { - host = "logging3.com" + host = "logging3.test" } }, 500) assert.equal("14", pri) @@ -334,7 +334,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/status/500", headers = { - host = "logging4.com" + host = "logging4.test" } }, 500) assert.equal("14", pri) @@ -345,7 +345,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/status/500", headers = { - host = "logging4.com" + host = "logging4.test" } }, 500) assert.equal("14", pri) diff --git a/spec/03-plugins/08-datadog/01-log_spec.lua b/spec/03-plugins/08-datadog/01-log_spec.lua index 90b9e2f9f266..214b7832961a 100644 --- a/spec/03-plugins/08-datadog/01-log_spec.lua +++ b/spec/03-plugins/08-datadog/01-log_spec.lua @@ -42,27 +42,27 @@ describe("Plugin: datadog (log)", function() }) local route1 = bp.routes:insert { - hosts = { "datadog1.com" }, + hosts = { "datadog1.test" }, service = bp.services:insert { name = "dd1" } } local route2 = bp.routes:insert { - hosts = { "datadog2.com" }, + hosts = { "datadog2.test" }, service = bp.services:insert { name = "dd2" } } local route3 = bp.routes:insert { - hosts = { "datadog3.com" }, + hosts = { 
"datadog3.test" }, service = bp.services:insert { name = "dd3" } } local route4 = bp.routes:insert { - hosts = { "datadog4.com" }, + hosts = { "datadog4.test" }, service = bp.services:insert { name = "dd4" } } local route5 = bp.routes:insert { - hosts = { "datadog5.com" }, + hosts = { "datadog5.test" }, service = bp.services:insert { name = "dd5" } } @@ -76,17 +76,17 @@ describe("Plugin: datadog (log)", function() }) local route6 = bp.routes:insert { - hosts = { "datadog6.com" }, + hosts = { "datadog6.test" }, service = bp.services:insert { name = "dd6" } } local route7 = bp.routes:insert { - hosts = { "datadog7.com" }, + hosts = { "datadog7.test" }, service = bp.services:insert { name = "dd7" } } local route8 = bp.routes:insert { - hosts = { "datadog8.com" }, + hosts = { "datadog8.test" }, paths = { "/test_schema" }, service = bp.services:insert { name = "dd8", @@ -293,7 +293,7 @@ describe("Plugin: datadog (log)", function() method = "GET", path = "/status/200?apikey=kong", headers = { - ["Host"] = "datadog1.com" + ["Host"] = "datadog1.test" } }) assert.res_status(200, res) @@ -341,7 +341,7 @@ describe("Plugin: datadog (log)", function() method = "GET", path = "/status/200?apikey=kong", headers = { - ["Host"] = "datadog4.com" + ["Host"] = "datadog4.test" } }) assert.res_status(200, res) @@ -364,7 +364,7 @@ describe("Plugin: datadog (log)", function() method = "GET", path = "/status/200?apikey=kong", headers = { - ["Host"] = "datadog6.com" + ["Host"] = "datadog6.test" } }) assert.res_status(200, res) @@ -387,7 +387,7 @@ describe("Plugin: datadog (log)", function() method = "GET", path = "/status/200", headers = { - ["Host"] = "datadog2.com" + ["Host"] = "datadog2.test" } }) assert.res_status(200, res) @@ -406,7 +406,7 @@ describe("Plugin: datadog (log)", function() method = "GET", path = "/status/200", headers = { - ["Host"] = "datadog3.com" + ["Host"] = "datadog3.test" } }) assert.res_status(200, res) @@ -425,7 +425,7 @@ describe("Plugin: datadog (log)", function() method = "GET", path = "/status/200?apikey=kong", headers = { - ["Host"] = "datadog5.com" + ["Host"] = "datadog5.test" } }) assert.res_status(200, res) @@ -448,7 +448,7 @@ describe("Plugin: datadog (log)", function() method = "GET", path = "/status/200?apikey=kong", headers = { - ["Host"] = "datadog7.com" + ["Host"] = "datadog7.test" } }) assert.res_status(200, res) @@ -473,7 +473,7 @@ describe("Plugin: datadog (log)", function() method = "GET", path = "/status/200?apikey=kong", headers = { - ["Host"] = "datadog7.com" + ["Host"] = "datadog7.test" } }) assert.res_status(200, res) @@ -490,7 +490,7 @@ describe("Plugin: datadog (log)", function() method = "GET", path = "/NonMatch", headers = { - ["Host"] = "fakedns.com" + ["Host"] = "fakedns.test" } }) @@ -502,7 +502,7 @@ describe("Plugin: datadog (log)", function() method = "GET", path = "/status/200", headers = { - ["Host"] = "datadog3.com" + ["Host"] = "datadog3.test" } }) @@ -517,7 +517,7 @@ describe("Plugin: datadog (log)", function() method = "GET", path = "/test_schema", headers = { - ["Host"] = "datadog8.com" + ["Host"] = "datadog8.test" } }) diff --git a/spec/03-plugins/09-key-auth/02-access_spec.lua b/spec/03-plugins/09-key-auth/02-access_spec.lua index f176e7f246ca..c75904f057f1 100644 --- a/spec/03-plugins/09-key-auth/02-access_spec.lua +++ b/spec/03-plugins/09-key-auth/02-access_spec.lua @@ -31,27 +31,27 @@ for _, strategy in helpers.each_strategy() do } local route1 = bp.routes:insert { - hosts = { "key-auth1.com" }, + hosts = { "key-auth1.test" }, } local route2 = 
bp.routes:insert { - hosts = { "key-auth2.com" }, + hosts = { "key-auth2.test" }, } local route3 = bp.routes:insert { - hosts = { "key-auth3.com" }, + hosts = { "key-auth3.test" }, } local route4 = bp.routes:insert { - hosts = { "key-auth4.com" }, + hosts = { "key-auth4.test" }, } local route5 = bp.routes:insert { - hosts = { "key-auth5.com" }, + hosts = { "key-auth5.test" }, } local route6 = bp.routes:insert { - hosts = { "key-auth6.com" }, + hosts = { "key-auth6.test" }, } local service7 = bp.services:insert{ @@ -61,21 +61,21 @@ for _, strategy in helpers.each_strategy() do } local route7 = bp.routes:insert { - hosts = { "key-auth7.com" }, + hosts = { "key-auth7.test" }, service = service7, strip_path = true, } local route8 = bp.routes:insert { - hosts = { "key-auth8.com" }, + hosts = { "key-auth8.test" }, } local route9 = bp.routes:insert { - hosts = { "key-auth9.com" }, + hosts = { "key-auth9.test" }, } local route10 = bp.routes:insert { - hosts = { "key-auth10.com" }, + hosts = { "key-auth10.test" }, } local route_grpc = assert(bp.routes:insert { @@ -197,7 +197,7 @@ for _, strategy in helpers.each_strategy() do method = "OPTIONS", path = "/status/200", headers = { - ["Host"] = "key-auth7.com" + ["Host"] = "key-auth7.test" } }) assert.res_status(200, res) @@ -207,7 +207,7 @@ for _, strategy in helpers.each_strategy() do method = "OPTIONS", path = "/status/200", headers = { - ["Host"] = "key-auth1.com" + ["Host"] = "key-auth1.test" } }) assert.res_status(401, res) @@ -221,7 +221,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/status/200", headers = { - ["Host"] = "key-auth1.com" + ["Host"] = "key-auth1.test" } }) local body = assert.res_status(401, res) @@ -234,7 +234,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/status/200", headers = { - ["Host"] = "key-auth1.com", + ["Host"] = "key-auth1.test", ["apikey"] = "", } }) @@ -248,7 +248,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/status/200?apikey", headers = { - ["Host"] = "key-auth1.com", + ["Host"] = "key-auth1.test", } }) local body = assert.res_status(401, res) @@ -261,7 +261,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/status/200", headers = { - ["Host"] = "key-auth1.com" + ["Host"] = "key-auth1.test" } }) res:read_body() @@ -275,7 +275,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/request?apikey=kong", headers = { - ["Host"] = "key-auth1.com", + ["Host"] = "key-auth1.test", } }) assert.res_status(200, res) @@ -285,7 +285,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/status/200?apikey=123", headers = { - ["Host"] = "key-auth1.com" + ["Host"] = "key-auth1.test" } }) local body = assert.res_status(401, res) @@ -298,7 +298,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/status/200?apikey=kong&apikey=kong", headers = { - ["Host"] = "key-auth1.com" + ["Host"] = "key-auth1.test" } }) local body = assert.res_status(401, res) @@ -315,7 +315,7 @@ for _, strategy in helpers.each_strategy() do local res = assert(proxy_client:send { path = "/request", headers = { - ["Host"] = "key-auth5.com", + ["Host"] = "key-auth5.test", ["Content-Type"] = type, }, body = { @@ -328,7 +328,7 @@ for _, strategy in helpers.each_strategy() do local res = assert(proxy_client:send { path = "/request?apikey=kong", headers = { - ["Host"] = "key-auth5.com", + ["Host"] = "key-auth5.test", ["Content-Type"] = type, }, body = { @@ -341,7 +341,7 @@ for _, strategy in 
helpers.each_strategy() do local res = assert(proxy_client:send { path = "/request?apikey=kong", headers = { - ["Host"] = "key-auth5.com", + ["Host"] = "key-auth5.test", ["Content-Type"] = type, ["apikey"] = "kong", }, @@ -355,7 +355,7 @@ for _, strategy in helpers.each_strategy() do local res = assert(proxy_client:send { path = "/status/200", headers = { - ["Host"] = "key-auth5.com", + ["Host"] = "key-auth5.test", ["Content-Type"] = type, }, body = { @@ -376,7 +376,7 @@ for _, strategy in helpers.each_strategy() do method = "POST", path = "/status/200", headers = { - ["Host"] = "key-auth5.com", + ["Host"] = "key-auth5.test", ["Content-Type"] = type, }, body = { @@ -395,7 +395,7 @@ for _, strategy in helpers.each_strategy() do local res = proxy_client:post("/status/200", { body = "apikey=kong&apikey=kong", headers = { - ["Host"] = "key-auth5.com", + ["Host"] = "key-auth5.test", ["Content-Type"] = type, }, }) @@ -409,7 +409,7 @@ for _, strategy in helpers.each_strategy() do local res = proxy_client:post("/status/200", { body = "apikey[]=kong&apikey[]=kong", headers = { - ["Host"] = "key-auth5.com", + ["Host"] = "key-auth5.test", ["Content-Type"] = type, }, }) @@ -423,7 +423,7 @@ for _, strategy in helpers.each_strategy() do local res = proxy_client:post("/status/200", { body = "apikey[1]=kong&apikey[1]=kong", headers = { - ["Host"] = "key-auth5.com", + ["Host"] = "key-auth5.test", ["Content-Type"] = type, }, }) @@ -443,7 +443,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/request", headers = { - ["Host"] = "key-auth1.com", + ["Host"] = "key-auth1.test", ["apikey"] = "kong" } }) @@ -454,7 +454,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/status/200", headers = { - ["Host"] = "key-auth1.com", + ["Host"] = "key-auth1.test", ["apikey"] = "123" } }) @@ -492,7 +492,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/request", headers = { - ["Host"] = "key-auth8.com", + ["Host"] = "key-auth8.test", ["api_key"] = "kong" } }) @@ -502,7 +502,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/request", headers = { - ["Host"] = "key-auth8.com", + ["Host"] = "key-auth8.test", ["api-key"] = "kong" } }) @@ -514,7 +514,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/status/200", headers = { - ["Host"] = "key-auth8.com", + ["Host"] = "key-auth8.test", ["api_key"] = "123" } }) @@ -527,7 +527,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/status/200", headers = { - ["Host"] = "key-auth8.com", + ["Host"] = "key-auth8.test", ["api-key"] = "123" } }) @@ -544,7 +544,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/request?apikey=kong", headers = { - ["Host"] = "key-auth1.com", + ["Host"] = "key-auth1.test", } }) local body = assert.res_status(200, res) @@ -566,37 +566,37 @@ for _, strategy in helpers.each_strategy() do local harness = { uri_args = { -- query string { - headers = { Host = "key-auth1.com" }, + headers = { Host = "key-auth1.test" }, path = "/request?apikey=kong", method = "GET", }, { - headers = { Host = "key-auth2.com" }, + headers = { Host = "key-auth2.test" }, path = "/request?apikey=kong", method = "GET", } }, headers = { { - headers = { Host = "key-auth1.com", apikey = "kong" }, + headers = { Host = "key-auth1.test", apikey = "kong" }, path = "/request", method = "GET", }, { - headers = { Host = "key-auth2.com", apikey = "kong" }, + headers = { Host = "key-auth2.test", apikey = "kong" }, path = 
"/request", method = "GET", }, }, ["post_data.params"] = { { - headers = { Host = "key-auth5.com" }, + headers = { Host = "key-auth5.test" }, body = { apikey = "kong" }, method = "POST", path = "/request", }, { - headers = { Host = "key-auth6.com" }, + headers = { Host = "key-auth6.test" }, body = { apikey = "kong" }, method = "POST", path = "/request", @@ -640,7 +640,7 @@ for _, strategy in helpers.each_strategy() do method = "POST", path = "/request", headers = { - Host = "key-auth6.com", + Host = "key-auth6.test", ["Content-Type"] = content_type, }, body = { apikey = "kong", foo = "bar" }, @@ -655,7 +655,7 @@ for _, strategy in helpers.each_strategy() do local res = assert(proxy_client:send { path = "/status/200", headers = { - ["Host"] = "key-auth6.com", + ["Host"] = "key-auth6.test", ["Content-Type"] = "text/plain", }, body = "foobar", @@ -674,7 +674,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/request?apikey=kong", headers = { - ["Host"] = "key-auth3.com", + ["Host"] = "key-auth3.test", } }) local body = cjson.decode(assert.res_status(200, res)) @@ -687,7 +687,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/request", headers = { - ["Host"] = "key-auth3.com" + ["Host"] = "key-auth3.test" } }) local body = cjson.decode(assert.res_status(200, res)) @@ -700,7 +700,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/request", headers = { - ["Host"] = "key-auth10.com" + ["Host"] = "key-auth10.test" } }) local body = cjson.decode(assert.res_status(200, res)) @@ -712,7 +712,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/request", headers = { - ["Host"] = "key-auth4.com" + ["Host"] = "key-auth4.test" } }) assert.response(res).has.status(500) @@ -738,7 +738,7 @@ for _, strategy in helpers.each_strategy() do }) local route1 = bp.routes:insert { - hosts = { "logical-and.com" }, + hosts = { "logical-and.test" }, } local service = bp.services:insert { @@ -746,7 +746,7 @@ for _, strategy in helpers.each_strategy() do } local route2 = bp.routes:insert { - hosts = { "logical-or.com" }, + hosts = { "logical-or.test" }, service = service, } @@ -822,7 +822,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/request", headers = { - ["Host"] = "logical-and.com", + ["Host"] = "logical-and.test", ["apikey"] = "Mouse", ["Authorization"] = "Basic QWxhZGRpbjpPcGVuU2VzYW1l", } @@ -839,7 +839,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/request", headers = { - ["Host"] = "logical-and.com", + ["Host"] = "logical-and.test", ["apikey"] = "Mouse", } }) @@ -851,7 +851,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/request", headers = { - ["Host"] = "logical-and.com", + ["Host"] = "logical-and.test", ["Authorization"] = "Basic QWxhZGRpbjpPcGVuU2VzYW1l", } }) @@ -863,7 +863,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/request", headers = { - ["Host"] = "logical-and.com", + ["Host"] = "logical-and.test", } }) assert.response(res).has.status(401) @@ -877,7 +877,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/request", headers = { - ["Host"] = "logical-or.com", + ["Host"] = "logical-or.test", ["apikey"] = "Mouse", ["Authorization"] = "Basic QWxhZGRpbjpPcGVuU2VzYW1l", } @@ -894,7 +894,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/request", headers = { - ["Host"] = "logical-or.com", + ["Host"] = "logical-or.test", ["apikey"] = "Mouse", } }) @@ 
-910,7 +910,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/request", headers = { - ["Host"] = "logical-or.com", + ["Host"] = "logical-or.test", ["Authorization"] = "Basic QWxhZGRpbjpPcGVuU2VzYW1l", } }) @@ -926,7 +926,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/request", headers = { - ["Host"] = "logical-or.com", + ["Host"] = "logical-or.test", } }) assert.response(res).has.status(200) @@ -955,7 +955,7 @@ for _, strategy in helpers.each_strategy() do }) local r = bp.routes:insert { - hosts = { "key-ttl.com" }, + hosts = { "key-ttl.test" }, } bp.plugins:insert { @@ -995,7 +995,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/status/200", headers = { - ["Host"] = "key-ttl.com", + ["Host"] = "key-ttl.test", ["apikey"] = "kong", } }) @@ -1011,7 +1011,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/status/200", headers = { - ["Host"] = "key-ttl.com", + ["Host"] = "key-ttl.test", ["apikey"] = "kong", } }) diff --git a/spec/03-plugins/09-key-auth/03-invalidations_spec.lua b/spec/03-plugins/09-key-auth/03-invalidations_spec.lua index 8a8485c7616b..6532a3cc5df7 100644 --- a/spec/03-plugins/09-key-auth/03-invalidations_spec.lua +++ b/spec/03-plugins/09-key-auth/03-invalidations_spec.lua @@ -18,7 +18,7 @@ for _, strategy in helpers.each_strategy() do }) local route = bp.routes:insert { - hosts = { "key-auth.com" }, + hosts = { "key-auth.test" }, } bp.plugins:insert { @@ -59,7 +59,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/", headers = { - ["Host"] = "key-auth.com", + ["Host"] = "key-auth.test", ["apikey"] = "kong" } }) @@ -87,7 +87,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/", headers = { - ["Host"] = "key-auth.com", + ["Host"] = "key-auth.test", ["apikey"] = "kong" } }) @@ -100,7 +100,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/", headers = { - ["Host"] = "key-auth.com", + ["Host"] = "key-auth.test", ["apikey"] = "kong" } }) @@ -129,7 +129,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/", headers = { - ["Host"] = "key-auth.com", + ["Host"] = "key-auth.test", ["apikey"] = "kong" } }) @@ -142,7 +142,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/", headers = { - ["Host"] = "key-auth.com", + ["Host"] = "key-auth.test", ["apikey"] = "kong" } }) @@ -177,7 +177,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/", headers = { - ["Host"] = "key-auth.com", + ["Host"] = "key-auth.test", ["apikey"] = "kong" } }) @@ -187,7 +187,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/", headers = { - ["Host"] = "key-auth.com", + ["Host"] = "key-auth.test", ["apikey"] = "kong-updated" } }) diff --git a/spec/03-plugins/09-key-auth/04-hybrid_mode_spec.lua b/spec/03-plugins/09-key-auth/04-hybrid_mode_spec.lua index ba3a0faaa2aa..7fb4bd9ed0b9 100644 --- a/spec/03-plugins/09-key-auth/04-hybrid_mode_spec.lua +++ b/spec/03-plugins/09-key-auth/04-hybrid_mode_spec.lua @@ -17,7 +17,7 @@ for _, strategy in helpers.each_strategy({"postgres"}) do }) local r = bp.routes:insert { - hosts = { "key-ttl-hybrid.com" }, + hosts = { "key-ttl-hybrid.test" }, } bp.plugins:insert { @@ -89,7 +89,7 @@ for _, strategy in helpers.each_strategy({"postgres"}) do method = "GET", path = "/status/200", headers = { - ["Host"] = "key-ttl-hybrid.com", + ["Host"] = "key-ttl-hybrid.test", ["apikey"] = "kong", } }) @@ -109,7 +109,7 @@ for 
_, strategy in helpers.each_strategy({"postgres"}) do method = "GET", path = "/status/200", headers = { - ["Host"] = "key-ttl-hybrid.com", + ["Host"] = "key-ttl-hybrid.test", ["apikey"] = "kong", } }) diff --git a/spec/03-plugins/10-basic-auth/03-access_spec.lua b/spec/03-plugins/10-basic-auth/03-access_spec.lua index acf2c4374d13..097943753f3a 100644 --- a/spec/03-plugins/10-basic-auth/03-access_spec.lua +++ b/spec/03-plugins/10-basic-auth/03-access_spec.lua @@ -26,23 +26,23 @@ for _, strategy in helpers.each_strategy() do } local route1 = bp.routes:insert { - hosts = { "basic-auth1.com" }, + hosts = { "basic-auth1.test" }, } local route2 = bp.routes:insert { - hosts = { "basic-auth2.com" }, + hosts = { "basic-auth2.test" }, } local route3 = bp.routes:insert { - hosts = { "basic-auth3.com" }, + hosts = { "basic-auth3.test" }, } local route4 = bp.routes:insert { - hosts = { "basic-auth4.com" }, + hosts = { "basic-auth4.test" }, } local route5 = bp.routes:insert { - hosts = { "basic-auth5.com" }, + hosts = { "basic-auth5.test" }, } local route_grpc = assert(bp.routes:insert { @@ -138,7 +138,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/status/200", headers = { - ["Host"] = "basic-auth1.com" + ["Host"] = "basic-auth1.test" } }) local body = assert.res_status(401, res) @@ -152,7 +152,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/status/200", headers = { - ["Host"] = "basic-auth1.com" + ["Host"] = "basic-auth1.test" } }) assert.res_status(401, res) @@ -169,7 +169,7 @@ for _, strategy in helpers.each_strategy() do path = "/status/200", headers = { ["Authorization"] = "foobar", - ["Host"] = "basic-auth1.com" + ["Host"] = "basic-auth1.test" } }) local body = assert.res_status(401, res) @@ -184,7 +184,7 @@ for _, strategy in helpers.each_strategy() do path = "/status/200", headers = { ["Proxy-Authorization"] = "foobar", - ["Host"] = "basic-auth1.com" + ["Host"] = "basic-auth1.test" } }) local body = assert.res_status(401, res) @@ -199,7 +199,7 @@ for _, strategy in helpers.each_strategy() do path = "/status/200", headers = { ["Authorization"] = "Basic a29uZw==", - ["Host"] = "basic-auth1.com" + ["Host"] = "basic-auth1.test" } }) local body = assert.res_status(401, res) @@ -214,7 +214,7 @@ for _, strategy in helpers.each_strategy() do path = "/status/200", headers = { ["Authorization"] = "Basic Ym9i", - ["Host"] = "basic-auth1.com" + ["Host"] = "basic-auth1.test" } }) local body = assert.res_status(401, res) @@ -249,7 +249,7 @@ for _, strategy in helpers.each_strategy() do path = "/status/200", headers = { ["Authorization"] = "Basic Ym9iOmtvbmc=", - ["Host"] = "basic-auth1.com" + ["Host"] = "basic-auth1.test" } }) assert.res_status(200, res) @@ -261,7 +261,7 @@ for _, strategy in helpers.each_strategy() do path = "/request", headers = { ["Authorization"] = "Basic dXNlcjEyMzpwYXNzd29yZDEyMw==", - ["Host"] = "basic-auth1.com" + ["Host"] = "basic-auth1.test" } }) local body = cjson.decode(assert.res_status(200, res)) @@ -275,7 +275,7 @@ for _, strategy in helpers.each_strategy() do path = "/request", headers = { ["Authorization"] = "Basic dXNlcjMyMTpwYXNzd29yZDoxMjM=", - ["Host"] = "basic-auth1.com" + ["Host"] = "basic-auth1.test" } }) local body = cjson.decode(assert.res_status(200, res)) @@ -289,7 +289,7 @@ for _, strategy in helpers.each_strategy() do path = "/status/200", headers = { ["Authorization"] = "Basic adXNlcjEyMzpwYXNzd29yZDEyMw==", - ["Host"] = "basic-auth1.com" + ["Host"] = "basic-auth1.test" } }) local body = 
assert.res_status(401, res) @@ -304,7 +304,7 @@ for _, strategy in helpers.each_strategy() do path = "/status/200", headers = { ["Proxy-Authorization"] = "Basic Ym9iOmtvbmc=", - ["Host"] = "basic-auth1.com" + ["Host"] = "basic-auth1.test" } }) assert.res_status(200, res) @@ -320,7 +320,7 @@ for _, strategy in helpers.each_strategy() do path = "/request", headers = { ["Authorization"] = "Basic Ym9iOmtvbmc=", - ["Host"] = "basic-auth1.com" + ["Host"] = "basic-auth1.test" } }) local body = assert.res_status(200, res) @@ -340,7 +340,7 @@ for _, strategy in helpers.each_strategy() do path = "/request", headers = { ["Authorization"] = "Basic Ym9iOmtvbmc=", - ["Host"] = "basic-auth1.com" + ["Host"] = "basic-auth1.test" } }) local body = assert.res_status(200, res) @@ -354,7 +354,7 @@ for _, strategy in helpers.each_strategy() do path = "/request", headers = { ["Authorization"] = "Basic Ym9iOmtvbmc=", - ["Host"] = "basic-auth2.com" + ["Host"] = "basic-auth2.test" } }) local body = assert.res_status(200, res) @@ -373,7 +373,7 @@ for _, strategy in helpers.each_strategy() do path = "/request", headers = { ["Authorization"] = "Basic dXNlcjEyMzpwYXNzd29yZDEyMw==", - ["Host"] = "basic-auth3.com" + ["Host"] = "basic-auth3.test" } }) local body = cjson.decode(assert.res_status(200, res)) @@ -387,7 +387,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/request", headers = { - ["Host"] = "basic-auth3.com" + ["Host"] = "basic-auth3.test" } }) local body = cjson.decode(assert.res_status(200, res)) @@ -401,7 +401,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/request", headers = { - ["Host"] = "basic-auth5.com" + ["Host"] = "basic-auth5.test" } }) local body = cjson.decode(assert.res_status(200, res)) @@ -414,7 +414,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/request", headers = { - ["Host"] = "basic-auth4.com" + ["Host"] = "basic-auth4.test" } }) assert.response(res).has.status(500) @@ -461,12 +461,12 @@ for _, strategy in helpers.each_strategy() do } local route1 = bp.routes:insert { - hosts = { "logical-and.com" }, + hosts = { "logical-and.test" }, service = service1, } local route2 = bp.routes:insert { - hosts = { "logical-or.com" }, + hosts = { "logical-or.test" }, service = service2, } @@ -530,7 +530,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/request", headers = { - ["Host"] = "logical-and.com", + ["Host"] = "logical-and.test", ["apikey"] = "Mouse", ["Authorization"] = "Basic QWxhZGRpbjpPcGVuU2VzYW1l", } @@ -547,7 +547,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/request", headers = { - ["Host"] = "logical-and.com", + ["Host"] = "logical-and.test", ["apikey"] = "Mouse", } }) @@ -559,7 +559,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/request", headers = { - ["Host"] = "logical-and.com", + ["Host"] = "logical-and.test", ["Authorization"] = "Basic QWxhZGRpbjpPcGVuU2VzYW1l", } }) @@ -571,7 +571,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/request", headers = { - ["Host"] = "logical-and.com", + ["Host"] = "logical-and.test", } }) assert.response(res).has.status(401) @@ -586,7 +586,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/request", headers = { - ["Host"] = "logical-or.com", + ["Host"] = "logical-or.test", ["apikey"] = "Mouse", ["Authorization"] = "Basic QWxhZGRpbjpPcGVuU2VzYW1l", } @@ -603,7 +603,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", 
path = "/request", headers = { - ["Host"] = "logical-or.com", + ["Host"] = "logical-or.test", ["apikey"] = "Mouse", } }) @@ -619,7 +619,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/request", headers = { - ["Host"] = "logical-or.com", + ["Host"] = "logical-or.test", ["Authorization"] = "Basic QWxhZGRpbjpPcGVuU2VzYW1l", } }) @@ -635,7 +635,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/request", headers = { - ["Host"] = "logical-or.com", + ["Host"] = "logical-or.test", } }) assert.response(res).has.status(200) @@ -671,7 +671,7 @@ for _, strategy in helpers.each_strategy() do } local route = bp.routes:insert { - hosts = { "anonymous-with-username.com" }, + hosts = { "anonymous-with-username.test" }, service = service, } @@ -708,7 +708,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/request", headers = { - ["Host"] = "anonymous-with-username.com", + ["Host"] = "anonymous-with-username.test", }, }) assert.response(res).has.status(200) @@ -729,7 +729,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/request", headers = { - ["Host"] = "anonymous-with-username.com", + ["Host"] = "anonymous-with-username.test", } }) assert.res_status(500, res) diff --git a/spec/03-plugins/10-basic-auth/04-invalidations_spec.lua b/spec/03-plugins/10-basic-auth/04-invalidations_spec.lua index 334664362124..906a693685ed 100644 --- a/spec/03-plugins/10-basic-auth/04-invalidations_spec.lua +++ b/spec/03-plugins/10-basic-auth/04-invalidations_spec.lua @@ -45,7 +45,7 @@ for _, strategy in helpers.each_strategy() do if not route then route = admin_api.routes:insert { - hosts = { "basic-auth.com" }, + hosts = { "basic-auth.test" }, } end @@ -78,7 +78,7 @@ for _, strategy in helpers.each_strategy() do path = "/", headers = { ["Authorization"] = "Basic Ym9iOmtvbmc=", - ["Host"] = "basic-auth.com" + ["Host"] = "basic-auth.test" } }) assert.res_status(200, res) @@ -108,7 +108,7 @@ for _, strategy in helpers.each_strategy() do path = "/", headers = { ["Authorization"] = "Basic Ym9iOmtvbmc=", - ["Host"] = "basic-auth.com" + ["Host"] = "basic-auth.test" } }) assert.res_status(401, res) @@ -121,7 +121,7 @@ for _, strategy in helpers.each_strategy() do path = "/", headers = { ["Authorization"] = "Basic Ym9iOmtvbmc=", - ["Host"] = "basic-auth.com" + ["Host"] = "basic-auth.test" } }) assert.res_status(200, res) @@ -151,7 +151,7 @@ for _, strategy in helpers.each_strategy() do path = "/", headers = { ["Authorization"] = "Basic Ym9iOmtvbmc=", - ["Host"] = "basic-auth.com" + ["Host"] = "basic-auth.test" } }) assert.res_status(401, res) @@ -164,7 +164,7 @@ for _, strategy in helpers.each_strategy() do path = "/", headers = { ["Authorization"] = "Basic Ym9iOmtvbmc=", - ["Host"] = "basic-auth.com" + ["Host"] = "basic-auth.test" } }) assert.res_status(200, res) @@ -201,7 +201,7 @@ for _, strategy in helpers.each_strategy() do path = "/", headers = { ["Authorization"] = "Basic Ym9iOmtvbmc=", - ["Host"] = "basic-auth.com" + ["Host"] = "basic-auth.test" } }) assert.res_status(401, res) @@ -211,7 +211,7 @@ for _, strategy in helpers.each_strategy() do path = "/", headers = { ["Authorization"] = "Basic Ym9iOmtvbmctdXBkYXRlZA==", - ["Host"] = "basic-auth.com" + ["Host"] = "basic-auth.test" } }) assert.res_status(200, res) diff --git a/spec/03-plugins/11-correlation-id/01-access_spec.lua b/spec/03-plugins/11-correlation-id/01-access_spec.lua index 3bd73572f2fc..65de363f8d18 100644 --- 
a/spec/03-plugins/11-correlation-id/01-access_spec.lua +++ b/spec/03-plugins/11-correlation-id/01-access_spec.lua @@ -40,23 +40,23 @@ for _, strategy in helpers.each_strategy() do local bp = helpers.get_db_utils(strategy, nil, { "error-generator-last" }) local route1 = bp.routes:insert { - hosts = { "correlation1.com" }, + hosts = { "correlation1.test" }, } local route2 = bp.routes:insert { - hosts = { "correlation2.com" }, + hosts = { "correlation2.test" }, } local route3 = bp.routes:insert { - hosts = { "correlation3.com" }, + hosts = { "correlation3.test" }, } local route4 = bp.routes:insert { - hosts = { "correlation-tracker.com" }, + hosts = { "correlation-tracker.test" }, } local route5 = bp.routes:insert { - hosts = { "correlation5.com" }, + hosts = { "correlation5.test" }, } local mock_service = bp.services:insert { @@ -65,12 +65,12 @@ for _, strategy in helpers.each_strategy() do } local route6 = bp.routes:insert { - hosts = { "correlation-timeout.com" }, + hosts = { "correlation-timeout.test" }, service = mock_service, } local route7 = bp.routes:insert { - hosts = { "correlation-error.com" }, + hosts = { "correlation-error.test" }, } local route_grpc = assert(bp.routes:insert { @@ -83,7 +83,7 @@ for _, strategy in helpers.each_strategy() do }) local route_serializer = bp.routes:insert { - hosts = { "correlation-serializer.com" }, + hosts = { "correlation-serializer.test" }, } bp.plugins:insert { @@ -203,7 +203,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/request", headers = { - ["Host"] = "correlation1.com" + ["Host"] = "correlation1.test" } }) local body = assert.res_status(200, res) @@ -215,7 +215,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/request", headers = { - ["Host"] = "correlation1.com" + ["Host"] = "correlation1.test" } }) @@ -271,7 +271,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/request", headers = { - ["Host"] = "correlation3.com" + ["Host"] = "correlation3.test" } }) local body = assert.res_status(200, res) @@ -283,7 +283,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/request", headers = { - ["Host"] = "correlation3.com" + ["Host"] = "correlation3.test" } }) body = assert.res_status(200, res) @@ -300,7 +300,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/request", headers = { - ["Host"] = "correlation-tracker.com" + ["Host"] = "correlation-tracker.test" } }) local body = assert.res_status(200, res) @@ -312,7 +312,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/request", headers = { - ["Host"] = "correlation-tracker.com" + ["Host"] = "correlation-tracker.test" } }) body = assert.res_status(200, res) @@ -329,7 +329,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/request", headers = { - ["Host"] = "correlation3.com" + ["Host"] = "correlation3.test" } }) local body = assert.res_status(200, res) @@ -344,7 +344,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/request", headers = { - ["Host"] = "correlation-timeout.com" + ["Host"] = "correlation-timeout.test" } }) assert.res_status(502, res) @@ -355,7 +355,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/request", headers = { - ["Host"] = "correlation-error.com" + ["Host"] = "correlation-error.test" } }) assert.res_status(500, res) @@ -366,7 +366,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/request", headers = { - ["Host"] = 
"correlation2.com" + ["Host"] = "correlation2.test" } }) assert.res_status(200, res) @@ -377,7 +377,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/request", headers = { - ["Host"] = "correlation2.com" + ["Host"] = "correlation2.test" } }) local body = assert.res_status(200, res) @@ -392,7 +392,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/request", headers = { - ["Host"] = "correlation2.com", + ["Host"] = "correlation2.test", ["Kong-Request-ID"] = "foobar" } }) @@ -407,7 +407,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/request", headers = { - ["Host"] = "correlation2.com", + ["Host"] = "correlation2.test", ["Kong-Request-ID"] = "" } }) @@ -422,7 +422,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/request", headers = { - ["Host"] = "correlation2.com", + ["Host"] = "correlation2.test", ["Kong-Request-ID"] = " " } }) @@ -437,7 +437,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/request", headers = { - ["Host"] = "correlation5.com", + ["Host"] = "correlation5.test", } }) assert.response(res).has.status(418, res) @@ -450,7 +450,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/request", headers = { - ["Host"] = "correlation5.com", + ["Host"] = "correlation5.test", ["kong-request-id"] = "my very personal id", } }) @@ -472,7 +472,7 @@ for _, strategy in helpers.each_strategy() do local correlation_id = "1234" local r = proxy_client:get("/", { headers = { - host = "correlation-serializer.com", + host = "correlation-serializer.test", ["Kong-Request-ID"] = correlation_id, }, }) diff --git a/spec/03-plugins/12-request-size-limiting/01-access_spec.lua b/spec/03-plugins/12-request-size-limiting/01-access_spec.lua index eeef6f0a233c..b3bfa3aa45a5 100644 --- a/spec/03-plugins/12-request-size-limiting/01-access_spec.lua +++ b/spec/03-plugins/12-request-size-limiting/01-access_spec.lua @@ -23,7 +23,7 @@ for _, strategy in helpers.each_strategy() do }) local route = bp.routes:insert { - hosts = { "limit.com" }, + hosts = { "limit.test" }, } bp.plugins:insert { @@ -35,7 +35,7 @@ for _, strategy in helpers.each_strategy() do } local route2 = bp.routes:insert { - hosts = { "required.com" }, + hosts = { "required.test" }, } bp.plugins:insert { @@ -49,7 +49,7 @@ for _, strategy in helpers.each_strategy() do for _, unit in ipairs(size_units) do local route = bp.routes:insert { - hosts = { string.format("limit_%s.com", unit) }, + hosts = { string.format("limit_%s.test", unit) }, } bp.plugins:insert { @@ -86,7 +86,7 @@ for _, strategy in helpers.each_strategy() do path = "/request", body = body, headers = { - ["Host"] = "limit.com", + ["Host"] = "limit.test", ["Content-Length"] = #body } }) @@ -100,7 +100,7 @@ for _, strategy in helpers.each_strategy() do path = "/request", body = body, headers = { - ["Host"] = "limit.com", + ["Host"] = "limit.test", ["Expect"] = "100-continue", ["Content-Length"] = #body } @@ -115,7 +115,7 @@ for _, strategy in helpers.each_strategy() do path = "/request", body = body, headers = { - ["Host"] = "limit.com", + ["Host"] = "limit.test", ["Content-Length"] = #body } }) @@ -132,7 +132,7 @@ for _, strategy in helpers.each_strategy() do path = "/request", body = body, headers = { - ["Host"] = "limit.com", + ["Host"] = "limit.test", ["Expect"] = "100-continue", ["Content-Length"] = #body } @@ -151,7 +151,7 @@ for _, strategy in helpers.each_strategy() do path = "/request", body = body, headers = { - ["Host"] = 
string.format("limit_%s.com", unit), + ["Host"] = string.format("limit_%s.test", unit), ["Content-Length"] = #body } }) @@ -170,7 +170,7 @@ for _, strategy in helpers.each_strategy() do path = "/request", body = body, headers = { - ["Host"] = string.format("limit_%s.com", unit), + ["Host"] = string.format("limit_%s.test", unit), ["Content-Length"] = #body } }) @@ -188,7 +188,7 @@ for _, strategy in helpers.each_strategy() do path = "/request", body = body, headers = { - ["Host"] = "limit.com" + ["Host"] = "limit.test" } }) assert.res_status(200, res) @@ -202,7 +202,7 @@ for _, strategy in helpers.each_strategy() do path = "/request", body = body, headers = { - ["Host"] = "limit.com", + ["Host"] = "limit.test", ["Expect"] = "100-continue" } }) @@ -217,7 +217,7 @@ for _, strategy in helpers.each_strategy() do path = "/request", body = body, headers = { - ["Host"] = "limit.com" + ["Host"] = "limit.test" } }) local body = assert.res_status(413, res) @@ -234,7 +234,7 @@ for _, strategy in helpers.each_strategy() do path = "/request", body = body, headers = { - ["Host"] = "limit.com", + ["Host"] = "limit.test", ["Expect"] = "100-continue" } }) @@ -253,7 +253,7 @@ for _, strategy in helpers.each_strategy() do path = "/request", body = body, headers = { - ["Host"] = string.format("limit_%s.com", unit), + ["Host"] = string.format("limit_%s.test", unit), } }) local body = assert.res_status(413, res) @@ -272,7 +272,7 @@ for _, strategy in helpers.each_strategy() do path = "/request", body = body, headers = { - ["Host"] = string.format("limit_%s.com", unit), + ["Host"] = string.format("limit_%s.test", unit), } }) assert.res_status(200, res) @@ -287,7 +287,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", -- if POST, then lua-rsty-http adds content-length anyway path = "/request", headers = { - ["Host"] = "required.com", + ["Host"] = "required.test", } }) assert.response(res).has.status(411) diff --git a/spec/03-plugins/13-cors/01-access_spec.lua b/spec/03-plugins/13-cors/01-access_spec.lua index 7113948c57af..7bba3a82ce88 100644 --- a/spec/03-plugins/13-cors/01-access_spec.lua +++ b/spec/03-plugins/13-cors/01-access_spec.lua @@ -236,55 +236,55 @@ for _, strategy in helpers.each_strategy() do local bp = helpers.get_db_utils(strategy, nil, { "error-generator-last" }) local route1 = bp.routes:insert({ - hosts = { "cors1.com" }, + hosts = { "cors1.test" }, }) local route2 = bp.routes:insert({ - hosts = { "cors2.com" }, + hosts = { "cors2.test" }, }) local route3 = bp.routes:insert({ - hosts = { "cors3.com" }, + hosts = { "cors3.test" }, }) local route4 = bp.routes:insert({ - hosts = { "cors4.com" }, + hosts = { "cors4.test" }, }) local route5 = bp.routes:insert({ - hosts = { "cors5.com" }, + hosts = { "cors5.test" }, }) local route6 = bp.routes:insert({ - hosts = { "cors6.com" }, + hosts = { "cors6.test" }, }) local route7 = bp.routes:insert({ - hosts = { "cors7.com" }, + hosts = { "cors7.test" }, }) local route8 = bp.routes:insert({ - hosts = { "cors-empty-origins.com" }, + hosts = { "cors-empty-origins.test" }, }) local route9 = bp.routes:insert({ - hosts = { "cors9.com" }, + hosts = { "cors9.test" }, }) local route10 = bp.routes:insert({ - hosts = { "cors10.com" }, + hosts = { "cors10.test" }, }) local route11 = bp.routes:insert({ - hosts = { "cors11.com" }, + hosts = { "cors11.test" }, }) local route12 = bp.routes:insert({ - hosts = { "cors12.com" }, + hosts = { "cors12.test" }, }) local route13 = bp.routes:insert({ - hosts = { "cors13.com" }, + hosts = { "cors13.test" }, }) local 
mock_upstream = bp.services:insert { @@ -293,7 +293,7 @@ for _, strategy in helpers.each_strategy() do } local route_upstream = bp.routes:insert({ - hosts = { "cors-upstream.com" }, + hosts = { "cors-upstream.test" }, service = mock_upstream }) @@ -303,12 +303,12 @@ for _, strategy in helpers.each_strategy() do } local route_timeout = bp.routes:insert { - hosts = { "cors-timeout.com" }, + hosts = { "cors-timeout.test" }, service = mock_service, } local route_error = bp.routes:insert { - hosts = { "cors-error.com" }, + hosts = { "cors-error.test" }, } bp.plugins:insert { @@ -320,7 +320,7 @@ for _, strategy in helpers.each_strategy() do name = "cors", route = { id = route2.id }, config = { - origins = { "example.com" }, + origins = { "example.test" }, methods = { "GET" }, headers = { "origin", "type", "accepts" }, exposed_headers = { "x-auth-token" }, @@ -333,7 +333,7 @@ for _, strategy in helpers.each_strategy() do name = "cors", route = { id = route3.id }, config = { - origins = { "example.com" }, + origins = { "example.test" }, methods = { "GET" }, headers = { "origin", "type", "accepts" }, exposed_headers = { "x-auth-token" }, @@ -365,7 +365,7 @@ for _, strategy in helpers.each_strategy() do name = "cors", route = { id = route6.id }, config = { - origins = { "example.com", "example.org" }, + origins = { "example.test", "example.org" }, methods = { "GET" }, headers = { "origin", "type", "accepts" }, exposed_headers = { "x-auth-token" }, @@ -395,7 +395,7 @@ for _, strategy in helpers.each_strategy() do name = "cors", route = { id = route9.id }, config = { - origins = { [[.*\.?example(?:-foo)?.com]] }, + origins = { [[.*\.?example(?:-foo)?.test]] }, } } @@ -403,7 +403,7 @@ for _, strategy in helpers.each_strategy() do name = "cors", route = { id = route10.id }, config = { - origins = { "http://my-site.com", "http://my-other-site.com" }, + origins = { "http://my-site.test", "http://my-other-site.test" }, } } @@ -411,7 +411,7 @@ for _, strategy in helpers.each_strategy() do name = "cors", route = { id = route11.id }, config = { - origins = { "http://my-site.com", "https://my-other-site.com:9000" }, + origins = { "http://my-site.test", "https://my-other-site.test:9000" }, } } @@ -435,7 +435,7 @@ for _, strategy in helpers.each_strategy() do }, methods = ngx.null, origins = { - "a.xxx.com", + "a.xxx.test", "allowed-domain.test" }, } @@ -455,7 +455,7 @@ for _, strategy in helpers.each_strategy() do name = "cors", route = { id = route_timeout.id }, config = { - origins = { "example.com" }, + origins = { "example.test" }, methods = { "GET" }, headers = { "origin", "type", "accepts" }, exposed_headers = { "x-auth-token" }, @@ -468,7 +468,7 @@ for _, strategy in helpers.each_strategy() do name = "cors", route = { id = route_error.id }, config = { - origins = { "example.com" }, + origins = { "example.test" }, methods = { "GET" }, headers = { "origin", "type", "accepts" }, exposed_headers = { "x-auth-token" }, @@ -481,7 +481,7 @@ for _, strategy in helpers.each_strategy() do name = "cors", route = { id = route_upstream.id }, config = { - origins = { "example.com" }, + origins = { "example.test" }, methods = { "GET" }, headers = { "origin", "type", "accepts" }, exposed_headers = { "x-auth-token" }, @@ -571,8 +571,8 @@ for _, strategy in helpers.each_strategy() do local res = assert(proxy_client:send { method = "OPTIONS", headers = { - ["Host"] = "cors1.com", - ["Origin"] = "origin1.com", + ["Host"] = "cors1.test", + ["Origin"] = "origin1.test", ["Access-Control-Request-Method"] = "GET", } }) @@ -597,8 
+597,8 @@ for _, strategy in helpers.each_strategy() do local res = assert(proxy_client:send { method = "OPTIONS", headers = { - ["Host"] = "cors-empty-origins.com", - ["Origin"] = "empty-origin.com", + ["Host"] = "cors-empty-origins.test", + ["Origin"] = "empty-origin.test", ["Access-Control-Request-Method"] = "GET", } }) @@ -617,15 +617,15 @@ for _, strategy in helpers.each_strategy() do local res = assert(proxy_client:send { method = "OPTIONS", headers = { - ["Host"] = "cors5.com", - ["Origin"] = "origin5.com", + ["Host"] = "cors5.test", + ["Origin"] = "origin5.test", ["Access-Control-Request-Method"] = "GET", } }) assert.res_status(200, res) assert.equal("0", res.headers["Content-Length"]) assert.equal(CORS_DEFAULT_METHODS, res.headers["Access-Control-Allow-Methods"]) - assert.equal("origin5.com", res.headers["Access-Control-Allow-Origin"]) + assert.equal("origin5.test", res.headers["Access-Control-Allow-Origin"]) assert.equal("true", res.headers["Access-Control-Allow-Credentials"]) assert.equal("Origin", res.headers["Vary"]) assert.is_nil(res.headers["Access-Control-Allow-Headers"]) @@ -637,15 +637,15 @@ for _, strategy in helpers.each_strategy() do local res = assert(proxy_client:send { method = "OPTIONS", headers = { - ["Host"] = "cors2.com", - ["Origin"] = "origin5.com", + ["Host"] = "cors2.test", + ["Origin"] = "origin5.test", ["Access-Control-Request-Method"] = "GET", } }) assert.res_status(200, res) assert.equal("0", res.headers["Content-Length"]) assert.equal("GET", res.headers["Access-Control-Allow-Methods"]) - assert.equal("example.com", res.headers["Access-Control-Allow-Origin"]) + assert.equal("example.test", res.headers["Access-Control-Allow-Origin"]) assert.equal("23", res.headers["Access-Control-Max-Age"]) assert.equal("true", res.headers["Access-Control-Allow-Credentials"]) assert.equal("origin,type,accepts", res.headers["Access-Control-Allow-Headers"]) @@ -658,7 +658,7 @@ for _, strategy in helpers.each_strategy() do method = "OPTIONS", path = "/status/201", headers = { - ["Host"] = "cors3.com" + ["Host"] = "cors3.test" } }) local body = assert.res_status(201, res) @@ -670,8 +670,8 @@ for _, strategy in helpers.each_strategy() do local res = assert(proxy_client:send { method = "OPTIONS", headers = { - ["Host"] = "cors5.com", - ["Origin"] = "origin5.com", + ["Host"] = "cors5.test", + ["Origin"] = "origin5.test", ["Access-Control-Request-Headers"] = "origin,accepts", ["Access-Control-Request-Method"] = "GET", } @@ -687,20 +687,20 @@ for _, strategy in helpers.each_strategy() do local res = assert(proxy_client:send { method = "OPTIONS", headers = { - ["Host"] = "cors10.com", - ["Origin"] = "http://my-site.com" + ["Host"] = "cors10.test", + ["Origin"] = "http://my-site.test" } }) assert.res_status(200, res) - assert.equal("http://my-site.com", res.headers["Access-Control-Allow-Origin"]) + assert.equal("http://my-site.test", res.headers["Access-Control-Allow-Origin"]) -- Illegitimate origins res = assert(proxy_client:send { method = "OPTIONS", headers = { - ["Host"] = "cors10.com", - ["Origin"] = "http://bad-guys.com" + ["Host"] = "cors10.test", + ["Origin"] = "http://bad-guys.test" } }) @@ -711,8 +711,8 @@ for _, strategy in helpers.each_strategy() do res = assert(proxy_client:send { method = "OPTIONS", headers = { - ["Host"] = "cors10.com", - ["Origin"] = "http://my-site.com.bad-guys.com" + ["Host"] = "cors10.test", + ["Origin"] = "http://my-site.test.bad-guys.test" } }) @@ -724,7 +724,7 @@ for _, strategy in helpers.each_strategy() do local res = assert(proxy_client:send 
{ method = "OPTIONS", headers = { - ["Host"] = "cors13.com", + ["Host"] = "cors13.test", ["Origin"] = "allowed-domain.test", ["Access-Control-Request-Private-Network"] = "true", ["Access-Control-Request-Method"] = "PUT", @@ -740,7 +740,7 @@ for _, strategy in helpers.each_strategy() do local res = assert(proxy_client:send { method = "GET", headers = { - ["Host"] = "cors1.com" + ["Host"] = "cors1.test" } }) assert.res_status(200, res) @@ -758,7 +758,7 @@ for _, strategy in helpers.each_strategy() do method = "OPTIONS", path = "/anything", headers = { - ["Host"] = "cors1.com" + ["Host"] = "cors1.test" } }) local body = assert.res_status(200, res) @@ -777,11 +777,11 @@ for _, strategy in helpers.each_strategy() do local res = assert(proxy_client:send { method = "GET", headers = { - ["Host"] = "cors2.com" + ["Host"] = "cors2.test" } }) assert.res_status(200, res) - assert.equal("example.com", res.headers["Access-Control-Allow-Origin"]) + assert.equal("example.test", res.headers["Access-Control-Allow-Origin"]) assert.equal("x-auth-token", res.headers["Access-Control-Expose-Headers"]) assert.equal("true", res.headers["Access-Control-Allow-Credentials"]) assert.equal("Origin", res.headers["Vary"]) @@ -794,11 +794,11 @@ for _, strategy in helpers.each_strategy() do local res = assert(proxy_client:send { method = "GET", headers = { - ["Host"] = "cors-timeout.com" + ["Host"] = "cors-timeout.test" } }) assert.res_status(502, res) - assert.equal("example.com", res.headers["Access-Control-Allow-Origin"]) + assert.equal("example.test", res.headers["Access-Control-Allow-Origin"]) assert.equal("x-auth-token", res.headers["Access-Control-Expose-Headers"]) assert.equal("Origin", res.headers["Vary"]) assert.is_nil(res.headers["Access-Control-Allow-Credentials"]) @@ -811,11 +811,11 @@ for _, strategy in helpers.each_strategy() do local res = assert(proxy_client:send { method = "GET", headers = { - ["Host"] = "cors-error.com" + ["Host"] = "cors-error.test" } }) assert.res_status(500, res) - assert.equal("example.com", res.headers["Access-Control-Allow-Origin"]) + assert.equal("example.test", res.headers["Access-Control-Allow-Origin"]) assert.equal("x-auth-token", res.headers["Access-Control-Expose-Headers"]) assert.equal("Origin", res.headers["Vary"]) assert.is_nil(res.headers["Access-Control-Allow-Credentials"]) @@ -829,7 +829,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/asdasdasd", headers = { - ["Host"] = "cors1.com" + ["Host"] = "cors1.test" } }) assert.res_status(404, res) @@ -846,7 +846,7 @@ for _, strategy in helpers.each_strategy() do local res = assert(proxy_client:send { method = "GET", headers = { - ["Host"] = "cors4.com" + ["Host"] = "cors4.test" } }) assert.res_status(401, res) @@ -863,27 +863,27 @@ for _, strategy in helpers.each_strategy() do local res = assert(proxy_client:send { method = "GET", headers = { - ["Host"] = "cors6.com", - ["Origin"] = "example.com" + ["Host"] = "cors6.test", + ["Origin"] = "example.test" } }) assert.res_status(200, res) - assert.equal("example.com", res.headers["Access-Control-Allow-Origin"]) + assert.equal("example.test", res.headers["Access-Control-Allow-Origin"]) assert.equal("Origin", res.headers["Vary"]) local domains = { - ["example.com"] = true, - ["www.example.com"] = true, - ["example-foo.com"] = true, - ["www.example-foo.com"] = true, - ["www.example-fo0.com"] = false, + ["example.test"] = true, + ["www.example.test"] = true, + ["example-foo.test"] = true, + ["www.example-foo.test"] = true, + ["www.example-fo0.test"] = 
false, } for domain in pairs(domains) do local res = assert(proxy_client:send { method = "GET", headers = { - ["Host"] = "cors9.com", + ["Host"] = "cors9.test", ["Origin"] = domain } }) @@ -899,8 +899,8 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/response-headers?vary=Accept-Encoding", headers = { - ["Host"] = "cors-upstream.com", - ["Origin"] = "example.com", + ["Host"] = "cors-upstream.test", + ["Origin"] = "example.test", } }) assert.res_status(200, res) @@ -911,8 +911,8 @@ for _, strategy in helpers.each_strategy() do local res = assert(proxy_client:send { method = "GET", headers = { - ["Host"] = "cors6.com", - ["Origin"] = "http://example.com" + ["Host"] = "cors6.test", + ["Origin"] = "http://example.test" } }) assert.res_status(200, res) @@ -922,8 +922,8 @@ for _, strategy in helpers.each_strategy() do local res = assert(proxy_client:send { method = "GET", headers = { - ["Host"] = "cors6.com", - ["Origin"] = "https://example.com" + ["Host"] = "cors6.test", + ["Origin"] = "https://example.test" } }) assert.res_status(200, res) @@ -934,28 +934,28 @@ for _, strategy in helpers.each_strategy() do local res = assert(proxy_client:send { method = "GET", headers = { - ["Host"] = "cors11.com", - ["Origin"] = "http://my-site.com" + ["Host"] = "cors11.test", + ["Origin"] = "http://my-site.test" } }) assert.res_status(200, res) - assert.equals("http://my-site.com", res.headers["Access-Control-Allow-Origin"]) + assert.equals("http://my-site.test", res.headers["Access-Control-Allow-Origin"]) local res = assert(proxy_client:send { method = "GET", headers = { - ["Host"] = "cors11.com", - ["Origin"] = "http://my-site.com:80" + ["Host"] = "cors11.test", + ["Origin"] = "http://my-site.test:80" } }) assert.res_status(200, res) - assert.equals("http://my-site.com", res.headers["Access-Control-Allow-Origin"]) + assert.equals("http://my-site.test", res.headers["Access-Control-Allow-Origin"]) local res = assert(proxy_client:send { method = "GET", headers = { - ["Host"] = "cors11.com", - ["Origin"] = "http://my-site.com:8000" + ["Host"] = "cors11.test", + ["Origin"] = "http://my-site.test:8000" } }) assert.res_status(200, res) @@ -964,8 +964,8 @@ for _, strategy in helpers.each_strategy() do res = assert(proxy_client:send { method = "GET", headers = { - ["Host"] = "cors11.com", - ["Origin"] = "https://my-site.com" + ["Host"] = "cors11.test", + ["Origin"] = "https://my-site.test" } }) assert.res_status(200, res) @@ -974,18 +974,18 @@ for _, strategy in helpers.each_strategy() do local res = assert(proxy_client:send { method = "GET", headers = { - ["Host"] = "cors11.com", - ["Origin"] = "https://my-other-site.com:9000" + ["Host"] = "cors11.test", + ["Origin"] = "https://my-other-site.test:9000" } }) assert.res_status(200, res) - assert.equals("https://my-other-site.com:9000", res.headers["Access-Control-Allow-Origin"]) + assert.equals("https://my-other-site.test:9000", res.headers["Access-Control-Allow-Origin"]) local res = assert(proxy_client:send { method = "GET", headers = { - ["Host"] = "cors11.com", - ["Origin"] = "https://my-other-site.com:9001" + ["Host"] = "cors11.test", + ["Origin"] = "https://my-other-site.test:9001" } }) assert.res_status(200, res) @@ -996,7 +996,7 @@ for _, strategy in helpers.each_strategy() do local res = assert(proxy_client:send { method = "GET", headers = { - ["Host"] = "cors6.com", + ["Host"] = "cors6.test", ["Origin"] = "http://www.example.net" } }) @@ -1008,7 +1008,7 @@ for _, strategy in helpers.each_strategy() do local res = 
assert(proxy_client:send { method = "GET", headers = { - ["Host"] = "cors5.com", + ["Host"] = "cors5.test", ["Origin"] = "http://www.example.net" } }) @@ -1022,7 +1022,7 @@ for _, strategy in helpers.each_strategy() do local res = assert(proxy_client:send { method = "GET", headers = { - ["Host"] = "cors5.com", + ["Host"] = "cors5.test", ["Origin"] = "http://www.example.net:3000" } }) @@ -1036,7 +1036,7 @@ for _, strategy in helpers.each_strategy() do local res = assert(proxy_client:send { method = "GET", headers = { - ["Host"] = "cors7.com", + ["Host"] = "cors7.test", ["Origin"] = "http://www.example.net" } }) @@ -1053,7 +1053,7 @@ for _, strategy in helpers.each_strategy() do ["Access-Control-Allow-Origin"] = "*", }), headers = { - ["Host"] = "cors12.com", + ["Host"] = "cors12.test", ["Origin"] = "allowed-domain.test", } }) @@ -1073,7 +1073,7 @@ for _, strategy in helpers.each_strategy() do ["Access-Control-Allow-Origin"] = "*", }), headers = { - ["Host"] = "cors12.com", + ["Host"] = "cors12.test", ["Origin"] = "disallowed-domain.test", } }) diff --git a/spec/03-plugins/14-request-termination/02-access_spec.lua b/spec/03-plugins/14-request-termination/02-access_spec.lua index f8a28bea24e0..013d009acf4f 100644 --- a/spec/03-plugins/14-request-termination/02-access_spec.lua +++ b/spec/03-plugins/14-request-termination/02-access_spec.lua @@ -19,45 +19,45 @@ for _, strategy in helpers.each_strategy() do }) local route1 = bp.routes:insert({ - hosts = { "api1.request-termination.com" }, + hosts = { "api1.request-termination.test" }, }) local route2 = bp.routes:insert({ - hosts = { "api2.request-termination.com" }, + hosts = { "api2.request-termination.test" }, }) local route3 = bp.routes:insert({ - hosts = { "api3.request-termination.com" }, + hosts = { "api3.request-termination.test" }, }) local route4 = bp.routes:insert({ - hosts = { "api4.request-termination.com" }, + hosts = { "api4.request-termination.test" }, }) local route5 = bp.routes:insert({ - hosts = { "api5.request-termination.com" }, + hosts = { "api5.request-termination.test" }, }) local route6 = bp.routes:insert({ - hosts = { "api6.request-termination.com" }, + hosts = { "api6.request-termination.test" }, }) local route7 = db.routes:insert({ - hosts = { "api7.request-termination.com" }, + hosts = { "api7.request-termination.test" }, }) local route8 = bp.routes:insert({ - hosts = { "api8.request-termination.com" }, + hosts = { "api8.request-termination.test" }, }) local route9 = bp.routes:insert({ - hosts = { "api9.request-termination.com" }, + hosts = { "api9.request-termination.test" }, strip_path = false, paths = { "~/(?[^#?/]+)/200" } }) local route10 = bp.routes:insert({ - hosts = { "api10.request-termination.com" }, + hosts = { "api10.request-termination.test" }, }) bp.plugins:insert { @@ -191,7 +191,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/status/200", headers = { - ["Host"] = "api1.request-termination.com" + ["Host"] = "api1.request-termination.test" } }) local body = assert.res_status(503, res) @@ -204,7 +204,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/status/200", headers = { - ["Host"] = "api7.request-termination.com" + ["Host"] = "api7.request-termination.test" } }) local body = assert.res_status(503, res) @@ -217,7 +217,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/status/200", headers = { - ["Host"] = "api2.request-termination.com" + ["Host"] = "api2.request-termination.test" } }) local body = assert.res_status(404, res) 
@@ -230,7 +230,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/status/200", headers = { - ["Host"] = "api3.request-termination.com" + ["Host"] = "api3.request-termination.test" } }) local body = assert.res_status(406, res) @@ -243,7 +243,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/status/204", headers = { - ["Host"] = "api8.request-termination.com" + ["Host"] = "api8.request-termination.test" } }) @@ -259,7 +259,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/status/200", headers = { - ["Host"] = "api4.request-termination.com" + ["Host"] = "api4.request-termination.test" } }) local body = assert.res_status(503, res) @@ -271,7 +271,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/status/200", headers = { - ["Host"] = "api5.request-termination.com" + ["Host"] = "api5.request-termination.test" } }) local body = assert.res_status(451, res) @@ -294,7 +294,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/status/200", headers = { - ["Host"] = "api6.request-termination.com" + ["Host"] = "api6.request-termination.test" } }) local body = assert.res_status(503, res) @@ -308,7 +308,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/status/200", headers = { - ["Host"] = "api1.request-termination.com" + ["Host"] = "api1.request-termination.test" } }) @@ -324,20 +324,20 @@ for _, strategy in helpers.each_strategy() do }, path = "/status/200", headers = { - ["Host"] = "api9.request-termination.com" + ["Host"] = "api9.request-termination.test" }, body = "cool body", }) assert.response(res).has.status(404) local json = assert.response(res).has.jsonbody() - assert.equal("api9.request-termination.com", json.matched_route.hosts[1]) + assert.equal("api9.request-termination.test", json.matched_route.hosts[1]) json.request.headers["user-agent"] = nil -- clear, depends on lua-resty-http version assert.same({ headers = { ["content-length"] = '9', - host = 'api9.request-termination.com', + host = 'api9.request-termination.test', }, - host = 'api9.request-termination.com', + host = 'api9.request-termination.test', method = 'GET', path = '/status/200', port = helpers.get_proxy_port(), @@ -357,7 +357,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/status/200", headers = { - ["Host"] = "api10.request-termination.com" + ["Host"] = "api10.request-termination.test" } }) assert.response(res).has.status(200) @@ -370,22 +370,22 @@ for _, strategy in helpers.each_strategy() do }, path = "/status/200", headers = { - ["Host"] = "api10.request-termination.com", + ["Host"] = "api10.request-termination.test", ["Gimme-An-Echo"] = "anything will do" }, body = "cool body", }) assert.response(res).has.status(404) local json = assert.response(res).has.jsonbody() - assert.equal("api10.request-termination.com", json.matched_route.hosts[1]) + assert.equal("api10.request-termination.test", json.matched_route.hosts[1]) json.request.headers["user-agent"] = nil -- clear, depends on lua-resty-http version assert.same({ headers = { ["content-length"] = '9', ["gimme-an-echo"] = 'anything will do', - host = 'api10.request-termination.com', + host = 'api10.request-termination.test', }, - host = 'api10.request-termination.com', + host = 'api10.request-termination.test', method = 'GET', path = '/status/200', port = helpers.get_proxy_port(), @@ -409,20 +409,20 @@ for _, strategy in helpers.each_strategy() do }, path = "/status/200", headers = { - ["Host"] = 
"api10.request-termination.com", + ["Host"] = "api10.request-termination.test", }, body = "cool body", }) assert.response(res).has.status(404) local json = assert.response(res).has.jsonbody() - assert.equal("api10.request-termination.com", json.matched_route.hosts[1]) + assert.equal("api10.request-termination.test", json.matched_route.hosts[1]) json.request.headers["user-agent"] = nil -- clear, depends on lua-resty-http version assert.same({ headers = { ["content-length"] = '9', - host = 'api10.request-termination.com', + host = 'api10.request-termination.test', }, - host = 'api10.request-termination.com', + host = 'api10.request-termination.test', method = 'GET', path = '/status/200', port = helpers.get_proxy_port(), diff --git a/spec/03-plugins/14-request-termination/03-integration_spec.lua b/spec/03-plugins/14-request-termination/03-integration_spec.lua index 46e2992997dc..a4cdb33035db 100644 --- a/spec/03-plugins/14-request-termination/03-integration_spec.lua +++ b/spec/03-plugins/14-request-termination/03-integration_spec.lua @@ -17,7 +17,7 @@ for _, strategy in helpers.each_strategy() do }) bp.routes:insert({ - hosts = { "api1.request-termination.com" }, + hosts = { "api1.request-termination.test" }, }) bp.plugins:insert { @@ -71,7 +71,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/request", headers = { - ["Host"] = "api1.request-termination.com", + ["Host"] = "api1.request-termination.test", ["apikey"] = "kong", }, }) diff --git a/spec/03-plugins/15-response-transformer/04-filter_spec.lua b/spec/03-plugins/15-response-transformer/04-filter_spec.lua index 9b92bbad5796..12709c6899fb 100644 --- a/spec/03-plugins/15-response-transformer/04-filter_spec.lua +++ b/spec/03-plugins/15-response-transformer/04-filter_spec.lua @@ -13,15 +13,15 @@ for _, strategy in helpers.each_strategy() do }) local route1 = bp.routes:insert({ - hosts = { "response.com" }, + hosts = { "response.test" }, }) local route2 = bp.routes:insert({ - hosts = { "response2.com" }, + hosts = { "response2.test" }, }) local route3 = bp.routes:insert({ - hosts = { "response3.com" }, + hosts = { "response3.test" }, }) bp.plugins:insert { @@ -86,7 +86,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/get", headers = { - host = "response.com" + host = "response.test" } }) assert.response(res).has.status(200) @@ -98,7 +98,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/response-headers", headers = { - host = "response.com" + host = "response.test" } }) assert.response(res).has.status(200) @@ -110,7 +110,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/get", headers = { - host = "response2.com" + host = "response2.test" } }) assert.response(res).status(200) @@ -132,7 +132,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/get", headers = { - host = "response3.com" + host = "response3.test" } }) diff --git a/spec/03-plugins/15-response-transformer/05-big_response_body_spec.lua b/spec/03-plugins/15-response-transformer/05-big_response_body_spec.lua index 22ffb6c7f01c..5ba54532c149 100644 --- a/spec/03-plugins/15-response-transformer/05-big_response_body_spec.lua +++ b/spec/03-plugins/15-response-transformer/05-big_response_body_spec.lua @@ -22,7 +22,7 @@ for _, strategy in helpers.each_strategy() do }) local route = bp.routes:insert({ - hosts = { "response.com" }, + hosts = { "response.test" }, methods = { "POST" }, }) @@ -63,7 +63,7 @@ for _, strategy in helpers.each_strategy() do path = "/post", 
body = create_big_data(1024 * 1024), headers = { - host = "response.com", + host = "response.test", ["content-type"] = "application/json", } }) @@ -78,7 +78,7 @@ for _, strategy in helpers.each_strategy() do path = "/post", body = create_big_data(1024 * 1024), headers = { - host = "response.com", + host = "response.test", ["content-type"] = "application/json", } }) diff --git a/spec/03-plugins/16-jwt/03-access_spec.lua b/spec/03-plugins/16-jwt/03-access_spec.lua index dfa90e592d08..e4b2682ac536 100644 --- a/spec/03-plugins/16-jwt/03-access_spec.lua +++ b/spec/03-plugins/16-jwt/03-access_spec.lua @@ -42,7 +42,7 @@ for _, strategy in helpers.each_strategy() do for i = 1, 13 do routes[i] = bp.routes:insert { - hosts = { "jwt" .. i .. ".com" }, + hosts = { "jwt" .. i .. ".test" }, } end @@ -248,7 +248,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/request", headers = { - ["Host"] = "jwt1.com", + ["Host"] = "jwt1.test", } }) assert.res_status(401, res) @@ -262,7 +262,7 @@ for _, strategy in helpers.each_strategy() do path = "/request", headers = { ["Authorization"] = authorization, - ["Host"] = "jwt1.com", + ["Host"] = "jwt1.test", } }) local body = assert.res_status(401, res) @@ -278,7 +278,7 @@ for _, strategy in helpers.each_strategy() do path = "/request", headers = { ["Authorization"] = authorization, - ["Host"] = "jwt1.com", + ["Host"] = "jwt1.test", } }) local body = assert.res_status(401, res) @@ -294,7 +294,7 @@ for _, strategy in helpers.each_strategy() do path = "/request", headers = { ["Authorization"] = authorization, - ["Host"] = "jwt1.com", + ["Host"] = "jwt1.test", } }) local body = assert.res_status(401, res) @@ -310,7 +310,7 @@ for _, strategy in helpers.each_strategy() do path = "/request", headers = { ["Authorization"] = authorization, - ["Host"] = "jwt1.com", + ["Host"] = "jwt1.test", } }) local body = assert.res_status(401, res) @@ -326,7 +326,7 @@ for _, strategy in helpers.each_strategy() do path = "/request", headers = { ["Authorization"] = authorization, - ["Host"] = "jwt1.com", + ["Host"] = "jwt1.test", } }) local body = assert.res_status(401, res) @@ -338,7 +338,7 @@ for _, strategy in helpers.each_strategy() do method = "OPTIONS", path = "/request", headers = { - ["Host"] = "jwt8.com" + ["Host"] = "jwt8.test" } }) assert.res_status(200, res) @@ -348,7 +348,7 @@ for _, strategy in helpers.each_strategy() do method = "OPTIONS", path = "/request", headers = { - ["Host"] = "jwt1.com" + ["Host"] = "jwt1.test" } }) local body = assert.res_status(401, res) @@ -365,7 +365,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/request/?jwt=" .. jwt, headers = { - ["Host"] = "jwt11.com" + ["Host"] = "jwt11.test" } }) local body = assert.res_status(401, res) @@ -382,7 +382,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/request/?jwt=" .. jwt, headers = { - ["Host"] = "jwt11.com" + ["Host"] = "jwt11.test" } }) assert.res_status(200, res) @@ -405,7 +405,7 @@ for _, strategy in helpers.each_strategy() do path = "/request?jwt=" .. 
jwt, headers = { ["Authorization"] = "Bearer invalid.jwt.token", - ["Host"] = "jwt1.com", + ["Host"] = "jwt1.test", } }) local body = cjson.decode(assert.res_status(401, res)) @@ -423,7 +423,7 @@ for _, strategy in helpers.each_strategy() do path = "/request", headers = { ["Authorization"] = authorization, - ["Host"] = "jwt1.com", + ["Host"] = "jwt1.test", } }) local body = cjson.decode(assert.res_status(200, res)) @@ -456,7 +456,7 @@ for _, strategy in helpers.each_strategy() do path = "/request", headers = { ["Authorization"] = authorization, - ["Host"] = "jwt10.com", + ["Host"] = "jwt10.test", } }) assert.res_status(200, res) @@ -470,7 +470,7 @@ for _, strategy in helpers.each_strategy() do path = "/request", headers = { ["Authorization"] = authorization, - ["Host"] = "jwt4.com" + ["Host"] = "jwt4.test" } }) local body = cjson.decode(assert.res_status(200, res)) @@ -501,7 +501,7 @@ for _, strategy in helpers.each_strategy() do path = "/request", headers = { ["Authorization"] = authorization, - ["Host"] = "jwt5.com" + ["Host"] = "jwt5.test" } }) local body = cjson.decode(assert.res_status(200, res)) @@ -516,8 +516,8 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/request", headers = { - ["Host"] = "jwt9.com", - ["Cookie"] = "crumble=" .. jwt .. "; path=/;domain=.jwt9.com", + ["Host"] = "jwt9.test", + ["Cookie"] = "crumble=" .. jwt .. "; path=/;domain=.jwt9.test", } }) assert.res_status(200, res) @@ -529,8 +529,8 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/request", headers = { - ["Host"] = "jwt9.com", - ["Cookie"] = "silly=" .. jwt .. "; path=/;domain=.jwt9.com", + ["Host"] = "jwt9.test", + ["Cookie"] = "silly=" .. jwt .. "; path=/;domain=.jwt9.test", } }) assert.res_status(200, res) @@ -542,8 +542,8 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/request", headers = { - ["Host"] = "jwt9.com", - ["Cookie"] = "silly=" .. jwt .. "; path=/;domain=.jwt9.com", + ["Host"] = "jwt9.test", + ["Cookie"] = "silly=" .. jwt .. "; path=/;domain=.jwt9.test", } }) local body = assert.res_status(401, res) @@ -557,8 +557,8 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/request", headers = { - ["Host"] = "jwt9.com", - ["Cookie"] = "silly=" .. jwt .. "; path=/;domain=.jwt9.com", + ["Host"] = "jwt9.test", + ["Cookie"] = "silly=" .. jwt .. "; path=/;domain=.jwt9.test", } }) local body = assert.res_status(401, res) @@ -571,7 +571,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/request", headers = { - ["Host"] = "jwt9.com", + ["Host"] = "jwt9.test", ["Authorization"] = "Bearer " .. jwt, } }) @@ -582,7 +582,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/request", headers = { - ["Host"] = "jwt9.com", + ["Host"] = "jwt9.test", } }) assert.res_status(401, res) @@ -594,7 +594,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/request", headers = { - ["Host"] = "jwt12.com", + ["Host"] = "jwt12.test", ["CustomAuthorization"] = "Bearer " .. jwt, } }) @@ -607,7 +607,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/request", headers = { - ["Host"] = "jwt12.com", + ["Host"] = "jwt12.test", ["CustomAuthorization"] = {"Bearer " .. jwt, "Bearer other-token"} } }) @@ -620,7 +620,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/request/?jwt=" .. 
jwt, headers = { - ["Host"] = "jwt1.com", + ["Host"] = "jwt1.test", } }) assert.res_status(200, res) @@ -632,7 +632,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/request/?token=" .. jwt, headers = { - ["Host"] = "jwt2.com", + ["Host"] = "jwt2.test", } }) assert.res_status(200, res) @@ -649,7 +649,7 @@ for _, strategy in helpers.each_strategy() do path = "/request", headers = { ["Authorization"] = authorization, - ["Host"] = "jwt1.com" + ["Host"] = "jwt1.test" } }) local body = cjson.decode(assert.res_status(200, res)) @@ -666,7 +666,7 @@ for _, strategy in helpers.each_strategy() do path = "/request", headers = { ["Authorization"] = authorization, - ["Host"] = "jwt1.com" + ["Host"] = "jwt1.test" } }) local body = cjson.decode(assert.res_status(200, res)) @@ -686,7 +686,7 @@ for _, strategy in helpers.each_strategy() do path = "/request", headers = { ["Authorization"] = authorization, - ["Host"] = "jwt1.com", + ["Host"] = "jwt1.test", } }) local body = cjson.decode(assert.res_status(200, res)) @@ -703,7 +703,7 @@ for _, strategy in helpers.each_strategy() do path = "/request", headers = { ["Authorization"] = authorization, - ["Host"] = "jwt1.com", + ["Host"] = "jwt1.test", } }) local body = cjson.decode(assert.res_status(200, res)) @@ -723,7 +723,7 @@ for _, strategy in helpers.each_strategy() do path = "/request", headers = { ["Authorization"] = authorization, - ["Host"] = "jwt1.com", + ["Host"] = "jwt1.test", } }) local body = cjson.decode(assert.res_status(200, res)) @@ -740,7 +740,7 @@ for _, strategy in helpers.each_strategy() do path = "/request", headers = { ["Authorization"] = authorization, - ["Host"] = "jwt1.com", + ["Host"] = "jwt1.test", } }) local body = cjson.decode(assert.res_status(200, res)) @@ -761,7 +761,7 @@ for _, strategy in helpers.each_strategy() do path = "/request", headers = { ["Authorization"] = authorization, - ["Host"] = "jwt1.com", + ["Host"] = "jwt1.test", } }) local body = cjson.decode(assert.res_status(200, res)) @@ -778,7 +778,7 @@ for _, strategy in helpers.each_strategy() do path = "/request", headers = { ["Authorization"] = authorization, - ["Host"] = "jwt1.com", + ["Host"] = "jwt1.test", } }) local body = cjson.decode(assert.res_status(200, res)) @@ -798,7 +798,7 @@ for _, strategy in helpers.each_strategy() do path = "/request", headers = { ["Authorization"] = authorization, - ["Host"] = "jwt1.com", + ["Host"] = "jwt1.test", } }) local body = cjson.decode(assert.res_status(200, res)) @@ -819,7 +819,7 @@ for _, strategy in helpers.each_strategy() do path = "/request", headers = { ["Authorization"] = authorization, - ["Host"] = "jwt1.com", + ["Host"] = "jwt1.test", } }) local body = cjson.decode(assert.res_status(200, res)) @@ -840,7 +840,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/request/?jwt=" .. jwt, headers = { - ["Host"] = "jwt3.com" + ["Host"] = "jwt3.test" } }) local body = cjson.decode(assert.res_status(401, res)) @@ -857,7 +857,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/request/?jwt=" .. jwt, headers = { - ["Host"] = "jwt3.com" + ["Host"] = "jwt3.test" } }) local body = assert.res_status(401, res) @@ -874,7 +874,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/request/?jwt=" .. 
jwt, headers = { - ["Host"] = "jwt3.com" + ["Host"] = "jwt3.test" } }) local body = assert.res_status(401, res) @@ -892,7 +892,7 @@ for _, strategy in helpers.each_strategy() do path = "/request", headers = { ["Authorization"] = authorization, - ["Host"] = "jwt1.com", + ["Host"] = "jwt1.test", } }) assert.res_status(200, res) @@ -911,7 +911,7 @@ for _, strategy in helpers.each_strategy() do path = "/request", headers = { ["Authorization"] = authorization, - ["Host"] = "jwt6.com" + ["Host"] = "jwt6.test" } }) local body = cjson.decode(assert.res_status(200, res)) @@ -924,7 +924,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/request", headers = { - ["Host"] = "jwt6.com" + ["Host"] = "jwt6.test" } }) local body = cjson.decode(assert.res_status(200, res)) @@ -937,7 +937,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/request", headers = { - ["Host"] = "jwt13.com" + ["Host"] = "jwt13.test" } }) local body = cjson.decode(assert.res_status(200, res)) @@ -950,7 +950,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/request", headers = { - ["Host"] = "jwt7.com" + ["Host"] = "jwt7.test" } }) assert.response(res).has.status(500) @@ -983,7 +983,7 @@ for _, strategy in helpers.each_strategy() do }) local route1 = bp.routes:insert { - hosts = { "logical-and.com" }, + hosts = { "logical-and.test" }, service = service1, } @@ -1014,7 +1014,7 @@ for _, strategy in helpers.each_strategy() do }) local route2 = bp.routes:insert { - hosts = { "logical-or.com" }, + hosts = { "logical-or.test" }, service = service2, } @@ -1069,7 +1069,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/request", headers = { - ["Host"] = "logical-and.com", + ["Host"] = "logical-and.test", ["apikey"] = "Mouse", ["Authorization"] = jwt_token, } @@ -1088,7 +1088,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/request", headers = { - ["Host"] = "logical-and.com", + ["Host"] = "logical-and.test", ["apikey"] = "Mouse", } }) @@ -1100,7 +1100,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/request", headers = { - ["Host"] = "logical-and.com", + ["Host"] = "logical-and.test", ["Authorization"] = jwt_token, } }) @@ -1112,7 +1112,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/request", headers = { - ["Host"] = "logical-and.com", + ["Host"] = "logical-and.test", } }) assert.response(res).has.status(401) @@ -1127,7 +1127,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/request", headers = { - ["Host"] = "logical-or.com", + ["Host"] = "logical-or.test", ["apikey"] = "Mouse", ["Authorization"] = jwt_token, } @@ -1146,7 +1146,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/request", headers = { - ["Host"] = "logical-or.com", + ["Host"] = "logical-or.test", ["apikey"] = "Mouse", } }) @@ -1163,7 +1163,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/request", headers = { - ["Host"] = "logical-or.com", + ["Host"] = "logical-or.test", ["Authorization"] = jwt_token, } }) @@ -1181,7 +1181,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/request", headers = { - ["Host"] = "logical-or.com", + ["Host"] = "logical-or.test", } }) assert.response(res).has.status(200) diff --git a/spec/03-plugins/16-jwt/04-invalidations_spec.lua b/spec/03-plugins/16-jwt/04-invalidations_spec.lua index 1138f5492c98..703e267d80d2 100644 --- 
a/spec/03-plugins/16-jwt/04-invalidations_spec.lua +++ b/spec/03-plugins/16-jwt/04-invalidations_spec.lua @@ -22,7 +22,7 @@ for _, strategy in helpers.each_strategy() do }) route = bp.routes:insert { - hosts = { "jwt.com" }, + hosts = { "jwt.test" }, } consumer = bp.consumers:insert { @@ -80,7 +80,7 @@ for _, strategy in helpers.each_strategy() do path = "/request", headers = { ["Authorization"] = get_authorization("key123", "secret123"), - ["Host"] = "jwt.com" + ["Host"] = "jwt.test" } }) assert.res_status(200, res) @@ -118,7 +118,7 @@ for _, strategy in helpers.each_strategy() do path = "/request", headers = { ["Authorization"] = get_authorization("key123", "secret123"), - ["Host"] = "jwt.com" + ["Host"] = "jwt.test" } }) assert.res_status(401, res) @@ -130,7 +130,7 @@ for _, strategy in helpers.each_strategy() do path = "/request", headers = { ["Authorization"] = get_authorization("key123", "secret123"), - ["Host"] = "jwt.com" + ["Host"] = "jwt.test" } }) assert.res_status(200, res) @@ -141,7 +141,7 @@ for _, strategy in helpers.each_strategy() do path = "/request", headers = { ["Authorization"] = get_authorization("keyhello", "secret123"), - ["Host"] = "jwt.com" + ["Host"] = "jwt.test" } }) assert.res_status(401, res) @@ -185,7 +185,7 @@ for _, strategy in helpers.each_strategy() do path = "/request", headers = { ["Authorization"] = get_authorization("keyhello", "secret123"), - ["Host"] = "jwt.com" + ["Host"] = "jwt.test" } }) assert.res_status(200, res) @@ -196,7 +196,7 @@ for _, strategy in helpers.each_strategy() do path = "/request", headers = { ["Authorization"] = get_authorization("key123", "secret123"), - ["Host"] = "jwt.com" + ["Host"] = "jwt.test" } }) assert.res_status(401, res) @@ -210,7 +210,7 @@ for _, strategy in helpers.each_strategy() do path = "/request", headers = { ["Authorization"] = get_authorization("key123", "secret123"), - ["Host"] = "jwt.com" + ["Host"] = "jwt.test" } }) assert.res_status(200, res) @@ -239,7 +239,7 @@ for _, strategy in helpers.each_strategy() do path = "/request", headers = { ["Authorization"] = get_authorization("key123", "secret123"), - ["Host"] = "jwt.com" + ["Host"] = "jwt.test" } }) assert.res_status(401, res) diff --git a/spec/03-plugins/17-ip-restriction/02-access_spec.lua b/spec/03-plugins/17-ip-restriction/02-access_spec.lua index aa79f234de14..d487c957bca2 100644 --- a/spec/03-plugins/17-ip-restriction/02-access_spec.lua +++ b/spec/03-plugins/17-ip-restriction/02-access_spec.lua @@ -19,51 +19,51 @@ for _, strategy in helpers.each_strategy() do }) local route1 = bp.routes:insert { - hosts = { "ip-restriction1.com" }, + hosts = { "ip-restriction1.test" }, } local route2 = bp.routes:insert { - hosts = { "ip-restriction2.com" }, + hosts = { "ip-restriction2.test" }, } local route3 = bp.routes:insert { - hosts = { "ip-restriction3.com" }, + hosts = { "ip-restriction3.test" }, } local route4 = bp.routes:insert { - hosts = { "ip-restriction4.com" }, + hosts = { "ip-restriction4.test" }, } local route5 = bp.routes:insert { - hosts = { "ip-restriction5.com" }, + hosts = { "ip-restriction5.test" }, } local route6 = bp.routes:insert { - hosts = { "ip-restriction6.com" }, + hosts = { "ip-restriction6.test" }, } local route7 = bp.routes:insert { - hosts = { "ip-restriction7.com" }, + hosts = { "ip-restriction7.test" }, } local route8 = bp.routes:insert { - hosts = { "ip-restriction8.com" }, + hosts = { "ip-restriction8.test" }, } local route9 = bp.routes:insert { - hosts = { "ip-restriction9.com" }, + hosts = { "ip-restriction9.test" }, } local 
route10 = bp.routes:insert { - hosts = { "ip-restriction10.com" }, + hosts = { "ip-restriction10.test" }, } local route11 = bp.routes:insert { - hosts = { "ip-restriction11.com" }, + hosts = { "ip-restriction11.test" }, } local route12 = bp.routes:insert { - hosts = { "ip-restriction12.com" }, + hosts = { "ip-restriction12.test" }, } local grpc_service = bp.services:insert { @@ -74,21 +74,21 @@ for _, strategy in helpers.each_strategy() do local route_grpc_deny = assert(bp.routes:insert { protocols = { "grpc" }, paths = { "/hello.HelloService/" }, - hosts = { "ip-restriction-grpc1.com" }, + hosts = { "ip-restriction-grpc1.test" }, service = grpc_service, }) local route_grpc_allow = assert(bp.routes:insert { protocols = { "grpc" }, paths = { "/hello.HelloService/" }, - hosts = { "ip-restriction-grpc2.com" }, + hosts = { "ip-restriction-grpc2.test" }, service = grpc_service, }) local route_grpc_xforwarded_deny = assert(bp.routes:insert { protocols = { "grpc" }, paths = { "/hello.HelloService/" }, - hosts = { "ip-restriction-grpc3.com" }, + hosts = { "ip-restriction-grpc3.test" }, service = grpc_service, }) @@ -301,7 +301,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/status/200", headers = { - ["Host"] = "ip-restriction1.com" + ["Host"] = "ip-restriction1.test" } }) local body = assert.res_status(403, res) @@ -313,7 +313,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/status/200", headers = { - ["Host"] = "ip-restriction12.com" + ["Host"] = "ip-restriction12.test" } }) local body = assert.res_status(401, res) @@ -327,7 +327,7 @@ for _, strategy in helpers.each_strategy() do local ok, err = helpers.proxy_client_grpc(){ service = "hello.HelloService.SayHello", opts = { - ["-authority"] = "ip-restriction-grpc1.com", + ["-authority"] = "ip-restriction-grpc1.test", ["-v"] = true, }, } @@ -351,7 +351,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/request", headers = { - ["Host"] = "ip-restriction2.com" + ["Host"] = "ip-restriction2.test" } }) local body = assert.res_status(200, res) @@ -363,7 +363,7 @@ for _, strategy in helpers.each_strategy() do local ok = helpers.proxy_client_grpc(){ service = "hello.HelloService.SayHello", opts = { - ["-authority"] = "ip-restriction-grpc2.com", + ["-authority"] = "ip-restriction-grpc2.test", ["-v"] = true, }, } @@ -385,7 +385,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/status/200", headers = { - ["Host"] = "ip-restriction5.com" + ["Host"] = "ip-restriction5.test" } }) local body = assert.res_status(403, res) @@ -396,7 +396,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/status/200", headers = { - ["Host"] = "ip-restriction10.com" + ["Host"] = "ip-restriction10.test" } }) local body = assert.res_status(403, res) @@ -407,7 +407,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/status/200", headers = { - ["Host"] = "ip-restriction9.com" + ["Host"] = "ip-restriction9.test" } }) local body = assert.res_status(403, res) @@ -418,7 +418,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/status/200", headers = { - ["Host"] = "ip-restriction11.com" + ["Host"] = "ip-restriction11.test" } }) local body = assert.res_status(403, res) @@ -431,7 +431,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/request", headers = { - ["Host"] = "ip-restriction7.com" + ["Host"] = "ip-restriction7.test" } }) local body = assert.res_status(200, res) @@ -443,7 +443,7 @@ 
for _, strategy in helpers.each_strategy() do method = "GET", path = "/request", headers = { - ["Host"] = "ip-restriction7.com", + ["Host"] = "ip-restriction7.test", ["X-Forwarded-For"] = "127.0.0.3" } }) @@ -456,7 +456,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/status/200", headers = { - ["Host"] = "ip-restriction7.com", + ["Host"] = "ip-restriction7.test", ["X-Forwarded-For"] = "127.0.0.4" } }) @@ -472,7 +472,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/status/200", headers = { - ["Host"] = "ip-restriction3.com" + ["Host"] = "ip-restriction3.test" } }) local body = assert.res_status(403, res) @@ -483,7 +483,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/status/200", headers = { - ["Host"] = "ip-restriction4.com" + ["Host"] = "ip-restriction4.test" } }) assert.res_status(200, res) @@ -495,7 +495,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/status/200", headers = { - ["Host"] = "ip-restriction6.com" + ["Host"] = "ip-restriction6.test" } }) local body = assert.res_status(403, res) @@ -506,7 +506,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/status/200", headers = { - ["Host"] = "ip-restriction6.com", + ["Host"] = "ip-restriction6.test", ["X-Forwarded-For"] = "127.0.0.3" } }) @@ -517,7 +517,7 @@ for _, strategy in helpers.each_strategy() do local ok, err = helpers.proxy_client_grpc(){ service = "hello.HelloService.SayHello", opts = { - ["-authority"] = "ip-restriction-grpc3.com", + ["-authority"] = "ip-restriction-grpc3.test", ["-v"] = true, }, } @@ -529,7 +529,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/status/200", headers = { - ["Host"] = "ip-restriction6.com", + ["Host"] = "ip-restriction6.test", ["X-Forwarded-For"] = "127.0.0.4" } }) @@ -539,7 +539,7 @@ for _, strategy in helpers.each_strategy() do assert.truthy(helpers.proxy_client_grpc(){ service = "hello.HelloService.SayHello", opts = { - ["-authority"] = "ip-restriction-grpc3.com", + ["-authority"] = "ip-restriction-grpc3.test", ["-v"] = true, ["-H"] = "'X-Forwarded-For: 127.0.0.4'", }, @@ -550,7 +550,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/status/200", headers = { - ["Host"] = "ip-restriction6.com", + ["Host"] = "ip-restriction6.test", ["X-Forwarded-For"] = "127.0.0.4, 127.0.0.3" } }) @@ -564,7 +564,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/request", headers = { - ["Host"] = "ip-restriction2.com" + ["Host"] = "ip-restriction2.test" } }) assert.res_status(200, res) @@ -589,7 +589,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/request", headers = { - ["Host"] = "ip-restriction2.com" + ["Host"] = "ip-restriction2.test" } }) local body = assert.res_status(403, res) @@ -615,7 +615,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/request", headers = { - ["Host"] = "ip-restriction2.com" + ["Host"] = "ip-restriction2.test" } }) assert.res_status(200, res) @@ -627,7 +627,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/status/200", headers = { - ["Host"] = "ip-restriction8.com" + ["Host"] = "ip-restriction8.test" } }) assert.res_status(200, res) @@ -650,39 +650,39 @@ for _, strategy in helpers.each_strategy() do }) local route1 = bp.routes:insert { - hosts = { "ip-restriction1.com" }, + hosts = { "ip-restriction1.test" }, } local route2 = bp.routes:insert { - hosts = { "ip-restriction2.com" }, + hosts = { 
"ip-restriction2.test" }, } local route3 = bp.routes:insert { - hosts = { "ip-restriction3.com" }, + hosts = { "ip-restriction3.test" }, } local route4 = bp.routes:insert { - hosts = { "ip-restriction4.com" }, + hosts = { "ip-restriction4.test" }, } local route5 = bp.routes:insert { - hosts = { "ip-restriction5.com" }, + hosts = { "ip-restriction5.test" }, } local route6 = bp.routes:insert { - hosts = { "ip-restriction6.com" }, + hosts = { "ip-restriction6.test" }, } local route7 = bp.routes:insert { - hosts = { "ip-restriction7.com" }, + hosts = { "ip-restriction7.test" }, } local route8 = bp.routes:insert { - hosts = { "ip-restriction8.com" }, + hosts = { "ip-restriction8.test" }, } local route9 = bp.routes:insert { - hosts = { "ip-restriction9.com" }, + hosts = { "ip-restriction9.test" }, } bp.plugins:insert { @@ -787,7 +787,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/status/200", headers = { - ["Host"] = "ip-restriction1.com", + ["Host"] = "ip-restriction1.test", ["X-Real-IP"] = "::1", } }) @@ -799,7 +799,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/request", headers = { - ["Host"] = "ip-restriction2.com", + ["Host"] = "ip-restriction2.test", ["X-Real-IP"] = "::1", } }) @@ -812,7 +812,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/status/200", headers = { - ["Host"] = "ip-restriction3.com", + ["Host"] = "ip-restriction3.test", ["X-Real-IP"] = "fe80::1", } }) @@ -824,7 +824,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/status/200", headers = { - ["Host"] = "ip-restriction8.com", + ["Host"] = "ip-restriction8.test", ["X-Real-IP"] = "::1", } }) @@ -836,7 +836,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/status/200", headers = { - ["Host"] = "ip-restriction7.com", + ["Host"] = "ip-restriction7.test", ["X-Real-IP"] = "::1", } }) @@ -848,7 +848,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/status/200", headers = { - ["Host"] = "ip-restriction9.com" + ["Host"] = "ip-restriction9.test" } }) local body = assert.res_status(403, res) @@ -862,7 +862,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/status/200", headers = { - ["Host"] = "ip-restriction4.com", + ["Host"] = "ip-restriction4.test", ["X-Real-IP"] = "::1", } }) @@ -874,7 +874,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/status/200", headers = { - ["Host"] = "ip-restriction5.com", + ["Host"] = "ip-restriction5.test", ["X-Real-IP"] = "::1", } }) @@ -889,7 +889,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/request", headers = { - ["Host"] = "ip-restriction2.com", + ["Host"] = "ip-restriction2.test", ["X-Real-IP"] = "::1", } }) @@ -924,7 +924,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/request", headers = { - ["Host"] = "ip-restriction2.com", + ["Host"] = "ip-restriction2.test", ["X-Real-IP"] = "::1", } }) @@ -958,7 +958,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/request", headers = { - ["Host"] = "ip-restriction2.com", + ["Host"] = "ip-restriction2.test", ["X-Real-IP"] = "::1", } }) @@ -973,7 +973,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/status/200", headers = { - ["Host"] = "ip-restriction6.com", + ["Host"] = "ip-restriction6.test", ["X-Real-IP"] = "::1", } }) @@ -997,11 +997,11 @@ for _, strategy in helpers.each_strategy() do }) local route1 = bp.routes:insert { - hosts = { 
"ip-restriction1.com" }, + hosts = { "ip-restriction1.test" }, } local route2 = bp.routes:insert { - hosts = { "ip-restriction2.com" }, + hosts = { "ip-restriction2.test" }, } bp.plugins:insert { @@ -1047,7 +1047,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/request", headers = { - ["Host"] = "ip-restriction1.com", + ["Host"] = "ip-restriction1.test", ["X-Forwarded-For"] = "::3", } }) @@ -1060,7 +1060,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/status/200", headers = { - ["Host"] = "ip-restriction1.com", + ["Host"] = "ip-restriction1.test", ["X-Forwarded-For"] = "::4" } }) @@ -1072,7 +1072,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/status/200", headers = { - ["Host"] = "ip-restriction1.com", + ["Host"] = "ip-restriction1.test", ["X-Forwarded-For"] = "::4, ::3" } }) @@ -1084,7 +1084,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/status/200", headers = { - ["Host"] = "ip-restriction1.com", + ["Host"] = "ip-restriction1.test", ["X-Forwarded-For"] = "::3, ::4" } }) @@ -1100,7 +1100,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/status/200", headers = { - ["Host"] = "ip-restriction2.com", + ["Host"] = "ip-restriction2.test", ["X-Forwarded-For"] = "::3" } }) @@ -1112,7 +1112,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/status/200", headers = { - ["Host"] = "ip-restriction2.com", + ["Host"] = "ip-restriction2.test", ["X-Forwarded-For"] = "::4" } }) @@ -1125,7 +1125,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/status/200", headers = { - ["Host"] = "ip-restriction2.com", + ["Host"] = "ip-restriction2.test", ["X-Forwarded-For"] = "::4, ::3" } }) @@ -1138,7 +1138,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/status/200", headers = { - ["Host"] = "ip-restriction2.com", + ["Host"] = "ip-restriction2.test", ["X-Forwarded-For"] = "::3, ::4" } }) diff --git a/spec/03-plugins/18-acl/02-access_spec.lua b/spec/03-plugins/18-acl/02-access_spec.lua index 6112802f00f2..157fc2afcf7b 100644 --- a/spec/03-plugins/18-acl/02-access_spec.lua +++ b/spec/03-plugins/18-acl/02-access_spec.lua @@ -90,7 +90,7 @@ for _, strategy in helpers.each_strategy() do } local route1 = bp.routes:insert { - hosts = { "acl1.com" }, + hosts = { "acl1.test" }, } bp.plugins:insert { @@ -102,7 +102,7 @@ for _, strategy in helpers.each_strategy() do } local route1b = bp.routes:insert { - hosts = { "acl1b.com" }, + hosts = { "acl1b.test" }, } bp.plugins:insert { @@ -123,7 +123,7 @@ for _, strategy in helpers.each_strategy() do } local route2 = bp.routes:insert { - hosts = { "acl2.com" }, + hosts = { "acl2.test" }, } bp.plugins:insert { @@ -141,7 +141,7 @@ for _, strategy in helpers.each_strategy() do } local route2b = bp.routes:insert { - hosts = { "acl2b.com" }, + hosts = { "acl2b.test" }, } bp.plugins:insert { @@ -163,7 +163,7 @@ for _, strategy in helpers.each_strategy() do } local route2c = bp.routes:insert { - hosts = { "acl2c.com" }, + hosts = { "acl2c.test" }, } bp.plugins:insert { @@ -185,7 +185,7 @@ for _, strategy in helpers.each_strategy() do } local route3 = bp.routes:insert { - hosts = { "acl3.com" }, + hosts = { "acl3.test" }, } bp.plugins:insert { @@ -203,7 +203,7 @@ for _, strategy in helpers.each_strategy() do } local route3b = bp.routes:insert { - hosts = { "acl3b.com" }, + hosts = { "acl3b.test" }, } bp.plugins:insert { @@ -225,7 +225,7 @@ for _, strategy in helpers.each_strategy() 
do } local route3c = bp.routes:insert { - hosts = { "acl3c.com" }, + hosts = { "acl3c.test" }, } bp.plugins:insert { @@ -247,7 +247,7 @@ for _, strategy in helpers.each_strategy() do } local route3d = bp.routes:insert { - hosts = { "acl3d.com" }, + hosts = { "acl3d.test" }, } bp.plugins:insert { @@ -259,7 +259,7 @@ for _, strategy in helpers.each_strategy() do } local route4 = bp.routes:insert { - hosts = { "acl4.com" }, + hosts = { "acl4.test" }, } bp.plugins:insert { @@ -277,7 +277,7 @@ for _, strategy in helpers.each_strategy() do } local route4b = bp.routes:insert { - hosts = { "acl4b.com" }, + hosts = { "acl4b.test" }, } bp.plugins:insert { @@ -299,7 +299,7 @@ for _, strategy in helpers.each_strategy() do } local route4c = bp.routes:insert { - hosts = { "acl4c.com" }, + hosts = { "acl4c.test" }, } bp.plugins:insert { @@ -321,7 +321,7 @@ for _, strategy in helpers.each_strategy() do } local route5 = bp.routes:insert { - hosts = { "acl5.com" }, + hosts = { "acl5.test" }, } bp.plugins:insert { @@ -339,7 +339,7 @@ for _, strategy in helpers.each_strategy() do } local route5b = bp.routes:insert { - hosts = { "acl5b.com" }, + hosts = { "acl5b.test" }, } bp.plugins:insert { @@ -361,7 +361,7 @@ for _, strategy in helpers.each_strategy() do } local route5c = bp.routes:insert { - hosts = { "acl5c.com" }, + hosts = { "acl5c.test" }, } bp.plugins:insert { @@ -383,7 +383,7 @@ for _, strategy in helpers.each_strategy() do } local route6 = bp.routes:insert { - hosts = { "acl6.com" }, + hosts = { "acl6.test" }, } bp.plugins:insert { @@ -401,7 +401,7 @@ for _, strategy in helpers.each_strategy() do } local route6b = bp.routes:insert { - hosts = { "acl6b.com" }, + hosts = { "acl6b.test" }, } bp.plugins:insert { @@ -423,7 +423,7 @@ for _, strategy in helpers.each_strategy() do } local route6c = bp.routes:insert { - hosts = { "acl6c.com" }, + hosts = { "acl6c.test" }, } bp.plugins:insert { @@ -445,7 +445,7 @@ for _, strategy in helpers.each_strategy() do } local route7 = bp.routes:insert { - hosts = { "acl7.com" }, + hosts = { "acl7.test" }, } bp.plugins:insert { @@ -463,7 +463,7 @@ for _, strategy in helpers.each_strategy() do } local route7b = bp.routes:insert { - hosts = { "acl7b.com" }, + hosts = { "acl7b.test" }, } bp.plugins:insert { @@ -485,7 +485,7 @@ for _, strategy in helpers.each_strategy() do } local route8 = bp.routes:insert { - hosts = { "acl8.com" }, + hosts = { "acl8.test" }, } bp.plugins:insert { @@ -505,7 +505,7 @@ for _, strategy in helpers.each_strategy() do } local route8b = bp.routes:insert { - hosts = { "acl8b.com" }, + hosts = { "acl8b.test" }, } bp.plugins:insert { @@ -535,7 +535,7 @@ for _, strategy in helpers.each_strategy() do } local route9 = bp.routes:insert { - hosts = { "acl9.com" }, + hosts = { "acl9.test" }, } bp.plugins:insert { @@ -554,7 +554,7 @@ for _, strategy in helpers.each_strategy() do } local route9b = bp.routes:insert { - hosts = { "acl9b.com" }, + hosts = { "acl9b.test" }, } bp.plugins:insert { @@ -577,7 +577,7 @@ for _, strategy in helpers.each_strategy() do } local route10 = bp.routes:insert { - hosts = { "acl10.com" }, + hosts = { "acl10.test" }, } bp.plugins:insert { @@ -596,7 +596,7 @@ for _, strategy in helpers.each_strategy() do } local route10b = bp.routes:insert { - hosts = { "acl10b.com" }, + hosts = { "acl10b.test" }, } bp.plugins:insert { @@ -619,7 +619,7 @@ for _, strategy in helpers.each_strategy() do } local route11 = bp.routes:insert { - hosts = { "acl11.com" }, + hosts = { "acl11.test" }, } bp.plugins:insert { @@ -650,7 +650,7 @@ for _, 
strategy in helpers.each_strategy() do } local route12 = bp.routes:insert { - hosts = { "acl12.com" }, + hosts = { "acl12.test" }, } bp.plugins:insert { @@ -681,7 +681,7 @@ for _, strategy in helpers.each_strategy() do } local route13 = bp.routes:insert { - hosts = { "acl13.com" }, + hosts = { "acl13.test" }, } bp.plugins:insert { @@ -712,7 +712,7 @@ for _, strategy in helpers.each_strategy() do } local route14 = bp.routes:insert({ - hosts = { "acl14.com" } + hosts = { "acl14.test" } }) local acl_prefunction_code = " local consumer_id = \"" .. tostring(consumer2.id) .. "\"\n" .. [[ @@ -766,7 +766,7 @@ for _, strategy in helpers.each_strategy() do it("should work with consumer with credentials", function() local res = assert(proxy_client:get("/request?apikey=apikey124", { headers = { - ["Host"] = "acl2.com" + ["Host"] = "acl2.test" } })) @@ -778,7 +778,7 @@ for _, strategy in helpers.each_strategy() do it("should work with authenticated groups", function() local res = assert(proxy_client:get("/request", { headers = { - ["Host"] = "acl2b.com" + ["Host"] = "acl2b.test" } })) @@ -790,7 +790,7 @@ for _, strategy in helpers.each_strategy() do it("should work with consumer without credentials", function() local res = assert(proxy_client:get("/request", { headers = { - ["Host"] = "acl8.com" + ["Host"] = "acl8.test" } })) @@ -802,7 +802,7 @@ for _, strategy in helpers.each_strategy() do it("should work with authenticated groups without credentials", function() local res = assert(proxy_client:get("/request", { headers = { - ["Host"] = "acl8b.com" + ["Host"] = "acl8b.test" } })) @@ -817,7 +817,7 @@ for _, strategy in helpers.each_strategy() do it("should fail when an authentication plugin is missing", function() local res = assert(proxy_client:get("/status/200", { headers = { - ["Host"] = "acl1.com" + ["Host"] = "acl1.test" } })) local body = assert.res_status(401, res) @@ -829,7 +829,7 @@ for _, strategy in helpers.each_strategy() do it("should fail when an authentication plugin is missing (with credential)", function() local res = assert(proxy_client:get("/status/200", { headers = { - ["Host"] = "acl1b.com" + ["Host"] = "acl1b.test" } })) local body = assert.res_status(403, res) @@ -841,7 +841,7 @@ for _, strategy in helpers.each_strategy() do it("should fail when not allowed", function() local res = assert(proxy_client:get("/status/200?apikey=apikey123", { headers = { - ["Host"] = "acl2.com" + ["Host"] = "acl2.test" } })) local body = assert.res_status(403, res) @@ -853,7 +853,7 @@ for _, strategy in helpers.each_strategy() do it("should fail when not allowed with authenticated groups", function() local res = assert(proxy_client:get("/status/200", { headers = { - ["Host"] = "acl2c.com" + ["Host"] = "acl2c.test" } })) local body = assert.res_status(403, res) @@ -865,7 +865,7 @@ for _, strategy in helpers.each_strategy() do it("should work when allowed", function() local res = assert(proxy_client:get("/request?apikey=apikey124", { headers = { - ["Host"] = "acl2.com" + ["Host"] = "acl2.test" } })) local body = cjson.decode(assert.res_status(200, res)) @@ -876,7 +876,7 @@ for _, strategy in helpers.each_strategy() do it("should work when allowed with authenticated groups", function() local res = assert(proxy_client:get("/request", { headers = { - ["Host"] = "acl2b.com" + ["Host"] = "acl2b.test" } })) local body = cjson.decode(assert.res_status(200, res)) @@ -887,7 +887,7 @@ for _, strategy in helpers.each_strategy() do it("should not send x-consumer-groups header when hide_groups_header flag true", 
function() local res = assert(proxy_client:get("/request?apikey=apikey124", { headers = { - ["Host"] = "acl9.com" + ["Host"] = "acl9.test" } })) local body = cjson.decode(assert.res_status(200, res)) @@ -898,7 +898,7 @@ for _, strategy in helpers.each_strategy() do it("should not send x-authenticated-groups header when hide_groups_header flag true", function() local res = assert(proxy_client:get("/request", { headers = { - ["Host"] = "acl9b.com" + ["Host"] = "acl9b.test" } })) local body = cjson.decode(assert.res_status(200, res)) @@ -909,7 +909,7 @@ for _, strategy in helpers.each_strategy() do it("should send x-consumer-groups header when hide_groups_header flag false", function() local res = assert(proxy_client:get("/request?apikey=apikey124", { headers = { - ["Host"] = "acl10.com" + ["Host"] = "acl10.test" } })) local body = cjson.decode(assert.res_status(200, res)) @@ -920,7 +920,7 @@ for _, strategy in helpers.each_strategy() do it("should send x-authenticated-groups header when hide_groups_header flag false", function() local res = assert(proxy_client:get("/request", { headers = { - ["Host"] = "acl10b.com" + ["Host"] = "acl10b.test" } })) local body = cjson.decode(assert.res_status(200, res)) @@ -931,7 +931,7 @@ for _, strategy in helpers.each_strategy() do it("should work when not denied", function() local res = assert(proxy_client:get("/request?apikey=apikey123", { headers = { - ["Host"] = "acl3.com" + ["Host"] = "acl3.test" } })) assert.res_status(200, res) @@ -940,7 +940,7 @@ for _, strategy in helpers.each_strategy() do it("should work when not denied with authenticated groups", function() local res = assert(proxy_client:get("/request", { headers = { - ["Host"] = "acl3b.com" + ["Host"] = "acl3b.test" } })) assert.res_status(200, res) @@ -949,7 +949,7 @@ for _, strategy in helpers.each_strategy() do it("should fail when denied", function() local res = assert(proxy_client:get("/request?apikey=apikey124", { headers = { - ["Host"] = "acl3.com" + ["Host"] = "acl3.test" } })) local body = assert.res_status(403, res) @@ -961,7 +961,7 @@ for _, strategy in helpers.each_strategy() do it("should fail when denied with authenticated groups", function() local res = assert(proxy_client:get("/request", { headers = { - ["Host"] = "acl3c.com" + ["Host"] = "acl3c.test" } })) local body = assert.res_status(403, res) @@ -973,7 +973,7 @@ for _, strategy in helpers.each_strategy() do it("should fail denied and with no authenticated groups", function() local res = assert(proxy_client:get("/request", { headers = { - ["Host"] = "acl3d.com" + ["Host"] = "acl3d.test" } })) local body = assert.res_status(401, res) @@ -987,7 +987,7 @@ for _, strategy in helpers.each_strategy() do it("should work when allowed", function() local res = assert(proxy_client:get("/request?apikey=apikey125", { headers = { - ["Host"] = "acl4.com" + ["Host"] = "acl4.test" } })) local body = cjson.decode(assert.res_status(200, res)) @@ -998,7 +998,7 @@ for _, strategy in helpers.each_strategy() do it("should work when allowed with authenticated groups", function() local res = assert(proxy_client:get("/request", { headers = { - ["Host"] = "acl4b.com" + ["Host"] = "acl4b.test" } })) local body = cjson.decode(assert.res_status(200, res)) @@ -1009,7 +1009,7 @@ for _, strategy in helpers.each_strategy() do it("should fail when not allowed", function() local res = assert(proxy_client:get("/request?apikey=apikey126", { headers = { - ["Host"] = "acl4.com" + ["Host"] = "acl4.test" } })) local body = assert.res_status(403, res) @@ -1021,7 
+1021,7 @@ for _, strategy in helpers.each_strategy() do it("should fail when not allowed with authenticated groups", function() local res = assert(proxy_client:get("/request", { headers = { - ["Host"] = "acl4c.com" + ["Host"] = "acl4c.test" } })) local body = assert.res_status(403, res) @@ -1033,7 +1033,7 @@ for _, strategy in helpers.each_strategy() do it("should fail when denied", function() local res = assert(proxy_client:get("/request?apikey=apikey125", { headers = { - ["Host"] = "acl5.com" + ["Host"] = "acl5.test" } })) local body = assert.res_status(403, res) @@ -1045,7 +1045,7 @@ for _, strategy in helpers.each_strategy() do it("should fail when denied with authenticated groups", function() local res = assert(proxy_client:get("/request", { headers = { - ["Host"] = "acl5b.com" + ["Host"] = "acl5b.test" } })) local body = assert.res_status(403, res) @@ -1058,7 +1058,7 @@ for _, strategy in helpers.each_strategy() do it("should work when not denied", function() local res = assert(proxy_client:get("/request?apikey=apikey126", { headers = { - ["Host"] = "acl5.com" + ["Host"] = "acl5.test" } })) assert.res_status(200, res) @@ -1067,7 +1067,7 @@ for _, strategy in helpers.each_strategy() do it("should work when not denied with authenticated groups", function() local res = assert(proxy_client:get("/request", { headers = { - ["Host"] = "acl5c.com" + ["Host"] = "acl5c.test" } })) assert.res_status(200, res) @@ -1076,7 +1076,7 @@ for _, strategy in helpers.each_strategy() do it("should not work when one of the ACLs denied", function() local res = assert(proxy_client:get("/request?apikey=apikey126", { headers = { - ["Host"] = "acl6.com" + ["Host"] = "acl6.test" } })) local body = assert.res_status(403, res) @@ -1088,7 +1088,7 @@ for _, strategy in helpers.each_strategy() do it("should not work when one of the ACLs denied with authenticated groups", function() local res = assert(proxy_client:get("/request", { headers = { - ["Host"] = "acl6b.com" + ["Host"] = "acl6b.test" } })) local body = assert.res_status(403, res) @@ -1100,7 +1100,7 @@ for _, strategy in helpers.each_strategy() do it("should work when one of the ACLs is allowed", function() local res = assert(proxy_client:get("/request?apikey=apikey126", { headers = { - ["Host"] = "acl7.com" + ["Host"] = "acl7.test" } })) assert.res_status(200, res) @@ -1109,7 +1109,7 @@ for _, strategy in helpers.each_strategy() do it("should work when one of the ACLs is allowed with authenticated groups", function() local res = assert(proxy_client:get("/request", { headers = { - ["Host"] = "acl7b.com" + ["Host"] = "acl7b.test" } })) assert.res_status(200, res) @@ -1118,7 +1118,7 @@ for _, strategy in helpers.each_strategy() do it("should not work when at least one of the ACLs denied", function() local res = assert(proxy_client:get("/request?apikey=apikey125", { headers = { - ["Host"] = "acl6.com" + ["Host"] = "acl6.test" } })) local body = assert.res_status(403, res) @@ -1130,7 +1130,7 @@ for _, strategy in helpers.each_strategy() do it("should not work when at least one of the ACLs denied with authenticated groups", function() local res = assert(proxy_client:get("/request", { headers = { - ["Host"] = "acl6c.com" + ["Host"] = "acl6c.test" } })) local body = assert.res_status(403, res) @@ -1174,7 +1174,7 @@ for _, strategy in helpers.each_strategy() do ["Content-Type"] = "application/json" }, body = { - hosts = { "acl_test" .. i .. ".com" }, + hosts = { "acl_test" .. i .. 
".test" }, protocols = { "http", "https" }, service = { id = service.id @@ -1233,7 +1233,7 @@ for _, strategy in helpers.each_strategy() do helpers.wait_until(function() res = assert(proxy_client:get("/status/200?apikey=secret123", { headers = { - ["Host"] = "acl_test" .. i .. ".com" + ["Host"] = "acl_test" .. i .. ".test" } })) res:read_body() @@ -1253,7 +1253,7 @@ for _, strategy in helpers.each_strategy() do ["Content-Type"] = "application/json" }, body = { - hosts = { "acl_test" .. i .. "b.com" }, + hosts = { "acl_test" .. i .. "b.test" }, protocols = { "http", "https" }, service = { id = service.id @@ -1300,7 +1300,7 @@ for _, strategy in helpers.each_strategy() do helpers.wait_until(function() res = assert(proxy_client:get("/status/200", { headers = { - ["Host"] = "acl_test" .. i .. "b.com" + ["Host"] = "acl_test" .. i .. "b.test" } })) res:read_body() @@ -1316,7 +1316,7 @@ for _, strategy in helpers.each_strategy() do it("authenticated consumer even when authorized groups are present", function() local res = assert(proxy_client:get("/request?apikey=apikey124", { headers = { - ["Host"] = "acl11.com" + ["Host"] = "acl11.test" } })) local body = cjson.decode(assert.res_status(200, res)) @@ -1327,7 +1327,7 @@ for _, strategy in helpers.each_strategy() do it("authorized groups even when anonymous consumer is present", function() local res = assert(proxy_client:get("/request", { headers = { - ["Host"] = "acl11.com" + ["Host"] = "acl11.test" } })) local body = cjson.decode(assert.res_status(200, res)) @@ -1340,7 +1340,7 @@ for _, strategy in helpers.each_strategy() do it("authenticated consumer even when authorized groups are present", function() local res = assert(proxy_client:get("/request?apikey=apikey124", { headers = { - ["Host"] = "acl12.com" + ["Host"] = "acl12.test" } })) local body = assert.res_status(403, res) @@ -1352,7 +1352,7 @@ for _, strategy in helpers.each_strategy() do it("authorized groups even when anonymous consumer is present", function() local res = assert(proxy_client:get("/request", { headers = { - ["Host"] = "acl13.com" + ["Host"] = "acl13.test" } })) local body = assert.res_status(403, res) @@ -1374,7 +1374,7 @@ for _, strategy in helpers.each_strategy() do proxy_client = helpers.proxy_client() local res = assert(proxy_client:get("/request", { headers = { - ["Host"] = "acl14.com" + ["Host"] = "acl14.test" } })) assert.res_status(200, res) diff --git a/spec/03-plugins/18-acl/03-invalidations_spec.lua b/spec/03-plugins/18-acl/03-invalidations_spec.lua index 14abec7e3610..164bf125c7a6 100644 --- a/spec/03-plugins/18-acl/03-invalidations_spec.lua +++ b/spec/03-plugins/18-acl/03-invalidations_spec.lua @@ -53,7 +53,7 @@ for _, strategy in helpers.each_strategy() do } local route1 = bp.routes:insert { - hosts = { "acl1.com" }, + hosts = { "acl1.test" }, } bp.plugins:insert { @@ -70,7 +70,7 @@ for _, strategy in helpers.each_strategy() do } local route2 = bp.routes:insert { - hosts = { "acl2.com" }, + hosts = { "acl2.test" }, } bp.plugins:insert { @@ -109,7 +109,7 @@ for _, strategy in helpers.each_strategy() do -- It should work local res = assert(proxy_client:get("/status/200?apikey=apikey123", { headers = { - ["Host"] = "acl1.com" + ["Host"] = "acl1.test" } })) assert.res_status(200, res) @@ -134,7 +134,7 @@ for _, strategy in helpers.each_strategy() do -- It should not work local res = assert(proxy_client:get("/status/200?apikey=apikey123", { headers = { - ["Host"] = "acl1.com" + ["Host"] = "acl1.test" } })) assert.res_status(403, res) @@ -143,7 +143,7 @@ for _, 
strategy in helpers.each_strategy() do -- It should work local res = assert(proxy_client:get("/status/200?apikey=apikey123&prova=scemo", { headers = { - ["Host"] = "acl1.com" + ["Host"] = "acl1.test" } })) assert.res_status(200, res) @@ -151,7 +151,7 @@ for _, strategy in helpers.each_strategy() do -- It should not work local res = assert(proxy_client:get("/status/200?apikey=apikey123", { headers = { - ["Host"] = "acl2.com" + ["Host"] = "acl2.test" } })) assert.res_status(403, res) @@ -180,7 +180,7 @@ for _, strategy in helpers.each_strategy() do -- It should not work local res = assert(proxy_client:get("/status/200?apikey=apikey123", { headers = { - ["Host"] = "acl1.com" + ["Host"] = "acl1.test" } })) assert.res_status(403, res) @@ -188,7 +188,7 @@ for _, strategy in helpers.each_strategy() do -- It works now local res = assert(proxy_client:get("/status/200?apikey=apikey123", { headers = { - ["Host"] = "acl2.com" + ["Host"] = "acl2.test" } })) assert.res_status(200, res) @@ -200,7 +200,7 @@ for _, strategy in helpers.each_strategy() do -- It should work local res = assert(proxy_client:get("/status/200?apikey=apikey123", { headers = { - ["Host"] = "acl1.com" + ["Host"] = "acl1.test" } })) assert.res_status(200, res) @@ -228,7 +228,7 @@ for _, strategy in helpers.each_strategy() do -- It should not work local res = assert(proxy_client:get("/status/200?apikey=apikey123", { headers = { - ["Host"] = "acl1.com" + ["Host"] = "acl1.test" } })) assert.res_status(401, res) diff --git a/spec/03-plugins/19-hmac-auth/03-access_spec.lua b/spec/03-plugins/19-hmac-auth/03-access_spec.lua index 0269ecafc5f0..9d88f4a50553 100644 --- a/spec/03-plugins/19-hmac-auth/03-access_spec.lua +++ b/spec/03-plugins/19-hmac-auth/03-access_spec.lua @@ -31,7 +31,7 @@ for _, strategy in helpers.each_strategy() do }) local route1 = bp.routes:insert { - hosts = { "hmacauth.com" }, + hosts = { "hmacauth.test" }, } local route_grpc = assert(bp.routes:insert { @@ -75,7 +75,7 @@ for _, strategy in helpers.each_strategy() do } local route2 = bp.routes:insert { - hosts = { "hmacauth2.com" }, + hosts = { "hmacauth2.test" }, } bp.plugins:insert { @@ -88,7 +88,7 @@ for _, strategy in helpers.each_strategy() do } local route3 = bp.routes:insert { - hosts = { "hmacauth3.com" }, + hosts = { "hmacauth3.test" }, } bp.plugins:insert { @@ -101,7 +101,7 @@ for _, strategy in helpers.each_strategy() do } local route4 = bp.routes:insert { - hosts = { "hmacauth4.com" }, + hosts = { "hmacauth4.test" }, } bp.plugins:insert { @@ -114,7 +114,7 @@ for _, strategy in helpers.each_strategy() do } local route5 = bp.routes:insert { - hosts = { "hmacauth5.com" }, + hosts = { "hmacauth5.test" }, } bp.plugins:insert { @@ -128,7 +128,7 @@ for _, strategy in helpers.each_strategy() do } local route6 = bp.routes:insert { - hosts = { "hmacauth6.com" }, + hosts = { "hmacauth6.test" }, } bp.plugins:insert { @@ -143,7 +143,7 @@ for _, strategy in helpers.each_strategy() do } local route7 = bp.routes:insert { - hosts = { "hmacauth7.com" }, + hosts = { "hmacauth7.test" }, } bp.plugins:insert { @@ -181,7 +181,7 @@ for _, strategy in helpers.each_strategy() do method = "POST", body = {}, headers = { - ["HOST"] = "hmacauth.com", + ["HOST"] = "hmacauth.test", date = date } }) @@ -205,7 +205,7 @@ for _, strategy in helpers.each_strategy() do method = "POST", body = {}, headers = { - ["HOST"] = "hmacauth.com", + ["HOST"] = "hmacauth.test", date = date, authorization = "asd" } @@ -222,7 +222,7 @@ for _, strategy in helpers.each_strategy() do local res = 
assert(proxy_client:send { method = "POST", headers = { - ["HOST"] = "hmacauth.com", + ["HOST"] = "hmacauth.test", date = date, authorization = hmacAuth } @@ -237,7 +237,7 @@ for _, strategy in helpers.each_strategy() do method = "POST", body = {}, headers = { - ["HOST"] = "hmacauth.com", + ["HOST"] = "hmacauth.test", authorization = "asd" } }) @@ -254,7 +254,7 @@ for _, strategy in helpers.each_strategy() do method = "POST", body = {}, headers = { - ["HOST"] = "hmacauth.com", + ["HOST"] = "hmacauth.test", date = date, ["proxy-authorization"] = "asd" } @@ -270,7 +270,7 @@ for _, strategy in helpers.each_strategy() do method = "POST", body = {}, headers = { - ["HOST"] = "hmacauth.com", + ["HOST"] = "hmacauth.test", date = date, ["proxy-authorization"] = "hmac :dXNlcm5hbWU6cGFzc3dvcmQ=" } @@ -286,7 +286,7 @@ for _, strategy in helpers.each_strategy() do method = "POST", body = {}, headers = { - ["HOST"] = "hmacauth.com", + ["HOST"] = "hmacauth.test", date = date, ["proxy-authorization"] = [[hmac username=,algorithm,]] .. [[headers,dXNlcm5hbWU6cGFzc3dvcmQ=]] @@ -303,7 +303,7 @@ for _, strategy in helpers.each_strategy() do method = "POST", body = {}, headers = { - ["HOST"] = "hmacauth.com", + ["HOST"] = "hmacauth.test", date = date, authorization = [[hmac username=,algorithm,]] .. [[headers,dXNlcm5hbWU6cGFzc3dvcmQ=]] @@ -320,7 +320,7 @@ for _, strategy in helpers.each_strategy() do method = "POST", body = {}, headers = { - ["HOST"] = "hmacauth.com", + ["HOST"] = "hmacauth.test", date = date, authorization = "hmac username" } @@ -336,7 +336,7 @@ for _, strategy in helpers.each_strategy() do method = "POST", body = {}, headers = { - ["HOST"] = "hmacauth.com", + ["HOST"] = "hmacauth.test", date = date, } }) @@ -355,7 +355,7 @@ for _, strategy in helpers.each_strategy() do path = "/request", body = {}, headers = { - ["HOST"] = "hmacauth.com", + ["HOST"] = "hmacauth.test", date = date, authorization = hmacAuth, }, @@ -375,7 +375,7 @@ for _, strategy in helpers.each_strategy() do path = "/request", body = {}, headers = { - ["HOST"] = "hmacauth.com", + ["HOST"] = "hmacauth.test", date = date, authorization = hmacAuth, }, @@ -396,7 +396,7 @@ for _, strategy in helpers.each_strategy() do path = "/request", body = {}, headers = { - ["HOST"] = "hmacauth.com", + ["HOST"] = "hmacauth.test", date = date, authorization = hmacAuth, }, @@ -450,7 +450,7 @@ for _, strategy in helpers.each_strategy() do path = "/request", body = {}, headers = { - ["HOST"] = "hmacauth.com", + ["HOST"] = "hmacauth.test", date = date, ["proxy-authorization"] = hmacAuth, }, @@ -468,7 +468,7 @@ for _, strategy in helpers.each_strategy() do path = "/request", body = {}, headers = { - ["HOST"] = "hmacauth.com", + ["HOST"] = "hmacauth.test", date = date, authorization = hmacAuth, }, @@ -488,7 +488,7 @@ for _, strategy in helpers.each_strategy() do path = "/request", body = {}, headers = { - ["HOST"] = "hmacauth.com", + ["HOST"] = "hmacauth.test", date = date, ["proxy-authorization"] = "hmac username", authorization = hmacAuth, @@ -510,7 +510,7 @@ for _, strategy in helpers.each_strategy() do path = "/request", body = {}, headers = { - ["HOST"] = "hmacauth.com", + ["HOST"] = "hmacauth.test", date = date, ["proxy-authorization"] = hmacAuth, authorization = "hello", @@ -530,7 +530,7 @@ for _, strategy in helpers.each_strategy() do path = "/request", body = {}, headers = { - ["HOST"] = "hmacauth.com", + ["HOST"] = "hmacauth.test", date = date, ["proxy-authorization"] = hmacAuth, authorization = "hello", @@ -553,7 +553,7 @@ for _, strategy 
in helpers.each_strategy() do path = "/request", body = {}, headers = { - ["HOST"] = "hmacauth.com", + ["HOST"] = "hmacauth.test", date = date, ["proxy-authorization"] = hmacAuth, authorization = "hello", @@ -576,7 +576,7 @@ for _, strategy in helpers.each_strategy() do path = "/request", body = {}, headers = { - ["HOST"] = "hmacauth.com", + ["HOST"] = "hmacauth.test", date = date, ["proxy-authorization"] = hmacAuth, authorization = "hello", @@ -600,7 +600,7 @@ for _, strategy in helpers.each_strategy() do path = "/request", body = {}, headers = { - ["HOST"] = "hmacauth.com", + ["HOST"] = "hmacauth.test", date = date, ["proxy-authorization"] = hmacAuth, authorization = "hello", @@ -624,7 +624,7 @@ for _, strategy in helpers.each_strategy() do path = "/request", body = {}, headers = { - ["HOST"] = "hmacauth.com", + ["HOST"] = "hmacauth.test", date = date, ["proxy-authorization"] = hmacAuth, authorization = "hello", @@ -650,7 +650,7 @@ for _, strategy in helpers.each_strategy() do path = "/request", body = {}, headers = { - ["HOST"] = "hmacauth.com", + ["HOST"] = "hmacauth.test", date = date, ["proxy-authorization"] = hmacAuth, authorization = "hello", @@ -677,7 +677,7 @@ for _, strategy in helpers.each_strategy() do path = "/request", body = {}, headers = { - ["HOST"] = "hmacauth.com", + ["HOST"] = "hmacauth.test", date = date, ["proxy-authorization"] = hmacAuth, authorization = "hello", @@ -703,7 +703,7 @@ for _, strategy in helpers.each_strategy() do path = "/request", body = {}, headers = { - ["HOST"] = "hmacauth.com", + ["HOST"] = "hmacauth.test", date = date, ["proxy-authorization"] = hmacAuth, authorization = "hello", @@ -728,7 +728,7 @@ for _, strategy in helpers.each_strategy() do path = "/request", body = {}, headers = { - ["HOST"] = "hmacauth.com", + ["HOST"] = "hmacauth.test", date = date, ["proxy-authorization"] = hmacAuth, authorization = "hello", @@ -753,7 +753,7 @@ for _, strategy in helpers.each_strategy() do path = "/request", body = {}, headers = { - ["HOST"] = "hmacauth.com", + ["HOST"] = "hmacauth.test", date = date, ["proxy-authorization"] = hmacAuth, authorization = "hello", @@ -778,7 +778,7 @@ for _, strategy in helpers.each_strategy() do path = "/request", body = {}, headers = { - ["HOST"] = "hmacauth.com", + ["HOST"] = "hmacauth.test", date = date, ["proxy-authorization"] = hmacAuth, authorization = "hello", @@ -803,7 +803,7 @@ for _, strategy in helpers.each_strategy() do path = "/request", body = {}, headers = { - ["HOST"] = "hmacauth.com", + ["HOST"] = "hmacauth.test", date = date, ["proxy-authorization"] = hmacAuth, authorization = "hello", @@ -826,7 +826,7 @@ for _, strategy in helpers.each_strategy() do path = "/request", body = {}, headers = { - ["HOST"] = "hmacauth.com", + ["HOST"] = "hmacauth.test", date = date, ["proxy-authorization"] = hmacAuth, authorization = "hello", @@ -849,7 +849,7 @@ for _, strategy in helpers.each_strategy() do path = "/request", body = {}, headers = { - ["HOST"] = "hmacauth.com", + ["HOST"] = "hmacauth.test", date = date, ["proxy-authorization"] = hmacAuth, authorization = "hello", @@ -877,7 +877,7 @@ for _, strategy in helpers.each_strategy() do path = "/request", body = {}, headers = { - ["HOST"] = "hmacauth.com", + ["HOST"] = "hmacauth.test", ["x-date"] = date, authorization = hmacAuth, ["content-md5"] = "md5", @@ -898,7 +898,7 @@ for _, strategy in helpers.each_strategy() do path = "/request", body = {}, headers = { - ["HOST"] = "hmacauth.com", + ["HOST"] = "hmacauth.test", ["proxy-authorization"] = hmacAuth, authorization = 
"hello", ["content-md5"] = "md5", @@ -923,7 +923,7 @@ for _, strategy in helpers.each_strategy() do path = "/request", body = {}, headers = { - ["HOST"] = "hmacauth.com", + ["HOST"] = "hmacauth.test", ["x-date"] = "wrong date", authorization = hmacAuth, ["content-md5"] = "md5", @@ -948,7 +948,7 @@ for _, strategy in helpers.each_strategy() do path = "/request", body = {}, headers = { - ["HOST"] = "hmacauth.com", + ["HOST"] = "hmacauth.test", ["x-date"] = "wrong date", date = date, authorization = hmacAuth, @@ -971,7 +971,7 @@ for _, strategy in helpers.each_strategy() do path = "/request", body = {}, headers = { - ["HOST"] = "hmacauth.com", + ["HOST"] = "hmacauth.test", ["x-date"] = "wrong date", date = date, authorization = hmacAuth, @@ -994,7 +994,7 @@ for _, strategy in helpers.each_strategy() do path = "/request", body = {}, headers = { - ["HOST"] = "hmacauth.com", + ["HOST"] = "hmacauth.test", ["x-date"] = "wrong date", date = date, authorization = hmacAuth, @@ -1017,7 +1017,7 @@ for _, strategy in helpers.each_strategy() do path = "/request", body = {}, headers = { - ["HOST"] = "hmacauth.com", + ["HOST"] = "hmacauth.test", ["x-date"] = date, date = "wrong date", authorization = hmacAuth, @@ -1037,7 +1037,7 @@ for _, strategy in helpers.each_strategy() do path = "/request", body = {}, headers = { - ["HOST"] = "hmacauth2.com", + ["HOST"] = "hmacauth2.test", date = date, authorization = hmacAuth, }, @@ -1065,7 +1065,7 @@ for _, strategy in helpers.each_strategy() do path = "/request", body = postBody, headers = { - ["HOST"] = "hmacauth4.com", + ["HOST"] = "hmacauth4.test", date = date, authorization = hmacAuth, } @@ -1086,7 +1086,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/request", headers = { - ["HOST"] = "hmacauth4.com", + ["HOST"] = "hmacauth4.test", date = date, authorization = hmacAuth, } @@ -1108,7 +1108,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/request", headers = { - ["HOST"] = "hmacauth4.com", + ["HOST"] = "hmacauth4.test", date = date, digest = digest, authorization = hmacAuth, @@ -1123,7 +1123,7 @@ for _, strategy in helpers.each_strategy() do path = "/request", body = {}, headers = { - ["HOST"] = "hmacauth2.com", + ["HOST"] = "hmacauth2.test", }, }) local body = assert.res_status(200, res) @@ -1139,7 +1139,7 @@ for _, strategy in helpers.each_strategy() do path = "/request", body = {}, headers = { - ["HOST"] = "hmacauth7.com", + ["HOST"] = "hmacauth7.test", }, }) local body = assert.res_status(200, res) @@ -1157,7 +1157,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/request", headers = { - ["Host"] = "hmacauth3.com", + ["Host"] = "hmacauth3.test", }, }) assert.response(res).has.status(500) @@ -1173,7 +1173,7 @@ for _, strategy in helpers.each_strategy() do path = "/request", body = {}, headers = { - ["HOST"] = "hmacauth4.com", + ["HOST"] = "hmacauth4.test", date = date, authorization = hmacAuth, }, @@ -1197,7 +1197,7 @@ for _, strategy in helpers.each_strategy() do path = "/request", body = postBody, headers = { - ["HOST"] = "hmacauth4.com", + ["HOST"] = "hmacauth4.test", date = date, digest = digest, authorization = hmacAuth, @@ -1222,7 +1222,7 @@ for _, strategy in helpers.each_strategy() do path = "/request", body = postBody, headers = { - ["HOST"] = "hmacauth4.com", + ["HOST"] = "hmacauth4.test", date = date, digest = digest, authorization = hmacAuth, @@ -1247,7 +1247,7 @@ for _, strategy in helpers.each_strategy() do path = "/request", body = postBody, headers = { - ["HOST"] 
= "hmacauth4.com", + ["HOST"] = "hmacauth4.test", date = date, authorization = hmacAuth, }, @@ -1273,7 +1273,7 @@ for _, strategy in helpers.each_strategy() do path = "/request", body = "abc", headers = { - ["HOST"] = "hmacauth4.com", + ["HOST"] = "hmacauth4.test", date = date, digest = digest, authorization = hmacAuth, @@ -1300,7 +1300,7 @@ for _, strategy in helpers.each_strategy() do path = "/request", body = postBody, headers = { - ["HOST"] = "hmacauth4.com", + ["HOST"] = "hmacauth4.test", date = date, digest = digest .. "spoofed", authorization = hmacAuth, @@ -1324,7 +1324,7 @@ for _, strategy in helpers.each_strategy() do path = "/request", body = {}, headers = { - ["HOST"] = "hmacauth5.com", + ["HOST"] = "hmacauth5.test", date = date, ["proxy-authorization"] = hmacAuth, ["content-md5"] = "md5", @@ -1347,7 +1347,7 @@ for _, strategy in helpers.each_strategy() do path = "/request?name=foo", body = {}, headers = { - ["HOST"] = "hmacauth5.com", + ["HOST"] = "hmacauth5.test", date = date, ["proxy-authorization"] = hmacAuth, ["content-md5"] = "md5", @@ -1366,7 +1366,7 @@ for _, strategy in helpers.each_strategy() do path = "/request/?name=foo", body = {}, headers = { - ["HOST"] = "hmacauth5.com", + ["HOST"] = "hmacauth5.test", date = date, ["proxy-authorization"] = hmacAuth, ["content-md5"] = "md5", @@ -1388,7 +1388,7 @@ for _, strategy in helpers.each_strategy() do path = "/request?name=foo", body = {}, headers = { - ["HOST"] = "hmacauth5.com", + ["HOST"] = "hmacauth5.test", date = date, ["proxy-authorization"] = hmacAuth, ["content-md5"] = "md5", @@ -1407,7 +1407,7 @@ for _, strategy in helpers.each_strategy() do path = "/request/?name=foo", body = {}, headers = { - ["HOST"] = "hmacauth5.com", + ["HOST"] = "hmacauth5.test", date = date, ["proxy-authorization"] = hmacAuth, ["content-md5"] = "md5", @@ -1431,7 +1431,7 @@ for _, strategy in helpers.each_strategy() do path = escaped_uri, body = {}, headers = { - ["HOST"] = "hmacauth5.com", + ["HOST"] = "hmacauth5.test", date = date, ["proxy-authorization"] = hmacAuth, ["content-md5"] = "md5", @@ -1454,7 +1454,7 @@ for _, strategy in helpers.each_strategy() do path = escaped_uri, body = {}, headers = { - ["HOST"] = "hmacauth5.com", + ["HOST"] = "hmacauth5.test", date = date, ["proxy-authorization"] = hmacAuth, ["content-md5"] = "md5", @@ -1481,7 +1481,7 @@ for _, strategy in helpers.each_strategy() do path = escaped_uri, body = {}, headers = { - ["HOST"] = "hmacauth5.com", + ["HOST"] = "hmacauth5.test", date = date, ["proxy-authorization"] = hmacAuth, ["content-md5"] = "md5", @@ -1506,7 +1506,7 @@ for _, strategy in helpers.each_strategy() do path = "/request?name=foo&name=bar", body = {}, headers = { - ["HOST"] = "hmacauth5.com", + ["HOST"] = "hmacauth5.test", date = date, ["proxy-authorization"] = hmacAuth, ["content-md5"] = "md5", @@ -1530,7 +1530,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", body = {}, headers = { - ["HOST"] = "hmacauth5.com", + ["HOST"] = "hmacauth5.test", date = date, ["proxy-authorization"] = hmacAuth, ["content-md5"] = "md5", @@ -1554,7 +1554,7 @@ for _, strategy in helpers.each_strategy() do path = escaped_uri, body = {}, headers = { - ["HOST"] = "hmacauth5.com", + ["HOST"] = "hmacauth5.test", date = date, ["proxy-authorization"] = hmacAuth, ["content-md5"] = "md5", @@ -1580,7 +1580,7 @@ for _, strategy in helpers.each_strategy() do path = "/request", body = {}, headers = { - ["HOST"] = "hmacauth5.com", + ["HOST"] = "hmacauth5.test", date = date, ["proxy-authorization"] = hmacAuth, 
["content-md5"] = "md5", @@ -1602,7 +1602,7 @@ for _, strategy in helpers.each_strategy() do path = "/request", body = {}, headers = { - ["HOST"] = "hmacauth5.com", + ["HOST"] = "hmacauth5.test", date = date, ["proxy-authorization"] = hmacAuth, ["content-md5"] = "md5", @@ -1624,7 +1624,7 @@ for _, strategy in helpers.each_strategy() do path = "/request", body = {}, headers = { - ["HOST"] = "hmacauth5.com", + ["HOST"] = "hmacauth5.test", date = date, ["proxy-authorization"] = hmacAuth, ["content-md5"] = "md5", @@ -1646,7 +1646,7 @@ for _, strategy in helpers.each_strategy() do path = "/request", body = {}, headers = { - ["HOST"] = "hmacauth6.com", + ["HOST"] = "hmacauth6.test", date = date, ["proxy-authorization"] = hmacAuth, ["content-md5"] = "md5", @@ -1662,7 +1662,7 @@ for _, strategy in helpers.each_strategy() do path = "/request", body = {}, headers = { - ["HOST"] = "hmacauth6.com", + ["HOST"] = "hmacauth6.test", date = date, ["proxy-authorization"] = "this is no hmac token at all is it?", }, @@ -1683,7 +1683,7 @@ for _, strategy in helpers.each_strategy() do path = "/request", body = {}, headers = { - ["HOST"] = "hmacauth6.com", + ["HOST"] = "hmacauth6.test", date = date, ["proxy-authorization"] = hmacAuth, ["content-md5"] = "md5", @@ -1718,7 +1718,7 @@ for _, strategy in helpers.each_strategy() do }) local route1 = bp.routes:insert { - hosts = { "logical-and.com" }, + hosts = { "logical-and.test" }, protocols = { "http", "https" }, service = service1 } @@ -1750,7 +1750,7 @@ for _, strategy in helpers.each_strategy() do }) local route2 = bp.routes:insert { - hosts = { "logical-or.com" }, + hosts = { "logical-or.test" }, protocols = { "http", "https" }, service = service2 } @@ -1807,7 +1807,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/request", headers = { - ["Host"] = "logical-and.com", + ["Host"] = "logical-and.test", ["apikey"] = "Mouse", ["Authorization"] = hmacAuth, ["date"] = hmacDate, @@ -1826,7 +1826,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/request", headers = { - ["Host"] = "logical-and.com", + ["Host"] = "logical-and.test", ["apikey"] = "Mouse", }, }) @@ -1838,7 +1838,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/request", headers = { - ["Host"] = "logical-and.com", + ["Host"] = "logical-and.test", ["Authorization"] = hmacAuth, ["date"] = hmacDate, }, @@ -1851,7 +1851,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/request", headers = { - ["Host"] = "logical-and.com", + ["Host"] = "logical-and.test", }, }) assert.response(res).has.status(401) @@ -1866,7 +1866,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/request", headers = { - ["Host"] = "logical-or.com", + ["Host"] = "logical-or.test", ["apikey"] = "Mouse", ["Authorization"] = hmacAuth, ["date"] = hmacDate, @@ -1885,7 +1885,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/request", headers = { - ["Host"] = "logical-or.com", + ["Host"] = "logical-or.test", ["apikey"] = "Mouse", }, }) @@ -1901,7 +1901,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/request", headers = { - ["Host"] = "logical-or.com", + ["Host"] = "logical-or.test", ["Authorization"] = hmacAuth, ["date"] = hmacDate, }, @@ -1918,7 +1918,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/request", headers = { - ["Host"] = "logical-or.com", + ["Host"] = "logical-or.test", }, }) assert.response(res).has.status(200) diff --git 
a/spec/03-plugins/19-hmac-auth/04-invalidations_spec.lua b/spec/03-plugins/19-hmac-auth/04-invalidations_spec.lua index f9eb0f21af19..08e7a6cdcd28 100644 --- a/spec/03-plugins/19-hmac-auth/04-invalidations_spec.lua +++ b/spec/03-plugins/19-hmac-auth/04-invalidations_spec.lua @@ -21,7 +21,7 @@ for _, strategy in helpers.each_strategy() do }) local route = bp.routes:insert { - hosts = { "hmacauth.com" }, + hosts = { "hmacauth.test" }, } bp.plugins:insert { @@ -82,7 +82,7 @@ for _, strategy in helpers.each_strategy() do path = "/requests", body = {}, headers = { - ["HOST"] = "hmacauth.com", + ["HOST"] = "hmacauth.test", date = date, authorization = authorization } @@ -125,7 +125,7 @@ for _, strategy in helpers.each_strategy() do method = "POST", body = {}, headers = { - ["HOST"] = "hmacauth.com", + ["HOST"] = "hmacauth.test", date = date, authorization = authorization } @@ -155,7 +155,7 @@ for _, strategy in helpers.each_strategy() do path = "/requests", body = {}, headers = { - ["HOST"] = "hmacauth.com", + ["HOST"] = "hmacauth.test", date = date, authorization = authorization } @@ -169,7 +169,7 @@ for _, strategy in helpers.each_strategy() do path = "/requests", body = {}, headers = { - ["HOST"] = "hmacauth.com", + ["HOST"] = "hmacauth.test", date = date, authorization = authorization } @@ -199,7 +199,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", body = {}, headers = { - ["HOST"] = "hmacauth.com", + ["HOST"] = "hmacauth.test", date = date, authorization = authorization } @@ -216,7 +216,7 @@ for _, strategy in helpers.each_strategy() do path = "/requests", body = {}, headers = { - ["HOST"] = "hmacauth.com", + ["HOST"] = "hmacauth.test", date = date, authorization = authorization } @@ -249,7 +249,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", body = {}, headers = { - ["HOST"] = "hmacauth.com", + ["HOST"] = "hmacauth.test", date = date, authorization = authorization } diff --git a/spec/03-plugins/20-ldap-auth/01-access_spec.lua b/spec/03-plugins/20-ldap-auth/01-access_spec.lua index 9f10529a37ad..bf1cb9f78a04 100644 --- a/spec/03-plugins/20-ldap-auth/01-access_spec.lua +++ b/spec/03-plugins/20-ldap-auth/01-access_spec.lua @@ -47,31 +47,31 @@ for _, ldap_strategy in pairs(ldap_strategies) do }) local route1 = bp.routes:insert { - hosts = { "ldap.com" }, + hosts = { "ldap.test" }, } route2 = bp.routes:insert { - hosts = { "ldap2.com" }, + hosts = { "ldap2.test" }, } local route3 = bp.routes:insert { - hosts = { "ldap3.com" }, + hosts = { "ldap3.test" }, } local route4 = bp.routes:insert { - hosts = { "ldap4.com" }, + hosts = { "ldap4.test" }, } local route5 = bp.routes:insert { - hosts = { "ldap5.com" }, + hosts = { "ldap5.test" }, } bp.routes:insert { - hosts = { "ldap6.com" }, + hosts = { "ldap6.test" }, } local route7 = bp.routes:insert { - hosts = { "ldap7.com" }, + hosts = { "ldap7.test" }, } assert(bp.routes:insert { @@ -207,7 +207,7 @@ for _, ldap_strategy in pairs(ldap_strategies) do method = "GET", path = "/get", headers = { - host = "ldap.com" + host = "ldap.test" } }) assert.response(res).has.status(401) @@ -231,7 +231,7 @@ for _, ldap_strategy in pairs(ldap_strategies) do method = "GET", path = "/get", headers = { - host = "ldap.com", + host = "ldap.test", authorization = "abcd" } }) @@ -244,7 +244,7 @@ for _, ldap_strategy in pairs(ldap_strategies) do method = "GET", path = "/get", headers = { - host = "ldap.com", + host = "ldap.test", ["proxy-authorization"] = "abcd" } }) @@ -257,7 +257,7 @@ for _, ldap_strategy in pairs(ldap_strategies) do 
method = "GET", path = "/get", headers = { - host = "ldap.com", + host = "ldap.test", authorization = "ldap " } }) @@ -271,7 +271,7 @@ for _, ldap_strategy in pairs(ldap_strategies) do path = "/request", body = {}, headers = { - host = "ldap.com", + host = "ldap.test", authorization = "ldap " .. ngx.encode_base64("einstein:password"), ["content-type"] = "application/x-www-form-urlencoded", } @@ -296,7 +296,7 @@ for _, ldap_strategy in pairs(ldap_strategies) do path = "/request", body = {}, headers = { - host = "ldap.com", + host = "ldap.test", authorization = "invalidldap " .. ngx.encode_base64("einstein:password"), ["content-type"] = "application/x-www-form-urlencoded", } @@ -308,7 +308,7 @@ for _, ldap_strategy in pairs(ldap_strategies) do method = "POST", path = "/request", headers = { - host = "ldap.com", + host = "ldap.test", authorization = " ldap " .. ngx.encode_base64("einstein:password") } }) @@ -319,7 +319,7 @@ for _, ldap_strategy in pairs(ldap_strategies) do method = "POST", path = "/request", headers = { - host = "ldap.com", + host = "ldap.test", authorization = "LDAP " .. ngx.encode_base64("einstein:password") } }) @@ -330,7 +330,7 @@ for _, ldap_strategy in pairs(ldap_strategies) do method = "GET", path = "/request", headers = { - host = "ldap.com", + host = "ldap.test", authorization = "ldap " .. ngx.encode_base64("einstein:password") } }) @@ -344,7 +344,7 @@ for _, ldap_strategy in pairs(ldap_strategies) do method = "GET", path = "/request", headers = { - host = "ldap.com", + host = "ldap.test", authorization = "ldap " .. ngx.encode_base64("einstein:") } }) @@ -355,7 +355,7 @@ for _, ldap_strategy in pairs(ldap_strategies) do method = "GET", path = "/request", headers = { - host = "ldap.com", + host = "ldap.test", authorization = "ldap " .. ngx.encode_base64("einstein:e0d91f53c566e0d91f53c566e0d91f53c566e0d91f53c566e0d91f53c566e0d91f53c566e0d91f53c566e0d91f53c566e0d91f53c566e0d91f53c566e0d91f53c566e0d91f53c566e0d91f53c566") } }) @@ -366,7 +366,7 @@ for _, ldap_strategy in pairs(ldap_strategies) do method = "GET", path = "/request", headers = { - host = "ldap.com", + host = "ldap.test", authorization = "ldap " .. ngx.encode_base64("einstein:password:another_password") } }) @@ -377,7 +377,7 @@ for _, ldap_strategy in pairs(ldap_strategies) do method = "GET", path = "/request", headers = { - host = "ldap.com", + host = "ldap.test", authorization = "ldap " .. ngx.encode_base64("einstein:wrong_password") } }) @@ -388,7 +388,7 @@ for _, ldap_strategy in pairs(ldap_strategies) do method = "GET", path = "/request", headers = { - host = "ldap.com", + host = "ldap.test", authorization = "ldap " .. ngx.encode_base64("einstein:password") } }) @@ -401,7 +401,7 @@ for _, ldap_strategy in pairs(ldap_strategies) do method = "GET", path = "/request", headers = { - host = "ldap2.com", + host = "ldap2.test", authorization = "ldap " .. ngx.encode_base64("einstein:password") } }) @@ -414,7 +414,7 @@ for _, ldap_strategy in pairs(ldap_strategies) do path = "/request", body = {}, headers = { - host = "ldap5.com", + host = "ldap5.test", authorization = "basic " .. 
ngx.encode_base64("einstein:password"), ["content-type"] = "application/x-www-form-urlencoded", } @@ -426,7 +426,7 @@ for _, ldap_strategy in pairs(ldap_strategies) do method = "GET", path = "/get", headers = { - host = "ldap5.com", + host = "ldap5.test", } }) assert.response(res).has.status(401) @@ -442,7 +442,7 @@ for _, ldap_strategy in pairs(ldap_strategies) do path = "/request", body = {}, headers = { - host = "ldap5.com", + host = "ldap5.test", authorization = "invalidldap " .. ngx.encode_base64("einstein:password"), ["content-type"] = "application/x-www-form-urlencoded", } @@ -454,7 +454,7 @@ for _, ldap_strategy in pairs(ldap_strategies) do method = "GET", path = "/request", headers = { - host = "ldap6.com", + host = "ldap6.test", authorization = "ldap " .. ngx.encode_base64("einstein:password") } }) @@ -468,7 +468,7 @@ for _, ldap_strategy in pairs(ldap_strategies) do method = "GET", path = "/request", headers = { - host = "ldap2.com", + host = "ldap2.test", authorization = "ldap " .. ngx.encode_base64("einstein:password") } }) @@ -496,7 +496,7 @@ for _, ldap_strategy in pairs(ldap_strategies) do method = "GET", path = "/request", headers = { - host = "ldap3.com", + host = "ldap3.test", authorization = "ldap " .. ngx.encode_base64("einstein:password") } }) @@ -512,7 +512,7 @@ for _, ldap_strategy in pairs(ldap_strategies) do method = "GET", path = "/request", headers = { - host = "ldap3.com" + host = "ldap3.test" } }) assert.response(res).has.status(200) @@ -527,7 +527,7 @@ for _, ldap_strategy in pairs(ldap_strategies) do method = "GET", path = "/request", headers = { - host = "ldap7.com" + host = "ldap7.test" } }) assert.response(res).has.status(200) @@ -541,7 +541,7 @@ for _, ldap_strategy in pairs(ldap_strategies) do method = "GET", path = "/request", headers = { - ["Host"] = "ldap4.com" + ["Host"] = "ldap4.test" } }) assert.response(res).has.status(500) @@ -569,7 +569,7 @@ for _, ldap_strategy in pairs(ldap_strategies) do }) local route1 = bp.routes:insert { - hosts = { "logical-and.com" }, + hosts = { "logical-and.test" }, service = service1, } @@ -603,7 +603,7 @@ for _, ldap_strategy in pairs(ldap_strategies) do }) local route2 = bp.routes:insert { - hosts = { "logical-or.com" }, + hosts = { "logical-or.test" }, service = service2 } @@ -657,7 +657,7 @@ for _, ldap_strategy in pairs(ldap_strategies) do method = "GET", path = "/request", headers = { - ["Host"] = "logical-and.com", + ["Host"] = "logical-and.test", ["apikey"] = "Mouse", ["Authorization"] = "ldap " .. ngx.encode_base64("einstein:password"), } @@ -671,7 +671,7 @@ for _, ldap_strategy in pairs(ldap_strategies) do method = "GET", path = "/request", headers = { - ["Host"] = "logical-and.com", + ["Host"] = "logical-and.test", ["apikey"] = "Mouse", } }) @@ -683,7 +683,7 @@ for _, ldap_strategy in pairs(ldap_strategies) do method = "GET", path = "/request", headers = { - ["Host"] = "logical-and.com", + ["Host"] = "logical-and.test", ["Authorization"] = "ldap " .. ngx.encode_base64("einstein:password"), } }) @@ -695,7 +695,7 @@ for _, ldap_strategy in pairs(ldap_strategies) do method = "GET", path = "/request", headers = { - ["Host"] = "logical-and.com", + ["Host"] = "logical-and.test", } }) assert.response(res).has.status(401) @@ -710,7 +710,7 @@ for _, ldap_strategy in pairs(ldap_strategies) do method = "GET", path = "/request", headers = { - ["Host"] = "logical-or.com", + ["Host"] = "logical-or.test", ["apikey"] = "Mouse", ["Authorization"] = "ldap " .. 
ngx.encode_base64("einstein:password"), } @@ -729,7 +729,7 @@ for _, ldap_strategy in pairs(ldap_strategies) do method = "GET", path = "/request", headers = { - ["Host"] = "logical-or.com", + ["Host"] = "logical-or.test", ["apikey"] = "Mouse", } }) @@ -747,7 +747,7 @@ for _, ldap_strategy in pairs(ldap_strategies) do method = "GET", path = "/request", headers = { - ["Host"] = "logical-or.com", + ["Host"] = "logical-or.test", ["Authorization"] = "ldap " .. ngx.encode_base64("einstein:password"), } }) @@ -762,7 +762,7 @@ for _, ldap_strategy in pairs(ldap_strategies) do method = "GET", path = "/request", headers = { - ["Host"] = "logical-or.com", + ["Host"] = "logical-or.test", } }) assert.response(res).has.status(200) diff --git a/spec/03-plugins/20-ldap-auth/02-invalidations_spec.lua b/spec/03-plugins/20-ldap-auth/02-invalidations_spec.lua index b47efc438f1a..49f9dbed0485 100644 --- a/spec/03-plugins/20-ldap-auth/02-invalidations_spec.lua +++ b/spec/03-plugins/20-ldap-auth/02-invalidations_spec.lua @@ -26,7 +26,7 @@ for _, ldap_strategy in pairs(ldap_strategies) do }) local route = bp.routes:insert { - hosts = { "ldapauth.com" }, + hosts = { "ldapauth.test" }, } plugin = bp.plugins:insert { @@ -86,7 +86,7 @@ for _, ldap_strategy in pairs(ldap_strategies) do path = "/requests", body = {}, headers = { - ["HOST"] = "ldapauth.com", + ["HOST"] = "ldapauth.test", authorization = "ldap " .. ngx.encode_base64("einstein:wrongpassword") } }) @@ -112,7 +112,7 @@ for _, ldap_strategy in pairs(ldap_strategies) do path = "/requests", body = {}, headers = { - ["HOST"] = "ldapauth.com", + ["HOST"] = "ldapauth.test", authorization = "ldap " .. ngx.encode_base64("einstein:password") } }) diff --git a/spec/03-plugins/21-bot-detection/01-access_spec.lua b/spec/03-plugins/21-bot-detection/01-access_spec.lua index bead9c2c6f61..dbd9a8f9ac10 100644 --- a/spec/03-plugins/21-bot-detection/01-access_spec.lua +++ b/spec/03-plugins/21-bot-detection/01-access_spec.lua @@ -17,15 +17,15 @@ for _, strategy in helpers.each_strategy() do }) local route1 = bp.routes:insert { - hosts = { "bot.com" }, + hosts = { "bot.test" }, } local route2 = bp.routes:insert { - hosts = { "bot2.com" }, + hosts = { "bot2.test" }, } local route3 = bp.routes:insert { - hosts = { "bot3.com" }, + hosts = { "bot3.test" }, } local grpc_service = bp.services:insert { @@ -36,21 +36,21 @@ for _, strategy in helpers.each_strategy() do local route_grpc1 = assert(bp.routes:insert { protocols = { "grpc" }, paths = { "/hello.HelloService/" }, - hosts = { "bot-grpc1.com" }, + hosts = { "bot-grpc1.test" }, service = grpc_service, }) local route_grpc2 = assert(bp.routes:insert { protocols = { "grpc" }, paths = { "/hello.HelloService/" }, - hosts = { "bot-grpc2.com" }, + hosts = { "bot-grpc2.test" }, service = grpc_service, }) local route_grpc3 = assert(bp.routes:insert { protocols = { "grpc" }, paths = { "/hello.HelloService/" }, - hosts = { "bot-grpc3.com" }, + hosts = { "bot-grpc3.test" }, service = grpc_service, }) @@ -122,7 +122,7 @@ for _, strategy in helpers.each_strategy() do local res = assert( proxy_client:send { method = "GET", path = "/request", - headers = { host = "bot.com" } + headers = { host = "bot.test" } }) assert.response(res).has.status(200) @@ -130,7 +130,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/request", headers = { - host = "bot.com", + host = "bot.test", ["user-agent"] = "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/50.0.2661.102 Safari/537.36" } }) @@ 
-140,7 +140,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/request", headers = { - host = "bot.com", + host = "bot.test", ["user-agent"] = HELLOWORLD } }) @@ -150,7 +150,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/request", headers = { - host = "bot.com", + host = "bot.test", ["user-agent"] = "curl/7.43.0" } }) @@ -161,7 +161,7 @@ for _, strategy in helpers.each_strategy() do local ok = helpers.proxy_client_grpc(){ service = "hello.HelloService.SayHello", opts = { - ["-authority"] = "bot-grpc1.com", + ["-authority"] = "bot-grpc1.test", ["-v"] = true, }, } @@ -170,7 +170,7 @@ for _, strategy in helpers.each_strategy() do local ok = helpers.proxy_client_grpc(){ service = "hello.HelloService.SayHello", opts = { - ["-authority"] = "bot-grpc1.com", + ["-authority"] = "bot-grpc1.test", ["-user-agent"] = "'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/50.0.2661.102 Safari/537.36'", ["-v"] = true, }, @@ -180,7 +180,7 @@ for _, strategy in helpers.each_strategy() do local ok = helpers.proxy_client_grpc(){ service = "hello.HelloService.SayHello", opts = { - ["-authority"] = "bot-grpc1.com", + ["-authority"] = "bot-grpc1.test", ["-user-agent"] = HELLOWORLD, ["-v"] = true, }, @@ -190,7 +190,7 @@ for _, strategy in helpers.each_strategy() do local ok = helpers.proxy_client_grpc(){ service = "hello.HelloService.SayHello", opts = { - ["-authority"] = "bot-grpc1.com", + ["-authority"] = "bot-grpc1.test", ["-user-agent"] = "curl/7.43.0", ["-v"] = true, }, @@ -203,7 +203,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/request", headers = { - host = "bot.com", + host = "bot.test", ["user-agent"] = "Googlebot/2.1 (+http://www.google.com/bot.html)" }, }) @@ -213,7 +213,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/request", headers = { - host = "bot.com", + host = "bot.test", ["user-agent"] = FACEBOOK, } }) @@ -224,7 +224,7 @@ for _, strategy in helpers.each_strategy() do local ok, err = helpers.proxy_client_grpc(){ service = "hello.HelloService.SayHello", opts = { - ["-authority"] = "bot-grpc1.com", + ["-authority"] = "bot-grpc1.test", ["-user-agent"] = "'Googlebot/2.1 (+http://www.google.com/bot.html)'", ["-v"] = true, }, @@ -235,7 +235,7 @@ for _, strategy in helpers.each_strategy() do local ok, err = helpers.proxy_client_grpc(){ service = "hello.HelloService.SayHello", opts = { - ["-authority"] = "bot-grpc1.com", + ["-authority"] = "bot-grpc1.test", ["-user-agent"] = FACEBOOK, ["-v"] = true, }, @@ -249,7 +249,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/request", headers = { - host = "bot2.com", + host = "bot2.test", ["user-agent"] = HELLOWORLD, } }) @@ -260,7 +260,7 @@ for _, strategy in helpers.each_strategy() do local ok, err = helpers.proxy_client_grpc(){ service = "hello.HelloService.SayHello", opts = { - ["-authority"] = "bot-grpc2.com", + ["-authority"] = "bot-grpc2.test", ["-user-agent"] = HELLOWORLD, ["-v"] = true, }, @@ -274,7 +274,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/request", headers = { - host = "bot3.com", + host = "bot3.test", ["user-agent"] = FACEBOOK } }) @@ -285,7 +285,7 @@ for _, strategy in helpers.each_strategy() do local ok = helpers.proxy_client_grpc(){ service = "hello.HelloService.SayHello", opts = { - ["-authority"] = "bot-grpc3.com", + ["-authority"] = "bot-grpc3.test", ["-user-agent"] = FACEBOOK, ["-v"] = true, }, @@ -305,7 +305,7 @@ for _, 
strategy in helpers.each_strategy() do }) bp.routes:insert { - hosts = { "bot.com" }, + hosts = { "bot.test" }, } bp.plugins:insert { @@ -338,7 +338,7 @@ for _, strategy in helpers.each_strategy() do local res = assert(proxy_client:send { method = "GET", path = "/request", - headers = { host = "bot.com" } + headers = { host = "bot.test" } }) assert.response(res).has.status(200) @@ -346,7 +346,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/request", headers = { - host = "bot.com", + host = "bot.test", ["user-agent"] = "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/50.0.2661.102 Safari/537.36" } }) @@ -356,7 +356,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/request", headers = { - host = "bot.com", + host = "bot.test", ["user-agent"] = HELLOWORLD } }) @@ -366,7 +366,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/request", headers = { - host = "bot.com", + host = "bot.test", ["user-agent"] = "curl/7.43.0" } }) diff --git a/spec/03-plugins/21-bot-detection/02-invalidations_spec.lua b/spec/03-plugins/21-bot-detection/02-invalidations_spec.lua index 54794d98a75b..a24fae154478 100644 --- a/spec/03-plugins/21-bot-detection/02-invalidations_spec.lua +++ b/spec/03-plugins/21-bot-detection/02-invalidations_spec.lua @@ -14,7 +14,7 @@ for _, strategy in helpers.each_strategy() do }) local route = bp.routes:insert { - hosts = { "bot.com" }, + hosts = { "bot.test" }, } plugin = bp.plugins:insert { @@ -53,7 +53,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/request", headers = { - host = "bot.com", + host = "bot.test", ["user-agent"] = "helloworld" } }) @@ -77,7 +77,7 @@ for _, strategy in helpers.each_strategy() do mehod = "GET", path = "/request", headers = { - host = "bot.com", + host = "bot.test", ["user-agent"] = "helloworld", }, }) @@ -92,7 +92,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/request", headers = { - host = "bot.com", + host = "bot.test", ["user-agent"] = "facebookexternalhit/1.1" } }) @@ -116,7 +116,7 @@ for _, strategy in helpers.each_strategy() do mehod = "GET", path = "/request", headers = { - host = "bot.com", + host = "bot.test", ["user-agent"] = "facebookexternalhit/1.1" } }) diff --git a/spec/03-plugins/21-bot-detection/03-api_spec.lua b/spec/03-plugins/21-bot-detection/03-api_spec.lua index 99c3e3134f29..e4b87707dd69 100644 --- a/spec/03-plugins/21-bot-detection/03-api_spec.lua +++ b/spec/03-plugins/21-bot-detection/03-api_spec.lua @@ -19,11 +19,11 @@ for _, strategy in helpers.each_strategy() do }) route1 = bp.routes:insert { - hosts = { "bot1.com" }, + hosts = { "bot1.test" }, } route2 = bp.routes:insert { - hosts = { "bot2.com" }, + hosts = { "bot2.test" }, } assert(helpers.start_kong({ diff --git a/spec/03-plugins/23-rate-limiting/03-api_spec.lua b/spec/03-plugins/23-rate-limiting/03-api_spec.lua index 9dd48552d1a1..1e862bdc3a7f 100644 --- a/spec/03-plugins/23-rate-limiting/03-api_spec.lua +++ b/spec/03-plugins/23-rate-limiting/03-api_spec.lua @@ -31,13 +31,13 @@ for _, strategy in helpers.each_strategy() do local service = bp.services:insert() route = bp.routes:insert { - hosts = { "test1.com" }, + hosts = { "test1.test" }, protocols = { "http", "https" }, service = service } route2 = bp.routes:insert { - hosts = { "test2.com" }, + hosts = { "test2.test" }, protocols = { "http", "https" }, service = service } diff --git a/spec/03-plugins/23-rate-limiting/05-integration_spec.lua 
b/spec/03-plugins/23-rate-limiting/05-integration_spec.lua index 8b00ea67e780..4402c451325d 100644 --- a/spec/03-plugins/23-rate-limiting/05-integration_spec.lua +++ b/spec/03-plugins/23-rate-limiting/05-integration_spec.lua @@ -113,7 +113,7 @@ describe("Plugin: rate-limiting (integration)", function() }) local route1 = assert(bp.routes:insert { - hosts = { "redistest1.com" }, + hosts = { "redistest1.test" }, }) assert(bp.plugins:insert { name = "rate-limiting", @@ -134,7 +134,7 @@ describe("Plugin: rate-limiting (integration)", function() }) local route2 = assert(bp.routes:insert { - hosts = { "redistest2.com" }, + hosts = { "redistest2.test" }, }) assert(bp.plugins:insert { name = "rate-limiting", @@ -155,7 +155,7 @@ describe("Plugin: rate-limiting (integration)", function() if red_version >= version("6.0.0") then local route3 = assert(bp.routes:insert { - hosts = { "redistest3.com" }, + hosts = { "redistest3.test" }, }) assert(bp.plugins:insert { name = "rate-limiting", @@ -177,7 +177,7 @@ describe("Plugin: rate-limiting (integration)", function() }) local route4 = assert(bp.routes:insert { - hosts = { "redistest4.com" }, + hosts = { "redistest4.test" }, }) assert(bp.plugins:insert { name = "rate-limiting", @@ -233,7 +233,7 @@ describe("Plugin: rate-limiting (integration)", function() method = "GET", path = "/status/200", headers = { - ["Host"] = "redistest1.com" + ["Host"] = "redistest1.test" } }) assert.res_status(200, res) @@ -263,7 +263,7 @@ describe("Plugin: rate-limiting (integration)", function() method = "GET", path = "/status/200", headers = { - ["Host"] = "redistest2.com" + ["Host"] = "redistest2.test" } }) assert.res_status(200, res) @@ -294,7 +294,7 @@ describe("Plugin: rate-limiting (integration)", function() method = "GET", path = "/status/200", headers = { - ["Host"] = "redistest3.com" + ["Host"] = "redistest3.test" } }) assert.res_status(200, res) @@ -328,7 +328,7 @@ describe("Plugin: rate-limiting (integration)", function() method = "GET", path = "/status/200", headers = { - ["Host"] = "redistest3.com" + ["Host"] = "redistest3.test" } }) assert.res_status(200, res) @@ -344,7 +344,7 @@ describe("Plugin: rate-limiting (integration)", function() method = "GET", path = "/status/200", headers = { - ["Host"] = "redistest4.com" + ["Host"] = "redistest4.test" } }) assert.res_status(500, res) diff --git a/spec/03-plugins/24-response-rate-limiting/04-access_spec.lua b/spec/03-plugins/24-response-rate-limiting/04-access_spec.lua index 91cd9e8ecec5..a697444a19cf 100644 --- a/spec/03-plugins/24-response-rate-limiting/04-access_spec.lua +++ b/spec/03-plugins/24-response-rate-limiting/04-access_spec.lua @@ -141,7 +141,7 @@ for _, strategy in helpers.each_strategy() do } local route1 = bp.routes:insert { - hosts = { "test1.com" }, + hosts = { "test1.test" }, protocols = { "http", "https" }, } @@ -162,7 +162,7 @@ for _, strategy in helpers.each_strategy() do }) local route2 = bp.routes:insert { - hosts = { "test2.com" }, + hosts = { "test2.test" }, protocols = { "http", "https" }, } @@ -184,7 +184,7 @@ for _, strategy in helpers.each_strategy() do }) local route3 = bp.routes:insert { - hosts = { "test3.com" }, + hosts = { "test3.test" }, protocols = { "http", "https" }, } @@ -223,7 +223,7 @@ for _, strategy in helpers.each_strategy() do }) local route4 = bp.routes:insert { - hosts = { "test4.com" }, + hosts = { "test4.test" }, protocols = { "http", "https" }, } @@ -247,7 +247,7 @@ for _, strategy in helpers.each_strategy() do }) local route7 = bp.routes:insert { - hosts = { "test7.com" 
}, + hosts = { "test7.test" }, protocols = { "http", "https" }, } @@ -277,7 +277,7 @@ for _, strategy in helpers.each_strategy() do }) local route8 = bp.routes:insert { - hosts = { "test8.com" }, + hosts = { "test8.test" }, protocols = { "http", "https" }, } @@ -299,7 +299,7 @@ for _, strategy in helpers.each_strategy() do }) local route9 = bp.routes:insert { - hosts = { "test9.com" }, + hosts = { "test9.test" }, protocols = { "http", "https" }, } @@ -323,11 +323,11 @@ for _, strategy in helpers.each_strategy() do local service10 = bp.services:insert() bp.routes:insert { - hosts = { "test-service1.com" }, + hosts = { "test-service1.test" }, service = service10, } bp.routes:insert { - hosts = { "test-service2.com" }, + hosts = { "test-service2.test" }, service = service10, } @@ -392,7 +392,7 @@ for _, strategy in helpers.each_strategy() do local n = math.floor(ITERATIONS / 2) for _ = 1, n do local res = proxy_client():get("/response-headers?x-kong-limit=video=1", { - headers = { Host = "test1.com" }, + headers = { Host = "test1.test" }, }) assert.res_status(200, res) end @@ -400,7 +400,7 @@ for _, strategy in helpers.each_strategy() do ngx.sleep(SLEEP_TIME) -- Wait for async timer to increment the limit local res = proxy_client():get("/response-headers?x-kong-limit=video=1", { - headers = { Host = "test1.com" }, + headers = { Host = "test1.test" }, }) assert.res_status(200, res) assert.equal(ITERATIONS, tonumber(res.headers["x-ratelimit-limit-video-second"])) @@ -427,7 +427,7 @@ for _, strategy in helpers.each_strategy() do end) it("blocks if exceeding limit", function() - test_limit("/response-headers?x-kong-limit=video=1", "test1.com") + test_limit("/response-headers?x-kong-limit=video=1", "test1.test") end) it("counts against the same service register from different routes", function() @@ -435,14 +435,14 @@ for _, strategy in helpers.each_strategy() do local n = math.floor(ITERATIONS / 2) for i = 1, n do local res = proxy_client():get("/response-headers?x-kong-limit=video=1, test=" .. ITERATIONS, { - headers = { Host = "test-service1.com" }, + headers = { Host = "test-service1.test" }, }) assert.res_status(200, res) end for i = n+1, ITERATIONS do local res = proxy_client():get("/response-headers?x-kong-limit=video=1, test=" .. ITERATIONS, { - headers = { Host = "test-service2.com" }, + headers = { Host = "test-service2.test" }, }) assert.res_status(200, res) end @@ -451,7 +451,7 @@ for _, strategy in helpers.each_strategy() do -- Additional request, while limit is ITERATIONS/second local res = proxy_client():get("/response-headers?x-kong-limit=video=1, test=" .. 
ITERATIONS, { - headers = { Host = "test-service1.com" }, + headers = { Host = "test-service1.test" }, }) assert.res_status(429, res) end) @@ -465,7 +465,7 @@ for _, strategy in helpers.each_strategy() do ngx.sleep(SLEEP_TIME) -- Wait for async timer to increment the limit end res = proxy_client():get("/response-headers?x-kong-limit=video=2, image=1", { - headers = { Host = "test2.com" }, + headers = { Host = "test2.test" }, }) assert.res_status(200, res) end @@ -479,7 +479,7 @@ for _, strategy in helpers.each_strategy() do for i = n+1, ITERATIONS do res = proxy_client():get("/response-headers?x-kong-limit=video=1, image=1", { - headers = { Host = "test2.com" }, + headers = { Host = "test2.test" }, }) assert.res_status(200, res) end @@ -487,7 +487,7 @@ for _, strategy in helpers.each_strategy() do ngx.sleep(SLEEP_TIME) -- Wait for async timer to increment the limit local res = proxy_client():get("/response-headers?x-kong-limit=video=1, image=1", { - headers = { Host = "test2.com" }, + headers = { Host = "test2.test" }, }) assert.equal(0, tonumber(res.headers["x-ratelimit-remaining-image-second"])) @@ -500,11 +500,11 @@ for _, strategy in helpers.each_strategy() do describe("With authentication", function() describe("API-specific plugin", function() it("blocks if exceeding limit and a per consumer & route setting", function() - test_limit("/response-headers?apikey=apikey123&x-kong-limit=video=1", "test3.com", ITERATIONS - 2) + test_limit("/response-headers?apikey=apikey123&x-kong-limit=video=1", "test3.test", ITERATIONS - 2) end) it("blocks if exceeding limit and a per route setting", function() - test_limit("/response-headers?apikey=apikey124&x-kong-limit=video=1", "test3.com", ITERATIONS - 3) + test_limit("/response-headers?apikey=apikey124&x-kong-limit=video=1", "test3.test", ITERATIONS - 3) end) end) end) @@ -513,7 +513,7 @@ for _, strategy in helpers.each_strategy() do it("should append the headers with multiple limits", function() wait() local res = proxy_client():get("/get", { - headers = { Host = "test8.com" }, + headers = { Host = "test8.test" }, }) local json = cjson.decode(assert.res_status(200, res)) assert.equal(ITERATIONS-1, tonumber(json.headers["x-ratelimit-remaining-image"])) @@ -521,14 +521,14 @@ for _, strategy in helpers.each_strategy() do -- Actually consume the limits local res = proxy_client():get("/response-headers?x-kong-limit=video=2, image=1", { - headers = { Host = "test8.com" }, + headers = { Host = "test8.test" }, }) assert.res_status(200, res) ngx.sleep(SLEEP_TIME) -- Wait for async timer to increment the limit local res = proxy_client():get("/get", { - headers = { Host = "test8.com" }, + headers = { Host = "test8.test" }, }) local body = cjson.decode(assert.res_status(200, res)) assert.equal(ITERATIONS-2, tonumber(body.headers["x-ratelimit-remaining-image"])) @@ -539,19 +539,19 @@ for _, strategy in helpers.each_strategy() do wait() for _ = 1, ITERATIONS do local res = proxy_client():get("/response-headers?x-kong-limit=video%3D2&x-kong-limit=image%3D1", { - headers = { Host = "test4.com" }, + headers = { Host = "test4.test" }, }) assert.res_status(200, res) end proxy_client():get("/response-headers?x-kong-limit=video%3D1", { - headers = { Host = "test4.com" }, + headers = { Host = "test4.test" }, }) ngx.sleep(SLEEP_TIME) -- Wait for async timer to increment the limit local res = proxy_client():get("/response-headers?x-kong-limit=video%3D2&x-kong-limit=image%3D1", { - headers = { Host = "test4.com" }, + headers = { Host = "test4.test" }, }) 
assert.res_status(429, res) @@ -563,14 +563,14 @@ for _, strategy in helpers.each_strategy() do it("should block on first violation", function() wait() local res = proxy_client():get("/response-headers?x-kong-limit=video=2, image=4", { - headers = { Host = "test7.com" }, + headers = { Host = "test7.test" }, }) assert.res_status(200, res) ngx.sleep(SLEEP_TIME) -- Wait for async timer to increment the limit local res = proxy_client():get("/response-headers?x-kong-limit=video=2", { - headers = { Host = "test7.com" }, + headers = { Host = "test7.test" }, }) local body = assert.res_status(429, res) local json = cjson.decode(body) @@ -581,7 +581,7 @@ for _, strategy in helpers.each_strategy() do it("does not send rate-limit headers when hide_client_headers==true", function() wait() local res = proxy_client():get("/status/200", { - headers = { Host = "test9.com" }, + headers = { Host = "test9.test" }, }) assert.res_status(200, res) @@ -597,7 +597,7 @@ for _, strategy in helpers.each_strategy() do local bp = init_db(strategy, policy) local route = bp.routes:insert { - hosts = { "expire1.com" }, + hosts = { "expire1.test" }, protocols = { "http", "https" }, } @@ -630,7 +630,7 @@ for _, strategy in helpers.each_strategy() do it("expires a counter", function() wait() local res = proxy_client():get("/response-headers?x-kong-limit=video=1", { - headers = { Host = "expire1.com" }, + headers = { Host = "expire1.test" }, }) ngx.sleep(SLEEP_TIME) -- Wait for async timer to increment the limit @@ -643,7 +643,7 @@ for _, strategy in helpers.each_strategy() do wait() -- Wait for counter to expire local res = proxy_client():get("/response-headers?x-kong-limit=video=1", { - headers = { Host = "expire1.com" }, + headers = { Host = "expire1.test" }, }) ngx.sleep(SLEEP_TIME) -- Wait for async timer to increment the limit @@ -688,7 +688,7 @@ for _, strategy in helpers.each_strategy() do }) for i = 1, ITERATIONS do - bp.routes:insert({ hosts = { fmt("test%d.com", i) } }) + bp.routes:insert({ hosts = { fmt("test%d.test", i) } }) end assert(helpers.start_kong({ @@ -703,7 +703,7 @@ for _, strategy in helpers.each_strategy() do end) it("blocks when the consumer exceeds their quota, no matter what service/route used", function() - test_limit("/response-headers?apikey=apikey126&x-kong-limit=video=1", "test%d.com") + test_limit("/response-headers?apikey=apikey126&x-kong-limit=video=1", "test%d.test") end) end) @@ -729,7 +729,7 @@ for _, strategy in helpers.each_strategy() do }) for i = 1, ITERATIONS do - bp.routes:insert({ hosts = { fmt("test%d.com", i) } }) + bp.routes:insert({ hosts = { fmt("test%d.test", i) } }) end assert(helpers.start_kong({ @@ -751,7 +751,7 @@ for _, strategy in helpers.each_strategy() do wait() for i = 1, ITERATIONS do local res = proxy_client():get("/response-headers?x-kong-limit=video=1", { - headers = { Host = fmt("test%d.com", i) }, + headers = { Host = fmt("test%d.test", i) }, }) assert.res_status(200, res) end @@ -760,7 +760,7 @@ for _, strategy in helpers.each_strategy() do -- last query, while limit is ITERATIONS/second local res = proxy_client():get("/response-headers?x-kong-limit=video=1", { - headers = { Host = "test1.com" }, + headers = { Host = "test1.test" }, }) assert.res_status(429, res) assert.equal(0, tonumber(res.headers["x-ratelimit-remaining-video-second"])) @@ -778,7 +778,7 @@ for _, strategy in helpers.each_strategy() do bp, db = init_db(strategy, policy) local route1 = bp.routes:insert { - hosts = { "failtest1.com" }, + hosts = { "failtest1.test" }, } 
bp.response_ratelimiting_plugins:insert { @@ -797,7 +797,7 @@ for _, strategy in helpers.each_strategy() do } local route2 = bp.routes:insert { - hosts = { "failtest2.com" }, + hosts = { "failtest2.test" }, } bp.response_ratelimiting_plugins:insert { @@ -830,7 +830,7 @@ for _, strategy in helpers.each_strategy() do it("does not work if an error occurs", function() local res = proxy_client():get("/response-headers?x-kong-limit=video=1", { - headers = { Host = "failtest1.com" }, + headers = { Host = "failtest1.test" }, }) assert.res_status(200, res) assert.equal(ITERATIONS, tonumber(res.headers["x-ratelimit-limit-video-second"])) @@ -844,7 +844,7 @@ for _, strategy in helpers.each_strategy() do -- Make another request local res = proxy_client():get("/response-headers?x-kong-limit=video=1", { - headers = { Host = "failtest1.com" }, + headers = { Host = "failtest1.test" }, }) local body = assert.res_status(500, res) local json = cjson.decode(body) @@ -853,7 +853,7 @@ for _, strategy in helpers.each_strategy() do it("keeps working if an error occurs", function() local res = proxy_client():get("/response-headers?x-kong-limit=video=1", { - headers = { Host = "failtest2.com" }, + headers = { Host = "failtest2.test" }, }) assert.res_status(200, res) assert.equal(ITERATIONS, tonumber(res.headers["x-ratelimit-limit-video-second"])) @@ -867,7 +867,7 @@ for _, strategy in helpers.each_strategy() do -- Make another request local res = proxy_client():get("/response-headers?x-kong-limit=video=1", { - headers = { Host = "failtest2.com" }, + headers = { Host = "failtest2.test" }, }) assert.res_status(200, res) assert.is_nil(res.headers["x-ratelimit-limit-video-second"]) @@ -882,7 +882,7 @@ for _, strategy in helpers.each_strategy() do local bp = init_db(strategy, policy) local route1 = bp.routes:insert { - hosts = { "failtest3.com" }, + hosts = { "failtest3.test" }, protocols = { "http", "https" }, } @@ -897,7 +897,7 @@ for _, strategy in helpers.each_strategy() do } local route2 = bp.routes:insert { - hosts = { "failtest4.com" }, + hosts = { "failtest4.test" }, protocols = { "http", "https" }, } @@ -927,7 +927,7 @@ for _, strategy in helpers.each_strategy() do it("does not work if an error occurs", function() -- Make another request local res = proxy_client():get("/status/200", { - headers = { Host = "failtest3.com" }, + headers = { Host = "failtest3.test" }, }) local body = assert.res_status(500, res) local json = cjson.decode(body) @@ -936,7 +936,7 @@ for _, strategy in helpers.each_strategy() do it("keeps working if an error occurs", function() -- Make another request local res = proxy_client():get("/status/200", { - headers = { Host = "failtest4.com" }, + headers = { Host = "failtest4.test" }, }) assert.res_status(200, res) assert.falsy(res.headers["x-ratelimit-limit-video-second"]) diff --git a/spec/03-plugins/24-response-rate-limiting/05-integration_spec.lua b/spec/03-plugins/24-response-rate-limiting/05-integration_spec.lua index ef7c712209fd..3c48b76a3c8d 100644 --- a/spec/03-plugins/24-response-rate-limiting/05-integration_spec.lua +++ b/spec/03-plugins/24-response-rate-limiting/05-integration_spec.lua @@ -115,7 +115,7 @@ describe("Plugin: rate-limiting (integration)", function() }) local route1 = assert(bp.routes:insert { - hosts = { "redistest1.com" }, + hosts = { "redistest1.test" }, }) assert(bp.plugins:insert { name = "response-ratelimiting", @@ -135,7 +135,7 @@ describe("Plugin: rate-limiting (integration)", function() }) local route2 = assert(bp.routes:insert { - hosts = { "redistest2.com" 
}, + hosts = { "redistest2.test" }, }) assert(bp.plugins:insert { name = "response-ratelimiting", @@ -156,7 +156,7 @@ describe("Plugin: rate-limiting (integration)", function() if red_version >= version("6.0.0") then local route3 = assert(bp.routes:insert { - hosts = { "redistest3.com" }, + hosts = { "redistest3.test" }, }) assert(bp.plugins:insert { name = "response-ratelimiting", @@ -178,7 +178,7 @@ describe("Plugin: rate-limiting (integration)", function() }) local route4 = assert(bp.routes:insert { - hosts = { "redistest4.com" }, + hosts = { "redistest4.test" }, }) assert(bp.plugins:insert { name = "response-ratelimiting", @@ -233,7 +233,7 @@ describe("Plugin: rate-limiting (integration)", function() method = "GET", path = "/response-headers?x-kong-limit=video=1", headers = { - ["Host"] = "redistest1.com" + ["Host"] = "redistest1.test" } }) assert.res_status(200, res) @@ -265,7 +265,7 @@ describe("Plugin: rate-limiting (integration)", function() method = "GET", path = "/response-headers?x-kong-limit=video=1", headers = { - ["Host"] = "redistest2.com" + ["Host"] = "redistest2.test" } }) assert.res_status(200, res) @@ -298,7 +298,7 @@ describe("Plugin: rate-limiting (integration)", function() method = "GET", path = "/response-headers?x-kong-limit=video=1", headers = { - ["Host"] = "redistest3.com" + ["Host"] = "redistest3.test" } }) assert.res_status(200, res) @@ -334,7 +334,7 @@ describe("Plugin: rate-limiting (integration)", function() method = "GET", path = "/status/200", headers = { - ["Host"] = "redistest3.com" + ["Host"] = "redistest3.test" } }) assert.res_status(200, res) @@ -350,7 +350,7 @@ describe("Plugin: rate-limiting (integration)", function() method = "GET", path = "/status/200", headers = { - ["Host"] = "redistest4.com" + ["Host"] = "redistest4.test" } }) assert.res_status(500, res) diff --git a/spec/03-plugins/25-oauth2/02-api_spec.lua b/spec/03-plugins/25-oauth2/02-api_spec.lua index 46e5cb6ea154..14a46dfdb909 100644 --- a/spec/03-plugins/25-oauth2/02-api_spec.lua +++ b/spec/03-plugins/25-oauth2/02-api_spec.lua @@ -42,7 +42,7 @@ for _, strategy in helpers.each_strategy() do local service lazy_setup(function() - service = admin_api.services:insert({ host = "oauth2_token.com" }) + service = admin_api.services:insert({ host = "oauth2_token.test" }) consumer = admin_api.consumers:insert({ username = "bob" }) admin_api.consumers:insert({ username = "sally" }) end) @@ -59,7 +59,7 @@ for _, strategy in helpers.each_strategy() do path = "/consumers/bob/oauth2", body = { name = "Test APP", - redirect_uris = { "http://google.com/" }, + redirect_uris = { "http://google.test/" }, }, headers = { ["Content-Type"] = "application/json" @@ -68,7 +68,7 @@ for _, strategy in helpers.each_strategy() do local body = cjson.decode(assert.res_status(201, res)) assert.equal(consumer.id, body.consumer.id) assert.equal("Test APP", body.name) - assert.same({ "http://google.com/" }, body.redirect_uris) + assert.same({ "http://google.test/" }, body.redirect_uris) res = assert(admin_client:send { method = "POST", @@ -91,7 +91,7 @@ for _, strategy in helpers.each_strategy() do path = "/consumers/bob/oauth2", body = { name = "Tags APP", - redirect_uris = { "http://example.com/" }, + redirect_uris = { "http://example.test/" }, tags = { "tag1", "tag2" }, }, headers = { @@ -110,7 +110,7 @@ for _, strategy in helpers.each_strategy() do path = "/consumers/bob/oauth2", body = { name = "Test APP", - redirect_uris = { "http://google.com/", "http://google.org/" }, + redirect_uris = { "http://google.test/", 
"http://google.example/" }, }, headers = { ["Content-Type"] = "application/json" @@ -119,7 +119,7 @@ for _, strategy in helpers.each_strategy() do local body = cjson.decode(assert.res_status(201, res)) assert.equal(consumer.id, body.consumer.id) assert.equal("Test APP", body.name) - assert.same({ "http://google.com/", "http://google.org/" }, body.redirect_uris) + assert.same({ "http://google.test/", "http://google.example/" }, body.redirect_uris) end) it("creates multiple oauth2 credentials with the same client_secret", function() local res = assert(admin_client:send { @@ -127,7 +127,7 @@ for _, strategy in helpers.each_strategy() do path = "/consumers/bob/oauth2", body = { name = "Test APP", - redirect_uris = { "http://google.com/" }, + redirect_uris = { "http://google.test/" }, client_secret = "secret123", }, headers = { @@ -140,7 +140,7 @@ for _, strategy in helpers.each_strategy() do path = "/consumers/sally/oauth2", body = { name = "Test APP", - redirect_uris = { "http://google.com/" }, + redirect_uris = { "http://google.test/" }, client_secret = "secret123", }, headers = { @@ -156,7 +156,7 @@ for _, strategy in helpers.each_strategy() do path = "/consumers/bob/oauth2", body = { name = "Test APP", - redirect_uris = { "http://google.com/" }, + redirect_uris = { "http://google.test/" }, hash_secret = true, }, headers = { @@ -173,7 +173,7 @@ for _, strategy in helpers.each_strategy() do path = "/consumers/bob/oauth2", body = { name = "Test APP", - redirect_uris = { "http://google.com/" }, + redirect_uris = { "http://google.test/" }, client_secret = "test", hash_secret = true, }, @@ -193,7 +193,7 @@ for _, strategy in helpers.each_strategy() do path = "/consumers/bob/oauth2", body = { name = "Test APP", - redirect_uris = { "http://google.com/" }, + redirect_uris = { "http://google.test/" }, }, headers = { ["Content-Type"] = "application/json" @@ -209,7 +209,7 @@ for _, strategy in helpers.each_strategy() do path = "/consumers/bob/oauth2", body = { name = "Test APP", - redirect_uris = { "http://google.com/" }, + redirect_uris = { "http://google.test/" }, client_secret = "test", }, headers = { @@ -257,7 +257,7 @@ for _, strategy in helpers.each_strategy() do path = "/consumers/bob/oauth2", body = { name = "Test APP", - redirect_uris = { "http://test.com/#with-fragment" }, + redirect_uris = { "http://test.test/#with-fragment" }, }, headers = { ["Content-Type"] = "application/json" @@ -265,14 +265,14 @@ for _, strategy in helpers.each_strategy() do }) local body = assert.res_status(400, res) local json = cjson.decode(body) - assert.same({ redirect_uris = { "fragment not allowed in 'http://test.com/#with-fragment'" } }, json.fields) + assert.same({ redirect_uris = { "fragment not allowed in 'http://test.test/#with-fragment'" } }, json.fields) local res = assert(admin_client:send { method = "POST", path = "/consumers/bob/oauth2", body = { name = "Test APP", - redirect_uris = {"http://valid.com", "not-valid"} + redirect_uris = {"http://valid.test", "not-valid"} }, headers = { ["Content-Type"] = "application/json" @@ -287,7 +287,7 @@ for _, strategy in helpers.each_strategy() do path = "/consumers/bob/oauth2", body = { name = "Test APP", - redirect_uris = {"http://valid.com", "http://test.com/#with-fragment"} + redirect_uris = {"http://valid.test", "http://test.test/#with-fragment"} }, headers = { ["Content-Type"] = "application/json" @@ -297,7 +297,7 @@ for _, strategy in helpers.each_strategy() do local json = cjson.decode(body) assert.same({ redirect_uris = { ngx.null, - "fragment not allowed 
in 'http://test.com/#with-fragment'" + "fragment not allowed in 'http://test.test/#with-fragment'" } }, json.fields) end) end) @@ -310,7 +310,7 @@ for _, strategy in helpers.each_strategy() do path = "/consumers/bob/oauth2/client_one", body = { name = "Test APP", - redirect_uris = { "http://google.com/" }, + redirect_uris = { "http://google.test/" }, }, headers = { ["Content-Type"] = "application/json" @@ -320,7 +320,7 @@ for _, strategy in helpers.each_strategy() do assert.equal(consumer.id, body.consumer.id) assert.equal("Test APP", body.name) assert.equal("client_one", body.client_id) - assert.same({ "http://google.com/" }, body.redirect_uris) + assert.same({ "http://google.test/" }, body.redirect_uris) local res = assert(admin_client:send { method = "PUT", @@ -393,7 +393,7 @@ for _, strategy in helpers.each_strategy() do local service lazy_setup(function() - service = admin_api.services:insert({ host = "oauth2_token.com" }) + service = admin_api.services:insert({ host = "oauth2_token.test" }) consumer = admin_api.consumers:insert({ username = "bob" }) end) @@ -593,7 +593,7 @@ for _, strategy in helpers.each_strategy() do local service lazy_setup(function() - service = admin_api.services:insert({ host = "oauth2_token.com" }) + service = admin_api.services:insert({ host = "oauth2_token.test" }) consumer = admin_api.consumers:insert({ username = "bob" }) end) @@ -647,7 +647,7 @@ for _, strategy in helpers.each_strategy() do local service lazy_setup(function() - service = admin_api.services:insert({ host = "oauth2_token.com" }) + service = admin_api.services:insert({ host = "oauth2_token.test" }) consumer = admin_api.consumers:insert({ username = "bob" }) end) @@ -703,7 +703,7 @@ for _, strategy in helpers.each_strategy() do local service lazy_setup(function() - service = admin_api.services:insert({ host = "oauth2_token.com" }) + service = admin_api.services:insert({ host = "oauth2_token.test" }) consumer = admin_api.consumers:insert({ username = "bob" }) oauth2_credential = admin_api.oauth2_credentials:insert { name = "Test APP", @@ -764,7 +764,7 @@ for _, strategy in helpers.each_strategy() do local service lazy_setup(function() - service = admin_api.services:insert({ host = "oauth2_token.com" }) + service = admin_api.services:insert({ host = "oauth2_token.test" }) consumer = admin_api.consumers:insert({ username = "bob" }) oauth2_credential = admin_api.oauth2_credentials:insert { name = "Test APP", @@ -804,7 +804,7 @@ for _, strategy in helpers.each_strategy() do local service lazy_setup(function() - service = admin_api.services:insert({ host = "oauth2_token.com" }) + service = admin_api.services:insert({ host = "oauth2_token.test" }) consumer = admin_api.consumers:insert({ username = "bob" }) oauth2_credential = admin_api.oauth2_credentials:insert { name = "Test APP", diff --git a/spec/03-plugins/25-oauth2/03-access_spec.lua b/spec/03-plugins/25-oauth2/03-access_spec.lua index cde494c43060..48e1cf018a28 100644 --- a/spec/03-plugins/25-oauth2/03-access_spec.lua +++ b/spec/03-plugins/25-oauth2/03-access_spec.lua @@ -48,7 +48,7 @@ local function provision_code(host, extra_headers, client_id, code_challenge) path = "/oauth2/authorize", body = body, headers = kong.table.merge({ - ["Host"] = host or "oauth2.com", + ["Host"] = host or "oauth2.test", ["Content-Type"] = "application/json" }, extra_headers) }) @@ -57,7 +57,7 @@ local function provision_code(host, extra_headers, client_id, code_challenge) request_client:close() if body.redirect_uri then - local iterator, err = 
ngx.re.gmatch(body.redirect_uri, "^http://google\\.com/kong\\?code=([\\w]{32,32})&state=hello$") + local iterator, err = ngx.re.gmatch(body.redirect_uri, "^http://google\\.test/kong\\?code=([\\w]{32,32})&state=hello$") assert.is_nil(err) local m, err = iterator() assert.is_nil(err) @@ -85,7 +85,7 @@ local function provision_token(host, extra_headers, client_id, client_secret, co path = "/oauth2/token", body = body, headers = kong.table.merge({ - ["Host"] = host or "oauth2.com", + ["Host"] = host or "oauth2.test", ["Content-Type"] = "application/json" }, extra_headers) }) @@ -110,7 +110,7 @@ local function refresh_token(host, refresh_token) grant_type = "refresh_token" }, headers = { - ["Host"] = host or "oauth2.com", + ["Host"] = host or "oauth2.test", ["Content-Type"] = "application/json" } }) @@ -187,7 +187,7 @@ describe("Plugin: oauth2 [#" .. strategy .. "]", function() client_id = "clientid123", client_secret = "secret123", hash_secret = true, - redirect_uris = { "http://google.com/kong" }, + redirect_uris = { "http://google.test/kong" }, name = "testapp", consumer = { id = consumer.id }, } @@ -195,7 +195,7 @@ describe("Plugin: oauth2 [#" .. strategy .. "]", function() admin_api.oauth2_credentials:insert { client_id = "clientid789", client_secret = "secret789", - redirect_uris = { "http://google.com/kong?foo=bar&code=123" }, + redirect_uris = { "http://google.test/kong?foo=bar&code=123" }, name = "testapp2", consumer = { id = consumer.id }, } @@ -204,7 +204,7 @@ describe("Plugin: oauth2 [#" .. strategy .. "]", function() client_id = "clientid333", client_secret = "secret333", hash_secret = true, - redirect_uris = { "http://google.com/kong" }, + redirect_uris = { "http://google.test/kong" }, name = "testapp3", consumer = { id = consumer.id }, } @@ -212,7 +212,7 @@ describe("Plugin: oauth2 [#" .. strategy .. "]", function() admin_api.oauth2_credentials:insert { client_id = "clientid456", client_secret = "secret456", - redirect_uris = { "http://one.com/one/", "http://two.com/two" }, + redirect_uris = { "http://one.test/one/", "http://two.test/two" }, name = "testapp3", consumer = { id = consumer.id }, } @@ -221,7 +221,7 @@ describe("Plugin: oauth2 [#" .. strategy .. "]", function() client_id = "clientid1011", client_secret = "secret1011", hash_secret = true, - redirect_uris = { "http://google.com/kong", }, + redirect_uris = { "http://google.test/kong", }, name = "testapp31", consumer = { id = consumer.id }, } @@ -237,7 +237,7 @@ describe("Plugin: oauth2 [#" .. strategy .. "]", function() admin_api.oauth2_credentials:insert { client_id = "clientid11211", client_secret = "secret11211", - redirect_uris = { "http://google.com/kong", }, + redirect_uris = { "http://google.test/kong", }, name = "testapp50", client_type = "public", consumer = { id = consumer.id }, @@ -269,13 +269,13 @@ describe("Plugin: oauth2 [#" .. strategy .. "]", function() local route1 = assert(admin_api.routes:insert({ - hosts = { "oauth2.com" }, + hosts = { "oauth2.test" }, protocols = { "http", "https" }, service = service1, })) local route2 = assert(admin_api.routes:insert({ - hosts = { "example-path.com" }, + hosts = { "example-path.test" }, protocols = { "http", "https" }, service = service2, })) @@ -287,121 +287,121 @@ describe("Plugin: oauth2 [#" .. strategy .. 
"]", function() })) local route3 = assert(admin_api.routes:insert({ - hosts = { "oauth2_3.com" }, + hosts = { "oauth2_3.test" }, protocols = { "http", "https" }, service = service3, })) local route4 = assert(admin_api.routes:insert({ - hosts = { "oauth2_4.com" }, + hosts = { "oauth2_4.test" }, protocols = { "http", "https" }, service = service4, })) local route5 = assert(admin_api.routes:insert({ - hosts = { "oauth2_5.com" }, + hosts = { "oauth2_5.test" }, protocols = { "http", "https" }, service = service5, })) local route6 = assert(admin_api.routes:insert({ - hosts = { "oauth2_6.com" }, + hosts = { "oauth2_6.test" }, protocols = { "http", "https" }, service = service6, })) local route7 = assert(admin_api.routes:insert({ - hosts = { "oauth2_7.com" }, + hosts = { "oauth2_7.test" }, protocols = { "http", "https" }, service = service7, })) local route8 = assert(admin_api.routes:insert({ - hosts = { "oauth2_8.com" }, + hosts = { "oauth2_8.test" }, protocols = { "http", "https" }, service = service8, })) local route9 = assert(admin_api.routes:insert({ - hosts = { "oauth2_9.com" }, + hosts = { "oauth2_9.test" }, protocols = { "http", "https" }, service = service9, })) local route10 = assert(admin_api.routes:insert({ - hosts = { "oauth2_10.com" }, + hosts = { "oauth2_10.test" }, protocols = { "http", "https" }, service = service10, })) local route11 = assert(admin_api.routes:insert({ - hosts = { "oauth2_11.com" }, + hosts = { "oauth2_11.test" }, protocols = { "http", "https" }, service = service11, })) local route12 = assert(admin_api.routes:insert({ - hosts = { "oauth2_12.com" }, + hosts = { "oauth2_12.test" }, protocols = { "http", "https" }, service = service12, })) local route13 = assert(admin_api.routes:insert({ - hosts = { "oauth2_13.com" }, + hosts = { "oauth2_13.test" }, protocols = { "http", "https" }, service = service13, })) local route_c = assert(admin_api.routes:insert({ - hosts = { "oauth2__c.com" }, + hosts = { "oauth2__c.test" }, protocols = { "http", "https" }, service = service_c, })) local route14 = assert(admin_api.routes:insert({ - hosts = { "oauth2_14.com" }, + hosts = { "oauth2_14.test" }, protocols = { "http", "https" }, service = service14, })) local route15 = assert(admin_api.routes:insert({ - hosts = { "oauth2_15.com" }, + hosts = { "oauth2_15.test" }, protocols = { "http", "https" }, service = service15, })) local route16 = assert(admin_api.routes:insert({ - hosts = { "oauth2_16.com" }, + hosts = { "oauth2_16.test" }, protocols = { "http", "https" }, service = service16, })) local route17 = assert(admin_api.routes:insert({ - hosts = { "oauth2_17.com" }, + hosts = { "oauth2_17.test" }, protocols = { "http", "https" }, service = service17, })) local route18 = assert(admin_api.routes:insert({ - hosts = { "oauth2_18.com" }, + hosts = { "oauth2_18.test" }, protocols = { "http", "https" }, service = service18, })) local route19 = assert(admin_api.routes:insert({ - hosts = { "oauth2_19.com" }, + hosts = { "oauth2_19.test" }, protocols = { "http", "https" }, service = service19, })) local route20 = assert(admin_api.routes:insert({ - hosts = { "oauth2_20.com" }, + hosts = { "oauth2_20.test" }, protocols = { "http", "https" }, service = service20, })) local route21 = assert(admin_api.routes:insert({ - hosts = { "oauth2_21.com" }, + hosts = { "oauth2_21.test" }, protocols = { "http", "https" }, service = service21, })) @@ -413,13 +413,13 @@ describe("Plugin: oauth2 [#" .. strategy .. 
"]", function() local route_grpc = assert(admin_api.routes:insert { protocols = { "grpc", "grpcs" }, - hosts = { "oauth2_grpc.com" }, + hosts = { "oauth2_grpc.test" }, paths = { "/hello.HelloService/SayHello" }, service = service_grpc, }) local route_provgrpc = assert(admin_api.routes:insert { - hosts = { "oauth2_grpc.com" }, + hosts = { "oauth2_grpc.test" }, paths = { "/" }, service = service_grpc, }) @@ -636,7 +636,7 @@ describe("Plugin: oauth2 [#" .. strategy .. "]", function() method = "POST", path = "/oauth2/authorize", headers = { - ["Host"] = "oauth2.com" + ["Host"] = "oauth2.test" } }) local body = assert.res_status(400, res) @@ -650,7 +650,7 @@ describe("Plugin: oauth2 [#" .. strategy .. "]", function() local ok, err = helpers.proxy_client_grpcs(){ service = "hello.HelloService.SayHello", opts = { - ["-authority"] = "oauth2.com", + ["-authority"] = "oauth2.test", }, } assert.falsy(ok) @@ -665,7 +665,7 @@ describe("Plugin: oauth2 [#" .. strategy .. "]", function() provision_key = "provision123" }, headers = { - ["Host"] = "oauth2.com", + ["Host"] = "oauth2.test", ["Content-Type"] = "application/json" } }) @@ -682,7 +682,7 @@ describe("Plugin: oauth2 [#" .. strategy .. "]", function() authenticated_userid = "id123" }, headers = { - ["Host"] = "oauth2.com", + ["Host"] = "oauth2.test", ["Content-Type"] = "application/json" } }) @@ -702,13 +702,13 @@ describe("Plugin: oauth2 [#" .. strategy .. "]", function() client_id = "clientid123" }, headers = { - ["Host"] = "oauth2.com", + ["Host"] = "oauth2.test", ["Content-Type"] = "application/json" } }) local body = assert.res_status(400, res) local json = cjson.decode(body) - assert.same({ redirect_uri = "http://google.com/kong?error=invalid_scope&error_description=You%20must%20specify%20a%20scope" }, json) + assert.same({ redirect_uri = "http://google.test/kong?error=invalid_scope&error_description=You%20must%20specify%20a%20scope" }, json) end) it("returns an error when an invalid scope is being sent", function() local res = assert(proxy_ssl_client:send { @@ -721,13 +721,13 @@ describe("Plugin: oauth2 [#" .. strategy .. "]", function() scope = "wot" }, headers = { - ["Host"] = "oauth2.com", + ["Host"] = "oauth2.test", ["Content-Type"] = "application/json" } }) local body = assert.res_status(400, res) local json = cjson.decode(body) - assert.same({ redirect_uri = "http://google.com/kong?error=invalid_scope&error_description=%22wot%22%20is%20an%20invalid%20scope" }, json) + assert.same({ redirect_uri = "http://google.test/kong?error=invalid_scope&error_description=%22wot%22%20is%20an%20invalid%20scope" }, json) end) it("returns an error when no response_type is being sent", function() local res = assert(proxy_ssl_client:send { @@ -740,13 +740,13 @@ describe("Plugin: oauth2 [#" .. strategy .. "]", function() scope = "email" }, headers = { - ["Host"] = "oauth2.com", + ["Host"] = "oauth2.test", ["Content-Type"] = "application/json" } }) local body = assert.res_status(400, res) local json = cjson.decode(body) - assert.same({ redirect_uri = "http://google.com/kong?error=unsupported_response_type&error_description=Invalid%20response_type" }, json) + assert.same({ redirect_uri = "http://google.test/kong?error=unsupported_response_type&error_description=Invalid%20response_type" }, json) end) it("returns an error with a state when no response_type is being sent", function() local res = assert(proxy_ssl_client:send { @@ -760,13 +760,13 @@ describe("Plugin: oauth2 [#" .. strategy .. 
"]", function() state = "somestate" }, headers = { - ["Host"] = "oauth2.com", + ["Host"] = "oauth2.test", ["Content-Type"] = "application/json" } }) local body = assert.res_status(400, res) local json = cjson.decode(body) - assert.same({ redirect_uri = "http://google.com/kong?error=unsupported_response_type&error_description=Invalid%20response_type&state=somestate" }, json) + assert.same({ redirect_uri = "http://google.test/kong?error=unsupported_response_type&error_description=Invalid%20response_type&state=somestate" }, json) end) it("returns error when the redirect_uri does not match", function() local res = assert(proxy_ssl_client:send { @@ -778,16 +778,16 @@ describe("Plugin: oauth2 [#" .. strategy .. "]", function() client_id = "clientid123", scope = "email", response_type = "code", - redirect_uri = "http://hello.com/" + redirect_uri = "http://hello.test/" }, headers = { - ["Host"] = "oauth2.com", + ["Host"] = "oauth2.test", ["Content-Type"] = "application/json" } }) local body = assert.res_status(400, res) local json = cjson.decode(body) - assert.same({ redirect_uri = "http://google.com/kong?error=invalid_request&error_description=Invalid%20redirect_uri%20that%20does%20not%20match%20with%20any%20redirect_uri%20created%20with%20the%20application" }, json) + assert.same({ redirect_uri = "http://google.test/kong?error=invalid_request&error_description=Invalid%20redirect_uri%20that%20does%20not%20match%20with%20any%20redirect_uri%20created%20with%20the%20application" }, json) end) it("works even if redirect_uri contains a query string", function() local res = assert(proxy_client:send { @@ -801,13 +801,13 @@ describe("Plugin: oauth2 [#" .. strategy .. "]", function() response_type = "code" }, headers = { - ["Host"] = "oauth2_6.com", + ["Host"] = "oauth2_6.test", ["Content-Type"] = "application/json", ["X-Forwarded-Proto"] = "https" } }) local body = cjson.decode(assert.res_status(200, res)) - assert.is_table(ngx.re.match(body.redirect_uri, "^http://google\\.com/kong\\?code=[\\w]{32,32}&foo=bar$")) + assert.is_table(ngx.re.match(body.redirect_uri, "^http://google\\.test/kong\\?code=[\\w]{32,32}&foo=bar$")) end) it("works with multiple redirect_uris in the application", function() local res = assert(proxy_client:send { @@ -821,14 +821,14 @@ describe("Plugin: oauth2 [#" .. strategy .. "]", function() response_type = "code" }, headers = { - ["Host"] = "oauth2_6.com", + ["Host"] = "oauth2_6.test", ["Content-Type"] = "application/json", ["X-Forwarded-Proto"] = "https" } }) assert.response(res).has.status(200) local json = assert.response(res).has.jsonbody() - assert.truthy(ngx.re.match(json.redirect_uri, "^http://one\\.com/one/\\?code=[\\w]{32,32}$")) + assert.truthy(ngx.re.match(json.redirect_uri, "^http://one\\.test/one/\\?code=[\\w]{32,32}$")) end) it("fails when not under HTTPS", function() local res = assert(proxy_client:send { @@ -842,7 +842,7 @@ describe("Plugin: oauth2 [#" .. strategy .. "]", function() response_type = "code" }, headers = { - ["Host"] = "oauth2.com", + ["Host"] = "oauth2.test", ["Content-Type"] = "application/json" } }) @@ -864,13 +864,13 @@ describe("Plugin: oauth2 [#" .. strategy .. 
"]", function() response_type = "code" }, headers = { - ["Host"] = "oauth2_6.com", + ["Host"] = "oauth2_6.test", ["Content-Type"] = "application/json", ["X-Forwarded-Proto"] = "https" } }) local body = cjson.decode(assert.res_status(200, res)) - assert.is_table(ngx.re.match(body.redirect_uri, "^http://google\\.com/kong\\?code=[\\w]{32,32}$")) + assert.is_table(ngx.re.match(body.redirect_uri, "^http://google\\.test/kong\\?code=[\\w]{32,32}$")) end) it("fails when not under HTTPS and accept_http_if_already_terminated is false", function() local res = assert(proxy_client:send { @@ -884,7 +884,7 @@ describe("Plugin: oauth2 [#" .. strategy .. "]", function() response_type = "code" }, headers = { - ["Host"] = "oauth2.com", + ["Host"] = "oauth2.test", ["Content-Type"] = "application/json", ["X-Forwarded-Proto"] = "https" } @@ -907,12 +907,12 @@ describe("Plugin: oauth2 [#" .. strategy .. "]", function() response_type = "code" }, headers = { - ["Host"] = "oauth2.com", + ["Host"] = "oauth2.test", ["Content-Type"] = "application/json" } }) local body = cjson.decode(assert.res_status(200, res)) - assert.is_table(ngx.re.match(body.redirect_uri, "^http://google\\.com/kong\\?code=[\\w]{32,32}$")) + assert.is_table(ngx.re.match(body.redirect_uri, "^http://google\\.test/kong\\?code=[\\w]{32,32}$")) end) it("fails with a path when using the DNS", function() local res = assert(proxy_ssl_client:send { @@ -926,7 +926,7 @@ describe("Plugin: oauth2 [#" .. strategy .. "]", function() response_type = "code", }, headers = { - ["Host"] = "example-path.com", + ["Host"] = "example-path.test", ["Content-Type"] = "application/json", }, }) @@ -950,7 +950,7 @@ describe("Plugin: oauth2 [#" .. strategy .. "]", function() } }) local body = cjson.decode(assert.res_status(200, res)) - assert.is_table(ngx.re.match(body.redirect_uri, "^http://google\\.com/kong\\?code=[\\w]{32,32}$")) + assert.is_table(ngx.re.match(body.redirect_uri, "^http://google\\.test/kong\\?code=[\\w]{32,32}$")) end) it("returns success when requesting the url with final slash", function() local res = assert(proxy_ssl_client:send { @@ -964,12 +964,12 @@ describe("Plugin: oauth2 [#" .. strategy .. "]", function() response_type = "code" }, headers = { - ["Host"] = "oauth2.com", + ["Host"] = "oauth2.test", ["Content-Type"] = "application/json" } }) local body = cjson.decode(assert.res_status(200, res)) - assert.is_table(ngx.re.match(body.redirect_uri, "^http://google\\.com/kong\\?code=[\\w]{32,32}$")) + assert.is_table(ngx.re.match(body.redirect_uri, "^http://google\\.test/kong\\?code=[\\w]{32,32}$")) end) it("returns success with a state", function() local res = assert(proxy_ssl_client:send { @@ -984,12 +984,12 @@ describe("Plugin: oauth2 [#" .. strategy .. "]", function() state = "hello" }, headers = { - ["Host"] = "oauth2.com", + ["Host"] = "oauth2.test", ["Content-Type"] = "application/json" } }) local body = cjson.decode(assert.res_status(200, res)) - assert.is_table(ngx.re.match(body.redirect_uri, "^http://google\\.com/kong\\?code=[\\w]{32,32}&state=hello$")) + assert.is_table(ngx.re.match(body.redirect_uri, "^http://google\\.test/kong\\?code=[\\w]{32,32}&state=hello$")) -- Checking headers assert.are.equal("no-store", res.headers["cache-control"]) assert.are.equal("no-cache", res.headers["pragma"]) @@ -1007,14 +1007,14 @@ describe("Plugin: oauth2 [#" .. strategy .. 
"]", function() authenticated_userid = "userid123" }, headers = { - ["Host"] = "oauth2.com", + ["Host"] = "oauth2.test", ["Content-Type"] = "application/json" } }) local body = cjson.decode(assert.res_status(200, res)) - assert.is_table(ngx.re.match(body.redirect_uri, "^http://google\\.com/kong\\?code=[\\w]{32,32}&state=hello$")) + assert.is_table(ngx.re.match(body.redirect_uri, "^http://google\\.test/kong\\?code=[\\w]{32,32}&state=hello$")) - local iterator, err = ngx.re.gmatch(body.redirect_uri, "^http://google\\.com/kong\\?code=([\\w]{32,32})&state=hello$") + local iterator, err = ngx.re.gmatch(body.redirect_uri, "^http://google\\.test/kong\\?code=([\\w]{32,32})&state=hello$") assert.is_nil(err) local m, err = iterator() assert.is_nil(err) @@ -1037,14 +1037,14 @@ describe("Plugin: oauth2 [#" .. strategy .. "]", function() authenticated_userid = "userid123" }, headers = { - ["Host"] = "oauth2.com", + ["Host"] = "oauth2.test", ["Content-Type"] = "application/json" } }) local body = cjson.decode(assert.res_status(200, res)) - assert.is_table(ngx.re.match(body.redirect_uri, "^http://google\\.com/kong\\?code=[\\w]{32,32}&state=hello$")) + assert.is_table(ngx.re.match(body.redirect_uri, "^http://google\\.test/kong\\?code=[\\w]{32,32}&state=hello$")) - local iterator, err = ngx.re.gmatch(body.redirect_uri, "^http://google\\.com/kong\\?code=([\\w]{32,32})&state=hello$") + local iterator, err = ngx.re.gmatch(body.redirect_uri, "^http://google\\.test/kong\\?code=([\\w]{32,32})&state=hello$") assert.is_nil(err) local m, err = iterator() assert.is_nil(err) @@ -1068,13 +1068,13 @@ describe("Plugin: oauth2 [#" .. strategy .. "]", function() code_challenge_method = "foo", }, headers = { - ["Host"] = "oauth2.com", + ["Host"] = "oauth2.test", ["Content-Type"] = "application/json" } }) local body = assert.res_status(400, res) local json = cjson.decode(body) - assert.same({ redirect_uri = "http://google.com/kong?error=invalid_request&error_description=code_challenge_method%20is%20not%20supported%2c%20must%20be%20S256&state=hello" }, json) + assert.same({ redirect_uri = "http://google.test/kong?error=invalid_request&error_description=code_challenge_method%20is%20not%20supported%2c%20must%20be%20S256&state=hello" }, json) end) it("fails when code challenge method is provided without code challenge", function() local res = assert(proxy_ssl_client:send { @@ -1090,13 +1090,13 @@ describe("Plugin: oauth2 [#" .. strategy .. "]", function() code_challenge_method = "H256", }, headers = { - ["Host"] = "oauth2.com", + ["Host"] = "oauth2.test", ["Content-Type"] = "application/json", } }) local body = assert.res_status(400, res) local json = cjson.decode(body) - assert.same({ redirect_uri = "http://google.com/kong?error=invalid_request&error_description=code_challenge%20is%20required%20when%20code_method%20is%20present&state=hello" }, json) + assert.same({ redirect_uri = "http://google.test/kong?error=invalid_request&error_description=code_challenge%20is%20required%20when%20code_method%20is%20present&state=hello" }, json) end) it("fails when code challenge is not included for public client", function() local res = assert(proxy_ssl_client:send { @@ -1111,13 +1111,13 @@ describe("Plugin: oauth2 [#" .. strategy .. 
"]", function() authenticated_userid = "userid123", }, headers = { - ["Host"] = "oauth2.com", + ["Host"] = "oauth2.test", ["Content-Type"] = "application/json" } }) local body = assert.res_status(400, res) local json = cjson.decode(body) - assert.same({ redirect_uri = "http://google.com/kong?error=invalid_request&error_description=code_challenge%20is%20required%20for%20public%20clients&state=hello" }, json) + assert.same({ redirect_uri = "http://google.test/kong?error=invalid_request&error_description=code_challenge%20is%20required%20for%20public%20clients&state=hello" }, json) end) it("fails when code challenge is not included for confidential client when conf.pkce is strict", function() local res = assert(proxy_ssl_client:send { @@ -1132,13 +1132,13 @@ describe("Plugin: oauth2 [#" .. strategy .. "]", function() authenticated_userid = "userid123", }, headers = { - ["Host"] = "oauth2_15.com", + ["Host"] = "oauth2_15.test", ["Content-Type"] = "application/json" } }) local body = assert.res_status(400, res) local json = cjson.decode(body) - assert.same({ redirect_uri = "http://google.com/kong?error=invalid_request&error_description=code_challenge%20is%20required%20for%20confidential%20clients&state=hello" }, json) + assert.same({ redirect_uri = "http://google.test/kong?error=invalid_request&error_description=code_challenge%20is%20required%20for%20confidential%20clients&state=hello" }, json) end) it("returns success when code challenge is not included for public client when conf.pkce is none", function() local res = assert(proxy_ssl_client:send { @@ -1153,13 +1153,13 @@ describe("Plugin: oauth2 [#" .. strategy .. "]", function() authenticated_userid = "userid123", }, headers = { - ["Host"] = "oauth2_14.com", + ["Host"] = "oauth2_14.test", ["Content-Type"] = "application/json" } }) local body = assert.res_status(200, res) local json = cjson.decode(body) - local iterator, err = ngx.re.gmatch(json.redirect_uri, "^http://google\\.com/kong\\?code=([\\w]{32,32})&state=hello$") + local iterator, err = ngx.re.gmatch(json.redirect_uri, "^http://google\\.test/kong\\?code=([\\w]{32,32})&state=hello$") assert.is_nil(err) local m, err = iterator() assert.is_nil(err) @@ -1179,13 +1179,13 @@ describe("Plugin: oauth2 [#" .. strategy .. "]", function() code_challenge = "1234", }, headers = { - ["Host"] = "oauth2.com", + ["Host"] = "oauth2.test", ["Content-Type"] = "application/json" } }) local body = assert.res_status(200, res) local json = cjson.decode(body) - local iterator, err = ngx.re.gmatch(json.redirect_uri, "^http://google\\.com/kong\\?code=([\\w]{32,32})&state=hello$") + local iterator, err = ngx.re.gmatch(json.redirect_uri, "^http://google\\.test/kong\\?code=([\\w]{32,32})&state=hello$") assert.is_nil(err) local m, err = iterator() assert.is_nil(err) @@ -1208,13 +1208,13 @@ describe("Plugin: oauth2 [#" .. strategy .. "]", function() code_challenge_method = "S256", }, headers = { - ["Host"] = "oauth2.com", + ["Host"] = "oauth2.test", ["Content-Type"] = "application/json" } }) local body = assert.res_status(200, res) local json = cjson.decode(body) - local iterator, err = ngx.re.gmatch(json.redirect_uri, "^http://google\\.com/kong\\?code=([\\w]{32,32})&state=hello$") + local iterator, err = ngx.re.gmatch(json.redirect_uri, "^http://google\\.test/kong\\?code=([\\w]{32,32})&state=hello$") assert.is_nil(err) local m, err = iterator() assert.is_nil(err) @@ -1237,12 +1237,12 @@ describe("Plugin: oauth2 [#" .. strategy .. 
"]", function() response_type = "token" }, headers = { - ["Host"] = "oauth2.com", + ["Host"] = "oauth2.test", ["Content-Type"] = "application/json" } }) local body = cjson.decode(assert.res_status(200, res)) - assert.is_table(ngx.re.match(body.redirect_uri, "^http://google\\.com/kong\\#access_token=[\\w]{32,32}&expires_in=[\\d]+&token_type=bearer$")) + assert.is_table(ngx.re.match(body.redirect_uri, "^http://google\\.test/kong\\#access_token=[\\w]{32,32}&expires_in=[\\d]+&token_type=bearer$")) assert.are.equal("no-store", res.headers["cache-control"]) assert.are.equal("no-cache", res.headers["pragma"]) end) @@ -1259,12 +1259,12 @@ describe("Plugin: oauth2 [#" .. strategy .. "]", function() state = "wot" }, headers = { - ["Host"] = "oauth2.com", + ["Host"] = "oauth2.test", ["Content-Type"] = "application/json" } }) local body = cjson.decode(assert.res_status(200, res)) - assert.is_table(ngx.re.match(body.redirect_uri, "^http://google\\.com/kong\\#access_token=[\\w]{32,32}&expires_in=[\\d]+&state=wot&token_type=bearer$")) + assert.is_table(ngx.re.match(body.redirect_uri, "^http://google\\.test/kong\\#access_token=[\\w]{32,32}&expires_in=[\\d]+&state=wot&token_type=bearer$")) end) it("returns success and the token should have the right expiration", function() local res = assert(proxy_ssl_client:send { @@ -1278,14 +1278,14 @@ describe("Plugin: oauth2 [#" .. strategy .. "]", function() response_type = "token" }, headers = { - ["Host"] = "oauth2.com", + ["Host"] = "oauth2.test", ["Content-Type"] = "application/json" } }) local body = cjson.decode(assert.res_status(200, res)) - assert.is_table(ngx.re.match(body.redirect_uri, "^http://google\\.com/kong\\#access_token=[\\w]{32,32}&expires_in=[\\d]+&token_type=bearer$")) + assert.is_table(ngx.re.match(body.redirect_uri, "^http://google\\.test/kong\\#access_token=[\\w]{32,32}&expires_in=[\\d]+&token_type=bearer$")) - local iterator, err = ngx.re.gmatch(body.redirect_uri, "^http://google\\.com/kong\\#access_token=([\\w]{32,32})&expires_in=[\\d]+&token_type=bearer$") + local iterator, err = ngx.re.gmatch(body.redirect_uri, "^http://google\\.test/kong\\#access_token=([\\w]{32,32})&expires_in=[\\d]+&token_type=bearer$") assert.is_nil(err) local m, err = iterator() assert.is_nil(err) @@ -1306,14 +1306,14 @@ describe("Plugin: oauth2 [#" .. strategy .. "]", function() authenticated_userid = "userid123" }, headers = { - ["Host"] = "oauth2.com", + ["Host"] = "oauth2.test", ["Content-Type"] = "application/json" } }) local body = cjson.decode(assert.res_status(200, res)) - assert.is_table(ngx.re.match(body.redirect_uri, "^http://google\\.com/kong\\#access_token=[\\w]{32,32}&expires_in=[\\d]+&token_type=bearer$")) + assert.is_table(ngx.re.match(body.redirect_uri, "^http://google\\.test/kong\\#access_token=[\\w]{32,32}&expires_in=[\\d]+&token_type=bearer$")) - local iterator, err = ngx.re.gmatch(body.redirect_uri, "^http://google\\.com/kong\\#access_token=([\\w]{32,32})&expires_in=[\\d]+&token_type=bearer$") + local iterator, err = ngx.re.gmatch(body.redirect_uri, "^http://google\\.test/kong\\#access_token=([\\w]{32,32})&expires_in=[\\d]+&token_type=bearer$") assert.is_nil(err) local m, err = iterator() assert.is_nil(err) @@ -1338,12 +1338,12 @@ describe("Plugin: oauth2 [#" .. strategy .. 
"]", function() authenticated_userid = "userid123" }, headers = { - ["Host"] = "oauth2.com", + ["Host"] = "oauth2.test", ["Content-Type"] = "application/json" } }) local body = cjson.decode(assert.res_status(200, res)) - local iterator, err = ngx.re.gmatch(body.redirect_uri, "^http://google\\.com/kong\\#access_token=([\\w]{32,32})&expires_in=[\\d]+&token_type=bearer$") + local iterator, err = ngx.re.gmatch(body.redirect_uri, "^http://google\\.test/kong\\#access_token=([\\w]{32,32})&expires_in=[\\d]+&token_type=bearer$") assert.is_nil(err) local m, err = iterator() assert.is_nil(err) @@ -1353,7 +1353,7 @@ describe("Plugin: oauth2 [#" .. strategy .. "]", function() method = "GET", path = "/request?access_token=" .. access_token, headers = { - ["Host"] = "oauth2.com" + ["Host"] = "oauth2.test" } }) local body = cjson.decode(assert.res_status(200, res)) @@ -1376,7 +1376,7 @@ describe("Plugin: oauth2 [#" .. strategy .. "]", function() response_type = "token" }, headers = { - ["Host"] = "oauth2_4.com", + ["Host"] = "oauth2_4.test", ["Content-Type"] = "application/json" } }) @@ -1396,7 +1396,7 @@ describe("Plugin: oauth2 [#" .. strategy .. "]", function() grant_type = "client_credentials", }, headers = { - ["Host"] = "oauth2_4.com", + ["Host"] = "oauth2_4.test", ["Content-Type"] = "application/json" } }) @@ -1414,7 +1414,7 @@ describe("Plugin: oauth2 [#" .. strategy .. "]", function() grant_type = "client_credentials", }, headers = { - ["Host"] = "oauth2_4.com", + ["Host"] = "oauth2_4.test", ["Content-Type"] = "application/json" } }) @@ -1428,7 +1428,7 @@ describe("Plugin: oauth2 [#" .. strategy .. "]", function() path = "/oauth2/token?client_id&grant_type=client_credentials&client_secret", body = {}, headers = { - ["Host"] = "oauth2_4.com", + ["Host"] = "oauth2_4.test", ["Content-Type"] = "application/json" } }) @@ -1448,7 +1448,7 @@ describe("Plugin: oauth2 [#" .. strategy .. "]", function() response_type = "token" }, headers = { - ["Host"] = "oauth2_4.com", + ["Host"] = "oauth2_4.test", ["Content-Type"] = "application/json" } }) @@ -1467,7 +1467,7 @@ describe("Plugin: oauth2 [#" .. strategy .. "]", function() grant_type = "client_credentials" }, headers = { - ["Host"] = "oauth2_4.com", + ["Host"] = "oauth2_4.test", ["Content-Type"] = "application/json" } }) @@ -1489,7 +1489,7 @@ describe("Plugin: oauth2 [#" .. strategy .. "]", function() authenticated_userid = "user123" }, headers = { - ["Host"] = "oauth2_4.com", + ["Host"] = "oauth2_4.test", ["Content-Type"] = "application/json" } }) @@ -1510,7 +1510,7 @@ describe("Plugin: oauth2 [#" .. strategy .. "]", function() provision_key = "hello" }, headers = { - ["Host"] = "oauth2_4.com", + ["Host"] = "oauth2_4.test", ["Content-Type"] = "application/json" } }) @@ -1529,7 +1529,7 @@ describe("Plugin: oauth2 [#" .. strategy .. "]", function() grant_type = "client_credentials" }, headers = { - ["Host"] = "oauth2_4.com", + ["Host"] = "oauth2_4.test", ["Content-Type"] = "application/json" } }) @@ -1551,7 +1551,7 @@ describe("Plugin: oauth2 [#" .. strategy .. "]", function() grant_type = "client_credentials" }, headers = { - ["Host"] = "oauth2_4.com", + ["Host"] = "oauth2_4.test", ["Content-Type"] = "application/json" } }) @@ -1571,10 +1571,10 @@ describe("Plugin: oauth2 [#" .. strategy .. 
"]", function() client_secret = "secret456", scope = "email", grant_type = "client_credentials", - redirect_uri = "http://two.com/two" + redirect_uri = "http://two.test/two" }, headers = { - ["Host"] = "oauth2_4.com", + ["Host"] = "oauth2_4.test", ["Content-Type"] = "application/json" } }) @@ -1596,7 +1596,7 @@ describe("Plugin: oauth2 [#" .. strategy .. "]", function() grant_type = "client_credentials", }, headers = { - ["Host"] = "oauth2_4.com", + ["Host"] = "oauth2_4.test", ["Content-Type"] = "application/json" } }) @@ -1620,7 +1620,7 @@ describe("Plugin: oauth2 [#" .. strategy .. "]", function() provision_key = "provision123" }, headers = { - ["Host"] = "oauth2_4.com", + ["Host"] = "oauth2_4.test", ["Content-Type"] = "application/json" } }) @@ -1640,7 +1640,7 @@ describe("Plugin: oauth2 [#" .. strategy .. "]", function() grant_type = "client_credentials" }, headers = { - ["Host"] = "oauth2_4.com", + ["Host"] = "oauth2_4.test", ["Content-Type"] = "application/json", Authorization = "Basic Y2xpZW50aWQxMjM6c2VjcmV0MTIz" } @@ -1662,7 +1662,7 @@ describe("Plugin: oauth2 [#" .. strategy .. "]", function() grant_type = "client_credentials" }, headers = { - ["Host"] = "oauth2_4.com", + ["Host"] = "oauth2_4.test", ["Content-Type"] = "application/json", Authorization = "Basic Y2xpZW50aWQxMjM6c2VjcmV0MTIz" } @@ -1683,7 +1683,7 @@ describe("Plugin: oauth2 [#" .. strategy .. "]", function() grant_type = "client_credentials" }, headers = { - ["Host"] = "oauth2_4.com", + ["Host"] = "oauth2_4.test", ["Content-Type"] = "application/json", Authorization = "Basic Y2xpZW50aWQxMjM6c2VjcmV0MTI0" } @@ -1706,7 +1706,7 @@ describe("Plugin: oauth2 [#" .. strategy .. "]", function() provision_key = "provision123" }, headers = { - ["Host"] = "oauth2_4.com", + ["Host"] = "oauth2_4.test", ["Content-Type"] = "application/json" } }) @@ -1716,7 +1716,7 @@ describe("Plugin: oauth2 [#" .. strategy .. "]", function() method = "GET", path = "/request?access_token=" .. body.access_token, headers = { - ["Host"] = "oauth2_4.com" + ["Host"] = "oauth2_4.test" } }) local body = cjson.decode(assert.res_status(200, res)) @@ -1739,7 +1739,7 @@ describe("Plugin: oauth2 [#" .. strategy .. "]", function() provision_key = "provision123" }, headers = { - ["Host"] = "oauth2_4.com", + ["Host"] = "oauth2_4.test", ["Content-Type"] = "multipart/form-data" } }) @@ -1752,7 +1752,7 @@ describe("Plugin: oauth2 [#" .. strategy .. "]", function() access_token = body.access_token }, headers = { - ["Host"] = "oauth2_4.com", + ["Host"] = "oauth2_4.test", ["Content-Type"] = "multipart/form-data" } }) @@ -1766,7 +1766,7 @@ describe("Plugin: oauth2 [#" .. strategy .. "]", function() method = "GET", path = "/request", headers = { - ["Host"] = "oauth2_5.com" + ["Host"] = "oauth2_5.test" } }) local body = assert.res_status(401, res) @@ -1783,7 +1783,7 @@ describe("Plugin: oauth2 [#" .. strategy .. "]", function() response_type = "token" }, headers = { - ["Host"] = "oauth2_5.com", + ["Host"] = "oauth2_5.test", ["Content-Type"] = "application/json" } }) @@ -1802,7 +1802,7 @@ describe("Plugin: oauth2 [#" .. strategy .. "]", function() response_type = "token" }, headers = { - ["Host"] = "oauth2_5.com", + ["Host"] = "oauth2_5.test", ["Content-Type"] = "application/json" } }) @@ -1822,7 +1822,7 @@ describe("Plugin: oauth2 [#" .. strategy .. "]", function() grant_type = "password" }, headers = { - ["Host"] = "oauth2_5.com", + ["Host"] = "oauth2_5.test", ["Content-Type"] = "application/json" } }) @@ -1841,7 +1841,7 @@ describe("Plugin: oauth2 [#" .. strategy .. 
"]", function() grant_type = "password" }, headers = { - ["Host"] = "oauth2_5.com", + ["Host"] = "oauth2_5.test", ["Content-Type"] = "application/json" } }) @@ -1861,7 +1861,7 @@ describe("Plugin: oauth2 [#" .. strategy .. "]", function() grant_type = "password" }, headers = { - ["Host"] = "oauth2_5.com", + ["Host"] = "oauth2_5.test", ["Content-Type"] = "application/json" } }) @@ -1882,7 +1882,7 @@ describe("Plugin: oauth2 [#" .. strategy .. "]", function() grant_type = "password" }, headers = { - ["Host"] = "oauth2_5.com", + ["Host"] = "oauth2_5.test", ["Content-Type"] = "application/json" } }) @@ -1906,7 +1906,7 @@ describe("Plugin: oauth2 [#" .. strategy .. "]", function() grant_type = "password" }, headers = { - ["Host"] = "oauth2_5.com", + ["Host"] = "oauth2_5.test", ["Content-Type"] = "application/json", Authorization = "Basic Y2xpZW50aWQxMjM6c2VjcmV0MTIz" } @@ -1931,7 +1931,7 @@ describe("Plugin: oauth2 [#" .. strategy .. "]", function() grant_type = "password" }, headers = { - ["Host"] = "oauth2_5.com", + ["Host"] = "oauth2_5.test", ["Content-Type"] = "application/json", Authorization = "Basic Y2xpZW50aWQxMjM6c2VjcmV0MTI0" } @@ -1952,7 +1952,7 @@ describe("Plugin: oauth2 [#" .. strategy .. "]", function() grant_type = "password" }, headers = { - ["Host"] = "oauth2_5.com", + ["Host"] = "oauth2_5.test", ["Content-Type"] = "application/json", Authorization = "Basic Y2xpZW50aWQxMjM6c2VjcmV0MTIz" } @@ -1963,7 +1963,7 @@ describe("Plugin: oauth2 [#" .. strategy .. "]", function() method = "GET", path = "/request?access_token=" .. body.access_token, headers = { - ["Host"] = "oauth2_5.com" + ["Host"] = "oauth2_5.test" } }) local body = cjson.decode(assert.res_status(200, res)) @@ -1982,7 +1982,7 @@ describe("Plugin: oauth2 [#" .. strategy .. "]", function() method = "POST", path = "/oauth2/token", headers = { - ["Host"] = "oauth2.com" + ["Host"] = "oauth2.test" } }) local body = assert.res_status(400, res) @@ -2002,7 +2002,7 @@ describe("Plugin: oauth2 [#" .. strategy .. "]", function() code = code }, headers = { - ["Host"] = "oauth2.com", + ["Host"] = "oauth2.test", ["Content-Type"] = "application/json" } }) @@ -2024,7 +2024,7 @@ describe("Plugin: oauth2 [#" .. strategy .. "]", function() client_secret = "secret123" }, headers = { - ["Host"] = "oauth2.com", + ["Host"] = "oauth2.test", ["Content-Type"] = "application/json" } }) @@ -2047,7 +2047,7 @@ describe("Plugin: oauth2 [#" .. strategy .. "]", function() client_secret = "secret123" }, headers = { - ["Host"] = "oauth2.com", + ["Host"] = "oauth2.test", ["Content-Type"] = "application/json" } }) @@ -2068,7 +2068,7 @@ describe("Plugin: oauth2 [#" .. strategy .. "]", function() grant_type = "authorization_code" }, headers = { - ["Host"] = "oauth2.com", + ["Host"] = "oauth2.test", ["Content-Type"] = "application/json" } }) @@ -2089,7 +2089,7 @@ describe("Plugin: oauth2 [#" .. strategy .. "]", function() grant_type = "authorization_code" }, headers = { - ["Host"] = "oauth2.com", + ["Host"] = "oauth2.test", ["Content-Type"] = "application/json" } }) @@ -2116,7 +2116,7 @@ describe("Plugin: oauth2 [#" .. strategy .. "]", function() state = "wot" }, headers = { - ["Host"] = "oauth2.com", + ["Host"] = "oauth2.test", ["Content-Type"] = "application/json" } }) @@ -2143,7 +2143,7 @@ describe("Plugin: oauth2 [#" .. strategy .. "]", function() state = "wot" }, headers = { - ["Host"] = "oauth2.com", + ["Host"] = "oauth2.test", ["Content-Type"] = "application/json" } }) @@ -2162,7 +2162,7 @@ describe("Plugin: oauth2 [#" .. strategy .. 
"]", function() grant_type = "authorization_code" }, headers = { - ["Host"] = "oauth2.com", + ["Host"] = "oauth2.test", ["Content-Type"] = "application/json" } }) @@ -2172,7 +2172,7 @@ describe("Plugin: oauth2 [#" .. strategy .. "]", function() method = "GET", path = "/request?access_token=" .. body.access_token, headers = { - ["Host"] = "oauth2.com" + ["Host"] = "oauth2.test" } }) local body = cjson.decode(assert.res_status(200, res)) @@ -2195,7 +2195,7 @@ describe("Plugin: oauth2 [#" .. strategy .. "]", function() grant_type = "authorization_code" }, headers = { - ["Host"] = "oauth2.com", + ["Host"] = "oauth2.test", ["Content-Type"] = "application/json" } }) @@ -2218,7 +2218,7 @@ describe("Plugin: oauth2 [#" .. strategy .. "]", function() grant_type = "authorization_code" }, headers = { - ["Host"] = "oauth2.com", + ["Host"] = "oauth2.test", ["Content-Type"] = "application/json" } }) @@ -2238,7 +2238,7 @@ describe("Plugin: oauth2 [#" .. strategy .. "]", function() grant_type = "authorization_code" }, headers = { - ["Host"] = "oauth2.com", + ["Host"] = "oauth2.test", ["Content-Type"] = "application/json" } }) @@ -2259,7 +2259,7 @@ describe("Plugin: oauth2 [#" .. strategy .. "]", function() grant_type = "authorization_code" }, headers = { - ["Host"] = "oauth2_3.com", + ["Host"] = "oauth2_3.test", ["Content-Type"] = "application/json" } }) @@ -2280,7 +2280,7 @@ describe("Plugin: oauth2 [#" .. strategy .. "]", function() code_verifier = verifier }, headers = { - ["Host"] = "oauth2.com", + ["Host"] = "oauth2.test", ["Content-Type"] = "application/json" } }) @@ -2305,7 +2305,7 @@ describe("Plugin: oauth2 [#" .. strategy .. "]", function() code_verifier = verifier }, headers = { - ["Host"] = "oauth2.com", + ["Host"] = "oauth2.test", ["Content-Type"] = "application/json", Authorization = "Basic Y2xpZW50aWQxMTIxMQ==" } @@ -2331,7 +2331,7 @@ describe("Plugin: oauth2 [#" .. strategy .. "]", function() code_verifier = verifier }, headers = { - ["Host"] = "oauth2.com", + ["Host"] = "oauth2.test", ["Content-Type"] = "application/json", Authorization = "Basic Y2xpZW50aWQxMTIxMTo=" } @@ -2357,7 +2357,7 @@ describe("Plugin: oauth2 [#" .. strategy .. "]", function() code_verifier = verifier }, headers = { - ["Host"] = "oauth2.com", + ["Host"] = "oauth2.test", ["Content-Type"] = "application/json", Authorization = "Basic Y2xpZW50aWQxMTIxMTogICAg" } @@ -2385,7 +2385,7 @@ describe("Plugin: oauth2 [#" .. strategy .. "]", function() client_secret = "secret11211" }, headers = { - ["Host"] = "oauth2.com", + ["Host"] = "oauth2.test", ["Content-Type"] = "application/json" } }) @@ -2405,7 +2405,7 @@ describe("Plugin: oauth2 [#" .. strategy .. "]", function() code_verifier = verifier, }, headers = { - ["Host"] = "oauth2.com", + ["Host"] = "oauth2.test", ["Content-Type"] = "application/json", Authorization = "Basic Y2xpZW50aWQxMTIxMTpzZWNyZXQxMTIxMQ==" } @@ -2426,7 +2426,7 @@ describe("Plugin: oauth2 [#" .. strategy .. "]", function() grant_type = "authorization_code", }, headers = { - ["Host"] = "oauth2.com", + ["Host"] = "oauth2.test", ["Content-Type"] = "application/json" } }) @@ -2435,7 +2435,7 @@ describe("Plugin: oauth2 [#" .. strategy .. 
"]", function() assert.same({ error_description = "code_verifier is required for PKCE authorization requests", error = "invalid_request" }, json) end) it("success when no code_verifier provided for public app without pkce when conf.pkce is none", function() - local code = provision_code("oauth2_14.com") + local code = provision_code("oauth2_14.test") local res = assert(proxy_ssl_client:send { method = "POST", path = "/oauth2/token", @@ -2446,7 +2446,7 @@ describe("Plugin: oauth2 [#" .. strategy .. "]", function() grant_type = "authorization_code", }, headers = { - ["Host"] = "oauth2_14.com", + ["Host"] = "oauth2_14.test", ["Content-Type"] = "application/json" } }) @@ -2473,7 +2473,7 @@ describe("Plugin: oauth2 [#" .. strategy .. "]", function() code_verifier = code_verifier }, headers = { - ["Host"] = "oauth2.com", + ["Host"] = "oauth2.test", ["Content-Type"] = "application/json" } }) @@ -2500,7 +2500,7 @@ describe("Plugin: oauth2 [#" .. strategy .. "]", function() code_verifier = code_verifier }, headers = { - ["Host"] = "oauth2.com", + ["Host"] = "oauth2.test", ["Content-Type"] = "application/json" } }) @@ -2527,7 +2527,7 @@ describe("Plugin: oauth2 [#" .. strategy .. "]", function() code_verifier = verifier }, headers = { - ["Host"] = "oauth2.com", + ["Host"] = "oauth2.test", ["Content-Type"] = "application/json" } }) @@ -2548,7 +2548,7 @@ describe("Plugin: oauth2 [#" .. strategy .. "]", function() code_verifier = verifier }, headers = { - ["Host"] = "oauth2.com", + ["Host"] = "oauth2.test", ["Content-Type"] = "application/json" } }) @@ -2568,7 +2568,7 @@ describe("Plugin: oauth2 [#" .. strategy .. "]", function() grant_type = "authorization_code", }, headers = { - ["Host"] = "oauth2.com", + ["Host"] = "oauth2.test", ["Content-Type"] = "application/json" } }) @@ -2592,7 +2592,7 @@ describe("Plugin: oauth2 [#" .. strategy .. "]", function() code = code }, headers = { - ["Host"] = "oauth2_5.com", + ["Host"] = "oauth2_5.test", ["Content-Type"] = "application/json" } }) @@ -2612,7 +2612,7 @@ describe("Plugin: oauth2 [#" .. strategy .. "]", function() code_verifier = "" }, headers = { - ["Host"] = "oauth2.com", + ["Host"] = "oauth2.test", ["Content-Type"] = "application/json" } }) @@ -2632,7 +2632,7 @@ describe("Plugin: oauth2 [#" .. strategy .. "]", function() code_verifier = 12 }, headers = { - ["Host"] = "oauth2.com", + ["Host"] = "oauth2.test", ["Content-Type"] = "application/json" } }) @@ -2652,7 +2652,7 @@ describe("Plugin: oauth2 [#" .. strategy .. "]", function() code_verifier = "abcdelfhigklmnopqrstuvwxyz0123456789abcdefg" }, headers = { - ["Host"] = "oauth2.com", + ["Host"] = "oauth2.test", ["Content-Type"] = "application/json" } }) @@ -2662,7 +2662,7 @@ describe("Plugin: oauth2 [#" .. strategy .. "]", function() end) it("fails when code verifier does not match challenge for confidential app when conf.pkce is strict", function() local challenge, _ = get_pkce_tokens() - local code = provision_code("oauth2_15.com", nil, nil, challenge) + local code = provision_code("oauth2_15.test", nil, nil, challenge) local res = assert(proxy_ssl_client:send { method = "POST", path = "/oauth2/token", @@ -2674,7 +2674,7 @@ describe("Plugin: oauth2 [#" .. strategy .. "]", function() code_verifier = "abcdelfhigklmnopqrstuvwxyz0123456789abcdefg" }, headers = { - ["Host"] = "oauth2_15.com", + ["Host"] = "oauth2_15.test", ["Content-Type"] = "application/json" } }) @@ -2695,7 +2695,7 @@ describe("Plugin: oauth2 [#" .. strategy .. 
"]", function() code_verifier = verifier, }, headers = { - ["Host"] = "oauth2.com", + ["Host"] = "oauth2.test", ["Content-Type"] = "application/json" } }) @@ -2713,7 +2713,7 @@ describe("Plugin: oauth2 [#" .. strategy .. "]", function() code_verifier = "verifier", }, headers = { - ["Host"] = "oauth2.com", + ["Host"] = "oauth2.test", ["Content-Type"] = "application/json" } }) @@ -2732,7 +2732,7 @@ describe("Plugin: oauth2 [#" .. strategy .. "]", function() grant_type = "authorization_code", }, headers = { - ["Host"] = "oauth2.com", + ["Host"] = "oauth2.test", ["Content-Type"] = "application/json" } }) @@ -2742,7 +2742,7 @@ describe("Plugin: oauth2 [#" .. strategy .. "]", function() end) it("fails when no code verifier provided for confidential app when conf.pkce is strict", function() local challenge, _ = get_pkce_tokens() - local code = provision_code("oauth2_15.com", nil, nil, challenge) + local code = provision_code("oauth2_15.test", nil, nil, challenge) local res = assert(proxy_ssl_client:send { method = "POST", path = "/oauth2/token", @@ -2753,7 +2753,7 @@ describe("Plugin: oauth2 [#" .. strategy .. "]", function() grant_type = "authorization_code", }, headers = { - ["Host"] = "oauth2_15.com", + ["Host"] = "oauth2_15.test", ["Content-Type"] = "application/json" } }) @@ -2763,7 +2763,7 @@ describe("Plugin: oauth2 [#" .. strategy .. "]", function() end) it("fails when no code verifier provided for confidential app with pkce when conf.pkce is lax", function() local challenge, _ = get_pkce_tokens() - local code = provision_code("oauth2_16.com", nil, nil, challenge) + local code = provision_code("oauth2_16.test", nil, nil, challenge) local res = assert(proxy_ssl_client:send { method = "POST", path = "/oauth2/token", @@ -2774,7 +2774,7 @@ describe("Plugin: oauth2 [#" .. strategy .. "]", function() grant_type = "authorization_code", }, headers = { - ["Host"] = "oauth2_16.com", + ["Host"] = "oauth2_16.test", ["Content-Type"] = "application/json" } }) @@ -2784,7 +2784,7 @@ describe("Plugin: oauth2 [#" .. strategy .. "]", function() end) it("fails when no code verifier provided for confidential app with pkce when conf.pkce is none", function() local challenge, _ = get_pkce_tokens() - local code = provision_code("oauth2_14.com", nil, nil, challenge) + local code = provision_code("oauth2_14.test", nil, nil, challenge) local res = assert(proxy_ssl_client:send { method = "POST", path = "/oauth2/token", @@ -2795,7 +2795,7 @@ describe("Plugin: oauth2 [#" .. strategy .. "]", function() grant_type = "authorization_code", }, headers = { - ["Host"] = "oauth2_14.com", + ["Host"] = "oauth2_14.test", ["Content-Type"] = "application/json" } }) @@ -2804,7 +2804,7 @@ describe("Plugin: oauth2 [#" .. strategy .. "]", function() assert.same({ error_description = "code_verifier is required for PKCE authorization requests", error = "invalid_request" }, json) end) it("suceeds when no code verifier provided for confidential app without pkce when conf.pkce is none", function() - local code = provision_code("oauth2_14.com") + local code = provision_code("oauth2_14.test") local res = assert(proxy_ssl_client:send { method = "POST", path = "/oauth2/token", @@ -2815,7 +2815,7 @@ describe("Plugin: oauth2 [#" .. strategy .. "]", function() grant_type = "authorization_code", }, headers = { - ["Host"] = "oauth2_14.com", + ["Host"] = "oauth2_14.test", ["Content-Type"] = "application/json" } }) @@ -2829,7 +2829,7 @@ describe("Plugin: oauth2 [#" .. strategy .. 
"]", function() assert.matches("%w+", json.refresh_token) end) it("suceeds when no code verifier provided for confidential app without pkce when conf.pkce is lax", function() - local code = provision_code("oauth2_16.com") + local code = provision_code("oauth2_16.test") local res = assert(proxy_ssl_client:send { method = "POST", path = "/oauth2/token", @@ -2840,7 +2840,7 @@ describe("Plugin: oauth2 [#" .. strategy .. "]", function() grant_type = "authorization_code", }, headers = { - ["Host"] = "oauth2_16.com", + ["Host"] = "oauth2_16.test", ["Content-Type"] = "application/json" } }) @@ -2855,7 +2855,7 @@ describe("Plugin: oauth2 [#" .. strategy .. "]", function() end) it("fails when exchanging a code created by a different plugin instance when both plugin instances set global_credentials to true", function() - local code = provision_code("oauth2_16.com") -- obtain a code from plugin oauth2_16.com + local code = provision_code("oauth2_16.test") -- obtain a code from plugin oauth2_16.test local res = assert(proxy_ssl_client:send { method = "POST", path = "/oauth2/token", @@ -2866,7 +2866,7 @@ describe("Plugin: oauth2 [#" .. strategy .. "]", function() grant_type = "authorization_code", }, headers = { - ["Host"] = "oauth2_17.com", -- exchange the code from plugin oauth2_17.com + ["Host"] = "oauth2_17.test", -- exchange the code from plugin oauth2_17.test ["Content-Type"] = "application/json", } }) @@ -2879,7 +2879,7 @@ describe("Plugin: oauth2 [#" .. strategy .. "]", function() end) it("should not fail when plugin_id is not present which indicates it's an old code", function() - local code = provision_code("oauth2_16.com") + local code = provision_code("oauth2_16.test") local db_code, err = db.oauth2_authorization_codes:select_by_code(code) assert.is_nil(err) db_code.plugin = ngx.null @@ -2895,7 +2895,7 @@ describe("Plugin: oauth2 [#" .. strategy .. "]", function() grant_type = "authorization_code", }, headers = { - ["Host"] = "oauth2_16.com", + ["Host"] = "oauth2_16.test", ["Content-Type"] = "application/json", } }) @@ -2909,7 +2909,7 @@ describe("Plugin: oauth2 [#" .. strategy .. "]", function() method = "POST", path = "/request", headers = { - ["Host"] = "oauth2.com", + ["Host"] = "oauth2.test", ["Content-Type"] = "application/json" } }) @@ -2924,32 +2924,32 @@ describe("Plugin: oauth2 [#" .. strategy .. "]", function() method = "GET", path = "/request?access_token=" .. token.access_token, headers = { - ["Host"] = "oauth2.com" + ["Host"] = "oauth2.test" } }) assert.res_status(200, res) end) it("works when a correct access_token is being sent in the custom header", function() - local token = provision_token("oauth2_11.com",nil,"clientid1011","secret1011") + local token = provision_token("oauth2_11.test",nil,"clientid1011","secret1011") local res = assert(proxy_ssl_client:send { method = "GET", path = "/request", headers = { - ["Host"] = "oauth2_11.com", + ["Host"] = "oauth2_11.test", ["custom_header_name"] = "bearer " .. token.access_token, } }) assert.res_status(200, res) end) it("works when a correct access_token is being sent in duplicate custom headers", function() - local token = provision_token("oauth2_11.com",nil,"clientid1011","secret1011") + local token = provision_token("oauth2_11.test",nil,"clientid1011","secret1011") local res = assert(proxy_ssl_client:send { method = "GET", path = "/request", headers = { - ["Host"] = "oauth2_11.com", + ["Host"] = "oauth2_11.test", ["custom_header_name"] = { "bearer " .. token.access_token, "bearer " .. 
token.access_token }, } }) @@ -2960,7 +2960,7 @@ describe("Plugin: oauth2 [#" .. strategy .. "]", function() method = "GET", path = "/request", headers = { - ["Host"] = "oauth2_11.com", + ["Host"] = "oauth2_11.test", ["custom_header_name"] = "", } }) @@ -2984,13 +2984,13 @@ describe("Plugin: oauth2 [#" .. strategy .. "]", function() path = "/oauth2/authorize", body = body, headers = kong.table.merge({ - ["Host"] = "oauth2_18.com", + ["Host"] = "oauth2_18.test", ["Content-Type"] = "application/json" }) }) res = assert(cjson.decode(assert.res_status(200, res))) if res.redirect_uri then - local iterator, err = ngx.re.gmatch(res.redirect_uri, "^http://google\\.com/kong\\?code=([\\w]{32,32})&state=hello$") + local iterator, err = ngx.re.gmatch(res.redirect_uri, "^http://google\\.test/kong\\?code=([\\w]{32,32})&state=hello$") assert.is_nil(err) local m, err = iterator() assert.is_nil(err) @@ -3003,14 +3003,14 @@ describe("Plugin: oauth2 [#" .. strategy .. "]", function() client_id = "clientid123", client_secret = "secret123", grant_type = "authorization_code", - redirect_uri = "http://google.com/kong", + redirect_uri = "http://google.test/kong", } res = assert(request_client:send { method = "POST", path = "/oauth2/token", body = body, headers = { - ["Host"] = "oauth2_18.com", + ["Host"] = "oauth2_18.test", ["Content-Type"] = "application/json" } }) @@ -3027,7 +3027,7 @@ describe("Plugin: oauth2 [#" .. strategy .. "]", function() grant_type = "refresh_token", }, headers = { - ["Host"] = "oauth2_19.com", + ["Host"] = "oauth2_19.test", ["Content-Type"] = "application/json" } }) @@ -3056,13 +3056,13 @@ describe("Plugin: oauth2 [#" .. strategy .. "]", function() path = "/oauth2/authorize", body = body, headers = kong.table.merge({ - ["Host"] = "oauth2_20.com", + ["Host"] = "oauth2_20.test", ["Content-Type"] = "application/json" }) }) res = assert(cjson.decode(assert.res_status(200, res))) if res.redirect_uri then - local iterator, err = ngx.re.gmatch(res.redirect_uri, "^http://google\\.com/kong\\?code=([\\w]{32,32})&state=hello$") + local iterator, err = ngx.re.gmatch(res.redirect_uri, "^http://google\\.test/kong\\?code=([\\w]{32,32})&state=hello$") assert.is_nil(err) local m, err = iterator() assert.is_nil(err) @@ -3075,14 +3075,14 @@ describe("Plugin: oauth2 [#" .. strategy .. "]", function() client_id = "clientid123", client_secret = "secret123", grant_type = "authorization_code", - redirect_uri = "http://google.com/kong", + redirect_uri = "http://google.test/kong", } res = assert(request_client:send { method = "POST", path = "/oauth2/token", body = body, headers = { - ["Host"] = "oauth2_20.com", + ["Host"] = "oauth2_20.test", ["Content-Type"] = "application/json" } }) @@ -3099,7 +3099,7 @@ describe("Plugin: oauth2 [#" .. strategy .. "]", function() grant_type = "refresh_token", }, headers = { - ["Host"] = "oauth2_21.com", + ["Host"] = "oauth2_21.test", ["Content-Type"] = "application/json" } }) @@ -3108,13 +3108,13 @@ describe("Plugin: oauth2 [#" .. strategy .. "]", function() end) it("fails when a correct access_token is being sent in the wrong header", function() - local token = provision_token("oauth2_11.com",nil,"clientid1011","secret1011") + local token = provision_token("oauth2_11.test",nil,"clientid1011","secret1011") local res = assert(proxy_ssl_client:send { method = "GET", path = "/request", headers = { - ["Host"] = "oauth2_11.com", + ["Host"] = "oauth2_11.test", ["authorization"] = "bearer " .. token.access_token, } }) @@ -3127,7 +3127,7 @@ describe("Plugin: oauth2 [#" .. strategy .. 
"]", function() method = "GET", path = "/request?access_token=" .. token.access_token, headers = { - ["Host"] = "oauth2_3.com" + ["Host"] = "oauth2_3.test" } }) local body = assert.res_status(401, res) @@ -3144,7 +3144,7 @@ describe("Plugin: oauth2 [#" .. strategy .. "]", function() access_token = token.access_token }, headers = { - ["Host"] = "oauth2.com", + ["Host"] = "oauth2.test", ["Content-Type"] = "application/json" } }) @@ -3168,7 +3168,7 @@ describe("Plugin: oauth2 [#" .. strategy .. "]", function() method = "GET", path = "/request?access_token=" .. token.access_token, headers = { - ["Host"] = "oauth2.com" + ["Host"] = "oauth2.test" } }) assert.res_status(200, res) @@ -3185,7 +3185,7 @@ describe("Plugin: oauth2 [#" .. strategy .. "]", function() method = "POST", path = "/request", headers = { - ["Host"] = "oauth2.com", + ["Host"] = "oauth2.test", Authorization = "bearer " .. token.access_token } }) @@ -3198,7 +3198,7 @@ describe("Plugin: oauth2 [#" .. strategy .. "]", function() method = "POST", path = "/request", headers = { - ["Host"] = "oauth2.com", + ["Host"] = "oauth2.test", Authorization = "bearer " .. token.access_token } }) @@ -3214,12 +3214,12 @@ describe("Plugin: oauth2 [#" .. strategy .. "]", function() end) it("accepts gRPC call with credentials", function() - local token = provision_token("oauth2_grpc.com") + local token = provision_token("oauth2_grpc.test") local ok, res = helpers.proxy_client_grpcs(){ service = "hello.HelloService.SayHello", opts = { - ["-authority"] = "oauth2_grpc.com", + ["-authority"] = "oauth2_grpc.test", ["-H"] = ("'authorization: bearer %s'"):format(token.access_token), }, } @@ -3248,7 +3248,7 @@ describe("Plugin: oauth2 [#" .. strategy .. "]", function() grant_type = "password", }, headers = { - ["Host"] = "oauth2_5.com", + ["Host"] = "oauth2_5.test", ["Content-Type"] = "application/json" } }) @@ -3261,12 +3261,12 @@ describe("Plugin: oauth2 [#" .. strategy .. "]", function() end end) it("works with right credentials and anonymous", function() - local token = provision_token("oauth2_7.com") + local token = provision_token("oauth2_7.test") local res = assert(proxy_ssl_client:send { method = "POST", path = "/request", headers = { - ["Host"] = "oauth2_7.com", + ["Host"] = "oauth2_7.test", Authorization = "bearer " .. token.access_token } }) @@ -3285,7 +3285,7 @@ describe("Plugin: oauth2 [#" .. strategy .. "]", function() method = "POST", path = "/request", headers = { - ["Host"] = "oauth2_7.com" + ["Host"] = "oauth2_7.test" } }) local body = cjson.decode(assert.res_status(200, res)) @@ -3299,7 +3299,7 @@ describe("Plugin: oauth2 [#" .. strategy .. "]", function() method = "POST", path = "/request", headers = { - ["Host"] = "oauth2__c.com" + ["Host"] = "oauth2__c.test" } }) local body = cjson.decode(assert.res_status(200, res)) @@ -3319,7 +3319,7 @@ describe("Plugin: oauth2 [#" .. strategy .. "]", function() method = "GET", path = "/request", headers = { - ["Host"] = "oauth2_10.com" + ["Host"] = "oauth2_10.test" } }) assert.res_status(500, res) @@ -3336,14 +3336,14 @@ describe("Plugin: oauth2 [#" .. strategy .. 
"]", function() response_type = "token" }, headers = { - ["Host"] = "oauth2_11.com", + ["Host"] = "oauth2_11.test", ["Content-Type"] = "application/json" } }) local body = cjson.decode(assert.res_status(200, res)) - assert.is_table(ngx.re.match(body.redirect_uri, "^http://google\\.com/kong\\#access_token=[\\w]{32,32}&expires_in=[\\d]+&token_type=bearer$")) + assert.is_table(ngx.re.match(body.redirect_uri, "^http://google\\.test/kong\\#access_token=[\\w]{32,32}&expires_in=[\\d]+&token_type=bearer$")) - local iterator, err = ngx.re.gmatch(body.redirect_uri, "^http://google\\.com/kong\\#access_token=([\\w]{32,32})&expires_in=[\\d]+&token_type=bearer$") + local iterator, err = ngx.re.gmatch(body.redirect_uri, "^http://google\\.test/kong\\#access_token=([\\w]{32,32})&expires_in=[\\d]+&token_type=bearer$") assert.is_nil(err) local m, err = iterator() assert.is_nil(err) @@ -3360,7 +3360,7 @@ describe("Plugin: oauth2 [#" .. strategy .. "]", function() method = "GET", path = "/request?access_token=" .. token.access_token, headers = { - ["Host"] = "oauth2_3.com" + ["Host"] = "oauth2_3.test" } }) assert.res_status(401, res) @@ -3370,7 +3370,7 @@ describe("Plugin: oauth2 [#" .. strategy .. "]", function() method = "GET", path = "/request?access_token=" .. token.access_token, headers = { - ["Host"] = "oauth2.com" + ["Host"] = "oauth2.test" } }) assert.res_status(200, res) @@ -3378,13 +3378,13 @@ describe("Plugin: oauth2 [#" .. strategy .. "]", function() describe("Global Credentials", function() it("does not access two different APIs that are not sharing global credentials", function() - local token = provision_token("oauth2_8.com") + local token = provision_token("oauth2_8.test") local res = assert(proxy_ssl_client:send { method = "POST", path = "/request", headers = { - ["Host"] = "oauth2_8.com", + ["Host"] = "oauth2_8.test", Authorization = "bearer " .. token.access_token } }) @@ -3394,20 +3394,20 @@ describe("Plugin: oauth2 [#" .. strategy .. "]", function() method = "POST", path = "/request", headers = { - ["Host"] = "oauth2.com", + ["Host"] = "oauth2.test", Authorization = "bearer " .. token.access_token } }) assert.res_status(401, res) end) it("does not access two different APIs that are not sharing global credentials 2", function() - local token = provision_token("oauth2.com") + local token = provision_token("oauth2.test") local res = assert(proxy_ssl_client:send { method = "POST", path = "/request", headers = { - ["Host"] = "oauth2_8.com", + ["Host"] = "oauth2_8.test", Authorization = "bearer " .. token.access_token } }) @@ -3417,20 +3417,20 @@ describe("Plugin: oauth2 [#" .. strategy .. "]", function() method = "POST", path = "/request", headers = { - ["Host"] = "oauth2.com", + ["Host"] = "oauth2.test", Authorization = "bearer " .. token.access_token } }) assert.res_status(200, res) end) it("access two different APIs that are sharing global credentials", function() - local token = provision_token("oauth2_8.com") + local token = provision_token("oauth2_8.test") local res = assert(proxy_ssl_client:send { method = "POST", path = "/request", headers = { - ["Host"] = "oauth2_8.com", + ["Host"] = "oauth2_8.test", Authorization = "bearer " .. token.access_token } }) @@ -3440,7 +3440,7 @@ describe("Plugin: oauth2 [#" .. strategy .. "]", function() method = "POST", path = "/request", headers = { - ["Host"] = "oauth2_9.com", + ["Host"] = "oauth2_9.test", Authorization = "bearer " .. token.access_token } }) @@ -3455,7 +3455,7 @@ describe("Plugin: oauth2 [#" .. strategy .. 
"]", function() method = "POST", path = "/request", headers = { - ["Host"] = "oauth2.com" + ["Host"] = "oauth2.test" } }) local body = assert.res_status(401, res) @@ -3468,7 +3468,7 @@ describe("Plugin: oauth2 [#" .. strategy .. "]", function() method = "GET", path = "/request?access_token=invalid", headers = { - ["Host"] = "oauth2.com" + ["Host"] = "oauth2.test" } }) local body = assert.res_status(401, res) @@ -3481,7 +3481,7 @@ describe("Plugin: oauth2 [#" .. strategy .. "]", function() method = "POST", path = "/request", headers = { - ["Host"] = "oauth2.com", + ["Host"] = "oauth2.test", Authorization = "bearer invalid" } }) @@ -3501,7 +3501,7 @@ describe("Plugin: oauth2 [#" .. strategy .. "]", function() method = "POST", path = "/request", headers = { - ["Host"] = "oauth2.com", + ["Host"] = "oauth2.test", Authorization = "bearer " .. token.access_token } }) @@ -3529,7 +3529,7 @@ describe("Plugin: oauth2 [#" .. strategy .. "]", function() grant_type = "refresh_token" }, headers = { - ["Host"] = "oauth2.com", + ["Host"] = "oauth2.test", ["Content-Type"] = "application/json" } }) @@ -3550,7 +3550,7 @@ describe("Plugin: oauth2 [#" .. strategy .. "]", function() grant_type = "refresh_token" }, headers = { - ["Host"] = "oauth2.com", + ["Host"] = "oauth2.test", ["Content-Type"] = "application/json" } }) @@ -3575,7 +3575,7 @@ describe("Plugin: oauth2 [#" .. strategy .. "]", function() grant_type = "refresh_token" }, headers = { - ["Host"] = "oauth2.com", + ["Host"] = "oauth2.test", ["Content-Type"] = "application/json" } }) @@ -3601,7 +3601,7 @@ describe("Plugin: oauth2 [#" .. strategy .. "]", function() grant_type = "refresh_token" }, headers = { - ["Host"] = "oauth2.com", + ["Host"] = "oauth2.test", ["Content-Type"] = "application/json" } }) @@ -3620,7 +3620,7 @@ describe("Plugin: oauth2 [#" .. strategy .. "]", function() grant_type = "refresh_token" }, headers = { - ["Host"] = "oauth2.com", + ["Host"] = "oauth2.test", ["Content-Type"] = "application/json" } }) @@ -3641,7 +3641,7 @@ describe("Plugin: oauth2 [#" .. strategy .. "]", function() grant_type = "refresh_token" }, headers = { - ["Host"] = "oauth2.com", + ["Host"] = "oauth2.test", ["Content-Type"] = "application/json" } }) @@ -3658,7 +3658,7 @@ describe("Plugin: oauth2 [#" .. strategy .. "]", function() method = "POST", path = "/request", headers = { - ["Host"] = "oauth2.com", + ["Host"] = "oauth2.test", authorization = "bearer " .. token.access_token } }) @@ -3675,7 +3675,7 @@ describe("Plugin: oauth2 [#" .. strategy .. "]", function() method = "POST", path = "/request", headers = { - ["Host"] = "oauth2.com", + ["Host"] = "oauth2.test", authorization = "bearer " .. token.access_token } }) @@ -3697,7 +3697,7 @@ describe("Plugin: oauth2 [#" .. strategy .. "]", function() grant_type = "refresh_token" }, headers = { - ["Host"] = "oauth2.com", + ["Host"] = "oauth2.test", ["Content-Type"] = "application/json", authorization = "bearer " .. token.access_token } @@ -3723,8 +3723,8 @@ describe("Plugin: oauth2 [#" .. strategy .. 
"]", function() assert.falsy(token.refresh_token == refreshed_token.refresh_token) end) it("does not rewrite persistent refresh tokens", function() - local token = provision_token("oauth2_13.com") - local refreshed_token = refresh_token("oauth2_13.com", token.refresh_token) + local token = provision_token("oauth2_13.test") + local refreshed_token = refresh_token("oauth2_13.test", token.refresh_token) local new_access_token = db.oauth2_tokens:select_by_access_token(refreshed_token.access_token) local new_refresh_token = db.oauth2_tokens:select_by_refresh_token(token.refresh_token) assert.truthy(new_refresh_token) @@ -3743,7 +3743,7 @@ describe("Plugin: oauth2 [#" .. strategy .. "]", function() method = "POST", path = "/request", headers = { - ["Host"] = "oauth2_13.com", + ["Host"] = "oauth2_13.test", Authorization = "bearer " .. refreshed_token.access_token } }) @@ -3757,12 +3757,12 @@ describe("Plugin: oauth2 [#" .. strategy .. "]", function() assert.same({ error_description = "The access token is invalid or has expired", error = "invalid_token" }, json) assert.are.equal('Bearer realm="service" error="invalid_token" error_description="The access token is invalid or has expired"', headers['www-authenticate']) - local final_refreshed_token = refresh_token("oauth2_13.com", refreshed_token.refresh_token) + local final_refreshed_token = refresh_token("oauth2_13.test", refreshed_token.refresh_token) local last_res = assert(proxy_client:send { method = "GET", path = "/request", headers = { - ["Host"] = "oauth2_13.com", + ["Host"] = "oauth2_13.test", authorization = "bearer " .. final_refreshed_token.access_token } }) @@ -3783,7 +3783,7 @@ describe("Plugin: oauth2 [#" .. strategy .. "]", function() access_token = token.access_token }, headers = { - ["Host"] = "oauth2.com", + ["Host"] = "oauth2.test", ["Content-Type"] = "application/x-www-form-urlencoded" } }) @@ -3791,7 +3791,7 @@ describe("Plugin: oauth2 [#" .. strategy .. "]", function() assert.equal(token.access_token, body.post_data.params.access_token) end) it("hides credentials in the body", function() - local token = provision_token("oauth2_3.com") + local token = provision_token("oauth2_3.test") local res = assert(proxy_client:send { method = "POST", @@ -3800,7 +3800,7 @@ describe("Plugin: oauth2 [#" .. strategy .. "]", function() access_token = token.access_token }, headers = { - ["Host"] = "oauth2_3.com", + ["Host"] = "oauth2_3.test", ["Content-Type"] = "application/x-www-form-urlencoded" } }) @@ -3814,33 +3814,33 @@ describe("Plugin: oauth2 [#" .. strategy .. "]", function() method = "GET", path = "/request?access_token=" .. token.access_token, headers = { - ["Host"] = "oauth2.com" + ["Host"] = "oauth2.test" } }) local body = cjson.decode(assert.res_status(200, res)) assert.equal(token.access_token, body.uri_args.access_token) end) it("hides credentials in the querystring", function() - local token = provision_token("oauth2_3.com") + local token = provision_token("oauth2_3.test") local res = assert(proxy_client:send { method = "GET", path = "/request?access_token=" .. 
token.access_token, headers = { - ["Host"] = "oauth2_3.com" + ["Host"] = "oauth2_3.test" } }) local body = cjson.decode(assert.res_status(200, res)) assert.is_nil(body.uri_args.access_token) end) it("hides credentials in the querystring for api with custom header", function() - local token = provision_token("oauth2_12.com",nil,"clientid1011","secret1011") + local token = provision_token("oauth2_12.test",nil,"clientid1011","secret1011") local res = assert(proxy_client:send { method = "GET", path = "/request?access_token=" .. token.access_token, headers = { - ["Host"] = "oauth2_12.com" + ["Host"] = "oauth2_12.test" } }) local body = cjson.decode(assert.res_status(200, res)) @@ -3853,7 +3853,7 @@ describe("Plugin: oauth2 [#" .. strategy .. "]", function() method = "GET", path = "/request", headers = { - ["Host"] = "oauth2.com", + ["Host"] = "oauth2.test", authorization = "bearer " .. token.access_token } }) @@ -3861,13 +3861,13 @@ describe("Plugin: oauth2 [#" .. strategy .. "]", function() assert.equal("bearer " .. token.access_token, body.headers.authorization) end) it("hides credentials in the header", function() - local token = provision_token("oauth2_3.com") + local token = provision_token("oauth2_3.test") local res = assert(proxy_client:send { method = "GET", path = "/request", headers = { - ["Host"] = "oauth2_3.com", + ["Host"] = "oauth2_3.test", authorization = "bearer " .. token.access_token } }) @@ -3875,13 +3875,13 @@ describe("Plugin: oauth2 [#" .. strategy .. "]", function() assert.is_nil(body.headers.authorization) end) it("hides credentials in the custom header", function() - local token = provision_token("oauth2_12.com",nil,"clientid1011","secret1011") + local token = provision_token("oauth2_12.test",nil,"clientid1011","secret1011") local res = assert(proxy_client:send { method = "GET", path = "/request", headers = { - ["Host"] = "oauth2_12.com", + ["Host"] = "oauth2_12.test", ["custom_header_name"] = "bearer " .. token.access_token } }) @@ -3890,7 +3890,7 @@ describe("Plugin: oauth2 [#" .. strategy .. "]", function() assert.is_nil(body.headers.custom_header_name) end) it("does not abort when the request body is a multipart form upload", function() - local token = provision_token("oauth2_3.com") + local token = provision_token("oauth2_3.test") local res = assert(proxy_client:send { method = "POST", @@ -3899,7 +3899,7 @@ describe("Plugin: oauth2 [#" .. strategy .. "]", function() foo = "bar" }, headers = { - ["Host"] = "oauth2_3.com", + ["Host"] = "oauth2_3.test", ["Content-Type"] = "multipart/form-data" } }) @@ -3923,7 +3923,7 @@ describe("Plugin: oauth2 [#" .. strategy .. "]", function() }) local route1 = assert(admin_api.routes:insert({ - hosts = { "logical-and.com" }, + hosts = { "logical-and.test" }, protocols = { "http", "https" }, service = service1 })) @@ -3955,13 +3955,13 @@ describe("Plugin: oauth2 [#" .. strategy .. "]", function() }) local route2 = assert(admin_api.routes:insert({ - hosts = { "logical-or.com" }, + hosts = { "logical-or.test" }, protocols = { "http", "https" }, service = service2 })) local route3 = assert(admin_api.routes:insert({ - hosts = { "logical-or-jwt.com" }, + hosts = { "logical-or-jwt.test" }, protocols = { "http", "https" }, service = service2 })) @@ -4010,7 +4010,7 @@ describe("Plugin: oauth2 [#" .. strategy .. 
"]", function() admin_api.oauth2_credentials:insert { client_id = "clientid4567", client_secret = "secret4567", - redirect_uris = { "http://google.com/kong" }, + redirect_uris = { "http://google.test/kong" }, name = "testapp", consumer = { id = user2.id }, } @@ -4026,13 +4026,13 @@ describe("Plugin: oauth2 [#" .. strategy .. "]", function() describe("multiple auth without anonymous, logical AND", function() it("passes with all credentials provided", function() - local token = provision_token("logical-and.com", + local token = provision_token("logical-and.test", { ["apikey"] = "Mouse"}, "clientid4567", "secret4567").access_token local res = assert(proxy_client:send { method = "GET", path = "/request", headers = { - ["Host"] = "logical-and.com", + ["Host"] = "logical-and.test", ["apikey"] = "Mouse", -- we must provide the apikey again in the extra_headers, for the -- token endpoint, because that endpoint is also protected by the @@ -4055,7 +4055,7 @@ describe("Plugin: oauth2 [#" .. strategy .. "]", function() method = "GET", path = "/request", headers = { - ["Host"] = "logical-and.com", + ["Host"] = "logical-and.test", ["apikey"] = "Mouse", } }) @@ -4067,11 +4067,11 @@ describe("Plugin: oauth2 [#" .. strategy .. "]", function() method = "GET", path = "/request", headers = { - ["Host"] = "logical-and.com", + ["Host"] = "logical-and.test", -- we must provide the apikey again in the extra_headers, for the -- token endpoint, because that endpoint is also protected by the -- key-auth plugin. Otherwise getting the token simply fails. - ["Authorization"] = "bearer " .. provision_token("logical-and.com", + ["Authorization"] = "bearer " .. provision_token("logical-and.test", {["apikey"] = "Mouse"}).access_token, } }) @@ -4083,7 +4083,7 @@ describe("Plugin: oauth2 [#" .. strategy .. "]", function() method = "GET", path = "/request", headers = { - ["Host"] = "logical-and.com", + ["Host"] = "logical-and.test", } }) assert.response(res).has.status(401) @@ -4094,13 +4094,13 @@ describe("Plugin: oauth2 [#" .. strategy .. "]", function() describe("multiple auth with anonymous, logical OR", function() it("passes with all credentials provided", function() - local token = provision_token("logical-or.com", nil, + local token = provision_token("logical-or.test", nil, "clientid4567", "secret4567").access_token local res = assert(proxy_client:send { method = "GET", path = "/request", headers = { - ["Host"] = "logical-or.com", + ["Host"] = "logical-or.test", ["apikey"] = "Mouse", ["Authorization"] = "bearer " .. token, } @@ -4119,7 +4119,7 @@ describe("Plugin: oauth2 [#" .. strategy .. "]", function() method = "GET", path = "/request", headers = { - ["Host"] = "logical-or.com", + ["Host"] = "logical-or.test", ["apikey"] = "Mouse", ["X-Authenticated-Scope"] = "all-access", ["X-Authenticated-UserId"] = "admin", @@ -4145,7 +4145,7 @@ describe("Plugin: oauth2 [#" .. strategy .. "]", function() method = "GET", path = "/request", headers = { - ["Host"] = "logical-or-jwt.com", + ["Host"] = "logical-or-jwt.test", ["Authorization"] = authorization, ["X-Authenticated-Scope"] = "all-access", ["X-Authenticated-UserId"] = "admin", @@ -4164,13 +4164,13 @@ describe("Plugin: oauth2 [#" .. strategy .. 
"]", function() end) it("passes with only the second credential provided", function() - local token = provision_token("logical-or.com", nil, + local token = provision_token("logical-or.test", nil, "clientid4567", "secret4567").access_token local res = assert(proxy_client:send { method = "GET", path = "/request", headers = { - ["Host"] = "logical-or.com", + ["Host"] = "logical-or.test", ["Authorization"] = "bearer " .. token, } }) @@ -4188,7 +4188,7 @@ describe("Plugin: oauth2 [#" .. strategy .. "]", function() method = "GET", path = "/request", headers = { - ["Host"] = "logical-or.com", + ["Host"] = "logical-or.test", } }) assert.response(res).has.status(200) @@ -4203,7 +4203,7 @@ describe("Plugin: oauth2 [#" .. strategy .. "]", function() describe("Plugin: oauth2 (ttl) with #"..strategy, function() lazy_setup(function() local route11 = assert(admin_api.routes:insert({ - hosts = { "oauth2_21.refresh.com" }, + hosts = { "oauth2_21.refresh.test" }, protocols = { "http", "https" }, service = admin_api.services:insert(), })) @@ -4221,7 +4221,7 @@ describe("Plugin: oauth2 [#" .. strategy .. "]", function() } local route12 = assert(admin_api.routes:insert({ - hosts = { "oauth2_22.refresh.com" }, + hosts = { "oauth2_22.refresh.test" }, protocols = { "http", "https" }, service = admin_api.services:insert(), })) @@ -4244,7 +4244,7 @@ describe("Plugin: oauth2 [#" .. strategy .. "]", function() admin_api.oauth2_credentials:insert { client_id = "clientid7890", client_secret = "secret7890", - redirect_uris = { "http://google.com/kong" }, + redirect_uris = { "http://google.test/kong" }, name = "testapp", consumer = { id = consumer.id }, } @@ -4252,7 +4252,7 @@ describe("Plugin: oauth2 [#" .. strategy .. "]", function() describe("refresh token", function() it("is deleted after defined TTL", function() - local token = provision_token("oauth2_21.refresh.com", nil, "clientid7890", "secret7890") + local token = provision_token("oauth2_21.refresh.test", nil, "clientid7890", "secret7890") local token_entity = db.oauth2_tokens:select_by_access_token(token.access_token) assert.is_table(token_entity) @@ -4264,7 +4264,7 @@ describe("Plugin: oauth2 [#" .. strategy .. "]", function() end) it("is not deleted when when TTL is 0 == never", function() - local token = provision_token("oauth2_22.refresh.com", nil, "clientid7890", "secret7890") + local token = provision_token("oauth2_22.refresh.test", nil, "clientid7890", "secret7890") local token_entity = db.oauth2_tokens:select_by_access_token(token.access_token) assert.is_table(token_entity) @@ -4284,7 +4284,7 @@ describe("Plugin: oauth2 [#" .. strategy .. "]", function() -- setup local route_token = assert(admin_api.routes:insert({ - hosts = { "oauth2_regression_4232.com" }, + hosts = { "oauth2_regression_4232.test" }, protocols = { "http", "https" }, service = admin_api.services:insert(), })) @@ -4300,7 +4300,7 @@ describe("Plugin: oauth2 [#" .. strategy .. "]", function() } local route_test = assert(admin_api.routes:insert({ - hosts = { "oauth2_regression_4232_test.com" }, + hosts = { "oauth2_regression_4232_test.test" }, protocols = { "http", "https" }, service = admin_api.services:insert(), })) @@ -4321,14 +4321,14 @@ describe("Plugin: oauth2 [#" .. strategy .. 
"]", function() admin_api.oauth2_credentials:insert { client_id = "clientid_4232", client_secret = "secret_4232", - redirect_uris = { "http://google.com/kong" }, + redirect_uris = { "http://google.test/kong" }, name = "4232_app", consumer = { id = consumer.id }, } -- /setup - local token = provision_token("oauth2_regression_4232.com", nil, + local token = provision_token("oauth2_regression_4232.test", nil, "clientid_4232", "secret_4232") @@ -4341,7 +4341,7 @@ describe("Plugin: oauth2 [#" .. strategy .. "]", function() access_token = token.access_token }, headers = { - ["Host"] = "oauth2_regression_4232_test.com", + ["Host"] = "oauth2_regression_4232_test.test", ["Content-Type"] = "application/json" } }) diff --git a/spec/03-plugins/27-aws-lambda/05-aws-serializer_spec.lua b/spec/03-plugins/27-aws-lambda/05-aws-serializer_spec.lua index 934803cd39d9..35a8259394c6 100644 --- a/spec/03-plugins/27-aws-lambda/05-aws-serializer_spec.lua +++ b/spec/03-plugins/27-aws-lambda/05-aws-serializer_spec.lua @@ -68,7 +68,7 @@ describe("[AWS Lambda] aws-gateway input", function() request_method = "GET", upstream_uri = "/123/strip/more?boolean=;multi-query=first;single-query=hello%20world;multi-query=second", kong_request_id = "1234567890", - host = "abc.myhost.com", + host = "abc.myhost.test", remote_addr = "123.123.123.123" }, ctx = { @@ -120,7 +120,7 @@ describe("[AWS Lambda] aws-gateway input", function() path = "/123/strip/more", protocol = "HTTP/1.1", httpMethod = "GET", - domainName = "abc.myhost.com", + domainName = "abc.myhost.test", domainPrefix = "abc", identity = { sourceIp = "123.123.123.123", userAgent = "curl/7.54.0" }, requestId = "1234567890", @@ -150,7 +150,7 @@ describe("[AWS Lambda] aws-gateway input", function() request_method = "GET", upstream_uri = "/plain/strip/more?boolean=;multi-query=first;single-query=hello%20world;multi-query=second", kong_request_id = "1234567890", - host = "def.myhost.com", + host = "def.myhost.test", remote_addr = "123.123.123.123" }, ctx = { @@ -195,7 +195,7 @@ describe("[AWS Lambda] aws-gateway input", function() path = "/plain/strip/more", protocol = "HTTP/1.0", httpMethod = "GET", - domainName = "def.myhost.com", + domainName = "def.myhost.test", domainPrefix = "def", identity = { sourceIp = "123.123.123.123", userAgent = "curl/7.54.0" }, requestId = "1234567890", @@ -247,7 +247,7 @@ describe("[AWS Lambda] aws-gateway input", function() upstream_uri = "/plain/strip/more", http_content_type = tdata.ct, kong_request_id = "1234567890", - host = "def.myhost.com", + host = "def.myhost.test", remote_addr = "123.123.123.123" }, ctx = { @@ -282,7 +282,7 @@ describe("[AWS Lambda] aws-gateway input", function() path = "/plain/strip/more", protocol = "HTTP/1.0", httpMethod = "GET", - domainName = "def.myhost.com", + domainName = "def.myhost.test", domainPrefix = "def", identity = { sourceIp = "123.123.123.123", userAgent = "curl/7.54.0" }, requestId = "1234567890", diff --git a/spec/03-plugins/27-aws-lambda/06-request-util_spec.lua b/spec/03-plugins/27-aws-lambda/06-request-util_spec.lua index 2e152293bc39..3e52100865aa 100644 --- a/spec/03-plugins/27-aws-lambda/06-request-util_spec.lua +++ b/spec/03-plugins/27-aws-lambda/06-request-util_spec.lua @@ -16,7 +16,7 @@ for _, strategy in helpers.each_strategy() do local route1 = bp.routes:insert { - hosts = { "gw.skipfile.com" }, + hosts = { "gw.skipfile.test" }, } bp.plugins:insert { name = "aws-lambda", @@ -34,7 +34,7 @@ for _, strategy in helpers.each_strategy() do } local route2 = bp.routes:insert { - hosts = { 
"gw.readfile.com" }, + hosts = { "gw.readfile.test" }, } bp.plugins:insert { name = "aws-lambda", @@ -52,7 +52,7 @@ for _, strategy in helpers.each_strategy() do } local route3 = bp.routes:insert { - hosts = { "plain.skipfile.com" }, + hosts = { "plain.skipfile.test" }, } bp.plugins:insert { name = "aws-lambda", @@ -70,7 +70,7 @@ for _, strategy in helpers.each_strategy() do } local route4 = bp.routes:insert { - hosts = { "plain.readfile.com" }, + hosts = { "plain.readfile.test" }, } bp.plugins:insert { name = "aws-lambda", @@ -126,7 +126,7 @@ for _, strategy in helpers.each_strategy() do } local route7 = db.routes:insert { - hosts = { "gw.serviceless.com" }, + hosts = { "gw.serviceless.test" }, } db.plugins:insert { name = "aws-lambda", @@ -177,7 +177,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/get?key1=some_value1&key2=some_value2&key3=some_value3", headers = { - ["Host"] = "plain.skipfile.com" + ["Host"] = "plain.skipfile.test" }, body = request_body }) @@ -195,7 +195,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/get?key1=some_value1&key2=some_value2&key3=some_value3", headers = { - ["Host"] = "plain.skipfile.com" + ["Host"] = "plain.skipfile.test" }, body = request_body, }) @@ -218,7 +218,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/get?key1=some_value1&key2=some_value2&key3=some_value3", headers = { - ["Host"] = "plain.readfile.com" + ["Host"] = "plain.readfile.test" }, body = request_body }) @@ -236,7 +236,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/get?key1=some_value1&key2=some_value2&key3=some_value3", headers = { - ["Host"] = "plain.readfile.com" + ["Host"] = "plain.readfile.test" }, body = request_body, }) @@ -262,7 +262,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/get?key1=some_value1&key2=some_value2&key3=some_value3", headers = { - ["Host"] = "gw.skipfile.com" + ["Host"] = "gw.skipfile.test" }, body = request_body }) @@ -280,7 +280,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/get?key1=some_value1&key2=some_value2&key3=some_value3", headers = { - ["Host"] = "gw.skipfile.com" + ["Host"] = "gw.skipfile.test" }, body = request_body, }) @@ -303,7 +303,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/get?key1=some_value1&key2=some_value2&key3=some_value3", headers = { - ["Host"] = "gw.readfile.com" + ["Host"] = "gw.readfile.test" }, body = request_body }) @@ -321,7 +321,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/get?key1=some_value1&key2=some_value2&key3=some_value3", headers = { - ["Host"] = "gw.readfile.com" + ["Host"] = "gw.readfile.test" }, body = request_body, }) @@ -380,7 +380,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/get?key1=some_value1&key2=some_value2&key3=some_value3", headers = { - ["Host"] = "gw.serviceless.com" + ["Host"] = "gw.serviceless.test" }, body = request_body, }) diff --git a/spec/03-plugins/27-aws-lambda/08-sam-integration_spec.lua b/spec/03-plugins/27-aws-lambda/08-sam-integration_spec.lua index 0ddef1868552..755d1e0e6ca2 100644 --- a/spec/03-plugins/27-aws-lambda/08-sam-integration_spec.lua +++ b/spec/03-plugins/27-aws-lambda/08-sam-integration_spec.lua @@ -40,7 +40,7 @@ if sam.get_os_architecture() ~= "aarch64" then }, { "aws-lambda" }) local route1 = bp.routes:insert { - hosts = { "lambda.com" }, + hosts = { "lambda.test" }, } bp.plugins:insert { @@ -59,7 +59,7 @@ if 
sam.get_os_architecture() ~= "aarch64" then } local route2 = bp.routes:insert { - hosts = { "lambda2.com" }, + hosts = { "lambda2.test" }, } bp.plugins:insert { @@ -111,7 +111,7 @@ if sam.get_os_architecture() ~= "aarch64" then method = "GET", path = "/", headers = { - host = "lambda.com" + host = "lambda.test" } }) assert.res_status(200, res) @@ -122,7 +122,7 @@ if sam.get_os_architecture() ~= "aarch64" then method = "GET", path = "/", headers = { - host = "lambda2.com" + host = "lambda2.test" } }) assert.res_status(201, res) diff --git a/spec/03-plugins/27-aws-lambda/99-access_spec.lua b/spec/03-plugins/27-aws-lambda/99-access_spec.lua index 3ffb2d152149..8508e6b6b9e3 100644 --- a/spec/03-plugins/27-aws-lambda/99-access_spec.lua +++ b/spec/03-plugins/27-aws-lambda/99-access_spec.lua @@ -24,134 +24,134 @@ for _, strategy in helpers.each_strategy() do }, { "aws-lambda" }) local route1 = bp.routes:insert { - hosts = { "lambda.com" }, + hosts = { "lambda.test" }, } local route1_1 = bp.routes:insert { - hosts = { "lambda_ignore_service.com" }, + hosts = { "lambda_ignore_service.test" }, service = assert(bp.services:insert()), } local route2 = bp.routes:insert { - hosts = { "lambda2.com" }, + hosts = { "lambda2.test" }, } local route3 = bp.routes:insert { - hosts = { "lambda3.com" }, + hosts = { "lambda3.test" }, } local route4 = bp.routes:insert { - hosts = { "lambda4.com" }, + hosts = { "lambda4.test" }, } local route5 = bp.routes:insert { - hosts = { "lambda5.com" }, + hosts = { "lambda5.test" }, } local route6 = bp.routes:insert { - hosts = { "lambda6.com" }, + hosts = { "lambda6.test" }, } local route7 = bp.routes:insert { - hosts = { "lambda7.com" }, + hosts = { "lambda7.test" }, } local route8 = bp.routes:insert { - hosts = { "lambda8.com" }, + hosts = { "lambda8.test" }, } local route9 = bp.routes:insert { - hosts = { "lambda9.com" }, + hosts = { "lambda9.test" }, protocols = { "http", "https" }, service = null, } local route10 = bp.routes:insert { - hosts = { "lambda10.com" }, + hosts = { "lambda10.test" }, protocols = { "http", "https" }, service = null, } local route11 = bp.routes:insert { - hosts = { "lambda11.com" }, + hosts = { "lambda11.test" }, protocols = { "http", "https" }, service = null, } local route12 = bp.routes:insert { - hosts = { "lambda12.com" }, + hosts = { "lambda12.test" }, protocols = { "http", "https" }, service = null, } local route13 = bp.routes:insert { - hosts = { "lambda13.com" }, + hosts = { "lambda13.test" }, protocols = { "http", "https" }, service = null, } local route14 = bp.routes:insert { - hosts = { "lambda14.com" }, + hosts = { "lambda14.test" }, protocols = { "http", "https" }, service = null, } local route15 = bp.routes:insert { - hosts = { "lambda15.com" }, + hosts = { "lambda15.test" }, protocols = { "http", "https" }, service = null, } local route16 = bp.routes:insert { - hosts = { "lambda16.com" }, + hosts = { "lambda16.test" }, protocols = { "http", "https" }, service = null, } local route17 = bp.routes:insert { - hosts = { "lambda17.com" }, + hosts = { "lambda17.test" }, protocols = { "http", "https" }, service = null, } local route18 = bp.routes:insert { - hosts = { "lambda18.com" }, + hosts = { "lambda18.test" }, protocols = { "http", "https" }, service = null, } local route19 = bp.routes:insert { - hosts = { "lambda19.com" }, + hosts = { "lambda19.test" }, protocols = { "http", "https" }, service = null, } local route20 = bp.routes:insert { - hosts = { "lambda20.com" }, + hosts = { "lambda20.test" }, protocols = { "http", "https" }, 
service = null, } local route21 = bp.routes:insert { - hosts = { "lambda21.com" }, + hosts = { "lambda21.test" }, protocols = { "http", "https" }, service = null, } local route22 = bp.routes:insert { - hosts = { "lambda22.com" }, + hosts = { "lambda22.test" }, protocols = { "http", "https" }, service = null, } local route23 = bp.routes:insert { - hosts = { "lambda23.com" }, + hosts = { "lambda23.test" }, protocols = { "http", "https" }, service = null, } local route24 = bp.routes:insert { - hosts = { "lambda24.com" }, + hosts = { "lambda24.test" }, protocols = { "http", "https" }, service = null, } @@ -521,7 +521,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/get?key1=some_value1&key2=some_value2&key3=some_value3", headers = { - ["Host"] = "lambda.com" + ["Host"] = "lambda.test" } }) assert.res_status(200, res) @@ -536,7 +536,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/get?key1=some_value1&key2=some_value2&key3=some_value3", headers = { - ["Host"] = "lambda_ignore_service.com" + ["Host"] = "lambda_ignore_service.test" } }) assert.res_status(200, res) @@ -551,7 +551,7 @@ for _, strategy in helpers.each_strategy() do method = "POST", path = "/post", headers = { - ["Host"] = "lambda.com", + ["Host"] = "lambda.test", ["Content-Type"] = "application/x-www-form-urlencoded" }, body = { @@ -571,7 +571,7 @@ for _, strategy in helpers.each_strategy() do method = "POST", path = "/post", headers = { - ["Host"] = "lambda.com", + ["Host"] = "lambda.test", ["Content-Type"] = "application/json" }, body = { @@ -591,7 +591,7 @@ for _, strategy in helpers.each_strategy() do method = "POST", path = "/post", headers = { - ["Host"] = "lambda.com", + ["Host"] = "lambda.test", ["Content-Type"] = "application/json" }, body = '[{}, []]' @@ -605,7 +605,7 @@ for _, strategy in helpers.each_strategy() do method = "POST", path = "/post?key1=from_querystring", headers = { - ["Host"] = "lambda.com", + ["Host"] = "lambda.test", ["Content-Type"] = "application/x-www-form-urlencoded" }, body = { @@ -624,7 +624,7 @@ for _, strategy in helpers.each_strategy() do method = "POST", path = "/post?key1=from_querystring", headers = { - ["Host"] = "lambda9.com", + ["Host"] = "lambda9.test", ["Content-Type"] = "application/xml", ["custom-header"] = "someheader" }, @@ -643,7 +643,7 @@ for _, strategy in helpers.each_strategy() do -- request_headers assert.equal("someheader", body.request_headers["custom-header"]) - assert.equal("lambda9.com", body.request_headers.host) + assert.equal("lambda9.test", body.request_headers.host) -- request_body assert.equal("", body.request_body) @@ -655,7 +655,7 @@ for _, strategy in helpers.each_strategy() do method = "POST", path = "/post?key1=from_querystring", headers = { - ["Host"] = "lambda10.com", + ["Host"] = "lambda10.test", ["Content-Type"] = "application/json", ["custom-header"] = "someheader" }, @@ -673,7 +673,7 @@ for _, strategy in helpers.each_strategy() do assert.is_nil(body.request_uri_args) -- request_headers - assert.equal("lambda10.com", body.request_headers.host) + assert.equal("lambda10.test", body.request_headers.host) assert.equal("someheader", body.request_headers["custom-header"]) -- request_body @@ -686,7 +686,7 @@ for _, strategy in helpers.each_strategy() do method = "POST", path = "/post?key1=from_querystring", headers = { - ["Host"] = "lambda9.com", + ["Host"] = "lambda9.test", ["Content-Type"] = "text/plain", ["custom-header"] = "someheader" }, @@ -705,7 +705,7 @@ for _, strategy in helpers.each_strategy() do -- 
request_headers assert.equal("someheader", body.request_headers["custom-header"]) - assert.equal("lambda9.com", body.request_headers.host) + assert.equal("lambda9.test", body.request_headers.host) -- request_body assert.equal("some text", body.request_body) @@ -718,7 +718,7 @@ for _, strategy in helpers.each_strategy() do method = "POST", path = "/post?key1=from_querystring", headers = { - ["Host"] = "lambda9.com", + ["Host"] = "lambda9.test", ["Content-Type"] = "application/octet-stream", ["custom-header"] = "someheader" }, @@ -736,7 +736,7 @@ for _, strategy in helpers.each_strategy() do assert.is_table(body.request_uri_args) -- request_headers - assert.equal("lambda9.com", body.request_headers.host) + assert.equal("lambda9.test", body.request_headers.host) assert.equal("someheader", body.request_headers["custom-header"]) -- request_body @@ -750,7 +750,7 @@ for _, strategy in helpers.each_strategy() do method = "POST", path = "/post", headers = { - ["Host"] = "lambda2.com", + ["Host"] = "lambda2.test", ["Content-Type"] = "application/x-www-form-urlencoded" }, body = { @@ -768,7 +768,7 @@ for _, strategy in helpers.each_strategy() do method = "POST", path = "/post", headers = { - ["Host"] = "lambda3.com", + ["Host"] = "lambda3.test", ["Content-Type"] = "application/x-www-form-urlencoded" }, body = { @@ -786,7 +786,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/get?key1=some_value1&key2=some_value2&key3=some_value3", headers = { - ["Host"] = "lambda4.com", + ["Host"] = "lambda4.test", } }) assert.res_status(500, res) @@ -797,7 +797,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/get?key1=some_value1&key2=some_value2&key3=some_value3", headers = { - ["Host"] = "lambda5.com" + ["Host"] = "lambda5.test" } }) assert.res_status(200, res) @@ -809,7 +809,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/get?key1=some_value1&key2=some_value2&key3=some_value3", headers = { - ["Host"] = "lambda6.com" + ["Host"] = "lambda6.test" } }) assert.res_status(202, res) @@ -821,7 +821,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/get?key1=some_value1&key2=some_value2&key3=some_value3", headers = { - ["Host"] = "lambda7.com" + ["Host"] = "lambda7.test" } }) assert.res_status(204, res) @@ -833,7 +833,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/get?key1=some_value1&key2=some_value2&key3=some_value3", headers = { - ["Host"] = "lambda8.com" + ["Host"] = "lambda8.test" } }) assert.res_status(412, res) @@ -845,7 +845,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/get?key1=some_value1&key2=some_value2&key3=some_value3", headers = { - ["Host"] = "lambda.com" + ["Host"] = "lambda.test" } }) @@ -859,7 +859,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/get?key1=some_value1&key2=some_value2&key3=some_value3", headers = { - ["Host"] = "lambda.com" + ["Host"] = "lambda.test" } }) @@ -871,7 +871,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/get?key1=some_value1", headers = { - ["Host"] = "lambda15.com" + ["Host"] = "lambda15.test" } }) assert.res_status(500, res) @@ -896,7 +896,7 @@ for _, strategy in helpers.each_strategy() do method = "POST", path = "/post", headers = { - ["Host"] = "lambda11.com", + ["Host"] = "lambda11.test", ["Content-Type"] = "application/json" }, body = { @@ -922,7 +922,7 @@ for _, strategy in helpers.each_strategy() do method = "POST", path = "/post", headers = { - 
["Host"] = "lambda11.com", + ["Host"] = "lambda11.test", ["Content-Type"] = "application/json", }, body = { @@ -951,7 +951,7 @@ for _, strategy in helpers.each_strategy() do method = "POST", path = "/post", headers = { - ["Host"] = "lambda11.com", + ["Host"] = "lambda11.test", ["Content-Type"] = "application/json", }, body = { @@ -969,7 +969,7 @@ for _, strategy in helpers.each_strategy() do method = "POST", path = "/post", headers = { - ["Host"] = "lambda11.com", + ["Host"] = "lambda11.test", ["Content-Type"] = "application/json", }, body = { @@ -987,7 +987,7 @@ for _, strategy in helpers.each_strategy() do method = "POST", path = "/post", headers = { - ["Host"] = "lambda11.com", + ["Host"] = "lambda11.test", ["Content-Type"] = "application/json", }, body = { @@ -1005,7 +1005,7 @@ for _, strategy in helpers.each_strategy() do method = "POST", path = "/post", headers = { - ["Host"] = "lambda11.com", + ["Host"] = "lambda11.test", ["Content-Type"] = "application/json", }, body = { @@ -1024,7 +1024,7 @@ for _, strategy in helpers.each_strategy() do method = "POST", path = "/post", headers = { - ["Host"] = "lambda12.com", + ["Host"] = "lambda12.test", } }) @@ -1038,7 +1038,7 @@ for _, strategy in helpers.each_strategy() do method = "POST", path = "/post", headers = { - ["Host"] = "lambda13.com", + ["Host"] = "lambda13.test", } }) @@ -1052,7 +1052,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/get?key1=some_value1&key2=some_value2&key3=some_value3", headers = { - ["Host"] = "lambda14.com" + ["Host"] = "lambda14.test" } }) assert.res_status(200, res) @@ -1067,7 +1067,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/get?key1=some_value1&key2=some_value2&key3=some_value3", headers = { - ["Host"] = "lambda16.com" + ["Host"] = "lambda16.test" } }) assert.res_status(200, res) @@ -1079,7 +1079,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/get?key1=some_value1&key2=some_value2&key3=some_value3", headers = { - ["Host"] = "lambda22.com" + ["Host"] = "lambda22.test" } }) assert.res_status(502, res) @@ -1091,7 +1091,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/get?key1=some_value1&key2=some_value2&key3=some_value3", headers = { - ["Host"] = "lambda23.com" + ["Host"] = "lambda23.test" } }) assert.res_status(200, res) @@ -1103,7 +1103,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/get?key1=some_value1&key2=some_value2&key3=some_value3", headers = { - ["Host"] = "lambda17.com" + ["Host"] = "lambda17.test" } }) assert.res_status(200, res) @@ -1117,7 +1117,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/get?key1=some_value1", headers = { - ["Host"] = "lambda18.com" + ["Host"] = "lambda18.test" } })) assert.res_status(500, res) @@ -1128,7 +1128,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/get?key1=some_value1&key2=some_value2&key3=some_value3", headers = { - ["Host"] = "lambda.com" + ["Host"] = "lambda.test" } }) assert.res_status(200, res) @@ -1143,7 +1143,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/get?key1=some_value1&key2=some_value2&key3=some_value3", headers = { - ["Host"] = "lambda20.com", + ["Host"] = "lambda20.test", } })) @@ -1158,7 +1158,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/get?a=1&b=2", headers = { - ["Host"] = "lambda21.com" + ["Host"] = "lambda21.test" } })) @@ -1174,7 +1174,7 @@ for _, strategy in helpers.each_strategy() do 
method = "GET", path = "/get", headers = { - ["Host"] = "lambda24.com" + ["Host"] = "lambda24.test" } })) @@ -1211,7 +1211,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/get?key1=some_value1", headers = { - ["Host"] = "lambda19.com" + ["Host"] = "lambda19.test" } })) assert.res_status(200, res) @@ -1238,7 +1238,7 @@ for _, strategy in helpers.each_strategy() do }, { "aws-lambda" }, { "random" }) local route1 = bp.routes:insert { - hosts = { "lambda-vault.com" }, + hosts = { "lambda-vault.test" }, } bp.plugins:insert { @@ -1284,7 +1284,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/get?key1=some_value1&key2=some_value2&key3=some_value3", headers = { - ["Host"] = "lambda-vault.com" + ["Host"] = "lambda-vault.test" } }) assert.res_status(200, res) @@ -1300,7 +1300,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/get?key1=some_value1&key2=some_value2&key3=some_value3", headers = { - ["Host"] = "lambda-vault.com" + ["Host"] = "lambda-vault.test" } }) assert.res_status(200, res) diff --git a/spec/03-plugins/29-acme/01-client_spec.lua b/spec/03-plugins/29-acme/01-client_spec.lua index e5ff149e15b5..0ab8ef14e1d7 100644 --- a/spec/03-plugins/29-acme/01-client_spec.lua +++ b/spec/03-plugins/29-acme/01-client_spec.lua @@ -259,7 +259,7 @@ for _, strategy in helpers.each_strategy() do describe("Plugin: acme (client.save) [#" .. strategy .. "]", function() local bp, db local cert, sni - local host = "test1.com" + local host = "test1.test" lazy_setup(function() bp, db = helpers.get_db_utils(strategy, { @@ -285,7 +285,7 @@ for _, strategy in helpers.each_strategy() do describe("creates new cert", function() local key, crt = new_cert_key_pair() local new_sni, new_cert, err - local new_host = "test2.com" + local new_host = "test2.test" it("returns no error", function() err = client._save_dao(new_host, key, crt) @@ -343,8 +343,8 @@ for _, strategy in ipairs({"off"}) do describe("Plugin: acme (client.renew) [#" .. strategy .. "]", function() local bp local cert - local host = "test1.com" - local host_not_expired = "test2.com" + local host = "test1.test" + local host_not_expired = "test2.test" -- make it due for renewal local key, crt = new_cert_key_pair(ngx.time() - 23333) -- make it not due for renewal diff --git a/spec/03-plugins/29-acme/05-redis_storage_spec.lua b/spec/03-plugins/29-acme/05-redis_storage_spec.lua index 861e7609c9a0..99e0b46e64f7 100644 --- a/spec/03-plugins/29-acme/05-redis_storage_spec.lua +++ b/spec/03-plugins/29-acme/05-redis_storage_spec.lua @@ -289,7 +289,7 @@ describe("Plugin: acme (storage.redis)", function() describe("Plugin: acme (handler.access) [#postgres]", function() local bp - local domain = "mydomain.com" + local domain = "mydomain.test" local dummy_id = "ZR02iVO6PFywzFLj6igWHd6fnK2R07C-97dkQKC7vJo" local namespace = "namespace1" local plugin diff --git a/spec/03-plugins/29-acme/06-hybrid_mode_spec.lua b/spec/03-plugins/29-acme/06-hybrid_mode_spec.lua index 05cab17810e3..8b645f99f28e 100644 --- a/spec/03-plugins/29-acme/06-hybrid_mode_spec.lua +++ b/spec/03-plugins/29-acme/06-hybrid_mode_spec.lua @@ -2,7 +2,7 @@ local helpers = require "spec.helpers" for _, strategy in helpers.each_strategy({"postgres"}) do describe("Plugin: acme (handler.access) worked with [#" .. strategy .. 
"]", function() - local domain = "mydomain.com" + local domain = "mydomain.test" lazy_setup(function() local bp = helpers.get_db_utils(strategy, { diff --git a/spec/03-plugins/30-session/01-access_spec.lua b/spec/03-plugins/30-session/01-access_spec.lua index f8b65ab715d2..a92d0a5ddf65 100644 --- a/spec/03-plugins/30-session/01-access_spec.lua +++ b/spec/03-plugins/30-session/01-access_spec.lua @@ -21,27 +21,27 @@ for _, strategy in helpers.each_strategy() do local route1 = bp.routes:insert { paths = {"/test1"}, - hosts = {"konghq.com"}, + hosts = {"konghq.test"}, } local route2 = bp.routes:insert { paths = {"/test2"}, - hosts = {"konghq.com"}, + hosts = {"konghq.test"}, } local route3 = bp.routes:insert { paths = {"/headers"}, - hosts = {"konghq.com"}, + hosts = {"konghq.test"}, } local route4 = bp.routes:insert { paths = {"/headers"}, - hosts = {"mockbin.org"}, + hosts = {"mockbin.test"}, } local route5 = bp.routes:insert { paths = {"/test5"}, - hosts = {"httpbin.org"}, + hosts = {"httpbin.test"}, } assert(bp.plugins:insert { @@ -188,7 +188,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/test1/status/200", headers = { - host = "konghq.com", + host = "konghq.test", apikey = "kong", }, }) @@ -214,7 +214,7 @@ for _, strategy in helpers.each_strategy() do local request = { method = "GET", path = "/test2/status/200", - headers = { host = "konghq.com", }, + headers = { host = "konghq.test", }, } -- make sure the anonymous consumer can't get in (request termination) @@ -253,7 +253,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/test5/status/200", headers = { - host = "httpbin.org", + host = "httpbin.test", apikey = "kong", }, }) @@ -283,7 +283,7 @@ for _, strategy in helpers.each_strategy() do local request = { method = "GET", path = "/headers", - headers = { host = "konghq.com", }, + headers = { host = "konghq.test", }, } -- make a request with a valid key, grab the cookie for later @@ -323,7 +323,7 @@ for _, strategy in helpers.each_strategy() do local request = { method = "GET", path = "/headers", - headers = { host = "mockbin.org", }, + headers = { host = "mockbin.test", }, } -- make a request with a valid key, grab the cookie for later diff --git a/spec/03-plugins/30-session/02-kong_storage_adapter_spec.lua b/spec/03-plugins/30-session/02-kong_storage_adapter_spec.lua index 20b9bf93d894..509f2556cd75 100644 --- a/spec/03-plugins/30-session/02-kong_storage_adapter_spec.lua +++ b/spec/03-plugins/30-session/02-kong_storage_adapter_spec.lua @@ -18,17 +18,17 @@ for _, strategy in helpers.each_strategy() do local route1 = bp.routes:insert { paths = {"/test1"}, - hosts = {"konghq.com"} + hosts = {"konghq.test"} } local route2 = bp.routes:insert { paths = {"/test2"}, - hosts = {"konghq.com"} + hosts = {"konghq.test"} } local route3 = bp.routes:insert { paths = {"/headers"}, - hosts = {"konghq.com"}, + hosts = {"konghq.test"}, } assert(bp.plugins:insert { @@ -145,7 +145,7 @@ for _, strategy in helpers.each_strategy() do local request = { method = "GET", path = "/test1/status/200", - headers = { host = "konghq.com", }, + headers = { host = "konghq.test", }, } -- make sure the anonymous consumer can't get in (request termination) @@ -188,7 +188,7 @@ for _, strategy in helpers.each_strategy() do local request = { method = "GET", path = "/test2/status/200", - headers = { host = "konghq.com", }, + headers = { host = "konghq.test", }, } local function send_requests(request, number, step) @@ -245,7 +245,7 @@ for _, strategy in helpers.each_strategy() 
do local request = { method = "GET", path = "/test2/status/200", - headers = { host = "konghq.com", }, + headers = { host = "konghq.test", }, } -- make sure the anonymous consumer can't get in (request termination) @@ -284,7 +284,7 @@ for _, strategy in helpers.each_strategy() do path = "/test2/status/200?session_logout=true", headers = { cookie = cookie, - host = "konghq.com", + host = "konghq.test", } })) assert.response(res).has.status(200) @@ -302,7 +302,7 @@ for _, strategy in helpers.each_strategy() do local request = { method = "GET", path = "/headers", - headers = { host = "konghq.com", }, + headers = { host = "konghq.test", }, } client = helpers.proxy_ssl_client() diff --git a/spec/03-plugins/31-proxy-cache/02-access_spec.lua b/spec/03-plugins/31-proxy-cache/02-access_spec.lua index 9498929906b9..aa8b350773d7 100644 --- a/spec/03-plugins/31-proxy-cache/02-access_spec.lua +++ b/spec/03-plugins/31-proxy-cache/02-access_spec.lua @@ -37,70 +37,70 @@ do strategy:flush(true) local route1 = assert(bp.routes:insert { - hosts = { "route-1.com" }, + hosts = { "route-1.test" }, }) local route2 = assert(bp.routes:insert { - hosts = { "route-2.com" }, + hosts = { "route-2.test" }, }) assert(bp.routes:insert { - hosts = { "route-3.com" }, + hosts = { "route-3.test" }, }) assert(bp.routes:insert { - hosts = { "route-4.com" }, + hosts = { "route-4.test" }, }) local route5 = assert(bp.routes:insert { - hosts = { "route-5.com" }, + hosts = { "route-5.test" }, }) local route6 = assert(bp.routes:insert { - hosts = { "route-6.com" }, + hosts = { "route-6.test" }, }) local route7 = assert(bp.routes:insert { - hosts = { "route-7.com" }, + hosts = { "route-7.test" }, }) local route8 = assert(bp.routes:insert { - hosts = { "route-8.com" }, + hosts = { "route-8.test" }, }) local route9 = assert(bp.routes:insert { - hosts = { "route-9.com" }, + hosts = { "route-9.test" }, }) local route10 = assert(bp.routes:insert { - hosts = { "route-10.com" }, + hosts = { "route-10.test" }, }) local route11 = assert(bp.routes:insert { - hosts = { "route-11.com" }, + hosts = { "route-11.test" }, }) local route12 = assert(bp.routes:insert { - hosts = { "route-12.com" }, + hosts = { "route-12.test" }, }) local route13 = assert(bp.routes:insert { - hosts = { "route-13.com" }, + hosts = { "route-13.test" }, }) local route14 = assert(bp.routes:insert { - hosts = { "route-14.com" }, + hosts = { "route-14.test" }, }) local route15 = assert(bp.routes:insert { - hosts = { "route-15.com" }, + hosts = { "route-15.test" }, }) local route16 = assert(bp.routes:insert { - hosts = { "route-16.com" }, + hosts = { "route-16.test" }, }) local route17 = assert(bp.routes:insert { - hosts = { "route-17.com" }, + hosts = { "route-17.test" }, }) local route18 = assert(bp.routes:insert { - hosts = { "route-18.com" }, + hosts = { "route-18.test" }, }) local route19 = assert(bp.routes:insert { - hosts = { "route-19.com" }, + hosts = { "route-19.test" }, }) local route20 = assert(bp.routes:insert { - hosts = { "route-20.com" }, + hosts = { "route-20.test" }, }) local route21 = assert(bp.routes:insert { - hosts = { "route-21.com" }, + hosts = { "route-21.test" }, }) local route22 = assert(bp.routes:insert({ - hosts = { "route-22.com" }, + hosts = { "route-22.test" }, })) local consumer1 = assert(bp.consumers:insert { @@ -368,7 +368,7 @@ do end) it("caches a simple request", function() - local res = assert(get(client, "route-1.com")) + local res = assert(get(client, "route-1.test")) local body1 = assert.res_status(200, res) assert.same("Miss", 
res.headers["X-Cache-Status"]) @@ -383,7 +383,7 @@ do -- return strategy:fetch(cache_key1) ~= nil --end, TIMEOUT) - local res = assert(get(client, "route-1.com")) + local res = assert(get(client, "route-1.test")) local body2 = assert.res_status(200, res) assert.same("Hit", res.headers["X-Cache-Status"]) @@ -397,17 +397,17 @@ do --cache_key = cache_key1 end) it("No X-Cache* neither age headers on the response without debug header in the query", function() - local res = assert(get(client, "route-22.com")) + local res = assert(get(client, "route-22.test")) assert.res_status(200, res) assert.is_nil(res.headers["X-Cache-Status"]) - res = assert(get(client, "route-22.com")) + res = assert(get(client, "route-22.test")) assert.res_status(200, res) assert.is_nil(res.headers["X-Cache-Status"]) assert.is_nil(res.headers["X-Cache-Key"]) assert.is_nil(res.headers["Age"]) res = assert(client:get("/get", { headers = { - Host = "route-22.com", + Host = "route-22.test", ["kong-debug"] = 1, }, })) @@ -417,7 +417,7 @@ do end) it("respects cache ttl", function() - local res = assert(get(client, "route-6.com")) + local res = assert(get(client, "route-6.test")) --local cache_key2 = res.headers["X-Cache-Key"] assert.res_status(200, res) @@ -428,7 +428,7 @@ do -- return strategy:fetch(cache_key2) ~= nil --end, TIMEOUT) - res = assert(get(client, "route-6.com")) + res = assert(get(client, "route-6.test")) assert.res_status(200, res) assert.same("Hit", res.headers["X-Cache-Status"]) @@ -446,7 +446,7 @@ do --end, TIMEOUT) -- and go through the cycle again - res = assert(get(client, "route-6.com")) + res = assert(get(client, "route-6.test")) assert.res_status(200, res) assert.same("Miss", res.headers["X-Cache-Status"]) @@ -457,13 +457,13 @@ do -- return strategy:fetch(cache_key) ~= nil --end, TIMEOUT) - res = assert(get(client, "route-6.com")) + res = assert(get(client, "route-6.test")) assert.res_status(200, res) assert.same("Hit", res.headers["X-Cache-Status"]) -- examine the behavior of keeping cache in memory for longer than ttl - res = assert(get(client, "route-9.com")) + res = assert(get(client, "route-9.test")) assert.res_status(200, res) assert.same("Miss", res.headers["X-Cache-Status"]) @@ -474,7 +474,7 @@ do -- return strategy:fetch(cache_key) ~= nil --end, TIMEOUT) - res = assert(get(client, "route-9.com")) + res = assert(get(client, "route-9.test")) assert.res_status(200, res) assert.same("Hit", res.headers["X-Cache-Status"]) @@ -493,12 +493,12 @@ do --end, TIMEOUT) -- and go through the cycle again - res = assert(get(client, "route-9.com")) + res = assert(get(client, "route-9.test")) assert.res_status(200, res) assert.same("Refresh", res.headers["X-Cache-Status"]) - res = assert(get(client, "route-9.com")) + res = assert(get(client, "route-9.test")) assert.res_status(200, res) assert.same("Hit", res.headers["X-Cache-Status"]) @@ -507,7 +507,7 @@ do it("respects cache ttl via cache control", function() local res = assert(client:get("/cache/2", { headers = { - host = "route-7.com", + host = "route-7.test", } })) @@ -522,7 +522,7 @@ do res = assert(client:get("/cache/2", { headers = { - host = "route-7.com", + host = "route-7.test", } })) @@ -542,7 +542,7 @@ do -- and go through the cycle again res = assert(client:get("/cache/2", { headers = { - host = "route-7.com", + host = "route-7.test", } })) @@ -556,7 +556,7 @@ do res = assert(client:get("/cache/2", { headers = { - host = "route-7.com", + host = "route-7.test", } })) @@ -566,7 +566,7 @@ do -- assert that max-age=0 never results in caching res = 
assert(client:get("/cache/0", { headers = { - host = "route-7.com", + host = "route-7.test", } })) @@ -575,7 +575,7 @@ do res = assert(client:get("/cache/0", { headers = { - host = "route-7.com", + host = "route-7.test", } })) @@ -588,7 +588,7 @@ do -- necessary to set it manually using /response-headers instead local res = assert(client:get("/response-headers?Cache-Control=max-age%3D604800", { headers = { - host = "route-7.com", + host = "route-7.test", } })) @@ -599,7 +599,7 @@ do it("Cache-Control contains s-maxage only", function() local res = assert(client:get("/response-headers?Cache-Control=s-maxage%3D604800", { headers = { - host = "route-7.com", + host = "route-7.test", } })) @@ -612,7 +612,7 @@ do local res = assert(client:get("/response-headers", { query = "Expires=" .. httpdate, headers = { - host = "route-7.com", + host = "route-7.test", } })) @@ -625,7 +625,7 @@ do -- bypass via unsatisfied min-fresh local res = assert(client:get("/cache/2", { headers = { - host = "route-7.com", + host = "route-7.test", ["Cache-Control"] = "min-fresh=30", } })) @@ -637,7 +637,7 @@ do it("max-age", function() local res = assert(client:get("/cache/10", { headers = { - host = "route-7.com", + host = "route-7.test", ["Cache-Control"] = "max-age=2", } })) @@ -653,7 +653,7 @@ do res = assert(client:get("/cache/10", { headers = { - host = "route-7.com", + host = "route-7.test", ["Cache-Control"] = "max-age=2", } })) @@ -675,7 +675,7 @@ do res = assert(client:get("/cache/10", { headers = { - host = "route-7.com", + host = "route-7.test", ["Cache-Control"] = "max-age=2", } })) @@ -687,7 +687,7 @@ do it("max-stale", function() local res = assert(client:get("/cache/2", { headers = { - host = "route-8.com", + host = "route-8.test", } })) @@ -702,7 +702,7 @@ do res = assert(client:get("/cache/2", { headers = { - host = "route-8.com", + host = "route-8.test", } })) @@ -722,7 +722,7 @@ do res = assert(client:get("/cache/2", { headers = { - host = "route-8.com", + host = "route-8.test", ["Cache-Control"] = "max-stale=1", } })) @@ -734,7 +734,7 @@ do it("only-if-cached", function() local res = assert(client:get("/get?not=here", { headers = { - host = "route-8.com", + host = "route-8.test", ["Cache-Control"] = "only-if-cached", } })) @@ -746,7 +746,7 @@ do it("caches a streaming request", function() local res = assert(client:get("/stream/3", { headers = { - host = "route-1.com", + host = "route-1.test", } })) @@ -762,7 +762,7 @@ do res = assert(client:get("/stream/3", { headers = { - host = "route-1.com", + host = "route-1.test", } })) @@ -774,7 +774,7 @@ do it("uses a separate cache key for the same consumer between routes", function() local res = assert(client:get("/get", { headers = { - host = "route-13.com", + host = "route-13.test", apikey = "bob", } })) @@ -783,7 +783,7 @@ do local res = assert(client:get("/get", { headers = { - host = "route-14.com", + host = "route-14.test", apikey = "bob", } })) @@ -796,7 +796,7 @@ do it("uses a separate cache key for the same consumer between routes/services", function() local res = assert(client:get("/get", { headers = { - host = "route-15.com", + host = "route-15.test", apikey = "bob", } })) @@ -805,7 +805,7 @@ do local res = assert(client:get("/get", { headers = { - host = "route-16.com", + host = "route-16.test", apikey = "bob", } })) @@ -816,7 +816,7 @@ do end) it("uses an separate cache key between routes-specific and a global plugin", function() - local res = assert(get(client, "route-3.com")) + local res = assert(get(client, "route-3.test")) 
assert.res_status(200, res) assert.same("Miss", res.headers["X-Cache-Status"]) @@ -825,7 +825,7 @@ do assert.matches("^[%w%d]+$", cache_key1) assert.equals(64, #cache_key1) - res = assert(get(client, "route-4.com")) + res = assert(get(client, "route-4.test")) assert.res_status(200, res) @@ -835,7 +835,7 @@ do end) it("differentiates caches between instances", function() - local res = assert(get(client, "route-2.com")) + local res = assert(get(client, "route-2.test")) assert.res_status(200, res) assert.same("Miss", res.headers["X-Cache-Status"]) @@ -849,7 +849,7 @@ do -- return strategy:fetch(cache_key1) ~= nil --end, TIMEOUT) - res = assert(get(client, "route-2.com")) + res = assert(get(client, "route-2.test")) local cache_key2 = res.headers["X-Cache-Key"] assert.res_status(200, res) @@ -860,7 +860,7 @@ do it("uses request params as part of the cache key", function() local res = assert(client:get("/get?a=b&b=c", { headers = { - host = "route-1.com", + host = "route-1.test", } })) @@ -869,7 +869,7 @@ do res = assert(client:get("/get?a=c", { headers = { - host = "route-1.com", + host = "route-1.test", } })) @@ -879,7 +879,7 @@ do res = assert(client:get("/get?b=c&a=b", { headers = { - host = "route-1.com", + host = "route-1.test", } })) @@ -888,7 +888,7 @@ do res = assert(client:get("/get?a&b", { headers = { - host = "route-1.com", + host = "route-1.test", } })) assert.res_status(200, res) @@ -896,7 +896,7 @@ do res = assert(client:get("/get?a&b", { headers = { - host = "route-1.com", + host = "route-1.test", } })) assert.res_status(200, res) @@ -906,7 +906,7 @@ do it("can focus only in a subset of the query arguments", function() local res = assert(client:get("/get?foo=b&b=c", { headers = { - host = "route-12.com", + host = "route-12.test", } })) @@ -922,7 +922,7 @@ do res = assert(client:get("/get?b=d&foo=b", { headers = { - host = "route-12.com", + host = "route-12.test", } })) @@ -934,7 +934,7 @@ do it("uses headers if instructed to do so", function() local res = assert(client:get("/get", { headers = { - host = "route-11.com", + host = "route-11.test", foo = "bar", } })) @@ -949,7 +949,7 @@ do res = assert(client:get("/get", { headers = { - host = "route-11.com", + host = "route-11.test", foo = "bar", } })) @@ -958,7 +958,7 @@ do res = assert(client:get("/get", { headers = { - host = "route-11.com", + host = "route-11.test", foo = "baz", } })) @@ -968,7 +968,7 @@ do describe("handles authenticated routes", function() it("by ignoring cache if the request is unauthenticated", function() - local res = assert(get(client, "route-5.com")) + local res = assert(get(client, "route-5.test")) assert.res_status(401, res) assert.is_nil(res.headers["X-Cache-Status"]) @@ -977,7 +977,7 @@ do it("by maintaining a separate cache per consumer", function() local res = assert(client:get("/get", { headers = { - host = "route-5.com", + host = "route-5.test", apikey = "bob", } })) @@ -987,7 +987,7 @@ do res = assert(client:get("/get", { headers = { - host = "route-5.com", + host = "route-5.test", apikey = "bob", } })) @@ -997,7 +997,7 @@ do local res = assert(client:get("/get", { headers = { - host = "route-5.com", + host = "route-5.test", apikey = "alice", } })) @@ -1007,7 +1007,7 @@ do res = assert(client:get("/get", { headers = { - host = "route-5.com", + host = "route-5.test", apikey = "alice", } })) @@ -1022,7 +1022,7 @@ do it("request method", function() local res = assert(client:post("/post", { headers = { - host = "route-1.com", + host = "route-1.test", ["Content-Type"] = "application/json", }, { @@ 
-1039,7 +1039,7 @@ do it("response status", function() local res = assert(client:get("/status/418", { headers = { - host = "route-1.com", + host = "route-1.test", }, })) @@ -1050,7 +1050,7 @@ do it("response content type", function() local res = assert(client:get("/xml", { headers = { - host = "route-1.com", + host = "route-1.test", }, })) @@ -1063,7 +1063,7 @@ do it("request methods", function() local res = assert(client:post("/post", { headers = { - host = "route-10.com", + host = "route-10.test", ["Content-Type"] = "application/json", }, { @@ -1082,7 +1082,7 @@ do res = assert(client:post("/post", { headers = { - host = "route-10.com", + host = "route-10.test", ["Content-Type"] = "application/json", }, { @@ -1097,7 +1097,7 @@ do it("response status", function() local res = assert(client:get("/status/417", { headers = { - host = "route-10.com", + host = "route-10.test", }, })) @@ -1106,7 +1106,7 @@ do res = assert(client:get("/status/417", { headers = { - host = "route-10.com", + host = "route-10.test", }, })) @@ -1120,7 +1120,7 @@ do it("X-Kong-Proxy-Latency", function() local res = assert(client:get("/get?show-me=proxy-latency", { headers = { - host = "route-1.com", + host = "route-1.test", } })) @@ -1130,7 +1130,7 @@ do res = assert(client:get("/get?show-me=proxy-latency", { headers = { - host = "route-1.com", + host = "route-1.test", } })) @@ -1142,7 +1142,7 @@ do it("X-Kong-Upstream-Latency", function() local res = assert(client:get("/get?show-me=upstream-latency", { headers = { - host = "route-1.com", + host = "route-1.test", } })) @@ -1158,7 +1158,7 @@ do res = assert(client:get("/get?show-me=upstream-latency", { headers = { - host = "route-1.com", + host = "route-1.test", } })) @@ -1174,7 +1174,7 @@ do method = "GET", path = "/xml", headers = { - host = "route-17.com", + host = "route-17.test", }, } @@ -1194,7 +1194,7 @@ do method = "GET", path = "/xml", headers = { - host = "route-18.com", + host = "route-18.test", }, }) @@ -1209,7 +1209,7 @@ do method = "GET", path = "/response-headers?Content-Type=application/xml;", headers = { - host = "route-18.com", + host = "route-18.test", }, }) @@ -1223,7 +1223,7 @@ do method = "GET", path = "/xml", headers = { - host = "route-19.com", + host = "route-19.test", }, } @@ -1239,7 +1239,7 @@ do method = "GET", path = "/ignore-case/kong", headers = { - host = "route-20.com", + host = "route-20.test", }, }) @@ -1254,7 +1254,7 @@ do method = "GET", path = "/ignore-case/KONG", headers = { - host = "route-20.com", + host = "route-20.test", }, } @@ -1271,7 +1271,7 @@ do method = "GET", path = "/acknowledge-case/kong", headers = { - host = "route-21.com", + host = "route-21.test", }, }) @@ -1287,7 +1287,7 @@ do method = "GET", path = "/acknowledge-case/KONG", headers = { - host = "route-21.com", + host = "route-21.test", }, }) diff --git a/spec/03-plugins/31-proxy-cache/03-api_spec.lua b/spec/03-plugins/31-proxy-cache/03-api_spec.lua index ddc6200fc1de..81191c8558d6 100644 --- a/spec/03-plugins/31-proxy-cache/03-api_spec.lua +++ b/spec/03-plugins/31-proxy-cache/03-api_spec.lua @@ -10,7 +10,7 @@ describe("Plugin: proxy-cache", function() bp = helpers.get_db_utils(nil, nil, {"proxy-cache"}) route1 = assert(bp.routes:insert { - hosts = { "route-1.com" }, + hosts = { "route-1.test" }, }) plugin1 = assert(bp.plugins:insert { name = "proxy-cache", @@ -32,7 +32,7 @@ describe("Plugin: proxy-cache", function() }) local route2 = assert(bp.routes:insert { - hosts = { "route-2.com" }, + hosts = { "route-2.test" }, }) assert(bp.plugins:insert { @@ -205,7 +205,7 
@@ describe("Plugin: proxy-cache", function() it("delete a cache entry", function() local res = assert(proxy_client:get("/get", { headers = { - host = "route-1.com", + host = "route-1.test", ["kong-debug"] = 1, } })) @@ -221,7 +221,7 @@ describe("Plugin: proxy-cache", function() res = assert(proxy_client:get("/get", { headers = { - host = "route-1.com", + host = "route-1.test", ["kong-debug"] = 1, } })) @@ -237,7 +237,7 @@ describe("Plugin: proxy-cache", function() local res = assert(proxy_client:get("/get", { headers = { - host = "route-1.com", + host = "route-1.test", ["kong-debug"] = 1, } })) @@ -251,7 +251,7 @@ describe("Plugin: proxy-cache", function() local res = assert(proxy_client:get("/get", { headers = { - host = "route-1.com", + host = "route-1.test", ["kong-debug"] = 1, } })) @@ -263,7 +263,7 @@ describe("Plugin: proxy-cache", function() -- make a `Hit` request to `route-1` local res = assert(proxy_client:get("/get", { headers = { - host = "route-1.com", + host = "route-1.test", ["kong-debug"] = 1, } })) @@ -273,7 +273,7 @@ describe("Plugin: proxy-cache", function() -- make a `Miss` request to `route-2` local res = assert(proxy_client:get("/get", { headers = { - host = "route-2.com", + host = "route-2.test", ["kong-debug"] = 1, } })) @@ -289,7 +289,7 @@ describe("Plugin: proxy-cache", function() -- make a `Hit` request to `route-1` res = assert(proxy_client:get("/get", { headers = { - host = "route-2.com", + host = "route-2.test", ["kong-debug"] = 1, } })) @@ -305,7 +305,7 @@ describe("Plugin: proxy-cache", function() local res = assert(proxy_client:get("/get", { headers = { - host = "route-1.com", + host = "route-1.test", ["kong-debug"] = 1, } })) @@ -315,7 +315,7 @@ describe("Plugin: proxy-cache", function() local res = assert(proxy_client:get("/get", { headers = { - host = "route-2.com", + host = "route-2.test", ["kong-debug"] = 1, } })) @@ -357,7 +357,7 @@ describe("Plugin: proxy-cache", function() -- add request to cache local res = assert(proxy_client:get("/get", { headers = { - host = "route-1.com", + host = "route-1.test", ["kong-debug"] = 1, } })) diff --git a/spec/03-plugins/31-proxy-cache/04-invalidations_spec.lua b/spec/03-plugins/31-proxy-cache/04-invalidations_spec.lua index fad2a933c38b..e21abd9cd4ed 100644 --- a/spec/03-plugins/31-proxy-cache/04-invalidations_spec.lua +++ b/spec/03-plugins/31-proxy-cache/04-invalidations_spec.lua @@ -30,11 +30,11 @@ describe("proxy-cache invalidations via: " .. strategy, function() bp = helpers.get_db_utils(strategy, nil, {"proxy-cache"}) route1 = assert(bp.routes:insert { - hosts = { "route-1.com" }, + hosts = { "route-1.test" }, }) route2 = assert(bp.routes:insert { - hosts = { "route-2.com" }, + hosts = { "route-2.test" }, }) plugin1 = assert(bp.plugins:insert { @@ -121,38 +121,38 @@ describe("proxy-cache invalidations via: " .. 
strategy, function() setup(function() -- prime cache entries on both instances - local res_1 = get(client_1, "route-1.com") + local res_1 = get(client_1, "route-1.test") assert.res_status(200, res_1) assert.same("Miss", res_1.headers["X-Cache-Status"]) cache_key = res_1.headers["X-Cache-Key"] - local res_2 = get(client_2, "route-1.com") + local res_2 = get(client_2, "route-1.test") assert.res_status(200, res_2) assert.same("Miss", res_2.headers["X-Cache-Status"]) assert.same(cache_key, res_2.headers["X-Cache-Key"]) - res_1 = get(client_1, "route-2.com") + res_1 = get(client_1, "route-2.test") assert.res_status(200, res_1) assert.same("Miss", res_1.headers["X-Cache-Status"]) cache_key2 = res_1.headers["X-Cache-Key"] assert.not_same(cache_key, cache_key2) - local res_2 = get(client_2, "route-2.com") + local res_2 = get(client_2, "route-2.test") assert.res_status(200, res_2) assert.same("Miss", res_2.headers["X-Cache-Status"]) end) it("propagates purges via cluster events mechanism", function() - local res_1 = get(client_1, "route-1.com") + local res_1 = get(client_1, "route-1.test") assert.res_status(200, res_1) assert.same("Hit", res_1.headers["X-Cache-Status"]) - local res_2 = get(client_2, "route-1.com") + local res_2 = get(client_2, "route-1.test") assert.res_status(200, res_2) assert.same("Hit", res_2.headers["X-Cache-Status"]) @@ -171,12 +171,12 @@ describe("proxy-cache invalidations via: " .. strategy, function() end, 10) -- refresh and purge with our second endpoint - res_1 = get(client_1, "route-1.com") + res_1 = get(client_1, "route-1.test") assert.res_status(200, res_1) assert.same("Miss", res_1.headers["X-Cache-Status"]) - res_2 = get(client_2, "route-1.com") + res_2 = get(client_2, "route-1.test") assert.res_status(200, res_2) assert.same("Miss", res_2.headers["X-Cache-Status"]) diff --git a/spec/03-plugins/33-serverless-functions/02-access_spec.lua b/spec/03-plugins/33-serverless-functions/02-access_spec.lua index 6c45606bd0c8..a4c382071c0a 100644 --- a/spec/03-plugins/33-serverless-functions/02-access_spec.lua +++ b/spec/03-plugins/33-serverless-functions/02-access_spec.lua @@ -127,67 +127,67 @@ for _, plugin_name in ipairs({ "pre-function", "post-function" }) do local route1 = bp.routes:insert { service = { id = service.id }, - hosts = { "one." .. plugin_name .. ".com" }, + hosts = { "one." .. plugin_name .. ".test" }, } local route2 = bp.routes:insert { service = { id = service.id }, - hosts = { "two." .. plugin_name .. ".com" }, + hosts = { "two." .. plugin_name .. ".test" }, } local route3 = bp.routes:insert { service = { id = service.id }, - hosts = { "three." .. plugin_name .. ".com" }, + hosts = { "three." .. plugin_name .. ".test" }, } local route4 = bp.routes:insert { service = { id = service.id }, - hosts = { "four." .. plugin_name .. ".com" }, + hosts = { "four." .. plugin_name .. ".test" }, } local route6 = bp.routes:insert { service = { id = service.id }, - hosts = { "six." .. plugin_name .. ".com" }, + hosts = { "six." .. plugin_name .. ".test" }, } local route7 = bp.routes:insert { service = { id = service.id }, - hosts = { "seven." .. plugin_name .. ".com" }, + hosts = { "seven." .. plugin_name .. ".test" }, } local route8 = bp.routes:insert { service = { id = service.id }, - hosts = { "eight." .. plugin_name .. ".com" }, + hosts = { "eight." .. plugin_name .. ".test" }, } local route9 = bp.routes:insert { service = { id = service.id }, - hosts = { "nine." .. plugin_name .. ".com" }, + hosts = { "nine." .. plugin_name .. 
".test" }, } local route10 = bp.routes:insert { service = { id = service.id }, - hosts = { "ten." .. plugin_name .. ".com" }, + hosts = { "ten." .. plugin_name .. ".test" }, } local route11 = bp.routes:insert { service = { id = service.id }, - hosts = { "eleven." .. plugin_name .. ".com" }, + hosts = { "eleven." .. plugin_name .. ".test" }, } local route12 = bp.routes:insert { service = { id = service.id }, - hosts = { "twelve." .. plugin_name .. ".com" }, + hosts = { "twelve." .. plugin_name .. ".test" }, } local route13 = bp.routes:insert { service = { id = service.id }, - hosts = { "thirteen." .. plugin_name .. ".com" }, + hosts = { "thirteen." .. plugin_name .. ".test" }, } local route14 = bp.routes:insert { service = { id = service.id }, - hosts = { "fourteen." .. plugin_name .. ".com" }, + hosts = { "fourteen." .. plugin_name .. ".test" }, } bp.plugins:insert { @@ -296,7 +296,7 @@ for _, plugin_name in ipairs({ "pre-function", "post-function" }) do method = "GET", path = "/status/200", headers = { - ["Host"] = "one." .. plugin_name .. ".com" + ["Host"] = "one." .. plugin_name .. ".test" } }) @@ -310,7 +310,7 @@ for _, plugin_name in ipairs({ "pre-function", "post-function" }) do method = "GET", path = "/status/200", headers = { - ["Host"] = "six." .. plugin_name .. ".com" + ["Host"] = "six." .. plugin_name .. ".test" } }) @@ -327,7 +327,7 @@ for _, plugin_name in ipairs({ "pre-function", "post-function" }) do method = "GET", path = "/status/200", headers = { - ["Host"] = "two." .. plugin_name .. ".com" + ["Host"] = "two." .. plugin_name .. ".test" } }) local body = assert.res_status(404, res) @@ -339,7 +339,7 @@ for _, plugin_name in ipairs({ "pre-function", "post-function" }) do method = "GET", path = "/status/200", headers = { - ["Host"] = "three." .. plugin_name .. ".com" + ["Host"] = "three." .. plugin_name .. ".test" } }) local body = assert.res_status(406, res) @@ -353,7 +353,7 @@ for _, plugin_name in ipairs({ "pre-function", "post-function" }) do method = "GET", path = "/status/200", headers = { - ["Host"] = "four." .. plugin_name .. ".com" + ["Host"] = "four." .. plugin_name .. ".test" } }) local body = assert.res_status(400, res) @@ -365,7 +365,7 @@ for _, plugin_name in ipairs({ "pre-function", "post-function" }) do method = "GET", path = "/status/200", headers = { - ["Host"] = "nine." .. plugin_name .. ".com" + ["Host"] = "nine." .. plugin_name .. ".test" } }) local body = assert.res_status(500, res) @@ -382,7 +382,7 @@ for _, plugin_name in ipairs({ "pre-function", "post-function" }) do method = "POST", path = "/status/200", headers = { - ["Host"] = "seven." .. plugin_name .. ".com", + ["Host"] = "seven." .. plugin_name .. ".test", ["Content-Length"] = #tostring(count), }, body = count, @@ -398,7 +398,7 @@ for _, plugin_name in ipairs({ "pre-function", "post-function" }) do method = "POST", path = "/status/200", headers = { - ["Host"] = "seven." .. plugin_name .. ".com", + ["Host"] = "seven." .. plugin_name .. ".test", ["Content-Length"] = #tostring(count), }, body = count, @@ -415,7 +415,7 @@ for _, plugin_name in ipairs({ "pre-function", "post-function" }) do method = "POST", path = "/status/200", headers = { - ["Host"] = "eight." .. plugin_name .. ".com", + ["Host"] = "eight." .. plugin_name .. ".test", ["Content-Length"] = #tostring(count), }, body = count, @@ -430,7 +430,7 @@ for _, plugin_name in ipairs({ "pre-function", "post-function" }) do method = "POST", path = "/status/200", headers = { - ["Host"] = "eight." .. plugin_name .. ".com", + ["Host"] = "eight." .. 
plugin_name .. ".test", ["Content-Length"] = #tostring(count), }, body = count, @@ -448,7 +448,7 @@ for _, plugin_name in ipairs({ "pre-function", "post-function" }) do method = "GET", path = "/status/200", headers = { - ["Host"] = "eleven." .. plugin_name .. ".com", + ["Host"] = "eleven." .. plugin_name .. ".test", }, }) local body = assert.res_status(200, res) @@ -461,7 +461,7 @@ for _, plugin_name in ipairs({ "pre-function", "post-function" }) do method = "GET", path = "/status/200", headers = { - ["Host"] = "twelve." .. plugin_name .. ".com", + ["Host"] = "twelve." .. plugin_name .. ".test", }, }) local body = assert.res_status(200, res) @@ -474,7 +474,7 @@ for _, plugin_name in ipairs({ "pre-function", "post-function" }) do method = "GET", path = "/status/200", headers = { - ["Host"] = "thirteen." .. plugin_name .. ".com", + ["Host"] = "thirteen." .. plugin_name .. ".test", }, }) local body = assert.res_status(200, res) @@ -487,7 +487,7 @@ for _, plugin_name in ipairs({ "pre-function", "post-function" }) do method = "GET", path = "/status/200", headers = { - ["Host"] = "fourteen." .. plugin_name .. ".com", + ["Host"] = "fourteen." .. plugin_name .. ".test", }, }) local body = assert.res_status(200, res) @@ -500,7 +500,7 @@ for _, plugin_name in ipairs({ "pre-function", "post-function" }) do it("does not crash even when query is cleared, #9246", function() local res = client:get("/status/200?a=b", { headers = { - ["Host"] = "ten." .. plugin_name .. ".com" + ["Host"] = "ten." .. plugin_name .. ".test" } }) local body = assert.res_status(200, res) diff --git a/spec/03-plugins/33-serverless-functions/04-phases_spec.lua b/spec/03-plugins/33-serverless-functions/04-phases_spec.lua index cc957b44bd7c..1c2610017444 100644 --- a/spec/03-plugins/33-serverless-functions/04-phases_spec.lua +++ b/spec/03-plugins/33-serverless-functions/04-phases_spec.lua @@ -34,7 +34,7 @@ for _, plugin_name in ipairs({ "pre-function", "post-function" }) do bp.routes:insert { service = { id = service.id }, - hosts = { "one." .. plugin_name .. ".com" }, + hosts = { "one." .. plugin_name .. ".test" }, } local config = {} @@ -72,7 +72,7 @@ for _, plugin_name in ipairs({ "pre-function", "post-function" }) do method = "GET", path = "/status/200", headers = { - ["Host"] = "one." .. plugin_name .. ".com" + ["Host"] = "one." .. plugin_name .. 
".test" } }) assert.response(res).has.status(200) diff --git a/spec/03-plugins/35-azure-functions/01-access_spec.lua b/spec/03-plugins/35-azure-functions/01-access_spec.lua index 7208cb9985bf..ca5125fe1faf 100644 --- a/spec/03-plugins/35-azure-functions/01-access_spec.lua +++ b/spec/03-plugins/35-azure-functions/01-access_spec.lua @@ -54,7 +54,7 @@ for _, strategy in helpers.each_strategy() do }) local route2 = db.routes:insert { - hosts = { "azure2.com" }, + hosts = { "azure2.test" }, protocols = { "http", "https" }, } @@ -86,7 +86,7 @@ for _, strategy in helpers.each_strategy() do config = { https = false, appname = "azure", - hostdomain = "example.com", + hostdomain = "example.test", routeprefix = "request", functionname = "test-func-name", apikey = "anything_but_an_API_key", @@ -99,12 +99,12 @@ for _, strategy in helpers.each_strategy() do } local route3 = db.routes:insert { - hosts = { "azure3.com" }, + hosts = { "azure3.test" }, protocols = { "http", "https" }, service = db.services:insert( { name = "azure3", - host = "azure.example.com", -- just mock service, it will not be requested + host = "azure.example.test", -- just mock service, it will not be requested port = 80, path = "/request", } @@ -120,7 +120,7 @@ for _, strategy in helpers.each_strategy() do config = { https = false, appname = "azure", - hostdomain = "example.com", + hostdomain = "example.test", routeprefix = "request", functionname = "test-func-name", apikey = "anything_but_an_API_key", @@ -129,7 +129,7 @@ for _, strategy in helpers.each_strategy() do } fixtures.dns_mock:A({ - name = "azure.example.com", + name = "azure.example.test", address = "127.0.0.1", }) @@ -162,7 +162,7 @@ for _, strategy in helpers.each_strategy() do path = "/", query = { hello = "world" }, headers = { - ["Host"] = "azure2.com" + ["Host"] = "azure2.test" } }) @@ -179,7 +179,7 @@ for _, strategy in helpers.each_strategy() do body = body, query = { hello = "world" }, headers = { - ["Host"] = "azure2.com" + ["Host"] = "azure2.test" } }) @@ -193,7 +193,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/and/then/some", headers = { - ["Host"] = "azure2.com" + ["Host"] = "azure2.test" } }) @@ -207,7 +207,7 @@ for _, strategy in helpers.each_strategy() do method = "POST", path = "/", headers = { - ["Host"] = "azure2.com" + ["Host"] = "azure2.test" } }) @@ -221,7 +221,7 @@ for _, strategy in helpers.each_strategy() do method = "GET", path = "/and/then/some", headers = { - ["Host"] = "azure2.com", + ["Host"] = "azure2.test", ["Just-A-Header"] = "just a value", } }) @@ -236,7 +236,7 @@ for _, strategy in helpers.each_strategy() do method = "POST", path = "/", headers = { - ["Host"] = "azure2.com" + ["Host"] = "azure2.test" } }) @@ -253,7 +253,7 @@ for _, strategy in helpers.each_strategy() do path = "/", query = { hello = "world" }, headers = { - ["Host"] = "azure2.com" + ["Host"] = "azure2.test" } }) @@ -266,7 +266,7 @@ for _, strategy in helpers.each_strategy() do path = "/", query = { hello = "world" }, headers = { - ["Host"] = "azure2.com" + ["Host"] = "azure2.test" } }) @@ -279,7 +279,7 @@ for _, strategy in helpers.each_strategy() do path = "/", query = { hello = "world" }, headers = { - ["Host"] = "azure3.com" + ["Host"] = "azure3.test" } }) diff --git a/spec/helpers.lua b/spec/helpers.lua index e6100913b09b..3bf41149dfa8 100644 --- a/spec/helpers.lua +++ b/spec/helpers.lua @@ -1950,9 +1950,9 @@ local function wait_for_all_config_update(opts) local upstream_id, target_id, service_id, route_id local stream_upstream_id, 
stream_target_id, stream_service_id, stream_route_id
   local consumer_id, rl_plugin_id, key_auth_plugin_id, credential_id
-  local upstream_name = "really.really.really.really.really.really.really.mocking.upstream.com"
+  local upstream_name = "really.really.really.really.really.really.really.mocking.upstream.test"
   local service_name = "really-really-really-really-really-really-really-mocking-service"
-  local stream_upstream_name = "stream-really.really.really.really.really.really.really.mocking.upstream.com"
+  local stream_upstream_name = "stream-really.really.really.really.really.really.really.mocking.upstream.test"
   local stream_service_name = "stream-really-really-really-really-really-really-really-mocking-service"
   local route_path = "/really-really-really-really-really-really-really-mocking-route"
   local key_header_name = "really-really-really-really-really-really-really-mocking-key"

From 81845c886ecf5b4f69e7901fdea403b0ab8214d3 Mon Sep 17 00:00:00 2001
From: Hisham Muhammad
Date: Tue, 28 Nov 2023 16:57:40 -0300
Subject: [PATCH 184/249] chore(deps): bump ngx_wasm_module to b51a15fc972540e6b8964e2fe1d86ebf67ca53aa

* chore(deps): bump ngx_wasm_module to b51a15fc972540e6b8964e2fe1d86ebf67ca53aa

Changes since ddb3fa8f7cacc81557144cf22706484eabd79a84:

* b51a15f - chore(*) add a .gitattributes file
* 9959389 - fix(*) resolve a possible segfault in the FFI
* 8c45ad1 - fix(*) proper filter modules order in dynamic OpenResty builds
* 33157a8 - feat(proxy-wasm) custom host properties getters/setters
* 81c703e - docs(*) minor fix for a title level
* db88b15 - fix(proxy-wasm) free dispatch calls during resume edge-case
* 5553ae0 - feat(proxy-wasm) strengthen host functions context checks
---
 .requirements | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.requirements b/.requirements
index d3543e59b819..fb8c572ff095 100644
--- a/.requirements
+++ b/.requirements
@@ -13,7 +13,7 @@
 LUA_RESTY_WEBSOCKET=60eafc3d7153bceb16e6327074e0afc3d94b1316 # 0.4.0
 ATC_ROUTER=7a2ad42d4246598ba1f753b6ae79cb1456040afa # 1.3.1
 KONG_MANAGER=nightly
-NGX_WASM_MODULE=ddb3fa8f7cacc81557144cf22706484eabd79a84
+NGX_WASM_MODULE=b51a15fc972540e6b8964e2fe1d86ebf67ca53aa
 WASMER=3.1.1
 WASMTIME=14.0.3
 V8=10.5.18

From a0a0be529c546454f00310b12d854ea230311e93 Mon Sep 17 00:00:00 2001
From: Michael Martin
Date: Mon, 16 Oct 2023 18:36:56 -0700
Subject: [PATCH 185/249] feat(wasm): add proxy-wasm dynamic getters/setters

Co-Authored-By: Hisham Muhammad
---
 .../kong/wasm-dynamic-properties.yml          |   5 +
 kong-3.6.0-0.rockspec                         |   1 +
 kong/runloop/wasm.lua                         | 172 ++++-
 kong/runloop/wasm/properties.lua              | 129 +++++
 .../20-wasm/04-proxy-wasm_spec.lua            | 462 ++++++++++++++++++
 .../proxy_wasm_filters/tests/src/test_http.rs |  16 +
 6 files changed, 771 insertions(+), 14 deletions(-)
 create mode 100644 changelog/unreleased/kong/wasm-dynamic-properties.yml
 create mode 100644 kong/runloop/wasm/properties.lua

diff --git a/changelog/unreleased/kong/wasm-dynamic-properties.yml b/changelog/unreleased/kong/wasm-dynamic-properties.yml
new file mode 100644
index 000000000000..4c8fb4d17b4a
--- /dev/null
+++ b/changelog/unreleased/kong/wasm-dynamic-properties.yml
@@ -0,0 +1,5 @@
+message: >
+  Extend support for getting and setting Gateway values via proxy-wasm
+  properties in the `kong.*` namespace.
+type: feature
+scope: Core
diff --git a/kong-3.6.0-0.rockspec b/kong-3.6.0-0.rockspec
index b722cafb7507..c49b7e137fb4 100644
--- a/kong-3.6.0-0.rockspec
+++ b/kong-3.6.0-0.rockspec
@@ -198,6 +198,7 @@ build = {
     ["kong.runloop.plugin_servers.mp_rpc"] = "kong/runloop/plugin_servers/mp_rpc.lua",
     ["kong.runloop.plugin_servers.pb_rpc"] = "kong/runloop/plugin_servers/pb_rpc.lua",
     ["kong.runloop.wasm"] = "kong/runloop/wasm.lua",
+    ["kong.runloop.wasm.properties"] = "kong/runloop/wasm/properties.lua",
 
     ["kong.workspaces"] = "kong/workspaces/init.lua",
 
diff --git a/kong/runloop/wasm.lua b/kong/runloop/wasm.lua
index 3ae3f7e8c029..004c08ea5658 100644
--- a/kong/runloop/wasm.lua
+++ b/kong/runloop/wasm.lua
@@ -40,6 +40,7 @@ local json_schema = require "kong.db.schema.json"
 local pl_file = require "pl.file"
 local pl_path = require "pl.path"
 local constants = require "kong.constants"
+local properties = require "kong.runloop.wasm.properties"
 
 
 ---@module 'resty.wasmx.proxy_wasm'
@@ -682,6 +683,155 @@ local function disable(reason)
 end
 
 
+local function register_property_handlers()
+  properties.reset()
+
+  properties.add_getter("kong.client.protocol", function(kong)
+    return true, kong.client.get_protocol(), true
+  end)
+
+  properties.add_getter("kong.nginx.subsystem", function(kong)
+    return true, kong.nginx.get_subsystem(), true
+  end)
+
+  properties.add_getter("kong.node.id", function(kong)
+    return true, kong.node.get_id(), true
+  end)
+
+  properties.add_getter("kong.node.memory_stats", function(kong)
+    local stats = kong.node.get_memory_stats()
+    if not stats then
+      return false
+    end
+    return true, cjson_encode(stats), false
+  end)
+
+  properties.add_getter("kong.request.forwarded_host", function(kong)
+    return true, kong.request.get_forwarded_host(), true
+  end)
+
+  properties.add_getter("kong.request.forwarded_port", function(kong)
+    return true, kong.request.get_forwarded_port(), true
+  end)
+
+  properties.add_getter("kong.request.forwarded_scheme", function(kong)
+    return true, kong.request.get_forwarded_scheme(), true
+  end)
+
+  properties.add_getter("kong.request.port", function(kong)
+    return true, kong.request.get_port(), true
+  end)
+
+  properties.add_getter("kong.response.source", function(kong)
+    return true, kong.response.get_source(), false
+  end)
+
+  properties.add_setter("kong.response.status", function(kong, _, _, status)
+    return true, kong.response.set_status(tonumber(status)), false
+  end)
+
+  properties.add_getter("kong.router.route", function(kong)
+    local route = kong.router.get_route()
+    if not route then
+      return true, nil, true
+    end
+    return true, cjson_encode(route), true
+  end)
+
+  properties.add_getter("kong.router.service", function(kong)
+    local service = kong.router.get_service()
+    if not service then
+      return true, nil, true
+    end
+    return true, cjson_encode(service), true
+  end)
+
+  properties.add_setter("kong.service.target", function(kong, _, _, target)
+    local host, port = target:match("^(.*):([0-9]+)$")
+    port = tonumber(port)
+    if not (host and port) then
+      return false
+    end
+
+    kong.service.set_target(host, port)
+    return true, target, false
+  end)
+
+  properties.add_setter("kong.service.upstream", function(kong, _, _, upstream)
+    local ok, err = kong.service.set_upstream(upstream)
+    if not ok then
+      kong.log.err(err)
+      return false
+    end
+
+    return true, upstream, false
+  end)
+
+  properties.add_setter("kong.service.request.scheme", function(kong, _, _, scheme)
+    kong.service.request.set_scheme(scheme)
+    return true, scheme, false
+  end)
+
+  properties.add_getter("kong.route_id", function(_, _, ctx)
+    local value = ctx.route and ctx.route.id
+    local ok = value ~= nil
+    local const = ok
+    return ok, value, const
+  end)
+
+  properties.add_getter("kong.service.response.status", function(kong)
+    return true, kong.service.response.get_status(), false
+  end)
+
+  properties.add_getter("kong.service_id", function(_, _, ctx)
+    local value = ctx.service and ctx.service.id
+    local ok = value ~= nil
+    local const = ok
+    return ok, value, const
+  end)
+
+  properties.add_getter("kong.version", function(kong)
+    return true, kong.version, true
+  end)
+
+  properties.add_namespace_handlers("kong.ctx.shared",
+    function(kong, _, _, key)
+      local value = kong.ctx.shared[key]
+      local ok = value ~= nil
+      value = ok and tostring(value) or nil
+      return ok, value, false
+    end,
+
+    function(kong, _, _, key, value)
+      kong.ctx.shared[key] = value
+      return true
+    end
+  )
+
+  properties.add_namespace_handlers("kong.configuration",
+    function(kong, _, _, key)
+      local value = kong.configuration[key]
+      if value ~= nil then
+        if type(value) == "table" then
+          value = cjson_decode(value)
+        else
+          value = tostring(value)
+        end
+
+        return true, value, true
+      end
+
+      return false
+    end,
+
+    function()
+      -- kong.configuration is read-only: setter rejects all
+      return false
+    end
+  )
+end
+
+
 local function enable(kong_config)
   set_available_filters(kong_config.wasm_modules_parsed)
 
@@ -690,6 +840,8 @@ local function enable(kong_config)
 
   proxy_wasm = proxy_wasm or require "resty.wasmx.proxy_wasm"
 
+  register_property_handlers()
+
   ENABLED = true
   STATUS = STATUS_ENABLED
 end
@@ -746,18 +898,6 @@ function _M.init_worker()
 end
 
 
-local function set_proxy_wasm_property(property, value)
-  if not value then
-    return
-  end
-
-  local ok, err = proxy_wasm.set_property(property, value)
-  if not ok then
-    log(ERR, "failed to set proxy-wasm '", property, "' property: ", err)
-  end
-end
-
-
 ---
 -- Lookup and execute the filter chain that applies to the current request
 -- (if any).
@@ -788,8 +928,12 @@ function _M.attach(ctx)
     return kong.response.error(500)
   end
 
-  set_proxy_wasm_property("kong.route_id", ctx.route and ctx.route.id)
-  set_proxy_wasm_property("kong.service_id", ctx.service and ctx.service.id)
+  ok, err = proxy_wasm.set_host_properties_handlers(properties.get,
+                                                    properties.set)
+  if not ok then
+    log(CRIT, "failed setting host property handlers: ", err)
+    return kong.response.error(500)
+  end
 
   ok, err = proxy_wasm.start()
   if not ok then
diff --git a/kong/runloop/wasm/properties.lua b/kong/runloop/wasm/properties.lua
new file mode 100644
index 000000000000..14ef3feae80e
--- /dev/null
+++ b/kong/runloop/wasm/properties.lua
@@ -0,0 +1,129 @@
+local _M = {}
+
+local clear_tab = require "table.clear"
+
+local kong = kong
+local ngx = ngx
+
+
+local simple_getters = {}
+local simple_setters = {}
+local namespace_handlers = {}
+
+local get_namespace, rebuild_namespaces
+do
+  local patterns = {}
+  local handlers = {}
+  local namespaces_len = 0
+
+  function rebuild_namespaces()
+    clear_tab(patterns)
+    clear_tab(handlers)
+
+    for ns, handler in pairs(namespace_handlers) do
+      table.insert(patterns, ns .. ".")
+      table.insert(handlers, handler)
+    end
+
+    namespaces_len = #patterns
+  end
+
+  local find = string.find
+  local sub = string.sub
+
+  ---@param property string
+  ---@return table? namespace
+  ---@return string?
key + function get_namespace(property) + for i = 1, namespaces_len do + local from, to = find(property, patterns[i], nil, true) + if from == 1 then + local key = sub(property, to + 1) + return handlers[i], key + end + end + end +end + + +function _M.reset() + clear_tab(simple_getters) + clear_tab(simple_setters) + clear_tab(namespace_handlers) + rebuild_namespaces() +end + + +function _M.add_getter(name, handler) + assert(type(name) == "string") + assert(type(handler) == "function") + + simple_getters[name] = handler +end + + +function _M.add_setter(name, handler) + assert(type(name) == "string") + assert(type(handler) == "function") + + simple_setters[name] = handler +end + + +function _M.add_namespace_handlers(name, get, set) + assert(type(name) == "string") + assert(type(get) == "function") + assert(type(set) == "function") + + namespace_handlers[name] = { get = get, set = set } + rebuild_namespaces() +end + + +---@param name string +---@return boolean? ok +---@return string? value_or_error +---@return boolean? is_const +function _M.get(name) + local ok, value, const = false, nil, nil + + local getter = simple_getters[name] + if getter then + ok, value, const = getter(kong, ngx, ngx.ctx) + + else + local ns, key = get_namespace(name) + + if ns then + ok, value, const = ns.get(kong, ngx, ngx.ctx, key) + end + end + + return ok, value, const +end + + +---@param name string +---@param value string|nil +---@return boolean? ok +---@return string? cached_value +---@return boolean? is_const +function _M.set(name, value) + local ok, cached_value, const = false, nil, nil + + local setter = simple_setters[name] + if setter then + ok, cached_value, const = setter(kong, ngx, ngx.ctx, value) + + else + local ns, key = get_namespace(name) + if ns then + ok, cached_value, const = ns.set(kong, ngx, ngx.ctx, key, value) + end + end + + return ok, cached_value, const +end + + +return _M diff --git a/spec/02-integration/20-wasm/04-proxy-wasm_spec.lua b/spec/02-integration/20-wasm/04-proxy-wasm_spec.lua index 86305377b680..96e610f78fe8 100644 --- a/spec/02-integration/20-wasm/04-proxy-wasm_spec.lua +++ b/spec/02-integration/20-wasm/04-proxy-wasm_spec.lua @@ -8,6 +8,9 @@ local HEADER_NAME_INPUT = "X-PW-Input" local HEADER_NAME_DISPATCH_ECHO = "X-PW-Dispatch-Echo" local HEADER_NAME_ADD_REQ_HEADER = "X-PW-Add-Header" local HEADER_NAME_ADD_RESP_HEADER = "X-PW-Add-Resp-Header" +local HEADER_NAME_LUA_PROPERTY = "X-Lua-Property" +local HEADER_NAME_LUA_VALUE = "X-Lua-Value" +local UUID_PATTERN = "%x%x%x%x%x%x%x%x%-%x%x%x%x%-%x%x%x%x%-%x%x%x%x%-%x%x%x%x%x%x%x%x%x%x%x%x" local DNS_HOSTNAME = "wasm.test" local MOCK_UPSTREAM_DNS_ADDR = DNS_HOSTNAME .. ":" .. helpers.mock_upstream_port @@ -36,6 +39,15 @@ describe("proxy-wasm filters (#wasm) (#" .. strategy .. ")", function() port = helpers.mock_upstream_port, }) + local mock_upstream = assert(bp.upstreams:insert { + name = "mock_upstream", + }) + + assert(bp.targets:insert { + upstream = { id = mock_upstream.id }, + target = helpers.mock_upstream_host .. ":" .. helpers.mock_upstream_port, + }) + r_single = assert(bp.routes:insert { paths = { "/single" }, strip_path = true, @@ -63,6 +75,58 @@ describe("proxy-wasm filters (#wasm) (#" .. strategy .. 
")", function() }, }) + local r_lua = assert(bp.routes:insert { + paths = { "/lua" }, + strip_path = true, + service = mock_service, + }) + + assert(bp.filter_chains:insert { + route = r_lua, + filters = { + { name = "tests" }, + }, + }) + + assert(bp.plugins:insert { + name = "pre-function", + config = { + access = {([[ + local property = kong.request.get_header(%q) + + if property then + local value = kong.request.get_header(%q) + kong.log.notice("Setting kong.ctx.shared.", property, " to '", value, "'") + kong.ctx.shared[property] = value + end + ]]):format(HEADER_NAME_LUA_PROPERTY, HEADER_NAME_LUA_VALUE) + }, + }, + }) + + assert(bp.plugins:insert { + name = "post-function", + config = { + header_filter = {([[ + local property = kong.request.get_header(%q) + if property then + local value = kong.ctx.shared[property] + local header = %q + + if value then + kong.log.notice("Setting ", header, " response header to '", value, "'") + kong.response.set_header(header, value) + else + kong.log.notice("Clearing ", header, " response header") + kong.response.clear_header(header) + end + end + ]]):format(HEADER_NAME_LUA_PROPERTY, HEADER_NAME_LUA_VALUE) + }, + }, + }) + + -- XXX our dns mock fixture doesn't work when called from wasm land hosts_file = os.tmpname() assert(helpers.file.write(hosts_file, @@ -73,6 +137,7 @@ describe("proxy-wasm filters (#wasm) (#" .. strategy .. ")", function() nginx_conf = "spec/fixtures/custom_nginx.template", wasm = true, dns_hostsfile = hosts_file, + plugins = "pre-function,post-function", })) end) @@ -256,6 +321,337 @@ describe("proxy-wasm filters (#wasm) (#" .. strategy .. ")", function() assert.logfile().has.no.line("[crit]", true, 0) end) + it("read kong.client.protocol", function() + local client = helpers.proxy_client() + finally(function() client:close() end) + + local res = assert(client:send { + method = "GET", + path = "/single/status/200", + headers = { + [HEADER_NAME_TEST] = "get_kong_property", + [HEADER_NAME_INPUT] = "client.protocol", + [HEADER_NAME_DISPATCH_ECHO] = "on", + } + }) + + local body = assert.res_status(200, res) + assert.equal("http", body) + assert.logfile().has.no.line("[error]", true, 0) + assert.logfile().has.no.line("[crit]", true, 0) + end) + + it("read kong.nginx.subsystem", function() + local client = helpers.proxy_client() + finally(function() client:close() end) + + local res = assert(client:send { + method = "GET", + path = "/single/status/200", + headers = { + [HEADER_NAME_TEST] = "get_kong_property", + [HEADER_NAME_INPUT] = "nginx.subsystem", + [HEADER_NAME_DISPATCH_ECHO] = "on", + } + }) + + local body = assert.res_status(200, res) + assert.equal("http", body) + assert.logfile().has.no.line("[error]", true, 0) + assert.logfile().has.no.line("[crit]", true, 0) + end) + + it("read kong.node.id", function() + local client = helpers.proxy_client() + finally(function() client:close() end) + + local res = assert(client:send { + method = "GET", + path = "/single/status/200", + headers = { + [HEADER_NAME_TEST] = "get_kong_property", + [HEADER_NAME_INPUT] = "node.id", + [HEADER_NAME_DISPATCH_ECHO] = "on", + } + }) + + local body = assert.res_status(200, res) + assert.matches(UUID_PATTERN, body) + assert.logfile().has.no.line("[error]", true, 0) + assert.logfile().has.no.line("[crit]", true, 0) + end) + + it("read kong.node.memory_stats", function() + local client = helpers.proxy_client() + finally(function() client:close() end) + + local res = assert(client:send { + method = "GET", + path = "/single/status/200", + headers = { + 
[HEADER_NAME_TEST] = "get_kong_property", + [HEADER_NAME_INPUT] = "node.memory_stats", + [HEADER_NAME_DISPATCH_ECHO] = "on", + } + }) + + local body = assert.res_status(200, res) + assert.matches("{.*lua_shared_dicts.*}", body) + assert.logfile().has.no.line("[error]", true, 0) + assert.logfile().has.no.line("[crit]", true, 0) + end) + + it("read kong.request.forwarded_host", function() + local client = helpers.proxy_client() + finally(function() client:close() end) + + local res = assert(client:send { + method = "GET", + path = "/single/status/200", + headers = { + [HEADER_NAME_TEST] = "get_kong_property", + [HEADER_NAME_INPUT] = "request.forwarded_host", + [HEADER_NAME_DISPATCH_ECHO] = "on", + } + }) + + local body = assert.res_status(200, res) + assert.matches("^[a-z.0-9%-]+$", body) + assert.logfile().has.no.line("[error]", true, 0) + assert.logfile().has.no.line("[crit]", true, 0) + end) + + it("read kong.request.forwarded_port", function() + local client = helpers.proxy_client() + finally(function() client:close() end) + + local res = assert(client:send { + method = "GET", + path = "/single/status/200", + headers = { + [HEADER_NAME_TEST] = "get_kong_property", + [HEADER_NAME_INPUT] = "request.forwarded_port", + [HEADER_NAME_DISPATCH_ECHO] = "on", + } + }) + + local body = assert.res_status(200, res) + assert.matches("^[0-9]+$", body) + assert.logfile().has.no.line("[error]", true, 0) + assert.logfile().has.no.line("[crit]", true, 0) + end) + + it("read kong.request.forwarded_scheme", function() + local client = helpers.proxy_client() + finally(function() client:close() end) + + local res = assert(client:send { + method = "GET", + path = "/single/status/200", + headers = { + [HEADER_NAME_TEST] = "get_kong_property", + [HEADER_NAME_INPUT] = "request.forwarded_scheme", + [HEADER_NAME_DISPATCH_ECHO] = "on", + } + }) + + local body = assert.res_status(200, res) + assert.equal("http", body) + assert.logfile().has.no.line("[error]", true, 0) + assert.logfile().has.no.line("[crit]", true, 0) + end) + + pending("read kong.response.source", function() + local client = helpers.proxy_client() + finally(function() client:close() end) + + local res = assert(client:send { + method = "GET", + path = "/single/status/200", + headers = { + [HEADER_NAME_PHASE] = "log", + [HEADER_NAME_TEST] = "get_kong_property", + [HEADER_NAME_INPUT] = "response.source", + [HEADER_NAME_DISPATCH_ECHO] = "on", + } + }) + + local body = assert.res_status(200, res) + assert.equal("service", body) + assert.logfile().has.no.line("[error]", true, 0) + assert.logfile().has.no.line("[crit]", true, 0) + end) + + it("read kong.router.route", function() + local client = helpers.proxy_client() + finally(function() client:close() end) + + local res = assert(client:send { + method = "GET", + path = "/single/status/200", + headers = { + [HEADER_NAME_TEST] = "get_kong_property", + [HEADER_NAME_INPUT] = "router.route", + [HEADER_NAME_DISPATCH_ECHO] = "on", + } + }) + + local body = assert.res_status(200, res) + local json = cjson.decode(body) + assert.equal(json.id, r_single.id) + assert.logfile().has.no.line("[error]", true, 0) + assert.logfile().has.no.line("[crit]", true, 0) + end) + + it("read kong.router.service", function() + local client = helpers.proxy_client() + finally(function() client:close() end) + + local res = assert(client:send { + method = "GET", + path = "/single/status/200", + headers = { + [HEADER_NAME_TEST] = "get_kong_property", + [HEADER_NAME_INPUT] = "router.service", + [HEADER_NAME_DISPATCH_ECHO] = "on", + } + }) 
+ + local body = assert.res_status(200, res) + local json = cjson.decode(body) + assert.equal(json.id, mock_service.id) + assert.logfile().has.no.line("[error]", true, 0) + assert.logfile().has.no.line("[crit]", true, 0) + end) + + it("write kong.service.target", function() + local client = helpers.proxy_client() + finally(function() client:close() end) + + local target = helpers.mock_upstream_host .. ":" .. + helpers.mock_upstream_port + + local res = assert(client:send { + method = "GET", + path = "/single/status/200", + headers = { + [HEADER_NAME_TEST] = "set_kong_property", + [HEADER_NAME_INPUT] = "service.target=" .. target, + [HEADER_NAME_DISPATCH_ECHO] = "on", + } + }) + + assert.res_status(200, res) + -- TODO read back property + assert.logfile().has.no.line("[error]", true, 0) + assert.logfile().has.no.line("[crit]", true, 0) + end) + + -- observing weird behavior in this one: + -- target is being set to mock_upstream:15555 instead of + -- 127.0.0.1:1555 as expected... + pending("write kong.service.upstream", function() + local client = helpers.proxy_client() + finally(function() client:close() end) + + local res = assert(client:send { + method = "GET", + path = "/single/status/200", + headers = { + [HEADER_NAME_PHASE] = "request_headers", + [HEADER_NAME_TEST] = "set_kong_property", + [HEADER_NAME_INPUT] = "service.upstream=mock_upstream", + [HEADER_NAME_DISPATCH_ECHO] = "on", + } + }) + + assert.res_status(200, res) + -- TODO read back property + assert.logfile().has.no.line("[error]", true, 0) + assert.logfile().has.no.line("[crit]", true, 0) + end) + + it("write kong.service.request.scheme", function() + local client = helpers.proxy_client() + finally(function() client:close() end) + + local res = assert(client:send { + method = "GET", + path = "/single/status/200", + headers = { + [HEADER_NAME_TEST] = "set_kong_property", + [HEADER_NAME_INPUT] = "service.request.scheme=http", + [HEADER_NAME_DISPATCH_ECHO] = "on", + } + }) + + assert.res_status(200, res) + assert.logfile().has.no.line("[error]", true, 0) + assert.logfile().has.no.line("[crit]", true, 0) + end) + + pending("read kong.service.response.status", function() + local client = helpers.proxy_client() + finally(function() client:close() end) + + local res = assert(client:send { + method = "GET", + path = "/single/status/200", + headers = { + [HEADER_NAME_PHASE] = "log", + [HEADER_NAME_TEST] = "get_kong_property", + [HEADER_NAME_INPUT] = "service.response.status", + [HEADER_NAME_DISPATCH_ECHO] = "on", + } + }) + + local body = assert.res_status(200, res) + assert.equal("200", body) + assert.logfile().has.no.line("[error]", true, 0) + assert.logfile().has.no.line("[crit]", true, 0) + end) + + it("write kong.response.status", function() + local client = helpers.proxy_client() + finally(function() client:close() end) + + local res = assert(client:send { + method = "GET", + path = "/single/status/200", + headers = { + [HEADER_NAME_PHASE] = "response_headers", + [HEADER_NAME_TEST] = "set_kong_property", + [HEADER_NAME_INPUT] = "response.status=203", + [HEADER_NAME_DISPATCH_ECHO] = "on", + } + }) + + assert.res_status(203, res) + -- TODO read back property + assert.logfile().has.no.line("[error]", true, 0) + assert.logfile().has.no.line("[crit]", true, 0) + end) + + it("read kong.configuration", function() + local client = helpers.proxy_client() + finally(function() client:close() end) + + local res = assert(client:send { + method = "GET", + path = "/single/status/200", + headers = { + [HEADER_NAME_TEST] = "get_kong_property", 
+ [HEADER_NAME_INPUT] = "configuration.role", + [HEADER_NAME_DISPATCH_ECHO] = "on", + } + }) + + local body = assert.res_status(200, res) + assert.equal("traditional", body) + assert.logfile().has.no.line("[error]", true, 0) + assert.logfile().has.no.line("[crit]", true, 0) + end) + it("read kong.route_id", function() local client = helpers.proxy_client() finally(function() client:close() end) @@ -296,6 +692,72 @@ describe("proxy-wasm filters (#wasm) (#" .. strategy .. ")", function() assert.logfile().has.no.line("[crit]", true, 0) end) + it("read kong.ctx.shared[]", function() + local client = helpers.proxy_client() + finally(function() client:close() end) + + local res = assert(client:send { + method = "GET", + path = "/lua/status/200", + headers = { + [HEADER_NAME_LUA_PROPERTY] = "foo", + [HEADER_NAME_LUA_VALUE] = "bar", + [HEADER_NAME_TEST] = "get_kong_property", + [HEADER_NAME_INPUT] = "ctx.shared.foo", + [HEADER_NAME_DISPATCH_ECHO] = "on", + } + }) + + local body = assert.res_status(200, res) + assert.equal("bar", body) + assert.logfile().has.no.line("[error]", true, 0) + assert.logfile().has.no.line("[crit]", true, 0) + end) + + it("write kong.ctx.shared[]", function() + local client = helpers.proxy_client() + finally(function() client:close() end) + + local res = assert(client:send { + method = "GET", + path = "/lua/status/200", + headers = { + [HEADER_NAME_LUA_PROPERTY] = "foo", + [HEADER_NAME_TEST] = "set_kong_property", + [HEADER_NAME_INPUT] = "ctx.shared.foo=bar", + [HEADER_NAME_DISPATCH_ECHO] = "on", + } + }) + + assert.res_status(200, res) + local value = assert.response(res).has.header(HEADER_NAME_LUA_VALUE) + assert.same("bar", value) + assert.logfile().has.no.line("[error]", true, 0) + assert.logfile().has.no.line("[crit]", true, 0) + end) + + it("clear kong.ctx.shared[]", function() + local client = helpers.proxy_client() + finally(function() client:close() end) + + local res = assert(client:send { + method = "GET", + path = "/lua/status/200", + headers = { + [HEADER_NAME_LUA_PROPERTY] = "foo", + [HEADER_NAME_LUA_VALUE] = "bar", + [HEADER_NAME_TEST] = "set_kong_property", + [HEADER_NAME_INPUT] = "ctx.shared.foo", + [HEADER_NAME_DISPATCH_ECHO] = "on", + } + }) + + assert.res_status(200, res) + assert.response(res).has.no.header(HEADER_NAME_LUA_VALUE) + assert.logfile().has.no.line("[error]", true, 0) + assert.logfile().has.no.line("[crit]", true, 0) + end) + it("send an http dispatch, return its response body", function() local client = helpers.proxy_client() finally(function() client:close() end) diff --git a/spec/fixtures/proxy_wasm_filters/tests/src/test_http.rs b/spec/fixtures/proxy_wasm_filters/tests/src/test_http.rs index 651ee154478b..83da6555d6a8 100644 --- a/spec/fixtures/proxy_wasm_filters/tests/src/test_http.rs +++ b/spec/fixtures/proxy_wasm_filters/tests/src/test_http.rs @@ -20,6 +20,11 @@ impl TestHttp { } } + fn set_prop(&self, ns: &str, prop: &str, value: Option<&str>) { + let value: Option<&[u8]> = value.map(|v| v.as_bytes()); + self.set_property(vec![ns, prop], value); + } + fn send_http_dispatch(&mut self, config: TestConfig) -> Action { let mut timeout = Duration::from_secs(0); let mut headers = Vec::new(); @@ -112,6 +117,17 @@ impl TestHttp { info!("[proxy-wasm] kong.{}: \"{:?}\"", name, value); self.send_plain_response(StatusCode::OK, Some(&value)) } + "set_kong_property" => { + if let Some(input) = opt_input { + let (key, value) = match input.split_once('=') { + Some((key, value)) => (key, Some(value)), + None => (input.as_ref(), None), + }; + + 
self.set_prop("kong", key, value); + info!("[proxy-wasm] kong.{} = \"{:?}\"", key, value); + } + } "echo_http_dispatch" => { let config = TestConfig::from_str(&opt_input.unwrap_or("".to_string())) .expect("invalid configuration"); From a796ac6105b60fbd0e85c24281b9f91cf19bdeaf Mon Sep 17 00:00:00 2001 From: Zhongwei Yao Date: Thu, 16 Nov 2023 14:19:01 -0300 Subject: [PATCH 186/249] fix(wasm): disable JIT for proxy_wasm launch This prevents triggering a LuaJIT issue when attempting to call an FFI callback with an ongoing trace further down the stack; attempting to do so can trigger a "bad callback" assertion. Stack trace demonstrating the issue in question: ``` from /home/zhongweiyao/projects/kong/bazel-bin/build/kong-dev/openresty/nginx/sbin/../modules/ngx_wasm_module.so at /home/zhongweiyao/.cache/bazel/_bazel_zhongweiyao/7df4419a5ca351a16fa75df771d28bc8/execroot/kong/external/ngx_wasm_module/src/wasm/wrt/ngx_wrt_wasmtime.c:657 at /home/zhongweiyao/.cache/bazel/_bazel_zhongweiyao/7df4419a5ca351a16fa75df771d28bc8/execroot/kong/external/ngx_wasm_module/src/wasm/vm/ngx_wavm.c:1107 at /home/zhongweiyao/.cache/bazel/_bazel_zhongweiyao/7df4419a5ca351a16fa75df771d28bc8/execroot/kong/external/ngx_wasm_module/src/wasm/vm/ngx_wavm.c:1184 at /home/zhongweiyao/.cache/bazel/_bazel_zhongweiyao/7df4419a5ca351a16fa75df771d28bc8/execroot/kong/external/ngx_wasm_module/src/wasm/vm/ngx_wavm.c:1287 at /home/zhongweiyao/.cache/bazel/_bazel_zhongweiyao/7df4419a5ca351a16fa75df771d28bc8/execroot/kong/external/ngx_wasm_module/src/http/proxy_wasm/ngx_http_proxy_wasm.c:40 at /home/zhongweiyao/.cache/bazel/_bazel_zhongweiyao/7df4419a5ca351a16fa75df771d28bc8/execroot/kong/external/ngx_wasm_module/src/http/proxy_wasm/ngx_http_proxy_wasm.c:411 at /home/zhongweiyao/.cache/bazel/_bazel_zhongweiyao/7df4419a5ca351a16fa75df771d28bc8/execroot/kong/external/ngx_wasm_module/src/common/proxy_wasm/ngx_proxy_wasm.c:658 at /home/zhongweiyao/.cache/bazel/_bazel_zhongweiyao/7df4419a5ca351a16fa75df771d28bc8/execroot/kong/external/ngx_wasm_module/src/common/proxy_wasm/ngx_proxy_wasm.c:783 at /home/zhongweiyao/.cache/bazel/_bazel_zhongweiyao/7df4419a5ca351a16fa75df771d28bc8/execroot/kong/external/ngx_wasm_module/src/wasm/ngx_wasm_ops.c:417 at /home/zhongweiyao/.cache/bazel/_bazel_zhongweiyao/7df4419a5ca351a16fa75df771d28bc8/execroot/kong/external/ngx_wasm_module/src/wasm/ngx_wasm_ops.c:290 at /home/zhongweiyao/.cache/bazel/_bazel_zhongweiyao/7df4419a5ca351a16fa75df771d28bc8/execroot/kong/external/ngx_wasm_module/src/common/lua/ngx_wasm_lua_ffi.c:164 at ../ngx_lua-0.10.25/src/ngx_http_lua_util.c:1184 respawn=-3) at src/os/unix/ngx_process.c:199 ``` The problem arises when Wasm code eventually calls the FFI callback which triggers Lua code while having an ongoing trace in the stack (see frame 12, `TRACE_1054`, in the example above). 
Eventually the LuaJIT callback crashes like this: ``` at /home/zhongweiyao/.cache/bazel/_bazel_zhongweiyao/7df4419a5ca351a16fa75df771d28bc8/execroot/kong/external/ngx_wasm_module/src/common/proxy_wasm/ngx_proxy_wasm_properties.c:1058 at /home/zhongweiyao/.cache/bazel/_bazel_zhongweiyao/7df4419a5ca351a16fa75df771d28bc8/execroot/kong/external/ngx_wasm_module/src/common/proxy_wasm/ngx_proxy_wasm_host.c:780 at /home/zhongweiyao/.cache/bazel/_bazel_zhongweiyao/7df4419a5ca351a16fa75df771d28bc8/execroot/kong/external/ngx_wasm_module/src/wasm/vm/ngx_wavm_host.c:265 from /home/zhongweiyao/projects/kong/bazel-bin/build/kong-dev/openresty/nginx/sbin/../modules/ngx_wasm_module.so from /home/zhongweiyao/projects/kong/bazel-bin/build/kong-dev/openresty/nginx/sbin/../modules/ngx_wasm_module.so from /home/zhongweiyao/projects/kong/bazel-bin/build/kong-dev/openresty/nginx/sbin/../modules/ngx_wasm_module.so from /home/zhongweiyao/projects/kong/bazel-bin/build/kong-dev/openresty/nginx/sbin/../modules/ngx_wasm_module.so ... ``` Here's some sample minimal code to reproduce the LuaJIT issue outside of the Gateway: ```lua -- Lua code local ffi = require("ffi") local C = ffi.C ffi.cdef [[ typedef int (*my_fn_t)(int, int, int); int f2(); void setup(my_fn_t f, int, int, int); ]] local lib = ffi.load("test") function setup(cb, a, b, c) lib.setup(cb, a, b, c) end function f0() return lib.f2() + 1 end do local cb = ffi.cast("my_fn_t", function(a, b, c) return a+b+c end) setup(cb, 10, 99, 13) print(f0()) for i=1,300 do if i > 60 then f0() end end end ``` ```c /* C code */ typedef int (*my_fn_t)(int, int, int); my_fn_t gf = 0; int ga; int gb; int gc; void setup(my_fn_t f, int a, int b, int c) { gf = f; ga = a; gb = b; gc = c; } int f2() { return gf(ga, gb, gc) + 1; } ``` The issue in question has been a known for a long time. See: https://luajit.freelists.narkive.com/sdhSLJSr/how-to-make-bad-callback-more-deterministic ``` The bad callback error happens because some JIT-compiled Lua code calls a C function which in turn calls an FFI callback. ``` https://lua-l.lua.narkive.com/qXJrNlpP/luajit-ffi-windows-bad-callback-error-in-msgwaitformultipleobjects-proof-of-concept From Mike Pall: ``` The problem is that a FFI callback cannot safely be called from a C function which is itself called via the FFI from JIT-compiled code. In your case this is the call to MsgWaitForMultipleObjects. I've put in a lot of heuristics to detect this, and it usually succeeds in disabling compilation for such a function. However in your case the loop is compiled before the callback is ever called, so the detection fails. 
The straighforward solution is to put the message loop into an extra Lua function and use jit.off(func) ``` Signed-off-by: Hisham Muhammad --- kong/runloop/wasm.lua | 1 + 1 file changed, 1 insertion(+) diff --git a/kong/runloop/wasm.lua b/kong/runloop/wasm.lua index 004c08ea5658..8558c38bf919 100644 --- a/kong/runloop/wasm.lua +++ b/kong/runloop/wasm.lua @@ -935,6 +935,7 @@ function _M.attach(ctx) return kong.response.error(500) end + jit.off(proxy_wasm.start) ok, err = proxy_wasm.start() if not ok then log(CRIT, "failed to execute ", chain.label, " filter chain for request: ", err) From beb11709bd5a08948f6f24811f0a920722a15b63 Mon Sep 17 00:00:00 2001 From: Joshua Schmid Date: Wed, 6 Dec 2023 06:53:17 +0100 Subject: [PATCH 187/249] chore(actions): bump `cross-repo-cherrypick-action` action to `v1.1.0` (#12157) This should now correctly identify if the PR was merged using either "Squash and merge" or "Rebase and merge" and act accordingly. KAG-3198 Signed-off-by: Joshua Schmid --- .github/workflows/cherry-picks.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/cherry-picks.yml b/.github/workflows/cherry-picks.yml index 82c1a0df4130..c5539dd8f0f2 100644 --- a/.github/workflows/cherry-picks.yml +++ b/.github/workflows/cherry-picks.yml @@ -26,7 +26,7 @@ jobs: with: token: ${{ secrets.CHERRY_PICK_TOKEN }} - name: Create backport pull requests - uses: jschmid1/cross-repo-cherrypick-action@2366f50fd85e8966aa024a4dd6fbf70e7019d7e1 + uses: jschmid1/cross-repo-cherrypick-action@cde6a39fa9e6eee09e633dc83bbf5e83bb476ec3 #v1.1.0 with: token: ${{ secrets.CHERRY_PICK_TOKEN }} pull_title: '[cherry-pick -> ${target_branch}] ${pull_title}' From 89e62669ea26fea36f66bcab092fd507a9abd326 Mon Sep 17 00:00:00 2001 From: Joshua Schmid Date: Wed, 6 Dec 2023 06:54:32 +0100 Subject: [PATCH 188/249] chore(actions): re-introduce improved backport action (#12154) This now correctly detects the available merge strategies and adapts it's behavior accordingly. Rebase -> Use commits from the PR Squash -> Use the newly created, squashed commit (Merge commit -> We don't use that in our repository.) 
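In practice the strategy detection hinges on the action's experimental
`detect_merge_method` flag. A condensed sketch of the new workflow step,
showing only the merge-method-related options from the full diff below
(korthout/backport-action v2.2.0 syntax):

```yaml
# Condensed sketch; the complete step (label patterns, PR title/description
# templates, copy_* options) is in the backport.yml diff below.
- name: Create backport pull requests
  uses: korthout/backport-action@e355f68e2fc1cb0063b1c1b717882290ffc994bf #v2.2.0
  with:
    github_token: ${{ secrets.PAT }}
    merge_commits: 'skip'   # merge commits are not used in this repository
    experimental: >
      {
        "detect_merge_method": true
      }
```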
KAG-3198 Signed-off-by: Joshua Schmid --- .github/workflows/backport.yml | 36 +++++++++++++++++++++------------- 1 file changed, 22 insertions(+), 14 deletions(-) diff --git a/.github/workflows/backport.yml b/.github/workflows/backport.yml index 7cc4b9c134a3..3e2dd71dc7df 100644 --- a/.github/workflows/backport.yml +++ b/.github/workflows/backport.yml @@ -1,24 +1,32 @@ name: Backport on: pull_request_target: - types: - - closed - - labeled - + types: [closed, labeled] # runs when the pull request is closed/merged or labeled (to trigger a backport in hindsight) +permissions: + contents: write # so it can comment + pull-requests: write # so it can create pull requests + actions: write jobs: backport: name: Backport runs-on: ubuntu-latest - if: > - github.event.pull_request.merged - && ( - github.event.action == 'closed' - || ( - github.event.action == 'labeled' - && contains(github.event.label.name, 'backport') - ) - ) + if: github.event.pull_request.merged steps: - - uses: tibdex/backport@9565281eda0731b1d20c4025c43339fb0a23812e # v2.0.4 + - uses: actions/checkout@v4 + - name: Create backport pull requests + uses: korthout/backport-action@e355f68e2fc1cb0063b1c1b717882290ffc994bf #v2.2.0 with: github_token: ${{ secrets.PAT }} + pull_title: '[backport -> ${target_branch}] ${pull_title}' + merge_commits: 'skip' + copy_labels_pattern: ^(?!backport ).* # copies all labels except those starting with "backport " + label_pattern: ^backport (release\/[^ ]+)$ # filters for labels starting with "backport " and extracts the branch name + pull_description: |- + Automated backport to `${target_branch}`, triggered by a label in #${pull_number}. + copy_assignees: true + copy_milestone: true + copy_requested_reviewers: true + experimental: > + { + "detect_merge_method": true + } From aba1910882daefa125c503f4de1d82efab5e4d12 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 7 Dec 2023 00:16:08 +0000 Subject: [PATCH 189/249] chore(deps): bump ngx_wasm_module to prerelease-0.2.0 Changes since b51a15fc972540e6b8964e2fe1d86ebf67ca53aa: * 388d572 - docs(changelog) prerelease-0.2.0 * 7d3451b - chore(codecov) specify flags in 'flag_management' section * d59027f - chore(valgrind.supp) consolidate wasmparser::parse suppressions * 7184a57 - chore(deps) bump OpenSSL to 3.2.0 * 338bcbe - chore(deps) bump zlib to 1.3 * 743c3d3 - chore(deps) cargo update * 8964b1f - chore(util) minor cleanup/improvements * f955308 - chore(sdk) separate build and install of .wasm examples * 8f3fa95 - fix(wasi) do not use instance pool in 'fd_write' * 4f47e96 - docs(proxy-wasm) document response body buffering * f813a30 - feat(proxy-wasm) implement response body buffering * f171e0f - chore(util) always invoke the Proxy-Wasm SDK scripts * 3d61ca1 - chore(ci) add code coverage for Valgrind jobs * a278bb7 - tests(*) switch Valgrind tests from 'opt-out' to 'opt-in' * 9584c03 - fix(proxy-wasm) use filter chain pool in 'ngx_proxy_wasm_maps_set' * 175f0b8 - chore(util) minor usage fix and style cohesion for scripts * aefb121 - chore(ci) install Node.js in unit and valgrind jobs * e757482 - chore(*) clone and test proxy-wasm-assemblyscript-sdk examples * f2faf97 - chore(util) build Proxy-Wasm SDKs on 'make setup' * bd1b5b8 - chore(ci) remove 'nginx.sock' before artifact upload on failure * 65a0b46 - chore(util) use 'git fetch --tags' for updating runtimes --- .requirements | 2 +- changelog/unreleased/kong/bump-ngx-wasm-module.yml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) 
diff --git a/.requirements b/.requirements index fb8c572ff095..cac1c5e026c8 100644 --- a/.requirements +++ b/.requirements @@ -13,7 +13,7 @@ LUA_RESTY_WEBSOCKET=60eafc3d7153bceb16e6327074e0afc3d94b1316 # 0.4.0 ATC_ROUTER=7a2ad42d4246598ba1f753b6ae79cb1456040afa # 1.3.1 KONG_MANAGER=nightly -NGX_WASM_MODULE=b51a15fc972540e6b8964e2fe1d86ebf67ca53aa +NGX_WASM_MODULE=388d5720293f5091ccee1f859a42683fbfd14e7d # prerelease-0.2.0 WASMER=3.1.1 WASMTIME=14.0.3 V8=10.5.18 diff --git a/changelog/unreleased/kong/bump-ngx-wasm-module.yml b/changelog/unreleased/kong/bump-ngx-wasm-module.yml index 1550fb88dd2f..64ce68434fcf 100644 --- a/changelog/unreleased/kong/bump-ngx-wasm-module.yml +++ b/changelog/unreleased/kong/bump-ngx-wasm-module.yml @@ -1,2 +1,2 @@ -message: "Bump `ngx_wasm_module` to `ddb3fa8f7cacc81557144cf22706484eabd79a84`" +message: "Bump `ngx_wasm_module` to `388d5720293f5091ccee1f859a42683fbfd14e7d`" type: dependency From 81ad18d1b97d6ac85d01d5c3ce1ccab0db4b702c Mon Sep 17 00:00:00 2001 From: Xiaoch Date: Fri, 8 Dec 2023 13:39:18 +0800 Subject: [PATCH 190/249] fix(globalpatches): support exptime in SharedDict:set() api (#12173) This commit introduces support for the exptime parameter in SharedDict:set() to align its functionality with that of the original ngx.shared.DICT.set(). And it refines the logic of SharedDict:add() by using SharedDict:set(). KAG-3303 --- kong/globalpatches.lua | 19 +++++++++---------- 1 file changed, 9 insertions(+), 10 deletions(-) diff --git a/kong/globalpatches.lua b/kong/globalpatches.lua index 812d3d74e4b8..56de8dcfb68b 100644 --- a/kong/globalpatches.lua +++ b/kong/globalpatches.lua @@ -265,16 +265,7 @@ return function(options) return self.data[key] and self.data[key].value, nil end SharedDict.get_stale = SharedDict.get - function SharedDict:set(key, value) - set(self.data, key, value) - return true, nil, false - end - SharedDict.safe_set = SharedDict.set - function SharedDict:add(key, value, exptime) - if self.data[key] ~= nil then - return false, "exists", false - end - + function SharedDict:set(key, value, exptime) local expire_at = nil if exptime then @@ -287,6 +278,14 @@ return function(options) set(self.data, key, value, expire_at) return true, nil, false end + SharedDict.safe_set = SharedDict.set + function SharedDict:add(key, value, exptime) + if self.data[key] ~= nil then + return false, "exists", false + end + + return self:set(key, value, exptime) + end SharedDict.safe_add = SharedDict.add function SharedDict:replace(key, value) if self.data[key] == nil then From ac5b634a2c2df607c26ddd497b5fb8f67b851e95 Mon Sep 17 00:00:00 2001 From: Chrono Date: Mon, 11 Dec 2023 17:56:35 +0800 Subject: [PATCH 191/249] refactor(tools): remove reference of sha256 from utils (#12095) KAG-3226 --- kong/db/schema/json.lua | 7 ++++--- kong/plugins/hmac-auth/access.lua | 13 ++++++------- kong/plugins/ldap-auth/access.lua | 2 +- kong/plugins/proxy-cache/cache_key.lua | 2 +- kong/runloop/wasm.lua | 2 +- kong/tools/utils.lua | 1 - spec/03-plugins/20-ldap-auth/01-access_spec.lua | 2 +- .../20-ldap-auth/02-invalidations_spec.lua | 2 +- 8 files changed, 15 insertions(+), 16 deletions(-) diff --git a/kong/db/schema/json.lua b/kong/db/schema/json.lua index 70844f2fd692..b140c0b6279d 100644 --- a/kong/db/schema/json.lua +++ b/kong/db/schema/json.lua @@ -7,12 +7,13 @@ local _M = {} local lrucache = require "resty.lrucache" local jsonschema = require "resty.ljsonschema" local metaschema = require "resty.ljsonschema.metaschema" -local utils = require "kong.tools.utils" local cjson = 
require "cjson" +local sha256_hex = require("kong.tools.sha256").sha256_hex +local cycle_aware_deep_copy = require("kong.tools.table").cycle_aware_deep_copy + local type = type local cjson_encode = cjson.encode -local sha256_hex = utils.sha256_hex ---@class kong.db.schema.json.schema_doc : table @@ -156,7 +157,7 @@ end ---@param name string ---@param schema kong.db.schema.json.schema_doc function _M.add_schema(name, schema) - schemas[name] = utils.cycle_aware_deep_copy(schema, true) + schemas[name] = cycle_aware_deep_copy(schema, true) end diff --git a/kong/plugins/hmac-auth/access.lua b/kong/plugins/hmac-auth/access.lua index 6a2b37437689..44ac3a4875c7 100644 --- a/kong/plugins/hmac-auth/access.lua +++ b/kong/plugins/hmac-auth/access.lua @@ -1,7 +1,9 @@ local constants = require "kong.constants" -local sha256 = require "resty.sha256" local openssl_hmac = require "resty.openssl.hmac" -local utils = require "kong.tools.utils" + + +local sha256_base64 = require("kong.tools.sha256").sha256_base64 +local string_split = require("kong.tools.string").split local ngx = ngx @@ -10,7 +12,6 @@ local error = error local time = ngx.time local abs = math.abs local decode_base64 = ngx.decode_base64 -local encode_base64 = ngx.encode_base64 local parse_time = ngx.parse_http_time local re_gmatch = ngx.re.gmatch local hmac_sha1 = ngx.hmac_sha1 @@ -115,7 +116,7 @@ local function retrieve_hmac_fields(authorization_header) if m and #m >= 4 then hmac_params.username = m[1] hmac_params.algorithm = m[2] - hmac_params.hmac_headers = utils.split(m[3], " ") + hmac_params.hmac_headers = string_split(m[3], " ") hmac_params.signature = m[4] end end @@ -231,9 +232,7 @@ local function validate_body() return body == "" end - local digest = sha256:new() - digest:update(body or '') - local digest_created = "SHA-256=" .. encode_base64(digest:final()) + local digest_created = "SHA-256=" .. 
sha256_base64(body or '') return digest_created == digest_received end diff --git a/kong/plugins/ldap-auth/access.lua b/kong/plugins/ldap-auth/access.lua index c04b6c50276d..8ece16c98923 100644 --- a/kong/plugins/ldap-auth/access.lua +++ b/kong/plugins/ldap-auth/access.lua @@ -13,7 +13,7 @@ local upper = string.upper local sub = string.sub local fmt = string.format local tcp = ngx.socket.tcp -local sha256_hex = require "kong.tools.utils".sha256_hex +local sha256_hex = require("kong.tools.sha256").sha256_hex local AUTHORIZATION = "authorization" diff --git a/kong/plugins/proxy-cache/cache_key.lua b/kong/plugins/proxy-cache/cache_key.lua index f9f11945d275..81aa8df762bd 100644 --- a/kong/plugins/proxy-cache/cache_key.lua +++ b/kong/plugins/proxy-cache/cache_key.lua @@ -6,7 +6,7 @@ local sort = table.sort local insert = table.insert local concat = table.concat -local sha256_hex = require "kong.tools.utils".sha256_hex +local sha256_hex = require("kong.tools.sha256").sha256_hex local _M = {} diff --git a/kong/runloop/wasm.lua b/kong/runloop/wasm.lua index 8558c38bf919..70f36b798adc 100644 --- a/kong/runloop/wasm.lua +++ b/kong/runloop/wasm.lua @@ -107,7 +107,7 @@ local hash_chain do local buffer = require "string.buffer" - local sha256 = utils.sha256_bin + local sha256 = require("kong.tools.sha256").sha256_bin local HASH_DISABLED = sha256("disabled") local HASH_NONE = sha256("none") diff --git a/kong/tools/utils.lua b/kong/tools/utils.lua index 0b38d0dab5b7..ab3ed8343cac 100644 --- a/kong/tools/utils.lua +++ b/kong/tools/utils.lua @@ -19,7 +19,6 @@ local _M = {} do local modules = { "kong.tools.table", - "kong.tools.sha256", "kong.tools.yield", "kong.tools.string", "kong.tools.uuid", diff --git a/spec/03-plugins/20-ldap-auth/01-access_spec.lua b/spec/03-plugins/20-ldap-auth/01-access_spec.lua index bf1cb9f78a04..c4f4f259f237 100644 --- a/spec/03-plugins/20-ldap-auth/01-access_spec.lua +++ b/spec/03-plugins/20-ldap-auth/01-access_spec.lua @@ -5,7 +5,7 @@ local cjson = require "cjson" local lower = string.lower local fmt = string.format -local sha256_hex = require "kong.tools.utils".sha256_hex +local sha256_hex = require("kong.tools.sha256").sha256_hex local function cache_key(conf, username, password) diff --git a/spec/03-plugins/20-ldap-auth/02-invalidations_spec.lua b/spec/03-plugins/20-ldap-auth/02-invalidations_spec.lua index 49f9dbed0485..054db47fed00 100644 --- a/spec/03-plugins/20-ldap-auth/02-invalidations_spec.lua +++ b/spec/03-plugins/20-ldap-auth/02-invalidations_spec.lua @@ -1,7 +1,7 @@ local helpers = require "spec.helpers" local fmt = string.format local lower = string.lower -local sha256_hex = require "kong.tools.utils".sha256_hex +local sha256_hex = require("kong.tools.sha256").sha256_hex local ldap_host_aws = "ec2-54-172-82-117.compute-1.amazonaws.com" From 9a1b557b4201464342f2666ba77f322f60e5fefc Mon Sep 17 00:00:00 2001 From: Chrono Date: Mon, 11 Dec 2023 17:57:06 +0800 Subject: [PATCH 192/249] refactor(tools): remove reference of module from utils (#12113) KAG-3226 --- kong/api/init.lua | 8 +++++--- kong/cache/warmup.lua | 5 ++++- kong/db/dao/plugins.lua | 7 ++++--- kong/db/dao/vaults.lua | 4 ++-- kong/db/init.lua | 19 ++++++++++--------- kong/db/migrations/state.lua | 10 ++++++---- kong/db/schema/others/declarative_config.lua | 3 ++- kong/db/schema/plugin_loader.lua | 9 +++++---- kong/db/schema/vault_loader.lua | 4 ++-- kong/db/strategies/init.lua | 4 ++-- kong/runloop/certificate.lua | 5 +++-- kong/status/init.lua | 6 +++--- kong/tools/stream_api.lua | 4 ++-- 
kong/tools/utils.lua | 1 - spec/01-unit/05-utils_spec.lua | 8 +++++--- 15 files changed, 55 insertions(+), 42 deletions(-) diff --git a/kong/api/init.lua b/kong/api/init.lua index 6ca0d29ac900..4b68d3558039 100644 --- a/kong/api/init.lua +++ b/kong/api/init.lua @@ -1,10 +1,12 @@ local lapis = require "lapis" -local utils = require "kong.tools.utils" local api_helpers = require "kong.api.api_helpers" local Endpoints = require "kong.api.endpoints" local hooks = require "kong.hooks" +local load_module_if_exists = require "kong.tools.module".load_module_if_exists + + local ngx = ngx local type = type local pairs = pairs @@ -95,7 +97,7 @@ do -- Custom Routes for _, dao in pairs(kong.db.daos) do local schema = dao.schema - local ok, custom_endpoints = utils.load_module_if_exists("kong.api.routes." .. schema.name) + local ok, custom_endpoints = load_module_if_exists("kong.api.routes." .. schema.name) if ok then customize_routes(routes, custom_endpoints, schema) end @@ -104,7 +106,7 @@ do -- Plugin Routes if kong.configuration and kong.configuration.loaded_plugins then for k in pairs(kong.configuration.loaded_plugins) do - local loaded, custom_endpoints = utils.load_module_if_exists("kong.plugins." .. k .. ".api") + local loaded, custom_endpoints = load_module_if_exists("kong.plugins." .. k .. ".api") if loaded then ngx.log(ngx.DEBUG, "Loading API endpoints for plugin: ", k) if api_helpers.is_new_db_routes(custom_endpoints) then diff --git a/kong/cache/warmup.lua b/kong/cache/warmup.lua index 4dee26539357..3d7829f94f7f 100644 --- a/kong/cache/warmup.lua +++ b/kong/cache/warmup.lua @@ -2,7 +2,10 @@ local utils = require "kong.tools.utils" local constants = require "kong.constants" local buffer = require "string.buffer" local acl_groups -if utils.load_module_if_exists("kong.plugins.acl.groups") then + + +local load_module_if_exists = require "kong.tools.module".load_module_if_exists +if load_module_if_exists("kong.plugins.acl.groups") then acl_groups = require "kong.plugins.acl.groups" end diff --git a/kong/db/dao/plugins.lua b/kong/db/dao/plugins.lua index 58521cc07f84..86a56fc416e7 100644 --- a/kong/db/dao/plugins.lua +++ b/kong/db/dao/plugins.lua @@ -1,10 +1,11 @@ local constants = require "kong.constants" -local utils = require "kong.tools.utils" local DAO = require "kong.db.dao" local plugin_loader = require "kong.db.schema.plugin_loader" local reports = require "kong.reports" local plugin_servers = require "kong.runloop.plugin_servers" local version = require "version" +local load_module_if_exists = require "kong.tools.module".load_module_if_exists + local Plugins = {} @@ -150,7 +151,7 @@ local load_plugin_handler do -- NOTE: no version _G.kong (nor PDK) in plugins main chunk local plugin_handler = "kong.plugins." .. plugin .. 
".handler" - local ok, handler = utils.load_module_if_exists(plugin_handler) + local ok, handler = load_module_if_exists(plugin_handler) if not ok then ok, handler = plugin_servers.load_plugin(plugin) if type(handler) == "table" then @@ -202,7 +203,7 @@ local function load_plugin_entity_strategy(schema, db, plugin) local custom_strat = fmt("kong.plugins.%s.strategies.%s.%s", plugin, db.strategy, schema.name) - local exists, mod = utils.load_module_if_exists(custom_strat) + local exists, mod = load_module_if_exists(custom_strat) if exists and mod then local parent_mt = getmetatable(strategy) local mt = { diff --git a/kong/db/dao/vaults.lua b/kong/db/dao/vaults.lua index a07384c93e6f..1c7238b15b93 100644 --- a/kong/db/dao/vaults.lua +++ b/kong/db/dao/vaults.lua @@ -1,6 +1,6 @@ local constants = require "kong.constants" -local utils = require "kong.tools.utils" local vault_loader = require "kong.db.schema.vault_loader" +local load_module_if_exists = require "kong.tools.module".load_module_if_exists local Vaults = {} @@ -19,7 +19,7 @@ local DEBUG = ngx.DEBUG local function load_vault_strategy(vault) - local ok, strategy = utils.load_module_if_exists("kong.vaults." .. vault) + local ok, strategy = load_module_if_exists("kong.vaults." .. vault) if not ok then return nil, vault .. " vault is enabled but not installed;\n" .. strategy end diff --git a/kong/db/init.lua b/kong/db/init.lua index f963a2624a79..edf44f2ac46d 100644 --- a/kong/db/init.lua +++ b/kong/db/init.lua @@ -8,11 +8,13 @@ local MetaSchema = require "kong.db.schema.metaschema" local constants = require "kong.constants" local log = require "kong.cmd.utils.log" local workspaces = require "kong.workspaces" -local utils = require "kong.tools.utils" local knode = kong and kong.node or require "kong.pdk.node".new() +local load_module_if_exists = require "kong.tools.module".load_module_if_exists + + local fmt = string.format local type = type local pairs = pairs @@ -71,7 +73,7 @@ function DB.new(kong_config, strategy) -- load core entities subschemas local subschemas - ok, subschemas = utils.load_module_if_exists("kong.db.schema.entities." .. entity_name .. "_subschemas") + ok, subschemas = load_module_if_exists("kong.db.schema.entities." .. entity_name .. "_subschemas") if ok then for name, subschema in pairs(subschemas) do local ok, err = entity:new_subschema(name, subschema) @@ -418,7 +420,6 @@ end do -- migrations - local utils = require "kong.tools.utils" local MigrationsState = require "kong.db.migrations.state" @@ -490,8 +491,8 @@ do if run_teardown and options.skip_teardown_migrations then for _, t in ipairs(options.skip_teardown_migrations) do for _, mig in ipairs(t.migrations) do - local ok, mod = utils.load_module_if_exists(t.namespace .. "." .. - mig.name) + local ok, mod = load_module_if_exists(t.namespace .. "." .. + mig.name) if ok then local strategy_migration = mod[self.strategy] if strategy_migration and strategy_migration.teardown then @@ -523,8 +524,8 @@ do self.infos.db_name) for _, mig in ipairs(t.migrations) do - local ok, mod = utils.load_module_if_exists(t.namespace .. "." .. - mig.name) + local ok, mod = load_module_if_exists(t.namespace .. "." .. + mig.name) if not ok then self.connector:close() return nil, fmt_err(self, "failed to load migration '%s': %s", @@ -638,8 +639,8 @@ do for _, t in ipairs(migrations) do for _, mig in ipairs(t.migrations) do - local ok, mod = utils.load_module_if_exists(t.namespace .. "." .. - mig.name) + local ok, mod = load_module_if_exists(t.namespace .. "." .. 
+ mig.name) if not ok then return nil, fmt("failed to load migration '%s': %s", mig.name, mod) diff --git a/kong/db/migrations/state.lua b/kong/db/migrations/state.lua index 0d96e9ced12e..a703a1fc1b38 100644 --- a/kong/db/migrations/state.lua +++ b/kong/db/migrations/state.lua @@ -1,10 +1,12 @@ -local utils = require "kong.tools.utils" local log = require "kong.cmd.utils.log" local Schema = require "kong.db.schema" local Migration = require "kong.db.schema.others.migrations" local Errors = require "kong.db.errors" +local load_module_if_exists = require "kong.tools.module".load_module_if_exists + + local MigrationSchema = Schema.new(Migration) @@ -67,12 +69,12 @@ local function load_subsystems(db, plugin_names) for _, plugin_name in ipairs(sorted_plugin_names) do local namespace = ss.namespace:gsub("%*", plugin_name) - local ok, mig_idx = utils.load_module_if_exists(namespace) + local ok, mig_idx = load_module_if_exists(namespace) if not ok then -- fallback to using ".init" since "/?/init.lua" isn't always in a -- Lua-path by default, see https://github.com/Kong/kong/issues/6867 - ok, mig_idx = utils.load_module_if_exists(namespace .. ".init") + ok, mig_idx = load_module_if_exists(namespace .. ".init") end if ok then @@ -104,7 +106,7 @@ local function load_subsystems(db, plugin_names) for _, mig_name in ipairs(subsys.migrations_index) do local mig_module = fmt("%s.%s", subsys.namespace, mig_name) - local ok, migration = utils.load_module_if_exists(mig_module) + local ok, migration = load_module_if_exists(mig_module) if not ok then return nil, fmt_err(db, "failed to load migration '%s' of '%s' subsystem", mig_module, subsys.name) diff --git a/kong/db/schema/others/declarative_config.lua b/kong/db/schema/others/declarative_config.lua index 145bb7f97783..00fa540c5cd6 100644 --- a/kong/db/schema/others/declarative_config.lua +++ b/kong/db/schema/others/declarative_config.lua @@ -19,6 +19,7 @@ local insert = table.insert local concat = table.concat local tostring = tostring local cjson_encode = require("cjson.safe").encode +local load_module_if_exists = require("kong.tools.module").load_module_if_exists local DeclarativeConfig = {} @@ -847,7 +848,7 @@ end local function load_entity_subschemas(entity_name, entity) - local ok, subschemas = utils.load_module_if_exists("kong.db.schema.entities." .. entity_name .. "_subschemas") + local ok, subschemas = load_module_if_exists("kong.db.schema.entities." .. entity_name .. "_subschemas") if ok then for name, subschema in pairs(subschemas) do local ok, err = entity:new_subschema(name, subschema) diff --git a/kong/db/schema/plugin_loader.lua b/kong/db/schema/plugin_loader.lua index 5ec62ec0ed8e..7ae7d856e4aa 100644 --- a/kong/db/schema/plugin_loader.lua +++ b/kong/db/schema/plugin_loader.lua @@ -1,7 +1,8 @@ local MetaSchema = require "kong.db.schema.metaschema" local Entity = require "kong.db.schema.entity" -local utils = require "kong.tools.utils" local plugin_servers = require "kong.runloop.plugin_servers" +local is_array = require "kong.tools.table".is_array +local load_module_if_exists = require "kong.tools.module".load_module_if_exists local fmt = string.format @@ -13,7 +14,7 @@ local plugin_loader = {} function plugin_loader.load_subschema(parent_schema, plugin, errors) local plugin_schema = "kong.plugins." .. plugin .. 
".schema" - local ok, schema = utils.load_module_if_exists(plugin_schema) + local ok, schema = load_module_if_exists(plugin_schema) if not ok then ok, schema = plugin_servers.load_schema(plugin) end @@ -56,11 +57,11 @@ end function plugin_loader.load_entities(plugin, errors, loader_fn) - local has_daos, daos_schemas = utils.load_module_if_exists("kong.plugins." .. plugin .. ".daos") + local has_daos, daos_schemas = load_module_if_exists("kong.plugins." .. plugin .. ".daos") if not has_daos then return {} end - if not utils.is_array(daos_schemas, "strict") then + if not is_array(daos_schemas, "strict") then return nil, fmt("custom plugin '%s' returned non-array daos definition table", plugin) end diff --git a/kong/db/schema/vault_loader.lua b/kong/db/schema/vault_loader.lua index adb45fe859ee..3ae3fdb1f515 100644 --- a/kong/db/schema/vault_loader.lua +++ b/kong/db/schema/vault_loader.lua @@ -1,6 +1,6 @@ local MetaSchema = require "kong.db.schema.metaschema" local Entity = require "kong.db.schema.entity" -local utils = require "kong.tools.utils" +local load_module_if_exists = require "kong.tools.module".load_module_if_exists local tostring = tostring @@ -11,7 +11,7 @@ local vault_loader = {} function vault_loader.load_subschema(parent_schema, vault, errors) local vault_schema = "kong.vaults." .. vault .. ".schema" - local ok, schema = utils.load_module_if_exists(vault_schema) + local ok, schema = load_module_if_exists(vault_schema) if not ok then return nil, "no configuration schema found for vault: " .. vault end diff --git a/kong/db/strategies/init.lua b/kong/db/strategies/init.lua index fde65cc7c562..90f7968a1ec7 100644 --- a/kong/db/strategies/init.lua +++ b/kong/db/strategies/init.lua @@ -1,4 +1,4 @@ -local utils = require("kong.tools.utils") +local load_module_if_exists = require "kong.tools.module".load_module_if_exists local fmt = string.format @@ -55,7 +55,7 @@ function _M.new(kong_config, database, schemas, errors) end local custom_strat = fmt("kong.db.strategies.%s.%s", database, schema.name) - local exists, mod = utils.load_module_if_exists(custom_strat) + local exists, mod = load_module_if_exists(custom_strat) if exists and mod then local parent_mt = getmetatable(strategy) local mt = { diff --git a/kong/runloop/certificate.lua b/kong/runloop/certificate.lua index f52f338ac685..aeeab9702051 100644 --- a/kong/runloop/certificate.lua +++ b/kong/runloop/certificate.lua @@ -3,7 +3,6 @@ local pl_utils = require "pl.utils" local mlcache = require "kong.resty.mlcache" local new_tab = require "table.new" local constants = require "kong.constants" -local utils = require "kong.tools.utils" local plugin_servers = require "kong.runloop.plugin_servers" local openssl_x509_store = require "resty.openssl.x509.store" local openssl_x509 = require "resty.openssl.x509" @@ -418,9 +417,11 @@ end -- here we assume the field name is always `ca_certificates` local get_ca_certificate_reference_plugins do + local load_module_if_exists = require "kong.tools.module".load_module_if_exists + local function is_plugin_referencing_ca_certificates(name) local plugin_schema = "kong.plugins." .. name .. 
".schema" - local ok, schema = utils.load_module_if_exists(plugin_schema) + local ok, schema = load_module_if_exists(plugin_schema) if not ok then ok, schema = plugin_servers.load_schema(name) end diff --git a/kong/status/init.lua b/kong/status/init.lua index b5f9c64b0eaa..ffe7ca2e54cf 100644 --- a/kong/status/init.lua +++ b/kong/status/init.lua @@ -1,7 +1,7 @@ local lapis = require "lapis" -local utils = require "kong.tools.utils" local api_helpers = require "kong.api.api_helpers" local hooks = require "kong.hooks" +local load_module_if_exists = require "kong.tools.module".load_module_if_exists local ngx = ngx @@ -58,8 +58,8 @@ end -- Load plugins status routes if kong.configuration and kong.configuration.loaded_plugins then for k in pairs(kong.configuration.loaded_plugins) do - local loaded, mod = utils.load_module_if_exists("kong.plugins." .. - k .. ".status_api") + local loaded, mod = load_module_if_exists("kong.plugins." .. + k .. ".status_api") if loaded then ngx.log(ngx.DEBUG, "Loading Status API endpoints for plugin: ", k) diff --git a/kong/tools/stream_api.lua b/kong/tools/stream_api.lua index f3f29980da39..1710487552be 100644 --- a/kong/tools/stream_api.lua +++ b/kong/tools/stream_api.lua @@ -236,10 +236,10 @@ end function stream_api.load_handlers() - local utils = require "kong.tools.utils" + local load_module_if_exists = require "kong.tools.module".load_module_if_exists for plugin_name in pairs(kong.configuration.loaded_plugins) do - local loaded, custom_endpoints = utils.load_module_if_exists("kong.plugins." .. plugin_name .. ".api") + local loaded, custom_endpoints = load_module_if_exists("kong.plugins." .. plugin_name .. ".api") if loaded and custom_endpoints._stream then log(DEBUG, "Register stream api for plugin: ", plugin_name) _handlers[plugin_name] = custom_endpoints._stream diff --git a/kong/tools/utils.lua b/kong/tools/utils.lua index ab3ed8343cac..4dce9e2f3016 100644 --- a/kong/tools/utils.lua +++ b/kong/tools/utils.lua @@ -25,7 +25,6 @@ do "kong.tools.rand", "kong.tools.system", "kong.tools.time", - "kong.tools.module", "kong.tools.ip", "kong.tools.http", } diff --git a/spec/01-unit/05-utils_spec.lua b/spec/01-unit/05-utils_spec.lua index d358954f1205..ea0fb9c11882 100644 --- a/spec/01-unit/05-utils_spec.lua +++ b/spec/01-unit/05-utils_spec.lua @@ -487,16 +487,18 @@ describe("Utils", function() end) describe("load_module_if_exists()", function() + local load_module_if_exists = require "kong.tools.module".load_module_if_exists + it("should return false if the module does not exist", function() local loaded, mod assert.has_no.errors(function() - loaded, mod = utils.load_module_if_exists("kong.does.not.exist") + loaded, mod = load_module_if_exists("kong.does.not.exist") end) assert.False(loaded) assert.is.string(mod) end) it("should throw an error with a traceback if the module is invalid", function() - local pok, perr = pcall(utils.load_module_if_exists, "spec.fixtures.invalid-module") + local pok, perr = pcall(load_module_if_exists, "spec.fixtures.invalid-module") assert.falsy(pok) assert.match("error loading module 'spec.fixtures.invalid-module'", perr, 1, true) assert.match("./spec/fixtures/invalid-module.lua:", perr, 1, true) @@ -504,7 +506,7 @@ describe("Utils", function() it("should load a module if it was found and valid", function() local loaded, mod assert.has_no.errors(function() - loaded, mod = utils.load_module_if_exists("spec.fixtures.valid-module") + loaded, mod = load_module_if_exists("spec.fixtures.valid-module") end) assert.True(loaded) 
assert.truthy(mod) From 2666f6ffaa5ec02fb0d3264171dd3f6c780d690a Mon Sep 17 00:00:00 2001 From: Chrono Date: Mon, 11 Dec 2023 19:08:44 +0800 Subject: [PATCH 193/249] refactor(tools): remove reference of yield from utils (#12098) KAG-3226 --- kong/clustering/config_helper.lua | 2 +- kong/concurrency.lua | 2 +- kong/db/declarative/import.lua | 2 +- kong/db/declarative/init.lua | 3 +-- kong/db/schema/init.lua | 2 +- kong/db/schema/others/declarative_config.lua | 2 +- kong/db/strategies/off/init.lua | 2 +- kong/pdk/vault.lua | 8 +++----- kong/plugins/prometheus/exporter.lua | 4 ++-- kong/plugins/prometheus/prometheus.lua | 2 +- kong/router/atc.lua | 2 +- kong/router/traditional.lua | 2 +- kong/runloop/handler.lua | 3 ++- kong/tools/utils.lua | 1 - 14 files changed, 17 insertions(+), 20 deletions(-) diff --git a/kong/clustering/config_helper.lua b/kong/clustering/config_helper.lua index 82e94b357023..b77b69f672f1 100644 --- a/kong/clustering/config_helper.lua +++ b/kong/clustering/config_helper.lua @@ -14,7 +14,7 @@ local error = error local pairs = pairs local ipairs = ipairs local sort = table.sort -local yield = require("kong.tools.utils").yield +local yield = require("kong.tools.yield").yield local fetch_table = tablepool.fetch local release_table = tablepool.release diff --git a/kong/concurrency.lua b/kong/concurrency.lua index 58077d0aeed5..beef26d76aea 100644 --- a/kong/concurrency.lua +++ b/kong/concurrency.lua @@ -1,6 +1,6 @@ local resty_lock = require "resty.lock" local ngx_semaphore = require "ngx.semaphore" -local in_yieldable_phase = require("kong.tools.utils").in_yieldable_phase +local in_yieldable_phase = require("kong.tools.yield").in_yieldable_phase local type = type diff --git a/kong/db/declarative/import.lua b/kong/db/declarative/import.lua index 68cf31d08704..5539af2212d9 100644 --- a/kong/db/declarative/import.lua +++ b/kong/db/declarative/import.lua @@ -6,6 +6,7 @@ local utils = require("kong.tools.utils") local declarative_config = require("kong.db.schema.others.declarative_config") +local yield = require("kong.tools.yield").yield local marshall = require("kong.db.declarative.marshaller").marshall local schema_topological_sort = require("kong.db.schema.topological_sort") local nkeys = require("table.nkeys") @@ -18,7 +19,6 @@ local next = next local insert = table.insert local null = ngx.null local get_phase = ngx.get_phase -local yield = utils.yield local DECLARATIVE_HASH_KEY = constants.DECLARATIVE_HASH_KEY diff --git a/kong/db/declarative/init.lua b/kong/db/declarative/init.lua index 93d2e40a0803..a7dd6d2b0734 100644 --- a/kong/db/declarative/init.lua +++ b/kong/db/declarative/init.lua @@ -1,7 +1,6 @@ local pl_file = require "pl.file" local lyaml = require "lyaml" local cjson = require "cjson.safe" -local utils = require "kong.tools.utils" local declarative_config = require "kong.db.schema.others.declarative_config" local on_the_fly_migration = require "kong.db.declarative.migrations.route_path" local declarative_import = require "kong.db.declarative.import" @@ -17,7 +16,7 @@ local type = type local null = ngx.null local md5 = ngx.md5 local pairs = pairs -local yield = utils.yield +local yield = require("kong.tools.yield").yield local cjson_decode = cjson.decode local cjson_encode = cjson.encode local convert_nulls = declarative_export.convert_nulls diff --git a/kong/db/schema/init.lua b/kong/db/schema/init.lua index 0a3db763ad6d..b895e141f50f 100644 --- a/kong/db/schema/init.lua +++ b/kong/db/schema/init.lua @@ -19,7 +19,7 @@ local insert = table.insert local format = 
string.format local unpack = unpack local assert = assert -local yield = utils.yield +local yield = require("kong.tools.yield").yield local pairs = pairs local pcall = pcall local floor = math.floor diff --git a/kong/db/schema/others/declarative_config.lua b/kong/db/schema/others/declarative_config.lua index 00fa540c5cd6..15d291f6c0b3 100644 --- a/kong/db/schema/others/declarative_config.lua +++ b/kong/db/schema/others/declarative_config.lua @@ -13,7 +13,7 @@ local null = ngx.null local type = type local next = next local pairs = pairs -local yield = utils.yield +local yield = require("kong.tools.yield").yield local ipairs = ipairs local insert = table.insert local concat = table.concat diff --git a/kong/db/strategies/off/init.lua b/kong/db/strategies/off/init.lua index 2edceff6863d..38a59634946f 100644 --- a/kong/db/strategies/off/init.lua +++ b/kong/db/strategies/off/init.lua @@ -2,7 +2,7 @@ local declarative_config = require "kong.db.schema.others.declarative_config" local workspaces = require "kong.workspaces" local lmdb = require("resty.lmdb") local marshaller = require("kong.db.declarative.marshaller") -local yield = require("kong.tools.utils").yield +local yield = require("kong.tools.yield").yield local unique_field_key = require("kong.db.declarative").unique_field_key local kong = kong diff --git a/kong/pdk/vault.lua b/kong/pdk/vault.lua index efc306d48915..81d154b93932 100644 --- a/kong/pdk/vault.lua +++ b/kong/pdk/vault.lua @@ -16,14 +16,12 @@ local lrucache = require "resty.lrucache" local isempty = require "table.isempty" local buffer = require "string.buffer" local clone = require "table.clone" -local utils = require "kong.tools.utils" -local string_tools = require "kong.tools.string" local cjson = require("cjson.safe").new() -local yield = utils.yield -local get_updated_now_ms = utils.get_updated_now_ms -local replace_dashes = string_tools.replace_dashes +local yield = require("kong.tools.yield").yield +local get_updated_now_ms = require("kong.tools.time").get_updated_now_ms +local replace_dashes = require("kong.tools.string").replace_dashes local ngx = ngx diff --git a/kong/plugins/prometheus/exporter.lua b/kong/plugins/prometheus/exporter.lua index fd219d66b380..02eb4ba3e969 100644 --- a/kong/plugins/prometheus/exporter.lua +++ b/kong/plugins/prometheus/exporter.lua @@ -5,7 +5,7 @@ local lower = string.lower local ngx_timer_pending_count = ngx.timer.pending_count local ngx_timer_running_count = ngx.timer.running_count local balancer = require("kong.runloop.balancer") -local yield = require("kong.tools.utils").yield +local yield = require("kong.tools.yield").yield local get_all_upstreams = balancer.get_all_upstreams if not balancer.get_all_upstreams then -- API changed since after Kong 2.5 get_all_upstreams = require("kong.runloop.balancer.upstreams").get_all_upstreams @@ -367,7 +367,7 @@ local function metric_data(write_fn) for key, upstream_id in pairs(upstreams_dict) do -- long loop maybe spike proxy request latency, so we -- need yield to avoid blocking other requests - -- kong.tools.utils.yield(true) + -- kong.tools.yield.yield(true) yield(true, phase) local _, upstream_name = key:match("^([^:]*):(.-)$") upstream_name = upstream_name and upstream_name or key diff --git a/kong/plugins/prometheus/prometheus.lua b/kong/plugins/prometheus/prometheus.lua index fe3de338c55e..796a76c8813e 100644 --- a/kong/plugins/prometheus/prometheus.lua +++ b/kong/plugins/prometheus/prometheus.lua @@ -68,7 +68,7 @@ local tostring = tostring local tonumber = tonumber local table_sort = 
table.sort local tb_new = require("table.new") -local yield = require("kong.tools.utils").yield +local yield = require("kong.tools.yield").yield local Prometheus = {} diff --git a/kong/router/atc.lua b/kong/router/atc.lua index e67a207d1973..55064e1e34d7 100644 --- a/kong/router/atc.lua +++ b/kong/router/atc.lua @@ -10,7 +10,7 @@ local lrucache = require("resty.lrucache") local server_name = require("ngx.ssl").server_name local tb_new = require("table.new") local utils = require("kong.router.utils") -local yield = require("kong.tools.utils").yield +local yield = require("kong.tools.yield").yield local type = type diff --git a/kong/router/traditional.lua b/kong/router/traditional.lua index 7660294e38be..a531983b8bcc 100644 --- a/kong/router/traditional.lua +++ b/kong/router/traditional.lua @@ -34,7 +34,7 @@ local type = type local max = math.max local band = bit.band local bor = bit.bor -local yield = require("kong.tools.utils").yield +local yield = require("kong.tools.yield").yield local server_name = require("ngx.ssl").server_name diff --git a/kong/runloop/handler.lua b/kong/runloop/handler.lua index 8d8630d94fdb..70c64a34a921 100644 --- a/kong/runloop/handler.lua +++ b/kong/runloop/handler.lua @@ -51,6 +51,7 @@ local http_version = ngx.req.http_version local request_id_get = request_id.get local escape = require("kong.tools.uri").escape local encode = require("string.buffer").encode +local yield = require("kong.tools.yield").yield local req_dyn_hook_run_hooks = req_dyn_hook.run_hooks @@ -1008,7 +1009,7 @@ return { if rebuild_transaction_id then -- Yield to process any pending invalidations - utils.yield() + yield() log(DEBUG, "configuration processing completed for transaction ID " .. rebuild_transaction_id) global.CURRENT_TRANSACTION_ID = rebuild_transaction_id diff --git a/kong/tools/utils.lua b/kong/tools/utils.lua index 4dce9e2f3016..6e3db7a9d205 100644 --- a/kong/tools/utils.lua +++ b/kong/tools/utils.lua @@ -19,7 +19,6 @@ local _M = {} do local modules = { "kong.tools.table", - "kong.tools.yield", "kong.tools.string", "kong.tools.uuid", "kong.tools.rand", From 2adad05525868d948a16b3ce5953a9846787720f Mon Sep 17 00:00:00 2001 From: Guilherme Salazar Date: Mon, 11 Dec 2023 14:17:48 -0300 Subject: [PATCH 194/249] tests(plugins): refactor tests to address flakiness --- .../17-ip-restriction/02-access_spec.lua | 46 +++++++++---------- 1 file changed, 21 insertions(+), 25 deletions(-) diff --git a/spec/03-plugins/17-ip-restriction/02-access_spec.lua b/spec/03-plugins/17-ip-restriction/02-access_spec.lua index d487c957bca2..84bb293ca05d 100644 --- a/spec/03-plugins/17-ip-restriction/02-access_spec.lua +++ b/spec/03-plugins/17-ip-restriction/02-access_spec.lua @@ -581,19 +581,17 @@ for _, strategy in helpers.each_strategy() do }) assert.res_status(200, res) - local cache_key = db.plugins:cache_key(plugin) - - helpers.wait_for_invalidation(cache_key) - - local res = assert(proxy_client:send { - method = "GET", - path = "/request", - headers = { - ["Host"] = "ip-restriction2.test" - } - }) - local body = assert.res_status(403, res) - assert.matches("IP address not allowed", body) + helpers.pwait_until(function() + res = assert(proxy_client:send { + method = "GET", + path = "/request", + headers = { + ["Host"] = "ip-restriction2.test" + } + }) + local body = assert.res_status(403, res) + assert.matches("IP address not allowed", body) + end) res = assert(admin_client:send { method = "PATCH", @@ -607,18 +605,16 @@ for _, strategy in helpers.each_strategy() do }) assert.res_status(200, res) - 
local cache_key = db.plugins:cache_key(plugin) - - helpers.wait_for_invalidation(cache_key) - - local res = assert(proxy_client:send { - method = "GET", - path = "/request", - headers = { - ["Host"] = "ip-restriction2.test" - } - }) - assert.res_status(200, res) + helpers.pwait_until(function() + res = assert(proxy_client:send { + method = "GET", + path = "/request", + headers = { + ["Host"] = "ip-restriction2.test" + } + }) + assert.res_status(200, res) + end) end) describe("#regression", function() From 1f97197bfa59d476057244423187d3f502ba3286 Mon Sep 17 00:00:00 2001 From: Isa Farnik Date: Tue, 12 Dec 2023 13:44:24 -0800 Subject: [PATCH 195/249] feat(gha): pass version as cloudsmith tags (#12175) (cherry picked from commit 851ebcf5e65d6ba12aa5026a453e10f978f95ceb) --- .github/workflows/release.yml | 12 ++++++++++++ scripts/release-kong.sh | 22 ++++++++++++++++++---- 2 files changed, 30 insertions(+), 4 deletions(-) diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index e81e4e5c3e23..2c0a1cd5f130 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -638,6 +638,7 @@ jobs: ARTIFACT_VERSION: ${{ matrix.artifact-version }} ARTIFACT_TYPE: ${{ matrix.artifact-type }} ARTIFACT: ${{ matrix.artifact }} + INPUT_VERSION: ${{ github.event.inputs.version }} PACKAGE_TYPE: ${{ matrix.package }} KONG_RELEASE_LABEL: ${{ needs.metadata.outputs.release-label }} VERBOSE: ${{ runner.debug == '1' && '1' || '' }} @@ -649,6 +650,17 @@ jobs: run: | sha256sum bazel-bin/pkg/* + # set the version input as tags passed to release-scripts + # note: release-scripts rejects user tags if missing internal flag + # + # this can be a comma-sepratated list of tags to apply + if [[ "$OFFICIAL_RELEASE" == 'false' ]]; then + if echo "$INPUT_VERSION" | grep -qs -E 'rc|alpha|beta|nightly'; then + PACKAGE_TAGS="$INPUT_VERSION" + export PACKAGE_TAGS + fi + fi + scripts/release-kong.sh release-images: diff --git a/scripts/release-kong.sh b/scripts/release-kong.sh index f62369ec5af4..9c0a4f1cd44f 100755 --- a/scripts/release-kong.sh +++ b/scripts/release-kong.sh @@ -102,18 +102,32 @@ function push_package () { dist_version="--dist-version jammy" fi + # test for sanitized github actions input + if [[ -n "$(echo "$PACKAGE_TAGS" | tr -d 'a-zA-Z0-9._,')" ]]; then + echo 'invalid characters in PACKAGE_TAGS' + echo "passed to script: ${PACKAGE_TAGS}" + tags='' + else + tags="$PACKAGE_TAGS" + fi + set -x + release_args='' + + if [ -n "${tags:-}" ]; then + release_args="${release_args} --tags ${tags}" + fi - local release_args="--package-type gateway" + release_args="${release_args} --package-type gateway" if [[ "$EDITION" == "enterprise" ]]; then - release_args="$release_args --enterprise" + release_args="${release_args} --enterprise" fi # pre-releases go to `/internal/` if [[ "$OFFICIAL_RELEASE" == "true" ]]; then - release_args="$release_args --publish" + release_args="${release_args} --publish" else - release_args="$release_args --internal" + release_args="${release_args} --internal" fi docker run \ From e98a938f43150321e6bb835718f7a5d6450325bf Mon Sep 17 00:00:00 2001 From: Xiaochen Date: Wed, 13 Dec 2023 16:07:12 +0800 Subject: [PATCH 196/249] style(dns): minor code style clean (#12192) KAG-3329 --- kong/resty/dns/client.lua | 70 ++++++++++++--------------------------- 1 file changed, 22 insertions(+), 48 deletions(-) diff --git a/kong/resty/dns/client.lua b/kong/resty/dns/client.lua index c3f460d4b892..fcc92a4217d7 100644 --- a/kong/resty/dns/client.lua +++ 
b/kong/resty/dns/client.lua @@ -639,7 +639,6 @@ _M.init = function(options) end end - -- other options badTtl = options.badTtl or 1 @@ -711,6 +710,7 @@ local function parseAnswer(qname, qtype, answers, try_list) return true end + -- executes 1 individual query. -- This query will not be synchronized, every call will be 1 query. -- @param qname the name to query for @@ -1045,15 +1045,9 @@ end local function search_iter(qname, qtype) local _, dots = qname:gsub("%.", "") - local type_list, type_start, type_end - if qtype then - type_list = { qtype } - type_start = 0 - else - type_list = typeOrder - type_start = 0 -- just start at the beginning - end - type_end = #type_list + local type_list = qtype and { qtype } or typeOrder + local type_start = 0 + local type_end = #type_list local i_type = type_start local search do @@ -1167,9 +1161,6 @@ local function resolve(qname, r_opts, dnsCacheOnly, try_list) if try_list then -- check for recursion if try_list["(short)"..qname..":"..tostring(qtype)] then - -- luacheck: push no unused - records = nil - -- luacheck: pop err = "recursion detected" add_status_to_try_list(try_list, err) return nil, err, try_list @@ -1180,9 +1171,6 @@ local function resolve(qname, r_opts, dnsCacheOnly, try_list) if records.expired then -- if the record is already stale/expired we have to traverse the -- iterator as that is required to start the async refresh queries - -- luacheck: push no unused - records = nil - -- luacheck: pop try_list = add_status_to_try_list(try_list, "stale") else @@ -1207,8 +1195,8 @@ local function resolve(qname, r_opts, dnsCacheOnly, try_list) if name_type == "ipv4" then -- if no qtype is given, we're supposed to search, so forcing TYPE_A is safe records, _, try_list = check_ipv4(qname, qtype or _M.TYPE_A, try_list) - else + else -- it is 'ipv6' -- if no qtype is given, we're supposed to search, so forcing TYPE_AAAA is safe records, _, try_list = check_ipv6(qname, qtype or _M.TYPE_AAAA, try_list) @@ -1228,34 +1216,27 @@ local function resolve(qname, r_opts, dnsCacheOnly, try_list) for try_name, try_type in search_iter(qname, qtype) do if try_list and try_list[try_name..":"..try_type] then -- recursion, been here before - records = nil err = "recursion detected" - - else - -- go look it up - opts.qtype = try_type - records, err, try_list = lookup(try_name, opts, dnsCacheOnly, try_list) + break end - if not records then -- luacheck: ignore + -- go look it up + opts.qtype = try_type + records, err, try_list = lookup(try_name, opts, dnsCacheOnly, try_list) + if not records then -- An error has occurred, terminate the lookup process. We don't want to try other record types because -- that would potentially cause us to respond with wrong answers (i.e. the contents of an A record if the -- query for the SRV record failed due to a network error). 
- goto failed + break + end - elseif records.errcode then + if records.errcode then -- dns error: fall through to the next entry in our search sequence err = ("dns server error: %s %s"):format(records.errcode, records.errstr) - -- luacheck: push no unused - records = nil - -- luacheck: pop elseif #records == 0 then -- empty: fall through to the next entry in our search sequence err = ("dns client error: %s %s"):format(101, clientErrors[101]) - -- luacheck: push no unused - records = nil - -- luacheck: pop else -- we got some records, update the cache @@ -1289,16 +1270,13 @@ local function resolve(qname, r_opts, dnsCacheOnly, try_list) end if records then - -- we have a result - -- cache it under its shortname if not dnsCacheOnly then cacheShortInsert(records, qname, qtype) end - -- check if we need to dereference a CNAME + -- dereference CNAME if records[1].type == _M.TYPE_CNAME and qtype ~= _M.TYPE_CNAME then - -- dereference CNAME opts.qtype = nil add_status_to_try_list(try_list, "dereferencing CNAME") return resolve(records[1].cname, opts, dnsCacheOnly, try_list) @@ -1311,7 +1289,6 @@ local function resolve(qname, r_opts, dnsCacheOnly, try_list) -- we had some error, record it in the status list add_status_to_try_list(try_list, err) end - ::failed:: -- we failed, clear cache and return last error if not dnsCacheOnly then @@ -1320,6 +1297,7 @@ local function resolve(qname, r_opts, dnsCacheOnly, try_list) return nil, err, try_list end + -- Create a metadata cache, using weak keys so it follows the dns record cache. -- The cache will hold pointers and lists for (weighted) round-robin schemes local metadataCache = setmetatable({}, { __mode = "k" }) @@ -1516,17 +1494,16 @@ local function toip(qname, port, dnsCacheOnly, try_list) return nil, err, try_list end ---print(tostring(try_list)) if rec[1].type == _M.TYPE_SRV then local entry = rec[roundRobinW(rec)] -- our SRV entry might still contain a hostname, so recurse, with found port number local srvport = (entry.port ~= 0 and entry.port) or port -- discard port if it is 0 add_status_to_try_list(try_list, "dereferencing SRV") return toip(entry.target, srvport, dnsCacheOnly, try_list) - else - -- must be A or AAAA - return rec[roundRobin(rec)].address, port, try_list end + + -- must be A or AAAA + return rec[roundRobin(rec)].address, port, try_list end @@ -1550,16 +1527,12 @@ local function connect(sock, host, port, sock_opts) if not targetIp then return nil, tostring(targetPort) .. ". Tried: " .. tostring(tryList) - else - -- need to do the extra check here: https://github.com/openresty/lua-nginx-module/issues/860 - if not sock_opts then - return sock:connect(targetIp, targetPort) - else - return sock:connect(targetIp, targetPort, sock_opts) - end end + + return sock:connect(targetIp, targetPort, sock_opts) end + --- Implements udp-setpeername method with dns resolution. -- This builds on top of `toip`. If the name resolves to an SRV record, -- the port returned by the DNS server will override the one provided. 
@@ -1581,6 +1554,7 @@ local function setpeername(sock, host, port) return sock:connect(targetIp, targetPort) end + -- export local functions _M.resolve = resolve _M.toip = toip From 32996ab5d5983e00dbbf02961a1a23bac60c3ce4 Mon Sep 17 00:00:00 2001 From: Aapo Talvensaari Date: Wed, 13 Dec 2023 11:30:45 +0200 Subject: [PATCH 197/249] chore(tests): remove boring ssl related tests (#12171) Signed-off-by: Aapo Talvensaari --- build/tests/01-base.sh | 9 ++------- spec/helpers/ssl.lua | 21 +++++++-------------- 2 files changed, 9 insertions(+), 21 deletions(-) diff --git a/build/tests/01-base.sh b/build/tests/01-base.sh index d19488e08cf8..7786204d60f8 100755 --- a/build/tests/01-base.sh +++ b/build/tests/01-base.sh @@ -107,13 +107,8 @@ assert_exec 0 'root' "/usr/local/openresty/bin/resty -e 'print(jit.version)' | g ### # check which ssl openresty is using -if docker_exec root '/usr/local/openresty/bin/openresty -V 2>&1' | grep 'BoringSSL'; then - msg_test 'openresty binary uses expected boringssl version' - assert_exec 0 'root' "/usr/local/openresty/bin/openresty -V 2>&1 | grep '1.1.0'" -else - msg_test 'openresty binary uses expected openssl version' - assert_exec 0 'root' "/usr/local/openresty/bin/openresty -V 2>&1 | grep '${OPENSSL}'" -fi +msg_test 'openresty binary uses expected openssl version' +assert_exec 0 'root' "/usr/local/openresty/bin/openresty -V 2>&1 | grep '${OPENSSL}'" msg_test 'openresty binary is linked to kong-provided ssl libraries' assert_exec 0 'root' "ldd /usr/local/openresty/bin/openresty | grep -E 'libssl.so.*kong/lib'" diff --git a/spec/helpers/ssl.lua b/spec/helpers/ssl.lua index 204403cf5264..03714ce4badc 100644 --- a/spec/helpers/ssl.lua +++ b/spec/helpers/ssl.lua @@ -2,7 +2,6 @@ local ffi = require "ffi" local C = ffi.C local bit = require "bit" local format_error = require("resty.openssl.err").format_error -local BORINGSSL = require("resty.openssl.version").BORINGSSL require "resty.openssl.include.ssl" ffi.cdef [[ @@ -76,24 +75,18 @@ local errors = { SSL_ERROR_WANT_RETRY_VERIFY = 12, } +local SOCKET_INVALID = -1 +local SSL_FILETYPE_PEM = 1 + local errors_literal = {} for k, v in pairs(errors) do errors_literal[v] = k end -local SOCKET_INVALID = -1 - - -local ssl_set_mode -if BORINGSSL then - ssl_set_mode = function(...) return C.SSL_set_mode(...) end -else - local SSL_CTRL_MODE = 33 - ssl_set_mode = function(ctx, mode) return C.SSL_ctrl(ctx, SSL_CTRL_MODE, mode, nil) end +local function ssl_set_mode(ctx, mode) + return C.SSL_ctrl(ctx, 33, mode, nil) end -local SSL_FILETYPE_PEM = 1 - local function ssl_ctx_new(cfg) if cfg.protocol and cfg.protocol ~= "any" then return nil, "protocol other than 'any' is currently not supported" @@ -166,10 +159,10 @@ function SSL.wrap(sock, cfg) ctx = s, fd = fd, }, ssl_mt) - + return self, nil end - return nil, err + return nil, err end local function socket_waitfd(fd, events, timeout) From 09a47fc0132452691fa7a834f8a043866d98f2ee Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 13 Dec 2023 12:13:37 +0200 Subject: [PATCH 198/249] chore(deps): bump actions/setup-python from 4 to 5 (#12183) Bumps [actions/setup-python](https://github.com/actions/setup-python) from 4 to 5. - [Release notes](https://github.com/actions/setup-python/releases) - [Commits](https://github.com/actions/setup-python/compare/v4...v5) --- updated-dependencies: - dependency-name: actions/setup-python dependency-type: direct:production update-type: version-update:semver-major ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/perf.yml | 2 +- .github/workflows/release.yml | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/perf.yml b/.github/workflows/perf.yml index 2129d3bee553..d71b88519039 100644 --- a/.github/workflows/perf.yml +++ b/.github/workflows/perf.yml @@ -251,7 +251,7 @@ jobs: inkscape --export-area-drawing --export-png="${i%.*}.png" --export-dpi=300 -b FFFFFF $i done - - uses: actions/setup-python@v4 + - uses: actions/setup-python@v5 with: python-version: '3.10' cache: 'pip' diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 2c0a1cd5f130..0794df858a5b 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -296,7 +296,7 @@ jobs: path: bazel-bin/pkg - name: Install Python - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: python-version: '3.11' cache: 'pip' # caching pip dependencies @@ -424,7 +424,7 @@ jobs: - uses: actions/checkout@v4 - name: Install Python - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: python-version: '3.11' cache: 'pip' # caching pip dependencies From a93b5e8e2615880fdf085432e7a417322c67a32b Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 13 Dec 2023 12:13:46 +0200 Subject: [PATCH 199/249] chore(deps): bump tj-actions/changed-files from 40.1.1 to 40.2.2 (#12185) Bumps [tj-actions/changed-files](https://github.com/tj-actions/changed-files) from 40.1.1 to 40.2.2. - [Release notes](https://github.com/tj-actions/changed-files/releases) - [Changelog](https://github.com/tj-actions/changed-files/blob/main/HISTORY.md) - [Commits](https://github.com/tj-actions/changed-files/compare/25ef3926d147cd02fc7e931c1ef50772bbb0d25d...94549999469dbfa032becf298d95c87a14c34394) --- updated-dependencies: - dependency-name: tj-actions/changed-files dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/changelog-requirement.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/changelog-requirement.yml b/.github/workflows/changelog-requirement.yml index 891f41451f55..9169a9317557 100644 --- a/.github/workflows/changelog-requirement.yml +++ b/.github/workflows/changelog-requirement.yml @@ -21,7 +21,7 @@ jobs: - name: Find changelog files id: changelog-list - uses: tj-actions/changed-files@25ef3926d147cd02fc7e931c1ef50772bbb0d25d # v37 + uses: tj-actions/changed-files@94549999469dbfa032becf298d95c87a14c34394 # v37 with: files_yaml: | changelogs: From 6e5cc45fc550f6b27179ea6005737c277d0b9709 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 13 Dec 2023 12:14:12 +0200 Subject: [PATCH 200/249] chore(deps): bump actions/labeler from 4 to 5 (#12186) Bumps [actions/labeler](https://github.com/actions/labeler) from 4 to 5. - [Release notes](https://github.com/actions/labeler/releases) - [Commits](https://github.com/actions/labeler/compare/v4...v5) --- updated-dependencies: - dependency-name: actions/labeler dependency-type: direct:production update-type: version-update:semver-major ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/label.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/label.yml b/.github/workflows/label.yml index 4613569074b3..d23c4d403f5a 100644 --- a/.github/workflows/label.yml +++ b/.github/workflows/label.yml @@ -17,6 +17,6 @@ jobs: pull-requests: write steps: - - uses: actions/labeler@v4 + - uses: actions/labeler@v5 with: repo-token: "${{ secrets.GITHUB_TOKEN }}" From def950ed80d251de63e88b523961e3ca4a9377be Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 13 Dec 2023 12:14:31 +0200 Subject: [PATCH 201/249] chore(deps): bump actions/stale from 8 to 9 (#12184) Bumps [actions/stale](https://github.com/actions/stale) from 8 to 9. - [Release notes](https://github.com/actions/stale/releases) - [Changelog](https://github.com/actions/stale/blob/main/CHANGELOG.md) - [Commits](https://github.com/actions/stale/compare/v8...v9) --- updated-dependencies: - dependency-name: actions/stale dependency-type: direct:production update-type: version-update:semver-major ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/community-stale.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/community-stale.yml b/.github/workflows/community-stale.yml index 395aa82978ea..f6cba0a64528 100644 --- a/.github/workflows/community-stale.yml +++ b/.github/workflows/community-stale.yml @@ -10,7 +10,7 @@ jobs: issues: write pull-requests: write steps: - - uses: actions/stale@v8 + - uses: actions/stale@v9 with: days-before-stale: 14 days-before-close: 7 From f6fbe4458403999fd0b4fc3fa52e4e043e969ad1 Mon Sep 17 00:00:00 2001 From: tzssangglass Date: Wed, 13 Dec 2023 18:34:35 +0800 Subject: [PATCH 202/249] fix(package): declare ownership of all files and directories installed by package (#12162) When installing Kong via rpm/deb and then uninstalling it, there may be residual files and directories left in the system from the installation. The current commit supports cleaning up these leftover files by declare ownership of those files in package manifest. 
Fix: [FTI-5553](https://konghq.atlassian.net/browse/FTI-5553) Signed-off-by: tzssangglass --- .github/workflows/release.yml | 6 +++ build/package/nfpm.yaml | 8 +++- build/tests/04-uninstall.sh | 53 ++++++++++++++++++++++++ changelog/unreleased/kong/postremove.yml | 3 ++ 4 files changed, 68 insertions(+), 2 deletions(-) create mode 100755 build/tests/04-uninstall.sh create mode 100644 changelog/unreleased/kong/postremove.yml diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 0794df858a5b..0dced5a70e25 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -593,6 +593,12 @@ jobs: VERBOSE: ${{ runner.debug == '1' && '1' || '' }} run: build/tests/03-http2-admin-api.sh + - name: Smoke Tests - Uninstall Tests + env: + VERBOSE: ${{ runner.debug == '1' && '1' || '' }} + BUILD_LABEL: ${{ matrix.label }} + run: build/tests/04-uninstall.sh + release-packages: name: Release Packages - ${{ matrix.label }} - ${{ needs.metadata.outputs.release-desc }} needs: [metadata, build-packages, build-images, smoke-tests] diff --git a/build/package/nfpm.yaml b/build/package/nfpm.yaml index 2650569fc6da..388b7d0be89f 100644 --- a/build/package/nfpm.yaml +++ b/build/package/nfpm.yaml @@ -15,25 +15,29 @@ license: "Apache-2.0" contents: - src: nfpm-prefix/bin dst: /usr/local/bin +- src: kong/include + dst: /usr/local/kong/include + type: tree - src: nfpm-prefix/kong dst: /usr/local/kong type: tree - src: nfpm-prefix/lib dst: /usr/local/lib + type: tree - src: nfpm-prefix/etc/luarocks dst: /usr/local/etc/luarocks - src: nfpm-prefix/openresty dst: /usr/local/openresty + type: tree - src: nfpm-prefix/share dst: /usr/local/share + type: tree - src: nfpm-prefix/etc/kong dst: /etc/kong - src: bin/kong dst: /usr/local/bin/kong - src: bin/kong-health dst: /usr/local/bin/kong-health -- src: kong/include - dst: /usr/local/kong/include - src: build/package/kong.service dst: /lib/systemd/system/kong.service - src: build/package/kong.logrotate diff --git a/build/tests/04-uninstall.sh b/build/tests/04-uninstall.sh new file mode 100755 index 000000000000..5bb2b270eac9 --- /dev/null +++ b/build/tests/04-uninstall.sh @@ -0,0 +1,53 @@ +#!/usr/bin/env bash + +if [ -n "${VERBOSE:-}" ]; then + set -x +fi + +source .requirements +source build/tests/util.sh + +remove_kong_command() { + local pkg_name="" + local remove_cmd="" + + case "${BUILD_LABEL}" in + "ubuntu"| "debian") + remove_cmd="apt-get remove -y kong" + ;; + "rhel") + remove_cmd="yum remove -y kong" + ;; + *) + return 1 + esac + + echo "$remove_cmd" +} + +msg_test '"kong" remove command' + +remove_command=$(remove_kong_command) +if [ $? 
-eq 0 ]; then + docker_exec root "$remove_command" +else + err_exit "can not find kong package" +fi + +# kong would create include and lib directory in /usr/local/kong +# but in ubuntu, kong would use /usr/local/kong as default prefix +# so after remove kong, /usr/local/kong would left logs and conf files +# we only check /usr/local/kong/include and /usr/local/kong/lib +msg_test "/usr/local/kong/include has been removed after uninstall" +assert_exec 1 'kong' "test -d /usr/local/kong/include" + +msg_test "/usr/local/kong/lib has been removed after uninstall" +assert_exec 1 'kong' "test -d /usr/local/kong/lib" + +# if /usr/local/share/lua/5.1 has other files, it will not be removed +# only remove files which are installed by kong +msg_test "/usr/local/share/lua/5.1 has been removed after uninstall" +assert_exec 1 'kong' "test -d /usr/local/share/lua/5.1" + +msg_test "/usr/local/openresty has been removed after uninstall" +assert_exec 1 'kong' "test -d /usr/local/openresty" diff --git a/changelog/unreleased/kong/postremove.yml b/changelog/unreleased/kong/postremove.yml new file mode 100644 index 000000000000..c3e0a805d12e --- /dev/null +++ b/changelog/unreleased/kong/postremove.yml @@ -0,0 +1,3 @@ +message: "cleanup of rpm/deb residual files after uninstall" +type: feature +scope: Core From 2e0b5acd4b3108d23381f56bc35c2349452d6df1 Mon Sep 17 00:00:00 2001 From: Water-Melon Date: Wed, 13 Dec 2023 16:18:02 +0800 Subject: [PATCH 203/249] fix(tests): execute all shell commands using resty.shell Due to the use of pl.utils.execute and os.execute, the execution time on both Lua code and the shell commands it launches exceeded the timeout set for the TCP connection. This resulted in abnormal data reception for some TCP connections. KAG-3157 --- .../02-integration/02-cmd/09-prepare_spec.lua | 6 +- .../02-cmd/10-migrations_spec.lua | 4 +- spec/02-integration/02-cmd/11-config_spec.lua | 5 +- spec/02-integration/02-cmd/15-utils_spec.lua | 3 +- .../05-proxy/04-plugins_triggering_spec.lua | 5 +- .../02-core_entities_invalidations_spec.lua | 4 +- .../17-admin_gui/01-admin-gui-path_spec.lua | 5 +- .../17-admin_gui/03-reports_spec.lua | 3 +- spec/03-plugins/03-http-log/01-log_spec.lua | 3 +- .../23-rate-limiting/04-access_spec.lua | 3 +- .../26-prometheus/02-access_spec.lua | 7 ++- .../26-prometheus/04-status_api_spec.lua | 5 +- .../27-aws-lambda/06-request-util_spec.lua | 3 +- .../37-opentelemetry/05-otelcol_spec.lua | 3 +- .../01-rps/06-core_entities_crud_spec.lua | 3 +- .../07-upstream_lock_regression_spec.lua | 3 +- spec/04-perf/02-flamegraph/01-simple_spec.lua | 5 +- spec/04-perf/02-flamegraph/05-prometheus.lua | 3 +- .../07-upstream_lock_regression_spec.lua | 3 +- spec/fixtures/https_server.lua | 5 +- spec/helpers.lua | 60 +++++++++---------- spec/helpers/http_mock/nginx_instance.lua | 4 +- spec/helpers/perf/charts.lua | 3 +- spec/helpers/perf/utils.lua | 3 +- 24 files changed, 83 insertions(+), 68 deletions(-) diff --git a/spec/02-integration/02-cmd/09-prepare_spec.lua b/spec/02-integration/02-cmd/09-prepare_spec.lua index 99110f966186..503b9c5b13c9 100644 --- a/spec/02-integration/02-cmd/09-prepare_spec.lua +++ b/spec/02-integration/02-cmd/09-prepare_spec.lua @@ -1,6 +1,6 @@ local helpers = require "spec.helpers" local signals = require "kong.cmd.utils.nginx_signals" -local pl_utils = require "pl.utils" +local shell = require "resty.shell" local fmt = string.format @@ -115,7 +115,7 @@ describe("kong prepare", function() assert.is_nil(err) local cmd = fmt("%s -p %s -c %s", nginx_bin, TEST_PREFIX, 
"nginx.conf") - local ok, _, _, stderr = pl_utils.executeex(cmd) + local ok, _, stderr = shell.run(cmd, nil, 0) assert.equal("", stderr) assert.truthy(ok) @@ -149,7 +149,7 @@ describe("kong prepare", function() assert.is_nil(err) local cmd = fmt("%s -p %s -c %s", nginx_bin, TEST_PREFIX, "nginx.conf") - local ok, _, _, stderr = pl_utils.executeex(cmd) + local ok, _, stderr = shell.run(cmd, nil, 0) assert.matches("kong_tests_unknown", stderr) assert.falsy(ok) diff --git a/spec/02-integration/02-cmd/10-migrations_spec.lua b/spec/02-integration/02-cmd/10-migrations_spec.lua index bb896f15507d..39bec40711d9 100644 --- a/spec/02-integration/02-cmd/10-migrations_spec.lua +++ b/spec/02-integration/02-cmd/10-migrations_spec.lua @@ -1,8 +1,8 @@ local helpers = require "spec.helpers" -local pl_utils = require "pl.utils" local utils = require "kong.tools.utils" local DB = require "kong.db.init" local tb_clone = require "table.clone" +local shell = require "resty.shell" -- Current number of migrations to execute in a new install @@ -73,7 +73,7 @@ for _, strategy in helpers.each_strategy() do local cmd = string.format(helpers.unindent [[ echo y | %s KONG_DATABASE=%s %s migrations reset --v -c %s ]], lua_path, strategy, helpers.bin_path, helpers.test_conf_path) - local ok, code, _, stderr = pl_utils.executeex(cmd) + local ok, _, stderr, _, code = shell.run(cmd, nil, 0) assert.falsy(ok) assert.same(1, code) assert.match("not a tty", stderr, 1, true) diff --git a/spec/02-integration/02-cmd/11-config_spec.lua b/spec/02-integration/02-cmd/11-config_spec.lua index 9322d3c9d42e..4096b2189bc2 100644 --- a/spec/02-integration/02-cmd/11-config_spec.lua +++ b/spec/02-integration/02-cmd/11-config_spec.lua @@ -3,6 +3,7 @@ local constants = require "kong.constants" local cjson = require "cjson" local lyaml = require "lyaml" local lfs = require "lfs" +local shell = require "resty.shell" local function sort_by_name(a, b) @@ -692,11 +693,11 @@ describe("kong config", function() local kong_yml_exists = false if lfs.attributes("kong.yml") then kong_yml_exists = true - os.execute("mv kong.yml kong.yml~") + shell.run("mv kong.yml kong.yml~", nil, 0) end finally(function() if kong_yml_exists then - os.execute("mv kong.yml~ kong.yml") + shell.run("mv kong.yml~ kong.yml", nil, 0) else os.remove("kong.yml") end diff --git a/spec/02-integration/02-cmd/15-utils_spec.lua b/spec/02-integration/02-cmd/15-utils_spec.lua index 81a7b5489de1..cb469b51e491 100644 --- a/spec/02-integration/02-cmd/15-utils_spec.lua +++ b/spec/02-integration/02-cmd/15-utils_spec.lua @@ -2,6 +2,7 @@ local signals = require "kong.cmd.utils.nginx_signals" local pl_path = require "pl.path" local pl_file = require "pl.file" local pl_dir = require "pl.dir" +local shell = require "resty.shell" describe("kong cli utils", function() @@ -28,7 +29,7 @@ describe("kong cli utils", function() echo 'nginx version: openresty/%s' >&2]], version )) - assert(os.execute("chmod +x " .. nginx)) + assert(shell.run("chmod +x " .. 
nginx, nil, 0)) return nginx end diff --git a/spec/02-integration/05-proxy/04-plugins_triggering_spec.lua b/spec/02-integration/05-proxy/04-plugins_triggering_spec.lua index 6eb231eecc11..81e544834251 100644 --- a/spec/02-integration/05-proxy/04-plugins_triggering_spec.lua +++ b/spec/02-integration/05-proxy/04-plugins_triggering_spec.lua @@ -3,6 +3,7 @@ local utils = require "kong.tools.utils" local cjson = require "cjson" local pl_path = require "pl.path" local pl_file = require "pl.file" +local shell = require "resty.shell" local LOG_WAIT_TIMEOUT = 10 @@ -410,7 +411,7 @@ for _, strategy in helpers.each_strategy() do before_each(function() helpers.clean_logfile(FILE_LOG_PATH) - os.execute("chmod 0777 " .. FILE_LOG_PATH) + shell.run("chmod 0777 " .. FILE_LOG_PATH, nil, 0) end) it("execute a log plugin", function() @@ -750,7 +751,7 @@ for _, strategy in helpers.each_strategy() do before_each(function() proxy_client = helpers.proxy_client() helpers.clean_logfile(FILE_LOG_PATH) - os.execute("chmod 0777 " .. FILE_LOG_PATH) + shell.run("chmod 0777 " .. FILE_LOG_PATH, nil, 0) end) diff --git a/spec/02-integration/06-invalidations/02-core_entities_invalidations_spec.lua b/spec/02-integration/06-invalidations/02-core_entities_invalidations_spec.lua index c6552713f16e..5a895803bd89 100644 --- a/spec/02-integration/06-invalidations/02-core_entities_invalidations_spec.lua +++ b/spec/02-integration/06-invalidations/02-core_entities_invalidations_spec.lua @@ -409,7 +409,7 @@ for _, strategy in helpers.each_strategy() do describe("ssl_certificates / snis", function() local function get_cert(port, sn) - local pl_utils = require "pl.utils" + local shell = require "resty.shell" local cmd = [[ echo "" | openssl s_client \ @@ -418,7 +418,7 @@ for _, strategy in helpers.each_strategy() do -servername %s \ ]] - local _, _, stderr = pl_utils.executeex(string.format(cmd, port, sn)) + local _, _, stderr = shell.run(string.format(cmd, port, sn), nil, 0) return stderr end diff --git a/spec/02-integration/17-admin_gui/01-admin-gui-path_spec.lua b/spec/02-integration/17-admin_gui/01-admin-gui-path_spec.lua index 90a1096ff9e5..e6b40b620112 100644 --- a/spec/02-integration/17-admin_gui/01-admin-gui-path_spec.lua +++ b/spec/02-integration/17-admin_gui/01-admin-gui-path_spec.lua @@ -2,6 +2,7 @@ local lfs = require "lfs" local pl_path = require "pl.path" local helpers = require "spec.helpers" local test_prefix = helpers.test_conf.prefix +local shell = require "resty.shell" local _ @@ -24,7 +25,7 @@ describe("Admin GUI - admin_gui_path", function() local err, gui_dir_path, gui_index_file_path gui_dir_path = pl_path.join(test_prefix, "gui") - os.execute("rm -rf " .. gui_dir_path) + shell.run("rm -rf " .. gui_dir_path, nil, 0) _, err = lfs.mkdir(gui_dir_path) assert.is_nil(err) @@ -62,7 +63,7 @@ describe("Admin GUI - admin_gui_path", function() local err, gui_dir_path, gui_index_file_path gui_dir_path = pl_path.join(test_prefix, "gui") - os.execute("rm -rf " .. gui_dir_path) + shell.run("rm -rf " .. 
gui_dir_path, nil, 0) _, err = lfs.mkdir(gui_dir_path) assert.is_nil(err) diff --git a/spec/02-integration/17-admin_gui/03-reports_spec.lua b/spec/02-integration/17-admin_gui/03-reports_spec.lua index 927f083a92fe..d8de7e69e487 100644 --- a/spec/02-integration/17-admin_gui/03-reports_spec.lua +++ b/spec/02-integration/17-admin_gui/03-reports_spec.lua @@ -1,6 +1,7 @@ local cjson = require "cjson" local lfs = require "lfs" local pl_path = require "pl.path" +local shell = require "resty.shell" local helpers = require "spec.helpers" local constants = require "kong.constants" @@ -26,7 +27,7 @@ describe("anonymous reports for kong manager", function () local prepare_gui_dir = function () local err, gui_dir_path gui_dir_path = pl_path.join(helpers.test_conf.prefix, "gui") - os.execute("rm -rf " .. gui_dir_path) + shell.run("rm -rf " .. gui_dir_path, nil, 0) err = select(2, lfs.mkdir(gui_dir_path)) assert.is_nil(err) return gui_dir_path diff --git a/spec/03-plugins/03-http-log/01-log_spec.lua b/spec/03-plugins/03-http-log/01-log_spec.lua index 508933487351..55591eb85dde 100644 --- a/spec/03-plugins/03-http-log/01-log_spec.lua +++ b/spec/03-plugins/03-http-log/01-log_spec.lua @@ -478,7 +478,8 @@ for _, strategy in helpers.each_strategy() do it("gracefully handles layer 4 failures", function() -- setup: cleanup logs - os.execute(":> " .. helpers.test_conf.nginx_err_logs) + local shell = require "resty.shell" + shell.run(":> " .. helpers.test_conf.nginx_err_logs, nil, 0) local res = proxy_client:get("/status/200", { headers = { diff --git a/spec/03-plugins/23-rate-limiting/04-access_spec.lua b/spec/03-plugins/23-rate-limiting/04-access_spec.lua index 80636b33f674..9601d4deb243 100644 --- a/spec/03-plugins/23-rate-limiting/04-access_spec.lua +++ b/spec/03-plugins/23-rate-limiting/04-access_spec.lua @@ -1302,7 +1302,8 @@ describe(desc, function () delete_route(admin_client, route) delete_service(admin_client, service) red:close() - os.execute("cat servroot/logs/error.log") + local shell = require "resty.shell" + shell.run("cat servroot/logs/error.log", nil, 0) end) helpers.wait_for_all_config_update({ diff --git a/spec/03-plugins/26-prometheus/02-access_spec.lua b/spec/03-plugins/26-prometheus/02-access_spec.lua index a4a5b8c0038a..36cd7933f554 100644 --- a/spec/03-plugins/26-prometheus/02-access_spec.lua +++ b/spec/03-plugins/26-prometheus/02-access_spec.lua @@ -1,4 +1,5 @@ local helpers = require "spec.helpers" +local shell = require "resty.shell" local tcp_service_port = helpers.get_available_port() local tcp_proxy_port = helpers.get_available_port() @@ -216,7 +217,7 @@ describe("Plugin: prometheus (access)", function() it("does not log error if no service was matched", function() -- cleanup logs - os.execute(":> " .. helpers.test_conf.nginx_err_logs) + shell.run(":> " .. helpers.test_conf.nginx_err_logs, nil, 0) local res = assert(proxy_client:send { method = "POST", @@ -230,7 +231,7 @@ describe("Plugin: prometheus (access)", function() it("does not log error during a scrape", function() -- cleanup logs - os.execute(":> " .. helpers.test_conf.nginx_err_logs) + shell.run(":> " .. 
helpers.test_conf.nginx_err_logs, nil, 0) local res = assert(admin_client:send { method = "GET", @@ -609,4 +610,4 @@ describe("Plugin: prometheus (access) granular metrics switch", function() end) end) -end \ No newline at end of file +end diff --git a/spec/03-plugins/26-prometheus/04-status_api_spec.lua b/spec/03-plugins/26-prometheus/04-status_api_spec.lua index 098d6ab3f3a2..a837ee39e693 100644 --- a/spec/03-plugins/26-prometheus/04-status_api_spec.lua +++ b/spec/03-plugins/26-prometheus/04-status_api_spec.lua @@ -1,4 +1,5 @@ local helpers = require "spec.helpers" +local shell = require "resty.shell" local tcp_service_port = helpers.get_available_port() local tcp_proxy_port = helpers.get_available_port() @@ -260,7 +261,7 @@ describe("Plugin: prometheus (access via status API)", function() it("does not log error if no service was matched", function() -- cleanup logs - os.execute(":> " .. helpers.test_conf.nginx_err_logs) + shell.run(":> " .. helpers.test_conf.nginx_err_logs, nil, 0) local res = assert(proxy_client:send { method = "POST", @@ -274,7 +275,7 @@ describe("Plugin: prometheus (access via status API)", function() it("does not log error during a scrape", function() -- cleanup logs - os.execute(":> " .. helpers.test_conf.nginx_err_logs) + shell.run(":> " .. helpers.test_conf.nginx_err_logs, nil, 0) get_metrics() diff --git a/spec/03-plugins/27-aws-lambda/06-request-util_spec.lua b/spec/03-plugins/27-aws-lambda/06-request-util_spec.lua index 3e52100865aa..dd2e3c84ed3e 100644 --- a/spec/03-plugins/27-aws-lambda/06-request-util_spec.lua +++ b/spec/03-plugins/27-aws-lambda/06-request-util_spec.lua @@ -154,7 +154,8 @@ for _, strategy in helpers.each_strategy() do before_each(function() proxy_client = helpers.proxy_client() admin_client = helpers.admin_client() - os.execute(":> " .. helpers.test_conf.nginx_err_logs) -- clean log files + local shell = require "resty.shell" + shell.run(":> " .. helpers.test_conf.nginx_err_logs, nil, 0) -- clean log files end) after_each(function () diff --git a/spec/03-plugins/37-opentelemetry/05-otelcol_spec.lua b/spec/03-plugins/37-opentelemetry/05-otelcol_spec.lua index c27f4d3663bc..7f8e4a1e3359 100644 --- a/spec/03-plugins/37-opentelemetry/05-otelcol_spec.lua +++ b/spec/03-plugins/37-opentelemetry/05-otelcol_spec.lua @@ -75,7 +75,8 @@ for _, strategy in helpers.each_strategy() do lazy_setup(function() -- clear file - os.execute("cat /dev/null > " .. OTELCOL_FILE_EXPORTER_PATH) + local shell = require "resty.shell" + shell.run("cat /dev/null > " .. 
OTELCOL_FILE_EXPORTER_PATH, nil, 0) setup_instrumentations("all") end) diff --git a/spec/04-perf/01-rps/06-core_entities_crud_spec.lua b/spec/04-perf/01-rps/06-core_entities_crud_spec.lua index 560447c2c333..b63932032ba1 100644 --- a/spec/04-perf/01-rps/06-core_entities_crud_spec.lua +++ b/spec/04-perf/01-rps/06-core_entities_crud_spec.lua @@ -4,6 +4,7 @@ local utils = require "spec.helpers.perf.utils" local workspaces = require "kong.workspaces" local stringx = require "pl.stringx" local tablex = require "pl.tablex" +local shell = require "resty.shell" local fmt = string.format @@ -346,7 +347,7 @@ local gen_wrk_script = function(entity, action) return script end -os.execute("mkdir -p output") +shell.run("mkdir -p output", nil, 0) for _, mode in ipairs(KONG_MODES) do for _, version in ipairs(versions) do diff --git a/spec/04-perf/01-rps/07-upstream_lock_regression_spec.lua b/spec/04-perf/01-rps/07-upstream_lock_regression_spec.lua index 150c2d620809..04e71b1fb6f5 100644 --- a/spec/04-perf/01-rps/07-upstream_lock_regression_spec.lua +++ b/spec/04-perf/01-rps/07-upstream_lock_regression_spec.lua @@ -1,3 +1,4 @@ +local shell = require "resty.shell" local perf = require "spec.helpers.perf" local split = require "pl.stringx".split local utils = require "spec.helpers.perf.utils" @@ -23,7 +24,7 @@ end local LOAD_DURATION = 60 -os.execute("mkdir -p output") +shell.run("mkdir -p output", nil, 0) local function patch(helpers, patch_interval) local status, bsize diff --git a/spec/04-perf/02-flamegraph/01-simple_spec.lua b/spec/04-perf/02-flamegraph/01-simple_spec.lua index a18e72753cf4..ccf15f552f0a 100644 --- a/spec/04-perf/02-flamegraph/01-simple_spec.lua +++ b/spec/04-perf/02-flamegraph/01-simple_spec.lua @@ -1,6 +1,7 @@ local perf = require("spec.helpers.perf") local split = require("pl.stringx").split local utils = require("spec.helpers.perf.utils") +local shell = require "resty.shell" perf.enable_charts(false) -- don't generate charts, we need flamegraphs only perf.use_defaults() @@ -38,7 +39,7 @@ local wrk_script = [[ end ]] -os.execute("mkdir -p output") +shell.run("mkdir -p output", nil, 0) for _, version in ipairs(versions) do describe("perf test for Kong " .. version .. " #simple #no_plugins", function() @@ -112,4 +113,4 @@ for _, version in ipairs(versions) do perf.save_error_log("output/" .. utils.get_test_output_filename() .. 
".log") end) end) -end \ No newline at end of file +end diff --git a/spec/04-perf/02-flamegraph/05-prometheus.lua b/spec/04-perf/02-flamegraph/05-prometheus.lua index 03c5c938ec79..dcc87a20f39d 100644 --- a/spec/04-perf/02-flamegraph/05-prometheus.lua +++ b/spec/04-perf/02-flamegraph/05-prometheus.lua @@ -1,6 +1,7 @@ local perf = require("spec.helpers.perf") local split = require("pl.stringx").split local utils = require("spec.helpers.perf.utils") +local shell = require "resty.shell" perf.enable_charts(false) -- don't generate charts, we need flamegraphs only perf.use_defaults() @@ -37,7 +38,7 @@ local wrk_script = [[ end ]] -os.execute("mkdir -p output") +shell.run("mkdir -p output", nil, 0) local function scrape(helpers, scrape_interval) local starting = ngx.now() diff --git a/spec/04-perf/02-flamegraph/07-upstream_lock_regression_spec.lua b/spec/04-perf/02-flamegraph/07-upstream_lock_regression_spec.lua index fcc6366e097b..9083d9283264 100644 --- a/spec/04-perf/02-flamegraph/07-upstream_lock_regression_spec.lua +++ b/spec/04-perf/02-flamegraph/07-upstream_lock_regression_spec.lua @@ -1,3 +1,4 @@ +local shell = require "resty.shell" local perf = require("spec.helpers.perf") local split = require("pl.stringx").split local utils = require("spec.helpers.perf.utils") @@ -19,7 +20,7 @@ end local LOAD_DURATION = 180 -os.execute("mkdir -p output") +shell.run("mkdir -p output", nil, 0) local function patch(helpers, patch_interval) local status, bsize diff --git a/spec/fixtures/https_server.lua b/spec/fixtures/https_server.lua index c078669819ca..b3c61f4496a6 100644 --- a/spec/fixtures/https_server.lua +++ b/spec/fixtures/https_server.lua @@ -13,6 +13,7 @@ local pl_stringx = require "pl.stringx" local uuid = require "resty.jit-uuid" local http_client = require "resty.http" local cjson = require "cjson" +local shell = require "resty.shell" -- we need this to get random UUIDs @@ -192,7 +193,7 @@ function https_server.start(self) end for _ = 1, HTTPS_SERVER_START_MAX_RETRY do - if os.execute("nginx -c " .. file .. " -p " .. self.base_path) then + if shell.run("nginx -c " .. file .. " -p " .. self.base_path, nil, 0) then return end @@ -213,7 +214,7 @@ function https_server.shutdown(self) end local kill_nginx_cmd = fmt("kill -s TERM %s", tostring(pid)) - local status = os.execute(kill_nginx_cmd) + local status = shell.run(kill_nginx_cmd, nil, 0) if not status then error(fmt("could not kill nginx test server. %s was not removed", self.base_path), 2) end diff --git a/spec/helpers.lua b/spec/helpers.lua index 3bf41149dfa8..256e1139648b 100644 --- a/spec/helpers.lua +++ b/spec/helpers.lua @@ -67,6 +67,7 @@ local pkey = require "resty.openssl.pkey" local nginx_signals = require "kong.cmd.utils.nginx_signals" local log = require "kong.cmd.utils.log" local DB = require "kong.db" +local shell = require "resty.shell" local ffi = require "ffi" local ssl = require "ngx.ssl" local ws_client = require "resty.websocket.client" @@ -104,7 +105,7 @@ end -- @function openresty_ver_num local function openresty_ver_num() local nginx_bin = assert(nginx_signals.find_nginx_bin()) - local _, _, _, stderr = pl_utils.executeex(string.format("%s -V", nginx_bin)) + local _, _, stderr = shell.run(string.format("%s -V", nginx_bin), nil, 0) local a, b, c, d = string.match(stderr or "", "openresty/(%d+)%.(%d+)%.(%d+)%.(%d+)") if not a then @@ -203,7 +204,7 @@ do if not USED_PORTS[port] then USED_PORTS[port] = true - local ok = os.execute("netstat -lnt | grep \":" .. port .. 
"\" > /dev/null") + local ok = shell.run("netstat -lnt | grep \":" .. port .. "\" > /dev/null", nil, 0) if not ok then -- return code of 1 means `grep` did not found the listening port @@ -1114,24 +1115,19 @@ local function http2_client(host, port, tls) cmd = cmd .. " -http1" end - local body_filename + --shell.run does not support '<' if body then - body_filename = pl_path.tmpname() - pl_file.write(body_filename, body) - cmd = cmd .. " -post < " .. body_filename + cmd = cmd .. " -post" end if http2_debug then print("HTTP/2 cmd:\n" .. cmd) end - local ok, _, stdout, stderr = pl_utils.executeex(cmd) + --100MB for retrieving stdout & stderr + local ok, stdout, stderr = shell.run(cmd, body, 0, 1024*1024*100) assert(ok, stderr) - if body_filename then - pl_file.delete(body_filename) - end - if http2_debug then print("HTTP/2 debug:\n") print(stderr) @@ -3147,14 +3143,14 @@ end -- used on an assertion. -- @function execute -- @param cmd command string to execute --- @param pl_returns (optional) boolean: if true, this function will +-- @param returns (optional) boolean: if true, this function will -- return the same values as Penlight's executeex. --- @return if `pl_returns` is true, returns four return values --- (ok, code, stdout, stderr); if `pl_returns` is false, +-- @return if `returns` is true, returns four return values +-- (ok, code, stdout, stderr); if `returns` is false, -- returns either (false, stderr) or (true, stderr, stdout). -function exec(cmd, pl_returns) - local ok, code, stdout, stderr = pl_utils.executeex(cmd) - if pl_returns then +function exec(cmd, returns) + local ok, stdout, stderr, _, code = shell.run(cmd, nil, 0) + if returns then return ok, code, stdout, stderr end if not ok then @@ -3170,14 +3166,14 @@ end -- @param env (optional) table with kong parameters to set as environment -- variables, overriding the test config (each key will automatically be -- prefixed with `KONG_` and be converted to uppercase) --- @param pl_returns (optional) boolean: if true, this function will +-- @param returns (optional) boolean: if true, this function will -- return the same values as Penlight's `executeex`. -- @param env_vars (optional) a string prepended to the command, so -- that arbitrary environment variables may be passed --- @return if `pl_returns` is true, returns four return values --- (ok, code, stdout, stderr); if `pl_returns` is false, +-- @return if `returns` is true, returns four return values +-- (ok, code, stdout, stderr); if `returns` is false, -- returns either (false, stderr) or (true, stderr, stdout). -function kong_exec(cmd, env, pl_returns, env_vars) +function kong_exec(cmd, env, returns, env_vars) cmd = cmd or "" env = env or {} @@ -3214,7 +3210,7 @@ function kong_exec(cmd, env, pl_returns, env_vars) env_vars = string.format("%s KONG_%s='%s'", env_vars, k:upper(), v) end - return exec(env_vars .. " " .. BIN_PATH .. " " .. cmd, pl_returns) + return exec(env_vars .. " " .. BIN_PATH .. " " .. cmd, returns) end @@ -3257,7 +3253,7 @@ local function clean_prefix(prefix) local res, err = pl_path.rmdir(root) -- skip errors when trying to remove mount points - if not res and os.execute("findmnt " .. root .. " 2>&1 >/dev/null") == 0 then + if not res and shell.run("findmnt " .. root .. " 2>&1 >/dev/null", nil, 0) == 0 then return nil, err .. ": " .. root end end @@ -3294,7 +3290,7 @@ local function pid_dead(pid, timeout) local max_time = ngx.now() + (timeout or 10) repeat - if not pl_utils.execute("ps -p " .. pid .. " >/dev/null 2>&1") then + if not shell.run("ps -p " .. 
pid .. " >/dev/null 2>&1", nil, 0) then return true end -- still running, wait some more @@ -3324,7 +3320,7 @@ local function wait_pid(pid_path, timeout, is_retry) end -- Timeout reached: kill with SIGKILL - pl_utils.execute("kill -9 " .. pid .. " >/dev/null 2>&1") + shell.run("kill -9 " .. pid .. " >/dev/null 2>&1", nil, 0) -- Sanity check: check pid again, but don't loop. wait_pid(pid_path, timeout, true) @@ -3431,15 +3427,15 @@ end local function build_go_plugins(path) if pl_path.exists(pl_path.join(path, "go.mod")) then - local ok, _, _, stderr = pl_utils.executeex(string.format( - "cd %s; go mod tidy; go mod download", path)) + local ok, _, stderr = shell.run(string.format( + "cd %s; go mod tidy; go mod download", path), nil, 0) assert(ok, stderr) end for _, go_source in ipairs(pl_dir.getfiles(path, "*.go")) do - local ok, _, _, stderr = pl_utils.executeex(string.format( + local ok, _, stderr = shell.run(string.format( "cd %s; go build %s", path, pl_path.basename(go_source) - )) + ), nil, 0) assert(ok, stderr) end end @@ -3462,7 +3458,7 @@ local function make(workdir, specs) for _, src in ipairs(spec.src) do local srcpath = pl_path.join(workdir, src) if isnewer(targetpath, srcpath) then - local ok, _, _, stderr = pl_utils.executeex(string.format("cd %s; %s", workdir, spec.cmd)) + local ok, _, stderr = shell.run(string.format("cd %s; %s", workdir, spec.cmd), nil, 0) assert(ok, stderr) if isnewer(targetpath, srcpath) then error(string.format("couldn't make %q newer than %q", targetpath, srcpath)) @@ -3685,7 +3681,7 @@ local function stop_kong(prefix, preserve_prefix, preserve_dc, signal, nowait) return nil, err end - local ok, _, _, err = pl_utils.executeex(string.format("kill -%s %d", signal, pid)) + local ok, _, err = shell.run(string.format("kill -%s %d", signal, pid), nil, 0) if not ok then return nil, err end @@ -4133,7 +4129,7 @@ end end local cmd = string.format("pkill %s -P `cat %s`", signal, pid_path) - local _, code = pl_utils.execute(cmd) + local _, _, _, _, code = shell.run(cmd) if not pid_dead(pid_path) then return false diff --git a/spec/helpers/http_mock/nginx_instance.lua b/spec/helpers/http_mock/nginx_instance.lua index 860a12439f60..1fe011264b10 100644 --- a/spec/helpers/http_mock/nginx_instance.lua +++ b/spec/helpers/http_mock/nginx_instance.lua @@ -7,7 +7,7 @@ local pl_path = require "pl.path" local pl_dir = require "pl.dir" local pl_file = require "pl.file" local pl_utils = require "pl.utils" -local os = require "os" +local shell = require "resty.shell" local print = print local error = error @@ -60,7 +60,7 @@ function http_mock:stop(no_clean, signal, timeout) pid_file:close() local kill_nginx_cmd = "kill -s " .. signal .. " " .. pid - if not os.execute(kill_nginx_cmd) then + if not shell.run(kill_nginx_cmd, nil, 0) then error("failed to kill nginx at " .. 
self.prefix, 2) end diff --git a/spec/helpers/perf/charts.lua b/spec/helpers/perf/charts.lua index 6d6589b66d28..4bfcade8fcb9 100644 --- a/spec/helpers/perf/charts.lua +++ b/spec/helpers/perf/charts.lua @@ -16,6 +16,7 @@ local unsaved_results_lookup = {} local unsaved_results = {} local function gen_plots(results, fname, opts) + local shell = require "resty.shell" opts = opts or options if not results or not next(results) then @@ -23,7 +24,7 @@ local function gen_plots(results, fname, opts) return end - os.execute("mkdir -p output") + shell.run("mkdir -p output", nil, 0) local output_data = { options = opts, diff --git a/spec/helpers/perf/utils.lua b/spec/helpers/perf/utils.lua index 5620773dbdd8..81a774922452 100644 --- a/spec/helpers/perf/utils.lua +++ b/spec/helpers/perf/utils.lua @@ -225,7 +225,8 @@ local function clear_loaded_package() end local function print_and_save(s, path) - os.execute("mkdir -p output") + local shell = require "resty.shell" + shell.run("mkdir -p output", nil, 0) print(s) local f = io.open(path or "output/result.txt", "a") f:write(s) From 7f93a9292be1bbf413666fc304cc889fba5ba58b Mon Sep 17 00:00:00 2001 From: Datong Sun Date: Thu, 14 Dec 2023 15:25:20 +0800 Subject: [PATCH 204/249] chore(labeler): upgrade to version 5 syntax and use the official action workflow file from `actions/labeler` (#12210) KAG-3349 --- .github/labeler.yml | 191 +++++++++++++++++++++------------- .github/workflows/label.yml | 22 ---- .github/workflows/labeler.yml | 12 +++ 3 files changed, 128 insertions(+), 97 deletions(-) delete mode 100644 .github/workflows/label.yml create mode 100644 .github/workflows/labeler.yml diff --git a/.github/labeler.yml b/.github/labeler.yml index 5b6dc2ff62b6..d75a21fa48a0 100644 --- a/.github/labeler.yml +++ b/.github/labeler.yml @@ -1,190 +1,231 @@ core/admin-api: -- kong/api/**/* +- changed-files: + - any-glob-to-any-file: kong/api/**/* core/balancer: -- kong/runloop/balancer/* +- changed-files: + - any-glob-to-any-file: kong/runloop/balancer/* core/cli: -- kong/cmd/**/* +- changed-files: + - any-glob-to-any-file: kong/cmd/**/* core/clustering: -- kong/clustering/**/* -- kong/cluster_events/**/* +- changed-files: + - any-glob-to-any-file: ['kong/clustering/**/*', 'kong/cluster_events/**/*'] core/configuration: -- kong/conf_loader/* +- changed-files: + - any-glob-to-any-file: kong/conf_loader/* core/db/migrations: -- kong/db/migrations/**/* +- changed-files: + - any-glob-to-any-file: kong/db/migrations/**/* core/db: -- any: ['kong/db/**/*', '!kong/db/migrations/**/*'] +- changed-files: + - all-globs-to-any-file: ['kong/db/**/*', '!kong/db/migrations/**/*'] changelog: -- CHANGELOG.md +- changed-files: + - any-glob-to-any-file: CHANGELOG.md core/docs: -- any: ['**/*.md', '!CHANGELOG.md'] +- changed-files: + - all-globs-to-any-file: ['**/*.md', '!CHANGELOG.md'] autodoc: -- 'autodoc/**/*' +- changed-files: + - any-glob-to-any-file: 'autodoc/**/*' core/language/go: -- kong/runloop/plugin_servers/* +- changed-files: + - any-glob-to-any-file: kong/runloop/plugin_servers/* core/language/js: -- kong/runloop/plugin_servers/* +- changed-files: + - any-glob-to-any-file: kong/runloop/plugin_servers/* core/language/python: -- kong/runloop/plugin_servers/* +- changed-files: + - any-glob-to-any-file: kong/runloop/plugin_servers/* core/logs: -- kong/pdk/log.lua +- changed-files: + - any-glob-to-any-file: kong/pdk/log.lua core/pdk: -- any: ['kong/pdk/**/*', '!kong/pdk/log.lua'] +- changed-files: + - all-globs-to-any-file: ['kong/pdk/**/*', '!kong/pdk/log.lua'] core/proxy: -- 
any: ['kong/runloop/**/*', '!kong/runloop/balancer/*', '!kong/runloop/plugin_servers/*'] +- changed-files: + - all-globs-to-any-file: ['kong/runloop/**/*', '!kong/runloop/balancer/*', '!kong/runloop/plugin_servers/*'] core/router: -- kong/router.lua +- changed-files: + - any-glob-to-any-file: kong/router/* core/templates: -- kong/templates/* +- changed-files: + - any-glob-to-any-file: kong/templates/* core/tracing: -- kong/tracing/**/* -- kong/pdk/tracing.lua +- changed-files: + - any-glob-to-any-file: ['kong/tracing/**/*', 'kong/pdk/tracing.lua'] chore: -- .github/**/* -- .devcontainer/**/* +- changed-files: + - any-glob-to-any-file: ['.github/**/*', '.devcontainer/**/*'] plugins/acl: -- kong/plugins/acl/**/* +- changed-files: + - any-glob-to-any-file: kong/plugins/acl/**/* plugins/acme: -- kong/plugins/acme/**/* +- changed-files: + - any-glob-to-any-file: kong/plugins/acme/**/* plugins/aws-lambda: -- kong/plugins/aws-lambda/**/* +- changed-files: + - any-glob-to-any-file: kong/plugins/aws-lambda/**/* plugins/azure-functions: -- kong/plugins/azure-functions/**/* +- changed-files: + - any-glob-to-any-file: kong/plugins/azure-functions/**/* plugins/basic-auth: -- kong/plugins/basic-auth/**/* +- changed-files: + - any-glob-to-any-file: kong/plugins/basic-auth/**/* plugins/bot-detection: -- kong/plugins/bot-detection/**/* +- changed-files: + - any-glob-to-any-file: kong/plugins/bot-detection/**/* plugins/correlation-id: -- kong/plugins/correlation-id/**/* +- changed-files: + - any-glob-to-any-file: kong/plugins/correlation-id/**/* plugins/cors: -- kong/plugins/cors/**/* +- changed-files: + - any-glob-to-any-file: kong/plugins/cors/**/* plugins/datadog: -- kong/plugins/datadog/**/* +- changed-files: + - any-glob-to-any-file: kong/plugins/datadog/**/* plugins/file-log: -- kong/plugins/file-log/**/* +- changed-files: + - any-glob-to-any-file: kong/plugins/file-log/**/* plugins/grpc-gateway: -- kong/plugins/grpc-gateway/**/* +- changed-files: + - any-glob-to-any-file: kong/plugins/grpc-gateway/**/* plugins/grpc-web: -- kong/plugins/grpc-web/**/* +- changed-files: + - any-glob-to-any-file: kong/plugins/grpc-web/**/* plugins/hmac-auth: -- kong/plugins/hmac-auth/**/* +- changed-files: + - any-glob-to-any-file: kong/plugins/hmac-auth/**/* plugins/http-log: -- kong/plugins/http-log/**/* +- changed-files: + - any-glob-to-any-file: kong/plugins/http-log/**/* plugins/ip-restriction: -- kong/plugins/ip-restriction/**/* +- changed-files: + - any-glob-to-any-file: kong/plugins/ip-restriction/**/* plugins/jwt: -- kong/plugins/jwt/**/* +- changed-files: + - any-glob-to-any-file: kong/plugins/jwt/**/* plugins/key-auth: -- kong/plugins/key-auth/**/* +- changed-files: + - any-glob-to-any-file: kong/plugins/key-auth/**/* plugins/ldap-auth: -- kong/plugins/ldap-auth/**/* +- changed-files: + - any-glob-to-any-file: kong/plugins/ldap-auth/**/* plugins/loggly: -- kong/plugins/loggly/**/* +- changed-files: + - any-glob-to-any-file: kong/plugins/loggly/**/* plugins/oauth2: -- kong/plugins/oauth2/**/* +- changed-files: + - any-glob-to-any-file: kong/plugins/oauth2/**/* plugins/prometheus: -- kong/plugins/prometheus/**/* +- changed-files: + - any-glob-to-any-file: kong/plugins/prometheus/**/* plugins/proxy-cache: -- kong/plugins/proxy-cache/**/* +- changed-files: + - any-glob-to-any-file: kong/plugins/proxy-cache/**/* plugins/rate-limiting: -- kong/plugins/rate-limiting/**/* +- changed-files: + - any-glob-to-any-file: kong/plugins/rate-limiting/**/* plugins/request-size-limiting: -- 
kong/plugins/request-size-limiting/**/* +- changed-files: + - any-glob-to-any-file: kong/plugins/request-size-limiting/**/* plugins/request-termination: -- kong/plugins/request-termination/**/* +- changed-files: + - any-glob-to-any-file: kong/plugins/request-termination/**/* plugins/request-transformer: -- kong/plugins/request-transformer/**/* +- changed-files: + - any-glob-to-any-file: kong/plugins/request-transformer/**/* plugins/response-ratelimiting: -- kong/plugins/response-ratelimiting/**/* +- changed-files: + - any-glob-to-any-file: kong/plugins/response-ratelimiting/**/* plugins/response-transformer: -- kong/plugins/response-transformer/**/* +- changed-files: + - any-glob-to-any-file: kong/plugins/response-transformer/**/* plugins/session: -- kong/plugins/session/**/* +- changed-files: + - any-glob-to-any-file: kong/plugins/session/**/* plugins/serverless-functions: -- kong/plugins/post-function/**/* -- kong/plugins/pre-function/**/* +- changed-files: + - any-glob-to-any-file: ['kong/plugins/post-function/**/*', 'kong/plugins/pre-function/**/*'] plugins/statsd: -- kong/plugins/statsd/**/* +- changed-files: + - any-glob-to-any-file: kong/plugins/statsd/**/* plugins/syslog: -- kong/plugins/syslog/**/* +- changed-files: + - any-glob-to-any-file: kong/plugins/syslog/**/* plugins/tcp-log: -- kong/plugins/tcp-log/**/* +- changed-files: + - any-glob-to-any-file: kong/plugins/tcp-log/**/* plugins/udp-log: -- kong/plugins/udp-log/**/* +- changed-files: + - any-glob-to-any-file: kong/plugins/udp-log/**/* plugins/zipkin: -- kong/plugins/zipkin/**/* +- changed-files: + - any-glob-to-any-file: kong/plugins/zipkin/**/* plugins/opentelemetry: -- kong/plugins/opentelemetry/**/* +- changed-files: + - any-glob-to-any-file: kong/plugins/opentelemetry/**/* schema-change-noteworthy: -- kong/db/schema/**/*.lua -- kong/**/schema.lua -- kong/plugins/**/daos.lua -- plugins-ee/**/daos.lua -- plugins-ee/**/schema.lua -- kong/db/dao/*.lua -- kong/enterprise_edition/redis/init.lua +- changed-files: + - any-glob-to-any-file: ['kong/db/schema/**/*.lua', 'kong/**/schema.lua', 'kong/plugins/**/daos.lua', 'plugins-ee/**/daos.lua', 'plugins-ee/**/schema.lua', 'kong/db/dao/*.lua', 'kong/enterprise_edition/redis/init.lua'] build/bazel: -- '**/*.bazel' -- '**/*.bzl' -- build/**/* -- WORKSPACE -- .bazelignore -- .bazelrc -- .bazelversion -- scripts/build-*.sh +- changed-files: + - any-glob-to-any-file: ['**/*.bazel', '**/*.bzl', 'build/**/*', 'WORKSPACE', '.bazelignore', '.bazelrc', '.bazelversion', 'scripts/build-*.sh'] diff --git a/.github/workflows/label.yml b/.github/workflows/label.yml deleted file mode 100644 index d23c4d403f5a..000000000000 --- a/.github/workflows/label.yml +++ /dev/null @@ -1,22 +0,0 @@ -# This workflow will triage pull requests and apply a label based on the -# paths that are modified in the pull request. -# -# To use this workflow, you will need to set up a .github/labeler.yml -# file with configuration. 
For more information, see: -# https://github.com/actions/labeler - -name: Labeler -on: [pull_request_target] - -jobs: - label: - - runs-on: ubuntu-latest - permissions: - contents: read - pull-requests: write - - steps: - - uses: actions/labeler@v5 - with: - repo-token: "${{ secrets.GITHUB_TOKEN }}" diff --git a/.github/workflows/labeler.yml b/.github/workflows/labeler.yml new file mode 100644 index 000000000000..e57cd86e2b3c --- /dev/null +++ b/.github/workflows/labeler.yml @@ -0,0 +1,12 @@ +name: "Pull Request Labeler" +on: +- pull_request_target + +jobs: + labeler: + permissions: + contents: read + pull-requests: write + runs-on: ubuntu-latest + steps: + - uses: actions/labeler@v5 From ac59ffdd5c9b9e415e4e2ee6123ca4f303704434 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Hans=20H=C3=BCbner?= Date: Fri, 15 Dec 2023 06:58:54 +0100 Subject: [PATCH 205/249] chore(actions): dynamic test scheduler / balancer (#12180) This commit adds an automatic scheduler for running busted tests. It replaces the static, shell script based scheduler by a mechanism that distributes the load onto a number of runners. Each runner gets to work on a portion of the tests that need to be run. The scheduler uses historic run time information to distribute the work evenly across runners, with the goal of making them all run for the same amount of time. With the 7 runners configured in the PR, the overall time it takes to run tests is reduced from around 30 minutes to around 11 minutes. Previously, the scheduling for tests was defined by what the run_tests.sh shell script did. This has now changed so that the new JSON file `test_suites.json` is instead used to define the tests that need to run. Like before, each of the test suites can have its own set of environment variables and test exclusions. The test runner has been rewritten in Javascript in order to make it easier to interface with the declarative configuration file and to facilitate reporting and interfacing with busted. It resides in the https://github.com/Kong/gateway-test-scheduler repository and provides its functionality through custom GitHub Actions. A couple of tests had to be changed to isolate them from other tests better. As the tests are no longer run in identical order every time, it has become more important that each test performs any required cleanup before it runs. KAG-3196 --- .ci/run_tests.sh | 154 ----------- .ci/test_suites.json | 34 +++ .github/workflows/build_and_test.yml | 241 +++++++----------- .../update-test-runtime-statistics.yml | 35 +++ spec/01-unit/19-hybrid/03-compat_spec.lua | 4 +- .../02-admin_gui_template_spec.lua | 4 +- .../17-admin_gui/02-log_spec.lua | 1 + .../37-opentelemetry/05-otelcol_spec.lua | 1 + spec/busted-ci-helper.lua | 59 +++++ spec/busted-log-failed.lua | 33 --- spec/fixtures/aws-sam.lua | 26 +- 11 files changed, 249 insertions(+), 343 deletions(-) delete mode 100755 .ci/run_tests.sh create mode 100644 .ci/test_suites.json create mode 100644 .github/workflows/update-test-runtime-statistics.yml create mode 100644 spec/busted-ci-helper.lua delete mode 100644 spec/busted-log-failed.lua diff --git a/.ci/run_tests.sh b/.ci/run_tests.sh deleted file mode 100755 index 447936f73ff6..000000000000 --- a/.ci/run_tests.sh +++ /dev/null @@ -1,154 +0,0 @@ -#!/usr/bin/env bash -set -e - -function cyan() { - echo -e "\033[1;36m$*\033[0m" -} - -function red() { - echo -e "\033[1;31m$*\033[0m" -} - -function get_failed { - if [ ! 
-z "$FAILED_TEST_FILES_FILE" -a -f "$FAILED_TEST_FILES_FILE" ] - then - cat < $FAILED_TEST_FILES_FILE - else - echo "$@" - fi -} - -BUSTED_ARGS="--keep-going -o htest -v --exclude-tags=flaky,ipv6" -if [ ! -z "$FAILED_TEST_FILES_FILE" ] -then - BUSTED_ARGS="--helper=spec/busted-log-failed.lua $BUSTED_ARGS" -fi - -if [ "$KONG_TEST_DATABASE" == "postgres" ]; then - export TEST_CMD="bin/busted $BUSTED_ARGS,off" - - psql -v ON_ERROR_STOP=1 -h localhost --username "$KONG_TEST_PG_USER" <<-EOSQL - CREATE user ${KONG_TEST_PG_USER}_ro; - GRANT CONNECT ON DATABASE $KONG_TEST_PG_DATABASE TO ${KONG_TEST_PG_USER}_ro; - \c $KONG_TEST_PG_DATABASE; - GRANT USAGE ON SCHEMA public TO ${KONG_TEST_PG_USER}_ro; - ALTER DEFAULT PRIVILEGES FOR ROLE $KONG_TEST_PG_USER IN SCHEMA public GRANT SELECT ON TABLES TO ${KONG_TEST_PG_USER}_ro; -EOSQL - -elif [ "$KONG_TEST_DATABASE" == "cassandra" ]; then - echo "Cassandra is no longer supported" - exit 1 - -else - export TEST_CMD="bin/busted $BUSTED_ARGS,postgres,db" -fi - -if [ "$TEST_SUITE" == "integration" ]; then - if [[ "$TEST_SPLIT" == first* ]]; then - # GitHub Actions, run first batch of integration tests - files=$(ls -d spec/02-integration/* | sort | grep -v 05-proxy) - files=$(get_failed $files) - eval "$TEST_CMD" $files - - elif [[ "$TEST_SPLIT" == second* ]]; then - # GitHub Actions, run second batch of integration tests - # Note that the split here is chosen carefully to result - # in a similar run time between the two batches, and should - # be adjusted if imbalance become significant in the future - files=$(ls -d spec/02-integration/* | sort | grep 05-proxy) - files=$(get_failed $files) - eval "$TEST_CMD" $files - - else - # Non GitHub Actions - eval "$TEST_CMD" $(get_failed spec/02-integration/) - fi -fi - -if [ "$TEST_SUITE" == "dbless" ]; then - eval "$TEST_CMD" $(get_failed spec/02-integration/02-cmd \ - spec/02-integration/05-proxy \ - spec/02-integration/04-admin_api/02-kong_routes_spec.lua \ - spec/02-integration/04-admin_api/15-off_spec.lua \ - spec/02-integration/08-status_api/01-core_routes_spec.lua \ - spec/02-integration/08-status_api/03-readiness_endpoint_spec.lua \ - spec/02-integration/11-dbless \ - spec/02-integration/20-wasm) -fi -if [ "$TEST_SUITE" == "plugins" ]; then - set +ex - rm -f .failed - - if [[ "$TEST_SPLIT" == first* ]]; then - # GitHub Actions, run first batch of plugin tests - PLUGINS=$(get_failed $(ls -d spec/03-plugins/* | head -n22)) - - elif [[ "$TEST_SPLIT" == second* ]]; then - # GitHub Actions, run second batch of plugin tests - # Note that the split here is chosen carefully to result - # in a similar run time between the two batches, and should - # be adjusted if imbalance become significant in the future - PLUGINS=$(get_failed $(ls -d spec/03-plugins/* | tail -n+23)) - - else - # Non GitHub Actions - PLUGINS=$(get_failed $(ls -d spec/03-plugins/*)) - fi - - for p in $PLUGINS; do - echo - cyan "--------------------------------------" - cyan $(basename $p) - cyan "--------------------------------------" - echo - - $TEST_CMD $p || echo "* $p" >> .failed - done - - if [[ "$TEST_SPLIT" != first* ]]; then - cat kong-*.rockspec | grep kong- | grep -v zipkin | grep -v sidecar | grep "~" | grep -v kong-prometheus-plugin | while read line ; do - REPOSITORY=`echo $line | sed "s/\"/ /g" | awk -F" " '{print $1}'` - VERSION=`luarocks show $REPOSITORY | grep $REPOSITORY | head -1 | awk -F" " '{print $2}' | cut -f1 -d"-"` - REPOSITORY=`echo $REPOSITORY | sed -e 's/kong-prometheus-plugin/kong-plugin-prometheus/g'` - REPOSITORY=`echo 
$REPOSITORY | sed -e 's/kong-proxy-cache-plugin/kong-plugin-proxy-cache/g'` - - echo - cyan "--------------------------------------" - cyan $REPOSITORY $VERSION - cyan "--------------------------------------" - echo - - git clone https://github.com/Kong/$REPOSITORY.git --branch $VERSION --single-branch /tmp/test-$REPOSITORY || \ - git clone https://github.com/Kong/$REPOSITORY.git --branch v$VERSION --single-branch /tmp/test-$REPOSITORY - sed -i 's/grpcbin:9000/localhost:15002/g' /tmp/test-$REPOSITORY/spec/*.lua - sed -i 's/grpcbin:9001/localhost:15003/g' /tmp/test-$REPOSITORY/spec/*.lua - cp -R /tmp/test-$REPOSITORY/spec/fixtures/* spec/fixtures/ || true - pushd /tmp/test-$REPOSITORY - luarocks make - popd - - $TEST_CMD /tmp/test-$REPOSITORY/spec/ || echo "* $REPOSITORY" >> .failed - - done - fi - - if [ -f .failed ]; then - echo - red "--------------------------------------" - red "Plugin tests failed:" - red "--------------------------------------" - cat .failed - exit 1 - else - exit 0 - fi -fi -if [ "$TEST_SUITE" == "pdk" ]; then - prove -I. -r t -fi -if [ "$TEST_SUITE" == "unit" ]; then - unset KONG_TEST_NGINX_USER KONG_PG_PASSWORD KONG_TEST_PG_PASSWORD - scripts/autodoc - bin/busted -v -o htest spec/01-unit - make lint -fi diff --git a/.ci/test_suites.json b/.ci/test_suites.json new file mode 100644 index 000000000000..eb6b15e5909e --- /dev/null +++ b/.ci/test_suites.json @@ -0,0 +1,34 @@ +[ + { + "name": "unit", + "exclude_tags": "flaky,ipv6", + "specs": ["spec/01-unit/"] + }, + { + "name": "integration", + "exclude_tags": "flaky,ipv6,off", + "environment": { + "KONG_TEST_DATABASE": "postgres" + }, + "specs": ["spec/02-integration/"] + }, + { + "name": "dbless", + "exclude_tags": "flaky,ipv6,postgres,db", + "specs": [ + "spec/02-integration/02-cmd/", + "spec/02-integration/05-proxy/", + "spec/02-integration/04-admin_api/02-kong_routes_spec.lua", + "spec/02-integration/04-admin_api/15-off_spec.lua", + "spec/02-integration/08-status_api/01-core_routes_spec.lua", + "spec/02-integration/08-status_api/03-readiness_endpoint_spec.lua", + "spec/02-integration/11-dbless/", + "spec/02-integration/20-wasm/" + ] + }, + { + "name": "plugins", + "exclude_tags": "flaky,ipv6", + "specs": ["spec/03-plugins/"] + } +] diff --git a/.github/workflows/build_and_test.yml b/.github/workflows/build_and_test.yml index 8b3c77ccf375..5cca0656ac08 100644 --- a/.github/workflows/build_and_test.yml +++ b/.github/workflows/build_and_test.yml @@ -33,6 +33,7 @@ concurrency: env: BUILD_ROOT: ${{ github.workspace }}/bazel-bin/build KONG_TEST_COVERAGE: ${{ inputs.coverage == true || github.event_name == 'schedule' }} + RUNNER_COUNT: 7 jobs: build: @@ -40,22 +41,11 @@ jobs: with: relative-build-root: bazel-bin/build - lint-doc-and-unit-tests: - name: Lint, Doc and Unit tests + lint-and-doc-tests: + name: Lint and Doc tests runs-on: ubuntu-22.04 needs: build - services: - postgres: - image: postgres:13 - env: - POSTGRES_USER: kong - POSTGRES_DB: kong - POSTGRES_HOST_AUTH_METHOD: trust - ports: - - 5432:5432 - options: --health-cmd pg_isready --health-interval 5s --health-timeout 5s --health-retries 8 - steps: - name: Checkout Kong source code uses: actions/checkout@v4 @@ -93,41 +83,56 @@ jobs: - name: Check labeler configuration run: scripts/check-labeler.pl .github/labeler.yml - - name: Unit tests - env: - KONG_TEST_PG_DATABASE: kong - KONG_TEST_PG_USER: kong - run: | - source ${{ env.BUILD_ROOT }}/kong-dev-venv.sh - TEST_CMD="bin/busted -v -o htest spec/01-unit" - if [[ $KONG_TEST_COVERAGE = true ]]; then - 
TEST_CMD="$TEST_CMD --coverage" - fi - $TEST_CMD + schedule: + name: Schedule busted tests to run + runs-on: ubuntu-22.04 + needs: build - - name: Archive coverage stats file + env: + WORKFLOW_ID: ${{ github.run_id }} + + outputs: + runners: ${{ steps.generate-runner-array.outputs.RUNNERS }} + + steps: + - name: Checkout source code + uses: actions/checkout@v4 + + - name: Download runtimes file + uses: Kong/gh-storage/download@main + with: + repo-path: Kong/gateway-action-storage/main/.ci/runtimes.json + + - name: Schedule tests + uses: Kong/gateway-test-scheduler/schedule@main + with: + test-suites-file: .ci/test_suites.json + test-file-runtime-file: .ci/runtimes.json + output-prefix: test-chunk. + runner-count: ${{ env.RUNNER_COUNT }} + + - name: Upload schedule files uses: actions/upload-artifact@v3 - if: ${{ always() && (inputs.coverage == true || github.event_name == 'schedule') }} + continue-on-error: true with: - name: luacov-stats-out-${{ github.job }}-${{ github.run_id }} - retention-days: 1 - path: | - luacov.stats.out + name: schedule-test-files + path: test-chunk.* + retention-days: 7 - - name: Get kernel message - if: failure() + - name: Generate runner array + id: generate-runner-array run: | - sudo dmesg -T + echo "RUNNERS=[$(echo $(seq 1 $(( $RUNNER_COUNT ))))]" | sed -e 's/ /, /g' >> $GITHUB_OUTPUT - integration-tests-postgres: - name: Postgres ${{ matrix.suite }} - ${{ matrix.split }} tests + busted-tests: + name: Busted test runner ${{ matrix.runner }} runs-on: ubuntu-22.04 - needs: build + needs: [build,schedule] + strategy: fail-fast: false matrix: - suite: [integration, plugins] - split: [first, second] + runner: ${{ fromJSON(needs.schedule.outputs.runners) }} services: postgres: @@ -179,7 +184,6 @@ jobs: echo "127.0.0.1 grpcs_2.test" | sudo tee -a /etc/hosts - name: Enable SSL for Redis - if: ${{ matrix.suite == 'plugins' }} run: | docker cp ${{ github.workspace }} kong_redis:/workspace docker cp ${{ github.workspace }}/spec/fixtures/redis/docker-entrypoint.sh kong_redis:/usr/local/bin/docker-entrypoint.sh @@ -202,47 +206,54 @@ jobs: docker logs opentelemetry-collector - name: Install AWS SAM cli tool - if: ${{ matrix.suite == 'plugins' }} run: | curl -L -s -o /tmp/aws-sam-cli.zip https://github.com/aws/aws-sam-cli/releases/latest/download/aws-sam-cli-linux-x86_64.zip unzip -o /tmp/aws-sam-cli.zip -d /tmp/aws-sam-cli sudo /tmp/aws-sam-cli/install --update - - name: Update PATH - run: | - echo "$BUILD_ROOT/kong-dev/bin" >> $GITHUB_PATH - echo "$BUILD_ROOT/kong-dev/openresty/nginx/sbin" >> $GITHUB_PATH - echo "$BUILD_ROOT/kong-dev/openresty/bin" >> $GITHUB_PATH - - - name: Debug (nginx) + - name: Create kong_ro user in Postgres run: | - echo nginx: $(which nginx) - nginx -V 2>&1 | sed -re 's/ --/\n--/g' - ldd $(which nginx) - - - name: Debug (luarocks) - run: | - echo luarocks: $(which luarocks) - luarocks --version - luarocks config + psql -v ON_ERROR_STOP=1 -h localhost --username kong <<\EOD + CREATE user kong_ro; + GRANT CONNECT ON DATABASE kong TO kong_ro; + \c kong; + GRANT USAGE ON SCHEMA public TO kong_ro; + ALTER DEFAULT PRIVILEGES FOR ROLE kong IN SCHEMA public GRANT SELECT ON TABLES TO kong_ro; + EOD - name: Tune up postgres max_connections run: | # arm64 runners may use more connections due to more worker cores psql -hlocalhost -Ukong kong -tAc 'alter system set max_connections = 5000;' - - name: Generate test rerun filename + - name: Download test schedule file + uses: actions/download-artifact@v3 + continue-on-error: true + with: + name: 
schedule-test-files + + - name: Generate helper environment variables run: | - echo FAILED_TEST_FILES_FILE=$(echo '${{ github.run_id }}-${{ matrix.suite }}-${{ matrix.split }}' | tr A-Z a-z | sed -Ee 's/[^a-z0-9]+/-/g').txt >> $GITHUB_ENV + echo FAILED_TEST_FILES_FILE=failed-tests.json >> $GITHUB_ENV + echo TEST_FILE_RUNTIME_FILE=test-runtime.json >> $GITHUB_ENV + - name: Build & install dependencies + run: | + make dev - name: Download test rerun information uses: actions/download-artifact@v3 continue-on-error: true with: - name: ${{ env.FAILED_TEST_FILES_FILE }} + name: test-rerun-info-${{ matrix.runner }} - - name: Tests + - name: Download test runtime statistics from previous runs + uses: actions/download-artifact@v3 + continue-on-error: true + with: + name: test-runtime-statistics-${{ matrix.runner }} + + - name: Run Tests env: KONG_TEST_PG_DATABASE: kong KONG_TEST_PG_USER: kong @@ -250,108 +261,44 @@ jobs: KONG_SPEC_TEST_GRPCBIN_PORT: "15002" KONG_SPEC_TEST_GRPCBIN_SSL_PORT: "15003" KONG_SPEC_TEST_OTELCOL_FILE_EXPORTER_PATH: ${{ github.workspace }}/tmp/otel/file_exporter.json - TEST_SUITE: ${{ matrix.suite }} - TEST_SPLIT: ${{ matrix.split }} - run: | - make dev # required to install other dependencies like bin/grpcurl - source ${{ env.BUILD_ROOT }}/kong-dev-venv.sh - .ci/run_tests.sh + DD_ENV: ci + DD_SERVICE: kong-ce-ci + DD_CIVISIBILITY_MANUAL_API_ENABLED: 1 + DD_CIVISIBILITY_AGENTLESS_ENABLED: true + DD_TRACE_GIT_METADATA_ENABLED: true + DD_API_KEY: ${{ secrets.DATADOG_API_KEY }} + uses: Kong/gateway-test-scheduler/runner@main + with: + tests-to-run-file: test-chunk.${{ matrix.runner }}.json + failed-test-files-file: ${{ env.FAILED_TEST_FILES_FILE }} + test-file-runtime-file: ${{ env.TEST_FILE_RUNTIME_FILE }} + setup-venv: . ${{ env.BUILD_ROOT }}/kong-dev-venv.sh - name: Upload test rerun information if: always() uses: actions/upload-artifact@v3 with: - name: ${{ env.FAILED_TEST_FILES_FILE }} + name: test-rerun-info-${{ matrix.runner }} path: ${{ env.FAILED_TEST_FILES_FILE }} retention-days: 2 - - name: Archive coverage stats file + - name: Upload test runtime statistics for offline scheduling + if: always() uses: actions/upload-artifact@v3 - if: ${{ always() && (inputs.coverage == true || github.event_name == 'schedule') }} with: - name: luacov-stats-out-${{ github.job }}-${{ github.run_id }}-${{ matrix.suite }}-${{ contains(matrix.split, 'first') && '1' || '2' }} - retention-days: 1 - path: | - luacov.stats.out - - - name: Get kernel message - if: failure() - run: | - sudo dmesg -T - - integration-tests-dbless: - name: DB-less integration tests - runs-on: ubuntu-22.04 - needs: build - - services: - grpcbin: - image: kong/grpcbin - ports: - - 15002:9000 - - 15003:9001 - - steps: - - name: Checkout Kong source code - uses: actions/checkout@v4 - - - name: Lookup build cache - id: cache-deps - uses: actions/cache@v3 - with: - path: ${{ env.BUILD_ROOT }} - key: ${{ needs.build.outputs.cache-key }} - - - name: Build WASM Test Filters - uses: ./.github/actions/build-wasm-test-filters - - - name: Add gRPC test host names - run: | - echo "127.0.0.1 grpcs_1.test" | sudo tee -a /etc/hosts - echo "127.0.0.1 grpcs_2.test" | sudo tee -a /etc/hosts - - - name: Run OpenTelemetry Collector - run: | - mkdir -p ${{ github.workspace }}/tmp/otel - touch ${{ github.workspace }}/tmp/otel/file_exporter.json - sudo chmod 777 -R ${{ github.workspace }}/tmp/otel - docker run -p 4317:4317 -p 4318:4318 -p 55679:55679 \ - -v ${{ github.workspace 
}}/spec/fixtures/opentelemetry/otelcol.yaml:/etc/otel-collector-config.yaml \ - -v ${{ github.workspace }}/tmp/otel:/etc/otel \ - --name opentelemetry-collector -d \ - otel/opentelemetry-collector-contrib:0.52.0 \ - --config=/etc/otel-collector-config.yaml - sleep 2 - docker logs opentelemetry-collector - - - name: Tests - env: - KONG_TEST_PG_DATABASE: kong - KONG_TEST_PG_USER: kong - KONG_TEST_DATABASE: 'off' - KONG_SPEC_TEST_GRPCBIN_PORT: "15002" - KONG_SPEC_TEST_GRPCBIN_SSL_PORT: "15003" - KONG_SPEC_TEST_OTELCOL_FILE_EXPORTER_PATH: ${{ github.workspace }}/tmp/otel/file_exporter.json - TEST_SUITE: dbless - run: | - make dev # required to install other dependencies like bin/grpcurl - source ${{ env.BUILD_ROOT }}/kong-dev-venv.sh - .ci/run_tests.sh + name: test-runtime-statistics-${{ matrix.runner }} + path: ${{ env.TEST_FILE_RUNTIME_FILE }} + retention-days: 7 - name: Archive coverage stats file uses: actions/upload-artifact@v3 if: ${{ always() && (inputs.coverage == true || github.event_name == 'schedule') }} with: - name: luacov-stats-out-${{ github.job }}-${{ github.run_id }} + name: luacov-stats-out-${{ github.job }}-${{ github.run_id }}-${{ matrix.runner }} retention-days: 1 path: | luacov.stats.out - - name: Get kernel message - if: failure() - run: | - sudo dmesg -T - pdk-tests: name: PDK tests runs-on: ubuntu-22.04 @@ -388,7 +335,7 @@ jobs: export PDK_LUACOV=1 fi eval $(perl -I $HOME/perl5/lib/perl5/ -Mlocal::lib) - .ci/run_tests.sh + prove -I. -r t - name: Archive coverage stats file uses: actions/upload-artifact@v3 @@ -404,9 +351,9 @@ jobs: run: | sudo dmesg -T - aggregator: - needs: [lint-doc-and-unit-tests,pdk-tests,integration-tests-postgres,integration-tests-dbless] - name: Luacov stats aggregator + cleanup-and-aggregate-stats: + needs: [lint-and-doc-tests,pdk-tests,busted-tests] + name: Cleanup and Luacov stats aggregator if: ${{ always() && (inputs.coverage == true || github.event_name == 'schedule') }} runs-on: ubuntu-22.04 diff --git a/.github/workflows/update-test-runtime-statistics.yml b/.github/workflows/update-test-runtime-statistics.yml new file mode 100644 index 000000000000..de53f0e38f06 --- /dev/null +++ b/.github/workflows/update-test-runtime-statistics.yml @@ -0,0 +1,35 @@ +name: Update test runtime statistics file for test scheduling +on: + workflow_dispatch: + schedule: + - cron: "1 0 * * SAT" + # push rule below needed for testing only + push: + branches: + - feat/test-run-scheduler + +jobs: + process-statistics: + name: Download statistics from GitHub and combine them + runs-on: ubuntu-22.04 + steps: + - name: Checkout source code + uses: actions/checkout@v4 + with: + token: ${{ secrets.PAT }} + + - name: Process statistics + uses: Kong/gateway-test-scheduler/analyze@main + env: + GITHUB_TOKEN: ${{ secrets.PAT }} + with: + workflow-name: build_and_test.yml + test-file-runtime-file: .ci/runtimes.json + artifact-name-regexp: "^test-runtime-statistics-\\d+$" + + - name: Upload new runtimes file + uses: Kong/gh-storage/upload@main + env: + GITHUB_TOKEN: ${{ secrets.PAT }} + with: + repo-path: Kong/gateway-action-storage/main/.ci/runtimes.json diff --git a/spec/01-unit/19-hybrid/03-compat_spec.lua b/spec/01-unit/19-hybrid/03-compat_spec.lua index 48085ab24ecf..b2a0030aa0f0 100644 --- a/spec/01-unit/19-hybrid/03-compat_spec.lua +++ b/spec/01-unit/19-hybrid/03-compat_spec.lua @@ -390,7 +390,7 @@ describe("kong.clustering.compat", function() end end) - it(function() + it("has_update", function() local config = { config_table = declarative.export_config() } local 
has_update = compat.update_compatible_payload(config, "3.0.0", "test_") assert.truthy(has_update) @@ -561,7 +561,7 @@ describe("kong.clustering.compat", function() config = { config_table = declarative.export_config() } end) - it(function() + it("plugin.use_srv_name", function() local has_update, result = compat.update_compatible_payload(config, "3.0.0", "test_") assert.truthy(has_update) result = cjson_decode(inflate_gzip(result)).config_table diff --git a/spec/01-unit/29-admin_gui/02-admin_gui_template_spec.lua b/spec/01-unit/29-admin_gui/02-admin_gui_template_spec.lua index 9a3df93ab523..de4c337fda36 100644 --- a/spec/01-unit/29-admin_gui/02-admin_gui_template_spec.lua +++ b/spec/01-unit/29-admin_gui/02-admin_gui_template_spec.lua @@ -57,6 +57,7 @@ describe("admin_gui template", function() setup(function() prefix_handler.prepare_prefixed_interface_dir("/usr/local/kong", "gui", conf) + os.execute("mkdir -p " .. mock_prefix) assert(pl_path.isdir(mock_prefix)) end) @@ -138,6 +139,7 @@ describe("admin_gui template", function() setup(function() prefix_handler.prepare_prefixed_interface_dir("/usr/local/kong", "gui", conf) + os.execute("mkdir -p " .. mock_prefix) assert(pl_path.isdir(mock_prefix)) end) @@ -183,7 +185,7 @@ describe("admin_gui template", function() conf.prefix = mock_prefix if not pl_path.exists(usr_interface_path) then - assert(pl_path.mkdir(usr_interface_path)) + os.execute("mkdir -p " .. usr_interface_path) end end) diff --git a/spec/02-integration/17-admin_gui/02-log_spec.lua b/spec/02-integration/17-admin_gui/02-log_spec.lua index 226ff7d17901..e1b0176129ee 100644 --- a/spec/02-integration/17-admin_gui/02-log_spec.lua +++ b/spec/02-integration/17-admin_gui/02-log_spec.lua @@ -6,6 +6,7 @@ for _, strategy in helpers.each_strategy() do describe("Admin API - GUI logs - kong_admin #" .. strategy, function () lazy_setup(function () + helpers.get_db_utils(strategy) -- clear db assert(helpers.start_kong({ strategy = strategy, prefix = "servroot", diff --git a/spec/03-plugins/37-opentelemetry/05-otelcol_spec.lua b/spec/03-plugins/37-opentelemetry/05-otelcol_spec.lua index 7f8e4a1e3359..ca4fb585e381 100644 --- a/spec/03-plugins/37-opentelemetry/05-otelcol_spec.lua +++ b/spec/03-plugins/37-opentelemetry/05-otelcol_spec.lua @@ -76,6 +76,7 @@ for _, strategy in helpers.each_strategy() do lazy_setup(function() -- clear file local shell = require "resty.shell" + shell.run("mkdir -p $(dirname " .. OTELCOL_FILE_EXPORTER_PATH .. ")", nil, 0) shell.run("cat /dev/null > " .. OTELCOL_FILE_EXPORTER_PATH, nil, 0) setup_instrumentations("all") end) diff --git a/spec/busted-ci-helper.lua b/spec/busted-ci-helper.lua new file mode 100644 index 000000000000..ff85767086ff --- /dev/null +++ b/spec/busted-ci-helper.lua @@ -0,0 +1,59 @@ +-- busted-log-failed.lua + +-- Log which test files run by busted had failures or errors in a +-- file. The file to use for logging is specified in the +-- FAILED_TEST_FILES_FILE environment variable. This is used to +-- reduce test rerun times for flaky tests. 
+ +local busted = require 'busted' +local cjson = require 'cjson' +local socket_unix = require 'socket.unix' + +local busted_event_path = os.getenv("BUSTED_EVENT_PATH") + +-- Function to recursively copy a table, skipping keys associated with functions +local function copyTable(original, copied) + copied = copied or {} + + for key, value in pairs(original) do + if type(value) == "table" then + copied[key] = copyTable(value, {}) + elseif type(value) ~= "function" then + copied[key] = value + end + end + + return copied +end + +if busted_event_path then + local sock = assert(socket_unix()) + assert(sock:connect(busted_event_path)) + + local events = {{ 'suite', 'reset' }, + { 'suite', 'start' }, + { 'suite', 'end' }, + { 'file', 'start' }, + { 'file', 'end' }, + { 'test', 'start' }, + { 'test', 'end' }, + { 'pending' }, + { 'failure', 'it' }, + { 'error', 'it' }, + { 'failure' }, + { 'error' }} + for _, event in ipairs(events) do + busted.subscribe(event, function (...) + local args = {} + for i, original in ipairs{...} do + if type(original) == "table" then + args[i] = copyTable(original) + elseif type(original) ~= "function" then + args[i] = original + end + end + + sock:send(cjson.encode({ event = event[1] .. (event[2] and ":" .. event[2] or ""), args = args }) .. "\n") + end) + end +end diff --git a/spec/busted-log-failed.lua b/spec/busted-log-failed.lua deleted file mode 100644 index 7bfe6804b83f..000000000000 --- a/spec/busted-log-failed.lua +++ /dev/null @@ -1,33 +0,0 @@ --- busted-log-failed.lua - --- Log which test files run by busted had failures or errors in a --- file. The file to use for logging is specified in the --- FAILED_TEST_FILES_FILE environment variable. This is used to --- reduce test rerun times for flaky tests. - -local busted = require 'busted' -local failed_files_file = assert(os.getenv("FAILED_TEST_FILES_FILE"), - "FAILED_TEST_FILES_FILE environment variable not set") - -local FAILED_FILES = {} - -busted.subscribe({ 'failure' }, function(element, parent, message, debug) - FAILED_FILES[element.trace.source] = true -end) - -busted.subscribe({ 'error' }, function(element, parent, message, debug) - FAILED_FILES[element.trace.source] = true -end) - -busted.subscribe({ 'suite', 'end' }, function(suite, count, total) - local output = assert(io.open(failed_files_file, "w")) - if next(FAILED_FILES) then - for failed_file in pairs(FAILED_FILES) do - if failed_file:sub(1, 1) == '@' then - failed_file = failed_file:sub(2) - end - assert(output:write(failed_file .. "\n")) - end - end - output:close() -end) diff --git a/spec/fixtures/aws-sam.lua b/spec/fixtures/aws-sam.lua index 5aa67f972eab..6316f7c574c4 100644 --- a/spec/fixtures/aws-sam.lua +++ b/spec/fixtures/aws-sam.lua @@ -1,4 +1,5 @@ --AWS SAM Local Test Helper +local ngx_pipe = require "ngx.pipe" local helpers = require "spec.helpers" local utils = require "spec.helpers.perf.utils" local fmt = string.format @@ -26,6 +27,9 @@ function _M.is_sam_installed() end +local sam_proc + + function _M.start_local_lambda() local port = helpers.get_available_port() if not port then @@ -33,9 +37,16 @@ function _M.start_local_lambda() end -- run in background - local _ = ngx.thread.spawn(function() - utils.execute("sam local start-lambda --template-file=spec/fixtures/sam-app/template.yaml --port " .. 
port) - end) + local err + sam_proc, err = ngx_pipe.spawn({"sam", + "local", + "start-lambda", + "--template-file", "spec/fixtures/sam-app/template.yaml", + "--port", port + }) + if not sam_proc then + return nil, err + end local ret, err = utils.execute("pgrep -f 'sam local'") if err then @@ -47,9 +58,12 @@ end function _M.stop_local_lambda() - local ret, err = utils.execute("pkill -f sam") - if err then - return nil, fmt("Stop SAM CLI failed(code: %s): %s", err, ret) + if sam_proc then + local ok, err = sam_proc:kill(15) + if not ok then + return nil, fmt("Stop SAM CLI failed: %s", err) + end + sam_proc = nil end return true From dd4efe8959390a00e4272b588b3c9c5b57c6a43b Mon Sep 17 00:00:00 2001 From: Zachary Hu <6426329+outsinre@users.noreply.github.com> Date: Mon, 18 Dec 2023 20:38:20 +0800 Subject: [PATCH 206/249] chore(ci): fix Slack bot notification (#7598) (#12208) * chore(ci): fix Slack bot notification (#7598) 1. backport notification fails due to new backport message. 2. build notification uses PR author instead of merger. * chore(ci): downgrade actions/labeler from v5 to v4 * Revert "chore(ci): downgrade actions/labeler from v5 to v4" This reverts commit 57f83709ae3696b702ad92adf766e17ec1e429d6. --- .github/workflows/backport-fail-bot.yml | 2 +- .../workflows/release-and-tests-fail-bot.yml | 22 +++++++++++++++++-- 2 files changed, 21 insertions(+), 3 deletions(-) diff --git a/.github/workflows/backport-fail-bot.yml b/.github/workflows/backport-fail-bot.yml index 94eff6defd80..9d83c6df036a 100644 --- a/.github/workflows/backport-fail-bot.yml +++ b/.github/workflows/backport-fail-bot.yml @@ -7,7 +7,7 @@ on: jobs: check_comment: runs-on: ubuntu-latest - if: github.event.issue.pull_request != null && contains(github.event.comment.body, 'To backport manually, run these commands in your terminal') + if: github.event.issue.pull_request != null && contains(github.event.comment.body, 'cherry-pick the changes locally and resolve any conflicts') steps: - name: Fetch mapping file diff --git a/.github/workflows/release-and-tests-fail-bot.yml b/.github/workflows/release-and-tests-fail-bot.yml index 1e9adaf073a9..1dc12b6f913b 100644 --- a/.github/workflows/release-and-tests-fail-bot.yml +++ b/.github/workflows/release-and-tests-fail-bot.yml @@ -28,6 +28,23 @@ jobs: const mapping = await response.json(); return mapping; + - name: Retrieve PR info + id: retrieve_pr_info + env: + ACCESS_TOKEN: ${{ secrets.PAT }} + run: | + repo_name="${{ github.event.workflow_run.repository.full_name }}" + head_sha="${{ github.event.workflow_run.head_sha }}" + IFS=$'\t' read pr_html_url pr_user_login < <(curl -sS \ + -H "Authorization: Bearer ${{ env.ACCESS_TOKEN }}" \ + -H "Accept: application/vnd.github+json" \ + -H "X-GitHub-Api-Version: 2022-11-28" \ + "https://api.github.com/repos/$repo_name/commits/$head_sha/pulls" \ + | jq -r '.[0] | [.html_url, .user.login] | @tsv') + echo "pr_html_url=$pr_html_url" >> $GITHUB_OUTPUT + echo "pr_user_login=$pr_user_login" >> $GITHUB_OUTPUT + shell: bash + - name: Generate Slack Payload id: generate-payload env: @@ -36,16 +53,17 @@ jobs: uses: actions/github-script@v7 with: script: | + const pr_html_url = "${{ steps.retrieve_pr_info.outputs.pr_html_url }}"; const workflow_name = "${{ github.event.workflow_run.name }}"; const repo_name = "${{ github.event.workflow_run.repository.full_name }}"; const branch_name = "${{ github.event.workflow_run.head_branch }}"; const run_url = "${{ github.event.workflow_run.html_url }}"; const slack_mapping = 
JSON.parse(process.env.SLACK_MAPPING); - const actor_github_id = "${{ github.event.workflow_run.actor.login }}"; + const actor_github_id = "${{ steps.retrieve_pr_info.outputs.pr_user_login }}"; const actor_slack_id = slack_mapping[actor_github_id]; const actor = actor_slack_id ? `<@${actor_slack_id}>` : actor_github_id; const payload = { - text: `Hello ${actor} , workflow “${workflow_name}” failed in repo: "${repo_name}", branch: "${branch_name}". Please check it: ${run_url}.`, + text: `${actor} , workflow “${workflow_name}” failed, repo: "${repo_name}", branch: "${branch_name}", PR: "${pr_html_url}". Please check it: ${run_url}.`, channel: process.env.SLACK_CHANNEL, }; return JSON.stringify(payload); From 98cf98924f754440a806db6806bdbd6883a2663e Mon Sep 17 00:00:00 2001 From: Chrono Date: Mon, 18 Dec 2023 21:44:05 +0800 Subject: [PATCH 207/249] refactor(conf_loader): separate parsing functions into parse.lua (#12182) --- kong-3.6.0-0.rockspec | 1 + kong/conf_loader/init.lua | 881 +------------------------ kong/conf_loader/parse.lua | 925 +++++++++++++++++++++++++++ spec/01-unit/03-conf_loader_spec.lua | 3 +- 4 files changed, 935 insertions(+), 875 deletions(-) create mode 100644 kong/conf_loader/parse.lua diff --git a/kong-3.6.0-0.rockspec b/kong-3.6.0-0.rockspec index c49b7e137fb4..4e07f3823b0e 100644 --- a/kong-3.6.0-0.rockspec +++ b/kong-3.6.0-0.rockspec @@ -67,6 +67,7 @@ build = { ["kong.conf_loader"] = "kong/conf_loader/init.lua", ["kong.conf_loader.constants"] = "kong/conf_loader/constants.lua", + ["kong.conf_loader.parse"] = "kong/conf_loader/parse.lua", ["kong.conf_loader.listeners"] = "kong/conf_loader/listeners.lua", ["kong.clustering"] = "kong/clustering/init.lua", diff --git a/kong/conf_loader/init.lua b/kong/conf_loader/init.lua index 71e863892c59..bb36dde41e9f 100644 --- a/kong/conf_loader/init.lua +++ b/kong/conf_loader/init.lua @@ -3,14 +3,12 @@ local require = require local kong_default_conf = require "kong.templates.kong_defaults" local process_secrets = require "kong.cmd.utils.process_secrets" -local nginx_signals = require "kong.cmd.utils.nginx_signals" -local openssl_pkey = require "resty.openssl.pkey" -local openssl_x509 = require "resty.openssl.x509" local pl_stringio = require "pl.stringio" local pl_stringx = require "pl.stringx" local socket_url = require "socket.url" local conf_constants = require "kong.conf_loader.constants" local listeners = require "kong.conf_loader.listeners" +local conf_parse = require "kong.conf_loader.parse" local pl_pretty = require "pl.pretty" local pl_config = require "pl.config" local pl_file = require "pl.file" @@ -22,7 +20,6 @@ local env = require "kong.cmd.utils.env" local ffi = require "ffi" -local re_match = ngx.re.match local fmt = string.format local sub = string.sub local type = type @@ -30,9 +27,7 @@ local sort = table.sort local find = string.find local gsub = string.gsub local strip = pl_stringx.strip -local floor = math.floor local lower = string.lower -local upper = string.upper local match = string.match local pairs = pairs local assert = assert @@ -40,26 +35,11 @@ local unpack = unpack local ipairs = ipairs local insert = table.insert local remove = table.remove -local concat = table.concat local getenv = os.getenv local exists = pl_path.exists local abspath = pl_path.abspath -local isdir = pl_path.isdir local tostring = tostring -local tonumber = tonumber local setmetatable = setmetatable -local try_decode_base64 = utils.try_decode_base64 - - -local get_phase do - if ngx and ngx.get_phase then - get_phase = 
ngx.get_phase - else - get_phase = function() - return "timer" - end - end -end local C = ffi.C @@ -72,859 +52,12 @@ ffi.cdef([[ ]]) -local function is_predefined_dhgroup(group) - if type(group) ~= "string" then - return false - end - - return not not openssl_pkey.paramgen({ - type = "DH", - group = group, - }) -end - - -local function parse_value(value, typ) - if type(value) == "string" then - value = strip(value) - end - - -- transform {boolean} values ("on"/"off" aliasing to true/false) - -- transform {ngx_boolean} values ("on"/"off" aliasing to on/off) - -- transform {explicit string} values (number values converted to strings) - -- transform {array} values (comma-separated strings) - if typ == "boolean" then - value = value == true or value == "on" or value == "true" - - elseif typ == "ngx_boolean" then - value = (value == "on" or value == true) and "on" or "off" - - elseif typ == "string" then - value = tostring(value) -- forced string inference - - elseif typ == "number" then - value = tonumber(value) -- catch ENV variables (strings) that are numbers - - elseif typ == "array" and type(value) == "string" then - -- must check type because pl will already convert comma - -- separated strings to tables (but not when the arr has - -- only one element) - value = setmetatable(pl_stringx.split(value, ","), nil) -- remove List mt - - for i = 1, #value do - value[i] = strip(value[i]) - end - end - - if value == "" then - -- unset values are removed - value = nil - end - - return value -end - - --- Check if module is dynamic -local function check_dynamic_module(mod_name) - local configure_line = ngx.config.nginx_configure() - local mod_re = [[^.*--add-dynamic-module=(.+\/]] .. mod_name .. [[(\s|$)).*$]] - return re_match(configure_line, mod_re, "oi") ~= nil -end - - --- Lookup dynamic module object --- this function will lookup for the `mod_name` dynamic module in the following --- paths: --- - /usr/local/kong/modules -- default path for modules in container images --- - /../modules --- @param[type=string] mod_name The module name to lookup, without file extension -local function lookup_dynamic_module_so(mod_name, kong_conf) - log.debug("looking up dynamic module %s", mod_name) - - local mod_file = fmt("/usr/local/kong/modules/%s.so", mod_name) - if exists(mod_file) then - log.debug("module '%s' found at '%s'", mod_name, mod_file) - return mod_file - end - - local nginx_bin = nginx_signals.find_nginx_bin(kong_conf) - mod_file = fmt("%s/../modules/%s.so", pl_path.dirname(nginx_bin), mod_name) - if exists(mod_file) then - log.debug("module '%s' found at '%s'", mod_name, mod_file) - return mod_file - end - - return nil, fmt("%s dynamic module shared object not found", mod_name) -end - - --- Validate Wasm properties -local function validate_wasm(conf) - local wasm_enabled = conf.wasm - local filters_path = conf.wasm_filters_path - - if wasm_enabled then - if filters_path and not exists(filters_path) and not isdir(filters_path) then - return nil, fmt("wasm_filters_path '%s' is not a valid directory", filters_path) - end - end - - return true -end - -local validate_labels -do - local MAX_KEY_SIZE = 63 - local MAX_VALUE_SIZE = 63 - local MAX_KEYS_COUNT = 10 - - - -- validation rules based on Kong Labels AIP - -- https://kong-aip.netlify.app/aip/129/ - local BASE_PTRN = "[a-z0-9]([\\w\\.:-]*[a-z0-9]|)$" - local KEY_PTRN = "(?!kong)(?!konnect)(?!insomnia)(?!mesh)(?!kic)" .. 
BASE_PTRN - local VAL_PTRN = BASE_PTRN - - - local function validate_entry(str, max_size, pattern) - if str == "" or #str > max_size then - return nil, fmt( - "%s must have between 1 and %d characters", str, max_size) - end - if not re_match(str, pattern, "ajoi") then - return nil, fmt("%s is invalid. Must match pattern: %s", str, pattern) - end - return true - end - - - -- Validates a label array. - -- Validates labels based on the kong Labels AIP - function validate_labels(raw_labels) - local nkeys = require "table.nkeys" - if nkeys(raw_labels) > MAX_KEYS_COUNT then - return nil, fmt( - "labels validation failed: count exceeded %d max elements", - MAX_KEYS_COUNT - ) - end - - for _, kv in ipairs(raw_labels) do - local del = kv:find(":", 1, true) - local k = del and kv:sub(1, del - 1) or "" - local v = del and kv:sub(del + 1) or "" - - local ok, err = validate_entry(k, MAX_KEY_SIZE, KEY_PTRN) - if not ok then - return nil, "label key validation failed: " .. err - end - ok, err = validate_entry(v, MAX_VALUE_SIZE, VAL_PTRN) - if not ok then - return nil, "label value validation failed: " .. err - end - end - - return true - end -end - - --- Validate properties (type/enum/custom) and infer their type. --- @param[type=table] conf The configuration table to treat. -local function check_and_parse(conf, opts) - local errors = {} - - for k, value in pairs(conf) do - local v_schema = conf_constants.CONF_PARSERS[k] or {} - - value = parse_value(value, v_schema.typ) - - local typ = v_schema.typ or "string" - if value and not conf_constants.TYP_CHECKS[typ](value) then - errors[#errors + 1] = fmt("%s is not a %s: '%s'", k, typ, - tostring(value)) - - elseif v_schema.enum and not tablex.find(v_schema.enum, value) then - errors[#errors + 1] = fmt("%s has an invalid value: '%s' (%s)", k, - tostring(value), concat(v_schema.enum, ", ")) - - end - - conf[k] = value - end - - --------------------- - -- custom validations - --------------------- - - if conf.lua_ssl_trusted_certificate then - local new_paths = {} - - for _, trusted_cert in ipairs(conf.lua_ssl_trusted_certificate) do - if trusted_cert == "system" then - local system_path, err = utils.get_system_trusted_certs_filepath() - if system_path then - trusted_cert = system_path - - elseif not ngx.IS_CLI then - log.info("lua_ssl_trusted_certificate: unable to locate system bundle: " .. err .. - ". If you are using TLS connections, consider specifying " .. - "\"lua_ssl_trusted_certificate\" manually") - end - end - - if trusted_cert ~= "system" then - if not exists(trusted_cert) then - trusted_cert = try_decode_base64(trusted_cert) - local _, err = openssl_x509.new(trusted_cert) - if err then - errors[#errors + 1] = "lua_ssl_trusted_certificate: " .. - "failed loading certificate from " .. - trusted_cert - end - end - - new_paths[#new_paths + 1] = trusted_cert - end - end - - conf.lua_ssl_trusted_certificate = new_paths - end - - -- leave early if we're still at the stage before executing the main `resty` cmd - if opts.pre_cmd then - return #errors == 0, errors[1], errors - end - - conf.host_ports = {} - if conf.port_maps then - local MIN_PORT = 1 - local MAX_PORT = 65535 - - for _, port_map in ipairs(conf.port_maps) do - local colpos = find(port_map, ":", nil, true) - if not colpos then - errors[#errors + 1] = "invalid port mapping (`port_maps`): " .. 
port_map - - else - local host_port_str = sub(port_map, 1, colpos - 1) - local host_port_num = tonumber(host_port_str, 10) - local kong_port_str = sub(port_map, colpos + 1) - local kong_port_num = tonumber(kong_port_str, 10) - - if (host_port_num and host_port_num >= MIN_PORT and host_port_num <= MAX_PORT) - and (kong_port_num and kong_port_num >= MIN_PORT and kong_port_num <= MAX_PORT) - then - conf.host_ports[kong_port_num] = host_port_num - conf.host_ports[kong_port_str] = host_port_num - else - errors[#errors + 1] = "invalid port mapping (`port_maps`): " .. port_map - end - end - end - end - - for _, prefix in ipairs({ "proxy_", "admin_", "admin_gui_", "status_" }) do - local listen = conf[prefix .. "listen"] - - local ssl_enabled = find(concat(listen, ",") .. " ", "%sssl[%s,]") ~= nil - if not ssl_enabled and prefix == "proxy_" then - ssl_enabled = find(concat(conf.stream_listen, ",") .. " ", "%sssl[%s,]") ~= nil - end - - if prefix == "proxy_" then - prefix = "" - end - - if ssl_enabled then - conf.ssl_enabled = true - - local ssl_cert = conf[prefix .. "ssl_cert"] - local ssl_cert_key = conf[prefix .. "ssl_cert_key"] - - if #ssl_cert > 0 and #ssl_cert_key == 0 then - errors[#errors + 1] = prefix .. "ssl_cert_key must be specified" - - elseif #ssl_cert_key > 0 and #ssl_cert == 0 then - errors[#errors + 1] = prefix .. "ssl_cert must be specified" - - elseif #ssl_cert ~= #ssl_cert_key then - errors[#errors + 1] = prefix .. "ssl_cert was specified " .. #ssl_cert .. " times while " .. - prefix .. "ssl_cert_key was specified " .. #ssl_cert_key .. " times" - end - - if ssl_cert then - for i, cert in ipairs(ssl_cert) do - if not exists(cert) then - cert = try_decode_base64(cert) - ssl_cert[i] = cert - local _, err = openssl_x509.new(cert) - if err then - errors[#errors + 1] = prefix .. "ssl_cert: failed loading certificate from " .. cert - end - end - end - conf[prefix .. "ssl_cert"] = ssl_cert - end - - if ssl_cert_key then - for i, cert_key in ipairs(ssl_cert_key) do - if not exists(cert_key) then - cert_key = try_decode_base64(cert_key) - ssl_cert_key[i] = cert_key - local _, err = openssl_pkey.new(cert_key) - if err then - errors[#errors + 1] = prefix .. "ssl_cert_key: failed loading key from " .. cert_key - end - end - end - conf[prefix .. "ssl_cert_key"] = ssl_cert_key - end - end - end - - if conf.client_ssl then - local client_ssl_cert = conf.client_ssl_cert - local client_ssl_cert_key = conf.client_ssl_cert_key - - if client_ssl_cert and not client_ssl_cert_key then - errors[#errors + 1] = "client_ssl_cert_key must be specified" - - elseif client_ssl_cert_key and not client_ssl_cert then - errors[#errors + 1] = "client_ssl_cert must be specified" - end - - if client_ssl_cert and not exists(client_ssl_cert) then - client_ssl_cert = try_decode_base64(client_ssl_cert) - conf.client_ssl_cert = client_ssl_cert - local _, err = openssl_x509.new(client_ssl_cert) - if err then - errors[#errors + 1] = "client_ssl_cert: failed loading certificate from " .. client_ssl_cert - end - end - - if client_ssl_cert_key and not exists(client_ssl_cert_key) then - client_ssl_cert_key = try_decode_base64(client_ssl_cert_key) - conf.client_ssl_cert_key = client_ssl_cert_key - local _, err = openssl_pkey.new(client_ssl_cert_key) - if err then - errors[#errors + 1] = "client_ssl_cert_key: failed loading key from " .. 
- client_ssl_cert_key - end - end - end - - if conf.admin_gui_path then - if not conf.admin_gui_path:find("^/") then - errors[#errors + 1] = "admin_gui_path must start with a slash ('/')" - end - if conf.admin_gui_path:find("^/.+/$") then - errors[#errors + 1] = "admin_gui_path must not end with a slash ('/')" - end - if conf.admin_gui_path:match("[^%a%d%-_/]+") then - errors[#errors + 1] = "admin_gui_path can only contain letters, digits, " .. - "hyphens ('-'), underscores ('_'), and slashes ('/')" - end - if conf.admin_gui_path:match("//+") then - errors[#errors + 1] = "admin_gui_path must not contain continuous slashes ('/')" - end - end - - if conf.ssl_cipher_suite ~= "custom" then - local suite = conf_constants.CIPHER_SUITES[conf.ssl_cipher_suite] - if suite then - conf.ssl_ciphers = suite.ciphers - conf.nginx_http_ssl_protocols = suite.protocols - conf.nginx_http_ssl_prefer_server_ciphers = suite.prefer_server_ciphers - conf.nginx_stream_ssl_protocols = suite.protocols - conf.nginx_stream_ssl_prefer_server_ciphers = suite.prefer_server_ciphers - - -- There is no secure predefined one for old at the moment (and it's too slow to generate one). - -- Intermediate (the default) forcibly sets this to predefined ffdhe2048 group. - -- Modern just forcibly sets this to nil as there are no ciphers that need it. - if conf.ssl_cipher_suite ~= "old" then - conf.ssl_dhparam = suite.dhparams - conf.nginx_http_ssl_dhparam = suite.dhparams - conf.nginx_stream_ssl_dhparam = suite.dhparams - end - - else - errors[#errors + 1] = "Undefined cipher suite " .. tostring(conf.ssl_cipher_suite) - end - end - - if conf.ssl_dhparam then - if not is_predefined_dhgroup(conf.ssl_dhparam) - and not exists(conf.ssl_dhparam) then - conf.ssl_dhparam = try_decode_base64(conf.ssl_dhparam) - local _, err = openssl_pkey.new( - { - type = "DH", - param = conf.ssl_dhparam - } - ) - if err then - errors[#errors + 1] = "ssl_dhparam: failed loading certificate from " - .. conf.ssl_dhparam - end - end - - else - for _, key in ipairs({ "nginx_http_ssl_dhparam", "nginx_stream_ssl_dhparam" }) do - local file = conf[key] - if file and not is_predefined_dhgroup(file) and not exists(file) then - errors[#errors + 1] = key .. ": no such file at " .. file - end - end - end - - if conf.headers then - for _, token in ipairs(conf.headers) do - if token ~= "off" and not conf_constants.HEADER_KEY_TO_NAME[lower(token)] then - errors[#errors + 1] = fmt("headers: invalid entry '%s'", - tostring(token)) - end - end - end - - if conf.headers_upstream then - for _, token in ipairs(conf.headers_upstream) do - if token ~= "off" and not conf_constants.UPSTREAM_HEADER_KEY_TO_NAME[lower(token)] then - errors[#errors + 1] = fmt("headers_upstream: invalid entry '%s'", - tostring(token)) - end - end - end - - if conf.dns_resolver then - for _, server in ipairs(conf.dns_resolver) do - local dns = utils.normalize_ip(server) - - if not dns or dns.type == "name" then - errors[#errors + 1] = "dns_resolver must be a comma separated list " .. - "in the form of IPv4/6 or IPv4/6:port, got '" .. - server .. 
"'" - end - end - end - - if conf.dns_hostsfile then - if not pl_path.isfile(conf.dns_hostsfile) then - errors[#errors + 1] = "dns_hostsfile: file does not exist" - end - end - - if conf.dns_order then - local allowed = { LAST = true, A = true, AAAA = true, - CNAME = true, SRV = true } - - for _, name in ipairs(conf.dns_order) do - if not allowed[upper(name)] then - errors[#errors + 1] = fmt("dns_order: invalid entry '%s'", - tostring(name)) - end - end - end - - if not conf.lua_package_cpath then - conf.lua_package_cpath = "" - end - - -- checking the trusted ips - for _, address in ipairs(conf.trusted_ips) do - if not utils.is_valid_ip_or_cidr(address) and address ~= "unix:" then - errors[#errors + 1] = "trusted_ips must be a comma separated list in " .. - "the form of IPv4 or IPv6 address or CIDR " .. - "block or 'unix:', got '" .. address .. "'" - end - end - - if conf.pg_max_concurrent_queries < 0 then - errors[#errors + 1] = "pg_max_concurrent_queries must be greater than 0" - end - - if conf.pg_max_concurrent_queries ~= floor(conf.pg_max_concurrent_queries) then - errors[#errors + 1] = "pg_max_concurrent_queries must be an integer greater than 0" - end - - if conf.pg_semaphore_timeout < 0 then - errors[#errors + 1] = "pg_semaphore_timeout must be greater than 0" - end - - if conf.pg_semaphore_timeout ~= floor(conf.pg_semaphore_timeout) then - errors[#errors + 1] = "pg_semaphore_timeout must be an integer greater than 0" - end - - if conf.pg_keepalive_timeout then - if conf.pg_keepalive_timeout < 0 then - errors[#errors + 1] = "pg_keepalive_timeout must be greater than 0" - end - - if conf.pg_keepalive_timeout ~= floor(conf.pg_keepalive_timeout) then - errors[#errors + 1] = "pg_keepalive_timeout must be an integer greater than 0" - end - end - - if conf.pg_pool_size then - if conf.pg_pool_size < 0 then - errors[#errors + 1] = "pg_pool_size must be greater than 0" - end - - if conf.pg_pool_size ~= floor(conf.pg_pool_size) then - errors[#errors + 1] = "pg_pool_size must be an integer greater than 0" - end - end - - if conf.pg_backlog then - if conf.pg_backlog < 0 then - errors[#errors + 1] = "pg_backlog must be greater than 0" - end - - if conf.pg_backlog ~= floor(conf.pg_backlog) then - errors[#errors + 1] = "pg_backlog must be an integer greater than 0" - end - end - - if conf.pg_ro_max_concurrent_queries then - if conf.pg_ro_max_concurrent_queries < 0 then - errors[#errors + 1] = "pg_ro_max_concurrent_queries must be greater than 0" - end - - if conf.pg_ro_max_concurrent_queries ~= floor(conf.pg_ro_max_concurrent_queries) then - errors[#errors + 1] = "pg_ro_max_concurrent_queries must be an integer greater than 0" - end - end - - if conf.pg_ro_semaphore_timeout then - if conf.pg_ro_semaphore_timeout < 0 then - errors[#errors + 1] = "pg_ro_semaphore_timeout must be greater than 0" - end - - if conf.pg_ro_semaphore_timeout ~= floor(conf.pg_ro_semaphore_timeout) then - errors[#errors + 1] = "pg_ro_semaphore_timeout must be an integer greater than 0" - end - end - - if conf.pg_ro_keepalive_timeout then - if conf.pg_ro_keepalive_timeout < 0 then - errors[#errors + 1] = "pg_ro_keepalive_timeout must be greater than 0" - end - - if conf.pg_ro_keepalive_timeout ~= floor(conf.pg_ro_keepalive_timeout) then - errors[#errors + 1] = "pg_ro_keepalive_timeout must be an integer greater than 0" - end - end - - if conf.pg_ro_pool_size then - if conf.pg_ro_pool_size < 0 then - errors[#errors + 1] = "pg_ro_pool_size must be greater than 0" - end - - if conf.pg_ro_pool_size ~= 
floor(conf.pg_ro_pool_size) then - errors[#errors + 1] = "pg_ro_pool_size must be an integer greater than 0" - end - end - - if conf.pg_ro_backlog then - if conf.pg_ro_backlog < 0 then - errors[#errors + 1] = "pg_ro_backlog must be greater than 0" - end - - if conf.pg_ro_backlog ~= floor(conf.pg_ro_backlog) then - errors[#errors + 1] = "pg_ro_backlog must be an integer greater than 0" - end - end - - if conf.worker_state_update_frequency <= 0 then - errors[#errors + 1] = "worker_state_update_frequency must be greater than 0" - end - - if conf.proxy_server then - local parsed, err = socket_url.parse(conf.proxy_server) - if err then - errors[#errors + 1] = "proxy_server is invalid: " .. err - - elseif not parsed.scheme then - errors[#errors + 1] = "proxy_server missing scheme" - - elseif parsed.scheme ~= "http" and parsed.scheme ~= "https" then - errors[#errors + 1] = "proxy_server only supports \"http\" and \"https\", got " .. parsed.scheme - - elseif not parsed.host then - errors[#errors + 1] = "proxy_server missing host" - - elseif parsed.fragment or parsed.query or parsed.params then - errors[#errors + 1] = "fragments, query strings or parameters are meaningless in proxy configuration" - end - end - - if conf.role == "control_plane" or conf.role == "data_plane" then - local cluster_cert = conf.cluster_cert - local cluster_cert_key = conf.cluster_cert_key - local cluster_ca_cert = conf.cluster_ca_cert - - if not cluster_cert or not cluster_cert_key then - errors[#errors + 1] = "cluster certificate and key must be provided to use Hybrid mode" - - else - if not exists(cluster_cert) then - cluster_cert = try_decode_base64(cluster_cert) - conf.cluster_cert = cluster_cert - local _, err = openssl_x509.new(cluster_cert) - if err then - errors[#errors + 1] = "cluster_cert: failed loading certificate from " .. cluster_cert - end - end - - if not exists(cluster_cert_key) then - cluster_cert_key = try_decode_base64(cluster_cert_key) - conf.cluster_cert_key = cluster_cert_key - local _, err = openssl_pkey.new(cluster_cert_key) - if err then - errors[#errors + 1] = "cluster_cert_key: failed loading key from " .. cluster_cert_key - end - end - end - - if cluster_ca_cert and not exists(cluster_ca_cert) then - cluster_ca_cert = try_decode_base64(cluster_ca_cert) - conf.cluster_ca_cert = cluster_ca_cert - local _, err = openssl_x509.new(cluster_ca_cert) - if err then - errors[#errors + 1] = "cluster_ca_cert: failed loading certificate from " .. 
- cluster_ca_cert - end - end - end - - if conf.role == "control_plane" then - if #conf.admin_listen < 1 or strip(conf.admin_listen[1]) == "off" then - errors[#errors + 1] = "admin_listen must be specified when role = \"control_plane\"" - end - - if conf.cluster_mtls == "pki" and not conf.cluster_ca_cert then - errors[#errors + 1] = "cluster_ca_cert must be specified when cluster_mtls = \"pki\"" - end - - if #conf.cluster_listen < 1 or strip(conf.cluster_listen[1]) == "off" then - errors[#errors + 1] = "cluster_listen must be specified when role = \"control_plane\"" - end - - if conf.database == "off" then - errors[#errors + 1] = "in-memory storage can not be used when role = \"control_plane\"" - end - - if conf.cluster_use_proxy then - errors[#errors + 1] = "cluster_use_proxy can not be used when role = \"control_plane\"" - end - - if conf.cluster_dp_labels and #conf.cluster_dp_labels > 0 then - errors[#errors + 1] = "cluster_dp_labels can not be used when role = \"control_plane\"" - end - - elseif conf.role == "data_plane" then - if #conf.proxy_listen < 1 or strip(conf.proxy_listen[1]) == "off" then - errors[#errors + 1] = "proxy_listen must be specified when role = \"data_plane\"" - end - - if conf.database ~= "off" then - errors[#errors + 1] = "only in-memory storage can be used when role = \"data_plane\"\n" .. - "Hint: set database = off in your kong.conf" - end - - if not conf.lua_ssl_trusted_certificate then - conf.lua_ssl_trusted_certificate = {} - end - - if conf.cluster_mtls == "shared" then - insert(conf.lua_ssl_trusted_certificate, conf.cluster_cert) - - elseif conf.cluster_mtls == "pki" or conf.cluster_mtls == "pki_check_cn" then - insert(conf.lua_ssl_trusted_certificate, conf.cluster_ca_cert) - end - - if conf.cluster_use_proxy and not conf.proxy_server then - errors[#errors + 1] = "cluster_use_proxy is turned on but no proxy_server is configured" - end - - if conf.cluster_dp_labels then - local _, err = validate_labels(conf.cluster_dp_labels) - if err then - errors[#errors + 1] = err - end - end - - else - if conf.cluster_dp_labels and #conf.cluster_dp_labels > 0 then - errors[#errors + 1] = "cluster_dp_labels can only be used when role = \"data_plane\"" - end - end - - if conf.cluster_data_plane_purge_delay < 60 then - errors[#errors + 1] = "cluster_data_plane_purge_delay must be 60 or greater" - end - - if conf.cluster_max_payload < 4194304 then - errors[#errors + 1] = "cluster_max_payload must be 4194304 (4MB) or greater" - end - - if conf.upstream_keepalive_pool_size < 0 then - errors[#errors + 1] = "upstream_keepalive_pool_size must be 0 or greater" - end - - if conf.upstream_keepalive_max_requests < 0 then - errors[#errors + 1] = "upstream_keepalive_max_requests must be 0 or greater" - end - - if conf.upstream_keepalive_idle_timeout < 0 then - errors[#errors + 1] = "upstream_keepalive_idle_timeout must be 0 or greater" - end - - if conf.tracing_instrumentations and #conf.tracing_instrumentations > 0 then - local instrumentation = require "kong.tracing.instrumentation" - local available_types_map = utils.cycle_aware_deep_copy(instrumentation.available_types) - available_types_map["all"] = true - available_types_map["off"] = true - available_types_map["request"] = true - - for _, trace_type in ipairs(conf.tracing_instrumentations) do - if not available_types_map[trace_type] then - errors[#errors + 1] = "invalid tracing type: " .. 
trace_type - end - end - - if #conf.tracing_instrumentations > 1 - and tablex.find(conf.tracing_instrumentations, "off") - then - errors[#errors + 1] = "invalid tracing types: off, other types are mutually exclusive" - end - - if conf.tracing_sampling_rate < 0 or conf.tracing_sampling_rate > 1 then - errors[#errors + 1] = "tracing_sampling_rate must be between 0 and 1" - end - end - - if conf.lua_max_req_headers < 1 or conf.lua_max_req_headers > 1000 - or conf.lua_max_req_headers ~= floor(conf.lua_max_req_headers) - then - errors[#errors + 1] = "lua_max_req_headers must be an integer between 1 and 1000" - end - - if conf.lua_max_resp_headers < 1 or conf.lua_max_resp_headers > 1000 - or conf.lua_max_resp_headers ~= floor(conf.lua_max_resp_headers) - then - errors[#errors + 1] = "lua_max_resp_headers must be an integer between 1 and 1000" - end - - if conf.lua_max_uri_args < 1 or conf.lua_max_uri_args > 1000 - or conf.lua_max_uri_args ~= floor(conf.lua_max_uri_args) - then - errors[#errors + 1] = "lua_max_uri_args must be an integer between 1 and 1000" - end - - if conf.lua_max_post_args < 1 or conf.lua_max_post_args > 1000 - or conf.lua_max_post_args ~= floor(conf.lua_max_post_args) - then - errors[#errors + 1] = "lua_max_post_args must be an integer between 1 and 1000" - end - - if conf.node_id and not utils.is_valid_uuid(conf.node_id) then - errors[#errors + 1] = "node_id must be a valid UUID" - end - - if conf.database == "cassandra" then - errors[#errors + 1] = "Cassandra as a datastore for Kong is not supported in versions 3.4 and above. Please use Postgres." - end - - local ok, err = validate_wasm(conf) - if not ok then - errors[#errors + 1] = err - end - - if conf.wasm and check_dynamic_module("ngx_wasm_module") then - local err - conf.wasm_dynamic_module, err = lookup_dynamic_module_so("ngx_wasm_module", conf) - if err then - errors[#errors + 1] = err - end - end - - if #conf.admin_listen < 1 or strip(conf.admin_listen[1]) == "off" then - if #conf.admin_gui_listen > 0 and strip(conf.admin_gui_listen[1]) ~= "off" then - log.warn("Kong Manager won't be functional because the Admin API is not listened on any interface") - end - end - - return #errors == 0, errors[1], errors -end - - -local function overrides(k, default_v, opts, file_conf, arg_conf) - opts = opts or {} - - local value -- definitive value for this property - - -- default values have lowest priority - - if file_conf and file_conf[k] == nil and not opts.no_defaults then - -- PL will ignore empty strings, so we need a placeholder (NONE) - value = default_v == "NONE" and "" or default_v - - else - value = file_conf[k] -- given conf values have middle priority - end - - if opts.defaults_only then - return value, k - end - - if not opts.from_kong_env then - -- environment variables have higher priority - - local env_name = "KONG_" .. upper(k) - local env = getenv(env_name) - if env ~= nil then - local to_print = env - - if conf_constants.CONF_SENSITIVE[k] then - to_print = conf_constants.CONF_SENSITIVE_PLACEHOLDER - end - - log.debug('%s ENV found with "%s"', env_name, to_print) - - value = env - end - end - - -- arg_conf have highest priority - if arg_conf and arg_conf[k] ~= nil then - value = arg_conf[k] - end - - return value, k -end - - -local function parse_nginx_directives(dyn_namespace, conf, injected_in_namespace) - conf = conf or {} - local directives = {} - - for k, v in pairs(conf) do - if type(k) == "string" and not injected_in_namespace[k] then - local directive = match(k, dyn_namespace.prefix .. 
"(.+)") - if directive then - if v ~= "NONE" and not dyn_namespace.ignore[directive] then - insert(directives, { name = directive, value = v }) - end - - injected_in_namespace[k] = true - end - end - end - - return directives -end +local get_phase = conf_parse.get_phase +local is_predefined_dhgroup = conf_parse.is_predefined_dhgroup +local parse_value = conf_parse.parse_value +local check_and_parse = conf_parse.check_and_parse +local overrides = conf_parse.overrides +local parse_nginx_directives = conf_parse.parse_nginx_directives local function aliased_properties(conf) diff --git a/kong/conf_loader/parse.lua b/kong/conf_loader/parse.lua new file mode 100644 index 000000000000..841bff4e1b46 --- /dev/null +++ b/kong/conf_loader/parse.lua @@ -0,0 +1,925 @@ +local require = require + + +local pl_stringx = require "pl.stringx" +local pl_path = require "pl.path" +local socket_url = require "socket.url" +local tablex = require "pl.tablex" +local openssl_x509 = require "resty.openssl.x509" +local openssl_pkey = require "resty.openssl.pkey" +local log = require "kong.cmd.utils.log" +local nginx_signals = require "kong.cmd.utils.nginx_signals" +local conf_constants = require "kong.conf_loader.constants" + + +local tools_system = require("kong.tools.system") -- for unit-testing +local tools_ip = require("kong.tools.ip") + + +local normalize_ip = tools_ip.normalize_ip +local is_valid_ip_or_cidr = tools_ip.is_valid_ip_or_cidr +local try_decode_base64 = require("kong.tools.string").try_decode_base64 +local cycle_aware_deep_copy = require("kong.tools.table").cycle_aware_deep_copy +local is_valid_uuid = require("kong.tools.uuid").is_valid_uuid + + +local type = type +local pairs = pairs +local ipairs = ipairs +local tostring = tostring +local tonumber = tonumber +local setmetatable = setmetatable +local floor = math.floor +local fmt = string.format +local find = string.find +local sub = string.sub +local lower = string.lower +local upper = string.upper +local match = string.match +local insert = table.insert +local concat = table.concat +local getenv = os.getenv +local re_match = ngx.re.match +local strip = pl_stringx.strip +local exists = pl_path.exists +local isdir = pl_path.isdir + + +local get_phase do + if ngx and ngx.get_phase then + get_phase = ngx.get_phase + else + get_phase = function() + return "timer" + end + end +end + + +local function is_predefined_dhgroup(group) + if type(group) ~= "string" then + return false + end + + return not not openssl_pkey.paramgen({ + type = "DH", + group = group, + }) +end + + +local function parse_value(value, typ) + if type(value) == "string" then + value = strip(value) + end + + -- transform {boolean} values ("on"/"off" aliasing to true/false) + -- transform {ngx_boolean} values ("on"/"off" aliasing to on/off) + -- transform {explicit string} values (number values converted to strings) + -- transform {array} values (comma-separated strings) + if typ == "boolean" then + value = value == true or value == "on" or value == "true" + + elseif typ == "ngx_boolean" then + value = (value == "on" or value == true) and "on" or "off" + + elseif typ == "string" then + value = tostring(value) -- forced string inference + + elseif typ == "number" then + value = tonumber(value) -- catch ENV variables (strings) that are numbers + + elseif typ == "array" and type(value) == "string" then + -- must check type because pl will already convert comma + -- separated strings to tables (but not when the arr has + -- only one element) + value = setmetatable(pl_stringx.split(value, ","), 
nil) -- remove List mt + + for i = 1, #value do + value[i] = strip(value[i]) + end + end + + if value == "" then + -- unset values are removed + value = nil + end + + return value +end + + +-- Check if module is dynamic +local function check_dynamic_module(mod_name) + local configure_line = ngx.config.nginx_configure() + local mod_re = [[^.*--add-dynamic-module=(.+\/]] .. mod_name .. [[(\s|$)).*$]] + return re_match(configure_line, mod_re, "oi") ~= nil +end + + +-- Lookup dynamic module object +-- this function will lookup for the `mod_name` dynamic module in the following +-- paths: +-- - /usr/local/kong/modules -- default path for modules in container images +-- - /../modules +-- @param[type=string] mod_name The module name to lookup, without file extension +local function lookup_dynamic_module_so(mod_name, kong_conf) + log.debug("looking up dynamic module %s", mod_name) + + local mod_file = fmt("/usr/local/kong/modules/%s.so", mod_name) + if exists(mod_file) then + log.debug("module '%s' found at '%s'", mod_name, mod_file) + return mod_file + end + + local nginx_bin = nginx_signals.find_nginx_bin(kong_conf) + mod_file = fmt("%s/../modules/%s.so", pl_path.dirname(nginx_bin), mod_name) + if exists(mod_file) then + log.debug("module '%s' found at '%s'", mod_name, mod_file) + return mod_file + end + + return nil, fmt("%s dynamic module shared object not found", mod_name) +end + + +-- Validate Wasm properties +local function validate_wasm(conf) + local wasm_enabled = conf.wasm + local filters_path = conf.wasm_filters_path + + if wasm_enabled then + if filters_path and not exists(filters_path) and not isdir(filters_path) then + return nil, fmt("wasm_filters_path '%s' is not a valid directory", filters_path) + end + end + + return true +end + + +local validate_labels +do + local MAX_KEY_SIZE = 63 + local MAX_VALUE_SIZE = 63 + local MAX_KEYS_COUNT = 10 + + + -- validation rules based on Kong Labels AIP + -- https://kong-aip.netlify.app/aip/129/ + local BASE_PTRN = "[a-z0-9]([\\w\\.:-]*[a-z0-9]|)$" + local KEY_PTRN = "(?!kong)(?!konnect)(?!insomnia)(?!mesh)(?!kic)" .. BASE_PTRN + local VAL_PTRN = BASE_PTRN + + + local function validate_entry(str, max_size, pattern) + if str == "" or #str > max_size then + return nil, fmt( + "%s must have between 1 and %d characters", str, max_size) + end + if not re_match(str, pattern, "ajoi") then + return nil, fmt("%s is invalid. Must match pattern: %s", str, pattern) + end + return true + end + + + -- Validates a label array. + -- Validates labels based on the kong Labels AIP + function validate_labels(raw_labels) + local nkeys = require "table.nkeys" + if nkeys(raw_labels) > MAX_KEYS_COUNT then + return nil, fmt( + "labels validation failed: count exceeded %d max elements", + MAX_KEYS_COUNT + ) + end + + for _, kv in ipairs(raw_labels) do + local del = kv:find(":", 1, true) + local k = del and kv:sub(1, del - 1) or "" + local v = del and kv:sub(del + 1) or "" + + local ok, err = validate_entry(k, MAX_KEY_SIZE, KEY_PTRN) + if not ok then + return nil, "label key validation failed: " .. err + end + ok, err = validate_entry(v, MAX_VALUE_SIZE, VAL_PTRN) + if not ok then + return nil, "label value validation failed: " .. err + end + end + + return true + end +end + + +-- Validate properties (type/enum/custom) and infer their type. +-- @param[type=table] conf The configuration table to treat. 
+local function check_and_parse(conf, opts) + local errors = {} + + for k, value in pairs(conf) do + local v_schema = conf_constants.CONF_PARSERS[k] or {} + + value = parse_value(value, v_schema.typ) + + local typ = v_schema.typ or "string" + if value and not conf_constants.TYP_CHECKS[typ](value) then + errors[#errors + 1] = fmt("%s is not a %s: '%s'", k, typ, + tostring(value)) + + elseif v_schema.enum and not tablex.find(v_schema.enum, value) then + errors[#errors + 1] = fmt("%s has an invalid value: '%s' (%s)", k, + tostring(value), concat(v_schema.enum, ", ")) + + end + + conf[k] = value + end + + --------------------- + -- custom validations + --------------------- + + if conf.lua_ssl_trusted_certificate then + local new_paths = {} + + for _, trusted_cert in ipairs(conf.lua_ssl_trusted_certificate) do + if trusted_cert == "system" then + local system_path, err = tools_system.get_system_trusted_certs_filepath() + if system_path then + trusted_cert = system_path + + elseif not ngx.IS_CLI then + log.info("lua_ssl_trusted_certificate: unable to locate system bundle: " .. err .. + ". If you are using TLS connections, consider specifying " .. + "\"lua_ssl_trusted_certificate\" manually") + end + end + + if trusted_cert ~= "system" then + if not exists(trusted_cert) then + trusted_cert = try_decode_base64(trusted_cert) + local _, err = openssl_x509.new(trusted_cert) + if err then + errors[#errors + 1] = "lua_ssl_trusted_certificate: " .. + "failed loading certificate from " .. + trusted_cert + end + end + + new_paths[#new_paths + 1] = trusted_cert + end + end + + conf.lua_ssl_trusted_certificate = new_paths + end + + -- leave early if we're still at the stage before executing the main `resty` cmd + if opts.pre_cmd then + return #errors == 0, errors[1], errors + end + + conf.host_ports = {} + if conf.port_maps then + local MIN_PORT = 1 + local MAX_PORT = 65535 + + for _, port_map in ipairs(conf.port_maps) do + local colpos = find(port_map, ":", nil, true) + if not colpos then + errors[#errors + 1] = "invalid port mapping (`port_maps`): " .. port_map + + else + local host_port_str = sub(port_map, 1, colpos - 1) + local host_port_num = tonumber(host_port_str, 10) + local kong_port_str = sub(port_map, colpos + 1) + local kong_port_num = tonumber(kong_port_str, 10) + + if (host_port_num and host_port_num >= MIN_PORT and host_port_num <= MAX_PORT) + and (kong_port_num and kong_port_num >= MIN_PORT and kong_port_num <= MAX_PORT) + then + conf.host_ports[kong_port_num] = host_port_num + conf.host_ports[kong_port_str] = host_port_num + else + errors[#errors + 1] = "invalid port mapping (`port_maps`): " .. port_map + end + end + end + end + + for _, prefix in ipairs({ "proxy_", "admin_", "admin_gui_", "status_" }) do + local listen = conf[prefix .. "listen"] + + local ssl_enabled = find(concat(listen, ",") .. " ", "%sssl[%s,]") ~= nil + if not ssl_enabled and prefix == "proxy_" then + ssl_enabled = find(concat(conf.stream_listen, ",") .. " ", "%sssl[%s,]") ~= nil + end + + if prefix == "proxy_" then + prefix = "" + end + + if ssl_enabled then + conf.ssl_enabled = true + + local ssl_cert = conf[prefix .. "ssl_cert"] + local ssl_cert_key = conf[prefix .. "ssl_cert_key"] + + if #ssl_cert > 0 and #ssl_cert_key == 0 then + errors[#errors + 1] = prefix .. "ssl_cert_key must be specified" + + elseif #ssl_cert_key > 0 and #ssl_cert == 0 then + errors[#errors + 1] = prefix .. "ssl_cert must be specified" + + elseif #ssl_cert ~= #ssl_cert_key then + errors[#errors + 1] = prefix .. "ssl_cert was specified " .. 
#ssl_cert .. " times while " .. + prefix .. "ssl_cert_key was specified " .. #ssl_cert_key .. " times" + end + + if ssl_cert then + for i, cert in ipairs(ssl_cert) do + if not exists(cert) then + cert = try_decode_base64(cert) + ssl_cert[i] = cert + local _, err = openssl_x509.new(cert) + if err then + errors[#errors + 1] = prefix .. "ssl_cert: failed loading certificate from " .. cert + end + end + end + conf[prefix .. "ssl_cert"] = ssl_cert + end + + if ssl_cert_key then + for i, cert_key in ipairs(ssl_cert_key) do + if not exists(cert_key) then + cert_key = try_decode_base64(cert_key) + ssl_cert_key[i] = cert_key + local _, err = openssl_pkey.new(cert_key) + if err then + errors[#errors + 1] = prefix .. "ssl_cert_key: failed loading key from " .. cert_key + end + end + end + conf[prefix .. "ssl_cert_key"] = ssl_cert_key + end + end + end + + if conf.client_ssl then + local client_ssl_cert = conf.client_ssl_cert + local client_ssl_cert_key = conf.client_ssl_cert_key + + if client_ssl_cert and not client_ssl_cert_key then + errors[#errors + 1] = "client_ssl_cert_key must be specified" + + elseif client_ssl_cert_key and not client_ssl_cert then + errors[#errors + 1] = "client_ssl_cert must be specified" + end + + if client_ssl_cert and not exists(client_ssl_cert) then + client_ssl_cert = try_decode_base64(client_ssl_cert) + conf.client_ssl_cert = client_ssl_cert + local _, err = openssl_x509.new(client_ssl_cert) + if err then + errors[#errors + 1] = "client_ssl_cert: failed loading certificate from " .. client_ssl_cert + end + end + + if client_ssl_cert_key and not exists(client_ssl_cert_key) then + client_ssl_cert_key = try_decode_base64(client_ssl_cert_key) + conf.client_ssl_cert_key = client_ssl_cert_key + local _, err = openssl_pkey.new(client_ssl_cert_key) + if err then + errors[#errors + 1] = "client_ssl_cert_key: failed loading key from " .. + client_ssl_cert_key + end + end + end + + if conf.admin_gui_path then + if not conf.admin_gui_path:find("^/") then + errors[#errors + 1] = "admin_gui_path must start with a slash ('/')" + end + if conf.admin_gui_path:find("^/.+/$") then + errors[#errors + 1] = "admin_gui_path must not end with a slash ('/')" + end + if conf.admin_gui_path:match("[^%a%d%-_/]+") then + errors[#errors + 1] = "admin_gui_path can only contain letters, digits, " .. + "hyphens ('-'), underscores ('_'), and slashes ('/')" + end + if conf.admin_gui_path:match("//+") then + errors[#errors + 1] = "admin_gui_path must not contain continuous slashes ('/')" + end + end + + if conf.ssl_cipher_suite ~= "custom" then + local suite = conf_constants.CIPHER_SUITES[conf.ssl_cipher_suite] + if suite then + conf.ssl_ciphers = suite.ciphers + conf.nginx_http_ssl_protocols = suite.protocols + conf.nginx_http_ssl_prefer_server_ciphers = suite.prefer_server_ciphers + conf.nginx_stream_ssl_protocols = suite.protocols + conf.nginx_stream_ssl_prefer_server_ciphers = suite.prefer_server_ciphers + + -- There is no secure predefined one for old at the moment (and it's too slow to generate one). + -- Intermediate (the default) forcibly sets this to predefined ffdhe2048 group. + -- Modern just forcibly sets this to nil as there are no ciphers that need it. + if conf.ssl_cipher_suite ~= "old" then + conf.ssl_dhparam = suite.dhparams + conf.nginx_http_ssl_dhparam = suite.dhparams + conf.nginx_stream_ssl_dhparam = suite.dhparams + end + + else + errors[#errors + 1] = "Undefined cipher suite " .. 
tostring(conf.ssl_cipher_suite) + end + end + + if conf.ssl_dhparam then + if not is_predefined_dhgroup(conf.ssl_dhparam) + and not exists(conf.ssl_dhparam) then + conf.ssl_dhparam = try_decode_base64(conf.ssl_dhparam) + local _, err = openssl_pkey.new( + { + type = "DH", + param = conf.ssl_dhparam + } + ) + if err then + errors[#errors + 1] = "ssl_dhparam: failed loading certificate from " + .. conf.ssl_dhparam + end + end + + else + for _, key in ipairs({ "nginx_http_ssl_dhparam", "nginx_stream_ssl_dhparam" }) do + local file = conf[key] + if file and not is_predefined_dhgroup(file) and not exists(file) then + errors[#errors + 1] = key .. ": no such file at " .. file + end + end + end + + if conf.headers then + for _, token in ipairs(conf.headers) do + if token ~= "off" and not conf_constants.HEADER_KEY_TO_NAME[lower(token)] then + errors[#errors + 1] = fmt("headers: invalid entry '%s'", + tostring(token)) + end + end + end + + if conf.headers_upstream then + for _, token in ipairs(conf.headers_upstream) do + if token ~= "off" and not conf_constants.UPSTREAM_HEADER_KEY_TO_NAME[lower(token)] then + errors[#errors + 1] = fmt("headers_upstream: invalid entry '%s'", + tostring(token)) + end + end + end + + if conf.dns_resolver then + for _, server in ipairs(conf.dns_resolver) do + local dns = normalize_ip(server) + + if not dns or dns.type == "name" then + errors[#errors + 1] = "dns_resolver must be a comma separated list " .. + "in the form of IPv4/6 or IPv4/6:port, got '" .. + server .. "'" + end + end + end + + if conf.dns_hostsfile then + if not pl_path.isfile(conf.dns_hostsfile) then + errors[#errors + 1] = "dns_hostsfile: file does not exist" + end + end + + if conf.dns_order then + local allowed = { LAST = true, A = true, AAAA = true, + CNAME = true, SRV = true } + + for _, name in ipairs(conf.dns_order) do + if not allowed[upper(name)] then + errors[#errors + 1] = fmt("dns_order: invalid entry '%s'", + tostring(name)) + end + end + end + + if not conf.lua_package_cpath then + conf.lua_package_cpath = "" + end + + -- checking the trusted ips + for _, address in ipairs(conf.trusted_ips) do + if not is_valid_ip_or_cidr(address) and address ~= "unix:" then + errors[#errors + 1] = "trusted_ips must be a comma separated list in " .. + "the form of IPv4 or IPv6 address or CIDR " .. + "block or 'unix:', got '" .. address .. 
"'" + end + end + + if conf.pg_max_concurrent_queries < 0 then + errors[#errors + 1] = "pg_max_concurrent_queries must be greater than 0" + end + + if conf.pg_max_concurrent_queries ~= floor(conf.pg_max_concurrent_queries) then + errors[#errors + 1] = "pg_max_concurrent_queries must be an integer greater than 0" + end + + if conf.pg_semaphore_timeout < 0 then + errors[#errors + 1] = "pg_semaphore_timeout must be greater than 0" + end + + if conf.pg_semaphore_timeout ~= floor(conf.pg_semaphore_timeout) then + errors[#errors + 1] = "pg_semaphore_timeout must be an integer greater than 0" + end + + if conf.pg_keepalive_timeout then + if conf.pg_keepalive_timeout < 0 then + errors[#errors + 1] = "pg_keepalive_timeout must be greater than 0" + end + + if conf.pg_keepalive_timeout ~= floor(conf.pg_keepalive_timeout) then + errors[#errors + 1] = "pg_keepalive_timeout must be an integer greater than 0" + end + end + + if conf.pg_pool_size then + if conf.pg_pool_size < 0 then + errors[#errors + 1] = "pg_pool_size must be greater than 0" + end + + if conf.pg_pool_size ~= floor(conf.pg_pool_size) then + errors[#errors + 1] = "pg_pool_size must be an integer greater than 0" + end + end + + if conf.pg_backlog then + if conf.pg_backlog < 0 then + errors[#errors + 1] = "pg_backlog must be greater than 0" + end + + if conf.pg_backlog ~= floor(conf.pg_backlog) then + errors[#errors + 1] = "pg_backlog must be an integer greater than 0" + end + end + + if conf.pg_ro_max_concurrent_queries then + if conf.pg_ro_max_concurrent_queries < 0 then + errors[#errors + 1] = "pg_ro_max_concurrent_queries must be greater than 0" + end + + if conf.pg_ro_max_concurrent_queries ~= floor(conf.pg_ro_max_concurrent_queries) then + errors[#errors + 1] = "pg_ro_max_concurrent_queries must be an integer greater than 0" + end + end + + if conf.pg_ro_semaphore_timeout then + if conf.pg_ro_semaphore_timeout < 0 then + errors[#errors + 1] = "pg_ro_semaphore_timeout must be greater than 0" + end + + if conf.pg_ro_semaphore_timeout ~= floor(conf.pg_ro_semaphore_timeout) then + errors[#errors + 1] = "pg_ro_semaphore_timeout must be an integer greater than 0" + end + end + + if conf.pg_ro_keepalive_timeout then + if conf.pg_ro_keepalive_timeout < 0 then + errors[#errors + 1] = "pg_ro_keepalive_timeout must be greater than 0" + end + + if conf.pg_ro_keepalive_timeout ~= floor(conf.pg_ro_keepalive_timeout) then + errors[#errors + 1] = "pg_ro_keepalive_timeout must be an integer greater than 0" + end + end + + if conf.pg_ro_pool_size then + if conf.pg_ro_pool_size < 0 then + errors[#errors + 1] = "pg_ro_pool_size must be greater than 0" + end + + if conf.pg_ro_pool_size ~= floor(conf.pg_ro_pool_size) then + errors[#errors + 1] = "pg_ro_pool_size must be an integer greater than 0" + end + end + + if conf.pg_ro_backlog then + if conf.pg_ro_backlog < 0 then + errors[#errors + 1] = "pg_ro_backlog must be greater than 0" + end + + if conf.pg_ro_backlog ~= floor(conf.pg_ro_backlog) then + errors[#errors + 1] = "pg_ro_backlog must be an integer greater than 0" + end + end + + if conf.worker_state_update_frequency <= 0 then + errors[#errors + 1] = "worker_state_update_frequency must be greater than 0" + end + + if conf.proxy_server then + local parsed, err = socket_url.parse(conf.proxy_server) + if err then + errors[#errors + 1] = "proxy_server is invalid: " .. 
err + + elseif not parsed.scheme then + errors[#errors + 1] = "proxy_server missing scheme" + + elseif parsed.scheme ~= "http" and parsed.scheme ~= "https" then + errors[#errors + 1] = "proxy_server only supports \"http\" and \"https\", got " .. parsed.scheme + + elseif not parsed.host then + errors[#errors + 1] = "proxy_server missing host" + + elseif parsed.fragment or parsed.query or parsed.params then + errors[#errors + 1] = "fragments, query strings or parameters are meaningless in proxy configuration" + end + end + + if conf.role == "control_plane" or conf.role == "data_plane" then + local cluster_cert = conf.cluster_cert + local cluster_cert_key = conf.cluster_cert_key + local cluster_ca_cert = conf.cluster_ca_cert + + if not cluster_cert or not cluster_cert_key then + errors[#errors + 1] = "cluster certificate and key must be provided to use Hybrid mode" + + else + if not exists(cluster_cert) then + cluster_cert = try_decode_base64(cluster_cert) + conf.cluster_cert = cluster_cert + local _, err = openssl_x509.new(cluster_cert) + if err then + errors[#errors + 1] = "cluster_cert: failed loading certificate from " .. cluster_cert + end + end + + if not exists(cluster_cert_key) then + cluster_cert_key = try_decode_base64(cluster_cert_key) + conf.cluster_cert_key = cluster_cert_key + local _, err = openssl_pkey.new(cluster_cert_key) + if err then + errors[#errors + 1] = "cluster_cert_key: failed loading key from " .. cluster_cert_key + end + end + end + + if cluster_ca_cert and not exists(cluster_ca_cert) then + cluster_ca_cert = try_decode_base64(cluster_ca_cert) + conf.cluster_ca_cert = cluster_ca_cert + local _, err = openssl_x509.new(cluster_ca_cert) + if err then + errors[#errors + 1] = "cluster_ca_cert: failed loading certificate from " .. + cluster_ca_cert + end + end + end + + if conf.role == "control_plane" then + if #conf.admin_listen < 1 or strip(conf.admin_listen[1]) == "off" then + errors[#errors + 1] = "admin_listen must be specified when role = \"control_plane\"" + end + + if conf.cluster_mtls == "pki" and not conf.cluster_ca_cert then + errors[#errors + 1] = "cluster_ca_cert must be specified when cluster_mtls = \"pki\"" + end + + if #conf.cluster_listen < 1 or strip(conf.cluster_listen[1]) == "off" then + errors[#errors + 1] = "cluster_listen must be specified when role = \"control_plane\"" + end + + if conf.database == "off" then + errors[#errors + 1] = "in-memory storage can not be used when role = \"control_plane\"" + end + + if conf.cluster_use_proxy then + errors[#errors + 1] = "cluster_use_proxy can not be used when role = \"control_plane\"" + end + + if conf.cluster_dp_labels and #conf.cluster_dp_labels > 0 then + errors[#errors + 1] = "cluster_dp_labels can not be used when role = \"control_plane\"" + end + + elseif conf.role == "data_plane" then + if #conf.proxy_listen < 1 or strip(conf.proxy_listen[1]) == "off" then + errors[#errors + 1] = "proxy_listen must be specified when role = \"data_plane\"" + end + + if conf.database ~= "off" then + errors[#errors + 1] = "only in-memory storage can be used when role = \"data_plane\"\n" .. 
+ "Hint: set database = off in your kong.conf" + end + + if not conf.lua_ssl_trusted_certificate then + conf.lua_ssl_trusted_certificate = {} + end + + if conf.cluster_mtls == "shared" then + insert(conf.lua_ssl_trusted_certificate, conf.cluster_cert) + + elseif conf.cluster_mtls == "pki" or conf.cluster_mtls == "pki_check_cn" then + insert(conf.lua_ssl_trusted_certificate, conf.cluster_ca_cert) + end + + if conf.cluster_use_proxy and not conf.proxy_server then + errors[#errors + 1] = "cluster_use_proxy is turned on but no proxy_server is configured" + end + + if conf.cluster_dp_labels then + local _, err = validate_labels(conf.cluster_dp_labels) + if err then + errors[#errors + 1] = err + end + end + + else + if conf.cluster_dp_labels and #conf.cluster_dp_labels > 0 then + errors[#errors + 1] = "cluster_dp_labels can only be used when role = \"data_plane\"" + end + end + + if conf.cluster_data_plane_purge_delay < 60 then + errors[#errors + 1] = "cluster_data_plane_purge_delay must be 60 or greater" + end + + if conf.cluster_max_payload < 4194304 then + errors[#errors + 1] = "cluster_max_payload must be 4194304 (4MB) or greater" + end + + if conf.upstream_keepalive_pool_size < 0 then + errors[#errors + 1] = "upstream_keepalive_pool_size must be 0 or greater" + end + + if conf.upstream_keepalive_max_requests < 0 then + errors[#errors + 1] = "upstream_keepalive_max_requests must be 0 or greater" + end + + if conf.upstream_keepalive_idle_timeout < 0 then + errors[#errors + 1] = "upstream_keepalive_idle_timeout must be 0 or greater" + end + + if conf.tracing_instrumentations and #conf.tracing_instrumentations > 0 then + local instrumentation = require "kong.tracing.instrumentation" + local available_types_map = cycle_aware_deep_copy(instrumentation.available_types) + available_types_map["all"] = true + available_types_map["off"] = true + available_types_map["request"] = true + + for _, trace_type in ipairs(conf.tracing_instrumentations) do + if not available_types_map[trace_type] then + errors[#errors + 1] = "invalid tracing type: " .. 
trace_type + end + end + + if #conf.tracing_instrumentations > 1 + and tablex.find(conf.tracing_instrumentations, "off") + then + errors[#errors + 1] = "invalid tracing types: off, other types are mutually exclusive" + end + + if conf.tracing_sampling_rate < 0 or conf.tracing_sampling_rate > 1 then + errors[#errors + 1] = "tracing_sampling_rate must be between 0 and 1" + end + end + + if conf.lua_max_req_headers < 1 or conf.lua_max_req_headers > 1000 + or conf.lua_max_req_headers ~= floor(conf.lua_max_req_headers) + then + errors[#errors + 1] = "lua_max_req_headers must be an integer between 1 and 1000" + end + + if conf.lua_max_resp_headers < 1 or conf.lua_max_resp_headers > 1000 + or conf.lua_max_resp_headers ~= floor(conf.lua_max_resp_headers) + then + errors[#errors + 1] = "lua_max_resp_headers must be an integer between 1 and 1000" + end + + if conf.lua_max_uri_args < 1 or conf.lua_max_uri_args > 1000 + or conf.lua_max_uri_args ~= floor(conf.lua_max_uri_args) + then + errors[#errors + 1] = "lua_max_uri_args must be an integer between 1 and 1000" + end + + if conf.lua_max_post_args < 1 or conf.lua_max_post_args > 1000 + or conf.lua_max_post_args ~= floor(conf.lua_max_post_args) + then + errors[#errors + 1] = "lua_max_post_args must be an integer between 1 and 1000" + end + + if conf.node_id and not is_valid_uuid(conf.node_id) then + errors[#errors + 1] = "node_id must be a valid UUID" + end + + if conf.database == "cassandra" then + errors[#errors + 1] = "Cassandra as a datastore for Kong is not supported in versions 3.4 and above. Please use Postgres." + end + + local ok, err = validate_wasm(conf) + if not ok then + errors[#errors + 1] = err + end + + if conf.wasm and check_dynamic_module("ngx_wasm_module") then + local err + conf.wasm_dynamic_module, err = lookup_dynamic_module_so("ngx_wasm_module", conf) + if err then + errors[#errors + 1] = err + end + end + + if #conf.admin_listen < 1 or strip(conf.admin_listen[1]) == "off" then + if #conf.admin_gui_listen > 0 and strip(conf.admin_gui_listen[1]) ~= "off" then + log.warn("Kong Manager won't be functional because the Admin API is not listened on any interface") + end + end + + return #errors == 0, errors[1], errors +end + + +local function overrides(k, default_v, opts, file_conf, arg_conf) + opts = opts or {} + + local value -- definitive value for this property + + -- default values have lowest priority + + if file_conf and file_conf[k] == nil and not opts.no_defaults then + -- PL will ignore empty strings, so we need a placeholder (NONE) + value = default_v == "NONE" and "" or default_v + + else + value = file_conf[k] -- given conf values have middle priority + end + + if opts.defaults_only then + return value, k + end + + if not opts.from_kong_env then + -- environment variables have higher priority + + local env_name = "KONG_" .. upper(k) + local env = getenv(env_name) + if env ~= nil then + local to_print = env + + if conf_constants.CONF_SENSITIVE[k] then + to_print = conf_constants.CONF_SENSITIVE_PLACEHOLDER + end + + log.debug('%s ENV found with "%s"', env_name, to_print) + + value = env + end + end + + -- arg_conf have highest priority + if arg_conf and arg_conf[k] ~= nil then + value = arg_conf[k] + end + + return value, k +end + + +local function parse_nginx_directives(dyn_namespace, conf, injected_in_namespace) + conf = conf or {} + local directives = {} + + for k, v in pairs(conf) do + if type(k) == "string" and not injected_in_namespace[k] then + local directive = match(k, dyn_namespace.prefix .. 
"(.+)") + if directive then + if v ~= "NONE" and not dyn_namespace.ignore[directive] then + insert(directives, { name = directive, value = v }) + end + + injected_in_namespace[k] = true + end + end + end + + return directives +end + + +return { + get_phase = get_phase, + + is_predefined_dhgroup = is_predefined_dhgroup, + parse_value = parse_value, + + check_and_parse = check_and_parse, + + overrides = overrides, + parse_nginx_directives = parse_nginx_directives, +} diff --git a/spec/01-unit/03-conf_loader_spec.lua b/spec/01-unit/03-conf_loader_spec.lua index c2d0df449682..c51b9b46a618 100644 --- a/spec/01-unit/03-conf_loader_spec.lua +++ b/spec/01-unit/03-conf_loader_spec.lua @@ -1,6 +1,5 @@ local kong_meta = require "kong.meta" local conf_loader = require "kong.conf_loader" -local utils = require "kong.tools.utils" local log = require "kong.cmd.utils.log" local helpers = require "spec.helpers" local tablex = require "pl.tablex" @@ -983,6 +982,8 @@ describe("Configuration loader", function() assert.matches(".ca_combined", conf.lua_ssl_trusted_certificate_combined) end) it("expands the `system` property in lua_ssl_trusted_certificate", function() + local utils = require "kong.tools.system" + local old_gstcf = utils.get_system_trusted_certs_filepath local old_exists = pl_path.exists finally(function() From 410d9bd32f6206dfab1c8121f79b1f50d532a5d4 Mon Sep 17 00:00:00 2001 From: oowl Date: Tue, 19 Dec 2023 11:13:53 +0800 Subject: [PATCH 208/249] fix(dbless): fix error data loss caused by weakly typed of function in declarative_config_flattened function (#12167) FTI-5584 --- ...declarative-config-flattened-data-loss.yml | 3 ++ kong/db/errors.lua | 8 +++- .../04-admin_api/15-off_spec.lua | 37 +++++++++++++++++++ 3 files changed, 47 insertions(+), 1 deletion(-) create mode 100644 changelog/unreleased/kong/fix-declarative-config-flattened-data-loss.yml diff --git a/changelog/unreleased/kong/fix-declarative-config-flattened-data-loss.yml b/changelog/unreleased/kong/fix-declarative-config-flattened-data-loss.yml new file mode 100644 index 000000000000..05991af010d0 --- /dev/null +++ b/changelog/unreleased/kong/fix-declarative-config-flattened-data-loss.yml @@ -0,0 +1,3 @@ +message: fix error data loss caused by weakly typed of function in declarative_config_flattened function +type: bugfix +scope: Configuration diff --git a/kong/db/errors.lua b/kong/db/errors.lua index 5a43911741a0..7139c636ddb6 100644 --- a/kong/db/errors.lua +++ b/kong/db/errors.lua @@ -1033,7 +1033,13 @@ do for i, err_t_i in drain(section_errors) do local entity = entities[i] - if type(entity) == "table" then + + -- promote error strings to `@entity` type errors + if type(err_t_i) == "string" then + err_t_i = { ["@entity"] = err_t_i } + end + + if type(entity) == "table" and type(err_t_i) == "table" then add_entity_errors(entity_type, entity, err_t_i, flattened) else diff --git a/spec/02-integration/04-admin_api/15-off_spec.lua b/spec/02-integration/04-admin_api/15-off_spec.lua index 7373a82b3564..54bb00e7e820 100644 --- a/spec/02-integration/04-admin_api/15-off_spec.lua +++ b/spec/02-integration/04-admin_api/15-off_spec.lua @@ -2697,6 +2697,43 @@ R6InCcH2Wh8wSeY5AuDXvu2tv9g/PW9wIJmPuKSHMA== }, }, flattened) end) + it("origin error do not loss when enable flatten_errors - (#12167)", function() + local input = { + _format_version = "3.0", + consumers = { + { + id = "a73dc9a7-93df-584d-97c0-7f41a1bbce3d", + username = "test-consumer-1", + tags = { "consumer-1" }, + }, + { + id = "a73dc9a7-93df-584d-97c0-7f41a1bbce32", + username = 
"test-consumer-1", + tags = { "consumer-2" }, + }, + }, + } + local flattened = post_config(input) + validate({ + { + entity_type = "consumer", + entity_id = "a73dc9a7-93df-584d-97c0-7f41a1bbce32", + entity_name = nil, + entity_tags = { "consumer-2" }, + entity = { + id = "a73dc9a7-93df-584d-97c0-7f41a1bbce32", + username = "test-consumer-1", + tags = { "consumer-2" }, + }, + errors = { + { + type = "entity", + message = "uniqueness violation: 'consumers' entity with username set to 'test-consumer-1' already declared", + } + }, + }, + }, flattened) + end) end) From c976643692cad8469305e10355f829cb24d10457 Mon Sep 17 00:00:00 2001 From: Wangchong Zhou Date: Tue, 19 Dec 2023 15:55:25 +0800 Subject: [PATCH 209/249] fix(oauth2): use new style KDF API to work better with FIPS mode (#12212) --- kong/plugins/oauth2/secret.lua | 43 ++++++++++++------- spec/03-plugins/25-oauth2/05-kdf_spec.lua | 51 +++++++++++++++++++++++ 2 files changed, 80 insertions(+), 14 deletions(-) create mode 100644 spec/03-plugins/25-oauth2/05-kdf_spec.lua diff --git a/kong/plugins/oauth2/secret.lua b/kong/plugins/oauth2/secret.lua index 015f944f9e1b..31d1c75278d0 100644 --- a/kong/plugins/oauth2/secret.lua +++ b/kong/plugins/oauth2/secret.lua @@ -201,9 +201,9 @@ if ENABLED_ALGORITHMS.PBKDF2 then local PBKDF2_PREFIX local ok, crypt = pcall(function() - local kdf = require "resty.openssl.kdf" + local openssl_kdf = require "resty.openssl.kdf" - -- pbkdf2 settings + -- pbkdf2 default settings local PBKDF2_DIGEST = "sha512" local PBKDF2_ITERATIONS = 10000 local PBKDF2_HASH_LEN = 32 @@ -211,17 +211,32 @@ if ENABLED_ALGORITHMS.PBKDF2 then local EMPTY = {} + local kdf + local function derive(secret, opts) opts = opts or EMPTY + local err + if kdf then + local _, err = kdf:reset() + if err then + kdf = nil + end + end + + if not kdf then + kdf, err = openssl_kdf.new("PBKDF2") + if err then + return nil, err + end + end + local salt = opts.salt or utils.get_rand_bytes(PBKDF2_SALT_LEN) - local hash, err = kdf.derive({ - type = kdf.PBKDF2, - outlen = opts.outlen or PBKDF2_HASH_LEN, + local hash, err = kdf:derive(opts.outlen or PBKDF2_HASH_LEN, { pass = secret, salt = salt, - md = opts.md or PBKDF2_DIGEST, - pbkdf2_iter = opts.pbkdf2_iter or PBKDF2_ITERATIONS, - }) + digest = opts.digest or PBKDF2_DIGEST, + iter = opts.iter or PBKDF2_ITERATIONS, + }, 4) if not hash then return nil, err end @@ -245,8 +260,8 @@ if ENABLED_ALGORITHMS.PBKDF2 then local crypt = {} - function crypt.hash(secret) - return derive(secret) + function crypt.hash(secret, options) + return derive(secret, options) end function crypt.verify(secret, hash) @@ -263,8 +278,8 @@ if ENABLED_ALGORITHMS.PBKDF2 then local calculated_hash, err = derive(secret, { outlen = outlen, salt = phc.salt, - md = phc.digest, - pbkdf2_iter = phc.params.i + digest = phc.digest, + iter = phc.params.i }) if not calculated_hash then return nil, err @@ -287,7 +302,7 @@ end local crypt = {} -function crypt.hash(secret) +function crypt.hash(secret, options) assert(type(secret) == "string", "secret needs to be a string") if ARGON2 then @@ -299,7 +314,7 @@ function crypt.hash(secret) end if PBKDF2 then - return PBKDF2.hash(secret) + return PBKDF2.hash(secret, options) end return nil, "no suitable password hashing algorithm found" diff --git a/spec/03-plugins/25-oauth2/05-kdf_spec.lua b/spec/03-plugins/25-oauth2/05-kdf_spec.lua new file mode 100644 index 000000000000..829bf65bd9e8 --- /dev/null +++ b/spec/03-plugins/25-oauth2/05-kdf_spec.lua @@ -0,0 +1,51 @@ +-- This software is copyright Kong Inc. 
and its licensors. +-- Use of the software is subject to the agreement between your organization +-- and Kong Inc. If there is no such agreement, use is governed by and +-- subject to the terms of the Kong Master Software License Agreement found +-- at https://konghq.com/enterprisesoftwarelicense/. +-- [ END OF LICENSE 0867164ffc95e54f04670b5169c09574bdbd9bba ] + +local secret_impl = require "kong.plugins.oauth2.secret" + + +describe("Plugin: oauth2 (secret)", function() + describe("PBKDF", function() + + local static_key = "$pbkdf2-sha512$i=10000,l=32$YSBsaXR0ZSBiaXQsIGp1c3QgYSBsaXR0bGUgYml0$z6ysNworexAhDELywIDi0ba0B0T7F/MBZ6Ige9lWRYI" + + it("sanity test", function() + -- Note: to pass test in FIPS mode, salt length has to be 16 bytes or more + local derived, err = secret_impl.hash("tofu", { salt = "a litte bit, just a little bit" }) + assert.is_nil(err) + assert.same(static_key, derived) + end) + + it("uses random salt by default", function() + local derived, err = secret_impl.hash("tofu") + assert.is_nil(err) + assert.not_same(static_key, derived) + end) + + it("verifies correctly", function() + local derived, err = secret_impl.hash("tofu") + assert.is_nil(err) + + local ok, err = secret_impl.verify("tofu", derived) + assert.is_nil(err) + assert.is_truthy(ok) + + local ok, err = secret_impl.verify("tofu", static_key) + assert.is_nil(err) + assert.is_truthy(ok) + + + local derived2, err = secret_impl.hash("bun") + assert.is_nil(err) + + local ok, err = secret_impl.verify("tofu", derived2) + assert.is_nil(err) + assert.is_falsy(ok) + end) + + end) +end) From c3800381ddf2855f1b45edb2d7320dceabd0720b Mon Sep 17 00:00:00 2001 From: Xiaochen Date: Tue, 19 Dec 2023 17:00:10 +0800 Subject: [PATCH 210/249] fix(globalpatches): remove timer from SharedDict APIs (#12187) 1. It checks the expiration of an item when referring it instead of using timer. 2. The API `set()` returns the item now, which will be used in the API `SharedDict:incr()`. 3. It introduces a new internal API `get(data, key)` to check and retrieve non-expired items. 4. It fixes the returned ttl value to align with the `ngx.shared.DICT:ttl()` API of lua-nginx-module. 
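The numbered points above describe a switch from timer-driven eviction to expiry checks performed at access time. Below is a minimal, standalone sketch of that lazy-expiration pattern. It is only an illustration of the approach, not the patched kong/globalpatches.lua code: the table name `store`, the `now()` helper, and the use of `os.time()` are assumptions made for this sketch (the real mock keeps the full SharedDict API and uses `ngx.now()`).

  -- Minimal lazy-expiration store: items carry their own expire_at and are
  -- evicted when they are next accessed, so no background timer is needed.
  local store = { data = {} }

  local function now()
    return os.time()  -- assumption for this sketch; the real mock uses ngx.now()
  end

  -- set() records the deadline alongside the value and returns the item,
  -- so callers (such as an incr() built on top of it) can reuse it directly
  function store:set(key, value, exptime)
    local item = {
      value = value,
      expire_at = (exptime and exptime ~= 0) and (now() + exptime) or nil,
    }
    self.data[key] = item
    return item
  end

  -- get() checks the deadline on access and drops expired items lazily
  function store:get(key)
    local item = self.data[key]
    if item and item.expire_at and item.expire_at <= now() then
      self.data[key] = nil
      return nil
    end
    return item and item.value
  end

  -- ttl() returns 0 for keys that never expire, otherwise the remaining time
  function store:ttl(key)
    local item = self.data[key]
    if not item then
      return nil, "not found"
    end
    if not item.expire_at then
      return 0
    end
    return item.expire_at - now()
  end

With this sketch, store:set("k", 1, 5) followed by store:get("k") five seconds later returns nil without any timer having fired, which is the behavior the change above gives the test-time SharedDict mock.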
KAG-3309 --- kong/globalpatches.lua | 78 ++++++++++++++++++++---------------------- 1 file changed, 37 insertions(+), 41 deletions(-) diff --git a/kong/globalpatches.lua b/kong/globalpatches.lua index 56de8dcfb68b..eef57220a539 100644 --- a/kong/globalpatches.lua +++ b/kong/globalpatches.lua @@ -251,6 +251,15 @@ return function(options) value = value, info = {expire_at = expire_at} } + return data[key] + end + local function get(data, key) + local item = data[key] + if item and item.info.expire_at and item.info.expire_at <= ngx.now() then + data[key] = nil + item = nil + end + return item end function SharedDict:new() return setmetatable({data = {}}, {__index = self}) @@ -262,25 +271,18 @@ return function(options) return 0 end function SharedDict:get(key) - return self.data[key] and self.data[key].value, nil + local item = get(self.data, key) + return item and item.value, nil end SharedDict.get_stale = SharedDict.get function SharedDict:set(key, value, exptime) - local expire_at = nil - - if exptime then - ngx.timer.at(exptime, function() - self.data[key] = nil - end) - expire_at = ngx.now() + exptime - end - + local expire_at = (exptime and exptime ~= 0) and (ngx.now() + exptime) set(self.data, key, value, expire_at) return true, nil, false end SharedDict.safe_set = SharedDict.set function SharedDict:add(key, value, exptime) - if self.data[key] ~= nil then + if get(self.data, key) then return false, "exists", false end @@ -288,7 +290,7 @@ return function(options) end SharedDict.safe_add = SharedDict.add function SharedDict:replace(key, value) - if self.data[key] == nil then + if not get(key) then return false, "not found", false end set(self.data, key, value) @@ -301,23 +303,17 @@ return function(options) return true end function SharedDict:incr(key, value, init, init_ttl) - if not self.data[key] then + local item = get(self.data, key) + if not item then if not init then return nil, "not found" - else - self.data[key] = { value = init, info = {} } - if init_ttl then - self.data[key].info.expire_at = ngx.now() + init_ttl - ngx.timer.at(init_ttl, function() - self.data[key] = nil - end) - end end - elseif type(self.data[key].value) ~= "number" then + item = set(self.data, key, init, init_ttl and ngx.now() + init_ttl) + elseif type(item.value) ~= "number" then return nil, "not a number" end - self.data[key].value = self.data[key].value + value - return self.data[key].value, nil + item.value = item.value + value + return item.value, nil end function SharedDict:flush_all() for _, item in pairs(self.data) do @@ -344,11 +340,15 @@ return function(options) n = n or 1024 local i = 0 local keys = {} - for k in pairs(self.data) do - keys[#keys+1] = k - i = i + 1 - if n ~= 0 and i == n then - break + for k, item in pairs(self.data) do + if item.info.expire_at and item.info.expire_at <= ngx.now() then + self.data[k] = nil + else + keys[#keys+1] = k + i = i + 1 + if n ~= 0 and i == n then + break + end end end return keys @@ -357,19 +357,15 @@ return function(options) local item = self.data[key] if item == nil then return nil, "not found" - else - local expire_at = item.info.expire_at - if expire_at == nil then - return 0 - else - local remaining = expire_at - ngx.now() - if remaining < 0 then - return nil, "not found" - else - return remaining - end - end end + local expire_at = item.info.expire_at + if expire_at == nil then + return 0 + end + -- There is a problem that also exists in the official OpenResty: + -- 0 means the key never expires. 
So it's hard to distinguish between a + -- never-expired key and an expired key with a TTL value of 0. + return expire_at - ngx.now() end -- hack From ffea9590b36c835d24fa229c851b9106c1895833 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Hans=20H=C3=BCbner?= Date: Tue, 19 Dec 2023 17:50:52 +0100 Subject: [PATCH 211/249] chore(ci): pin CI to test scheduler release v1 (#12228) --- .github/workflows/build_and_test.yml | 6 +++--- .github/workflows/update-test-runtime-statistics.yml | 4 ++-- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/.github/workflows/build_and_test.yml b/.github/workflows/build_and_test.yml index 5cca0656ac08..83239c316bda 100644 --- a/.github/workflows/build_and_test.yml +++ b/.github/workflows/build_and_test.yml @@ -99,12 +99,12 @@ jobs: uses: actions/checkout@v4 - name: Download runtimes file - uses: Kong/gh-storage/download@main + uses: Kong/gh-storage/download@v1 with: repo-path: Kong/gateway-action-storage/main/.ci/runtimes.json - name: Schedule tests - uses: Kong/gateway-test-scheduler/schedule@main + uses: Kong/gateway-test-scheduler/schedule@v1 with: test-suites-file: .ci/test_suites.json test-file-runtime-file: .ci/runtimes.json @@ -267,7 +267,7 @@ jobs: DD_CIVISIBILITY_AGENTLESS_ENABLED: true DD_TRACE_GIT_METADATA_ENABLED: true DD_API_KEY: ${{ secrets.DATADOG_API_KEY }} - uses: Kong/gateway-test-scheduler/runner@main + uses: Kong/gateway-test-scheduler/runner@v1 with: tests-to-run-file: test-chunk.${{ matrix.runner }}.json failed-test-files-file: ${{ env.FAILED_TEST_FILES_FILE }} diff --git a/.github/workflows/update-test-runtime-statistics.yml b/.github/workflows/update-test-runtime-statistics.yml index de53f0e38f06..77067f35a82d 100644 --- a/.github/workflows/update-test-runtime-statistics.yml +++ b/.github/workflows/update-test-runtime-statistics.yml @@ -19,7 +19,7 @@ jobs: token: ${{ secrets.PAT }} - name: Process statistics - uses: Kong/gateway-test-scheduler/analyze@main + uses: Kong/gateway-test-scheduler/analyze@v1 env: GITHUB_TOKEN: ${{ secrets.PAT }} with: @@ -28,7 +28,7 @@ jobs: artifact-name-regexp: "^test-runtime-statistics-\\d+$" - name: Upload new runtimes file - uses: Kong/gh-storage/upload@main + uses: Kong/gh-storage/upload@v1 env: GITHUB_TOKEN: ${{ secrets.PAT }} with: From 0a1dbd8322223fbea13a0c5da992d3d997b1f0c0 Mon Sep 17 00:00:00 2001 From: aman Date: Wed, 20 Dec 2023 12:06:01 +0530 Subject: [PATCH 212/249] docs(pdk): fix documentation of kong.plugin.get_id (#12131) --- kong/pdk/plugin.lua | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/kong/pdk/plugin.lua b/kong/pdk/plugin.lua index b38d9eed300f..72e36f019cfa 100644 --- a/kong/pdk/plugin.lua +++ b/kong/pdk/plugin.lua @@ -14,7 +14,7 @@ local _plugin = {} -- @treturn string The ID of the running plugin -- @usage -- --- kong.request.get_id() -- "123e4567-e89b-12d3-a456-426614174000" +-- kong.plugin.get_id() -- "123e4567-e89b-12d3-a456-426614174000" function _plugin.get_id(self) return ngx.ctx.plugin_id end From 6646cad26d045950f1bfbcfb76547a673835bfcf Mon Sep 17 00:00:00 2001 From: Andy Zhang Date: Wed, 20 Dec 2023 15:49:45 +0800 Subject: [PATCH 213/249] feat(cd): build debian 12 packages (#12218) KAG-3015 --- .github/matrix-full.yml | 14 +- build/dockerfiles/deb.Dockerfile | 2 +- .../unreleased/kong/debian-12-support.yml | 3 + scripts/explain_manifest/config.py | 13 ++ .../fixtures/debian-12-amd64.txt | 183 ++++++++++++++++++ 5 files changed, 212 insertions(+), 3 deletions(-) create mode 100644 changelog/unreleased/kong/debian-12-support.yml create mode 100644 
scripts/explain_manifest/fixtures/debian-12-amd64.txt diff --git a/.github/matrix-full.yml b/.github/matrix-full.yml index b32ca5effd54..70b4787491ec 100644 --- a/.github/matrix-full.yml +++ b/.github/matrix-full.yml @@ -28,6 +28,10 @@ build-packages: image: debian:11 package: deb check-manifest-suite: debian-11-amd64 +- label: debian-12 + image: debian:12 + package: deb + check-manifest-suite: debian-12-amd64 # RHEL - label: rhel-7 @@ -89,9 +93,9 @@ build-images: # Debian - label: debian - base-image: debian:11-slim + base-image: debian:12-slim package: deb - artifact-from: debian-11 + artifact-from: debian-12 # RHEL - label: rhel @@ -146,6 +150,12 @@ release-packages: artifact-version: 11 artifact-type: debian artifact: kong.amd64.deb +- label: debian-12 + package: deb + artifact-from: debian-12 + artifact-version: 12 + artifact-type: debian + artifact: kong.amd64.deb # RHEL - label: rhel-7 diff --git a/build/dockerfiles/deb.Dockerfile b/build/dockerfiles/deb.Dockerfile index 75c2252f875a..a55b3706fcf7 100644 --- a/build/dockerfiles/deb.Dockerfile +++ b/build/dockerfiles/deb.Dockerfile @@ -1,4 +1,4 @@ -ARG KONG_BASE_IMAGE=debian:bullseye-slim +ARG KONG_BASE_IMAGE=debian:bookworm-slim FROM --platform=$TARGETPLATFORM $KONG_BASE_IMAGE LABEL maintainer="Kong Docker Maintainers (@team-gateway-bot)" diff --git a/changelog/unreleased/kong/debian-12-support.yml b/changelog/unreleased/kong/debian-12-support.yml new file mode 100644 index 000000000000..26b8b6fcc17c --- /dev/null +++ b/changelog/unreleased/kong/debian-12-support.yml @@ -0,0 +1,3 @@ +message: "Build deb packages for Debian 12. The debian variant of kong docker image is built using Debian 12 now." +type: feature +scope: Core diff --git a/scripts/explain_manifest/config.py b/scripts/explain_manifest/config.py index 398c9346c96a..370c87643a8f 100644 --- a/scripts/explain_manifest/config.py +++ b/scripts/explain_manifest/config.py @@ -176,6 +176,19 @@ def transform(f: FileInfo): }, } ), + "debian-12-amd64": ExpectSuite( + name="Debian 12 (amd64)", + manifest="fixtures/debian-12-amd64.txt", + tests={ + common_suites: {}, + libc_libcpp_suites: { + "libc_max_version": "2.36", + # gcc 12.1.0 + "libcxx_max_version": "3.4.30", + "cxxabi_max_version": "1.3.13", + }, + } + ), "docker-image": ExpectSuite( name="Generic Docker Image", manifest=None, diff --git a/scripts/explain_manifest/fixtures/debian-12-amd64.txt b/scripts/explain_manifest/fixtures/debian-12-amd64.txt new file mode 100644 index 000000000000..fecba88d42b6 --- /dev/null +++ b/scripts/explain_manifest/fixtures/debian-12-amd64.txt @@ -0,0 +1,183 @@ +- Path : /etc/kong/kong.logrotate + +- Path : /lib/systemd/system/kong.service + +- Path : /usr/local/kong/gui + Type : directory + +- Path : /usr/local/kong/include/google + Type : directory + +- Path : /usr/local/kong/include/kong + Type : directory + +- Path : /usr/local/kong/lib/engines-3/afalg.so + Needed : + - libstdc++.so.6 + - libm.so.6 + - libcrypto.so.3 + - libc.so.6 + Runpath : /usr/local/kong/lib + +- Path : /usr/local/kong/lib/engines-3/capi.so + Needed : + - libstdc++.so.6 + - libm.so.6 + - libcrypto.so.3 + - libc.so.6 + Runpath : /usr/local/kong/lib + +- Path : /usr/local/kong/lib/engines-3/loader_attic.so + Needed : + - libstdc++.so.6 + - libm.so.6 + - libcrypto.so.3 + - libc.so.6 + Runpath : /usr/local/kong/lib + +- Path : /usr/local/kong/lib/engines-3/padlock.so + Needed : + - libstdc++.so.6 + - libm.so.6 + - libcrypto.so.3 + - libc.so.6 + Runpath : /usr/local/kong/lib + +- Path : /usr/local/kong/lib/libcrypto.so.3 + 
Needed : + - libstdc++.so.6 + - libm.so.6 + - libc.so.6 + Runpath : /usr/local/kong/lib + +- Path : /usr/local/kong/lib/libexpat.so.1.8.10 + Needed : + - libc.so.6 + +- Path : /usr/local/kong/lib/libssl.so.3 + Needed : + - libstdc++.so.6 + - libm.so.6 + - libcrypto.so.3 + - libc.so.6 + Runpath : /usr/local/kong/lib + +- Path : /usr/local/kong/lib/ossl-modules/legacy.so + Needed : + - libstdc++.so.6 + - libm.so.6 + - libcrypto.so.3 + - libc.so.6 + Runpath : /usr/local/kong/lib + +- Path : /usr/local/lib/lua/5.1/lfs.so + Needed : + - libc.so.6 + Runpath : /usr/local/kong/lib + +- Path : /usr/local/lib/lua/5.1/lpeg.so + Needed : + - libc.so.6 + Runpath : /usr/local/kong/lib + +- Path : /usr/local/lib/lua/5.1/lsyslog.so + Needed : + - libc.so.6 + Runpath : /usr/local/kong/lib + +- Path : /usr/local/lib/lua/5.1/lua_pack.so + Needed : + - libc.so.6 + Runpath : /usr/local/kong/lib + +- Path : /usr/local/lib/lua/5.1/lua_system_constants.so + Runpath : /usr/local/kong/lib + +- Path : /usr/local/lib/lua/5.1/lxp.so + Needed : + - libexpat.so.1 + - libc.so.6 + Runpath : /usr/local/kong/lib + +- Path : /usr/local/lib/lua/5.1/mime/core.so + Runpath : /usr/local/kong/lib + +- Path : /usr/local/lib/lua/5.1/pb.so + Needed : + - libc.so.6 + Runpath : /usr/local/kong/lib + +- Path : /usr/local/lib/lua/5.1/socket/core.so + Needed : + - libc.so.6 + Runpath : /usr/local/kong/lib + +- Path : /usr/local/lib/lua/5.1/socket/serial.so + Needed : + - libc.so.6 + Runpath : /usr/local/kong/lib + +- Path : /usr/local/lib/lua/5.1/socket/unix.so + Needed : + - libc.so.6 + Runpath : /usr/local/kong/lib + +- Path : /usr/local/lib/lua/5.1/ssl.so + Needed : + - libssl.so.3 + - libcrypto.so.3 + - libc.so.6 + Runpath : /usr/local/kong/lib + +- Path : /usr/local/lib/lua/5.1/yaml.so + Needed : + - libyaml-0.so.2 + - libc.so.6 + +- Path : /usr/local/openresty/lualib/cjson.so + Needed : + - libc.so.6 + +- Path : /usr/local/openresty/lualib/libatc_router.so + Needed : + - libgcc_s.so.1 + - libm.so.6 + - libc.so.6 + - ld-linux-x86-64.so.2 + - libstdc++.so.6 + +- Path : /usr/local/openresty/lualib/librestysignal.so + +- Path : /usr/local/openresty/lualib/rds/parser.so + +- Path : /usr/local/openresty/lualib/redis/parser.so + Needed : + - libc.so.6 + +- Path : /usr/local/openresty/nginx/modules/ngx_wasm_module.so + Needed : + - libm.so.6 + - libgcc_s.so.1 + - libc.so.6 + - ld-linux-x86-64.so.2 + Runpath : /usr/local/openresty/luajit/lib:/usr/local/kong/lib:/usr/local/openresty/lualib + +- Path : /usr/local/openresty/nginx/sbin/nginx + Needed : + - libcrypt.so.1 + - libluajit-5.1.so.2 + - libssl.so.3 + - libcrypto.so.3 + - libz.so.1 + - libc.so.6 + Runpath : /usr/local/openresty/luajit/lib:/usr/local/kong/lib:/usr/local/openresty/lualib + Modules : + - lua-kong-nginx-module + - lua-kong-nginx-module/stream + - lua-resty-events + - lua-resty-lmdb + - ngx_wasm_module + OpenSSL : OpenSSL 3.1.4 24 Oct 2023 + DWARF : True + DWARF - ngx_http_request_t related DWARF DIEs: True + From e0580930b71d0c172400e236fcc2114162cf9517 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 18 Dec 2023 08:38:13 +0000 Subject: [PATCH 214/249] chore(deps): bump jschmid1/cross-repo-cherrypick-action Bumps [jschmid1/cross-repo-cherrypick-action](https://github.com/jschmid1/cross-repo-cherrypick-action) from cde6a39fa9e6eee09e633dc83bbf5e83bb476ec3 to 1182bef0772280407550496e3cceaecb7c0102d0. 
- [Release notes](https://github.com/jschmid1/cross-repo-cherrypick-action/releases) - [Commits](https://github.com/jschmid1/cross-repo-cherrypick-action/compare/cde6a39fa9e6eee09e633dc83bbf5e83bb476ec3...1182bef0772280407550496e3cceaecb7c0102d0) --- updated-dependencies: - dependency-name: jschmid1/cross-repo-cherrypick-action dependency-type: direct:production ... Signed-off-by: dependabot[bot] --- .github/workflows/cherry-picks.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/cherry-picks.yml b/.github/workflows/cherry-picks.yml index c5539dd8f0f2..d04f54eac2bc 100644 --- a/.github/workflows/cherry-picks.yml +++ b/.github/workflows/cherry-picks.yml @@ -26,7 +26,7 @@ jobs: with: token: ${{ secrets.CHERRY_PICK_TOKEN }} - name: Create backport pull requests - uses: jschmid1/cross-repo-cherrypick-action@cde6a39fa9e6eee09e633dc83bbf5e83bb476ec3 #v1.1.0 + uses: jschmid1/cross-repo-cherrypick-action@1182bef0772280407550496e3cceaecb7c0102d0 #v1.1.0 with: token: ${{ secrets.CHERRY_PICK_TOKEN }} pull_title: '[cherry-pick -> ${target_branch}] ${pull_title}' From 9cf81aba64bfc7692ee861f7cd4ae6c6014138a1 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 20 Dec 2023 11:11:08 +0100 Subject: [PATCH 215/249] chore(deps): bump actions/upload-artifact from 3 to 4 (#12220) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore(deps): bump actions/upload-artifact from 3 to 4 Bumps [actions/upload-artifact](https://github.com/actions/upload-artifact) from 3 to 4. - [Release notes](https://github.com/actions/upload-artifact/releases) - [Commits](https://github.com/actions/upload-artifact/compare/v3...v4) --- updated-dependencies: - dependency-name: actions/upload-artifact dependency-type: direct:production update-type: version-update:semver-major ... Signed-off-by: dependabot[bot] * update download-artifact version to v4 as well Also fail on upload error. 
--------- Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: Hans Hübner --- .github/workflows/build.yml | 2 +- .github/workflows/build_and_test.yml | 19 +++++++++---------- .github/workflows/perf.yml | 6 +++--- .github/workflows/release.yml | 10 +++++----- 4 files changed, 18 insertions(+), 19 deletions(-) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 88704ccdedcd..3e5572b0f331 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -73,7 +73,7 @@ jobs: luarocks config - name: Bazel Outputs - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 if: failure() with: name: bazel-outputs diff --git a/.github/workflows/build_and_test.yml b/.github/workflows/build_and_test.yml index 83239c316bda..7537a411afb9 100644 --- a/.github/workflows/build_and_test.yml +++ b/.github/workflows/build_and_test.yml @@ -112,7 +112,7 @@ jobs: runner-count: ${{ env.RUNNER_COUNT }} - name: Upload schedule files - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 continue-on-error: true with: name: schedule-test-files @@ -227,8 +227,7 @@ jobs: psql -hlocalhost -Ukong kong -tAc 'alter system set max_connections = 5000;' - name: Download test schedule file - uses: actions/download-artifact@v3 - continue-on-error: true + uses: actions/download-artifact@v4 with: name: schedule-test-files @@ -242,13 +241,13 @@ jobs: make dev - name: Download test rerun information - uses: actions/download-artifact@v3 + uses: actions/download-artifact@v4 continue-on-error: true with: name: test-rerun-info-${{ matrix.runner }} - name: Download test runtime statistics from previous runs - uses: actions/download-artifact@v3 + uses: actions/download-artifact@v4 continue-on-error: true with: name: test-runtime-statistics-${{ matrix.runner }} @@ -276,7 +275,7 @@ jobs: - name: Upload test rerun information if: always() - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 with: name: test-rerun-info-${{ matrix.runner }} path: ${{ env.FAILED_TEST_FILES_FILE }} @@ -284,14 +283,14 @@ jobs: - name: Upload test runtime statistics for offline scheduling if: always() - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 with: name: test-runtime-statistics-${{ matrix.runner }} path: ${{ env.TEST_FILE_RUNTIME_FILE }} retention-days: 7 - name: Archive coverage stats file - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 if: ${{ always() && (inputs.coverage == true || github.event_name == 'schedule') }} with: name: luacov-stats-out-${{ github.job }}-${{ github.run_id }}-${{ matrix.runner }} @@ -338,7 +337,7 @@ jobs: prove -I. 
-r t - name: Archive coverage stats file - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 if: ${{ always() && (inputs.coverage == true || github.event_name == 'schedule') }} with: name: luacov-stats-out-${{ github.job }}-${{ github.run_id }} @@ -368,7 +367,7 @@ jobs: sudo luarocks install luafilesystem # Download all archived coverage stats files - - uses: actions/download-artifact@v3 + - uses: actions/download-artifact@v4 - name: Stats aggregation shell: bash diff --git a/.github/workflows/perf.yml b/.github/workflows/perf.yml index d71b88519039..337111269bf1 100644 --- a/.github/workflows/perf.yml +++ b/.github/workflows/perf.yml @@ -65,7 +65,7 @@ jobs: luarocks - name: Bazel Outputs - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 if: failure() with: name: bazel-outputs @@ -267,7 +267,7 @@ jobs: done - name: Save results - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 if: always() with: name: perf-results @@ -278,7 +278,7 @@ jobs: retention-days: 31 - name: Save error logs - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 if: always() with: name: error_logs diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 0dced5a70e25..94e957e14dae 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -270,7 +270,7 @@ jobs: tail -n500 bazel-out/**/*/CMake.log || true - name: Upload artifact - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 with: name: ${{ matrix.label }}-packages path: bazel-bin/pkg @@ -290,7 +290,7 @@ jobs: - uses: actions/checkout@v3 - name: Download artifact - uses: actions/download-artifact@v3 + uses: actions/download-artifact@v4 with: name: ${{ matrix.label }}-packages path: bazel-bin/pkg @@ -322,14 +322,14 @@ jobs: - uses: actions/checkout@v3 - name: Download artifact - uses: actions/download-artifact@v3 + uses: actions/download-artifact@v4 with: name: ${{ matrix.artifact-from }}-packages path: bazel-bin/pkg - name: Download artifact (alt) if: matrix.artifact-from-alt != '' - uses: actions/download-artifact@v3 + uses: actions/download-artifact@v4 with: name: ${{ matrix.artifact-from-alt }}-packages path: bazel-bin/pkg @@ -618,7 +618,7 @@ jobs: - uses: actions/checkout@v4 - name: Download artifact - uses: actions/download-artifact@v3 + uses: actions/download-artifact@v4 with: name: ${{ matrix.artifact-from }}-packages path: bazel-bin/pkg From 329e0efefeb303b07fca9f961bf1acde721fe78a Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 20 Dec 2023 11:16:11 +0100 Subject: [PATCH 216/249] chore(deps): bump korthout/backport-action (#12219) Bumps [korthout/backport-action](https://github.com/korthout/backport-action) from e355f68e2fc1cb0063b1c1b717882290ffc994bf to 930286d359d53effaf69607223933cbbb02460eb. - [Release notes](https://github.com/korthout/backport-action/releases) - [Commits](https://github.com/korthout/backport-action/compare/e355f68e2fc1cb0063b1c1b717882290ffc994bf...930286d359d53effaf69607223933cbbb02460eb) --- updated-dependencies: - dependency-name: korthout/backport-action dependency-type: direct:production ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/backport.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/backport.yml b/.github/workflows/backport.yml index 3e2dd71dc7df..b415b108faa7 100644 --- a/.github/workflows/backport.yml +++ b/.github/workflows/backport.yml @@ -14,7 +14,7 @@ jobs: steps: - uses: actions/checkout@v4 - name: Create backport pull requests - uses: korthout/backport-action@e355f68e2fc1cb0063b1c1b717882290ffc994bf #v2.2.0 + uses: korthout/backport-action@930286d359d53effaf69607223933cbbb02460eb #v2.2.0 with: github_token: ${{ secrets.PAT }} pull_title: '[backport -> ${target_branch}] ${pull_title}' From f7e6eeefe006af11129d1b0e39a1c06449a53d42 Mon Sep 17 00:00:00 2001 From: Xiaochen Date: Thu, 21 Dec 2023 18:22:47 +0800 Subject: [PATCH 217/249] perf(proxy): use higher default keepalive request value for Nginx tuning (#12223) Bumped default values of `nginx_http_keepalive_requests` and `upstream_keepalive_max_requests` to `10000`. KAG-3360 --------- Co-authored-by: Datong Sun --- changelog/unreleased/kong/optimize_keepalive_parameters.yml | 3 +++ kong.conf.default | 2 +- kong/templates/kong_defaults.lua | 4 ++-- 3 files changed, 6 insertions(+), 3 deletions(-) create mode 100644 changelog/unreleased/kong/optimize_keepalive_parameters.yml diff --git a/changelog/unreleased/kong/optimize_keepalive_parameters.yml b/changelog/unreleased/kong/optimize_keepalive_parameters.yml new file mode 100644 index 000000000000..49ec8baf6d4f --- /dev/null +++ b/changelog/unreleased/kong/optimize_keepalive_parameters.yml @@ -0,0 +1,3 @@ +message: Bumped default values of `nginx_http_keepalive_requests` and `upstream_keepalive_max_requests` to `10000`. +type: performance +scope: Configuration diff --git a/kong.conf.default b/kong.conf.default index 5e0b3bdc5e97..6f1fe1f0844f 100644 --- a/kong.conf.default +++ b/kong.conf.default @@ -1187,7 +1187,7 @@ # not make use of the PCRE library and their behavior # is unaffected by this setting. -#nginx_http_keepalive_requests = 1000 # Sets the maximum number of client requests that can be served through one +#nginx_http_keepalive_requests = 10000 # Sets the maximum number of client requests that can be served through one # keep-alive connection. After the maximum number of requests are made, # the connection is closed. 
# Closing connections periodically is necessary to free per-connection diff --git a/kong/templates/kong_defaults.lua b/kong/templates/kong_defaults.lua index eb6db07ae275..7ff840c17eb3 100644 --- a/kong/templates/kong_defaults.lua +++ b/kong/templates/kong_defaults.lua @@ -70,7 +70,7 @@ headers_upstream = x-kong-request-id trusted_ips = NONE error_default_type = text/plain upstream_keepalive_pool_size = 512 -upstream_keepalive_max_requests = 1000 +upstream_keepalive_max_requests = 10000 upstream_keepalive_idle_timeout = 60 allow_debug_header = off @@ -93,7 +93,7 @@ nginx_http_ssl_session_tickets = NONE nginx_http_ssl_session_timeout = NONE nginx_http_lua_regex_match_limit = 100000 nginx_http_lua_regex_cache_max_entries = 8192 -nginx_http_keepalive_requests = 1000 +nginx_http_keepalive_requests = 10000 nginx_stream_ssl_protocols = NONE nginx_stream_ssl_prefer_server_ciphers = NONE nginx_stream_ssl_dhparam = NONE From e7f9023720ebce048626f3a05e0c7c332cae2bb5 Mon Sep 17 00:00:00 2001 From: Datong Sun Date: Fri, 22 Dec 2023 14:25:32 +0800 Subject: [PATCH 218/249] deps(requirments): bump `atc-router` to `v1.4.0` (#12231) KAG-3403 --- .requirements | 2 +- changelog/unreleased/kong/bump-atc-router-1.3.1.yml | 3 --- changelog/unreleased/kong/bump-atc-router.yml | 3 +++ 3 files changed, 4 insertions(+), 4 deletions(-) delete mode 100644 changelog/unreleased/kong/bump-atc-router-1.3.1.yml create mode 100644 changelog/unreleased/kong/bump-atc-router.yml diff --git a/.requirements b/.requirements index cac1c5e026c8..618696da509c 100644 --- a/.requirements +++ b/.requirements @@ -10,7 +10,7 @@ LUA_KONG_NGINX_MODULE=4fbc3ddc7dcbc706ed286b95344f3cb6da17e637 # 0.8.0 LUA_RESTY_LMDB=19a6da0616db43baf8197dace59e64244430b3c4 # 1.4.1 LUA_RESTY_EVENTS=8448a92cec36ac04ea522e78f6496ba03c9b1fd8 # 0.2.0 LUA_RESTY_WEBSOCKET=60eafc3d7153bceb16e6327074e0afc3d94b1316 # 0.4.0 -ATC_ROUTER=7a2ad42d4246598ba1f753b6ae79cb1456040afa # 1.3.1 +ATC_ROUTER=95f1d8fe10b244bba5b354e5ed3726442373325e # 1.4.0 KONG_MANAGER=nightly NGX_WASM_MODULE=388d5720293f5091ccee1f859a42683fbfd14e7d # prerelease-0.2.0 diff --git a/changelog/unreleased/kong/bump-atc-router-1.3.1.yml b/changelog/unreleased/kong/bump-atc-router-1.3.1.yml deleted file mode 100644 index b1cbe7fa8949..000000000000 --- a/changelog/unreleased/kong/bump-atc-router-1.3.1.yml +++ /dev/null @@ -1,3 +0,0 @@ -message: Bumped atc-router from 1.2.0 to 1.3.1 -type: dependency -scope: Core diff --git a/changelog/unreleased/kong/bump-atc-router.yml b/changelog/unreleased/kong/bump-atc-router.yml new file mode 100644 index 000000000000..1696ebc9d3f3 --- /dev/null +++ b/changelog/unreleased/kong/bump-atc-router.yml @@ -0,0 +1,3 @@ +message: Bumped atc-router from 1.2.0 to 1.4.0 +type: dependency +scope: Core From 28fcbcb44659c20faa3a8f73e0b7eff1fa29546d Mon Sep 17 00:00:00 2001 From: Xiaochen Date: Fri, 22 Dec 2023 15:55:30 +0800 Subject: [PATCH 219/249] fix(globalpatches): imeplement SharedDict:get_stale API (#12233) 1. It implements the `get_stale` API. 2. It completes the `set` API with support for the `flags` parameter. 3. It abstracts the `is_stale` function for reuse. 4. It does not delete expired data during referring operations. 
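As a rough illustration of the semantics listed above (not part of the patch itself), the patched resty-cli shm mock is expected to mirror the real ngx.shared.DICT API; the dict name "kong_cli_cache" and the timings below are illustrative assumptions only:

    -- hedged sketch: expected behaviour of the patched CLI shm mock in kong/globalpatches.lua
    local dict = ngx.shared.kong_cli_cache        -- assumed dict name under resty-cli

    dict:set("token", "abc123", 0.1)              -- entry expires ~100ms after being set
    ngx.sleep(0.2)                                -- let the entry go stale

    -- get() hides stale entries, but no longer deletes them while reading
    assert(dict:get("token") == nil)

    -- get_stale() still returns the stored value, its flags and a staleness marker
    local value, flags, stale = dict:get_stale("token")
    assert(value == "abc123" and stale == true)
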
KAG-3398 --- kong/globalpatches.lua | 29 +++++++++++++++++++---------- 1 file changed, 19 insertions(+), 10 deletions(-) diff --git a/kong/globalpatches.lua b/kong/globalpatches.lua index eef57220a539..c3782f0c8a0f 100644 --- a/kong/globalpatches.lua +++ b/kong/globalpatches.lua @@ -246,17 +246,19 @@ return function(options) -- See https://github.com/openresty/resty-cli/pull/12 -- for a definitive solution of using shms in CLI local SharedDict = {} - local function set(data, key, value, expire_at) + local function set(data, key, value, expire_at, flags) data[key] = { value = value, - info = {expire_at = expire_at} + info = {expire_at = expire_at, flags=flags} } return data[key] end + local function is_stale(item) + return item.info.expire_at and item.info.expire_at <= ngx.now() + end local function get(data, key) local item = data[key] - if item and item.info.expire_at and item.info.expire_at <= ngx.now() then - data[key] = nil + if item and is_stale(item) then item = nil end return item @@ -272,9 +274,18 @@ return function(options) end function SharedDict:get(key) local item = get(self.data, key) - return item and item.value, nil + if item then + return item.value, item.info.flags + end + return nil + end + function SharedDict:get_stale(key) + local item = self.data[key] + if item then + return item.value, item.info.flags, is_stale(item) + end + return nil end - SharedDict.get_stale = SharedDict.get function SharedDict:set(key, value, exptime) local expire_at = (exptime and exptime ~= 0) and (ngx.now() + exptime) set(self.data, key, value, expire_at) @@ -325,7 +336,7 @@ return function(options) local flushed = 0 for key, item in pairs(self.data) do - if item.info.expire_at and item.info.expire_at <= ngx.now() then + if is_stale(item) then data[key] = nil flushed = flushed + 1 if n and flushed == n then @@ -341,9 +352,7 @@ return function(options) local i = 0 local keys = {} for k, item in pairs(self.data) do - if item.info.expire_at and item.info.expire_at <= ngx.now() then - self.data[k] = nil - else + if not is_stale(item) then keys[#keys+1] = k i = i + 1 if n ~= 0 and i == n then From 6fe681348bb0a58efbbbc5f2a6ef57828ed61667 Mon Sep 17 00:00:00 2001 From: Samuele Date: Fri, 22 Dec 2023 09:15:55 +0100 Subject: [PATCH 220/249] feat(opentelemetry): sampling rate configuration option (#12054) Sampling rate can now be set via the Opentelemetry plugin instead of it just being a global setting for the gateway. It also fixes a small bug where, in the edge case of opentelemetry being used for propagation only (instrumentations disabled), the `sampled` flag was incorrectly set to `true` although no span was sampled for that request. Includes tests to cover more configuration scenarios (esp. different sampling rates) and verify propagation is done correctly. 
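A minimal sketch of how the new knob can be exercised from a custom plugin: config.sampling_rate is the per-plugin probability (between 0 and 1) introduced by this change, while get_sampling_decision and set_should_sample are the tracing PDK helpers it builds on. The handler name, priority and the nil parent_sampled value are illustrative assumptions, not part of this patch:

    -- hedged sketch of a custom plugin applying a per-plugin sampling rate
    local SamplingSketchHandler = {
      VERSION  = "0.1.0",   -- assumed
      PRIORITY = 10,        -- assumed
    }

    function SamplingSketchHandler:access(conf)
      local tracer = kong.tracing

      -- parent_sampled would normally be parsed from incoming tracing headers;
      -- nil means "no parent decision", so the probability sampler applies
      local parent_sampled = nil

      -- with conf.sampling_rate set (0..1) the decision uses that rate;
      -- when nil, it falls back to the root span / global tracing_sampling_rate
      local sampled = tracer:get_sampling_decision(parent_sampled, conf.sampling_rate)
      tracer:set_should_sample(sampled)
    end

    return SamplingSketchHandler
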
--- .../kong/tracing-sampling-rate-scope.yml | 5 + kong/clustering/compat/removed_fields.lua | 7 ++ kong/pdk/tracing.lua | 98 +++++++++++++------ kong/plugins/opentelemetry/handler.lua | 41 +++++--- kong/plugins/opentelemetry/schema.lua | 7 ++ .../09-hybrid_mode/09-config-compat_spec.lua | 2 + .../37-opentelemetry/03-propagation_spec.lua | 49 ++++++---- .../37-opentelemetry/04-exporter_spec.lua | 89 +++++++++++++++-- .../kong/plugins/trace-propagator/handler.lua | 28 ++++-- 9 files changed, 251 insertions(+), 75 deletions(-) create mode 100644 changelog/unreleased/kong/tracing-sampling-rate-scope.yml diff --git a/changelog/unreleased/kong/tracing-sampling-rate-scope.yml b/changelog/unreleased/kong/tracing-sampling-rate-scope.yml new file mode 100644 index 000000000000..96cde17f1ff8 --- /dev/null +++ b/changelog/unreleased/kong/tracing-sampling-rate-scope.yml @@ -0,0 +1,5 @@ +message: > + Tracing Sampling Rate can now be set via the `config.sampling_rate` property + of the OpenTelemetry plugin instead of it just being a global setting for the gateway. +type: feature +scope: Plugin diff --git a/kong/clustering/compat/removed_fields.lua b/kong/clustering/compat/removed_fields.lua index 7a0eb3c768f4..e0083de8a9b1 100644 --- a/kong/clustering/compat/removed_fields.lua +++ b/kong/clustering/compat/removed_fields.lua @@ -109,4 +109,11 @@ return { "read_body_for_logout", }, }, + + -- Any dataplane older than 3.6.0 + [3006000000] = { + opentelemetry = { + "sampling_rate", + }, + }, } diff --git a/kong/pdk/tracing.lua b/kong/pdk/tracing.lua index c41500d50196..a2074888a6b3 100644 --- a/kong/pdk/tracing.lua +++ b/kong/pdk/tracing.lua @@ -11,6 +11,7 @@ local tablepool = require "tablepool" local new_tab = require "table.new" local utils = require "kong.tools.utils" local phase_checker = require "kong.pdk.private.phases" +local tracing_context = require "kong.tracing.tracing_context" local ngx = ngx local type = type @@ -63,34 +64,29 @@ local function generate_span_id() return rand_bytes(8) end ---- Build-in sampler -local function always_on_sampler() - return true -end - -local function always_off_sampler() - return false -end - -- Fractions >= 1 will always sample. Fractions < 0 are treated as zero. -- spec: https://github.com/c24t/opentelemetry-specification/blob/3b3d321865cf46364bdfb292c179b6444dc96bf9/specification/sdk-tracing.md#probability-sampler-algorithm -local function get_trace_id_based_sampler(rate) - if type(rate) ~= "number" then - error("invalid fraction", 2) - end +local function get_trace_id_based_sampler(options_sampling_rate) + return function(trace_id, sampling_rate) + sampling_rate = sampling_rate or options_sampling_rate - if rate >= 1 then - return always_on_sampler - end + if type(sampling_rate) ~= "number" then + error("invalid fraction", 2) + end - if rate <= 0 then - return always_off_sampler - end + -- always on sampler + if sampling_rate >= 1 then + return true + end + + -- always off sampler + if sampling_rate <= 0 then + return false + end - local bound = rate * BOUND_MAX + -- probability sampler + local bound = sampling_rate * BOUND_MAX - -- TODO: is this a sound method to sample? 
- return function(trace_id) if #trace_id < SAMPLING_BYTE then error(TOO_SHORT_MESSAGE, 2) end @@ -200,6 +196,10 @@ local function create_span(tracer, options) span.span_id = generate_span_id() span.trace_id = trace_id span.kind = options.span_kind or SPAN_KIND.INTERNAL + -- get_sampling_decision() can be used to dynamically run the sampler's logic + -- and obtain the sampling decision for the span. This way plugins can apply + -- their configured sampling rate dynamically. The sampled flag can then be + -- overwritten by set_should_sample. span.should_sample = sampled setmetatable(span, span_mt) @@ -207,10 +207,6 @@ local function create_span(tracer, options) end local function link_span(tracer, span, name, options) - if not span.should_sample then - kong.log.debug("skipping non-sampled span") - return - end if tracer and type(tracer) ~= "table" then error("invalid tracer", 2) end @@ -270,8 +266,8 @@ end -- local time = ngx.now() -- span:finish(time * 100000000) function span_mt:finish(end_time_ns) - if self.end_time_ns ~= nil or not self.should_sample then - -- span is finished, and already processed or not sampled + if self.end_time_ns ~= nil then + -- span is finished, and already processed return end @@ -426,6 +422,7 @@ noop_tracer.active_span = NOOP noop_tracer.set_active_span = NOOP noop_tracer.process_span = NOOP noop_tracer.set_should_sample = NOOP +noop_tracer.get_sampling_decision = NOOP local VALID_TRACING_PHASES = { rewrite = true, @@ -554,6 +551,51 @@ local function new_tracer(name, options) end end + --- Get the sampling decision result + -- + -- Uses a parent-based sampler when the parent has sampled flag == false + -- to inherit the non-recording decision from the parent span, or when + -- trace_id is not available. + -- + -- Else, apply the probability-based should_sample decision. 
+ -- + -- @function kong.tracing:get_sampling_decision + -- @tparam bool parent_should_sample value of the parent span sampled flag + -- extracted from the incoming tracing headers + -- @tparam number sampling_rate the sampling rate to apply for the + -- probability sampler + -- @treturn bool sampled value of sampled for this trace + function self:get_sampling_decision(parent_should_sample, sampling_rate) + local ctx = ngx.ctx + + local sampled + local root_span = ctx.KONG_SPANS and ctx.KONG_SPANS[1] + local trace_id = tracing_context.get_raw_trace_id(ctx) + + if not root_span or root_span.attributes["kong.propagation_only"] then + -- should not sample if there is no root span or if the root span is + -- a dummy created only to propagate headers + sampled = false + + elseif parent_should_sample == false or not trace_id then + -- trace_id can be nil when tracing instrumentations are disabled + -- and Kong is configured to only do headers propagation + sampled = parent_should_sample + + elseif not sampling_rate then + -- no custom sampling_rate was passed: + -- reuse the sampling result of the root_span + sampled = root_span.should_sample == true + + else + -- use probability-based sampler + sampled = self.sampler(trace_id, sampling_rate) + end + + -- enforce boolean + return not not sampled + end + tracer_memo[name] = setmetatable(self, tracer_mt) return tracer_memo[name] end diff --git a/kong/plugins/opentelemetry/handler.lua b/kong/plugins/opentelemetry/handler.lua index db296fe045b0..71be03634f00 100644 --- a/kong/plugins/opentelemetry/handler.lua +++ b/kong/plugins/opentelemetry/handler.lua @@ -94,26 +94,25 @@ end function OpenTelemetryHandler:access(conf) local headers = ngx_get_headers() local root_span = ngx.ctx.KONG_SPANS and ngx.ctx.KONG_SPANS[1] - local tracer = kong.tracing.new("otel") - -- make propagation running with tracing instrumetation not enabled + -- get the global tracer when available, or instantiate a new one + local tracer = kong.tracing.name == "noop" and kong.tracing.new("otel") + or kong.tracing + + -- make propagation work with tracing disabled if not root_span then root_span = tracer.start_span("root") + root_span:set_attribute("kong.propagation_only", true) - -- the span created only for the propagation and will be bypassed to the exporter + -- since tracing is disabled, turn off sampling entirely for this trace kong.ctx.plugin.should_sample = false end local injected_parent_span = tracing_context.get_unlinked_span("balancer") or root_span + local header_type, trace_id, span_id, parent_id, parent_sampled, _ = propagation_parse(headers, conf.header_type) - local header_type, trace_id, span_id, parent_id, should_sample, _ = propagation_parse(headers, conf.header_type) - if should_sample == false then - tracer:set_should_sample(should_sample) - injected_parent_span.should_sample = should_sample - end - - -- overwrite trace id - -- as we are in a chain of existing trace + -- Overwrite trace ids + -- with the value extracted from incoming tracing headers if trace_id then -- to propagate the correct trace ID we have to set it here -- before passing this span to propagation.set() @@ -121,7 +120,6 @@ function OpenTelemetryHandler:access(conf) -- update the Tracing Context with the trace ID extracted from headers tracing_context.set_raw_trace_id(trace_id) end - -- overwrite root span's parent_id if span_id then root_span.parent_id = span_id @@ -130,6 +128,25 @@ function OpenTelemetryHandler:access(conf) root_span.parent_id = parent_id end + -- Configure the sampled 
flags + local sampled + if kong.ctx.plugin.should_sample == false then + sampled = false + + else + -- Sampling decision for the current trace. + local err + -- get_sampling_decision() depends on the value of the trace id: call it + -- after the trace_id is updated + sampled, err = tracer:get_sampling_decision(parent_sampled, conf.sampling_rate) + if err then + ngx_log(ngx_ERR, _log_prefix, "sampler failure: ", err) + end + end + tracer:set_should_sample(sampled) + -- Set the sampled flag for the outgoing header's span + injected_parent_span.should_sample = sampled + propagation_set(conf.header_type, header_type, injected_parent_span, "w3c") end diff --git a/kong/plugins/opentelemetry/schema.lua b/kong/plugins/opentelemetry/schema.lua index afeae44008be..4601703163dd 100644 --- a/kong/plugins/opentelemetry/schema.lua +++ b/kong/plugins/opentelemetry/schema.lua @@ -59,6 +59,13 @@ return { required = false, default = "preserve", one_of = { "preserve", "ignore", "b3", "b3-single", "w3c", "jaeger", "ot", "aws", "gcp" } } }, + { sampling_rate = { + description = "Tracing sampling rate for configuring the probability-based sampler. When set, this value supersedes the global `tracing_sampling_rate` setting from kong.conf.", + type = "number", + between = {0, 1}, + required = false, + default = nil, + } }, }, entity_checks = { { custom_entity_check = { diff --git a/spec/02-integration/09-hybrid_mode/09-config-compat_spec.lua b/spec/02-integration/09-hybrid_mode/09-config-compat_spec.lua index ce941e445abd..e3fe12f9bb54 100644 --- a/spec/02-integration/09-hybrid_mode/09-config-compat_spec.lua +++ b/spec/02-integration/09-hybrid_mode/09-config-compat_spec.lua @@ -212,6 +212,7 @@ describe("CP/DP config compat transformations #" .. strategy, function() local expected_otel_prior_35 = utils.cycle_aware_deep_copy(opentelemetry) expected_otel_prior_35.config.header_type = "preserve" + expected_otel_prior_35.config.sampling_rate = nil do_assert(utils.uuid(), "3.4.0", expected_otel_prior_35) -- cleanup @@ -231,6 +232,7 @@ describe("CP/DP config compat transformations #" .. strategy, function() local expected_otel_prior_34 = utils.cycle_aware_deep_copy(opentelemetry) expected_otel_prior_34.config.header_type = "preserve" + expected_otel_prior_34.config.sampling_rate = nil do_assert(utils.uuid(), "3.3.0", expected_otel_prior_34) -- cleanup diff --git a/spec/03-plugins/37-opentelemetry/03-propagation_spec.lua b/spec/03-plugins/37-opentelemetry/03-propagation_spec.lua index daf0a6ee2d84..e1d029df92d1 100644 --- a/spec/03-plugins/37-opentelemetry/03-propagation_spec.lua +++ b/spec/03-plugins/37-opentelemetry/03-propagation_spec.lua @@ -57,10 +57,22 @@ local function assert_correct_trace_hierarchy(spans, incoming_span_id) end for _, strategy in helpers.each_strategy() do -describe("propagation tests #" .. strategy, function() +for _, instrumentations in ipairs({"all", "off"}) do +for _, sampling_rate in ipairs({1, 0}) do +describe("propagation tests #" .. strategy .. " instrumentations: " .. instrumentations .. " sampling_rate: " .. sampling_rate, function() local service local proxy_client + local sampled_flag_w3c + local sampled_flag_b3 + if instrumentations == "all" and sampling_rate == 1 then + sampled_flag_w3c = "01" + sampled_flag_b3 = "1" + else + sampled_flag_w3c = "00" + sampled_flag_b3 = "0" + end + lazy_setup(function() local bp = helpers.get_db_utils(strategy, { "services", "routes", "plugins" }, { "trace-propagator" }) @@ -127,6 +139,8 @@ describe("propagation tests #" .. 
strategy, function() database = strategy, plugins = "bundled, trace-propagator", nginx_conf = "spec/fixtures/custom_nginx.template", + tracing_instrumentations = instrumentations, + tracing_sampling_rate = sampling_rate, }) proxy_client = helpers.proxy_client() @@ -144,8 +158,7 @@ describe("propagation tests #" .. strategy, function() }) local body = assert.response(r).has.status(200) local json = cjson.decode(body) - - assert.matches("00%-%x+-%x+-01", json.headers.traceparent) + assert.matches("00%-%x+-%x+-" .. sampled_flag_w3c, json.headers.traceparent) end) it("propagates tracing headers (b3 request)", function() @@ -176,7 +189,7 @@ describe("propagation tests #" .. strategy, function() }) local body = assert.response(r).has.status(200) local json = cjson.decode(body) - assert.matches(trace_id .. "%-%x+%-1%-%x+", json.headers.b3) + assert.matches(trace_id .. "%-%x+%-" .. sampled_flag_b3 .. "%-%x+", json.headers.b3) end) it("without parent_id", function() @@ -191,10 +204,10 @@ describe("propagation tests #" .. strategy, function() }) local body = assert.response(r).has.status(200) local json = cjson.decode(body) - assert.matches(trace_id .. "%-%x+%-1", json.headers.b3) + assert.matches(trace_id .. "%-%x+%-" .. sampled_flag_b3, json.headers.b3) end) - it("with disabled sampling", function() + it("reflects the disabled sampled flag of the incoming tracing header", function() local trace_id = gen_trace_id() local span_id = gen_span_id() @@ -206,6 +219,8 @@ describe("propagation tests #" .. strategy, function() }) local body = assert.response(r).has.status(200) local json = cjson.decode(body) + -- incoming header has sampled=0: always disabled by + -- parent-based sampler assert.matches(trace_id .. "%-%x+%-0", json.headers.b3) end) end) @@ -222,7 +237,7 @@ describe("propagation tests #" .. strategy, function() }) local body = assert.response(r).has.status(200) local json = cjson.decode(body) - assert.matches("00%-" .. trace_id .. "%-%x+-01", json.headers.traceparent) + assert.matches("00%-" .. trace_id .. "%-%x+-" .. sampled_flag_w3c, json.headers.traceparent) end) it("defaults to w3c without propagating when header_type set to ignore and w3c headers sent", function() @@ -239,7 +254,7 @@ describe("propagation tests #" .. strategy, function() local json = cjson.decode(body) assert.is_not_nil(json.headers.traceparent) -- incoming trace id is ignored - assert.not_matches("00%-" .. trace_id .. "%-%x+-01", json.headers.traceparent) + assert.not_matches("00%-" .. trace_id .. "%-%x+-" .. sampled_flag_w3c, json.headers.traceparent) end) it("defaults to w3c without propagating when header_type set to ignore and b3 headers sent", function() @@ -255,7 +270,7 @@ describe("propagation tests #" .. strategy, function() local json = cjson.decode(body) assert.is_not_nil(json.headers.traceparent) -- incoming trace id is ignored - assert.not_matches("00%-" .. trace_id .. "%-%x+-01", json.headers.traceparent) + assert.not_matches("00%-" .. trace_id .. "%-%x+-" .. sampled_flag_w3c, json.headers.traceparent) end) it("propagates w3c tracing headers when header_type set to w3c", function() @@ -270,7 +285,7 @@ describe("propagation tests #" .. strategy, function() }) local body = assert.response(r).has.status(200) local json = cjson.decode(body) - assert.matches("00%-" .. trace_id .. "%-%x+-01", json.headers.traceparent) + assert.matches("00%-" .. trace_id .. "%-%x+-" .. 
sampled_flag_w3c, json.headers.traceparent) end) it("propagates jaeger tracing headers", function() @@ -287,7 +302,7 @@ describe("propagation tests #" .. strategy, function() local body = assert.response(r).has.status(200) local json = cjson.decode(body) -- Trace ID is left padded with 0 for assert - assert.matches( ('0'):rep(32-#trace_id) .. trace_id .. ":%x+:%x+:01", json.headers["uber-trace-id"]) + assert.matches( ('0'):rep(32-#trace_id) .. trace_id .. ":%x+:%x+:" .. sampled_flag_w3c, json.headers["uber-trace-id"]) end) it("propagates ot headers", function() @@ -322,10 +337,10 @@ describe("propagation tests #" .. strategy, function() assert.same(32, #m[1]) assert.same(16, #m[2]) - assert.same("01", m[3]) + assert.same(sampled_flag_w3c, m[3]) end) - it("reuses span propagated by another plugin", function() + it("with multiple plugins, propagates the correct header", function() local trace_id = gen_trace_id() local r = proxy_client:get("/", { @@ -337,13 +352,11 @@ describe("propagation tests #" .. strategy, function() }) local body = assert.response(r).has.status(200) local json = cjson.decode(body) - - -- trace-propagator parses incoming b3 headers, generates a span and - -- propagates it as b3. Opentelemetry ignores incoming type, reuses span - -- generated by the other plugin and propagates it as w3c. - assert.matches("00%-%x+-" .. json.headers["x-b3-spanid"] .. "%-01", json.headers.traceparent) + assert.matches("00%-%x+-" .. json.headers["x-b3-spanid"] .. "%-" .. sampled_flag_w3c, json.headers.traceparent) end) end) +end +end for _, instrumentation in ipairs({ "request", "request,balancer", "all" }) do describe("propagation tests with enabled " .. instrumentation .. " instrumentation (issue #11294) #" .. strategy, function() diff --git a/spec/03-plugins/37-opentelemetry/04-exporter_spec.lua b/spec/03-plugins/37-opentelemetry/04-exporter_spec.lua index 55e057d09776..9eb5a71996ff 100644 --- a/spec/03-plugins/37-opentelemetry/04-exporter_spec.lua +++ b/spec/03-plugins/37-opentelemetry/04-exporter_spec.lua @@ -46,7 +46,7 @@ for _, strategy in helpers.each_strategy() do end) -- helpers - local function setup_instrumentations(types, config, fixtures, router_scoped, service_scoped, another_global) + local function setup_instrumentations(types, config, fixtures, router_scoped, service_scoped, another_global, global_sampling_rate) local http_srv = assert(bp.services:insert { name = "mock-service", host = helpers.mock_upstream_host, @@ -93,7 +93,7 @@ for _, strategy in helpers.each_strategy() do nginx_conf = "spec/fixtures/custom_nginx.template", plugins = "opentelemetry", tracing_instrumentations = types, - tracing_sampling_rate = 1, + tracing_sampling_rate = global_sampling_rate or 1, }, nil, nil, fixtures)) end @@ -131,7 +131,6 @@ for _, strategy in helpers.each_strategy() do }) assert.res_status(200, r) - -- close client connection cli:close() local lines @@ -165,6 +164,85 @@ for _, strategy in helpers.each_strategy() do end) end) + -- this test is not meant to check that the sampling rate is applied + -- precisely (we have unit tests for that), but rather that the config + -- option is properly handled by the plugin and has an effect on the + -- sampling decision. + for _, global_sampling_rate in ipairs{ 0, 0.001, 1} do + describe("With config.sampling_rate set, using global sampling rate: " .. 
global_sampling_rate, function () + local mock + local sampling_rate = 0.5 + -- this trace_id is always sampled with 0.5 rate + local sampled_trace_id = "92a54b3e1a7c4f2da9e44b8a6f3e1dab" + -- this trace_id is never sampled with 0.5 rate + local non_sampled_trace_id = "4bf92f3577b34da6a3ce929d0e0e4736" + + lazy_setup(function() + bp, _ = assert(helpers.get_db_utils(strategy, { + "services", + "routes", + "plugins", + }, { "opentelemetry" })) + + setup_instrumentations("all", { + sampling_rate = sampling_rate, + }, nil, nil, nil, nil, global_sampling_rate) + mock = helpers.http_mock(HTTP_SERVER_PORT, { timeout = HTTP_MOCK_TIMEOUT }) + end) + + lazy_teardown(function() + helpers.stop_kong() + if mock then + mock("close", true) + end + end) + + it("does not sample spans when trace_id == non_sampled_trace_id", function() + local cli = helpers.proxy_client(7000, PROXY_PORT) + local r = assert(cli:send { + method = "GET", + path = "/", + headers = { + traceparent = "00-" .. non_sampled_trace_id .. "-0123456789abcdef-01" + } + }) + assert.res_status(200, r) + + cli:close() + + ngx.sleep(2) + local lines = mock() + assert.is_falsy(lines) + end) + + it("samples spans when trace_id == sampled_trace_id", function () + local body + helpers.wait_until(function() + local cli = helpers.proxy_client(7000, PROXY_PORT) + local r = assert(cli:send { + method = "GET", + path = "/", + headers = { + traceparent = "00-" .. sampled_trace_id .. "-0123456789abcdef-01" + } + }) + assert.res_status(200, r) + + cli:close() + + local lines + lines, body = mock() + return lines + end, 10) + + local decoded = assert(pb.decode("opentelemetry.proto.collector.trace.v1.ExportTraceServiceRequest", body)) + assert.not_nil(decoded) + local scope_spans = decoded.resource_spans[1].scope_spans + assert.is_true(#scope_spans > 0, scope_spans) + end) + end) + end + for _, case in ipairs{ {true, true, true}, {true, true, nil}, @@ -208,7 +286,6 @@ for _, strategy in helpers.each_strategy() do }) assert.res_status(200, r) - -- close client connection cli:close() local lines, err = mock() @@ -259,7 +336,6 @@ for _, strategy in helpers.each_strategy() do }) assert.res_status(200, r) - -- close client connection cli:close() local lines @@ -357,7 +433,6 @@ for _, strategy in helpers.each_strategy() do }) assert.res_status(200, r) - -- close client connection cli:close() helpers.wait_until(function() @@ -428,7 +503,6 @@ for _, strategy in helpers.each_strategy() do }) assert.res_status(200, r) - -- close client connection cli:close() local lines @@ -510,7 +584,6 @@ for _, strategy in helpers.each_strategy() do }) assert.res_status(200, r) - -- close client connection cli:close() local lines diff --git a/spec/fixtures/custom_plugins/kong/plugins/trace-propagator/handler.lua b/spec/fixtures/custom_plugins/kong/plugins/trace-propagator/handler.lua index 909a11f093ba..5b61cbcd3f4b 100644 --- a/spec/fixtures/custom_plugins/kong/plugins/trace-propagator/handler.lua +++ b/spec/fixtures/custom_plugins/kong/plugins/trace-propagator/handler.lua @@ -14,31 +14,41 @@ local _M = { function _M:access(conf) local headers = ngx.req.get_headers() - local tracer = kong.tracing.new("trace-propagator") + local tracer = kong.tracing.name == "noop" and kong.tracing.new("otel") + or kong.tracing local root_span = ngx.ctx.KONG_SPANS and ngx.ctx.KONG_SPANS[1] if not root_span then root_span = tracer.start_span("root") + root_span:set_attribute("kong.propagation_only", true) + kong.ctx.plugin.should_sample = false end - local injected_parent_span = 
tracing_context.get_unlinked_span("balancer") or root_span - local header_type, trace_id, span_id, parent_id, should_sample = propagation_parse(headers) + local injected_parent_span = tracing_context.get_unlinked_span("balancer") or root_span - if should_sample == false then - tracer:set_should_sample(should_sample) - injected_parent_span.should_sample = should_sample - end + local header_type, trace_id, span_id, parent_id, parent_sampled = propagation_parse(headers) + -- overwrite trace ids + -- with the value extracted from incoming tracing headers if trace_id then injected_parent_span.trace_id = trace_id + tracing_context.set_raw_trace_id(trace_id) end - if span_id then root_span.parent_id = span_id - elseif parent_id then root_span.parent_id = parent_id end + -- Set the sampled flag for the outgoing header's span + local sampled + if kong.ctx.plugin.should_sample == false then + sampled = false + else + sampled = tracer:get_sampling_decision(parent_sampled, conf.sampling_rate) + tracer:set_should_sample(sampled) + end + injected_parent_span.should_sample = sampled + local type = header_type and "preserve" or "w3c" propagation_set(type, header_type, injected_parent_span, "w3c") end From 2b99ee7cfd5d5de28717d8d855e20620c9610871 Mon Sep 17 00:00:00 2001 From: oowl Date: Mon, 25 Dec 2023 12:33:09 +0800 Subject: [PATCH 221/249] fix(pdk): response.set_header support header argument with table array of string (#12164) This PR lets response.set_header support setting a header with an array of strings. It also fixes a type error issue in the response-header-transformer plugin when manipulating multiple headers with the same name. FTI-5585 --- ...fix-pdk-response-set-header-with-table.yml | 3 ++ kong/pdk/private/checks.lua | 12 +++++- kong/pdk/response.lua | 5 +-- kong/pdk/service/request.lua | 5 +-- .../01-header_transformer_spec.lua | 5 +++ t/01-pdk/06-service-request/09-set_header.t | 4 +- t/01-pdk/06-service-request/10-add_header.t | 4 +- t/01-pdk/08-response/05-set_header.t | 43 +++++++++++++++++-- t/01-pdk/08-response/06-add_header.t | 6 +-- 9 files changed, 68 insertions(+), 19 deletions(-) create mode 100644 changelog/unreleased/kong/fix-pdk-response-set-header-with-table.yml diff --git a/changelog/unreleased/kong/fix-pdk-response-set-header-with-table.yml b/changelog/unreleased/kong/fix-pdk-response-set-header-with-table.yml new file mode 100644 index 000000000000..079d5e820515 --- /dev/null +++ b/changelog/unreleased/kong/fix-pdk-response-set-header-with-table.yml @@ -0,0 +1,3 @@ +message: "response.set_header support header argument with table array of string" +type: bugfix +scope: PDK diff --git a/kong/pdk/private/checks.lua b/kong/pdk/private/checks.lua index cb6719cb8a2e..455e45da8f2f 100644 --- a/kong/pdk/private/checks.lua +++ b/kong/pdk/private/checks.lua @@ -51,11 +51,19 @@ function checks.validate_header(name, value) local tvalue = type(value) if tvalue ~= "string" then - if tvalue == "number" or tvalue == "boolean" then + if tvalue == "table" then + for _, vv in ipairs(value) do + local tvv = type(vv) + if tvv ~= "string" then + error(fmt("invalid header value in array %q: got %s, " .. + "expected string", name, tvv), 3) + end + end + elseif tvalue == "number" or tvalue == "boolean" then value = tostring(value) else error(fmt("invalid header value for %q: got %s, expected " .. 
- "string, number or boolean", name, tvalue), 3) + "array of string, string, number or boolean", name, tvalue), 3) end end return value diff --git a/kong/pdk/response.lua b/kong/pdk/response.lua index dd83b2a8270a..b12493158bef 100644 --- a/kong/pdk/response.lua +++ b/kong/pdk/response.lua @@ -31,7 +31,6 @@ local error = error local pairs = pairs local coroutine = coroutine local cjson_encode = cjson.encode -local normalize_header = checks.normalize_header local normalize_multi_header = checks.normalize_multi_header local validate_header = checks.validate_header local validate_headers = checks.validate_headers @@ -431,7 +430,7 @@ local function new(self, major_version) return end - ngx.header[name] = normalize_header(value) + ngx.header[name] = normalize_multi_header(value) end @@ -463,7 +462,7 @@ local function new(self, major_version) validate_header(name, value) - add_header(name, normalize_header(value)) + add_header(name, normalize_multi_header(value)) end diff --git a/kong/pdk/service/request.lua b/kong/pdk/service/request.lua index 7210877f45d6..efb3c6cb0c11 100644 --- a/kong/pdk/service/request.lua +++ b/kong/pdk/service/request.lua @@ -18,7 +18,6 @@ local string_find = string.find local string_sub = string.sub local string_byte = string.byte local string_lower = string.lower -local normalize_header = checks.normalize_header local normalize_multi_header = checks.normalize_multi_header local validate_header = checks.validate_header local validate_headers = checks.validate_headers @@ -312,7 +311,7 @@ local function new(self) end end - ngx.req.set_header(header, normalize_header(value)) + ngx.req.set_header(header, normalize_multi_header(value)) end --- @@ -343,7 +342,7 @@ local function new(self) headers = { headers } end - table_insert(headers, normalize_header(value)) + table_insert(headers, normalize_multi_header(value)) ngx.req.set_header(header, headers) end diff --git a/spec/03-plugins/15-response-transformer/01-header_transformer_spec.lua b/spec/03-plugins/15-response-transformer/01-header_transformer_spec.lua index ca15b1a562a8..9fb96f839360 100644 --- a/spec/03-plugins/15-response-transformer/01-header_transformer_spec.lua +++ b/spec/03-plugins/15-response-transformer/01-header_transformer_spec.lua @@ -148,6 +148,11 @@ describe("Plugin: response-transformer", function() header_transformer.transform_headers(conf, headers) assert.same({}, headers) end) + it("header rename when same header being set twice", function() + local headers = get_headers({ h1 = { "v1", "v2"}}) + header_transformer.transform_headers(conf, headers) + assert.same({h2 = { "v1", "v2" }}, headers) + end) end) describe("replace", function() local conf = { diff --git a/t/01-pdk/06-service-request/09-set_header.t b/t/01-pdk/06-service-request/09-set_header.t index f9cf2b8e9070..bb181379dea9 100644 --- a/t/01-pdk/06-service-request/09-set_header.t +++ b/t/01-pdk/06-service-request/09-set_header.t @@ -68,7 +68,7 @@ invalid header name "127001": got number, expected string --- request GET /t --- response_body -invalid header value for "foo": got function, expected string, number or boolean +invalid header value for "foo": got function, expected array of string, string, number or boolean --- no_error_log [error] @@ -89,7 +89,7 @@ invalid header value for "foo": got function, expected string, number or boolean --- request GET /t --- response_body -invalid header value for "foo": got nil, expected string, number or boolean +invalid header value for "foo": got nil, expected array of string, string, number or boolean 
--- no_error_log [error] diff --git a/t/01-pdk/06-service-request/10-add_header.t b/t/01-pdk/06-service-request/10-add_header.t index 68ffadce56bd..155c616ad66b 100644 --- a/t/01-pdk/06-service-request/10-add_header.t +++ b/t/01-pdk/06-service-request/10-add_header.t @@ -68,7 +68,7 @@ invalid header name "127001": got number, expected string --- request GET /t --- response_body -invalid header value for "foo": got function, expected string, number or boolean +invalid header value for "foo": got function, expected array of string, string, number or boolean --- no_error_log [error] @@ -89,7 +89,7 @@ invalid header value for "foo": got function, expected string, number or boolean --- request GET /t --- response_body -invalid header value for "foo": got nil, expected string, number or boolean +invalid header value for "foo": got nil, expected array of string, string, number or boolean --- no_error_log [error] diff --git a/t/01-pdk/08-response/05-set_header.t b/t/01-pdk/08-response/05-set_header.t index 57a9257d113e..ed4cf1fea607 100644 --- a/t/01-pdk/08-response/05-set_header.t +++ b/t/01-pdk/08-response/05-set_header.t @@ -77,7 +77,7 @@ invalid header name "127001": got number, expected string -=== TEST 3: response.set_header() errors if value is not a string +=== TEST 3: response.set_header() errors if value is not a table contain array of string --- http_config eval: $t::Util::HttpConfig --- config location = /t { @@ -89,8 +89,9 @@ invalid header name "127001": got number, expected string local PDK = require "kong.pdk" local pdk = PDK.new() + local set_header = { {} } - local ok, err = pcall(pdk.response.set_header, "foo", {}) + local ok, err = pcall(pdk.response.set_header, "foo", set_header) if not ok then ngx.ctx.err = err end @@ -104,7 +105,7 @@ invalid header name "127001": got number, expected string --- request GET /t --- response_body chop -invalid header value for "foo": got table, expected string, number or boolean +invalid header value in array "foo": got table, expected string --- no_error_log [error] @@ -137,7 +138,7 @@ invalid header value for "foo": got table, expected string, number or boolean --- request GET /t --- response_body chop -invalid header value for "foo": got nil, expected string, number or boolean +invalid header value for "foo": got nil, expected array of string, string, number or boolean --- no_error_log [error] @@ -277,3 +278,37 @@ GET /t Transfer-Encoding: chunked --- error_log manually setting Transfer-Encoding. Ignored. + + +=== TEST 8: response.set_header() with header table +--- http_config eval: $t::Util::HttpConfig +--- config + location = /t { + content_by_lua_block { + } + + header_filter_by_lua_block { + ngx.header.content_length = nil + + local PDK = require "kong.pdk" + local pdk = PDK.new() + local set_header = {"a", "b"} + + pdk.response.set_header("X-Foo", set_header) + } + + body_filter_by_lua_block { + local new_headers = ngx.resp.get_headers() + + local cjson = require("cjson") + ngx.arg[1] = "X-Foo: {" .. new_headers["X-Foo"][1] .. "," .. new_headers["X-Foo"][2] .. 
"}" + + ngx.arg[2] = true + } + } +--- request +GET /t +--- response_body chop +X-Foo: {a,b} +--- no_error_log +[error] diff --git a/t/01-pdk/08-response/06-add_header.t b/t/01-pdk/08-response/06-add_header.t index f32af34cd1e2..86644b25ae59 100644 --- a/t/01-pdk/08-response/06-add_header.t +++ b/t/01-pdk/08-response/06-add_header.t @@ -90,7 +90,7 @@ invalid header name "127001": got number, expected string local PDK = require "kong.pdk" local pdk = PDK.new() - local ok, err = pcall(pdk.response.add_header, "foo", {}) + local ok, err = pcall(pdk.response.add_header, "foo", {{}}) if not ok then ngx.ctx.err = err end @@ -104,7 +104,7 @@ invalid header name "127001": got number, expected string --- request GET /t --- response_body chop -invalid header value for "foo": got table, expected string, number or boolean +invalid header value in array "foo": got table, expected string --- no_error_log [error] @@ -137,7 +137,7 @@ invalid header value for "foo": got table, expected string, number or boolean --- request GET /t --- response_body chop -invalid header value for "foo": got nil, expected string, number or boolean +invalid header value for "foo": got nil, expected array of string, string, number or boolean --- no_error_log [error] From b2a4ffd479b5acc9c4d03ddee1602b6e2fb6897f Mon Sep 17 00:00:00 2001 From: oowl Date: Mon, 25 Dec 2023 13:46:04 +0800 Subject: [PATCH 222/249] chore(deps): bump lua-resty-healthcheck to 3.0.1 (#12237) Kong/lua-resty-healthcheck#146 FTI-5478 --- changelog/unreleased/kong/bump-lua-resty-healthcheck-3.0.1.yml | 3 +++ kong-3.6.0-0.rockspec | 2 +- 2 files changed, 4 insertions(+), 1 deletion(-) create mode 100644 changelog/unreleased/kong/bump-lua-resty-healthcheck-3.0.1.yml diff --git a/changelog/unreleased/kong/bump-lua-resty-healthcheck-3.0.1.yml b/changelog/unreleased/kong/bump-lua-resty-healthcheck-3.0.1.yml new file mode 100644 index 000000000000..aa14452feaef --- /dev/null +++ b/changelog/unreleased/kong/bump-lua-resty-healthcheck-3.0.1.yml @@ -0,0 +1,3 @@ +message: "Bumped lua-resty-healthcheck from 3.0.0 to 3.0.1" +type: dependency +scope: Core diff --git a/kong-3.6.0-0.rockspec b/kong-3.6.0-0.rockspec index 4e07f3823b0e..21a5e6e7b09e 100644 --- a/kong-3.6.0-0.rockspec +++ b/kong-3.6.0-0.rockspec @@ -31,7 +31,7 @@ dependencies = { "binaryheap >= 0.4", "luaxxhash >= 1.0", "lua-protobuf == 0.5.0", - "lua-resty-healthcheck == 3.0.0", + "lua-resty-healthcheck == 3.0.1", "lua-messagepack == 0.5.4", "lua-resty-aws == 1.3.5", "lua-resty-openssl == 1.0.2", From 75ee3a0948adf9078308d5372bbe1642272c924d Mon Sep 17 00:00:00 2001 From: tzssangglass Date: Mon, 25 Dec 2023 13:58:56 +0800 Subject: [PATCH 223/249] docs(changelog): reword rpm package post remove changelog (#12245) Signed-off-by: tzssangglass --- changelog/unreleased/kong/postremove.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/changelog/unreleased/kong/postremove.yml b/changelog/unreleased/kong/postremove.yml index c3e0a805d12e..97080e2cb4c9 100644 --- a/changelog/unreleased/kong/postremove.yml +++ b/changelog/unreleased/kong/postremove.yml @@ -1,3 +1,3 @@ -message: "cleanup of rpm/deb residual files after uninstall" +message: "Ensure Kong-owned directories are cleaned up after an uninstall using the system's package manager." 
type: feature scope: Core From 6e91c994c5e7fb8c6e921dd4508c8174cdfef380 Mon Sep 17 00:00:00 2001 From: oowl Date: Tue, 26 Dec 2023 14:29:17 +0800 Subject: [PATCH 224/249] docs(pdk): fix missing doc for set_header related pdk (#12249) fix missing doc for #12164 --- kong/pdk/response.lua | 4 ++-- kong/pdk/service/request.lua | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/kong/pdk/response.lua b/kong/pdk/response.lua index b12493158bef..37a0c67d11f4 100644 --- a/kong/pdk/response.lua +++ b/kong/pdk/response.lua @@ -412,7 +412,7 @@ local function new(self, major_version) -- @function kong.response.set_header -- @phases rewrite, access, header_filter, response, admin_api -- @tparam string name The name of the header - -- @tparam string|number|boolean value The new value for the header. + -- @tparam array of strings|string|number|boolean value The new value for the header. -- @return Nothing; throws an error on invalid input. -- @usage -- kong.response.set_header("X-Foo", "value") @@ -445,7 +445,7 @@ local function new(self, major_version) -- @function kong.response.add_header -- @phases rewrite, access, header_filter, response, admin_api -- @tparam string name The header name. - -- @tparam string|number|boolean value The header value. + -- @tparam array of strings|string|number|boolean value The header value. -- @return Nothing; throws an error on invalid input. -- @usage -- kong.response.add_header("Cache-Control", "no-cache") diff --git a/kong/pdk/service/request.lua b/kong/pdk/service/request.lua index efb3c6cb0c11..495dbf0febcf 100644 --- a/kong/pdk/service/request.lua +++ b/kong/pdk/service/request.lua @@ -287,7 +287,7 @@ local function new(self) -- @function kong.service.request.set_header -- @phases `rewrite`, `access`, `balancer` -- @tparam string header The header name. Example: "X-Foo". - -- @tparam string|boolean|number value The header value. Example: "hello world". + -- @tparam array of strings|string|boolean|number value The header value. Example: "hello world". -- @return Nothing; throws an error on invalid inputs. -- @usage -- kong.service.request.set_header("X-Foo", "value") @@ -323,7 +323,7 @@ local function new(self) -- @function kong.service.request.add_header -- @phases `rewrite`, `access` -- @tparam string header The header name. Example: "Cache-Control". - -- @tparam string|number|boolean value The header value. Example: "no-cache". + -- @tparam array of strings|string|number|boolean value The header value. Example: "no-cache". -- @return Nothing; throws an error on invalid inputs. 
-- @usage -- kong.service.request.add_header("Cache-Control", "no-cache") From 1ab6ead0ee9759127d427334d644962e98a667bd Mon Sep 17 00:00:00 2001 From: Xumin <100666470+StarlightIbuki@users.noreply.github.com> Date: Wed, 27 Dec 2023 06:43:47 +0000 Subject: [PATCH 225/249] feat(templates): enable `status_listen` by default on localhost (#12254) KAG-3359 --------- Co-authored-by: Keery Nie --- changelog/unreleased/kong/default_status_port.yml.yml | 3 +++ kong.conf.default | 3 ++- kong/templates/kong_defaults.lua | 2 +- 3 files changed, 6 insertions(+), 2 deletions(-) create mode 100644 changelog/unreleased/kong/default_status_port.yml.yml diff --git a/changelog/unreleased/kong/default_status_port.yml.yml b/changelog/unreleased/kong/default_status_port.yml.yml new file mode 100644 index 000000000000..ec3c3a510de8 --- /dev/null +++ b/changelog/unreleased/kong/default_status_port.yml.yml @@ -0,0 +1,3 @@ +message: Enable `status_listen` on `127.0.0.1:8007` by default +type: feature +scope: Admin API diff --git a/kong.conf.default b/kong.conf.default index 6f1fe1f0844f..18c578403b49 100644 --- a/kong.conf.default +++ b/kong.conf.default @@ -680,7 +680,8 @@ # # Example: `admin_listen = 127.0.0.1:8444 http2 ssl` -#status_listen = off # Comma-separated list of addresses and ports on +#status_listen = 127.0.0.1:8007 reuseport backlog=16384 + # Comma-separated list of addresses and ports on # which the Status API should listen. # The Status API is a read-only endpoint # allowing monitoring tools to retrieve metrics, diff --git a/kong/templates/kong_defaults.lua b/kong/templates/kong_defaults.lua index 7ff840c17eb3..2c0802bc72af 100644 --- a/kong/templates/kong_defaults.lua +++ b/kong/templates/kong_defaults.lua @@ -28,7 +28,7 @@ proxy_listen = 0.0.0.0:8000 reuseport backlog=16384, 0.0.0.0:8443 http2 ssl reus stream_listen = off admin_listen = 127.0.0.1:8001 reuseport backlog=16384, 127.0.0.1:8444 http2 ssl reuseport backlog=16384 admin_gui_listen = 0.0.0.0:8002, 0.0.0.0:8445 ssl -status_listen = off +status_listen = 127.0.0.1:8007 reuseport backlog=16384 cluster_listen = 0.0.0.0:8005 cluster_control_plane = 127.0.0.1:8005 cluster_cert = NONE From 80fa39fcd1c8ac403c0b19ca56c0592745412881 Mon Sep 17 00:00:00 2001 From: Datong Sun Date: Tue, 26 Dec 2023 22:15:13 -0800 Subject: [PATCH 226/249] chore(actions): pin `gateway-test-scheduler` with hash This is required for security compliance. Dependabot should take care of bumping in the future. 
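For operators picking up the new `status_listen` default from the template patch above, the Status API now answers on the loopback interface out of the box. The snippet below is a minimal sketch for sanity-checking that from the gateway host, assuming lua-resty-http is available (run via the `resty` CLI) and the node keeps the default `127.0.0.1:8007` address; it is illustrative and not taken from these patches.

local http = require("resty.http")

local client = http.new()
-- probe the read-only Status API on its new default address
local res, err = client:request_uri("http://127.0.0.1:8007/status")
if not res then
  error("status API not reachable: " .. err)
end
print(res.status)  -- expect 200 when the node is healthy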
--- .github/workflows/build_and_test.yml | 4 ++-- .github/workflows/update-test-runtime-statistics.yml | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/build_and_test.yml b/.github/workflows/build_and_test.yml index 7537a411afb9..0aee08aa20bb 100644 --- a/.github/workflows/build_and_test.yml +++ b/.github/workflows/build_and_test.yml @@ -104,7 +104,7 @@ jobs: repo-path: Kong/gateway-action-storage/main/.ci/runtimes.json - name: Schedule tests - uses: Kong/gateway-test-scheduler/schedule@v1 + uses: Kong/gateway-test-scheduler/schedule@b91bd7aec42bd13748652929f087be81d1d40843 # v1 with: test-suites-file: .ci/test_suites.json test-file-runtime-file: .ci/runtimes.json @@ -266,7 +266,7 @@ jobs: DD_CIVISIBILITY_AGENTLESS_ENABLED: true DD_TRACE_GIT_METADATA_ENABLED: true DD_API_KEY: ${{ secrets.DATADOG_API_KEY }} - uses: Kong/gateway-test-scheduler/runner@v1 + uses: Kong/gateway-test-scheduler/runner@b91bd7aec42bd13748652929f087be81d1d40843 # v1 with: tests-to-run-file: test-chunk.${{ matrix.runner }}.json failed-test-files-file: ${{ env.FAILED_TEST_FILES_FILE }} diff --git a/.github/workflows/update-test-runtime-statistics.yml b/.github/workflows/update-test-runtime-statistics.yml index 77067f35a82d..43e4017a518a 100644 --- a/.github/workflows/update-test-runtime-statistics.yml +++ b/.github/workflows/update-test-runtime-statistics.yml @@ -19,7 +19,7 @@ jobs: token: ${{ secrets.PAT }} - name: Process statistics - uses: Kong/gateway-test-scheduler/analyze@v1 + uses: Kong/gateway-test-scheduler/analyze@b91bd7aec42bd13748652929f087be81d1d40843 # v1 env: GITHUB_TOKEN: ${{ secrets.PAT }} with: From c9fd6c127a9576da09d9af4fa4ba1139b30b3509 Mon Sep 17 00:00:00 2001 From: Chrono Date: Wed, 27 Dec 2023 16:57:54 +0800 Subject: [PATCH 227/249] perf(router): unify cache key and context generation in expressions router (#12127) Cache key and context generation are closely related on field present inside configured expressions. It is advantageous to unify the logic for generating them to: 1. Improve cache hit rate, so that only fields referenced inside expressions participates in cache key generation. This is particularly important since we plan on adding more match fields into expressions in the future 2. Improve performance, allows field value to be cached and reused between cache key and context generation 3. 
Reduced code duplication KAG-3032 --- kong-3.6.0-0.rockspec | 14 +- kong/router/atc.lua | 401 +++++------------- kong/router/fields.lua | 360 ++++++++++++++++ spec/01-unit/08-router_spec.lua | 14 + .../05-proxy/02-router_spec.lua | 8 +- .../05-proxy/19-grpc_proxy_spec.lua | 10 +- 6 files changed, 486 insertions(+), 321 deletions(-) create mode 100644 kong/router/fields.lua diff --git a/kong-3.6.0-0.rockspec b/kong-3.6.0-0.rockspec index 21a5e6e7b09e..127ec878673c 100644 --- a/kong-3.6.0-0.rockspec +++ b/kong-3.6.0-0.rockspec @@ -51,12 +51,6 @@ build = { ["kong.cache"] = "kong/cache/init.lua", ["kong.cache.warmup"] = "kong/cache/warmup.lua", ["kong.global"] = "kong/global.lua", - ["kong.router"] = "kong/router/init.lua", - ["kong.router.traditional"] = "kong/router/traditional.lua", - ["kong.router.compat"] = "kong/router/compat.lua", - ["kong.router.expressions"] = "kong/router/expressions.lua", - ["kong.router.atc"] = "kong/router/atc.lua", - ["kong.router.utils"] = "kong/router/utils.lua", ["kong.reports"] = "kong/reports.lua", ["kong.constants"] = "kong/constants.lua", ["kong.concurrency"] = "kong/concurrency.lua", @@ -65,6 +59,14 @@ build = { ["kong.error_handlers"] = "kong/error_handlers.lua", ["kong.hooks"] = "kong/hooks.lua", + ["kong.router"] = "kong/router/init.lua", + ["kong.router.traditional"] = "kong/router/traditional.lua", + ["kong.router.compat"] = "kong/router/compat.lua", + ["kong.router.expressions"] = "kong/router/expressions.lua", + ["kong.router.atc"] = "kong/router/atc.lua", + ["kong.router.fields"] = "kong/router/fields.lua", + ["kong.router.utils"] = "kong/router/utils.lua", + ["kong.conf_loader"] = "kong/conf_loader/init.lua", ["kong.conf_loader.constants"] = "kong/conf_loader/constants.lua", ["kong.conf_loader.parse"] = "kong/conf_loader/parse.lua", diff --git a/kong/router/atc.lua b/kong/router/atc.lua index 55064e1e34d7..6d2d32afed85 100644 --- a/kong/router/atc.lua +++ b/kong/router/atc.lua @@ -5,10 +5,9 @@ local _MT = { __index = _M, } local buffer = require("string.buffer") local schema = require("resty.router.schema") local router = require("resty.router.router") -local context = require("resty.router.context") local lrucache = require("resty.lrucache") -local server_name = require("ngx.ssl").server_name local tb_new = require("table.new") +local fields = require("kong.router.fields") local utils = require("kong.router.utils") local yield = require("kong.tools.yield").yield @@ -29,15 +28,14 @@ local header = ngx.header local var = ngx.var local ngx_log = ngx.log local get_phase = ngx.get_phase -local get_method = ngx.req.get_method -local get_headers = ngx.req.get_headers -local get_uri_args = ngx.req.get_uri_args local ngx_ERR = ngx.ERR local check_select_params = utils.check_select_params local get_service_info = utils.get_service_info local route_match_stat = utils.route_match_stat +local get_cache_key = fields.get_cache_key +local get_atc_context = fields.get_atc_context local DEFAULT_MATCH_LRUCACHE_SIZE = utils.DEFAULT_MATCH_LRUCACHE_SIZE @@ -184,37 +182,6 @@ local function add_atc_matcher(inst, route, route_id, end -local function categorize_fields(fields) - - if not is_http then - return fields, nil, nil - end - - local basic = {} - local headers = {} - local queries = {} - - -- 13 bytes, same len for "http.queries." - local PREFIX_LEN = 13 -- #"http.headers." - - for _, field in ipairs(fields) do - local prefix = field:sub(1, PREFIX_LEN) - - if prefix == "http.headers." 
then - headers[field:sub(PREFIX_LEN + 1)] = field - - elseif prefix == "http.queries." then - queries[field:sub(PREFIX_LEN + 1)] = field - - else - table.insert(basic, field) - end - end - - return basic, headers, queries -end - - local function new_from_scratch(routes, get_exp_and_priority) local phase = get_phase() @@ -253,7 +220,7 @@ local function new_from_scratch(routes, get_exp_and_priority) yield(true, phase) end - local fields, header_fields, query_fields = categorize_fields(inst:get_fields()) + local fields = inst:get_fields() return setmetatable({ schema = CACHED_SCHEMA, @@ -261,8 +228,6 @@ local function new_from_scratch(routes, get_exp_and_priority) routes = routes_t, services = services_t, fields = fields, - header_fields = header_fields, - query_fields = query_fields, updated_at = new_updated_at, rebuilding = false, }, _MT) @@ -344,11 +309,9 @@ local function new_from_previous(routes, get_exp_and_priority, old_router) yield(true, phase) end - local fields, header_fields, query_fields = categorize_fields(inst:get_fields()) + local fields = inst:get_fields() old_router.fields = fields - old_router.header_fields = header_fields - old_router.query_fields = query_fields old_router.updated_at = new_updated_at old_router.rebuilding = false @@ -423,6 +386,9 @@ do end +local CACHE_PARAMS + + if is_http then @@ -432,115 +398,25 @@ local add_debug_headers = utils.add_debug_headers local get_upstream_uri_v0 = utils.get_upstream_uri_v0 -function _M:select(req_method, req_uri, req_host, req_scheme, - _, _, - _, _, - sni, req_headers, req_queries) +function _M:matching(params) + local req_uri = params.uri + local req_host = params.host - check_select_params(req_method, req_uri, req_host, req_scheme, + check_select_params(params.method, req_uri, req_host, params.scheme, nil, nil, nil, nil, - sni, req_headers, req_queries) - - local c = context.new(self.schema) + params.sni, params.headers, params.queries) local host, port = split_host_port(req_host) - for _, field in ipairs(self.fields) do - if field == "http.method" then - assert(c:add_value(field, req_method)) - - elseif field == "http.path" then - local res, err = c:add_value(field, req_uri) - if not res then - return nil, err - end - - elseif field == "http.host" then - local res, err = c:add_value(field, host) - if not res then - return nil, err - end - - elseif field == "net.port" then - assert(c:add_value(field, port)) - - elseif field == "net.protocol" then - assert(c:add_value(field, req_scheme)) - - elseif field == "tls.sni" then - local res, err = c:add_value(field, sni) - if not res then - return nil, err - end + params.host = host + params.port = port - else -- unknown field - error("unknown router matching schema field: " .. 
field) + local c, err = get_atc_context(self.schema, self.fields, params) - end -- if field - - end -- for self.fields - - if req_headers then - for h, field in pairs(self.header_fields) do - - local v = req_headers[h] - - if type(v) == "string" then - local res, err = c:add_value(field, v) - if not res then - return nil, err - end - - elseif type(v) == "table" then - for _, v in ipairs(v) do - local res, err = c:add_value(field, v) - if not res then - return nil, err - end - end - end -- if type(v) - - -- if v is nil or others, ignore - - end -- for self.header_fields - end -- req_headers - - if req_queries then - for n, field in pairs(self.query_fields) do - - local v = req_queries[n] - - -- the query parameter has only one value, like /?foo=bar - if type(v) == "string" then - local res, err = c:add_value(field, v) - if not res then - return nil, err - end - - -- the query parameter has no value, like /?foo, - -- get_uri_arg will get a boolean `true` - -- we think it is equivalent to /?foo= - elseif type(v) == "boolean" then - local res, err = c:add_value(field, "") - if not res then - return nil, err - end - - -- multiple values for a single query parameter, like /?foo=bar&foo=baz - elseif type(v) == "table" then - for _, v in ipairs(v) do - local res, err = c:add_value(field, v) - if not res then - return nil, err - end - end - end -- if type(v) - - -- if v is nil or others, ignore - - end -- for self.query_fields - end -- req_queries + if not c then + return nil, err + end local matched = self.router:execute(c) if not matched then @@ -583,98 +459,48 @@ function _M:select(req_method, req_uri, req_host, req_scheme, end -local get_headers_key -local get_queries_key -do - local tb_sort = table.sort - local tb_concat = table.concat - local replace_dashes_lower = require("kong.tools.string").replace_dashes_lower - - local str_buf = buffer.new(64) - - local function get_headers_or_queries_key(values, lower_func) - str_buf:reset() - - -- NOTE: DO NOT yield until str_buf:get() - for name, value in pairs(values) do - if lower_func then - name = lower_func(name) - end - - if type(value) == "table" then - tb_sort(value) - value = tb_concat(value, ", ") - end - - str_buf:putf("|%s=%s", name, value) - end - - return str_buf:get() - end - - get_headers_key = function(headers) - return get_headers_or_queries_key(headers, replace_dashes_lower) - end - - get_queries_key = function(queries) - return get_headers_or_queries_key(queries) - end -end - +-- only for unit-testing +function _M:select(req_method, req_uri, req_host, req_scheme, + _, _, + _, _, + sni, req_headers, req_queries) --- func => get_headers or get_uri_args --- name => "headers" or "queries" --- max_config_option => "lua_max_req_headers" or "lua_max_uri_args" -local function get_http_params(func, name, max_config_option) - local params, err = func() - if err == "truncated" then - local max = kong and kong.configuration and kong.configuration[max_config_option] or 100 - ngx_log(ngx_ERR, - string.format("router: not all request %s were read in order to determine the route " .. - "as the request contains more than %d %s, " .. - "route selection may be inaccurate, " .. - "consider increasing the '%s' configuration value " .. 
- "(currently at %d)", - name, max, name, max_config_option, max)) - end + local params = { + method = req_method, + uri = req_uri, + host = req_host, + scheme = req_scheme, + sni = sni, + headers = req_headers, + queries = req_queries, + } - return params + return self:matching(params) end function _M:exec(ctx) - local req_method = get_method() local req_uri = ctx and ctx.request_uri or var.request_uri local req_host = var.http_host - local sni = server_name() - local headers, headers_key - if not is_empty_field(self.header_fields) then - headers = get_http_params(get_headers, "headers", "lua_max_req_headers") + req_uri = strip_uri_args(req_uri) - headers["host"] = nil + -- cache key calculation - headers_key = get_headers_key(headers) + if not CACHE_PARAMS then + -- access `kong.configuration.log_level` here + CACHE_PARAMS = require("kong.tools.request_aware_table").new() end - local queries, queries_key - if not is_empty_field(self.query_fields) then - queries = get_http_params(get_uri_args, "queries", "lua_max_uri_args") + CACHE_PARAMS:clear() - queries_key = get_queries_key(queries) - end + CACHE_PARAMS.uri = req_uri + CACHE_PARAMS.host = req_host - req_uri = strip_uri_args(req_uri) + local cache_key = get_cache_key(self.fields, CACHE_PARAMS) -- cache lookup - local cache_key = (req_method or "") .. "|" .. - (req_uri or "") .. "|" .. - (req_host or "") .. "|" .. - (sni or "") .. "|" .. - (headers_key or "") .. "|" .. - (queries_key or "") - local match_t = self.cache:get(cache_key) if not match_t then if self.cache_neg:get(cache_key) then @@ -682,12 +508,10 @@ function _M:exec(ctx) return nil end - local req_scheme = ctx and ctx.scheme or var.scheme + CACHE_PARAMS.scheme = ctx and ctx.scheme or var.scheme local err - match_t, err = self:select(req_method, req_uri, req_host, req_scheme, - nil, nil, nil, nil, - sni, headers, queries) + match_t, err = self:matching(CACHE_PARAMS) if not match_t then if err then ngx_log(ngx_ERR, "router returned an error: ", err, @@ -702,6 +526,11 @@ function _M:exec(ctx) else route_match_stat(ctx, "pos") + + -- preserve_host header logic, modify cache result + if match_t.route.preserve_host then + match_t.upstream_host = req_host + end end -- found a match @@ -714,46 +543,19 @@ end else -- is stream subsystem -function _M:select(_, _, _, scheme, - src_ip, src_port, - dst_ip, dst_port, - sni) - - check_select_params(nil, nil, nil, scheme, - src_ip, src_port, - dst_ip, dst_port, - sni) - - local c = context.new(self.schema) - - for _, field in ipairs(self.fields) do - if field == "net.protocol" then - assert(c:add_value(field, scheme)) - - elseif field == "tls.sni" then - local res, err = c:add_value(field, sni) - if not res then - return nil, err - end - - elseif field == "net.src.ip" then - assert(c:add_value(field, src_ip)) - - elseif field == "net.src.port" then - assert(c:add_value(field, src_port)) - - elseif field == "net.dst.ip" then - assert(c:add_value(field, dst_ip)) - elseif field == "net.dst.port" then - assert(c:add_value(field, dst_port)) +function _M:matching(params) + local sni = params.sni - else -- unknown field - error("unknown router matching schema field: " .. 
field) - - end -- if field + check_select_params(nil, nil, nil, params.scheme, + params.src_ip, params.src_port, + params.dst_ip, params.dst_port, + sni) - end -- for self.fields + local c, err = get_atc_context(self.schema, self.fields, params) + if not c then + return nil, err + end local matched = self.router:execute(c) if not matched then @@ -783,41 +585,38 @@ function _M:select(_, _, _, scheme, end -function _M:exec(ctx) - local src_ip = var.remote_addr - local dst_ip = var.server_addr +-- only for unit-testing +function _M:select(_, _, _, scheme, + src_ip, src_port, + dst_ip, dst_port, + sni) - local src_port = tonumber(var.remote_port, 10) - local dst_port = tonumber((ctx or ngx.ctx).host_port, 10) or - tonumber(var.server_port, 10) + local params = { + scheme = scheme, + src_ip = src_ip, + src_port = src_port, + dst_ip = dst_ip, + dst_port = dst_port, + sni = sni, + } - -- error value for non-TLS connections ignored intentionally - local sni = server_name() + return self:matching(params) +end - -- fallback to preread SNI if current connection doesn't terminate TLS - if not sni then - sni = var.ssl_preread_server_name - end - local scheme - if var.protocol == "UDP" then - scheme = "udp" - else - scheme = sni and "tls" or "tcp" - end +function _M:exec(ctx) + -- cache key calculation - -- when proxying TLS request in second layer or doing TLS passthrough - -- rewrite the dst_ip, port back to what specified in proxy_protocol - if var.kong_tls_passthrough_block == "1" or var.ssl_protocol then - dst_ip = var.proxy_protocol_server_addr - dst_port = tonumber(var.proxy_protocol_server_port) + if not CACHE_PARAMS then + -- access `kong.configuration.log_level` here + CACHE_PARAMS = require("kong.tools.request_aware_table").new() end - local cache_key = (src_ip or "") .. "|" .. - (src_port or "") .. "|" .. - (dst_ip or "") .. "|" .. - (dst_port or "") .. "|" .. 
- (sni or "") + CACHE_PARAMS:clear() + + local cache_key = get_cache_key(self.fields, CACHE_PARAMS, ctx) + + -- cache lookup local match_t = self.cache:get(cache_key) if not match_t then @@ -826,11 +625,18 @@ function _M:exec(ctx) return nil end + local scheme + if var.protocol == "UDP" then + scheme = "udp" + + else + scheme = CACHE_PARAMS.sni and "tls" or "tcp" + end + + CACHE_PARAMS.scheme = scheme + local err - match_t, err = self:select(nil, nil, nil, scheme, - src_ip, src_port, - dst_ip, dst_port, - sni) + match_t, err = self:matching(CACHE_PARAMS) if not match_t then if err then ngx_log(ngx_ERR, "router returned an error: ", err) @@ -869,19 +675,8 @@ function _M._set_ngx(mock_ngx) ngx_log = mock_ngx.log end - if type(mock_ngx.req) == "table" then - if mock_ngx.req.get_method then - get_method = mock_ngx.req.get_method - end - - if mock_ngx.req.get_headers then - get_headers = mock_ngx.req.get_headers - end - - if mock_ngx.req.get_uri_args then - get_uri_args = mock_ngx.req.get_uri_args - end - end + -- unit testing + fields._set_ngx(mock_ngx) end diff --git a/kong/router/fields.lua b/kong/router/fields.lua new file mode 100644 index 000000000000..11e2a09fe959 --- /dev/null +++ b/kong/router/fields.lua @@ -0,0 +1,360 @@ +local buffer = require("string.buffer") +local context = require("resty.router.context") + + +local type = type +local ipairs = ipairs +local assert = assert +local tb_sort = table.sort +local tb_concat = table.concat +local replace_dashes_lower = require("kong.tools.string").replace_dashes_lower + + +local var = ngx.var +local get_method = ngx.req.get_method +local get_headers = ngx.req.get_headers +local get_uri_args = ngx.req.get_uri_args +local server_name = require("ngx.ssl").server_name + + +local PREFIX_LEN = 13 -- #"http.headers." +local HTTP_HEADERS_PREFIX = "http.headers." +local HTTP_QUERIES_PREFIX = "http.queries." 
+ + +local FIELDS_FUNCS = { + -- http.* + + ["http.method"] = + function(params) + if not params.method then + params.method = get_method() + end + + return params.method + end, + + ["http.path"] = + function(params) + return params.uri + end, + + ["http.host"] = + function(params) + return params.host + end, + + -- net.* + + ["net.src.ip"] = + function(params) + if not params.src_ip then + params.src_ip = var.remote_addr + end + + return params.src_ip + end, + + ["net.src.port"] = + function(params) + if not params.src_port then + params.src_port = tonumber(var.remote_port, 10) + end + + return params.src_port + end, + + -- below are atc context only + + ["net.protocol"] = + function(params) + return params.scheme + end, + + ["net.port"] = + function(params) + return params.port + end, +} + + +local is_http = ngx.config.subsystem == "http" + + +if is_http then + -- tls.* + + FIELDS_FUNCS["tls.sni"] = + function(params) + if not params.sni then + params.sni = server_name() + end + + return params.sni + end + + -- net.* + + FIELDS_FUNCS["net.dst.ip"] = + function(params) + if not params.dst_ip then + params.dst_ip = var.server_addr + end + + return params.dst_ip + end + + FIELDS_FUNCS["net.dst.port"] = + function(params, ctx) + if not params.dst_port then + params.dst_port = tonumber((ctx or ngx.ctx).host_port, 10) or + tonumber(var.server_port, 10) + end + + return params.dst_port + end + +else -- stream + + -- tls.* + -- error value for non-TLS connections ignored intentionally + -- fallback to preread SNI if current connection doesn't terminate TLS + + FIELDS_FUNCS["tls.sni"] = + function(params) + if not params.sni then + params.sni = server_name() or var.ssl_preread_server_name + end + + return params.sni + end + + -- net.* + -- when proxying TLS request in second layer or doing TLS passthrough + -- rewrite the dst_ip, port back to what specified in proxy_protocol + + FIELDS_FUNCS["net.dst.ip"] = + function(params) + if not params.dst_ip then + if var.kong_tls_passthrough_block == "1" or var.ssl_protocol then + params.dst_ip = var.proxy_protocol_server_addr + + else + params.dst_ip = var.server_addr + end + end + + return params.dst_ip + end + + FIELDS_FUNCS["net.dst.port"] = + function(params, ctx) + if not params.dst_port then + if var.kong_tls_passthrough_block == "1" or var.ssl_protocol then + params.dst_port = tonumber(var.proxy_protocol_server_port) + + else + params.dst_port = tonumber((ctx or ngx.ctx).host_port, 10) or + tonumber(var.server_port, 10) + end + end + + return params.dst_port + end + +end -- is_http + + +if is_http then + + local fmt = string.format + + -- func => get_headers or get_uri_args + -- name => "headers" or "queries" + -- max_config_option => "lua_max_req_headers" or "lua_max_uri_args" + local function get_http_params(func, name, max_config_option) + local params, err = func() + if err == "truncated" then + local max = kong and kong.configuration and kong.configuration[max_config_option] or 100 + ngx.log(ngx.ERR, + fmt("router: not all request %s were read in order to determine the route " .. + "as the request contains more than %d %s, " .. + "route selection may be inaccurate, " .. + "consider increasing the '%s' configuration value " .. 
+ "(currently at %d)", + name, max, name, max_config_option, max)) + end + + return params + end + + + setmetatable(FIELDS_FUNCS, { + __index = function(_, field) + local prefix = field:sub(1, PREFIX_LEN) + + if prefix == HTTP_HEADERS_PREFIX then + return function(params) + if not params.headers then + params.headers = get_http_params(get_headers, "headers", "lua_max_req_headers") + end + + return params.headers[field:sub(PREFIX_LEN + 1)] + end + + elseif prefix == HTTP_QUERIES_PREFIX then + return function(params) + if not params.queries then + params.queries = get_http_params(get_uri_args, "queries", "lua_max_uri_args") + end + + return params.queries[field:sub(PREFIX_LEN + 1)] + end + end + + -- others return nil + end + }) + +end -- is_http + + +local function fields_visitor(fields, params, ctx, cb) + for _, field in ipairs(fields) do + local func = FIELDS_FUNCS[field] + + if not func then -- unknown field + error("unknown router matching schema field: " .. field) + end -- if func + + local value = func(params, ctx) + + local res, err = cb(field, value) + if not res then + return nil, err + end + end -- for fields + + return true +end + + +-- cache key string +local str_buf = buffer.new(64) + + +local function get_cache_key(fields, params, ctx) + str_buf:reset() + + local res = + fields_visitor(fields, params, ctx, function(field, value) + + -- these fields were not in cache key + if field == "net.protocol" or field == "net.port" then + return true + end + + local headers_or_queries = field:sub(1, PREFIX_LEN) + + if headers_or_queries == HTTP_HEADERS_PREFIX then + headers_or_queries = true + field = replace_dashes_lower(field) + + elseif headers_or_queries == HTTP_QUERIES_PREFIX then + headers_or_queries = true + + else + headers_or_queries = false + end + + if not headers_or_queries then + str_buf:put(value or ""):put("|") + + else -- headers or queries + if type(value) == "table" then + tb_sort(value) + value = tb_concat(value, ",") + end + + str_buf:putf("%s=%s|", field, value or "") + end + + return true + end) -- fields_visitor + + assert(res) + + return str_buf:get() +end + + +local function get_atc_context(schema, fields, params) + local c = context.new(schema) + + local res, err = + fields_visitor(fields, params, nil, function(field, value) + + local prefix = field:sub(1, PREFIX_LEN) + + if prefix == HTTP_HEADERS_PREFIX or prefix == HTTP_QUERIES_PREFIX then + local v_type = type(value) + + -- multiple values for a single query parameter, like /?foo=bar&foo=baz + if v_type == "table" then + for _, v in ipairs(value) do + local res, err = c:add_value(field, v) + if not res then + return nil, err + end + end + + return true + end -- if v_type + + -- the query parameter has only one value, like /?foo=bar + -- the query parameter has no value, like /?foo, + -- get_uri_arg will get a boolean `true` + -- we think it is equivalent to /?foo= + if v_type == "boolean" then + value = "" + end + end + + return c:add_value(field, value) + end) -- fields_visitor + + if not res then + return nil, err + end + + return c +end + + +local function _set_ngx(mock_ngx) + if mock_ngx.var then + var = mock_ngx.var + end + + if type(mock_ngx.req) == "table" then + if mock_ngx.req.get_method then + get_method = mock_ngx.req.get_method + end + + if mock_ngx.req.get_headers then + get_headers = mock_ngx.req.get_headers + end + + if mock_ngx.req.get_uri_args then + get_uri_args = mock_ngx.req.get_uri_args + end + end +end + + +return { + get_cache_key = get_cache_key, + get_atc_context = get_atc_context, + 
+ _set_ngx = _set_ngx, +} diff --git a/spec/01-unit/08-router_spec.lua b/spec/01-unit/08-router_spec.lua index fa7af30c1a33..dc1247b31fff 100644 --- a/spec/01-unit/08-router_spec.lua +++ b/spec/01-unit/08-router_spec.lua @@ -12,6 +12,7 @@ local function reload_router(flavor, subsystem) ngx.config.subsystem = subsystem or "http" -- luacheck: ignore + package.loaded["kong.router.fields"] = nil package.loaded["kong.router.atc"] = nil package.loaded["kong.router.compat"] = nil package.loaded["kong.router.expressions"] = nil @@ -367,6 +368,10 @@ for _, flavor in ipairs({ "traditional", "traditional_compatible", "expressions" }, } router = assert(new_router(use_case)) + + -- let atc-router happy + local _ngx = mock_ngx("GET", "/", { a = "1" }) + router._set_ngx(_ngx) end) @@ -2745,6 +2750,11 @@ for _, flavor in ipairs({ "traditional", "traditional_compatible", "expressions" it("matches correct route", function() local router = assert(new_router(use_case)) + + -- let atc-router happy + local _ngx = mock_ngx("GET", "/", { a = "1" }) + router._set_ngx(_ngx) + local match_t = router:select("GET", "/my-target-uri", "domain.org") assert.truthy(match_t) assert.same(use_case[#use_case].route, match_t.route) @@ -4338,6 +4348,10 @@ for _, flavor in ipairs({ "traditional", "traditional_compatible", "expressions" } router = assert(new_router(use_case)) + + -- let atc-router happy + local _ngx = mock_ngx("GET", "/", { a = "1" }) + router._set_ngx(_ngx) end) it("[src_ip]", function() diff --git a/spec/02-integration/05-proxy/02-router_spec.lua b/spec/02-integration/05-proxy/02-router_spec.lua index 74d4f491bee3..855e64ebfe9d 100644 --- a/spec/02-integration/05-proxy/02-router_spec.lua +++ b/spec/02-integration/05-proxy/02-router_spec.lua @@ -877,7 +877,7 @@ for _, strategy in helpers.each_strategy() do describe("URI arguments (querystring)", function() local routes - lazy_setup(function() + before_each(function() routes = insert_routes(bp, { { hosts = { "mock_upstream" }, @@ -885,7 +885,7 @@ for _, strategy in helpers.each_strategy() do }) end) - lazy_teardown(function() + after_each(function() remove_routes(strategy, routes) end) @@ -1343,7 +1343,7 @@ for _, strategy in helpers.each_strategy() do path = "/status/201", headers = { ["kong-debug"] = 1 }, }) - assert.res_status(201, res) + assert.res_status(flavor == "traditional" and 201 or 200, res) assert.equal("service_behind_www.example.org", res.headers["kong-service-name"]) @@ -1365,7 +1365,7 @@ for _, strategy in helpers.each_strategy() do path = "/status/201", headers = { ["kong-debug"] = 1 }, }) - assert.res_status(201, res) + assert.res_status(flavor == "traditional" and 201 or 200, res) assert.equal("service_behind_example.org", res.headers["kong-service-name"]) end) diff --git a/spec/02-integration/05-proxy/19-grpc_proxy_spec.lua b/spec/02-integration/05-proxy/19-grpc_proxy_spec.lua index 2add432ae46e..2d524b085d1a 100644 --- a/spec/02-integration/05-proxy/19-grpc_proxy_spec.lua +++ b/spec/02-integration/05-proxy/19-grpc_proxy_spec.lua @@ -402,14 +402,8 @@ for _, strategy in helpers.each_strategy() do }) assert.falsy(ok) - if flavor == "expressions" then - assert.matches("Code: NotFound", resp, nil, true) - assert.matches("Message: NotFound", resp, nil, true) - - else - assert.matches("Code: Canceled", resp, nil, true) - assert.matches("Message: gRPC request matched gRPCs route", resp, nil, true) - end + assert.matches("Code: Canceled", resp, nil, true) + assert.matches("Message: gRPC request matched gRPCs route", resp, nil, true) end) end) end) 
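The net effect of this router patch is easier to see outside the diff: every schema field referenced by the compiled expressions maps to one accessor function, and a single visitor walks those accessors both to build the LRU cache key and to fill the ATC match context, so only fields the router actually uses participate in either. The following is a simplified, self-contained sketch of that idea; the names are illustrative and the real implementation is `kong/router/fields.lua` above.

-- simplified illustration of the single field-visitor idea
local FIELD_FUNCS = {
  ["http.method"] = function(params) return params.method end,
  ["http.path"]   = function(params) return params.uri end,
  ["http.host"]   = function(params) return params.host end,
}

local function visit(fields, params, cb)
  for _, field in ipairs(fields) do
    local get = FIELD_FUNCS[field]
    assert(get, "unknown router matching schema field: " .. field)
    cb(field, get(params))
  end
end

-- the same walk produces the cache key ...
local function cache_key(fields, params)
  local parts = {}
  visit(fields, params, function(_, value)
    parts[#parts + 1] = value or ""
  end)
  return table.concat(parts, "|")
end

-- ... and, during matching, would feed context:add_value(field, value)
print(cache_key({ "http.method", "http.path" },
                { method = "GET", uri = "/status/200", host = "example.test" }))
--> GET|/status/200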
From 3641e6b9e4040ba2a8978a50678bed9e2a39318f Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 27 Dec 2023 17:42:15 +0800 Subject: [PATCH 228/249] chore(deps): bump tj-actions/changed-files from 40.2.2 to 41.0.1 (#12247) * chore(deps): bump tj-actions/changed-files from 40.2.2 to 41.0.1 Bumps [tj-actions/changed-files](https://github.com/tj-actions/changed-files) from 40.2.2 to 41.0.1. - [Release notes](https://github.com/tj-actions/changed-files/releases) - [Changelog](https://github.com/tj-actions/changed-files/blob/main/HISTORY.md) - [Commits](https://github.com/tj-actions/changed-files/compare/94549999469dbfa032becf298d95c87a14c34394...716b1e13042866565e00e85fd4ec490e186c4a2f) --- updated-dependencies: - dependency-name: tj-actions/changed-files dependency-type: direct:production update-type: version-update:semver-major ... Signed-off-by: dependabot[bot] --------- Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: Datong Sun --- .github/workflows/changelog-requirement.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/changelog-requirement.yml b/.github/workflows/changelog-requirement.yml index 9169a9317557..65402ef3f7d5 100644 --- a/.github/workflows/changelog-requirement.yml +++ b/.github/workflows/changelog-requirement.yml @@ -21,7 +21,7 @@ jobs: - name: Find changelog files id: changelog-list - uses: tj-actions/changed-files@94549999469dbfa032becf298d95c87a14c34394 # v37 + uses: tj-actions/changed-files@716b1e13042866565e00e85fd4ec490e186c4a2f # 41.0.1 with: files_yaml: | changelogs: From 45ff701b1c92b0b5d463d8a907385886e36b6953 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Sun, 10 Dec 2023 00:08:57 +0000 Subject: [PATCH 229/249] chore(deps): bump ngx_wasm_module to b9037acf7fa2d6f9ff02898bfc05544a1edc1fad Changes since 388d5720293f5091ccee1f859a42683fbfd14e7d: * b9037ac - chore(release) ensure release artifacts names include channel --- .requirements | 2 +- changelog/unreleased/kong/bump-ngx-wasm-module.yml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.requirements b/.requirements index 618696da509c..8ac77a2cae12 100644 --- a/.requirements +++ b/.requirements @@ -13,7 +13,7 @@ LUA_RESTY_WEBSOCKET=60eafc3d7153bceb16e6327074e0afc3d94b1316 # 0.4.0 ATC_ROUTER=95f1d8fe10b244bba5b354e5ed3726442373325e # 1.4.0 KONG_MANAGER=nightly -NGX_WASM_MODULE=388d5720293f5091ccee1f859a42683fbfd14e7d # prerelease-0.2.0 +NGX_WASM_MODULE=b9037acf7fa2d6f9ff02898bfc05544a1edc1fad WASMER=3.1.1 WASMTIME=14.0.3 V8=10.5.18 diff --git a/changelog/unreleased/kong/bump-ngx-wasm-module.yml b/changelog/unreleased/kong/bump-ngx-wasm-module.yml index 64ce68434fcf..7af8fa13751a 100644 --- a/changelog/unreleased/kong/bump-ngx-wasm-module.yml +++ b/changelog/unreleased/kong/bump-ngx-wasm-module.yml @@ -1,2 +1,2 @@ -message: "Bump `ngx_wasm_module` to `388d5720293f5091ccee1f859a42683fbfd14e7d`" +message: "Bump `ngx_wasm_module` to `b9037acf7fa2d6f9ff02898bfc05544a1edc1fad`" type: dependency From fac884ed7678c8ed53fe2bada9fe57bdc6f27833 Mon Sep 17 00:00:00 2001 From: Jitendra Kumar <76531339+jitendragangwar123@users.noreply.github.com> Date: Thu, 28 Dec 2023 09:35:35 +0530 Subject: [PATCH 230/249] docs(DEVELOPER): fix typo (#12141) --- DEVELOPER.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/DEVELOPER.md b/DEVELOPER.md index 
99b866d49425..c30ebd17da59 100644 --- a/DEVELOPER.md +++ b/DEVELOPER.md @@ -148,7 +148,7 @@ You can follow [Managing your personal access token](https://docs.github.com/en/ Finally, we start the build process: ``` -# Build the virutual environment for developing Kong +# Build the virtual environment for developing Kong make build-venv ``` From 2346201dc9eedcd366f08d594c08f12150dfa77f Mon Sep 17 00:00:00 2001 From: Datong Sun Date: Thu, 28 Dec 2023 11:58:51 +0800 Subject: [PATCH 231/249] chore(actions): remove "do not merge" label check since it has been removed in favor of Draft PR --- .github/workflows/label-check.yml | 3 --- 1 file changed, 3 deletions(-) diff --git a/.github/workflows/label-check.yml b/.github/workflows/label-check.yml index bfa8b67a7981..4b194e881254 100644 --- a/.github/workflows/label-check.yml +++ b/.github/workflows/label-check.yml @@ -8,9 +8,6 @@ jobs: runs-on: ubuntu-latest steps: - - name: do-not-merge label found - run: echo "do-not-merge label found, this PR will not be merged"; exit 1 - if: ${{ contains(github.event.*.labels.*.name, 'pr/do not merge') || contains(github.event.*.labels.*.name, 'DO NOT MERGE') }} - name: backport master label found run: echo "Please do not backport into master, instead, create a PR targeting master and backport from it instead."; exit 1 if: ${{ contains(github.event.*.labels.*.name, 'backport master') }} From 34dfa8121aa4165ec92bd197a1cbd0a2cb3724eb Mon Sep 17 00:00:00 2001 From: Wangchong Zhou Date: Thu, 28 Dec 2023 16:05:47 +0800 Subject: [PATCH 232/249] chore(deps): bump lua-resty-openssl to 1.2.0 (#12265) --- changelog/unreleased/kong/bump-resty-openssl-1.0.2.yml | 3 --- changelog/unreleased/kong/bump-resty-openssl.yml | 3 +++ kong-3.6.0-0.rockspec | 2 +- 3 files changed, 4 insertions(+), 4 deletions(-) delete mode 100644 changelog/unreleased/kong/bump-resty-openssl-1.0.2.yml create mode 100644 changelog/unreleased/kong/bump-resty-openssl.yml diff --git a/changelog/unreleased/kong/bump-resty-openssl-1.0.2.yml b/changelog/unreleased/kong/bump-resty-openssl-1.0.2.yml deleted file mode 100644 index 05ba386d7076..000000000000 --- a/changelog/unreleased/kong/bump-resty-openssl-1.0.2.yml +++ /dev/null @@ -1,3 +0,0 @@ -message: Bump resty-openssl from 0.8.25 to 1.0.2 -type: dependency -scope: Core diff --git a/changelog/unreleased/kong/bump-resty-openssl.yml b/changelog/unreleased/kong/bump-resty-openssl.yml new file mode 100644 index 000000000000..4d682ab6735d --- /dev/null +++ b/changelog/unreleased/kong/bump-resty-openssl.yml @@ -0,0 +1,3 @@ +message: Bump resty-openssl from 0.8.25 to 1.2.0 +type: dependency +scope: Core diff --git a/kong-3.6.0-0.rockspec b/kong-3.6.0-0.rockspec index 127ec878673c..3b0e10e449db 100644 --- a/kong-3.6.0-0.rockspec +++ b/kong-3.6.0-0.rockspec @@ -34,7 +34,7 @@ dependencies = { "lua-resty-healthcheck == 3.0.1", "lua-messagepack == 0.5.4", "lua-resty-aws == 1.3.5", - "lua-resty-openssl == 1.0.2", + "lua-resty-openssl == 1.2.0", "lua-resty-counter == 0.2.1", "lua-resty-ipmatcher == 0.6.1", "lua-resty-acme == 0.12.0", From 8c2b5a4e7a35da88bbbd5b78507b8a292597d420 Mon Sep 17 00:00:00 2001 From: Chrono Date: Thu, 28 Dec 2023 16:49:25 +0800 Subject: [PATCH 233/249] perf(router): reuse ATC context in router match instead of creating a new context (#12258) To avoid frequent memory allocation/deallocations. 
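In practice the change amounts to moving the `resty.router` context from a per-request allocation to a per-router-instance member that is reset before each match. Below is a condensed sketch of that pattern, using only the calls visible in the diff that follows (`context.new`, `context:reset`, `router:execute` and the renamed `fill_atc_context` helper); it is an outline of the approach under those assumptions, not a drop-in replacement for `kong/router/atc.lua`.

local context = require("resty.router.context")
local fields  = require("kong.router.fields")

-- before: local c = context.new(schema)  -- allocated on every request

-- after: allocate once when the router instance is built ...
local function new_instance(cached_schema, inst)
  return { context = context.new(cached_schema), router = inst }
end

-- ... then reuse it on every match
local function matching(self, field_list, params)
  self.context:reset()  -- clear the previous request's values, keep the allocation
  local c, err = fields.fill_atc_context(self.context, field_list, params)
  if not c then
    return nil, err
  end
  return self.router:execute(c)
end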
KAG-3448 --- .requirements | 2 +- changelog/unreleased/kong/atc_reuse_context.yml | 3 +++ changelog/unreleased/kong/bump-atc-router.yml | 2 +- kong/router/atc.lua | 13 +++++++++---- kong/router/fields.lua | 7 +++---- 5 files changed, 17 insertions(+), 10 deletions(-) create mode 100644 changelog/unreleased/kong/atc_reuse_context.yml diff --git a/.requirements b/.requirements index 8ac77a2cae12..d834d859bd97 100644 --- a/.requirements +++ b/.requirements @@ -10,7 +10,7 @@ LUA_KONG_NGINX_MODULE=4fbc3ddc7dcbc706ed286b95344f3cb6da17e637 # 0.8.0 LUA_RESTY_LMDB=19a6da0616db43baf8197dace59e64244430b3c4 # 1.4.1 LUA_RESTY_EVENTS=8448a92cec36ac04ea522e78f6496ba03c9b1fd8 # 0.2.0 LUA_RESTY_WEBSOCKET=60eafc3d7153bceb16e6327074e0afc3d94b1316 # 0.4.0 -ATC_ROUTER=95f1d8fe10b244bba5b354e5ed3726442373325e # 1.4.0 +ATC_ROUTER=ac71b24ea5556b38b0f9903850ed666c36ad7843 # 1.4.1 KONG_MANAGER=nightly NGX_WASM_MODULE=b9037acf7fa2d6f9ff02898bfc05544a1edc1fad diff --git a/changelog/unreleased/kong/atc_reuse_context.yml b/changelog/unreleased/kong/atc_reuse_context.yml new file mode 100644 index 000000000000..3af76d0a2d72 --- /dev/null +++ b/changelog/unreleased/kong/atc_reuse_context.yml @@ -0,0 +1,3 @@ +message: "Reuse match copntext between requests to avoid frequent memory allocation/deallocation" +type: performance +scope: Core diff --git a/changelog/unreleased/kong/bump-atc-router.yml b/changelog/unreleased/kong/bump-atc-router.yml index 1696ebc9d3f3..2013fd9dda69 100644 --- a/changelog/unreleased/kong/bump-atc-router.yml +++ b/changelog/unreleased/kong/bump-atc-router.yml @@ -1,3 +1,3 @@ -message: Bumped atc-router from 1.2.0 to 1.4.0 +message: Bumped atc-router from 1.2.0 to 1.4.1 type: dependency scope: Core diff --git a/kong/router/atc.lua b/kong/router/atc.lua index 6d2d32afed85..f05053f8eb0b 100644 --- a/kong/router/atc.lua +++ b/kong/router/atc.lua @@ -4,6 +4,7 @@ local _MT = { __index = _M, } local buffer = require("string.buffer") local schema = require("resty.router.schema") +local context = require("resty.router.context") local router = require("resty.router.router") local lrucache = require("resty.lrucache") local tb_new = require("table.new") @@ -35,7 +36,7 @@ local check_select_params = utils.check_select_params local get_service_info = utils.get_service_info local route_match_stat = utils.route_match_stat local get_cache_key = fields.get_cache_key -local get_atc_context = fields.get_atc_context +local fill_atc_context = fields.fill_atc_context local DEFAULT_MATCH_LRUCACHE_SIZE = utils.DEFAULT_MATCH_LRUCACHE_SIZE @@ -223,7 +224,7 @@ local function new_from_scratch(routes, get_exp_and_priority) local fields = inst:get_fields() return setmetatable({ - schema = CACHED_SCHEMA, + context = context.new(CACHED_SCHEMA), router = inst, routes = routes_t, services = services_t, @@ -412,7 +413,9 @@ function _M:matching(params) params.host = host params.port = port - local c, err = get_atc_context(self.schema, self.fields, params) + self.context:reset() + + local c, err = fill_atc_context(self.context, self.fields, params) if not c then return nil, err @@ -552,7 +555,9 @@ function _M:matching(params) params.dst_ip, params.dst_port, sni) - local c, err = get_atc_context(self.schema, self.fields, params) + self.context:reset() + + local c, err = fill_atc_context(self.context, self.fields, params) if not c then return nil, err end diff --git a/kong/router/fields.lua b/kong/router/fields.lua index 11e2a09fe959..a33b27c8fcd5 100644 --- a/kong/router/fields.lua +++ b/kong/router/fields.lua @@ -1,5 +1,4 @@ local 
buffer = require("string.buffer") -local context = require("resty.router.context") local type = type @@ -288,8 +287,8 @@ local function get_cache_key(fields, params, ctx) end -local function get_atc_context(schema, fields, params) - local c = context.new(schema) +local function fill_atc_context(context, fields, params) + local c = context local res, err = fields_visitor(fields, params, nil, function(field, value) @@ -354,7 +353,7 @@ end return { get_cache_key = get_cache_key, - get_atc_context = get_atc_context, + fill_atc_context = fill_atc_context, _set_ngx = _set_ngx, } From 86f0a6cfcbd477c0a51243c330bdd47a2abff2fc Mon Sep 17 00:00:00 2001 From: Chrono Date: Fri, 29 Dec 2023 14:57:53 +0800 Subject: [PATCH 234/249] fix(router): add missing `preserve_host` logic in stream subsystem (#12261) KAG-3032 --- kong/router/atc.lua | 5 +++++ kong/router/fields.lua | 21 ++++++++++++------- .../01-helpers/01-helpers_spec.lua | 1 + .../05-proxy/02-router_spec.lua | 10 +++++---- .../05-proxy/03-upstream_headers_spec.lua | 1 + .../05-proxy/14-server_tokens_spec.lua | 1 + spec/03-plugins/07-loggly/01-log_spec.lua | 1 + .../25-oauth2/04-invalidations_spec.lua | 1 + .../31-proxy-cache/02-access_spec.lua | 1 + 9 files changed, 31 insertions(+), 11 deletions(-) diff --git a/kong/router/atc.lua b/kong/router/atc.lua index f05053f8eb0b..16caac44f559 100644 --- a/kong/router/atc.lua +++ b/kong/router/atc.lua @@ -655,6 +655,11 @@ function _M:exec(ctx) else route_match_stat(ctx, "pos") + + -- preserve_host logic, modify cache result + if match_t.route.preserve_host then + match_t.upstream_host = fields.get_value("tls.sni", CACHE_PARAMS) + end end return match_t diff --git a/kong/router/fields.lua b/kong/router/fields.lua index a33b27c8fcd5..59d4cee86ec4 100644 --- a/kong/router/fields.lua +++ b/kong/router/fields.lua @@ -218,15 +218,20 @@ if is_http then end -- is_http -local function fields_visitor(fields, params, ctx, cb) - for _, field in ipairs(fields) do - local func = FIELDS_FUNCS[field] +local function get_value(field, params, ctx) + local func = FIELDS_FUNCS[field] + + if not func then -- unknown field + error("unknown router matching schema field: " .. field) + end -- if func + + return func(params, ctx) +end - if not func then -- unknown field - error("unknown router matching schema field: " .. 
field) - end -- if func - local value = func(params, ctx) +local function fields_visitor(fields, params, ctx, cb) + for _, field in ipairs(fields) do + local value = get_value(field, params, ctx) local res, err = cb(field, value) if not res then @@ -352,6 +357,8 @@ end return { + get_value = get_value, + get_cache_key = get_cache_key, fill_atc_context = fill_atc_context, diff --git a/spec/02-integration/01-helpers/01-helpers_spec.lua b/spec/02-integration/01-helpers/01-helpers_spec.lua index fa00dbd313aa..c4e383ffd236 100644 --- a/spec/02-integration/01-helpers/01-helpers_spec.lua +++ b/spec/02-integration/01-helpers/01-helpers_spec.lua @@ -26,6 +26,7 @@ for _, strategy in helpers.each_strategy() do bp.routes:insert { hosts = { "mock_upstream" }, protocols = { "http" }, + paths = { "/" }, service = service } diff --git a/spec/02-integration/05-proxy/02-router_spec.lua b/spec/02-integration/05-proxy/02-router_spec.lua index 855e64ebfe9d..26ba41a46176 100644 --- a/spec/02-integration/05-proxy/02-router_spec.lua +++ b/spec/02-integration/05-proxy/02-router_spec.lua @@ -877,15 +877,16 @@ for _, strategy in helpers.each_strategy() do describe("URI arguments (querystring)", function() local routes - before_each(function() + lazy_setup(function() routes = insert_routes(bp, { { hosts = { "mock_upstream" }, + paths = { "/" }, }, }) end) - after_each(function() + lazy_teardown(function() remove_routes(strategy, routes) end) @@ -1301,6 +1302,7 @@ for _, strategy in helpers.each_strategy() do routes = insert_routes(bp, { { protocols = { "https" }, + paths = { "/" }, snis = { "www.example.org" }, service = { name = "service_behind_www.example.org" @@ -1343,7 +1345,7 @@ for _, strategy in helpers.each_strategy() do path = "/status/201", headers = { ["kong-debug"] = 1 }, }) - assert.res_status(flavor == "traditional" and 201 or 200, res) + assert.res_status(201, res) assert.equal("service_behind_www.example.org", res.headers["kong-service-name"]) @@ -1365,7 +1367,7 @@ for _, strategy in helpers.each_strategy() do path = "/status/201", headers = { ["kong-debug"] = 1 }, }) - assert.res_status(flavor == "traditional" and 201 or 200, res) + assert.res_status(201, res) assert.equal("service_behind_example.org", res.headers["kong-service-name"]) end) diff --git a/spec/02-integration/05-proxy/03-upstream_headers_spec.lua b/spec/02-integration/05-proxy/03-upstream_headers_spec.lua index 3132d0a6bfd0..c78203d3b5f5 100644 --- a/spec/02-integration/05-proxy/03-upstream_headers_spec.lua +++ b/spec/02-integration/05-proxy/03-upstream_headers_spec.lua @@ -278,6 +278,7 @@ for _, strategy in helpers.each_strategy() do assert(bp.routes:insert { hosts = { "headers-charset.test" }, + paths = { "/" }, service = service, }) diff --git a/spec/02-integration/05-proxy/14-server_tokens_spec.lua b/spec/02-integration/05-proxy/14-server_tokens_spec.lua index 6cee745a1354..3de5077db9dd 100644 --- a/spec/02-integration/05-proxy/14-server_tokens_spec.lua +++ b/spec/02-integration/05-proxy/14-server_tokens_spec.lua @@ -291,6 +291,7 @@ describe("headers [#" .. strategy .. 
"]", function() return function() bp.routes:insert { hosts = { "headers-inspect.test" }, + paths = { "/" }, } local service = bp.services:insert({ diff --git a/spec/03-plugins/07-loggly/01-log_spec.lua b/spec/03-plugins/07-loggly/01-log_spec.lua index dd5e35a0199d..4987cbb1d9ab 100644 --- a/spec/03-plugins/07-loggly/01-log_spec.lua +++ b/spec/03-plugins/07-loggly/01-log_spec.lua @@ -19,6 +19,7 @@ for _, strategy in helpers.each_strategy() do local route1 = bp.routes:insert { hosts = { "logging.test" }, + paths = { "/" }, } local route2 = bp.routes:insert { diff --git a/spec/03-plugins/25-oauth2/04-invalidations_spec.lua b/spec/03-plugins/25-oauth2/04-invalidations_spec.lua index 90f7b25bf858..18218b6cfdb6 100644 --- a/spec/03-plugins/25-oauth2/04-invalidations_spec.lua +++ b/spec/03-plugins/25-oauth2/04-invalidations_spec.lua @@ -43,6 +43,7 @@ for _, strategy in helpers.each_strategy() do route = assert(admin_api.routes:insert { hosts = { "oauth2.com" }, protocols = { "http", "https" }, + paths = { "/" }, service = service, }) diff --git a/spec/03-plugins/31-proxy-cache/02-access_spec.lua b/spec/03-plugins/31-proxy-cache/02-access_spec.lua index aa8b350773d7..67e026d9e326 100644 --- a/spec/03-plugins/31-proxy-cache/02-access_spec.lua +++ b/spec/03-plugins/31-proxy-cache/02-access_spec.lua @@ -38,6 +38,7 @@ do local route1 = assert(bp.routes:insert { hosts = { "route-1.test" }, + paths = { "/" }, }) local route2 = assert(bp.routes:insert { hosts = { "route-2.test" }, From e804fd4b10a78df58c758831347cdc5006ff4b0f Mon Sep 17 00:00:00 2001 From: chronolaw Date: Fri, 29 Dec 2023 10:47:51 +0800 Subject: [PATCH 235/249] chore(actions): revert dynamic test scheduler (#12180) Due to false green observed on `master`. --- .ci/run_tests.sh | 154 +++++++++++ .ci/test_suites.json | 34 --- .github/workflows/build_and_test.yml | 240 +++++++++++------- .../update-test-runtime-statistics.yml | 35 --- spec/busted-ci-helper.lua | 59 ----- spec/busted-log-failed.lua | 33 +++ 6 files changed, 334 insertions(+), 221 deletions(-) create mode 100755 .ci/run_tests.sh delete mode 100644 .ci/test_suites.json delete mode 100644 .github/workflows/update-test-runtime-statistics.yml delete mode 100644 spec/busted-ci-helper.lua create mode 100644 spec/busted-log-failed.lua diff --git a/.ci/run_tests.sh b/.ci/run_tests.sh new file mode 100755 index 000000000000..447936f73ff6 --- /dev/null +++ b/.ci/run_tests.sh @@ -0,0 +1,154 @@ +#!/usr/bin/env bash +set -e + +function cyan() { + echo -e "\033[1;36m$*\033[0m" +} + +function red() { + echo -e "\033[1;31m$*\033[0m" +} + +function get_failed { + if [ ! -z "$FAILED_TEST_FILES_FILE" -a -f "$FAILED_TEST_FILES_FILE" ] + then + cat < $FAILED_TEST_FILES_FILE + else + echo "$@" + fi +} + +BUSTED_ARGS="--keep-going -o htest -v --exclude-tags=flaky,ipv6" +if [ ! 
-z "$FAILED_TEST_FILES_FILE" ] +then + BUSTED_ARGS="--helper=spec/busted-log-failed.lua $BUSTED_ARGS" +fi + +if [ "$KONG_TEST_DATABASE" == "postgres" ]; then + export TEST_CMD="bin/busted $BUSTED_ARGS,off" + + psql -v ON_ERROR_STOP=1 -h localhost --username "$KONG_TEST_PG_USER" <<-EOSQL + CREATE user ${KONG_TEST_PG_USER}_ro; + GRANT CONNECT ON DATABASE $KONG_TEST_PG_DATABASE TO ${KONG_TEST_PG_USER}_ro; + \c $KONG_TEST_PG_DATABASE; + GRANT USAGE ON SCHEMA public TO ${KONG_TEST_PG_USER}_ro; + ALTER DEFAULT PRIVILEGES FOR ROLE $KONG_TEST_PG_USER IN SCHEMA public GRANT SELECT ON TABLES TO ${KONG_TEST_PG_USER}_ro; +EOSQL + +elif [ "$KONG_TEST_DATABASE" == "cassandra" ]; then + echo "Cassandra is no longer supported" + exit 1 + +else + export TEST_CMD="bin/busted $BUSTED_ARGS,postgres,db" +fi + +if [ "$TEST_SUITE" == "integration" ]; then + if [[ "$TEST_SPLIT" == first* ]]; then + # GitHub Actions, run first batch of integration tests + files=$(ls -d spec/02-integration/* | sort | grep -v 05-proxy) + files=$(get_failed $files) + eval "$TEST_CMD" $files + + elif [[ "$TEST_SPLIT" == second* ]]; then + # GitHub Actions, run second batch of integration tests + # Note that the split here is chosen carefully to result + # in a similar run time between the two batches, and should + # be adjusted if imbalance become significant in the future + files=$(ls -d spec/02-integration/* | sort | grep 05-proxy) + files=$(get_failed $files) + eval "$TEST_CMD" $files + + else + # Non GitHub Actions + eval "$TEST_CMD" $(get_failed spec/02-integration/) + fi +fi + +if [ "$TEST_SUITE" == "dbless" ]; then + eval "$TEST_CMD" $(get_failed spec/02-integration/02-cmd \ + spec/02-integration/05-proxy \ + spec/02-integration/04-admin_api/02-kong_routes_spec.lua \ + spec/02-integration/04-admin_api/15-off_spec.lua \ + spec/02-integration/08-status_api/01-core_routes_spec.lua \ + spec/02-integration/08-status_api/03-readiness_endpoint_spec.lua \ + spec/02-integration/11-dbless \ + spec/02-integration/20-wasm) +fi +if [ "$TEST_SUITE" == "plugins" ]; then + set +ex + rm -f .failed + + if [[ "$TEST_SPLIT" == first* ]]; then + # GitHub Actions, run first batch of plugin tests + PLUGINS=$(get_failed $(ls -d spec/03-plugins/* | head -n22)) + + elif [[ "$TEST_SPLIT" == second* ]]; then + # GitHub Actions, run second batch of plugin tests + # Note that the split here is chosen carefully to result + # in a similar run time between the two batches, and should + # be adjusted if imbalance become significant in the future + PLUGINS=$(get_failed $(ls -d spec/03-plugins/* | tail -n+23)) + + else + # Non GitHub Actions + PLUGINS=$(get_failed $(ls -d spec/03-plugins/*)) + fi + + for p in $PLUGINS; do + echo + cyan "--------------------------------------" + cyan $(basename $p) + cyan "--------------------------------------" + echo + + $TEST_CMD $p || echo "* $p" >> .failed + done + + if [[ "$TEST_SPLIT" != first* ]]; then + cat kong-*.rockspec | grep kong- | grep -v zipkin | grep -v sidecar | grep "~" | grep -v kong-prometheus-plugin | while read line ; do + REPOSITORY=`echo $line | sed "s/\"/ /g" | awk -F" " '{print $1}'` + VERSION=`luarocks show $REPOSITORY | grep $REPOSITORY | head -1 | awk -F" " '{print $2}' | cut -f1 -d"-"` + REPOSITORY=`echo $REPOSITORY | sed -e 's/kong-prometheus-plugin/kong-plugin-prometheus/g'` + REPOSITORY=`echo $REPOSITORY | sed -e 's/kong-proxy-cache-plugin/kong-plugin-proxy-cache/g'` + + echo + cyan "--------------------------------------" + cyan $REPOSITORY $VERSION + cyan 
"--------------------------------------" + echo + + git clone https://github.com/Kong/$REPOSITORY.git --branch $VERSION --single-branch /tmp/test-$REPOSITORY || \ + git clone https://github.com/Kong/$REPOSITORY.git --branch v$VERSION --single-branch /tmp/test-$REPOSITORY + sed -i 's/grpcbin:9000/localhost:15002/g' /tmp/test-$REPOSITORY/spec/*.lua + sed -i 's/grpcbin:9001/localhost:15003/g' /tmp/test-$REPOSITORY/spec/*.lua + cp -R /tmp/test-$REPOSITORY/spec/fixtures/* spec/fixtures/ || true + pushd /tmp/test-$REPOSITORY + luarocks make + popd + + $TEST_CMD /tmp/test-$REPOSITORY/spec/ || echo "* $REPOSITORY" >> .failed + + done + fi + + if [ -f .failed ]; then + echo + red "--------------------------------------" + red "Plugin tests failed:" + red "--------------------------------------" + cat .failed + exit 1 + else + exit 0 + fi +fi +if [ "$TEST_SUITE" == "pdk" ]; then + prove -I. -r t +fi +if [ "$TEST_SUITE" == "unit" ]; then + unset KONG_TEST_NGINX_USER KONG_PG_PASSWORD KONG_TEST_PG_PASSWORD + scripts/autodoc + bin/busted -v -o htest spec/01-unit + make lint +fi diff --git a/.ci/test_suites.json b/.ci/test_suites.json deleted file mode 100644 index eb6b15e5909e..000000000000 --- a/.ci/test_suites.json +++ /dev/null @@ -1,34 +0,0 @@ -[ - { - "name": "unit", - "exclude_tags": "flaky,ipv6", - "specs": ["spec/01-unit/"] - }, - { - "name": "integration", - "exclude_tags": "flaky,ipv6,off", - "environment": { - "KONG_TEST_DATABASE": "postgres" - }, - "specs": ["spec/02-integration/"] - }, - { - "name": "dbless", - "exclude_tags": "flaky,ipv6,postgres,db", - "specs": [ - "spec/02-integration/02-cmd/", - "spec/02-integration/05-proxy/", - "spec/02-integration/04-admin_api/02-kong_routes_spec.lua", - "spec/02-integration/04-admin_api/15-off_spec.lua", - "spec/02-integration/08-status_api/01-core_routes_spec.lua", - "spec/02-integration/08-status_api/03-readiness_endpoint_spec.lua", - "spec/02-integration/11-dbless/", - "spec/02-integration/20-wasm/" - ] - }, - { - "name": "plugins", - "exclude_tags": "flaky,ipv6", - "specs": ["spec/03-plugins/"] - } -] diff --git a/.github/workflows/build_and_test.yml b/.github/workflows/build_and_test.yml index 0aee08aa20bb..e9c6675240ce 100644 --- a/.github/workflows/build_and_test.yml +++ b/.github/workflows/build_and_test.yml @@ -33,7 +33,6 @@ concurrency: env: BUILD_ROOT: ${{ github.workspace }}/bazel-bin/build KONG_TEST_COVERAGE: ${{ inputs.coverage == true || github.event_name == 'schedule' }} - RUNNER_COUNT: 7 jobs: build: @@ -41,11 +40,22 @@ jobs: with: relative-build-root: bazel-bin/build - lint-and-doc-tests: - name: Lint and Doc tests + lint-doc-and-unit-tests: + name: Lint, Doc and Unit tests runs-on: ubuntu-22.04 needs: build + services: + postgres: + image: postgres:13 + env: + POSTGRES_USER: kong + POSTGRES_DB: kong + POSTGRES_HOST_AUTH_METHOD: trust + ports: + - 5432:5432 + options: --health-cmd pg_isready --health-interval 5s --health-timeout 5s --health-retries 8 + steps: - name: Checkout Kong source code uses: actions/checkout@v4 @@ -83,56 +93,41 @@ jobs: - name: Check labeler configuration run: scripts/check-labeler.pl .github/labeler.yml - schedule: - name: Schedule busted tests to run - runs-on: ubuntu-22.04 - needs: build - - env: - WORKFLOW_ID: ${{ github.run_id }} - - outputs: - runners: ${{ steps.generate-runner-array.outputs.RUNNERS }} - - steps: - - name: Checkout source code - uses: actions/checkout@v4 - - - name: Download runtimes file - uses: Kong/gh-storage/download@v1 - with: - repo-path: 
Kong/gateway-action-storage/main/.ci/runtimes.json - - - name: Schedule tests - uses: Kong/gateway-test-scheduler/schedule@b91bd7aec42bd13748652929f087be81d1d40843 # v1 - with: - test-suites-file: .ci/test_suites.json - test-file-runtime-file: .ci/runtimes.json - output-prefix: test-chunk. - runner-count: ${{ env.RUNNER_COUNT }} + - name: Unit tests + env: + KONG_TEST_PG_DATABASE: kong + KONG_TEST_PG_USER: kong + run: | + source ${{ env.BUILD_ROOT }}/kong-dev-venv.sh + TEST_CMD="bin/busted -v -o htest spec/01-unit" + if [[ $KONG_TEST_COVERAGE = true ]]; then + TEST_CMD="$TEST_CMD --coverage" + fi + $TEST_CMD - - name: Upload schedule files + - name: Archive coverage stats file uses: actions/upload-artifact@v4 - continue-on-error: true + if: ${{ always() && (inputs.coverage == true || github.event_name == 'schedule') }} with: - name: schedule-test-files - path: test-chunk.* - retention-days: 7 + name: luacov-stats-out-${{ github.job }}-${{ github.run_id }} + retention-days: 1 + path: | + luacov.stats.out - - name: Generate runner array - id: generate-runner-array + - name: Get kernel message + if: failure() run: | - echo "RUNNERS=[$(echo $(seq 1 $(( $RUNNER_COUNT ))))]" | sed -e 's/ /, /g' >> $GITHUB_OUTPUT + sudo dmesg -T - busted-tests: - name: Busted test runner ${{ matrix.runner }} + integration-tests-postgres: + name: Postgres ${{ matrix.suite }} - ${{ matrix.split }} tests runs-on: ubuntu-22.04 - needs: [build,schedule] - + needs: build strategy: fail-fast: false matrix: - runner: ${{ fromJSON(needs.schedule.outputs.runners) }} + suite: [integration, plugins] + split: [first, second] services: postgres: @@ -184,6 +179,7 @@ jobs: echo "127.0.0.1 grpcs_2.test" | sudo tee -a /etc/hosts - name: Enable SSL for Redis + if: ${{ matrix.suite == 'plugins' }} run: | docker cp ${{ github.workspace }} kong_redis:/workspace docker cp ${{ github.workspace }}/spec/fixtures/redis/docker-entrypoint.sh kong_redis:/usr/local/bin/docker-entrypoint.sh @@ -206,53 +202,47 @@ jobs: docker logs opentelemetry-collector - name: Install AWS SAM cli tool + if: ${{ matrix.suite == 'plugins' }} run: | curl -L -s -o /tmp/aws-sam-cli.zip https://github.com/aws/aws-sam-cli/releases/latest/download/aws-sam-cli-linux-x86_64.zip unzip -o /tmp/aws-sam-cli.zip -d /tmp/aws-sam-cli sudo /tmp/aws-sam-cli/install --update - - name: Create kong_ro user in Postgres + - name: Update PATH + run: | + echo "$BUILD_ROOT/kong-dev/bin" >> $GITHUB_PATH + echo "$BUILD_ROOT/kong-dev/openresty/nginx/sbin" >> $GITHUB_PATH + echo "$BUILD_ROOT/kong-dev/openresty/bin" >> $GITHUB_PATH + + - name: Debug (nginx) + run: | + echo nginx: $(which nginx) + nginx -V 2>&1 | sed -re 's/ --/\n--/g' + ldd $(which nginx) + + - name: Debug (luarocks) run: | - psql -v ON_ERROR_STOP=1 -h localhost --username kong <<\EOD - CREATE user kong_ro; - GRANT CONNECT ON DATABASE kong TO kong_ro; - \c kong; - GRANT USAGE ON SCHEMA public TO kong_ro; - ALTER DEFAULT PRIVILEGES FOR ROLE kong IN SCHEMA public GRANT SELECT ON TABLES TO kong_ro; - EOD + echo luarocks: $(which luarocks) + luarocks --version + luarocks config - name: Tune up postgres max_connections run: | # arm64 runners may use more connections due to more worker cores psql -hlocalhost -Ukong kong -tAc 'alter system set max_connections = 5000;' - - name: Download test schedule file - uses: actions/download-artifact@v4 - with: - name: schedule-test-files - - - name: Generate helper environment variables + - name: Generate test rerun filename run: | - echo FAILED_TEST_FILES_FILE=failed-tests.json >> 
$GITHUB_ENV - echo TEST_FILE_RUNTIME_FILE=test-runtime.json >> $GITHUB_ENV + echo FAILED_TEST_FILES_FILE=$(echo '${{ github.run_id }}-${{ matrix.suite }}-${{ matrix.split }}' | tr A-Z a-z | sed -Ee 's/[^a-z0-9]+/-/g').txt >> $GITHUB_ENV - - name: Build & install dependencies - run: | - make dev - name: Download test rerun information uses: actions/download-artifact@v4 continue-on-error: true with: - name: test-rerun-info-${{ matrix.runner }} - - - name: Download test runtime statistics from previous runs - uses: actions/download-artifact@v4 - continue-on-error: true - with: - name: test-runtime-statistics-${{ matrix.runner }} + name: ${{ env.FAILED_TEST_FILES_FILE }} - - name: Run Tests + - name: Tests env: KONG_TEST_PG_DATABASE: kong KONG_TEST_PG_USER: kong @@ -260,44 +250,108 @@ jobs: KONG_SPEC_TEST_GRPCBIN_PORT: "15002" KONG_SPEC_TEST_GRPCBIN_SSL_PORT: "15003" KONG_SPEC_TEST_OTELCOL_FILE_EXPORTER_PATH: ${{ github.workspace }}/tmp/otel/file_exporter.json - DD_ENV: ci - DD_SERVICE: kong-ce-ci - DD_CIVISIBILITY_MANUAL_API_ENABLED: 1 - DD_CIVISIBILITY_AGENTLESS_ENABLED: true - DD_TRACE_GIT_METADATA_ENABLED: true - DD_API_KEY: ${{ secrets.DATADOG_API_KEY }} - uses: Kong/gateway-test-scheduler/runner@b91bd7aec42bd13748652929f087be81d1d40843 # v1 - with: - tests-to-run-file: test-chunk.${{ matrix.runner }}.json - failed-test-files-file: ${{ env.FAILED_TEST_FILES_FILE }} - test-file-runtime-file: ${{ env.TEST_FILE_RUNTIME_FILE }} - setup-venv: . ${{ env.BUILD_ROOT }}/kong-dev-venv.sh + TEST_SUITE: ${{ matrix.suite }} + TEST_SPLIT: ${{ matrix.split }} + run: | + make dev # required to install other dependencies like bin/grpcurl + source ${{ env.BUILD_ROOT }}/kong-dev-venv.sh + .ci/run_tests.sh - name: Upload test rerun information if: always() uses: actions/upload-artifact@v4 with: - name: test-rerun-info-${{ matrix.runner }} + name: ${{ env.FAILED_TEST_FILES_FILE }} path: ${{ env.FAILED_TEST_FILES_FILE }} retention-days: 2 - - name: Upload test runtime statistics for offline scheduling - if: always() + - name: Archive coverage stats file uses: actions/upload-artifact@v4 + if: ${{ always() && (inputs.coverage == true || github.event_name == 'schedule') }} + with: + name: luacov-stats-out-${{ github.job }}-${{ github.run_id }}-${{ matrix.suite }}-${{ contains(matrix.split, 'first') && '1' || '2' }} + retention-days: 1 + path: | + luacov.stats.out + + - name: Get kernel message + if: failure() + run: | + sudo dmesg -T + + integration-tests-dbless: + name: DB-less integration tests + runs-on: ubuntu-22.04 + needs: build + + services: + grpcbin: + image: kong/grpcbin + ports: + - 15002:9000 + - 15003:9001 + + steps: + - name: Checkout Kong source code + uses: actions/checkout@v4 + + - name: Lookup build cache + id: cache-deps + uses: actions/cache@v3 with: - name: test-runtime-statistics-${{ matrix.runner }} - path: ${{ env.TEST_FILE_RUNTIME_FILE }} - retention-days: 7 + path: ${{ env.BUILD_ROOT }} + key: ${{ needs.build.outputs.cache-key }} + + - name: Build WASM Test Filters + uses: ./.github/actions/build-wasm-test-filters + + - name: Add gRPC test host names + run: | + echo "127.0.0.1 grpcs_1.test" | sudo tee -a /etc/hosts + echo "127.0.0.1 grpcs_2.test" | sudo tee -a /etc/hosts + + - name: Run OpenTelemetry Collector + run: | + mkdir -p ${{ github.workspace }}/tmp/otel + touch ${{ github.workspace }}/tmp/otel/file_exporter.json + sudo chmod 777 -R ${{ github.workspace }}/tmp/otel + docker run -p 4317:4317 -p 4318:4318 -p 55679:55679 \ + -v ${{ github.workspace 
}}/spec/fixtures/opentelemetry/otelcol.yaml:/etc/otel-collector-config.yaml \ + -v ${{ github.workspace }}/tmp/otel:/etc/otel \ + --name opentelemetry-collector -d \ + otel/opentelemetry-collector-contrib:0.52.0 \ + --config=/etc/otel-collector-config.yaml + sleep 2 + docker logs opentelemetry-collector + + - name: Tests + env: + KONG_TEST_PG_DATABASE: kong + KONG_TEST_PG_USER: kong + KONG_TEST_DATABASE: 'off' + KONG_SPEC_TEST_GRPCBIN_PORT: "15002" + KONG_SPEC_TEST_GRPCBIN_SSL_PORT: "15003" + KONG_SPEC_TEST_OTELCOL_FILE_EXPORTER_PATH: ${{ github.workspace }}/tmp/otel/file_exporter.json + TEST_SUITE: dbless + run: | + make dev # required to install other dependencies like bin/grpcurl + source ${{ env.BUILD_ROOT }}/kong-dev-venv.sh + .ci/run_tests.sh - name: Archive coverage stats file uses: actions/upload-artifact@v4 if: ${{ always() && (inputs.coverage == true || github.event_name == 'schedule') }} with: - name: luacov-stats-out-${{ github.job }}-${{ github.run_id }}-${{ matrix.runner }} + name: luacov-stats-out-${{ github.job }}-${{ github.run_id }} retention-days: 1 path: | luacov.stats.out + - name: Get kernel message + if: failure() + run: | + sudo dmesg -T + pdk-tests: name: PDK tests runs-on: ubuntu-22.04 @@ -334,7 +388,7 @@ jobs: export PDK_LUACOV=1 fi eval $(perl -I $HOME/perl5/lib/perl5/ -Mlocal::lib) - prove -I. -r t + .ci/run_tests.sh - name: Archive coverage stats file uses: actions/upload-artifact@v4 @@ -350,9 +404,9 @@ jobs: run: | sudo dmesg -T - cleanup-and-aggregate-stats: - needs: [lint-and-doc-tests,pdk-tests,busted-tests] - name: Cleanup and Luacov stats aggregator + aggregator: + needs: [lint-doc-and-unit-tests,pdk-tests,integration-tests-postgres,integration-tests-dbless] + name: Luacov stats aggregator if: ${{ always() && (inputs.coverage == true || github.event_name == 'schedule') }} runs-on: ubuntu-22.04 diff --git a/.github/workflows/update-test-runtime-statistics.yml b/.github/workflows/update-test-runtime-statistics.yml deleted file mode 100644 index 43e4017a518a..000000000000 --- a/.github/workflows/update-test-runtime-statistics.yml +++ /dev/null @@ -1,35 +0,0 @@ -name: Update test runtime statistics file for test scheduling -on: - workflow_dispatch: - schedule: - - cron: "1 0 * * SAT" - # push rule below needed for testing only - push: - branches: - - feat/test-run-scheduler - -jobs: - process-statistics: - name: Download statistics from GitHub and combine them - runs-on: ubuntu-22.04 - steps: - - name: Checkout source code - uses: actions/checkout@v4 - with: - token: ${{ secrets.PAT }} - - - name: Process statistics - uses: Kong/gateway-test-scheduler/analyze@b91bd7aec42bd13748652929f087be81d1d40843 # v1 - env: - GITHUB_TOKEN: ${{ secrets.PAT }} - with: - workflow-name: build_and_test.yml - test-file-runtime-file: .ci/runtimes.json - artifact-name-regexp: "^test-runtime-statistics-\\d+$" - - - name: Upload new runtimes file - uses: Kong/gh-storage/upload@v1 - env: - GITHUB_TOKEN: ${{ secrets.PAT }} - with: - repo-path: Kong/gateway-action-storage/main/.ci/runtimes.json diff --git a/spec/busted-ci-helper.lua b/spec/busted-ci-helper.lua deleted file mode 100644 index ff85767086ff..000000000000 --- a/spec/busted-ci-helper.lua +++ /dev/null @@ -1,59 +0,0 @@ --- busted-log-failed.lua - --- Log which test files run by busted had failures or errors in a --- file. The file to use for logging is specified in the --- FAILED_TEST_FILES_FILE environment variable. This is used to --- reduce test rerun times for flaky tests. 
- -local busted = require 'busted' -local cjson = require 'cjson' -local socket_unix = require 'socket.unix' - -local busted_event_path = os.getenv("BUSTED_EVENT_PATH") - --- Function to recursively copy a table, skipping keys associated with functions -local function copyTable(original, copied) - copied = copied or {} - - for key, value in pairs(original) do - if type(value) == "table" then - copied[key] = copyTable(value, {}) - elseif type(value) ~= "function" then - copied[key] = value - end - end - - return copied -end - -if busted_event_path then - local sock = assert(socket_unix()) - assert(sock:connect(busted_event_path)) - - local events = {{ 'suite', 'reset' }, - { 'suite', 'start' }, - { 'suite', 'end' }, - { 'file', 'start' }, - { 'file', 'end' }, - { 'test', 'start' }, - { 'test', 'end' }, - { 'pending' }, - { 'failure', 'it' }, - { 'error', 'it' }, - { 'failure' }, - { 'error' }} - for _, event in ipairs(events) do - busted.subscribe(event, function (...) - local args = {} - for i, original in ipairs{...} do - if type(original) == "table" then - args[i] = copyTable(original) - elseif type(original) ~= "function" then - args[i] = original - end - end - - sock:send(cjson.encode({ event = event[1] .. (event[2] and ":" .. event[2] or ""), args = args }) .. "\n") - end) - end -end diff --git a/spec/busted-log-failed.lua b/spec/busted-log-failed.lua new file mode 100644 index 000000000000..7bfe6804b83f --- /dev/null +++ b/spec/busted-log-failed.lua @@ -0,0 +1,33 @@ +-- busted-log-failed.lua + +-- Log which test files run by busted had failures or errors in a +-- file. The file to use for logging is specified in the +-- FAILED_TEST_FILES_FILE environment variable. This is used to +-- reduce test rerun times for flaky tests. + +local busted = require 'busted' +local failed_files_file = assert(os.getenv("FAILED_TEST_FILES_FILE"), + "FAILED_TEST_FILES_FILE environment variable not set") + +local FAILED_FILES = {} + +busted.subscribe({ 'failure' }, function(element, parent, message, debug) + FAILED_FILES[element.trace.source] = true +end) + +busted.subscribe({ 'error' }, function(element, parent, message, debug) + FAILED_FILES[element.trace.source] = true +end) + +busted.subscribe({ 'suite', 'end' }, function(suite, count, total) + local output = assert(io.open(failed_files_file, "w")) + if next(FAILED_FILES) then + for failed_file in pairs(FAILED_FILES) do + if failed_file:sub(1, 1) == '@' then + failed_file = failed_file:sub(2) + end + assert(output:write(failed_file .. 
"\n")) + end + end + output:close() +end) From f49abd69c70eb719b53b84db21a1756743c089a6 Mon Sep 17 00:00:00 2001 From: chronolaw Date: Fri, 29 Dec 2023 12:07:56 +0800 Subject: [PATCH 236/249] tests(plugins): fix previous `master` test failures Fix `03-http-log/01-log_spec.lua` Fix `13-cors/01-access_spec.lua` Fix `spec/03-plugins/03-http-log/01-log_spec.lua` --- spec/03-plugins/03-http-log/01-log_spec.lua | 2 ++ spec/03-plugins/13-cors/01-access_spec.lua | 1 + 2 files changed, 3 insertions(+) diff --git a/spec/03-plugins/03-http-log/01-log_spec.lua b/spec/03-plugins/03-http-log/01-log_spec.lua index 55591eb85dde..4a69c9b221de 100644 --- a/spec/03-plugins/03-http-log/01-log_spec.lua +++ b/spec/03-plugins/03-http-log/01-log_spec.lua @@ -59,6 +59,7 @@ for _, strategy in helpers.each_strategy() do local route1 = bp.routes:insert { hosts = { "http_logging.test" }, + paths = { "/" }, service = service1 } @@ -627,6 +628,7 @@ for _, strategy in helpers.each_strategy() do local route = bp.routes:insert { hosts = { "http_queue_logging.test" }, + paths = { "/" }, service = service } diff --git a/spec/03-plugins/13-cors/01-access_spec.lua b/spec/03-plugins/13-cors/01-access_spec.lua index 7bba3a82ce88..42692a430893 100644 --- a/spec/03-plugins/13-cors/01-access_spec.lua +++ b/spec/03-plugins/13-cors/01-access_spec.lua @@ -237,6 +237,7 @@ for _, strategy in helpers.each_strategy() do local route1 = bp.routes:insert({ hosts = { "cors1.test" }, + paths = { "/" }, }) local route2 = bp.routes:insert({ From f002a5c74f8a53fcc52c5c53b3d21f304bdd0eca Mon Sep 17 00:00:00 2001 From: chronolaw Date: Fri, 29 Dec 2023 18:37:28 +0800 Subject: [PATCH 237/249] tests(admin-api): change OpenSSL error message to ones from the new version --- spec/02-integration/04-admin_api/15-off_spec.lua | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/spec/02-integration/04-admin_api/15-off_spec.lua b/spec/02-integration/04-admin_api/15-off_spec.lua index 54bb00e7e820..1f618e4cfec1 100644 --- a/spec/02-integration/04-admin_api/15-off_spec.lua +++ b/spec/02-integration/04-admin_api/15-off_spec.lua @@ -1752,7 +1752,7 @@ R6InCcH2Wh8wSeY5AuDXvu2tv9g/PW9wIJmPuKSHMA== entity_type = "certificate", errors = { { field = "cert", - message = "invalid certificate: x509.new: asn1/tasn_dec.c:349:error:0688010A:asn1 encoding routines::nested asn1 error", + message = "invalid certificate: x509.new: error:688010A:asn1 encoding routines:asn1_item_embed_d2i:nested asn1 error:asn1/tasn_dec.c:349:", type = "field" } } }, From a45112fd8325767b12c930ece8fcc70237c226c5 Mon Sep 17 00:00:00 2001 From: xumin Date: Fri, 29 Dec 2023 14:11:46 +0800 Subject: [PATCH 238/249] Revert "feat(templates): enable `status_listen` by default on localhost (#12254)" This reverts commit 1ab6ead0ee9759127d427334d644962e98a667bd. 
The CI did not alert because of the scheduler's bug --- changelog/unreleased/kong/default_status_port.yml.yml | 3 --- kong.conf.default | 3 +-- kong/templates/kong_defaults.lua | 2 +- 3 files changed, 2 insertions(+), 6 deletions(-) delete mode 100644 changelog/unreleased/kong/default_status_port.yml.yml diff --git a/changelog/unreleased/kong/default_status_port.yml.yml b/changelog/unreleased/kong/default_status_port.yml.yml deleted file mode 100644 index ec3c3a510de8..000000000000 --- a/changelog/unreleased/kong/default_status_port.yml.yml +++ /dev/null @@ -1,3 +0,0 @@ -message: Enable `status_listen` on `127.0.0.1:8007` by default -type: feature -scope: Admin API diff --git a/kong.conf.default b/kong.conf.default index 18c578403b49..6f1fe1f0844f 100644 --- a/kong.conf.default +++ b/kong.conf.default @@ -680,8 +680,7 @@ # # Example: `admin_listen = 127.0.0.1:8444 http2 ssl` -#status_listen = 127.0.0.1:8007 reuseport backlog=16384 - # Comma-separated list of addresses and ports on +#status_listen = off # Comma-separated list of addresses and ports on # which the Status API should listen. # The Status API is a read-only endpoint # allowing monitoring tools to retrieve metrics, diff --git a/kong/templates/kong_defaults.lua b/kong/templates/kong_defaults.lua index 2c0802bc72af..7ff840c17eb3 100644 --- a/kong/templates/kong_defaults.lua +++ b/kong/templates/kong_defaults.lua @@ -28,7 +28,7 @@ proxy_listen = 0.0.0.0:8000 reuseport backlog=16384, 0.0.0.0:8443 http2 ssl reus stream_listen = off admin_listen = 127.0.0.1:8001 reuseport backlog=16384, 127.0.0.1:8444 http2 ssl reuseport backlog=16384 admin_gui_listen = 0.0.0.0:8002, 0.0.0.0:8445 ssl -status_listen = 127.0.0.1:8007 reuseport backlog=16384 +status_listen = off cluster_listen = 0.0.0.0:8005 cluster_control_plane = 127.0.0.1:8005 cluster_cert = NONE From 11d7639bb71326eff5bbcdf73b0e35f03d4763df Mon Sep 17 00:00:00 2001 From: Chrono Date: Tue, 2 Jan 2024 10:12:28 +0800 Subject: [PATCH 239/249] docs(changelog): fix a typo in #11258 (#12266) --- changelog/unreleased/kong/atc_reuse_context.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/changelog/unreleased/kong/atc_reuse_context.yml b/changelog/unreleased/kong/atc_reuse_context.yml index 3af76d0a2d72..935993c847a9 100644 --- a/changelog/unreleased/kong/atc_reuse_context.yml +++ b/changelog/unreleased/kong/atc_reuse_context.yml @@ -1,3 +1,3 @@ -message: "Reuse match copntext between requests to avoid frequent memory allocation/deallocation" +message: "Reuse match context between requests to avoid frequent memory allocation/deallocation" type: performance scope: Core From 30154217e03d7b77675716e0728609b19518dc73 Mon Sep 17 00:00:00 2001 From: Xumin <100666470+StarlightIbuki@users.noreply.github.com> Date: Tue, 2 Jan 2024 06:16:32 +0000 Subject: [PATCH 240/249] fix(request-transformer): respect letter case of rename headers' new names (#12244) Request-transformer used to ignore cases when renaming header. This PR makes it case-sensitive when renaming headers. 
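The behavioural difference is easiest to see in isolation: the lookup of the header to rename stays case-insensitive, but the configured new name is now written back verbatim instead of being lowercased first. The sketch below is illustrative only — the standalone `rename_header` helper and the plain Lua `headers` table are assumptions made for the example, not the plugin's actual internals.

```lua
-- Minimal sketch of the rename behaviour described above (assumed helper,
-- not the plugin code): the old header is looked up case-insensitively,
-- while the new header keeps the letter case configured by the user.
local function rename_header(headers, old_name, new_name)
  old_name = old_name:lower()
  local value = headers[old_name]
  if value then
    headers[new_name] = value  -- previously: headers[new_name:lower()] = value
    headers[old_name] = nil
  end
end

local headers = { ["x-to-rename"] = "old-result" }
rename_header(headers, "X-To-Rename", "X-Is-Renamed")
print(headers["X-Is-Renamed"])  --> old-result (key keeps its configured casing)
```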
Fix KAG-2599 #11579 --- .../kong/fix_req_transformer_case_sensitive.yml | 3 +++ kong/plugins/request-transformer/access.lua | 2 +- .../36-request-transformer/02-access_spec.lua | 10 +++++----- 3 files changed, 9 insertions(+), 6 deletions(-) create mode 100644 changelog/unreleased/kong/fix_req_transformer_case_sensitive.yml diff --git a/changelog/unreleased/kong/fix_req_transformer_case_sensitive.yml b/changelog/unreleased/kong/fix_req_transformer_case_sensitive.yml new file mode 100644 index 000000000000..02369e95ef44 --- /dev/null +++ b/changelog/unreleased/kong/fix_req_transformer_case_sensitive.yml @@ -0,0 +1,3 @@ +message: "**request-transformer**: now the plugin respect the letter case of new names when renaming headers." +type: bugfix +scope: Plugin diff --git a/kong/plugins/request-transformer/access.lua b/kong/plugins/request-transformer/access.lua index 76c7c5dc0fd8..441cb6b80cd0 100644 --- a/kong/plugins/request-transformer/access.lua +++ b/kong/plugins/request-transformer/access.lua @@ -168,7 +168,7 @@ local function transform_headers(conf, template_env) old_name = old_name:lower() local value = headers[old_name] if value then - headers[new_name:lower()] = value + headers[new_name] = value headers[old_name] = nil headers_to_remove[old_name] = true end diff --git a/spec/03-plugins/36-request-transformer/02-access_spec.lua b/spec/03-plugins/36-request-transformer/02-access_spec.lua index 76687101d62c..945efb7b60e6 100644 --- a/spec/03-plugins/36-request-transformer/02-access_spec.lua +++ b/spec/03-plugins/36-request-transformer/02-access_spec.lua @@ -227,7 +227,7 @@ describe("Plugin: request-transformer(access) [#" .. strategy .. "]", function() name = "request-transformer", config = { rename = { - headers = {"x-to-rename:x-is-renamed"}, + headers = {"x-to-rename:X-Is-Renamed"}, querystring = {"originalparam:renamedparam"}, body = {"originalparam:renamedparam"} } @@ -712,7 +712,7 @@ describe("Plugin: request-transformer(access) [#" .. strategy .. "]", function() assert.response(r).has.status(200) assert.response(r).has.jsonbody() assert.request(r).has.no.header("x-to-rename") - assert.request(r).has.header("x-is-renamed") + assert.request(r).has.header("X-Is-Renamed") assert.request(r).has.header("x-another-header") end) it("does not add as new header if header does not exist", function() @@ -738,13 +738,13 @@ describe("Plugin: request-transformer(access) [#" .. strategy .. "]", function() headers = { host = "test9.test", ["x-to-rename"] = "new-result", - ["x-is-renamed"] = "old-result", + ["X-Is-Renamed"] = "old-result", } }) assert.response(r).has.status(200) assert.response(r).has.jsonbody() assert.request(r).has.no.header("x-to-rename") - local h_is_renamed = assert.request(r).has.header("x-is-renamed") + local h_is_renamed = assert.request(r).has.header("X-Is-Renamed") assert.equals("new-result", h_is_renamed) end) for _, seq in ipairs({ 1, 2, 3, 4, 5, 6}) do @@ -761,7 +761,7 @@ describe("Plugin: request-transformer(access) [#" .. strategy .. 
"]", function() assert.response(r).has.status(200) assert.response(r).has.jsonbody() assert.request(r).has.no.header("x-to-rename") - local h_is_renamed = assert.request(r).has.header("x-is-renamed") + local h_is_renamed = assert.request(r).has.header("X-Is-Renamed") assert.equals("new-result", h_is_renamed) end) end From c3c83e838298d82225c0fa7d19a895dc56d42f13 Mon Sep 17 00:00:00 2001 From: Wangchong Zhou Date: Thu, 28 Dec 2023 14:33:45 +0800 Subject: [PATCH 241/249] chore(deps): bump openssl from 3.1.4 to 3.2.0 --- .requirements | 2 +- .../openssl/openssl_repositories.bzl | 2 +- changelog/unreleased/kong/bump-openssl.yml | 3 + .../fixtures/alpine-amd64.txt | 145 ------------------ .../fixtures/alpine-arm64.txt | 145 ------------------ .../fixtures/amazonlinux-2-amd64.txt | 2 +- .../fixtures/amazonlinux-2023-amd64.txt | 2 +- .../fixtures/amazonlinux-2023-arm64.txt | 2 +- .../fixtures/debian-10-amd64.txt | 2 +- .../fixtures/debian-11-amd64.txt | 2 +- .../fixtures/debian-12-amd64.txt | 2 +- .../explain_manifest/fixtures/el7-amd64.txt | 2 +- .../explain_manifest/fixtures/el8-amd64.txt | 2 +- .../explain_manifest/fixtures/el9-amd64.txt | 2 +- .../explain_manifest/fixtures/el9-arm64.txt | 2 +- .../fixtures/ubuntu-20.04-amd64.txt | 2 +- .../fixtures/ubuntu-22.04-amd64.txt | 2 +- .../fixtures/ubuntu-22.04-arm64.txt | 2 +- scripts/explain_manifest/suites.py | 16 +- 19 files changed, 26 insertions(+), 313 deletions(-) create mode 100644 changelog/unreleased/kong/bump-openssl.yml delete mode 100644 scripts/explain_manifest/fixtures/alpine-amd64.txt delete mode 100644 scripts/explain_manifest/fixtures/alpine-arm64.txt diff --git a/.requirements b/.requirements index d834d859bd97..e33006c69d57 100644 --- a/.requirements +++ b/.requirements @@ -2,7 +2,7 @@ KONG_PACKAGE_NAME=kong OPENRESTY=1.21.4.3 LUAROCKS=3.9.2 -OPENSSL=3.1.4 +OPENSSL=3.2.0 PCRE=8.45 LIBEXPAT=2.5.0 diff --git a/build/openresty/openssl/openssl_repositories.bzl b/build/openresty/openssl/openssl_repositories.bzl index cab43702d1dd..f06c848fc920 100644 --- a/build/openresty/openssl/openssl_repositories.bzl +++ b/build/openresty/openssl/openssl_repositories.bzl @@ -11,7 +11,7 @@ def openssl_repositories(): http_archive, name = "openssl", build_file = "//build/openresty/openssl:BUILD.bazel", - sha256 = "840af5366ab9b522bde525826be3ef0fb0af81c6a9ebd84caa600fea1731eee3", + sha256 = "14c826f07c7e433706fb5c69fa9e25dab95684844b4c962a2cf1bf183eb4690e", strip_prefix = "openssl-" + version, urls = [ "https://www.openssl.org/source/openssl-" + version + ".tar.gz", diff --git a/changelog/unreleased/kong/bump-openssl.yml b/changelog/unreleased/kong/bump-openssl.yml new file mode 100644 index 000000000000..687f0c70200a --- /dev/null +++ b/changelog/unreleased/kong/bump-openssl.yml @@ -0,0 +1,3 @@ +message: Bumped OpenSSL from 3.1.4 to 3.2.0 +type: dependency +scope: Core diff --git a/scripts/explain_manifest/fixtures/alpine-amd64.txt b/scripts/explain_manifest/fixtures/alpine-amd64.txt deleted file mode 100644 index b5bf1a0fa465..000000000000 --- a/scripts/explain_manifest/fixtures/alpine-amd64.txt +++ /dev/null @@ -1,145 +0,0 @@ -- Path : /usr/local/kong/include/google - Type : directory - -- Path : /usr/local/kong/include/kong - Type : directory - -- Path : /usr/local/kong/lib/engines-1.1/afalg.so - Needed : - - libcrypto.so.1.1 - - libc.so - Rpath : /usr/local/kong/lib - -- Path : /usr/local/kong/lib/engines-1.1/capi.so - Needed : - - libcrypto.so.1.1 - - libc.so - Rpath : /usr/local/kong/lib - -- Path : /usr/local/kong/lib/engines-1.1/padlock.so 
- Needed : - - libcrypto.so.1.1 - - libc.so - Rpath : /usr/local/kong/lib - -- Path : /usr/local/kong/lib/libcrypto.so.1.1 - Needed : - - libc.so - Rpath : /usr/local/kong/lib - -- Path : /usr/local/kong/lib/libexpat.so.1.8.10 - Needed : - - libc.so - -- Path : /usr/local/kong/lib/libssl.so.1.1 - Needed : - - libcrypto.so.1.1 - - libc.so - Rpath : /usr/local/kong/lib - -- Path : /usr/local/lib/lua/5.1/lfs.so - Needed : - - libc.so - Rpath : /usr/local/kong/lib - -- Path : /usr/local/lib/lua/5.1/lpeg.so - Needed : - - libc.so - Rpath : /usr/local/kong/lib - -- Path : /usr/local/lib/lua/5.1/lsyslog.so - Needed : - - libc.so - Rpath : /usr/local/kong/lib - -- Path : /usr/local/lib/lua/5.1/lua_pack.so - Needed : - - libc.so - Rpath : /usr/local/kong/lib - -- Path : /usr/local/lib/lua/5.1/lua_system_constants.so - Needed : - - libc.so - Rpath : /usr/local/kong/lib - -- Path : /usr/local/lib/lua/5.1/lxp.so - Needed : - - libexpat.so.1 - - libc.so - Rpath : /usr/local/kong/lib - -- Path : /usr/local/lib/lua/5.1/mime/core.so - Needed : - - libc.so - Rpath : /usr/local/kong/lib - -- Path : /usr/local/lib/lua/5.1/pb.so - Needed : - - libc.so - Rpath : /usr/local/kong/lib - -- Path : /usr/local/lib/lua/5.1/socket/core.so - Needed : - - libc.so - Rpath : /usr/local/kong/lib - -- Path : /usr/local/lib/lua/5.1/socket/serial.so - Needed : - - libc.so - Rpath : /usr/local/kong/lib - -- Path : /usr/local/lib/lua/5.1/socket/unix.so - Needed : - - libc.so - Rpath : /usr/local/kong/lib - -- Path : /usr/local/lib/lua/5.1/ssl.so - Needed : - - libssl.so.1.1 - - libcrypto.so.1.1 - - libc.so - Rpath : /usr/local/kong/lib - -- Path : /usr/local/lib/lua/5.1/yaml.so - Needed : - - libyaml-0.so.2 - - libc.so - -- Path : /usr/local/openresty/lualib/cjson.so - Needed : - - libc.so - -- Path : /usr/local/openresty/lualib/libatc_router.so - Needed : - - libgcc_s.so.1 - - libc.so - -- Path : /usr/local/openresty/lualib/librestysignal.so - Needed : - - libc.so - -- Path : /usr/local/openresty/lualib/rds/parser.so - Needed : - - libc.so - -- Path : /usr/local/openresty/lualib/redis/parser.so - Needed : - - libc.so - -- Path : /usr/local/openresty/nginx/sbin/nginx - Needed : - - libluajit-5.1.so.2 - - libssl.so.1.1 - - libcrypto.so.1.1 - - libz.so.1 - - libc.so - Rpath : /usr/local/openresty/luajit/lib:/usr/local/kong/lib:/usr/local/openresty/lualib - Modules : - - lua-kong-nginx-module - - lua-kong-nginx-module/stream - - lua-resty-events - - lua-resty-lmdb - OpenSSL : OpenSSL 1.1.1t 7 Feb 2023 - DWARF : True - DWARF - ngx_http_request_t related DWARF DIEs: True - diff --git a/scripts/explain_manifest/fixtures/alpine-arm64.txt b/scripts/explain_manifest/fixtures/alpine-arm64.txt deleted file mode 100644 index b5bf1a0fa465..000000000000 --- a/scripts/explain_manifest/fixtures/alpine-arm64.txt +++ /dev/null @@ -1,145 +0,0 @@ -- Path : /usr/local/kong/include/google - Type : directory - -- Path : /usr/local/kong/include/kong - Type : directory - -- Path : /usr/local/kong/lib/engines-1.1/afalg.so - Needed : - - libcrypto.so.1.1 - - libc.so - Rpath : /usr/local/kong/lib - -- Path : /usr/local/kong/lib/engines-1.1/capi.so - Needed : - - libcrypto.so.1.1 - - libc.so - Rpath : /usr/local/kong/lib - -- Path : /usr/local/kong/lib/engines-1.1/padlock.so - Needed : - - libcrypto.so.1.1 - - libc.so - Rpath : /usr/local/kong/lib - -- Path : /usr/local/kong/lib/libcrypto.so.1.1 - Needed : - - libc.so - Rpath : /usr/local/kong/lib - -- Path : /usr/local/kong/lib/libexpat.so.1.8.10 - Needed : - - libc.so - -- Path : 
/usr/local/kong/lib/libssl.so.1.1 - Needed : - - libcrypto.so.1.1 - - libc.so - Rpath : /usr/local/kong/lib - -- Path : /usr/local/lib/lua/5.1/lfs.so - Needed : - - libc.so - Rpath : /usr/local/kong/lib - -- Path : /usr/local/lib/lua/5.1/lpeg.so - Needed : - - libc.so - Rpath : /usr/local/kong/lib - -- Path : /usr/local/lib/lua/5.1/lsyslog.so - Needed : - - libc.so - Rpath : /usr/local/kong/lib - -- Path : /usr/local/lib/lua/5.1/lua_pack.so - Needed : - - libc.so - Rpath : /usr/local/kong/lib - -- Path : /usr/local/lib/lua/5.1/lua_system_constants.so - Needed : - - libc.so - Rpath : /usr/local/kong/lib - -- Path : /usr/local/lib/lua/5.1/lxp.so - Needed : - - libexpat.so.1 - - libc.so - Rpath : /usr/local/kong/lib - -- Path : /usr/local/lib/lua/5.1/mime/core.so - Needed : - - libc.so - Rpath : /usr/local/kong/lib - -- Path : /usr/local/lib/lua/5.1/pb.so - Needed : - - libc.so - Rpath : /usr/local/kong/lib - -- Path : /usr/local/lib/lua/5.1/socket/core.so - Needed : - - libc.so - Rpath : /usr/local/kong/lib - -- Path : /usr/local/lib/lua/5.1/socket/serial.so - Needed : - - libc.so - Rpath : /usr/local/kong/lib - -- Path : /usr/local/lib/lua/5.1/socket/unix.so - Needed : - - libc.so - Rpath : /usr/local/kong/lib - -- Path : /usr/local/lib/lua/5.1/ssl.so - Needed : - - libssl.so.1.1 - - libcrypto.so.1.1 - - libc.so - Rpath : /usr/local/kong/lib - -- Path : /usr/local/lib/lua/5.1/yaml.so - Needed : - - libyaml-0.so.2 - - libc.so - -- Path : /usr/local/openresty/lualib/cjson.so - Needed : - - libc.so - -- Path : /usr/local/openresty/lualib/libatc_router.so - Needed : - - libgcc_s.so.1 - - libc.so - -- Path : /usr/local/openresty/lualib/librestysignal.so - Needed : - - libc.so - -- Path : /usr/local/openresty/lualib/rds/parser.so - Needed : - - libc.so - -- Path : /usr/local/openresty/lualib/redis/parser.so - Needed : - - libc.so - -- Path : /usr/local/openresty/nginx/sbin/nginx - Needed : - - libluajit-5.1.so.2 - - libssl.so.1.1 - - libcrypto.so.1.1 - - libz.so.1 - - libc.so - Rpath : /usr/local/openresty/luajit/lib:/usr/local/kong/lib:/usr/local/openresty/lualib - Modules : - - lua-kong-nginx-module - - lua-kong-nginx-module/stream - - lua-resty-events - - lua-resty-lmdb - OpenSSL : OpenSSL 1.1.1t 7 Feb 2023 - DWARF : True - DWARF - ngx_http_request_t related DWARF DIEs: True - diff --git a/scripts/explain_manifest/fixtures/amazonlinux-2-amd64.txt b/scripts/explain_manifest/fixtures/amazonlinux-2-amd64.txt index d3bda3284080..b0d0b772ff03 100644 --- a/scripts/explain_manifest/fixtures/amazonlinux-2-amd64.txt +++ b/scripts/explain_manifest/fixtures/amazonlinux-2-amd64.txt @@ -202,7 +202,7 @@ - lua-resty-events - lua-resty-lmdb - ngx_wasm_module - OpenSSL : OpenSSL 3.1.4 24 Oct 2023 + OpenSSL : OpenSSL 3.2.0 23 Nov 2023 DWARF : True DWARF - ngx_http_request_t related DWARF DIEs: True diff --git a/scripts/explain_manifest/fixtures/amazonlinux-2023-amd64.txt b/scripts/explain_manifest/fixtures/amazonlinux-2023-amd64.txt index e85d7e578527..3c348b455c87 100644 --- a/scripts/explain_manifest/fixtures/amazonlinux-2023-amd64.txt +++ b/scripts/explain_manifest/fixtures/amazonlinux-2023-amd64.txt @@ -188,7 +188,7 @@ - lua-resty-events - lua-resty-lmdb - ngx_wasm_module - OpenSSL : OpenSSL 3.1.4 24 Oct 2023 + OpenSSL : OpenSSL 3.2.0 23 Nov 2023 DWARF : True DWARF - ngx_http_request_t related DWARF DIEs: True diff --git a/scripts/explain_manifest/fixtures/amazonlinux-2023-arm64.txt b/scripts/explain_manifest/fixtures/amazonlinux-2023-arm64.txt index 0db6e70743c3..48576d505f1f 100644 --- 
a/scripts/explain_manifest/fixtures/amazonlinux-2023-arm64.txt +++ b/scripts/explain_manifest/fixtures/amazonlinux-2023-arm64.txt @@ -170,7 +170,7 @@ - lua-resty-events - lua-resty-lmdb - ngx_wasm_module - OpenSSL : OpenSSL 3.1.4 24 Oct 2023 + OpenSSL : OpenSSL 3.2.0 23 Nov 2023 DWARF : True DWARF - ngx_http_request_t related DWARF DIEs: True diff --git a/scripts/explain_manifest/fixtures/debian-10-amd64.txt b/scripts/explain_manifest/fixtures/debian-10-amd64.txt index 013e8586181c..951fb52d982e 100644 --- a/scripts/explain_manifest/fixtures/debian-10-amd64.txt +++ b/scripts/explain_manifest/fixtures/debian-10-amd64.txt @@ -202,7 +202,7 @@ - lua-resty-events - lua-resty-lmdb - ngx_wasm_module - OpenSSL : OpenSSL 3.1.4 24 Oct 2023 + OpenSSL : OpenSSL 3.2.0 23 Nov 2023 DWARF : True DWARF - ngx_http_request_t related DWARF DIEs: True diff --git a/scripts/explain_manifest/fixtures/debian-11-amd64.txt b/scripts/explain_manifest/fixtures/debian-11-amd64.txt index fe586a0c0912..3a9420610de1 100644 --- a/scripts/explain_manifest/fixtures/debian-11-amd64.txt +++ b/scripts/explain_manifest/fixtures/debian-11-amd64.txt @@ -190,7 +190,7 @@ - lua-resty-events - lua-resty-lmdb - ngx_wasm_module - OpenSSL : OpenSSL 3.1.4 24 Oct 2023 + OpenSSL : OpenSSL 3.2.0 23 Nov 2023 DWARF : True DWARF - ngx_http_request_t related DWARF DIEs: True diff --git a/scripts/explain_manifest/fixtures/debian-12-amd64.txt b/scripts/explain_manifest/fixtures/debian-12-amd64.txt index fecba88d42b6..d8a45bc54db6 100644 --- a/scripts/explain_manifest/fixtures/debian-12-amd64.txt +++ b/scripts/explain_manifest/fixtures/debian-12-amd64.txt @@ -177,7 +177,7 @@ - lua-resty-events - lua-resty-lmdb - ngx_wasm_module - OpenSSL : OpenSSL 3.1.4 24 Oct 2023 + OpenSSL : OpenSSL 3.2.0 23 Nov 2023 DWARF : True DWARF - ngx_http_request_t related DWARF DIEs: True diff --git a/scripts/explain_manifest/fixtures/el7-amd64.txt b/scripts/explain_manifest/fixtures/el7-amd64.txt index d3bda3284080..b0d0b772ff03 100644 --- a/scripts/explain_manifest/fixtures/el7-amd64.txt +++ b/scripts/explain_manifest/fixtures/el7-amd64.txt @@ -202,7 +202,7 @@ - lua-resty-events - lua-resty-lmdb - ngx_wasm_module - OpenSSL : OpenSSL 3.1.4 24 Oct 2023 + OpenSSL : OpenSSL 3.2.0 23 Nov 2023 DWARF : True DWARF - ngx_http_request_t related DWARF DIEs: True diff --git a/scripts/explain_manifest/fixtures/el8-amd64.txt b/scripts/explain_manifest/fixtures/el8-amd64.txt index c7933610e0a3..b0817c9bdc33 100644 --- a/scripts/explain_manifest/fixtures/el8-amd64.txt +++ b/scripts/explain_manifest/fixtures/el8-amd64.txt @@ -201,7 +201,7 @@ - lua-resty-events - lua-resty-lmdb - ngx_wasm_module - OpenSSL : OpenSSL 3.1.4 24 Oct 2023 + OpenSSL : OpenSSL 3.2.0 23 Nov 2023 DWARF : True DWARF - ngx_http_request_t related DWARF DIEs: True diff --git a/scripts/explain_manifest/fixtures/el9-amd64.txt b/scripts/explain_manifest/fixtures/el9-amd64.txt index e4dbbaa65379..a9eb59444920 100644 --- a/scripts/explain_manifest/fixtures/el9-amd64.txt +++ b/scripts/explain_manifest/fixtures/el9-amd64.txt @@ -188,7 +188,7 @@ - lua-resty-events - lua-resty-lmdb - ngx_wasm_module - OpenSSL : OpenSSL 3.1.4 24 Oct 2023 + OpenSSL : OpenSSL 3.2.0 23 Nov 2023 DWARF : True DWARF - ngx_http_request_t related DWARF DIEs: True diff --git a/scripts/explain_manifest/fixtures/el9-arm64.txt b/scripts/explain_manifest/fixtures/el9-arm64.txt index 0db6e70743c3..48576d505f1f 100644 --- a/scripts/explain_manifest/fixtures/el9-arm64.txt +++ b/scripts/explain_manifest/fixtures/el9-arm64.txt @@ -170,7 +170,7 @@ - 
lua-resty-events - lua-resty-lmdb - ngx_wasm_module - OpenSSL : OpenSSL 3.1.4 24 Oct 2023 + OpenSSL : OpenSSL 3.2.0 23 Nov 2023 DWARF : True DWARF - ngx_http_request_t related DWARF DIEs: True diff --git a/scripts/explain_manifest/fixtures/ubuntu-20.04-amd64.txt b/scripts/explain_manifest/fixtures/ubuntu-20.04-amd64.txt index e4b2a5396464..f909b112e2af 100644 --- a/scripts/explain_manifest/fixtures/ubuntu-20.04-amd64.txt +++ b/scripts/explain_manifest/fixtures/ubuntu-20.04-amd64.txt @@ -194,6 +194,6 @@ - lua-resty-events - lua-resty-lmdb - ngx_wasm_module - OpenSSL : OpenSSL 3.1.4 24 Oct 2023 + OpenSSL : OpenSSL 3.2.0 23 Nov 2023 DWARF : True DWARF - ngx_http_request_t related DWARF DIEs: True diff --git a/scripts/explain_manifest/fixtures/ubuntu-22.04-amd64.txt b/scripts/explain_manifest/fixtures/ubuntu-22.04-amd64.txt index 6d22a3f711b0..b924206af824 100644 --- a/scripts/explain_manifest/fixtures/ubuntu-22.04-amd64.txt +++ b/scripts/explain_manifest/fixtures/ubuntu-22.04-amd64.txt @@ -181,7 +181,7 @@ - lua-resty-events - lua-resty-lmdb - ngx_wasm_module - OpenSSL : OpenSSL 3.1.4 24 Oct 2023 + OpenSSL : OpenSSL 3.2.0 23 Nov 2023 DWARF : True DWARF - ngx_http_request_t related DWARF DIEs: True diff --git a/scripts/explain_manifest/fixtures/ubuntu-22.04-arm64.txt b/scripts/explain_manifest/fixtures/ubuntu-22.04-arm64.txt index 8dc1f94a1b9a..70700de3e9ab 100644 --- a/scripts/explain_manifest/fixtures/ubuntu-22.04-arm64.txt +++ b/scripts/explain_manifest/fixtures/ubuntu-22.04-arm64.txt @@ -179,7 +179,7 @@ - lua-resty-events - lua-resty-lmdb - ngx_wasm_module - OpenSSL : OpenSSL 3.1.4 24 Oct 2023 + OpenSSL : OpenSSL 3.2.0 23 Nov 2023 DWARF : True DWARF - ngx_http_request_t related DWARF DIEs: True diff --git a/scripts/explain_manifest/suites.py b/scripts/explain_manifest/suites.py index b1a19b9c8465..413e92c06536 100644 --- a/scripts/explain_manifest/suites.py +++ b/scripts/explain_manifest/suites.py @@ -71,14 +71,14 @@ def common_suites(expect, libxcrypt_no_obsolete_api: bool = False): expect("/usr/local/openresty/nginx/sbin/nginx", "nginx should link libxcrypt.so.1") \ .needed_libraries.contain("libcrypt.so.1") - expect("/usr/local/openresty/nginx/sbin/nginx", "nginx compiled with OpenSSL 3.1.x") \ - .nginx_compiled_openssl.matches("OpenSSL 3.1.\d") \ - .version_requirement.key("libssl.so.3").less_than("OPENSSL_3.2.0") \ - .version_requirement.key("libcrypto.so.3").less_than("OPENSSL_3.2.0") \ - - expect("**/*.so", "dynamic libraries are compiled with OpenSSL 3.1.x") \ - .version_requirement.key("libssl.so.3").less_than("OPENSSL_3.2.0") \ - .version_requirement.key("libcrypto.so.3").less_than("OPENSSL_3.2.0") \ + expect("/usr/local/openresty/nginx/sbin/nginx", "nginx compiled with OpenSSL 3.2.x") \ + .nginx_compiled_openssl.matches("OpenSSL 3.2.\d") \ + .version_requirement.key("libssl.so.3").less_than("OPENSSL_3.3.0") \ + .version_requirement.key("libcrypto.so.3").less_than("OPENSSL_3.3.0") \ + + expect("**/*.so", "dynamic libraries are compiled with OpenSSL 3.2.x") \ + .version_requirement.key("libssl.so.3").less_than("OPENSSL_3.3.0") \ + .version_requirement.key("libcrypto.so.3").less_than("OPENSSL_3.3.0") \ def libc_libcpp_suites(expect, libc_max_version: str = None, libcxx_max_version: str = None, cxxabi_max_version: str = None): From c1e5af03b21cd1792f8f23888b0a8a69dd82f72a Mon Sep 17 00:00:00 2001 From: Wangchong Zhou Date: Tue, 2 Jan 2024 15:28:19 +0800 Subject: [PATCH 242/249] fix(cd): revert actions versions to work under RHEL 7 --- .github/workflows/release.yml | 10 +++++----- 1 
file changed, 5 insertions(+), 5 deletions(-) diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 94e957e14dae..0dced5a70e25 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -270,7 +270,7 @@ jobs: tail -n500 bazel-out/**/*/CMake.log || true - name: Upload artifact - uses: actions/upload-artifact@v4 + uses: actions/upload-artifact@v3 with: name: ${{ matrix.label }}-packages path: bazel-bin/pkg @@ -290,7 +290,7 @@ jobs: - uses: actions/checkout@v3 - name: Download artifact - uses: actions/download-artifact@v4 + uses: actions/download-artifact@v3 with: name: ${{ matrix.label }}-packages path: bazel-bin/pkg @@ -322,14 +322,14 @@ jobs: - uses: actions/checkout@v3 - name: Download artifact - uses: actions/download-artifact@v4 + uses: actions/download-artifact@v3 with: name: ${{ matrix.artifact-from }}-packages path: bazel-bin/pkg - name: Download artifact (alt) if: matrix.artifact-from-alt != '' - uses: actions/download-artifact@v4 + uses: actions/download-artifact@v3 with: name: ${{ matrix.artifact-from-alt }}-packages path: bazel-bin/pkg @@ -618,7 +618,7 @@ jobs: - uses: actions/checkout@v4 - name: Download artifact - uses: actions/download-artifact@v4 + uses: actions/download-artifact@v3 with: name: ${{ matrix.artifact-from }}-packages path: bazel-bin/pkg From e22ac21be18970bbdf3b919390f7feca351df69f Mon Sep 17 00:00:00 2001 From: Wangchong Zhou Date: Tue, 2 Jan 2024 15:29:57 +0800 Subject: [PATCH 243/249] fix(cd): run full matrix for dependabot PRs --- .github/workflows/release.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 0dced5a70e25..135987463211 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -35,7 +35,7 @@ env: # official release repo DOCKER_REPOSITORY: kong/kong PRERELEASE_DOCKER_REPOSITORY: kong/kong - FULL_RELEASE: ${{ github.event_name == 'schedule' || github.event_name == 'workflow_dispatch' }} + FULL_RELEASE: ${{ github.event_name == 'schedule' || github.event_name == 'workflow_dispatch' || github.actor == 'dependabot[bot]'}} # only for pr GHA_CACHE: ${{ github.event_name == 'pull_request' }} From 5175e103b81685a695e6e5a18e879217e1ca7876 Mon Sep 17 00:00:00 2001 From: Niklaus Schen <8458369+Water-Melon@users.noreply.github.com> Date: Wed, 3 Jan 2024 15:15:13 +0800 Subject: [PATCH 244/249] refactor(plugins): replace usage or resty.openssl.hmac with resty.openssl.mac (#12276) Replace all usage of resty.openssl.hmac (which binds HMAC_* low level APIs) with resty.openssl.mac in Kong. 
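For HMAC computation the two modules are drop-in equivalents, which keeps the sweep mechanical: only the constructor changes, while `:final()` and the resulting digest stay the same. A minimal before/after sketch, assuming lua-resty-openssl is available (run e.g. under the `resty` CLI); the `secret`/`data` values are made up for illustration:

```lua
-- Before/after sketch of the migration: both constructors come from
-- lua-resty-openssl and yield the same HMAC-SHA256 digest for the same input.
local openssl_hmac = require "resty.openssl.hmac" -- old, binds the low-level HMAC_* API
local openssl_mac  = require "resty.openssl.mac"  -- new, EVP_MAC-based replacement

local secret = "secret"
local data   = "date: Thu, 19 Oct 2023 12:00:00 GMT"

local old_digest = openssl_hmac.new(secret, "sha256"):final(data)
local new_digest = openssl_mac.new(secret, "HMAC", nil, "sha256"):final(data)

assert(old_digest == new_digest, "both APIs must produce the same HMAC")
print(ngx.encode_base64(new_digest))
```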
KAG-3445 --- kong/plugins/hmac-auth/access.lua | 8 ++++---- kong/plugins/jwt/jwt_parser.lua | 8 ++++---- spec/03-plugins/19-hmac-auth/03-access_spec.lua | 16 ++++++++-------- .../19-hmac-auth/04-invalidations_spec.lua | 4 ++-- 4 files changed, 18 insertions(+), 18 deletions(-) diff --git a/kong/plugins/hmac-auth/access.lua b/kong/plugins/hmac-auth/access.lua index 44ac3a4875c7..4df53921d525 100644 --- a/kong/plugins/hmac-auth/access.lua +++ b/kong/plugins/hmac-auth/access.lua @@ -1,5 +1,5 @@ local constants = require "kong.constants" -local openssl_hmac = require "resty.openssl.hmac" +local openssl_mac = require "resty.openssl.mac" local sha256_base64 = require("kong.tools.sha256").sha256_base64 @@ -37,13 +37,13 @@ local hmac = { return hmac_sha1(secret, data) end, ["hmac-sha256"] = function(secret, data) - return openssl_hmac.new(secret, "sha256"):final(data) + return openssl_mac.new(secret, "HMAC", nil, "sha256"):final(data) end, ["hmac-sha384"] = function(secret, data) - return openssl_hmac.new(secret, "sha384"):final(data) + return openssl_mac.new(secret, "HMAC", nil, "sha384"):final(data) end, ["hmac-sha512"] = function(secret, data) - return openssl_hmac.new(secret, "sha512"):final(data) + return openssl_mac.new(secret, "HMAC", nil, "sha512"):final(data) end, } diff --git a/kong/plugins/jwt/jwt_parser.lua b/kong/plugins/jwt/jwt_parser.lua index 5bad71635915..502d45a9ff6d 100644 --- a/kong/plugins/jwt/jwt_parser.lua +++ b/kong/plugins/jwt/jwt_parser.lua @@ -9,7 +9,7 @@ local json = require "cjson" local b64 = require "ngx.base64" local buffer = require "string.buffer" local openssl_digest = require "resty.openssl.digest" -local openssl_hmac = require "resty.openssl.hmac" +local openssl_mac = require "resty.openssl.mac" local openssl_pkey = require "resty.openssl.pkey" @@ -33,9 +33,9 @@ local decode_base64url = b64.decode_base64url --- Supported algorithms for signing tokens. 
local alg_sign = { - HS256 = function(data, key) return openssl_hmac.new(key, "sha256"):final(data) end, - HS384 = function(data, key) return openssl_hmac.new(key, "sha384"):final(data) end, - HS512 = function(data, key) return openssl_hmac.new(key, "sha512"):final(data) end, + HS256 = function(data, key) return openssl_mac.new(key, "HMAC", nil, "sha256"):final(data) end, + HS384 = function(data, key) return openssl_mac.new(key, "HMAC", nil, "sha384"):final(data) end, + HS512 = function(data, key) return openssl_mac.new(key, "HMAC", nil, "sha512"):final(data) end, RS256 = function(data, key) local digest = openssl_digest.new("sha256") assert(digest:update(data)) diff --git a/spec/03-plugins/19-hmac-auth/03-access_spec.lua b/spec/03-plugins/19-hmac-auth/03-access_spec.lua index 9d88f4a50553..643ed1adfcf6 100644 --- a/spec/03-plugins/19-hmac-auth/03-access_spec.lua +++ b/spec/03-plugins/19-hmac-auth/03-access_spec.lua @@ -1,5 +1,5 @@ local cjson = require "cjson" -local openssl_hmac = require "resty.openssl.hmac" +local openssl_mac = require "resty.openssl.mac" local helpers = require "spec.helpers" local utils = require "kong.tools.utils" local resty_sha256 = require "resty.sha256" @@ -8,7 +8,7 @@ local fmt = string.format local hmac_sha1_binary = function(secret, data) - return openssl_hmac.new(secret, "sha1"):final(data) + return openssl_mac.new(secret, "HMAC", nil, "sha1"):final(data) end @@ -816,7 +816,7 @@ for _, strategy in helpers.each_strategy() do it("should not pass with GET with wrong algorithm", function() local date = os.date("!%a, %d %b %Y %H:%M:%S GMT") local encodedSignature = ngx.encode_base64( - openssl_hmac.new("secret", "sha256"):final("date: " .. date .. "\n" + openssl_mac.new("secret", "HMAC", nil, "sha256"):final("date: " .. date .. "\n" .. "content-md5: md5" .. "\nGET /request HTTP/1.1")) local hmacAuth = [[hmac username="bob",algorithm="hmac-sha",]] .. [[ headers="date content-md5 request-line",signature="]] @@ -839,7 +839,7 @@ for _, strategy in helpers.each_strategy() do it("should pass the right headers to the upstream server", function() local date = os.date("!%a, %d %b %Y %H:%M:%S GMT") local encodedSignature = ngx.encode_base64( - openssl_hmac.new("secret", "sha256"):final("date: " .. date .. "\n" + openssl_mac.new("secret", "HMAC", nil, "sha256"):final("date: " .. date .. "\n" .. "content-md5: md5" .. "\nGET /request HTTP/1.1")) local hmacAuth = [[hmac username="bob",algorithm="hmac-sha256",]] .. [[ headers="date content-md5 request-line",signature="]] @@ -1592,7 +1592,7 @@ for _, strategy in helpers.each_strategy() do it("should pass with GET with hmac-sha384", function() local date = os.date("!%a, %d %b %Y %H:%M:%S GMT") local encodedSignature = ngx.encode_base64( - openssl_hmac.new("secret", "sha384"):final("date: " .. date .. "\n" + openssl_mac.new("secret", "HMAC", nil, "sha384"):final("date: " .. date .. "\n" .. "content-md5: md5" .. "\nGET /request HTTP/1.1")) local hmacAuth = [[hmac username="bob", algorithm="hmac-sha384", ]] .. [[headers="date content-md5 request-line", signature="]] @@ -1614,7 +1614,7 @@ for _, strategy in helpers.each_strategy() do it("should pass with GET with hmac-sha512", function() local date = os.date("!%a, %d %b %Y %H:%M:%S GMT") local encodedSignature = ngx.encode_base64( - openssl_hmac.new("secret", "sha512"):final("date: " .. date .. "\n" + openssl_mac.new("secret", "HMAC", nil, "sha512"):final("date: " .. date .. "\n" .. "content-md5: md5" .. 
"\nGET /request HTTP/1.1")) local hmacAuth = [[hmac username="bob", algorithm="hmac-sha512", ]] .. [[headers="date content-md5 request-line", signature="]] @@ -1636,7 +1636,7 @@ for _, strategy in helpers.each_strategy() do it("should not pass with hmac-sha512", function() local date = os.date("!%a, %d %b %Y %H:%M:%S GMT") local encodedSignature = ngx.encode_base64( - openssl_hmac.new("secret", "sha512"):final("date: " .. date .. "\n" + openssl_mac.new("secret", "HMAC", nil, "sha512"):final("date: " .. date .. "\n" .. "content-md5: md5" .. "\nGET /request HTTP/1.1")) local hmacAuth = [[hmac username="bob", algorithm="hmac-sha512", ]] .. [[headers="date content-md5 request-line", signature="]] @@ -1673,7 +1673,7 @@ for _, strategy in helpers.each_strategy() do it("should pass with hmac-sha1", function() local date = os.date("!%a, %d %b %Y %H:%M:%S GMT") local encodedSignature = ngx.encode_base64( - openssl_hmac.new("secret", "sha1"):final("date: " .. date .. "\n" + openssl_mac.new("secret", "HMAC", nil, "sha1"):final("date: " .. date .. "\n" .. "content-md5: md5" .. "\nGET /request HTTP/1.1")) local hmacAuth = [[hmac username="bob", algorithm="hmac-sha1", ]] .. [[headers="date content-md5 request-line", signature="]] diff --git a/spec/03-plugins/19-hmac-auth/04-invalidations_spec.lua b/spec/03-plugins/19-hmac-auth/04-invalidations_spec.lua index 08e7a6cdcd28..e235e38e54c0 100644 --- a/spec/03-plugins/19-hmac-auth/04-invalidations_spec.lua +++ b/spec/03-plugins/19-hmac-auth/04-invalidations_spec.lua @@ -1,6 +1,6 @@ local helpers = require "spec.helpers" local cjson = require "cjson" -local openssl_hmac = require "resty.openssl.hmac" +local openssl_mac = require "resty.openssl.mac" for _, strategy in helpers.each_strategy() do describe("Plugin: hmac-auth (invalidations) [#" .. strategy .. "]", function() @@ -62,7 +62,7 @@ for _, strategy in helpers.each_strategy() do end) local function hmac_sha1_binary(secret, data) - return openssl_hmac.new(secret, "sha1"):final(data) + return openssl_mac.new(secret, "HMAC", nil, "sha1"):final(data) end local function get_authorization(username) From 064f3f6212e9449200aad08e72ee8d17a662b750 Mon Sep 17 00:00:00 2001 From: Wangchong Zhou Date: Thu, 4 Jan 2024 15:33:51 +0800 Subject: [PATCH 245/249] feat(ci): trigger a workflow for reviewing patches (#12277) This commit adds a workflow that opens a companion PR (the link being displayed as mentioning current PR) when developer opens a PR that modifies openresty patches. The companion PR automatically creates and updates in-place when the PR at kong or kong-ee updates, and displays only the diffs for patches files to help reviewer understand the changes better. 
--- .../workflows/openresty-patches-companion.yml | 20 +++++++++++++++++++ 1 file changed, 20 insertions(+) create mode 100644 .github/workflows/openresty-patches-companion.yml diff --git a/.github/workflows/openresty-patches-companion.yml b/.github/workflows/openresty-patches-companion.yml new file mode 100644 index 000000000000..4d79a2276358 --- /dev/null +++ b/.github/workflows/openresty-patches-companion.yml @@ -0,0 +1,20 @@ +name: Openresty patches review companion +on: + pull_request: + paths: + - 'build/openresty/patches/**' + +jobs: + create-pr: + runs-on: ubuntu-latest + steps: + - name: Dispatch the workflow + uses: benc-uk/workflow-dispatch@798e70c97009500150087d30d9f11c5444830385 # v1 + with: + workflow: create-pr.yml + repo: kong/openresty-patches-review + ref: master + token: ${{ secrets.PAT }} + inputs: | + {"pr-branch":"${{ github.event.pull_request.head.repo.owner.login }}:${{ github.head_ref }}", "pr-base":"${{ github.base_ref }}", "ee":${{ contains(github.repository, 'kong-ee') && 'true' || 'false' }}, "pr-id":"${{ github.event.pull_request.number }}"} + From 0a41bed87bae45229604f4d0f9cc8d4bfce40fe5 Mon Sep 17 00:00:00 2001 From: samugi Date: Thu, 4 Jan 2024 13:24:59 +0100 Subject: [PATCH 246/249] tests(actions): fix failure rerun add non empty file check for rerun append failed tests from different runs --- .ci/run_tests.sh | 13 ++++++++++++- 1 file changed, 12 insertions(+), 1 deletion(-) diff --git a/.ci/run_tests.sh b/.ci/run_tests.sh index 447936f73ff6..55f64dc03dd4 100755 --- a/.ci/run_tests.sh +++ b/.ci/run_tests.sh @@ -10,7 +10,7 @@ function red() { } function get_failed { - if [ ! -z "$FAILED_TEST_FILES_FILE" -a -f "$FAILED_TEST_FILES_FILE" ] + if [ ! -z "$FAILED_TEST_FILES_FILE" -a -s "$FAILED_TEST_FILES_FILE" ] then cat < $FAILED_TEST_FILES_FILE else @@ -103,8 +103,19 @@ if [ "$TEST_SUITE" == "plugins" ]; then echo $TEST_CMD $p || echo "* $p" >> .failed + + # the suite is run multiple times for plugins: collect partial failures + if [ ! -z "$FAILED_TEST_FILES_FILE" ] + then + cat "$FAILED_TEST_FILES_FILE" >> "$FAILED_TEST_FILES_FILE.tmp" + fi done + if [ ! 
-z "$FAILED_TEST_FILES_FILE.tmp" -a -s "$FAILED_TEST_FILES_FILE.tmp" ] + then + mv "$FAILED_TEST_FILES_FILE.tmp" "$FAILED_TEST_FILES_FILE" + fi + if [[ "$TEST_SPLIT" != first* ]]; then cat kong-*.rockspec | grep kong- | grep -v zipkin | grep -v sidecar | grep "~" | grep -v kong-prometheus-plugin | while read line ; do REPOSITORY=`echo $line | sed "s/\"/ /g" | awk -F" " '{print $1}'` From 9317986d35811f5533aeb719f4e83c9e058c7e7d Mon Sep 17 00:00:00 2001 From: samugi Date: Thu, 4 Jan 2024 16:02:52 +0100 Subject: [PATCH 247/249] tests(rate-limiting): adapt test to new shm api --- spec/03-plugins/23-rate-limiting/02-policies_spec.lua | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/spec/03-plugins/23-rate-limiting/02-policies_spec.lua b/spec/03-plugins/23-rate-limiting/02-policies_spec.lua index 6ee5ef674e71..c3562a52aa61 100644 --- a/spec/03-plugins/23-rate-limiting/02-policies_spec.lua +++ b/spec/03-plugins/23-rate-limiting/02-policies_spec.lua @@ -81,14 +81,17 @@ describe("Plugin: rate-limiting (policies)", function() it("expires after due time", function () local timestamp = 569000048000 + local key = get_local_key(conf, identifier, 'second', timestamp) assert(policies['local'].increment(conf, {second=100}, identifier, timestamp+20, 1)) - local v = assert(shm:ttl(get_local_key(conf, identifier, 'second', timestamp))) + local v = assert(shm:ttl(key)) assert(v > 0, "wrong value") ngx.sleep(1.020) - v = shm:ttl(get_local_key(conf, identifier, 'second', timestamp)) - assert(v == nil, "still there") + v = shm:ttl(key) + assert(v < 0, "expected ttl to be negative") + local val = shm:get(key) + assert.is_nil(val) end) end) From c3abb6aaa6e16136a8ed8b4207e2a022bf1d64a6 Mon Sep 17 00:00:00 2001 From: xumin Date: Fri, 5 Jan 2024 10:49:47 +0800 Subject: [PATCH 248/249] Revert "fix(request-transformer): respect letter case of rename headers' new names (#12244)" This reverts commit 30154217e03d7b77675716e0728609b19518dc73. --- .../kong/fix_req_transformer_case_sensitive.yml | 3 --- kong/plugins/request-transformer/access.lua | 2 +- .../36-request-transformer/02-access_spec.lua | 10 +++++----- 3 files changed, 6 insertions(+), 9 deletions(-) delete mode 100644 changelog/unreleased/kong/fix_req_transformer_case_sensitive.yml diff --git a/changelog/unreleased/kong/fix_req_transformer_case_sensitive.yml b/changelog/unreleased/kong/fix_req_transformer_case_sensitive.yml deleted file mode 100644 index 02369e95ef44..000000000000 --- a/changelog/unreleased/kong/fix_req_transformer_case_sensitive.yml +++ /dev/null @@ -1,3 +0,0 @@ -message: "**request-transformer**: now the plugin respect the letter case of new names when renaming headers." 
-type: bugfix -scope: Plugin diff --git a/kong/plugins/request-transformer/access.lua b/kong/plugins/request-transformer/access.lua index 441cb6b80cd0..76c7c5dc0fd8 100644 --- a/kong/plugins/request-transformer/access.lua +++ b/kong/plugins/request-transformer/access.lua @@ -168,7 +168,7 @@ local function transform_headers(conf, template_env) old_name = old_name:lower() local value = headers[old_name] if value then - headers[new_name] = value + headers[new_name:lower()] = value headers[old_name] = nil headers_to_remove[old_name] = true end diff --git a/spec/03-plugins/36-request-transformer/02-access_spec.lua b/spec/03-plugins/36-request-transformer/02-access_spec.lua index 945efb7b60e6..76687101d62c 100644 --- a/spec/03-plugins/36-request-transformer/02-access_spec.lua +++ b/spec/03-plugins/36-request-transformer/02-access_spec.lua @@ -227,7 +227,7 @@ describe("Plugin: request-transformer(access) [#" .. strategy .. "]", function() name = "request-transformer", config = { rename = { - headers = {"x-to-rename:X-Is-Renamed"}, + headers = {"x-to-rename:x-is-renamed"}, querystring = {"originalparam:renamedparam"}, body = {"originalparam:renamedparam"} } @@ -712,7 +712,7 @@ describe("Plugin: request-transformer(access) [#" .. strategy .. "]", function() assert.response(r).has.status(200) assert.response(r).has.jsonbody() assert.request(r).has.no.header("x-to-rename") - assert.request(r).has.header("X-Is-Renamed") + assert.request(r).has.header("x-is-renamed") assert.request(r).has.header("x-another-header") end) it("does not add as new header if header does not exist", function() @@ -738,13 +738,13 @@ describe("Plugin: request-transformer(access) [#" .. strategy .. "]", function() headers = { host = "test9.test", ["x-to-rename"] = "new-result", - ["X-Is-Renamed"] = "old-result", + ["x-is-renamed"] = "old-result", } }) assert.response(r).has.status(200) assert.response(r).has.jsonbody() assert.request(r).has.no.header("x-to-rename") - local h_is_renamed = assert.request(r).has.header("X-Is-Renamed") + local h_is_renamed = assert.request(r).has.header("x-is-renamed") assert.equals("new-result", h_is_renamed) end) for _, seq in ipairs({ 1, 2, 3, 4, 5, 6}) do @@ -761,7 +761,7 @@ describe("Plugin: request-transformer(access) [#" .. strategy .. "]", function() assert.response(r).has.status(200) assert.response(r).has.jsonbody() assert.request(r).has.no.header("x-to-rename") - local h_is_renamed = assert.request(r).has.header("X-Is-Renamed") + local h_is_renamed = assert.request(r).has.header("x-is-renamed") assert.equals("new-result", h_is_renamed) end) end From 428ff45d010b212ed35fce1d7a0efa8203e52d37 Mon Sep 17 00:00:00 2001 From: Zachary Hu Date: Wed, 3 Jan 2024 22:16:25 +0800 Subject: [PATCH 249/249] tests(rate-limiting): flush expired rate limiting counters from shared dict If we do not flush, the `ttl` value may be negative. 
```bash ~ $ resty --http-conf 'lua_shared_dict jim 1m;' -e 'local shm = ngx.shared.jim; shm:set("age", 17, 1); local v = shm:get("age"); print(v); ngx.sleep(1.001); print(shm:ttl("age"))' 17 -0.032 ``` --- .../23-rate-limiting/02-policies_spec.lua | 26 ++++++++++++------- 1 file changed, 16 insertions(+), 10 deletions(-) diff --git a/spec/03-plugins/23-rate-limiting/02-policies_spec.lua b/spec/03-plugins/23-rate-limiting/02-policies_spec.lua index c3562a52aa61..b221da87582c 100644 --- a/spec/03-plugins/23-rate-limiting/02-policies_spec.lua +++ b/spec/03-plugins/23-rate-limiting/02-policies_spec.lua @@ -5,7 +5,7 @@ local timestamp = require "kong.tools.timestamp" local SYNC_RATE_REALTIME = -1 --[[ - basically a copy of `get_local_key()` + basically a copy of `get_local_key()` in `kong/plugins/rate-limiting/policies/init.lua` --]] local EMPTY_UUID = "00000000-0000-0000-0000-000000000000" @@ -41,7 +41,7 @@ describe("Plugin: rate-limiting (policies)", function() lazy_setup(function() package.loaded["kong.plugins.rate-limiting.policies"] = nil policies = require "kong.plugins.rate-limiting.policies" - + if not _G.kong then _G.kong.db = {} end @@ -80,18 +80,24 @@ describe("Plugin: rate-limiting (policies)", function() end) it("expires after due time", function () - local timestamp = 569000048000 - local key = get_local_key(conf, identifier, 'second', timestamp) + local current_timestamp = 1553263548 + local periods = timestamp.get_timestamps(current_timestamp) - assert(policies['local'].increment(conf, {second=100}, identifier, timestamp+20, 1)) - local v = assert(shm:ttl(key)) + local limits = { + second = 100, + } + local cache_key = get_local_key(conf, identifier, 'second', periods.second) + + assert(policies['local'].increment(conf, limits, identifier, current_timestamp, 1)) + local v = assert(shm:ttl(cache_key)) assert(v > 0, "wrong value") ngx.sleep(1.020) - v = shm:ttl(key) - assert(v < 0, "expected ttl to be negative") - local val = shm:get(key) - assert.is_nil(val) + shm:flush_expired() + local err + v, err = shm:ttl(cache_key) + assert(v == nil, "still there") + assert.matches("not found", err) end) end)