This is an automated email from the ASF dual-hosted git repository.

wenming pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/apisix.git
The following commit(s) were added to refs/heads/master by this push:
     new 53de201e9  fix: skipped failing bailedout tests in CI (#12462)
53de201e9 is described below

commit 53de201e952135c2d80348921b99bbd07f048ddc
Author: Ashish Tiwari <ashishjaitiwari15112...@gmail.com>
AuthorDate: Mon Jul 28 07:44:54 2025 +0530

    fix: skipped failing bailedout tests in CI (#12462)
---
 apisix/plugins/ai-drivers/openai-base.lua |  2 +-
 apisix/plugins/ai-request-rewrite.lua     | 26 +++++---------------------
 apisix/plugins/chaitin-waf.lua            |  4 ++--
 ci/common.sh                              |  9 +++++++++
 ci/linux_openresty_common_runner.sh       |  1 +
 ci/redhat-ci.sh                           |  1 +
 t/plugin/ai-proxy.openai-compatible.t     | 19 +++++++++++++++++++
 t/plugin/ai-rate-limiting.t               | 25 ++-----------------------
 t/plugin/ai-request-rewrite2.t            |  1 +
 t/plugin/ai3.t                            |  9 ++++++---
 t/plugin/chaitin-waf-reject.t             |  1 +
 11 files changed, 48 insertions(+), 50 deletions(-)

diff --git a/apisix/plugins/ai-drivers/openai-base.lua b/apisix/plugins/ai-drivers/openai-base.lua
index 09134265e..b1414f388 100644
--- a/apisix/plugins/ai-drivers/openai-base.lua
+++ b/apisix/plugins/ai-drivers/openai-base.lua
@@ -170,7 +170,7 @@ function _M.request(self, ctx, conf, request_table, extra_opts)
     end
 
     httpc:set_timeout(conf.timeout)
-    local endpoint = extra_opts.endpoint
+    local endpoint = extra_opts and extra_opts.endpoint
     local parsed_url
     if endpoint then
         parsed_url = url.parse(endpoint)
diff --git a/apisix/plugins/ai-request-rewrite.lua b/apisix/plugins/ai-request-rewrite.lua
index 1b850eb7f..4ef5b5b39 100644
--- a/apisix/plugins/ai-request-rewrite.lua
+++ b/apisix/plugins/ai-request-rewrite.lua
@@ -129,18 +129,7 @@ local function request_to_llm(conf, request_table, ctx)
         model_options = conf.options
     }
 
-    local res, err, httpc = ai_driver:request(conf, request_table, extra_opts)
-    if err then
-        return nil, nil, err
-    end
-
-    local resp_body, err = res:read_body()
-    httpc:close()
-    if err then
-        return nil, nil, err
-    end
-
-    return res, resp_body
+    return ai_driver:request(ctx, conf, request_table, extra_opts)
 end
 
 
@@ -206,20 +195,15 @@ function _M.access(conf, ctx)
     }
 
     -- Send request to LLM service
-    local res, resp_body, err = request_to_llm(conf, ai_request_table, ctx)
-    if err then
-        core.log.error("failed to request to LLM service: ", err)
-        return HTTP_INTERNAL_SERVER_ERROR
-    end
-
+    local code, body = request_to_llm(conf, ai_request_table, ctx)
     -- Handle LLM response
-    if res.status > 299 then
-        core.log.error("LLM service returned error status: ", res.status)
+    if code > 299 then
+        core.log.error("LLM service returned error status: ", code)
         return HTTP_INTERNAL_SERVER_ERROR
     end
 
     -- Parse LLM response
-    local llm_response, err = parse_llm_response(resp_body)
+    local llm_response, err = parse_llm_response(body)
     if err then
         core.log.error("failed to parse LLM response: ", err)
         return HTTP_INTERNAL_SERVER_ERROR
diff --git a/apisix/plugins/chaitin-waf.lua b/apisix/plugins/chaitin-waf.lua
index 94fb87693..6c82c4a70 100644
--- a/apisix/plugins/chaitin-waf.lua
+++ b/apisix/plugins/chaitin-waf.lua
@@ -269,7 +269,7 @@ end
 
 local function get_conf(conf, metadata)
     local t = {
-        mode = "block",
+        mode = "monitor",
         real_client_ip = true,
     }
 
@@ -324,7 +324,7 @@ local function do_access(conf, ctx)
 
     extra_headers[HEADER_CHAITIN_WAF_SERVER] = host
 
-    local mode = t.mode or "block"
+    local mode = t.mode or "monitor"
     if mode == "off" then
         extra_headers[HEADER_CHAITIN_WAF] = "off"
         return nil, nil, extra_headers
diff --git a/ci/common.sh b/ci/common.sh
index 3850460d8..c1eb9876a 100644
--- a/ci/common.sh
+++ b/ci/common.sh
@@ -66,6 +66,15 @@ rerun_flaky_tests() {
     FLUSH_ETCD=1 prove --timer -I./test-nginx/lib -I./ $(echo "$tests" | xargs)
 }
 
+fail_on_bailout() {
+    local test_output_file="$1"
+
+    # Check for bailout message in test output
+    if grep -q "Bailout called. Further testing stopped:" "$test_output_file"; then
+        echo "Error: Bailout detected in test output"
+        exit 1
+    fi
+}
 install_curl () {
     CURL_VERSION="8.13.0"
     wget -q https://github.com/stunnel/static-curl/releases/download/${CURL_VERSION}/curl-linux-x86_64-glibc-${CURL_VERSION}.tar.xz
diff --git a/ci/linux_openresty_common_runner.sh b/ci/linux_openresty_common_runner.sh
index afaf9488e..ac3d6c91f 100755
--- a/ci/linux_openresty_common_runner.sh
+++ b/ci/linux_openresty_common_runner.sh
@@ -99,6 +99,7 @@ script() {
     # APISIX_ENABLE_LUACOV=1 PERL5LIB=.:$PERL5LIB prove -Itest-nginx/lib -r t
     FLUSH_ETCD=1 TEST_EVENTS_MODULE=$TEST_EVENTS_MODULE prove --timer -Itest-nginx/lib -I./ -r $TEST_FILE_SUB_DIR | tee /tmp/test.result
 
+    fail_on_bailout /tmp/test.result
     rerun_flaky_tests /tmp/test.result
 }
diff --git a/ci/redhat-ci.sh b/ci/redhat-ci.sh
index 8cd63c871..c7b4f8fee 100755
--- a/ci/redhat-ci.sh
+++ b/ci/redhat-ci.sh
@@ -102,6 +102,7 @@ run_case() {
     set_coredns
     # run test cases
     FLUSH_ETCD=1 TEST_EVENTS_MODULE=$TEST_EVENTS_MODULE prove --timer -Itest-nginx/lib -I./ -r ${TEST_FILE_SUB_DIR} | tee /tmp/test.result
+    fail_on_bailout /tmp/test.result
     rerun_flaky_tests /tmp/test.result
 }
diff --git a/t/plugin/ai-proxy.openai-compatible.t b/t/plugin/ai-proxy.openai-compatible.t
index a5147648c..9168816fd 100644
--- a/t/plugin/ai-proxy.openai-compatible.t
+++ b/t/plugin/ai-proxy.openai-compatible.t
@@ -319,3 +319,22 @@ passed
                 ngx.say(err)
                 return
             end
+
+            local final_res = {}
+            while true do
+                local chunk, err = res.body_reader() -- will read chunk by chunk
+                if err then
+                    core.log.error("failed to read response chunk: ", err)
+                    break
+                end
+                if not chunk then
+                    break
+                end
+                core.table.insert_tail(final_res, chunk)
+            end
+
+            ngx.print(#final_res .. final_res[6])
+        }
+    }
+--- response_body_like eval
+qr/6data: \[DONE\]\n\n/
diff --git a/t/plugin/ai-rate-limiting.t b/t/plugin/ai-rate-limiting.t
index 8ac6677e0..40fe8f1ff 100644
--- a/t/plugin/ai-rate-limiting.t
+++ b/t/plugin/ai-rate-limiting.t
@@ -891,19 +891,7 @@ passed
                         ],
                         "ssl_verify": false
                     },
-                    "ai-rate-limiting": {
-                        "instances": [
-                            {
-                                "name": "openai-gpt3",
-                                "limit": 50,
-                                "time_window": 60
-                            },
-                            {
-                                "name": "openai-gpt4",
-                                "limit": 20,
-                                "time_window": 60
-                            }
-                        ]
+                    "ai-rate-limiting": {"instances": [{"name": "openai-gpt3","limit": 50,"time_window": 60},{"name": "openai-gpt4","limit": 20,"time_window": 60}]
                     }
                 },
                 "upstream": {
@@ -996,16 +984,7 @@ Authorization: Bearer token
                         ],
                         "ssl_verify": false
                     },
-                    "ai-rate-limiting": {
-                        "limit": 20,
-                        "time_window": 60,
-                        "instances": [
-                            {
-                                "name": "openai-gpt3",
-                                "limit": 50,
-                                "time_window": 60
-                            }
-                        ]
+                    "ai-rate-limiting": {"limit": 20, "time_window": 60, "instances": [{"name": "openai-gpt3","limit": 50,"time_window": 60}]
                     }
                 },
                 "upstream": {
diff --git a/t/plugin/ai-request-rewrite2.t b/t/plugin/ai-request-rewrite2.t
index f066f214a..ee832df7f 100644
--- a/t/plugin/ai-request-rewrite2.t
+++ b/t/plugin/ai-request-rewrite2.t
@@ -189,6 +189,7 @@ override.endpoint is required for openai-compatible provider
                 "uri": "/anything",
                 "plugins": {
                     "ai-proxy": {
+                        "provider": "openai",
                         "auth": {
                             "query": {
                                 "api_key": "apikey"
diff --git a/t/plugin/ai3.t b/t/plugin/ai3.t
index b56d10235..2db7cdd77 100644
--- a/t/plugin/ai3.t
+++ b/t/plugin/ai3.t
@@ -119,6 +119,10 @@ use ai plane to match route
 === TEST 2: keep route cache as latest data
 # update the attributes that do not participate in the route cache key to ensure
 # that the route cache use the latest data
+--- yaml_config
+plugin_attr:
+    prometheus:
+        refresh_interval: 0.1
 --- config
     location /t {
         content_by_lua_block {
@@ -171,7 +175,7 @@ use ai plane to match route
                 ngx.log(ngx.ERR, err)
                 return
             end
-
+            ngx.sleep(1)
             local metrics_uri = "http://127.0.0.1:" .. ngx.var.server_port .. "/apisix/prometheus/metrics"
             local httpc = http.new()
             local res, err = httpc:request_uri(metrics_uri)
@@ -180,7 +184,6 @@ use ai plane to match route
                 ngx.log(ngx.ERR, err)
                 return
             end
-
             local m, err = ngx.re.match(res.body, "apisix_bandwidth{type=\"ingress\",route=\"foo\"", "jo")
             ngx.say(m[0])
@@ -204,7 +207,7 @@ use ai plane to match route
                 ngx.log(ngx.ERR, err)
                 return
             end
-
+            ngx.sleep(1)
             local metrics_uri = "http://127.0.0.1:" .. ngx.var.server_port .. "/apisix/prometheus/metrics"
             local httpc = http.new()
             local res, err = httpc:request_uri(metrics_uri)
diff --git a/t/plugin/chaitin-waf-reject.t b/t/plugin/chaitin-waf-reject.t
index fcef07c48..97d2113af 100644
--- a/t/plugin/chaitin-waf-reject.t
+++ b/t/plugin/chaitin-waf-reject.t
@@ -74,6 +74,7 @@ __DATA__
             local code, body = t('/apisix/admin/plugin_metadata/chaitin-waf',
                 ngx.HTTP_PUT,
                 [[{
+                    "mode": "block",
                     "nodes": [
                         {
                             "host": "127.0.0.1",
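
A note on the new fail_on_bailout helper: in a `prove ... | tee /tmp/test.result` pipeline the step's exit status is tee's (normally zero) unless pipefail is enabled, which is presumably why the commit greps the teed output file rather than relying on prove's return code. Below is a minimal, self-contained sketch of the same pattern; the demo file name and the echoed reason text are illustrative only and not taken from this commit.

#!/usr/bin/env bash
# Sketch of the fail_on_bailout pattern added to ci/common.sh; demo values are made up.

fail_on_bailout() {
    local test_output_file="$1"
    # The marker below is the string this commit greps for in the CI test output.
    if grep -q "Bailout called. Further testing stopped:" "$test_output_file"; then
        echo "Error: Bailout detected in test output"
        exit 1
    fi
}

# Simulate a test run captured with tee; without `set -o pipefail` the pipeline
# exits with tee's status, so a bail-out alone would not fail this step.
echo "Bailout called. Further testing stopped: (reason)" | tee /tmp/demo.result

fail_on_bailout /tmp/demo.result   # exits 1 because the marker line is present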