From 9929f52a6f0b4949b4867d8963df3843f4bceb45 Mon Sep 17 00:00:00 2001 From: levy liu Date: Fri, 10 Mar 2023 11:55:31 +0800 Subject: [PATCH 1/9] change: change the default router from radixtree uri to radixtree host uri --- conf/config-default.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/conf/config-default.yaml b/conf/config-default.yaml index c043ce2de1ac..2765afe9c43f 100755 --- a/conf/config-default.yaml +++ b/conf/config-default.yaml @@ -75,7 +75,7 @@ apisix: # Turn this option on if you want to be compatible with servlet when matching URI path. normalize_uri_like_servlet: false router: - http: radixtree_uri # radixtree_uri: match route by uri(base on radixtree) + http: radixtree_host_uri # radixtree_uri: match route by uri(base on radixtree) # radixtree_host_uri: match route by host + uri(base on radixtree) # radixtree_uri_with_parameter: like radixtree_uri but match uri with parameters, # see https://github.com/api7/lua-resty-radixtree/#parameters-in-path for From fbd91a1d3251f9906e09e7a182f1950789e6cf19 Mon Sep 17 00:00:00 2001 From: levy liu Date: Fri, 10 Mar 2023 12:22:29 +0800 Subject: [PATCH 2/9] docs: add doc --- docs/en/latest/terminology/router.md | 10 ++++++++-- docs/zh/latest/terminology/router.md | 11 +++++++++-- 2 files changed, 17 insertions(+), 4 deletions(-) diff --git a/docs/en/latest/terminology/router.md b/docs/en/latest/terminology/router.md index 804514f1282f..eb077743446e 100644 --- a/docs/en/latest/terminology/router.md +++ b/docs/en/latest/terminology/router.md @@ -38,13 +38,19 @@ A Router can have the following configurations: - `apisix.router.http`: The HTTP request route. It can take the following values: - - `radixtree_uri`: (Default) Only use the `uri` as the primary index. To learn more about the support for full and deep prefix matching, check [How to use router-radixtree](../router-radixtree.md). + - `radixtree_uri`: Only use the `uri` as the primary index. To learn more about the support for full and deep prefix matching, check [How to use router-radixtree](../router-radixtree.md). - `Absolute match`: Match completely with the given `uri` (`/foo/bar`, `/foo/glo`). - `Prefix match`: Match with the given prefix. Use `*` to represent the given `uri` for prefix matching. For example, `/foo*` can match with `/foo/`, `/foo/a` and `/foo/b`. - `match priority`: First try an absolute match, if it didn't match, try prefix matching. - `Any filter attribute`: This allows you to specify any Nginx built-in variable as a filter, such as URL request parameters, request headers, and cookies. - `radixtree_uri_with_parameter`: Like `radixtree_uri` but also supports parameter match. - - `radixtree_host_uri`: Matches both host and URI of the request. Use `host + uri` as the primary index (based on the `radixtree` engine). + - `radixtree_host_uri`: (Default) Matches both host and URI of the request. Use `host + uri` as the primary index (based on the `radixtree` engine). + +:::note + +In version 3.2 and earlier, APISIX used `radixtree_uri` as the default Router. `radixtree_uri` has better performance than `radixtree_host_uri`, so if you have higher performance requirements and can live with the fact that `radixtree_uri` only matches uri, consider continuing to use `radixtree_uri` as the default Router. + +::: - `apisix.router.ssl`: SSL loads the matching route. - `radixtree_sni`: (Default) Use `SNI` (Server Name Indication) as the primary index (based on the radixtree engine). 
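The note added above tells users how to keep the previous behaviour. As a minimal sketch (key names taken from `conf/config-default.yaml` in the first patch of this series, applied through the usual `conf/config.yaml` override), that would look like:

```yaml
# Sketch: restore the pre-3.2 default router by overriding config-default.yaml.
# radixtree_uri matches routes by uri only and, per the note above, has better
# performance than radixtree_host_uri.
apisix:
  router:
    http: radixtree_uri
```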
diff --git a/docs/zh/latest/terminology/router.md b/docs/zh/latest/terminology/router.md index 3bfaf4256c52..f831ce1923f9 100644 --- a/docs/zh/latest/terminology/router.md +++ b/docs/zh/latest/terminology/router.md @@ -38,13 +38,20 @@ Router 具有以下配置: - `apisix.router.http`: HTTP 请求路由。 - - `radixtree_uri`:(默认)只使用 `uri` 作为主索引。基于 `radixtree` 引擎,支持全量和深前缀匹配,更多信息请参考[如何使用 router-radixtree](../../../en/latest/router-radixtree.md)。 + - `radixtree_uri`:只使用 `uri` 作为主索引。基于 `radixtree` 引擎,支持全量和深前缀匹配,更多信息请参考[如何使用 router-radixtree](../../../en/latest/router-radixtree.md)。 - `绝对匹配`:完整匹配给定的 `uri` ,比如 `/foo/bar`,`/foo/glo`。 - `前缀匹配`:末尾使用 `*` 代表给定的 `uri` 是前缀匹配。比如 `/foo*`,则允许匹配 `/foo/`、`/foo/a`和`/foo/b`等。 - `匹配优先级`:优先尝试绝对匹配,若无法命中绝对匹配,再尝试前缀匹配。 - `任意过滤属性`:允许指定任何 Nginx 内置变量作为过滤条件,比如 URL 请求参数、请求头、cookie 等。 - `radixtree_uri_with_parameter`:同 `radixtree_uri` 但额外有参数匹配的功能。 - - `radixtree_host_uri`:使用 `host + uri` 作为主索引(基于 `radixtree` 引擎),对当前请求会同时匹配 `host` 和 `uri`,支持的匹配条件与 `radixtree_uri` 基本一致。 + - `radixtree_host_uri`:(默认)使用 `host + uri` 作为主索引(基于 `radixtree` 引擎),对当前请求会同时匹配 `host` 和 `uri`,支持的匹配条件与 `radixtree_uri` 基本一致。 + +::: 注意 + +在3.2及之前版本,APISIX 使用 radixtree_uri 作为默认路由,radixtree_uri 比 radixtree_host_uri 拥有更好的性能,如果你对性能有更高的要求,并且能够接受 radixtree_uri 只匹配 uri 的特点,可以考虑继续使用 radixtree_uri 作为默认路由 + +::: + - `apisix.router.ssl`:SSL 加载匹配路由。 - `radixtree_sni`:(默认)使用 `SNI` (Server Name Indication) 作为主索引(基于 radixtree 引擎)。 From 22cf1cda4ef63705c2f7a490c745544764259e42 Mon Sep 17 00:00:00 2001 From: levy liu Date: Fri, 10 Mar 2023 14:31:27 +0800 Subject: [PATCH 3/9] test: fix test cases --- apisix/plugins/ai.lua | 1 + t/plugin/ai5.t | 16 ++++++++-------- 2 files changed, 9 insertions(+), 8 deletions(-) diff --git a/apisix/plugins/ai.lua b/apisix/plugins/ai.lua index 3195b4bae87f..b46249a9a190 100644 --- a/apisix/plugins/ai.lua +++ b/apisix/plugins/ai.lua @@ -163,6 +163,7 @@ local function routes_analyze(routes) local route_flags = core.table.new(0, 16) local route_up_flags = core.table.new(0, 12) + for _, route in ipairs(routes) do if type(route) == "table" then for key, value in pairs(route.value) do diff --git a/t/plugin/ai5.t b/t/plugin/ai5.t index 24282c4cbbfd..93ee8bbd3bd8 100644 --- a/t/plugin/ai5.t +++ b/t/plugin/ai5.t @@ -171,11 +171,11 @@ done qr/route match mode: \S[^,]+/ --- grep_error_log_out route match mode: ai_match -route match mode: radixtree_uri -route match mode: radixtree_uri -route match mode: radixtree_uri +route match mode: radixtree_host_uri +route match mode: radixtree_host_uri +route match mode: radixtree_host_uri route match mode: ai_match -route match mode: radixtree_uri +route match mode: radixtree_host_uri @@ -263,8 +263,8 @@ done --- grep_error_log eval qr/route match mode: \S[^,]+/ --- grep_error_log_out -route match mode: radixtree_uri -route match mode: radixtree_uri +route match mode: radixtree_host_uri +route match mode: radixtree_host_uri route match mode: ai_match -route match mode: radixtree_uri -route match mode: radixtree_uri +route match mode: radixtree_host_uri +route match mode: radixtree_host_uri From b49c85a80f25b35b550b74768250cb3053f8cabb Mon Sep 17 00:00:00 2001 From: levy liu Date: Fri, 10 Mar 2023 14:37:38 +0800 Subject: [PATCH 4/9] debug --- .github/workflows/build.yml | 33 +---- .github/workflows/centos7-ci.yml | 160 ------------------------ .github/workflows/chaos.yml | 88 -------------- .github/workflows/cli-master.yml | 58 --------- .github/workflows/cli.yml | 66 ---------- .github/workflows/code-lint.yml | 48 -------- .github/workflows/doc-lint.yml | 54 --------- 
.github/workflows/fips.yml | 152 ----------------------- .github/workflows/fuzzing-ci.yaml | 78 ------------ .github/workflows/gm-cron.yaml | 167 -------------------------- .github/workflows/gm.yml | 92 -------------- .github/workflows/kubernetes-ci.yml | 80 ------------ .github/workflows/license-checker.yml | 37 ------ .github/workflows/lint.yml | 46 ------- .github/workflows/performance.yml | 65 ---------- .github/workflows/semantic.yml | 35 ------ .github/workflows/stale.yml | 52 -------- .github/workflows/tars-ci.yml | 56 --------- 18 files changed, 6 insertions(+), 1361 deletions(-) delete mode 100644 .github/workflows/centos7-ci.yml delete mode 100644 .github/workflows/chaos.yml delete mode 100644 .github/workflows/cli-master.yml delete mode 100644 .github/workflows/cli.yml delete mode 100644 .github/workflows/code-lint.yml delete mode 100644 .github/workflows/doc-lint.yml delete mode 100644 .github/workflows/fips.yml delete mode 100644 .github/workflows/fuzzing-ci.yaml delete mode 100644 .github/workflows/gm-cron.yaml delete mode 100644 .github/workflows/gm.yml delete mode 100644 .github/workflows/kubernetes-ci.yml delete mode 100644 .github/workflows/license-checker.yml delete mode 100644 .github/workflows/lint.yml delete mode 100644 .github/workflows/performance.yml delete mode 100644 .github/workflows/semantic.yml delete mode 100644 .github/workflows/stale.yml delete mode 100644 .github/workflows/tars-ci.yml diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index b143a2607f16..df1852a52026 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -28,12 +28,8 @@ jobs: - ubuntu-20.04 os_name: - linux_openresty - - linux_openresty_1_19 test_dir: - - t/plugin/[a-k]* - - t/plugin/[l-z]* - - t/admin t/cli t/config-center-yaml t/control t/core t/debug t/deployment t/discovery t/error_page t/misc - - t/node t/pubsub t/router t/script t/secret t/stream-node t/utils t/wasm t/xds-library t/xrpc + - t/plugin/a* runs-on: ${{ matrix.platform }} timeout-minutes: 90 @@ -129,28 +125,6 @@ jobs: make ci-env-up project_compose_ci=ci/pod/docker-compose.${{ steps.test_env.outputs.type }}.yml [[ ${{ steps.test_env.outputs.type }} != first ]] && sudo ./ci/init-${{ steps.test_env.outputs.type }}-test-service.sh after echo "Linux launch services, done." - - name: Start Dubbo Backend - if: matrix.os_name == 'linux_openresty' && steps.test_env.outputs.type == 'plugin' - run: | - sudo apt install -y maven - cd t/lib/dubbo-backend - mvn package - cd dubbo-backend-provider/target - java -Djava.net.preferIPv4Stack=true -jar dubbo-demo-provider.one-jar.jar > /tmp/java.log & - - - name: Build xDS library - if: steps.test_env.outputs.type == 'last' - run: | - cd t/xds-library - go build -o libxds.so -buildmode=c-shared main.go export.go - - - name: Build wasm code - if: matrix.os_name == 'linux_openresty' && steps.test_env.outputs.type == 'last' - run: | - export TINYGO_VER=0.20.0 - wget https://github.com/tinygo-org/tinygo/releases/download/v${TINYGO_VER}/tinygo_${TINYGO_VER}_amd64.deb 2>/dev/null - sudo dpkg -i tinygo_${TINYGO_VER}_amd64.deb - cd t/wasm && find . 
-type f -name "*.go" | xargs -Ip tinygo build -o p.wasm -scheduler=none -target=wasi p - name: Linux Before install run: sudo ./ci/${{ matrix.os_name }}_runner.sh before_install @@ -173,3 +147,8 @@ jobs: echo "start backing up, $(date)" bash ./ci/backup-docker-images.sh ${{ steps.test_env.outputs.type }} echo "backup done, $(date)" + + - name: Run debugger + if: ${{ failure() }} + uses: mxschmitt/action-tmate@v3 + timeout-minutes: 15 diff --git a/.github/workflows/centos7-ci.yml b/.github/workflows/centos7-ci.yml deleted file mode 100644 index 60ea0a15db7c..000000000000 --- a/.github/workflows/centos7-ci.yml +++ /dev/null @@ -1,160 +0,0 @@ -name: CI Centos7 - -on: - push: - branches: [master, 'release/**'] - paths-ignore: - - 'docs/**' - - '**/*.md' - pull_request: - branches: [master, 'release/**'] - paths-ignore: - - 'docs/**' - - '**/*.md' - -concurrency: - group: ${{ github.workflow }}-${{ github.ref == 'refs/heads/master' && github.run_number || github.ref }} - cancel-in-progress: true - -permissions: - contents: read - -jobs: - test_apisix: - name: run ci on centos7 - runs-on: ubuntu-20.04 - timeout-minutes: 90 - strategy: - fail-fast: false - matrix: - test_dir: - - t/plugin/[a-k]* - - t/plugin/[l-z]* - - t/admin t/cli t/config-center-yaml t/control t/core t/debug t/deployment t/discovery t/error_page t/misc - - t/node t/pubsub t/router t/script t/secret t/stream-node t/utils t/wasm t/xds-library - - steps: - - name: Check out code - uses: actions/checkout@v3.2.0 - with: - submodules: recursive - - - name: Cache deps - uses: actions/cache@v3 - env: - cache-name: cache-deps - with: - path: deps - key: ${{ runner.os }}-${{ env.cache-name }}-centos7-${{ hashFiles('rockspec/apisix-master-0.rockspec') }} - - - name: Extract branch name - if: ${{ startsWith(github.ref, 'refs/heads/release/') }} - id: branch_env - shell: bash - run: | - echo "version=${GITHUB_REF##*/}" >>$GITHUB_OUTPUT - - - name: Extract test type - shell: bash - id: test_env - run: | - test_dir="${{ matrix.test_dir }}" - if [[ $test_dir =~ 't/plugin' ]]; then - echo "type=plugin" >>$GITHUB_OUTPUT - fi - if [[ $test_dir =~ 't/admin ' ]]; then - echo "type=first" >>$GITHUB_OUTPUT - fi - if [[ $test_dir =~ ' t/xds-library' ]]; then - echo "type=last" >>$GITHUB_OUTPUT - fi - - - name: Linux launch common services - run: | - make ci-env-up project_compose_ci=ci/pod/docker-compose.common.yml - sudo ./ci/init-common-test-service.sh - - - name: Build rpm package - if: ${{ startsWith(github.ref, 'refs/heads/release/') }} - run: | - export VERSION=${{ steps.branch_env.outputs.version }} - sudo gem install --no-document fpm - git clone --depth 1 https://github.com/api7/apisix-build-tools.git - - # move codes under build tool - mkdir ./apisix-build-tools/apisix - for dir in `ls|grep -v "^apisix-build-tools$"`;do cp -r $dir ./apisix-build-tools/apisix/;done - - cd apisix-build-tools - make package type=rpm app=apisix version=${VERSION} checkout=release/${VERSION} image_base=centos image_tag=7 local_code_path=./apisix - cd .. 
- rm -rf $(ls -1 --ignore=apisix-build-tools --ignore=t --ignore=utils --ignore=ci --ignore=Makefile --ignore=rockspec) - - - name: Build xDS library - if: steps.test_env.outputs.type == 'last' - run: | - cd t/xds-library - go build -o libxds.so -buildmode=c-shared main.go export.go - - - name: Run centos7 docker and mapping apisix into container - env: - TEST_FILE_SUB_DIR: ${{ matrix.test_dir }} - run: | - docker run -itd -v ${{ github.workspace }}:/apisix --env TEST_FILE_SUB_DIR="$TEST_FILE_SUB_DIR" --name centos7Instance --net="host" --dns 8.8.8.8 --dns-search apache.org docker.io/centos:7 /bin/bash - # docker exec centos7Instance bash -c "cp -r /tmp/apisix ./" - - - name: Cache images - id: cache-images - uses: actions/cache@v3 - env: - cache-name: cache-apisix-docker-images - with: - path: docker-images-backup - key: ${{ runner.os }}-${{ env.cache-name }}-${{ steps.test_env.outputs.type }}-${{ hashFiles(format('./ci/pod/docker-compose.{0}.yml', steps.test_env.outputs.type )) }} - - - if: ${{ steps.cache-images.outputs.cache-hit == 'true' }} - name: Load saved docker images - run: | - if [[ -f docker-images-backup/apisix-images.tar ]]; then - [[ ${{ steps.test_env.outputs.type }} != first ]] && sudo ./ci/init-${{ steps.test_env.outputs.type }}-test-service.sh before - docker load --input docker-images-backup/apisix-images.tar - rm docker-images-backup/apisix-images.tar - make ci-env-up project_compose_ci=ci/pod/docker-compose.${{ steps.test_env.outputs.type }}.yml - echo "loaded docker images" - if [[ ${{ steps.test_env.outputs.type }} != first ]]; then - sudo ./ci/init-${{ steps.test_env.outputs.type }}-test-service.sh after - fi - fi - - if: ${{ steps.cache-images.outputs.cache-hit != 'true' }} - name: Linux launch services - run: | - [[ ${{ steps.test_env.outputs.type }} != first ]] && sudo ./ci/init-${{ steps.test_env.outputs.type }}-test-service.sh before - [[ ${{ steps.test_env.outputs.type }} == plugin ]] && ./ci/pod/openfunction/build-function-image.sh - make ci-env-up project_compose_ci=ci/pod/docker-compose.${{ steps.test_env.outputs.type }}.yml - [[ ${{ steps.test_env.outputs.type }} != first ]] && sudo ./ci/init-${{ steps.test_env.outputs.type }}-test-service.sh after - echo "Linux launch services, done." - - - name: Install dependencies - run: | - docker exec centos7Instance bash -c "cd apisix && ./ci/centos7-ci.sh install_dependencies" - - - name: Install rpm package - if: ${{ startsWith(github.ref, 'refs/heads/release/') }} - run: | - docker exec centos7Instance bash -c "cd apisix && rpm -iv --prefix=/apisix ./apisix-build-tools/output/apisix-${{ steps.branch_env.outputs.version }}-0.el7.x86_64.rpm" - # Dependencies are attached with rpm, so revert `make deps` - docker exec centos7Instance bash -c "cd apisix && rm -rf deps" - docker exec centos7Instance bash -c "cd apisix && mv usr/bin . && mv usr/local/apisix/* ." 
- - - name: Run test cases - run: | - docker exec centos7Instance bash -c "cd apisix && ./ci/centos7-ci.sh run_case" - - - if: ${{ steps.cache-images.outputs.cache-hit != 'true' }} - name: Save docker images - run: | - # free disk space - bash ./ci/free_disk_space.sh - echo "start backing up, $(date)" - bash ./ci/backup-docker-images.sh ${{ steps.test_env.outputs.type }} - echo "backup done, $(date)" diff --git a/.github/workflows/chaos.yml b/.github/workflows/chaos.yml deleted file mode 100644 index 1ea77618000f..000000000000 --- a/.github/workflows/chaos.yml +++ /dev/null @@ -1,88 +0,0 @@ -name: Chaos Test - -on: - pull_request: - branches: [master, 'release/**'] - paths-ignore: - - 'docs/**' - - '**/*.md' - -concurrency: - group: ${{ github.workflow }}-${{ github.ref == 'refs/heads/master' && github.run_number || github.ref }} - cancel-in-progress: true - -permissions: - contents: read - -jobs: - chaos-test: - runs-on: ubuntu-latest - timeout-minutes: 35 - steps: - - uses: actions/checkout@v3.2.0 - with: - submodules: recursive - - - name: Setup go - uses: actions/setup-go@v3 - with: - go-version: "1.17" - - - uses: actions/cache@v3 - with: - path: | - ~/.cache/go-build - ~/go/pkg/mod - key: ${{ runner.os }}-go-${{ hashFiles('**/go.sum') }} - restore-keys: | - ${{ runner.os }}-go- - - - name: Creating minikube cluster - run: | - bash ./t/chaos/utils/setup_chaos_utils.sh start_minikube - mkdir logs - docker build -t apache/apisix:alpine-local --build-arg APISIX_PATH=. -f ./t/chaos/utils/Dockerfile . - minikube cache add apache/apisix:alpine-local -v 7 --alsologtostderr - - - name: Print cluster information - run: | - kubectl config view - kubectl cluster-info - kubectl get nodes - kubectl get pods -n kube-system - kubectl version - - - name: Deploy etcd with Helm - run: | - curl https://raw.githubusercontent.com/helm/helm/master/scripts/get-helm-3 | bash - helm repo add bitnami https://charts.bitnami.com/bitnami - helm install etcd bitnami/etcd --set replicaCount=3 --set auth.rbac.create=false - kubectl wait pods -l app.kubernetes.io/instance=etcd --for=condition=Ready --timeout=300s --all - - - - name: Deploy APISIX - run: | - bash ./t/chaos/utils/setup_chaos_utils.sh modify_config - kubectl create configmap apisix-gw-config.yaml --from-file=./conf/config.yaml - kubectl apply -f ./t/chaos/kubernetes/deployment.yaml - kubectl apply -f ./t/chaos/kubernetes/service.yaml - kubectl wait pods -l app=apisix-gw --for=condition=Ready --timeout=300s \ - || (kubectl logs -l app=apisix-gw && exit 1) - kubectl apply -f https://raw.githubusercontent.com/istio/istio/master/samples/httpbin/httpbin.yaml - kubectl wait pods -l app=httpbin --for=condition=Ready --timeout=300s \ - || (kubectl logs -l app=httpbin && exit 1) - bash ./t/chaos/utils/setup_chaos_utils.sh port_forward - - - name: Deploy Chaos Mesh - run: | - curl -sSL https://mirrors.chaos-mesh.org/v2.0.1/install.sh | bash - - - - name: Install Ginkgo - run: | - go get -u github.com/onsi/ginkgo/ginkgo - sudo cp ~/go/bin/ginkgo /usr/local/bin - - - name: Run test - working-directory: ./t/chaos - run: ginkgo -r --v --progress --trace diff --git a/.github/workflows/cli-master.yml b/.github/workflows/cli-master.yml deleted file mode 100644 index dd77dcd1537c..000000000000 --- a/.github/workflows/cli-master.yml +++ /dev/null @@ -1,58 +0,0 @@ -name: CLI Test (master) - -on: - push: - branches: [master] - paths-ignore: - - 'docs/**' - - '**/*.md' - pull_request: - branches: [master] - paths-ignore: - - 'docs/**' - - '**/*.md' - -concurrency: - group: 
${{ github.workflow }}-${{ github.ref == 'refs/heads/master' && github.run_number || github.ref }} - cancel-in-progress: true - -permissions: - contents: read - -jobs: - build: - strategy: - fail-fast: false - matrix: - job_name: - - linux_apisix_master_luarocks - runs-on: ubuntu-20.04 - timeout-minutes: 15 - env: - OPENRESTY_VERSION: default - - steps: - - name: Check out code - uses: actions/checkout@v3.2.0 - with: - submodules: recursive - - - name: Cache deps - uses: actions/cache@v3 - env: - cache-name: cache-deps - with: - path: deps - key: ${{ runner.os }}-${{ env.cache-name }}-${{ matrix.job_name }}-${{ hashFiles('rockspec/apisix-master-0.rockspec') }} - - - name: Linux launch common services - run: | - project_compose_ci=ci/pod/docker-compose.common.yml make ci-env-up - - - name: Linux Install - run: | - sudo --preserve-env=OPENRESTY_VERSION \ - ./ci/${{ matrix.job_name }}_runner.sh do_install - - - name: Linux Script - run: sudo ./ci/${{ matrix.job_name }}_runner.sh script diff --git a/.github/workflows/cli.yml b/.github/workflows/cli.yml deleted file mode 100644 index 2f76670a0611..000000000000 --- a/.github/workflows/cli.yml +++ /dev/null @@ -1,66 +0,0 @@ -name: CLI Test - -on: - push: - branches: [master, 'release/**'] - paths-ignore: - - 'docs/**' - - '**/*.md' - pull_request: - branches: [master, 'release/**'] - paths-ignore: - - 'docs/**' - - '**/*.md' - -concurrency: - group: ${{ github.workflow }}-${{ github.ref == 'refs/heads/master' && github.run_number || github.ref }} - cancel-in-progress: true - -permissions: - contents: read - -jobs: - build: - strategy: - fail-fast: false - matrix: - platform: - - ubuntu-20.04 - job_name: - - linux_apisix_current_luarocks - - linux_apisix_current_luarocks_in_customed_nginx - - runs-on: ${{ matrix.platform }} - timeout-minutes: 15 - env: - SERVER_NAME: ${{ matrix.job_name }} - OPENRESTY_VERSION: default - - steps: - - name: Check out code - uses: actions/checkout@v3.2.0 - with: - submodules: recursive - - - name: Cache deps - uses: actions/cache@v3 - env: - cache-name: cache-deps - with: - path: deps - key: ${{ runner.os }}-${{ env.cache-name }}-${{ matrix.job_name }}-${{ hashFiles('rockspec/apisix-master-0.rockspec') }} - - - name: Linux launch common services - run: | - project_compose_ci=ci/pod/docker-compose.common.yml make ci-env-up - - - name: Linux Before install - run: sudo ./ci/${{ matrix.job_name }}_runner.sh before_install - - - name: Linux Install - run: | - sudo --preserve-env=OPENRESTY_VERSION \ - ./ci/${{ matrix.job_name }}_runner.sh do_install - - - name: Linux Script - run: sudo ./ci/${{ matrix.job_name }}_runner.sh script diff --git a/.github/workflows/code-lint.yml b/.github/workflows/code-lint.yml deleted file mode 100644 index 07a1807f811e..000000000000 --- a/.github/workflows/code-lint.yml +++ /dev/null @@ -1,48 +0,0 @@ -name: Code Lint - -on: - pull_request: - branches: [master, 'release/**'] - paths-ignore: - - 'docs/**' - - '**/*.md' - -permissions: - contents: read - -jobs: - lint: - runs-on: ubuntu-latest - timeout-minutes: 10 - steps: - - uses: actions/checkout@v3.2.0 - - name: Install - run: | - . ./ci/common.sh - export_or_prefix - export OPENRESTY_VERSION=default - - ./ci/linux-install-openresty.sh - ./utils/linux-install-luarocks.sh - sudo luarocks install luacheck - - - name: Script - run: | - . 
./ci/common.sh - export_or_prefix - make lint - - sc-lint: - runs-on: ubuntu-latest - timeout-minutes: 5 - steps: - - name: Checkout code - uses: actions/checkout@v3.2.0 - - - name: Shellcheck code - run: | - scversion="latest" - wget -qO- "https://github.com/koalaman/shellcheck/releases/download/${scversion?}/shellcheck-${scversion?}.linux.x86_64.tar.xz" | tar -xJv - cp -av "shellcheck-${scversion}/shellcheck" /usr/local/bin/ - shellcheck --version - git ls-files -- "*.sh" | xargs -t shellcheck diff --git a/.github/workflows/doc-lint.yml b/.github/workflows/doc-lint.yml deleted file mode 100644 index 7a8b19904def..000000000000 --- a/.github/workflows/doc-lint.yml +++ /dev/null @@ -1,54 +0,0 @@ -name: Doc Lint - -on: - push: - paths: - - "docs/**" - - "**/*.md" - pull_request: - branches: [master, "release/**"] - paths: - - "docs/**" - - "**/*.md" - -permissions: - contents: read - -jobs: - markdownlint: - name: 🍇 Markdown - runs-on: ubuntu-latest - timeout-minutes: 1 - steps: - - uses: actions/checkout@v3.2.0 - - name: 🚀 Use Node.js - uses: actions/setup-node@v3.5.1 - with: - node-version: "12.x" - - run: npm install -g markdownlint-cli@0.25.0 - - run: markdownlint '**/*.md' - - name: check category - run: | - ./utils/check-category.py - - name: check Chinese doc - run: | - sudo pip3 install zhon - ./utils/fix-zh-doc-segment.py > \ - /tmp/check.log 2>&1 || (cat /tmp/check.log && exit 1) - if grep "find broken newline in file: " /tmp/check.log; then - cat /tmp/check.log - echo "Newline can't appear in the middle of Chinese sentences." - echo "You need to run ./utils/fix-zh-doc-segment.py to fix them." - exit 1 - fi - - Chinse-Copywriting-lint: - name: Chinese Copywriting - runs-on: ubuntu-latest - timeout-minutes: 1 - steps: - - uses: actions/checkout@v3 - - name: Check Chinese copywriting - uses: huacnlee/autocorrect-action@v2.6.2 - with: - args: --lint --no-diff-bg-color docs/zh/latest/**.md diff --git a/.github/workflows/fips.yml b/.github/workflows/fips.yml deleted file mode 100644 index 854417503cfd..000000000000 --- a/.github/workflows/fips.yml +++ /dev/null @@ -1,152 +0,0 @@ -name: CI FIPS - -on: - push: - branches: [master] - paths-ignore: - - 'docs/**' - - '**/*.md' - pull_request: - branches: [master] - paths-ignore: - - 'docs/**' - - '**/*.md' - -permissions: - contents: read - -jobs: - build: - strategy: - fail-fast: false - matrix: - platform: - - ubuntu-20.04 - os_name: - - linux_openresty - test_dir: - # all plugins only use three parts of openssl API: RSA via ffi, SHA via ffi and SSL API wrapped by nginx. - # The latter one is already covered by the core tests, so no need to repeat it in plugin tests. - # The RSA and SHA tests are fully covered by jwt-auth and hmac-auth plugin tests, while other plugins only repeat such tests. - - t/plugin/jwt-auth2.t t/plugin/jwt-auth.t t/plugin/hmac-auth.t - # all SSL related core tests are covered by below two lists. 
- - t/admin/ssl* t/admin/schema.t t/admin/upstream.t t/config-center-yaml/ssl.t t/core/etcd-mtls.t t/core/config_etcd.t t/deployment/conf_server.t t/misc/patch.t - - t/node/grpc-proxy-unary.t t/node/upstream-keepalive-pool.t t/node/upstream-websocket.t t/node/client-mtls.t t/node/upstream-mtls.t t/pubsub/kafka.t t/router/radixtree-sni2.t t/router/multi-ssl-certs.t t/router/radixtree-sni.t t/stream-node/mtls.t t/stream-node/tls.t t/stream-node/upstream-tls.t t/stream-node/sni.t - - runs-on: ${{ matrix.platform }} - timeout-minutes: 90 - env: - SERVER_NAME: ${{ matrix.os_name }} - OPENRESTY_VERSION: default - - steps: - - name: Check out code - uses: actions/checkout@v3.2.0 - with: - submodules: recursive - - - name: Cache deps - uses: actions/cache@v3 - env: - cache-name: cache-deps - with: - path: deps - key: ${{ runner.os }}-${{ env.cache-name }}-${{ matrix.os_name }}-${{ hashFiles('rockspec/apisix-master-0.rockspec') }} - - - name: Cache openssl-3.0 compilation - id: cache-openssl - uses: actions/cache@v3 - env: - cache-name: cache-openssl - with: - path: ~/openssl-3.0 - key: ${{ runner.os }}-${{ env.cache-name }}-${{ matrix.os_name }} - - - name: Toggle openssl compile - id: test_ssl_env - shell: bash - if: steps.cache-openssl.outputs.cache-hit != 'true' - run: | - echo "openssl3=yes" >>$GITHUB_OUTPUT - echo "openssl3_prefix=$HOME" >>$GITHUB_OUTPUT - - - name: Extract test type - shell: bash - id: test_env - run: | - test_dir="${{ matrix.test_dir }}" - if [[ $test_dir =~ 't/plugin' ]]; then - echo "type=plugin" >>$GITHUB_OUTPUT - fi - if [[ $test_dir =~ 't/admin' ]]; then - echo "type=first" >>$GITHUB_OUTPUT - fi - if [[ $test_dir =~ 't/node' ]]; then - echo "type=last" >>$GITHUB_OUTPUT - fi - - - name: Linux launch common services - run: | - make ci-env-up project_compose_ci=ci/pod/docker-compose.common.yml - - - name: Cache images - id: cache-images - uses: actions/cache@v3 - env: - cache-name: cache-apisix-docker-images - with: - path: docker-images-backup - key: ${{ runner.os }}-${{ env.cache-name }}-${{ steps.test_env.outputs.type }}-${{ hashFiles(format('./ci/pod/docker-compose.{0}.yml', steps.test_env.outputs.type )) }} - - - if: ${{ steps.cache-images.outputs.cache-hit == 'true' }} - name: Load saved docker images - run: | - if [[ -f docker-images-backup/apisix-images.tar ]]; then - [[ ${{ steps.test_env.outputs.type }} != first ]] && sudo ./ci/init-${{ steps.test_env.outputs.type }}-test-service.sh before - docker load --input docker-images-backup/apisix-images.tar - rm docker-images-backup/apisix-images.tar - make ci-env-up project_compose_ci=ci/pod/docker-compose.${{ steps.test_env.outputs.type }}.yml - echo "loaded docker images" - if [[ ${{ steps.test_env.outputs.type }} != first ]]; then - sudo ./ci/init-${{ steps.test_env.outputs.type }}-test-service.sh after - fi - fi - - if: ${{ steps.cache-images.outputs.cache-hit != 'true' }} - name: Linux launch services - run: | - [[ ${{ steps.test_env.outputs.type }} != first ]] && sudo ./ci/init-${{ steps.test_env.outputs.type }}-test-service.sh before - [[ ${{ steps.test_env.outputs.type }} == plugin ]] && ./ci/pod/openfunction/build-function-image.sh - make ci-env-up project_compose_ci=ci/pod/docker-compose.${{ steps.test_env.outputs.type }}.yml - echo "make ci-env-up, done" - [[ ${{ steps.test_env.outputs.type }} != first ]] && sudo ./ci/init-${{ steps.test_env.outputs.type }}-test-service.sh after - echo "Linux launch services, done" - - - name: Linux Before install - run: sudo ./ci/${{ matrix.os_name }}_runner.sh 
before_install - - - name: Linux Install - env: - COMPILE_OPENSSL3: ${{ steps.test_ssl_env.outputs.openssl3 }} - OPENSSL3_PREFIX: ${{ steps.test_ssl_env.outputs.openssl3_prefix }} - USE_OPENSSL3: yes - run: | - sudo --preserve-env=OPENRESTY_VERSION \ - --preserve-env=COMPILE_OPENSSL3 \ - --preserve-env=OPENSSL3_PREFIX \ - --preserve-env=USE_OPENSSL3 \ - ./ci/${{ matrix.os_name }}_runner.sh do_install - - - name: Linux Script - env: - OPENSSL_FIPS: yes - TEST_FILE_SUB_DIR: ${{ matrix.test_dir }} - run: sudo -E ./ci/${{ matrix.os_name }}_runner.sh script - - - if: ${{ steps.cache-images.outputs.cache-hit != 'true' }} - name: Save docker images - run: | - # free disk space - bash ./ci/free_disk_space.sh - echo "start backing up, $(date)" - bash ./ci/backup-docker-images.sh ${{ steps.test_env.outputs.type }} - echo "backup done, $(date)" diff --git a/.github/workflows/fuzzing-ci.yaml b/.github/workflows/fuzzing-ci.yaml deleted file mode 100644 index ec3701532d77..000000000000 --- a/.github/workflows/fuzzing-ci.yaml +++ /dev/null @@ -1,78 +0,0 @@ -name: fuzzing - -on: - push: - branches: [master, 'release/**'] - paths-ignore: - - 'docs/**' - - '**/*.md' - pull_request: - branches: [master, 'release/**'] - paths-ignore: - - 'docs/**' - - '**/*.md' - -concurrency: - group: ${{ github.workflow }}-${{ github.ref == 'refs/heads/master' && github.run_number || github.ref }} - cancel-in-progress: true - -permissions: - contents: read - -jobs: - test_apisix: - name: run fuzzing - runs-on: ubuntu-latest - timeout-minutes: 30 - - steps: - - name: Check out code - uses: actions/checkout@v3.2.0 - with: - submodules: recursive - - - name: Cache deps - uses: actions/cache@v3 - env: - cache-name: cache-deps - with: - path: deps - key: ${{ runner.os }}-${{ env.cache-name }}-${{ hashFiles('rockspec/apisix-master-0.rockspec') }} - - - name: Linux launch common services - run: | - project_compose_ci=ci/pod/docker-compose.common.yml make ci-env-up - - - name: run apisix - run: | - wget -qO - https://openresty.org/package/pubkey.gpg | sudo apt-key add - - sudo apt-get update - sudo apt-get -y install software-properties-common - sudo add-apt-repository -y "deb http://openresty.org/package/ubuntu $(lsb_release -sc) main" - sudo apt-get update - sudo apt-get install -y git openresty curl openresty-openssl111-dev unzip make gcc libldap2-dev - ./utils/linux-install-luarocks.sh - - make deps - make init - make run - - - name: run upstream - run: | - sudo openresty -c $PWD/t/fuzzing/upstream/nginx.conf - - - name: install boofuzz - run: | - # Avoid "ERROR: flask has requirement click>=8.0, but you'll have click 7.0 which is incompatible" - sudo apt remove python3-click - pip install -r $PWD/t/fuzzing/requirements.txt - - - name: run tests - run: | - export APISIX_FUZZING_PWD=$PWD - python $PWD/t/fuzzing/simpleroute_test.py - python $PWD/t/fuzzing/serverless_route_test.py - python $PWD/t/fuzzing/vars_route_test.py - python $PWD/t/fuzzing/client_abort.py - python $PWD/t/fuzzing/simple_http.py - python $PWD/t/fuzzing/http_upstream.py diff --git a/.github/workflows/gm-cron.yaml b/.github/workflows/gm-cron.yaml deleted file mode 100644 index 9acb63dbbd59..000000000000 --- a/.github/workflows/gm-cron.yaml +++ /dev/null @@ -1,167 +0,0 @@ -name: CI GM (cron) - -on: - schedule: - # UTC 7:30 every Friday - - cron: "30 7 * * 5" - -permissions: - contents: read - -jobs: - build: - strategy: - fail-fast: false - matrix: - platform: - - ubuntu-20.04 - os_name: - - linux_openresty_tongsuo - test_dir: - - t/plugin/[a-k]* - - 
t/plugin/[l-z]* - - t/admin t/cli t/config-center-yaml t/control t/core t/debug t/deployment t/discovery t/error_page t/misc - - t/node t/pubsub t/router t/script t/stream-node t/utils t/wasm t/xds-library t/xrpc - - runs-on: ${{ matrix.platform }} - timeout-minutes: 90 - env: - SERVER_NAME: ${{ matrix.os_name }} - OPENRESTY_VERSION: default - - # TODO: refactor the workflows to reduce duplicate parts. Maybe we can write them in shell - # scripts or a separate action? - steps: - - name: Check out code - uses: actions/checkout@v3.2.0 - with: - submodules: recursive - - - name: Setup Go - uses: actions/setup-go@v3 - with: - go-version: "1.17" - - - name: Cache deps - uses: actions/cache@v3 - env: - cache-name: cache-deps - with: - path: deps - key: ${{ runner.os }}-${{ env.cache-name }}-${{ matrix.os_name }}-${{ hashFiles('rockspec/apisix-master-0.rockspec') }} - - - name: Cache Tongsuo compilation - id: cache-tongsuo - uses: actions/cache@v3 - env: - cache-name: cache-tongsuo - with: - path: ./tongsuo - key: ${{ runner.os }}-${{ env.cache-name }}-${{ matrix.os_name }}-tongsuo-ver - - - name: Test SSL Env - id: test_ssl_env - shell: bash - if: steps.cache-tongsuo.outputs.cache-hit != 'true' - run: | - echo "compile_tongsuo=true" >>$GITHUB_OUTPUT - - - name: Extract test type - shell: bash - id: test_env - run: | - test_dir="${{ matrix.test_dir }}" - if [[ $test_dir =~ 't/plugin' ]]; then - echo "type=plugin" >>$GITHUB_OUTPUT - fi - if [[ $test_dir =~ 't/admin ' ]]; then - echo "type=first" >>$GITHUB_OUTPUT - fi - if [[ $test_dir =~ ' t/xrpc' ]]; then - echo "type=last" >>$GITHUB_OUTPUT - fi - - - name: Linux launch common services - run: | - make ci-env-up project_compose_ci=ci/pod/docker-compose.common.yml - sudo ./ci/init-common-test-service.sh - - - name: Cache images - id: cache-images - uses: actions/cache@v3 - env: - cache-name: cache-apisix-docker-images - with: - path: docker-images-backup - key: ${{ runner.os }}-${{ env.cache-name }}-${{ steps.test_env.outputs.type }}-${{ hashFiles(format('./ci/pod/docker-compose.{0}.yml', steps.test_env.outputs.type )) }} - - - if: ${{ steps.cache-images.outputs.cache-hit == 'true' }} - name: Load saved docker images - run: | - if [[ -f docker-images-backup/apisix-images.tar ]]; then - [[ ${{ steps.test_env.outputs.type }} != first ]] && sudo ./ci/init-${{ steps.test_env.outputs.type }}-test-service.sh before - docker load --input docker-images-backup/apisix-images.tar - rm docker-images-backup/apisix-images.tar - make ci-env-up project_compose_ci=ci/pod/docker-compose.${{ steps.test_env.outputs.type }}.yml - echo "loaded docker images" - if [[ ${{ steps.test_env.outputs.type }} != first ]]; then - sudo ./ci/init-${{ steps.test_env.outputs.type }}-test-service.sh after - fi - fi - - if: ${{ steps.cache-images.outputs.cache-hit != 'true' }} - name: Linux launch services - run: | - [[ ${{ steps.test_env.outputs.type }} != first ]] && sudo ./ci/init-${{ steps.test_env.outputs.type }}-test-service.sh before - [[ ${{ steps.test_env.outputs.type }} == plugin ]] && ./ci/pod/openfunction/build-function-image.sh - make ci-env-up project_compose_ci=ci/pod/docker-compose.${{ steps.test_env.outputs.type }}.yml - [[ ${{ steps.test_env.outputs.type }} != first ]] && sudo ./ci/init-${{ steps.test_env.outputs.type }}-test-service.sh after - echo "Linux launch services, done." 
- - - name: Start Dubbo Backend - if: steps.test_env.outputs.type == 'plugin' - run: | - sudo apt install -y maven - cd t/lib/dubbo-backend - mvn package - cd dubbo-backend-provider/target - java -Djava.net.preferIPv4Stack=true -jar dubbo-demo-provider.one-jar.jar > /tmp/java.log & - - - name: Build xDS library - if: steps.test_env.outputs.type == 'last' - run: | - cd t/xds-library - go build -o libxds.so -buildmode=c-shared main.go export.go - - - name: Build wasm code - if: steps.test_env.outputs.type == 'last' - run: | - export TINYGO_VER=0.20.0 - wget https://github.com/tinygo-org/tinygo/releases/download/v${TINYGO_VER}/tinygo_${TINYGO_VER}_amd64.deb 2>/dev/null - sudo dpkg -i tinygo_${TINYGO_VER}_amd64.deb - cd t/wasm && find . -type f -name "*.go" | xargs -Ip tinygo build -o p.wasm -scheduler=none -target=wasi p - - - name: Linux Before install - env: - COMPILE_TONGSUO: ${{ steps.test_ssl_env.outputs.compile_tongsuo }} - run: | - sudo --preserve-env=COMPILE_TONGSUO \ - ./ci/${{ matrix.os_name }}_runner.sh before_install - - - name: Linux Install - run: | - sudo --preserve-env=OPENRESTY_VERSION \ - ./ci/${{ matrix.os_name }}_runner.sh do_install - - - name: Linux Script - env: - TEST_FILE_SUB_DIR: ${{ matrix.test_dir }} - run: sudo -E ./ci/${{ matrix.os_name }}_runner.sh script - - - if: ${{ steps.cache-images.outputs.cache-hit != 'true' }} - name: Save docker images - run: | - # free disk space - bash ./ci/free_disk_space.sh - echo "start backing up, $(date)" - bash ./ci/backup-docker-images.sh ${{ steps.test_env.outputs.type }} - echo "backup done, $(date)" diff --git a/.github/workflows/gm.yml b/.github/workflows/gm.yml deleted file mode 100644 index 06663006e942..000000000000 --- a/.github/workflows/gm.yml +++ /dev/null @@ -1,92 +0,0 @@ -name: CI GM - -on: - push: - branches: [master] - paths-ignore: - - 'docs/**' - - '**/*.md' - pull_request: - branches: [master] - paths-ignore: - - 'docs/**' - - '**/*.md' - -concurrency: - group: ${{ github.workflow }}-${{ github.ref == 'refs/heads/master' && github.run_number || github.ref }} - cancel-in-progress: true - -permissions: - contents: read - -jobs: - build: - strategy: - fail-fast: false - matrix: - platform: - - ubuntu-20.04 - os_name: - - linux_openresty_tongsuo - test_dir: - - t/gm - - runs-on: ${{ matrix.platform }} - timeout-minutes: 90 - env: - SERVER_NAME: ${{ matrix.os_name }} - OPENRESTY_VERSION: default - - steps: - - name: Check out code - uses: actions/checkout@v3.2.0 - with: - submodules: recursive - - - name: Cache deps - uses: actions/cache@v3 - env: - cache-name: cache-deps - with: - path: deps - key: ${{ runner.os }}-${{ env.cache-name }}-${{ matrix.os_name }}-${{ hashFiles('rockspec/apisix-master-0.rockspec') }} - - - name: Cache Tongsuo compilation - id: cache-tongsuo - uses: actions/cache@v3 - env: - cache-name: cache-tongsuo - with: - path: ./tongsuo - # TODO: use a fixed release once they have created one. 
- # See https://github.com/Tongsuo-Project/Tongsuo/issues/318 - key: ${{ runner.os }}-${{ env.cache-name }}-${{ matrix.os_name }}-tongsuo-ver - - - name: Test SSL Env - id: test_ssl_env - shell: bash - if: steps.cache-tongsuo.outputs.cache-hit != 'true' - run: | - echo "compile_tongsuo=true" >>$GITHUB_OUTPUT - - - name: Linux launch common services - run: | - make ci-env-up project_compose_ci=ci/pod/docker-compose.common.yml - - - name: Linux Before install - env: - COMPILE_TONGSUO: ${{ steps.test_ssl_env.outputs.compile_tongsuo }} - run: | - sudo --preserve-env=COMPILE_TONGSUO \ - ./ci/${{ matrix.os_name }}_runner.sh before_install - - - name: Linux Do install - run: | - sudo --preserve-env=OPENRESTY_VERSION \ - ./ci/${{ matrix.os_name }}_runner.sh do_install - - - name: Linux Script - env: - TEST_FILE_SUB_DIR: ${{ matrix.test_dir }} - run: | - sudo -E ./ci/${{ matrix.os_name }}_runner.sh script diff --git a/.github/workflows/kubernetes-ci.yml b/.github/workflows/kubernetes-ci.yml deleted file mode 100644 index ea72fe57144c..000000000000 --- a/.github/workflows/kubernetes-ci.yml +++ /dev/null @@ -1,80 +0,0 @@ -name: CI Kubernetes - -on: - push: - branches: [ master, 'release/**' ] - paths-ignore: - - 'docs/**' - - '**/*.md' - pull_request: - branches: [ master, 'release/**' ] - paths-ignore: - - 'docs/**' - - '**/*.md' - -concurrency: - group: ${{ github.workflow }}-${{ github.ref == 'refs/heads/master' && github.run_number || github.ref }} - cancel-in-progress: true - -permissions: - contents: read - -jobs: - kubernetes-discovery: - strategy: - fail-fast: false - matrix: - platform: - - ubuntu-20.04 - os_name: - - linux_openresty - - linux_openresty_1_19 - - runs-on: ${{ matrix.platform }} - timeout-minutes: 15 - env: - SERVER_NAME: ${{ matrix.os_name }} - OPENRESTY_VERSION: default - - steps: - - name: Check out code - uses: actions/checkout@v3.2.0 - with: - submodules: recursive - - - name: Setup kubernetes cluster - run: | - KUBERNETES_VERSION="v1.22.7" - - kind create cluster --name apisix-test --config ./t/kubernetes/configs/kind.yaml --image kindest/node:${KUBERNETES_VERSION} - - kubectl wait --for=condition=Ready nodes --all --timeout=180s - - kubectl apply -f ./t/kubernetes/configs/account.yaml - - kubectl apply -f ./t/kubernetes/configs/endpoint.yaml - - KUBERNETES_CLIENT_TOKEN_CONTENT=$(kubectl get secrets | grep apisix-test | awk '{system("kubectl get secret -o jsonpath={.data.token} "$1" | base64 --decode")}') - - KUBERNETES_CLIENT_TOKEN_DIR="/tmp/var/run/secrets/kubernetes.io/serviceaccount" - - KUBERNETES_CLIENT_TOKEN_FILE=${KUBERNETES_CLIENT_TOKEN_DIR}/token - - mkdir -p ${KUBERNETES_CLIENT_TOKEN_DIR} - echo -n "$KUBERNETES_CLIENT_TOKEN_CONTENT" > ${KUBERNETES_CLIENT_TOKEN_FILE} - - echo 'KUBERNETES_SERVICE_HOST=127.0.0.1' - echo 'KUBERNETES_SERVICE_PORT=6443' - echo 'KUBERNETES_CLIENT_TOKEN='"${KUBERNETES_CLIENT_TOKEN_CONTENT}" - echo 'KUBERNETES_CLIENT_TOKEN_FILE='${KUBERNETES_CLIENT_TOKEN_FILE} - - kubectl proxy -p 6445 & - - - name: Linux Install - run: | - sudo ./ci/${{ matrix.os_name }}_runner.sh before_install - sudo --preserve-env=OPENRESTY_VERSION ./ci/${{ matrix.os_name }}_runner.sh do_install - - - name: Run test cases - run: | - ./ci/kubernetes-ci.sh run_case diff --git a/.github/workflows/license-checker.yml b/.github/workflows/license-checker.yml deleted file mode 100644 index 2122e0db8fbc..000000000000 --- a/.github/workflows/license-checker.yml +++ /dev/null @@ -1,37 +0,0 @@ -# -# Licensed to the Apache Software Foundation (ASF) under one -# or more 
contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. -# -name: License checker - -on: - push: - branches: [master, 'release/**'] - pull_request: - branches: [master, 'release/**'] - -jobs: - check-license: - runs-on: ubuntu-latest - timeout-minutes: 3 - - steps: - - uses: actions/checkout@v3.2.0 - - name: Check License Header - uses: apache/skywalking-eyes@v0.4.0 - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml deleted file mode 100644 index 66f898b4201b..000000000000 --- a/.github/workflows/lint.yml +++ /dev/null @@ -1,46 +0,0 @@ -name: ❄️ Lint - -on: [push, pull_request] - -permissions: - contents: read - -jobs: - misc: - name: misc checker - runs-on: ubuntu-latest - steps: - - name: Check out code. - uses: actions/checkout@v3.2.0 - - name: spell check - run: | - pip install codespell==2.1.0 - # codespell considers some repo name in go.sum are misspelled - git grep --cached -l '' | grep -v go.sum |xargs codespell --ignore-words=.ignore_words - - name: Merge conflict - run: | - bash ./utils/check-merge-conflict.sh - - name: Plugin Code - run: | - bash ./utils/check-plugins-code.sh - - ci-eclint: - runs-on: ubuntu-latest - timeout-minutes: 5 - - steps: - - name: Check out code - uses: actions/checkout@v3.2.0 - - - name: Setup Nodejs env - uses: actions/setup-node@v3.5.1 - with: - node-version: '12' - - - name: Install eclint - run: | - sudo npm install -g eclint - - - name: Run eclint - run: | - eclint check diff --git a/.github/workflows/performance.yml b/.github/workflows/performance.yml deleted file mode 100644 index c054303ccd74..000000000000 --- a/.github/workflows/performance.yml +++ /dev/null @@ -1,65 +0,0 @@ -name: Performance Test - -on: - pull_request: - branches: [master, 'release/**'] - paths-ignore: - - 'docs/**' - - '**/*.md' - -permissions: - contents: read - -jobs: - performance: - if: ${{ startsWith(github.event.pull_request.title, 'perf:') }} - runs-on: ubuntu-20.04 - timeout-minutes: 45 - - steps: - - name: Check out code - uses: actions/checkout@v3.2.0 - with: - submodules: recursive - - - name: Cache deps - uses: actions/cache@v3 - env: - cache-name: cache-deps - with: - path: deps - key: ${{ runner.os }}-${{ env.cache-name }}-${{ matrix.os_name }}-${{ hashFiles('rockspec/apisix-master-0.rockspec') }} - - - name: Install Dependencies - run: sudo ./ci/performance_test.sh install_dependencies - - - name: Install wrk2 - run: sudo ./ci/performance_test.sh install_wrk2 - - - name: Install SystemTap Tools - run: sudo ./ci/performance_test.sh install_stap_tools - - - name: Perf Test - run: ./ci/performance_test.sh run_performance_test - - - name: Upload Performance Test Result - uses: actions/upload-artifact@v3 - with: - name: perf.txt - path: | - output/performance.txt - retention-days: 3 - - - name: Upload flamegrpah - uses: 
actions/upload-artifact@v3 - with: - name: flamegraph.svg - path: | - output/flamegraph.svg - retention-days: 3 - -# you can view the generated flamegraph by -# 1. open https://github.com/apache/apisix/actions -# 2. click the "Performance Test" button -# 3. choose the workflows that belong to your commits -# we need a way to have this address automatically appear in the comments of the PR running this workflow. diff --git a/.github/workflows/semantic.yml b/.github/workflows/semantic.yml deleted file mode 100644 index a2b606667fad..000000000000 --- a/.github/workflows/semantic.yml +++ /dev/null @@ -1,35 +0,0 @@ -name: "PR Lint" - -on: - pull_request_target: - types: - - opened - - edited - - synchronize - -jobs: - main: - name: Validate PR title - runs-on: ubuntu-latest - steps: - - name: Check out repository code - uses: actions/checkout@v3.2.0 - with: - submodules: recursive - - uses: ./.github/actions/action-semantic-pull-request - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - with: - types: | - feat - fix - docs - style - refactor - perf - test - build - ci - chore - revert - change diff --git a/.github/workflows/stale.yml b/.github/workflows/stale.yml deleted file mode 100644 index 4f751e7e962f..000000000000 --- a/.github/workflows/stale.yml +++ /dev/null @@ -1,52 +0,0 @@ -name: Stable Test - -on: - workflow_dispatch: - schedule: - - cron: '0 10 * * *' - -permissions: - contents: read - -jobs: - prune_stale: - permissions: - issues: write # for actions/stale to close stale issues - pull-requests: write # for actions/stale to close stale PRs - name: Prune Stale - runs-on: ubuntu-latest - timeout-minutes: 10 - - steps: - - name: Prune Stale - uses: actions/stale@v7 - with: - days-before-issue-stale: 350 - days-before-issue-close: 14 - stale-issue-message: > - This issue has been marked as stale due to 350 days of inactivity. - It will be closed in 2 weeks if no further activity occurs. If this issue is still - relevant, please simply write any comment. Even if closed, you can still revive the - issue at any time or discuss it on the dev@apisix.apache.org list. - Thank you for your contributions. - close-issue-message: > - This issue has been closed due to lack of activity. If you think that - is incorrect, or the issue requires additional review, you can revive the issue at - any time. - days-before-pr-stale: 60 - days-before-pr-close: 28 - stale-pr-message: > - This pull request has been marked as stale due to 60 days of inactivity. - It will be closed in 4 weeks if no further activity occurs. If you think - that's incorrect or this pull request should instead be reviewed, please simply - write any comment. Even if closed, you can still revive the PR at any time or - discuss it on the dev@apisix.apache.org list. - Thank you for your contributions. - close-pr-message: > - This pull request/issue has been closed due to lack of activity. If you think that - is incorrect, or the pull request requires review, you can revive the PR at any time. - # Issues with these labels will never be considered stale. 
- exempt-issue-labels: 'bug,enhancement,good first issue' - stale-issue-label: 'stale' - stale-pr-label: 'stale' - ascending: true diff --git a/.github/workflows/tars-ci.yml b/.github/workflows/tars-ci.yml deleted file mode 100644 index aa4c1b6e45af..000000000000 --- a/.github/workflows/tars-ci.yml +++ /dev/null @@ -1,56 +0,0 @@ -name: CI Tars - -on: - push: - branches: [ master, 'release/**' ] - paths-ignore: - - 'docs/**' - - '**/*.md' - pull_request: - branches: [ master, 'release/**' ] - paths-ignore: - - 'docs/**' - - '**/*.md' - -concurrency: - group: ${{ github.workflow }}-${{ github.ref == 'refs/heads/master' && github.run_number || github.ref }} - cancel-in-progress: true - -permissions: - contents: read - -jobs: - tars: - strategy: - fail-fast: false - matrix: - platform: - - ubuntu-20.04 - os_name: - - linux_openresty - - linux_openresty_1_19 - - runs-on: ${{ matrix.platform }} - timeout-minutes: 15 - env: - SERVER_NAME: ${{ matrix.os_name }} - OPENRESTY_VERSION: default - - steps: - - name: Check out code - uses: actions/checkout@v3.2.0 - with: - submodules: recursive - - - name: Setup Tars MySql - run: | - docker run -d -p 3306:3306 -v $PWD/t/tars/conf/tars.sql:/docker-entrypoint-initdb.d/tars.sql -e MYSQL_ROOT_PASSWORD=tars2022 mysql:5.7 - - - name: Linux Install - run: | - sudo ./ci/${{ matrix.os_name }}_runner.sh before_install - sudo --preserve-env=OPENRESTY_VERSION ./ci/${{ matrix.os_name }}_runner.sh do_install - - - name: Run test cases - run: | - ./ci/tars-ci.sh run_case From 461fc128fed0e0ed7a304a606e2a36f336b8046b Mon Sep 17 00:00:00 2001 From: levy liu Date: Fri, 10 Mar 2023 15:53:14 +0800 Subject: [PATCH 5/9] test: fix test cases --- apisix/http/router/radixtree_host_uri.lua | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/apisix/http/router/radixtree_host_uri.lua b/apisix/http/router/radixtree_host_uri.lua index 70919763c7e6..7006d6f1a1f4 100644 --- a/apisix/http/router/radixtree_host_uri.lua +++ b/apisix/http/router/radixtree_host_uri.lua @@ -17,6 +17,7 @@ local require = require local router = require("apisix.utils.router") local core = require("apisix.core") +local event = require("apisix.core.event") local get_services = require("apisix.http.service").services local service_fetch = require("apisix.http.service").get local ipairs = ipairs @@ -128,6 +129,9 @@ local function create_radixtree_router(routes) end }) end + + event.push(event.CONST.BUILD_ROUTER, routes) + if #host_router_routes > 0 then host_router = router.new(host_router_routes) end From c163cf07bc1eab09a5f87aba9505069a840fadc2 Mon Sep 17 00:00:00 2001 From: levy liu Date: Fri, 10 Mar 2023 16:20:37 +0800 Subject: [PATCH 6/9] Revert "debug" This reverts commit b49c85a80f25b35b550b74768250cb3053f8cabb. 
--- .github/workflows/build.yml | 33 ++++- .github/workflows/centos7-ci.yml | 160 ++++++++++++++++++++++++ .github/workflows/chaos.yml | 88 ++++++++++++++ .github/workflows/cli-master.yml | 58 +++++++++ .github/workflows/cli.yml | 66 ++++++++++ .github/workflows/code-lint.yml | 48 ++++++++ .github/workflows/doc-lint.yml | 54 +++++++++ .github/workflows/fips.yml | 152 +++++++++++++++++++++++ .github/workflows/fuzzing-ci.yaml | 78 ++++++++++++ .github/workflows/gm-cron.yaml | 167 ++++++++++++++++++++++++++ .github/workflows/gm.yml | 92 ++++++++++++++ .github/workflows/kubernetes-ci.yml | 80 ++++++++++++ .github/workflows/license-checker.yml | 37 ++++++ .github/workflows/lint.yml | 46 +++++++ .github/workflows/performance.yml | 65 ++++++++++ .github/workflows/semantic.yml | 35 ++++++ .github/workflows/stale.yml | 52 ++++++++ .github/workflows/tars-ci.yml | 56 +++++++++ 18 files changed, 1361 insertions(+), 6 deletions(-) create mode 100644 .github/workflows/centos7-ci.yml create mode 100644 .github/workflows/chaos.yml create mode 100644 .github/workflows/cli-master.yml create mode 100644 .github/workflows/cli.yml create mode 100644 .github/workflows/code-lint.yml create mode 100644 .github/workflows/doc-lint.yml create mode 100644 .github/workflows/fips.yml create mode 100644 .github/workflows/fuzzing-ci.yaml create mode 100644 .github/workflows/gm-cron.yaml create mode 100644 .github/workflows/gm.yml create mode 100644 .github/workflows/kubernetes-ci.yml create mode 100644 .github/workflows/license-checker.yml create mode 100644 .github/workflows/lint.yml create mode 100644 .github/workflows/performance.yml create mode 100644 .github/workflows/semantic.yml create mode 100644 .github/workflows/stale.yml create mode 100644 .github/workflows/tars-ci.yml diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index df1852a52026..b143a2607f16 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -28,8 +28,12 @@ jobs: - ubuntu-20.04 os_name: - linux_openresty + - linux_openresty_1_19 test_dir: - - t/plugin/a* + - t/plugin/[a-k]* + - t/plugin/[l-z]* + - t/admin t/cli t/config-center-yaml t/control t/core t/debug t/deployment t/discovery t/error_page t/misc + - t/node t/pubsub t/router t/script t/secret t/stream-node t/utils t/wasm t/xds-library t/xrpc runs-on: ${{ matrix.platform }} timeout-minutes: 90 @@ -125,6 +129,28 @@ jobs: make ci-env-up project_compose_ci=ci/pod/docker-compose.${{ steps.test_env.outputs.type }}.yml [[ ${{ steps.test_env.outputs.type }} != first ]] && sudo ./ci/init-${{ steps.test_env.outputs.type }}-test-service.sh after echo "Linux launch services, done." + - name: Start Dubbo Backend + if: matrix.os_name == 'linux_openresty' && steps.test_env.outputs.type == 'plugin' + run: | + sudo apt install -y maven + cd t/lib/dubbo-backend + mvn package + cd dubbo-backend-provider/target + java -Djava.net.preferIPv4Stack=true -jar dubbo-demo-provider.one-jar.jar > /tmp/java.log & + + - name: Build xDS library + if: steps.test_env.outputs.type == 'last' + run: | + cd t/xds-library + go build -o libxds.so -buildmode=c-shared main.go export.go + + - name: Build wasm code + if: matrix.os_name == 'linux_openresty' && steps.test_env.outputs.type == 'last' + run: | + export TINYGO_VER=0.20.0 + wget https://github.com/tinygo-org/tinygo/releases/download/v${TINYGO_VER}/tinygo_${TINYGO_VER}_amd64.deb 2>/dev/null + sudo dpkg -i tinygo_${TINYGO_VER}_amd64.deb + cd t/wasm && find . 
-type f -name "*.go" | xargs -Ip tinygo build -o p.wasm -scheduler=none -target=wasi p - name: Linux Before install run: sudo ./ci/${{ matrix.os_name }}_runner.sh before_install @@ -147,8 +173,3 @@ jobs: echo "start backing up, $(date)" bash ./ci/backup-docker-images.sh ${{ steps.test_env.outputs.type }} echo "backup done, $(date)" - - - name: Run debugger - if: ${{ failure() }} - uses: mxschmitt/action-tmate@v3 - timeout-minutes: 15 diff --git a/.github/workflows/centos7-ci.yml b/.github/workflows/centos7-ci.yml new file mode 100644 index 000000000000..60ea0a15db7c --- /dev/null +++ b/.github/workflows/centos7-ci.yml @@ -0,0 +1,160 @@ +name: CI Centos7 + +on: + push: + branches: [master, 'release/**'] + paths-ignore: + - 'docs/**' + - '**/*.md' + pull_request: + branches: [master, 'release/**'] + paths-ignore: + - 'docs/**' + - '**/*.md' + +concurrency: + group: ${{ github.workflow }}-${{ github.ref == 'refs/heads/master' && github.run_number || github.ref }} + cancel-in-progress: true + +permissions: + contents: read + +jobs: + test_apisix: + name: run ci on centos7 + runs-on: ubuntu-20.04 + timeout-minutes: 90 + strategy: + fail-fast: false + matrix: + test_dir: + - t/plugin/[a-k]* + - t/plugin/[l-z]* + - t/admin t/cli t/config-center-yaml t/control t/core t/debug t/deployment t/discovery t/error_page t/misc + - t/node t/pubsub t/router t/script t/secret t/stream-node t/utils t/wasm t/xds-library + + steps: + - name: Check out code + uses: actions/checkout@v3.2.0 + with: + submodules: recursive + + - name: Cache deps + uses: actions/cache@v3 + env: + cache-name: cache-deps + with: + path: deps + key: ${{ runner.os }}-${{ env.cache-name }}-centos7-${{ hashFiles('rockspec/apisix-master-0.rockspec') }} + + - name: Extract branch name + if: ${{ startsWith(github.ref, 'refs/heads/release/') }} + id: branch_env + shell: bash + run: | + echo "version=${GITHUB_REF##*/}" >>$GITHUB_OUTPUT + + - name: Extract test type + shell: bash + id: test_env + run: | + test_dir="${{ matrix.test_dir }}" + if [[ $test_dir =~ 't/plugin' ]]; then + echo "type=plugin" >>$GITHUB_OUTPUT + fi + if [[ $test_dir =~ 't/admin ' ]]; then + echo "type=first" >>$GITHUB_OUTPUT + fi + if [[ $test_dir =~ ' t/xds-library' ]]; then + echo "type=last" >>$GITHUB_OUTPUT + fi + + - name: Linux launch common services + run: | + make ci-env-up project_compose_ci=ci/pod/docker-compose.common.yml + sudo ./ci/init-common-test-service.sh + + - name: Build rpm package + if: ${{ startsWith(github.ref, 'refs/heads/release/') }} + run: | + export VERSION=${{ steps.branch_env.outputs.version }} + sudo gem install --no-document fpm + git clone --depth 1 https://github.com/api7/apisix-build-tools.git + + # move codes under build tool + mkdir ./apisix-build-tools/apisix + for dir in `ls|grep -v "^apisix-build-tools$"`;do cp -r $dir ./apisix-build-tools/apisix/;done + + cd apisix-build-tools + make package type=rpm app=apisix version=${VERSION} checkout=release/${VERSION} image_base=centos image_tag=7 local_code_path=./apisix + cd .. 
+ rm -rf $(ls -1 --ignore=apisix-build-tools --ignore=t --ignore=utils --ignore=ci --ignore=Makefile --ignore=rockspec) + + - name: Build xDS library + if: steps.test_env.outputs.type == 'last' + run: | + cd t/xds-library + go build -o libxds.so -buildmode=c-shared main.go export.go + + - name: Run centos7 docker and mapping apisix into container + env: + TEST_FILE_SUB_DIR: ${{ matrix.test_dir }} + run: | + docker run -itd -v ${{ github.workspace }}:/apisix --env TEST_FILE_SUB_DIR="$TEST_FILE_SUB_DIR" --name centos7Instance --net="host" --dns 8.8.8.8 --dns-search apache.org docker.io/centos:7 /bin/bash + # docker exec centos7Instance bash -c "cp -r /tmp/apisix ./" + + - name: Cache images + id: cache-images + uses: actions/cache@v3 + env: + cache-name: cache-apisix-docker-images + with: + path: docker-images-backup + key: ${{ runner.os }}-${{ env.cache-name }}-${{ steps.test_env.outputs.type }}-${{ hashFiles(format('./ci/pod/docker-compose.{0}.yml', steps.test_env.outputs.type )) }} + + - if: ${{ steps.cache-images.outputs.cache-hit == 'true' }} + name: Load saved docker images + run: | + if [[ -f docker-images-backup/apisix-images.tar ]]; then + [[ ${{ steps.test_env.outputs.type }} != first ]] && sudo ./ci/init-${{ steps.test_env.outputs.type }}-test-service.sh before + docker load --input docker-images-backup/apisix-images.tar + rm docker-images-backup/apisix-images.tar + make ci-env-up project_compose_ci=ci/pod/docker-compose.${{ steps.test_env.outputs.type }}.yml + echo "loaded docker images" + if [[ ${{ steps.test_env.outputs.type }} != first ]]; then + sudo ./ci/init-${{ steps.test_env.outputs.type }}-test-service.sh after + fi + fi + - if: ${{ steps.cache-images.outputs.cache-hit != 'true' }} + name: Linux launch services + run: | + [[ ${{ steps.test_env.outputs.type }} != first ]] && sudo ./ci/init-${{ steps.test_env.outputs.type }}-test-service.sh before + [[ ${{ steps.test_env.outputs.type }} == plugin ]] && ./ci/pod/openfunction/build-function-image.sh + make ci-env-up project_compose_ci=ci/pod/docker-compose.${{ steps.test_env.outputs.type }}.yml + [[ ${{ steps.test_env.outputs.type }} != first ]] && sudo ./ci/init-${{ steps.test_env.outputs.type }}-test-service.sh after + echo "Linux launch services, done." + + - name: Install dependencies + run: | + docker exec centos7Instance bash -c "cd apisix && ./ci/centos7-ci.sh install_dependencies" + + - name: Install rpm package + if: ${{ startsWith(github.ref, 'refs/heads/release/') }} + run: | + docker exec centos7Instance bash -c "cd apisix && rpm -iv --prefix=/apisix ./apisix-build-tools/output/apisix-${{ steps.branch_env.outputs.version }}-0.el7.x86_64.rpm" + # Dependencies are attached with rpm, so revert `make deps` + docker exec centos7Instance bash -c "cd apisix && rm -rf deps" + docker exec centos7Instance bash -c "cd apisix && mv usr/bin . && mv usr/local/apisix/* ." 
+ + - name: Run test cases + run: | + docker exec centos7Instance bash -c "cd apisix && ./ci/centos7-ci.sh run_case" + + - if: ${{ steps.cache-images.outputs.cache-hit != 'true' }} + name: Save docker images + run: | + # free disk space + bash ./ci/free_disk_space.sh + echo "start backing up, $(date)" + bash ./ci/backup-docker-images.sh ${{ steps.test_env.outputs.type }} + echo "backup done, $(date)" diff --git a/.github/workflows/chaos.yml b/.github/workflows/chaos.yml new file mode 100644 index 000000000000..1ea77618000f --- /dev/null +++ b/.github/workflows/chaos.yml @@ -0,0 +1,88 @@ +name: Chaos Test + +on: + pull_request: + branches: [master, 'release/**'] + paths-ignore: + - 'docs/**' + - '**/*.md' + +concurrency: + group: ${{ github.workflow }}-${{ github.ref == 'refs/heads/master' && github.run_number || github.ref }} + cancel-in-progress: true + +permissions: + contents: read + +jobs: + chaos-test: + runs-on: ubuntu-latest + timeout-minutes: 35 + steps: + - uses: actions/checkout@v3.2.0 + with: + submodules: recursive + + - name: Setup go + uses: actions/setup-go@v3 + with: + go-version: "1.17" + + - uses: actions/cache@v3 + with: + path: | + ~/.cache/go-build + ~/go/pkg/mod + key: ${{ runner.os }}-go-${{ hashFiles('**/go.sum') }} + restore-keys: | + ${{ runner.os }}-go- + + - name: Creating minikube cluster + run: | + bash ./t/chaos/utils/setup_chaos_utils.sh start_minikube + mkdir logs + docker build -t apache/apisix:alpine-local --build-arg APISIX_PATH=. -f ./t/chaos/utils/Dockerfile . + minikube cache add apache/apisix:alpine-local -v 7 --alsologtostderr + + - name: Print cluster information + run: | + kubectl config view + kubectl cluster-info + kubectl get nodes + kubectl get pods -n kube-system + kubectl version + + - name: Deploy etcd with Helm + run: | + curl https://raw.githubusercontent.com/helm/helm/master/scripts/get-helm-3 | bash + helm repo add bitnami https://charts.bitnami.com/bitnami + helm install etcd bitnami/etcd --set replicaCount=3 --set auth.rbac.create=false + kubectl wait pods -l app.kubernetes.io/instance=etcd --for=condition=Ready --timeout=300s --all + + + - name: Deploy APISIX + run: | + bash ./t/chaos/utils/setup_chaos_utils.sh modify_config + kubectl create configmap apisix-gw-config.yaml --from-file=./conf/config.yaml + kubectl apply -f ./t/chaos/kubernetes/deployment.yaml + kubectl apply -f ./t/chaos/kubernetes/service.yaml + kubectl wait pods -l app=apisix-gw --for=condition=Ready --timeout=300s \ + || (kubectl logs -l app=apisix-gw && exit 1) + kubectl apply -f https://raw.githubusercontent.com/istio/istio/master/samples/httpbin/httpbin.yaml + kubectl wait pods -l app=httpbin --for=condition=Ready --timeout=300s \ + || (kubectl logs -l app=httpbin && exit 1) + bash ./t/chaos/utils/setup_chaos_utils.sh port_forward + + - name: Deploy Chaos Mesh + run: | + curl -sSL https://mirrors.chaos-mesh.org/v2.0.1/install.sh | bash + + + - name: Install Ginkgo + run: | + go get -u github.com/onsi/ginkgo/ginkgo + sudo cp ~/go/bin/ginkgo /usr/local/bin + + - name: Run test + working-directory: ./t/chaos + run: ginkgo -r --v --progress --trace diff --git a/.github/workflows/cli-master.yml b/.github/workflows/cli-master.yml new file mode 100644 index 000000000000..dd77dcd1537c --- /dev/null +++ b/.github/workflows/cli-master.yml @@ -0,0 +1,58 @@ +name: CLI Test (master) + +on: + push: + branches: [master] + paths-ignore: + - 'docs/**' + - '**/*.md' + pull_request: + branches: [master] + paths-ignore: + - 'docs/**' + - '**/*.md' + +concurrency: + group: ${{ 
github.workflow }}-${{ github.ref == 'refs/heads/master' && github.run_number || github.ref }} + cancel-in-progress: true + +permissions: + contents: read + +jobs: + build: + strategy: + fail-fast: false + matrix: + job_name: + - linux_apisix_master_luarocks + runs-on: ubuntu-20.04 + timeout-minutes: 15 + env: + OPENRESTY_VERSION: default + + steps: + - name: Check out code + uses: actions/checkout@v3.2.0 + with: + submodules: recursive + + - name: Cache deps + uses: actions/cache@v3 + env: + cache-name: cache-deps + with: + path: deps + key: ${{ runner.os }}-${{ env.cache-name }}-${{ matrix.job_name }}-${{ hashFiles('rockspec/apisix-master-0.rockspec') }} + + - name: Linux launch common services + run: | + project_compose_ci=ci/pod/docker-compose.common.yml make ci-env-up + + - name: Linux Install + run: | + sudo --preserve-env=OPENRESTY_VERSION \ + ./ci/${{ matrix.job_name }}_runner.sh do_install + + - name: Linux Script + run: sudo ./ci/${{ matrix.job_name }}_runner.sh script diff --git a/.github/workflows/cli.yml b/.github/workflows/cli.yml new file mode 100644 index 000000000000..2f76670a0611 --- /dev/null +++ b/.github/workflows/cli.yml @@ -0,0 +1,66 @@ +name: CLI Test + +on: + push: + branches: [master, 'release/**'] + paths-ignore: + - 'docs/**' + - '**/*.md' + pull_request: + branches: [master, 'release/**'] + paths-ignore: + - 'docs/**' + - '**/*.md' + +concurrency: + group: ${{ github.workflow }}-${{ github.ref == 'refs/heads/master' && github.run_number || github.ref }} + cancel-in-progress: true + +permissions: + contents: read + +jobs: + build: + strategy: + fail-fast: false + matrix: + platform: + - ubuntu-20.04 + job_name: + - linux_apisix_current_luarocks + - linux_apisix_current_luarocks_in_customed_nginx + + runs-on: ${{ matrix.platform }} + timeout-minutes: 15 + env: + SERVER_NAME: ${{ matrix.job_name }} + OPENRESTY_VERSION: default + + steps: + - name: Check out code + uses: actions/checkout@v3.2.0 + with: + submodules: recursive + + - name: Cache deps + uses: actions/cache@v3 + env: + cache-name: cache-deps + with: + path: deps + key: ${{ runner.os }}-${{ env.cache-name }}-${{ matrix.job_name }}-${{ hashFiles('rockspec/apisix-master-0.rockspec') }} + + - name: Linux launch common services + run: | + project_compose_ci=ci/pod/docker-compose.common.yml make ci-env-up + + - name: Linux Before install + run: sudo ./ci/${{ matrix.job_name }}_runner.sh before_install + + - name: Linux Install + run: | + sudo --preserve-env=OPENRESTY_VERSION \ + ./ci/${{ matrix.job_name }}_runner.sh do_install + + - name: Linux Script + run: sudo ./ci/${{ matrix.job_name }}_runner.sh script diff --git a/.github/workflows/code-lint.yml b/.github/workflows/code-lint.yml new file mode 100644 index 000000000000..07a1807f811e --- /dev/null +++ b/.github/workflows/code-lint.yml @@ -0,0 +1,48 @@ +name: Code Lint + +on: + pull_request: + branches: [master, 'release/**'] + paths-ignore: + - 'docs/**' + - '**/*.md' + +permissions: + contents: read + +jobs: + lint: + runs-on: ubuntu-latest + timeout-minutes: 10 + steps: + - uses: actions/checkout@v3.2.0 + - name: Install + run: | + . ./ci/common.sh + export_or_prefix + export OPENRESTY_VERSION=default + + ./ci/linux-install-openresty.sh + ./utils/linux-install-luarocks.sh + sudo luarocks install luacheck + + - name: Script + run: | + . 
./ci/common.sh + export_or_prefix + make lint + + sc-lint: + runs-on: ubuntu-latest + timeout-minutes: 5 + steps: + - name: Checkout code + uses: actions/checkout@v3.2.0 + + - name: Shellcheck code + run: | + scversion="latest" + wget -qO- "https://github.com/koalaman/shellcheck/releases/download/${scversion?}/shellcheck-${scversion?}.linux.x86_64.tar.xz" | tar -xJv + cp -av "shellcheck-${scversion}/shellcheck" /usr/local/bin/ + shellcheck --version + git ls-files -- "*.sh" | xargs -t shellcheck diff --git a/.github/workflows/doc-lint.yml b/.github/workflows/doc-lint.yml new file mode 100644 index 000000000000..7a8b19904def --- /dev/null +++ b/.github/workflows/doc-lint.yml @@ -0,0 +1,54 @@ +name: Doc Lint + +on: + push: + paths: + - "docs/**" + - "**/*.md" + pull_request: + branches: [master, "release/**"] + paths: + - "docs/**" + - "**/*.md" + +permissions: + contents: read + +jobs: + markdownlint: + name: 🍇 Markdown + runs-on: ubuntu-latest + timeout-minutes: 1 + steps: + - uses: actions/checkout@v3.2.0 + - name: 🚀 Use Node.js + uses: actions/setup-node@v3.5.1 + with: + node-version: "12.x" + - run: npm install -g markdownlint-cli@0.25.0 + - run: markdownlint '**/*.md' + - name: check category + run: | + ./utils/check-category.py + - name: check Chinese doc + run: | + sudo pip3 install zhon + ./utils/fix-zh-doc-segment.py > \ + /tmp/check.log 2>&1 || (cat /tmp/check.log && exit 1) + if grep "find broken newline in file: " /tmp/check.log; then + cat /tmp/check.log + echo "Newline can't appear in the middle of Chinese sentences." + echo "You need to run ./utils/fix-zh-doc-segment.py to fix them." + exit 1 + fi + + Chinse-Copywriting-lint: + name: Chinese Copywriting + runs-on: ubuntu-latest + timeout-minutes: 1 + steps: + - uses: actions/checkout@v3 + - name: Check Chinese copywriting + uses: huacnlee/autocorrect-action@v2.6.2 + with: + args: --lint --no-diff-bg-color docs/zh/latest/**.md diff --git a/.github/workflows/fips.yml b/.github/workflows/fips.yml new file mode 100644 index 000000000000..854417503cfd --- /dev/null +++ b/.github/workflows/fips.yml @@ -0,0 +1,152 @@ +name: CI FIPS + +on: + push: + branches: [master] + paths-ignore: + - 'docs/**' + - '**/*.md' + pull_request: + branches: [master] + paths-ignore: + - 'docs/**' + - '**/*.md' + +permissions: + contents: read + +jobs: + build: + strategy: + fail-fast: false + matrix: + platform: + - ubuntu-20.04 + os_name: + - linux_openresty + test_dir: + # all plugins only use three parts of openssl API: RSA via ffi, SHA via ffi and SSL API wrapped by nginx. + # The latter one is already covered by the core tests, so no need to repeat it in plugin tests. + # The RSA and SHA tests are fully covered by jwt-auth and hmac-auth plugin tests, while other plugins only repeat such tests. + - t/plugin/jwt-auth2.t t/plugin/jwt-auth.t t/plugin/hmac-auth.t + # all SSL related core tests are covered by below two lists. 
+ - t/admin/ssl* t/admin/schema.t t/admin/upstream.t t/config-center-yaml/ssl.t t/core/etcd-mtls.t t/core/config_etcd.t t/deployment/conf_server.t t/misc/patch.t + - t/node/grpc-proxy-unary.t t/node/upstream-keepalive-pool.t t/node/upstream-websocket.t t/node/client-mtls.t t/node/upstream-mtls.t t/pubsub/kafka.t t/router/radixtree-sni2.t t/router/multi-ssl-certs.t t/router/radixtree-sni.t t/stream-node/mtls.t t/stream-node/tls.t t/stream-node/upstream-tls.t t/stream-node/sni.t + + runs-on: ${{ matrix.platform }} + timeout-minutes: 90 + env: + SERVER_NAME: ${{ matrix.os_name }} + OPENRESTY_VERSION: default + + steps: + - name: Check out code + uses: actions/checkout@v3.2.0 + with: + submodules: recursive + + - name: Cache deps + uses: actions/cache@v3 + env: + cache-name: cache-deps + with: + path: deps + key: ${{ runner.os }}-${{ env.cache-name }}-${{ matrix.os_name }}-${{ hashFiles('rockspec/apisix-master-0.rockspec') }} + + - name: Cache openssl-3.0 compilation + id: cache-openssl + uses: actions/cache@v3 + env: + cache-name: cache-openssl + with: + path: ~/openssl-3.0 + key: ${{ runner.os }}-${{ env.cache-name }}-${{ matrix.os_name }} + + - name: Toggle openssl compile + id: test_ssl_env + shell: bash + if: steps.cache-openssl.outputs.cache-hit != 'true' + run: | + echo "openssl3=yes" >>$GITHUB_OUTPUT + echo "openssl3_prefix=$HOME" >>$GITHUB_OUTPUT + + - name: Extract test type + shell: bash + id: test_env + run: | + test_dir="${{ matrix.test_dir }}" + if [[ $test_dir =~ 't/plugin' ]]; then + echo "type=plugin" >>$GITHUB_OUTPUT + fi + if [[ $test_dir =~ 't/admin' ]]; then + echo "type=first" >>$GITHUB_OUTPUT + fi + if [[ $test_dir =~ 't/node' ]]; then + echo "type=last" >>$GITHUB_OUTPUT + fi + + - name: Linux launch common services + run: | + make ci-env-up project_compose_ci=ci/pod/docker-compose.common.yml + + - name: Cache images + id: cache-images + uses: actions/cache@v3 + env: + cache-name: cache-apisix-docker-images + with: + path: docker-images-backup + key: ${{ runner.os }}-${{ env.cache-name }}-${{ steps.test_env.outputs.type }}-${{ hashFiles(format('./ci/pod/docker-compose.{0}.yml', steps.test_env.outputs.type )) }} + + - if: ${{ steps.cache-images.outputs.cache-hit == 'true' }} + name: Load saved docker images + run: | + if [[ -f docker-images-backup/apisix-images.tar ]]; then + [[ ${{ steps.test_env.outputs.type }} != first ]] && sudo ./ci/init-${{ steps.test_env.outputs.type }}-test-service.sh before + docker load --input docker-images-backup/apisix-images.tar + rm docker-images-backup/apisix-images.tar + make ci-env-up project_compose_ci=ci/pod/docker-compose.${{ steps.test_env.outputs.type }}.yml + echo "loaded docker images" + if [[ ${{ steps.test_env.outputs.type }} != first ]]; then + sudo ./ci/init-${{ steps.test_env.outputs.type }}-test-service.sh after + fi + fi + - if: ${{ steps.cache-images.outputs.cache-hit != 'true' }} + name: Linux launch services + run: | + [[ ${{ steps.test_env.outputs.type }} != first ]] && sudo ./ci/init-${{ steps.test_env.outputs.type }}-test-service.sh before + [[ ${{ steps.test_env.outputs.type }} == plugin ]] && ./ci/pod/openfunction/build-function-image.sh + make ci-env-up project_compose_ci=ci/pod/docker-compose.${{ steps.test_env.outputs.type }}.yml + echo "make ci-env-up, done" + [[ ${{ steps.test_env.outputs.type }} != first ]] && sudo ./ci/init-${{ steps.test_env.outputs.type }}-test-service.sh after + echo "Linux launch services, done" + + - name: Linux Before install + run: sudo ./ci/${{ matrix.os_name }}_runner.sh 
before_install + + - name: Linux Install + env: + COMPILE_OPENSSL3: ${{ steps.test_ssl_env.outputs.openssl3 }} + OPENSSL3_PREFIX: ${{ steps.test_ssl_env.outputs.openssl3_prefix }} + USE_OPENSSL3: yes + run: | + sudo --preserve-env=OPENRESTY_VERSION \ + --preserve-env=COMPILE_OPENSSL3 \ + --preserve-env=OPENSSL3_PREFIX \ + --preserve-env=USE_OPENSSL3 \ + ./ci/${{ matrix.os_name }}_runner.sh do_install + + - name: Linux Script + env: + OPENSSL_FIPS: yes + TEST_FILE_SUB_DIR: ${{ matrix.test_dir }} + run: sudo -E ./ci/${{ matrix.os_name }}_runner.sh script + + - if: ${{ steps.cache-images.outputs.cache-hit != 'true' }} + name: Save docker images + run: | + # free disk space + bash ./ci/free_disk_space.sh + echo "start backing up, $(date)" + bash ./ci/backup-docker-images.sh ${{ steps.test_env.outputs.type }} + echo "backup done, $(date)" diff --git a/.github/workflows/fuzzing-ci.yaml b/.github/workflows/fuzzing-ci.yaml new file mode 100644 index 000000000000..ec3701532d77 --- /dev/null +++ b/.github/workflows/fuzzing-ci.yaml @@ -0,0 +1,78 @@ +name: fuzzing + +on: + push: + branches: [master, 'release/**'] + paths-ignore: + - 'docs/**' + - '**/*.md' + pull_request: + branches: [master, 'release/**'] + paths-ignore: + - 'docs/**' + - '**/*.md' + +concurrency: + group: ${{ github.workflow }}-${{ github.ref == 'refs/heads/master' && github.run_number || github.ref }} + cancel-in-progress: true + +permissions: + contents: read + +jobs: + test_apisix: + name: run fuzzing + runs-on: ubuntu-latest + timeout-minutes: 30 + + steps: + - name: Check out code + uses: actions/checkout@v3.2.0 + with: + submodules: recursive + + - name: Cache deps + uses: actions/cache@v3 + env: + cache-name: cache-deps + with: + path: deps + key: ${{ runner.os }}-${{ env.cache-name }}-${{ hashFiles('rockspec/apisix-master-0.rockspec') }} + + - name: Linux launch common services + run: | + project_compose_ci=ci/pod/docker-compose.common.yml make ci-env-up + + - name: run apisix + run: | + wget -qO - https://openresty.org/package/pubkey.gpg | sudo apt-key add - + sudo apt-get update + sudo apt-get -y install software-properties-common + sudo add-apt-repository -y "deb http://openresty.org/package/ubuntu $(lsb_release -sc) main" + sudo apt-get update + sudo apt-get install -y git openresty curl openresty-openssl111-dev unzip make gcc libldap2-dev + ./utils/linux-install-luarocks.sh + + make deps + make init + make run + + - name: run upstream + run: | + sudo openresty -c $PWD/t/fuzzing/upstream/nginx.conf + + - name: install boofuzz + run: | + # Avoid "ERROR: flask has requirement click>=8.0, but you'll have click 7.0 which is incompatible" + sudo apt remove python3-click + pip install -r $PWD/t/fuzzing/requirements.txt + + - name: run tests + run: | + export APISIX_FUZZING_PWD=$PWD + python $PWD/t/fuzzing/simpleroute_test.py + python $PWD/t/fuzzing/serverless_route_test.py + python $PWD/t/fuzzing/vars_route_test.py + python $PWD/t/fuzzing/client_abort.py + python $PWD/t/fuzzing/simple_http.py + python $PWD/t/fuzzing/http_upstream.py diff --git a/.github/workflows/gm-cron.yaml b/.github/workflows/gm-cron.yaml new file mode 100644 index 000000000000..9acb63dbbd59 --- /dev/null +++ b/.github/workflows/gm-cron.yaml @@ -0,0 +1,167 @@ +name: CI GM (cron) + +on: + schedule: + # UTC 7:30 every Friday + - cron: "30 7 * * 5" + +permissions: + contents: read + +jobs: + build: + strategy: + fail-fast: false + matrix: + platform: + - ubuntu-20.04 + os_name: + - linux_openresty_tongsuo + test_dir: + - t/plugin/[a-k]* + - t/plugin/[l-z]* + - 
t/admin t/cli t/config-center-yaml t/control t/core t/debug t/deployment t/discovery t/error_page t/misc + - t/node t/pubsub t/router t/script t/stream-node t/utils t/wasm t/xds-library t/xrpc + + runs-on: ${{ matrix.platform }} + timeout-minutes: 90 + env: + SERVER_NAME: ${{ matrix.os_name }} + OPENRESTY_VERSION: default + + # TODO: refactor the workflows to reduce duplicate parts. Maybe we can write them in shell + # scripts or a separate action? + steps: + - name: Check out code + uses: actions/checkout@v3.2.0 + with: + submodules: recursive + + - name: Setup Go + uses: actions/setup-go@v3 + with: + go-version: "1.17" + + - name: Cache deps + uses: actions/cache@v3 + env: + cache-name: cache-deps + with: + path: deps + key: ${{ runner.os }}-${{ env.cache-name }}-${{ matrix.os_name }}-${{ hashFiles('rockspec/apisix-master-0.rockspec') }} + + - name: Cache Tongsuo compilation + id: cache-tongsuo + uses: actions/cache@v3 + env: + cache-name: cache-tongsuo + with: + path: ./tongsuo + key: ${{ runner.os }}-${{ env.cache-name }}-${{ matrix.os_name }}-tongsuo-ver + + - name: Test SSL Env + id: test_ssl_env + shell: bash + if: steps.cache-tongsuo.outputs.cache-hit != 'true' + run: | + echo "compile_tongsuo=true" >>$GITHUB_OUTPUT + + - name: Extract test type + shell: bash + id: test_env + run: | + test_dir="${{ matrix.test_dir }}" + if [[ $test_dir =~ 't/plugin' ]]; then + echo "type=plugin" >>$GITHUB_OUTPUT + fi + if [[ $test_dir =~ 't/admin ' ]]; then + echo "type=first" >>$GITHUB_OUTPUT + fi + if [[ $test_dir =~ ' t/xrpc' ]]; then + echo "type=last" >>$GITHUB_OUTPUT + fi + + - name: Linux launch common services + run: | + make ci-env-up project_compose_ci=ci/pod/docker-compose.common.yml + sudo ./ci/init-common-test-service.sh + + - name: Cache images + id: cache-images + uses: actions/cache@v3 + env: + cache-name: cache-apisix-docker-images + with: + path: docker-images-backup + key: ${{ runner.os }}-${{ env.cache-name }}-${{ steps.test_env.outputs.type }}-${{ hashFiles(format('./ci/pod/docker-compose.{0}.yml', steps.test_env.outputs.type )) }} + + - if: ${{ steps.cache-images.outputs.cache-hit == 'true' }} + name: Load saved docker images + run: | + if [[ -f docker-images-backup/apisix-images.tar ]]; then + [[ ${{ steps.test_env.outputs.type }} != first ]] && sudo ./ci/init-${{ steps.test_env.outputs.type }}-test-service.sh before + docker load --input docker-images-backup/apisix-images.tar + rm docker-images-backup/apisix-images.tar + make ci-env-up project_compose_ci=ci/pod/docker-compose.${{ steps.test_env.outputs.type }}.yml + echo "loaded docker images" + if [[ ${{ steps.test_env.outputs.type }} != first ]]; then + sudo ./ci/init-${{ steps.test_env.outputs.type }}-test-service.sh after + fi + fi + - if: ${{ steps.cache-images.outputs.cache-hit != 'true' }} + name: Linux launch services + run: | + [[ ${{ steps.test_env.outputs.type }} != first ]] && sudo ./ci/init-${{ steps.test_env.outputs.type }}-test-service.sh before + [[ ${{ steps.test_env.outputs.type }} == plugin ]] && ./ci/pod/openfunction/build-function-image.sh + make ci-env-up project_compose_ci=ci/pod/docker-compose.${{ steps.test_env.outputs.type }}.yml + [[ ${{ steps.test_env.outputs.type }} != first ]] && sudo ./ci/init-${{ steps.test_env.outputs.type }}-test-service.sh after + echo "Linux launch services, done." 
+ + - name: Start Dubbo Backend + if: steps.test_env.outputs.type == 'plugin' + run: | + sudo apt install -y maven + cd t/lib/dubbo-backend + mvn package + cd dubbo-backend-provider/target + java -Djava.net.preferIPv4Stack=true -jar dubbo-demo-provider.one-jar.jar > /tmp/java.log & + + - name: Build xDS library + if: steps.test_env.outputs.type == 'last' + run: | + cd t/xds-library + go build -o libxds.so -buildmode=c-shared main.go export.go + + - name: Build wasm code + if: steps.test_env.outputs.type == 'last' + run: | + export TINYGO_VER=0.20.0 + wget https://github.com/tinygo-org/tinygo/releases/download/v${TINYGO_VER}/tinygo_${TINYGO_VER}_amd64.deb 2>/dev/null + sudo dpkg -i tinygo_${TINYGO_VER}_amd64.deb + cd t/wasm && find . -type f -name "*.go" | xargs -Ip tinygo build -o p.wasm -scheduler=none -target=wasi p + + - name: Linux Before install + env: + COMPILE_TONGSUO: ${{ steps.test_ssl_env.outputs.compile_tongsuo }} + run: | + sudo --preserve-env=COMPILE_TONGSUO \ + ./ci/${{ matrix.os_name }}_runner.sh before_install + + - name: Linux Install + run: | + sudo --preserve-env=OPENRESTY_VERSION \ + ./ci/${{ matrix.os_name }}_runner.sh do_install + + - name: Linux Script + env: + TEST_FILE_SUB_DIR: ${{ matrix.test_dir }} + run: sudo -E ./ci/${{ matrix.os_name }}_runner.sh script + + - if: ${{ steps.cache-images.outputs.cache-hit != 'true' }} + name: Save docker images + run: | + # free disk space + bash ./ci/free_disk_space.sh + echo "start backing up, $(date)" + bash ./ci/backup-docker-images.sh ${{ steps.test_env.outputs.type }} + echo "backup done, $(date)" diff --git a/.github/workflows/gm.yml b/.github/workflows/gm.yml new file mode 100644 index 000000000000..06663006e942 --- /dev/null +++ b/.github/workflows/gm.yml @@ -0,0 +1,92 @@ +name: CI GM + +on: + push: + branches: [master] + paths-ignore: + - 'docs/**' + - '**/*.md' + pull_request: + branches: [master] + paths-ignore: + - 'docs/**' + - '**/*.md' + +concurrency: + group: ${{ github.workflow }}-${{ github.ref == 'refs/heads/master' && github.run_number || github.ref }} + cancel-in-progress: true + +permissions: + contents: read + +jobs: + build: + strategy: + fail-fast: false + matrix: + platform: + - ubuntu-20.04 + os_name: + - linux_openresty_tongsuo + test_dir: + - t/gm + + runs-on: ${{ matrix.platform }} + timeout-minutes: 90 + env: + SERVER_NAME: ${{ matrix.os_name }} + OPENRESTY_VERSION: default + + steps: + - name: Check out code + uses: actions/checkout@v3.2.0 + with: + submodules: recursive + + - name: Cache deps + uses: actions/cache@v3 + env: + cache-name: cache-deps + with: + path: deps + key: ${{ runner.os }}-${{ env.cache-name }}-${{ matrix.os_name }}-${{ hashFiles('rockspec/apisix-master-0.rockspec') }} + + - name: Cache Tongsuo compilation + id: cache-tongsuo + uses: actions/cache@v3 + env: + cache-name: cache-tongsuo + with: + path: ./tongsuo + # TODO: use a fixed release once they have created one. 
+ # See https://github.com/Tongsuo-Project/Tongsuo/issues/318 + key: ${{ runner.os }}-${{ env.cache-name }}-${{ matrix.os_name }}-tongsuo-ver + + - name: Test SSL Env + id: test_ssl_env + shell: bash + if: steps.cache-tongsuo.outputs.cache-hit != 'true' + run: | + echo "compile_tongsuo=true" >>$GITHUB_OUTPUT + + - name: Linux launch common services + run: | + make ci-env-up project_compose_ci=ci/pod/docker-compose.common.yml + + - name: Linux Before install + env: + COMPILE_TONGSUO: ${{ steps.test_ssl_env.outputs.compile_tongsuo }} + run: | + sudo --preserve-env=COMPILE_TONGSUO \ + ./ci/${{ matrix.os_name }}_runner.sh before_install + + - name: Linux Do install + run: | + sudo --preserve-env=OPENRESTY_VERSION \ + ./ci/${{ matrix.os_name }}_runner.sh do_install + + - name: Linux Script + env: + TEST_FILE_SUB_DIR: ${{ matrix.test_dir }} + run: | + sudo -E ./ci/${{ matrix.os_name }}_runner.sh script diff --git a/.github/workflows/kubernetes-ci.yml b/.github/workflows/kubernetes-ci.yml new file mode 100644 index 000000000000..ea72fe57144c --- /dev/null +++ b/.github/workflows/kubernetes-ci.yml @@ -0,0 +1,80 @@ +name: CI Kubernetes + +on: + push: + branches: [ master, 'release/**' ] + paths-ignore: + - 'docs/**' + - '**/*.md' + pull_request: + branches: [ master, 'release/**' ] + paths-ignore: + - 'docs/**' + - '**/*.md' + +concurrency: + group: ${{ github.workflow }}-${{ github.ref == 'refs/heads/master' && github.run_number || github.ref }} + cancel-in-progress: true + +permissions: + contents: read + +jobs: + kubernetes-discovery: + strategy: + fail-fast: false + matrix: + platform: + - ubuntu-20.04 + os_name: + - linux_openresty + - linux_openresty_1_19 + + runs-on: ${{ matrix.platform }} + timeout-minutes: 15 + env: + SERVER_NAME: ${{ matrix.os_name }} + OPENRESTY_VERSION: default + + steps: + - name: Check out code + uses: actions/checkout@v3.2.0 + with: + submodules: recursive + + - name: Setup kubernetes cluster + run: | + KUBERNETES_VERSION="v1.22.7" + + kind create cluster --name apisix-test --config ./t/kubernetes/configs/kind.yaml --image kindest/node:${KUBERNETES_VERSION} + + kubectl wait --for=condition=Ready nodes --all --timeout=180s + + kubectl apply -f ./t/kubernetes/configs/account.yaml + + kubectl apply -f ./t/kubernetes/configs/endpoint.yaml + + KUBERNETES_CLIENT_TOKEN_CONTENT=$(kubectl get secrets | grep apisix-test | awk '{system("kubectl get secret -o jsonpath={.data.token} "$1" | base64 --decode")}') + + KUBERNETES_CLIENT_TOKEN_DIR="/tmp/var/run/secrets/kubernetes.io/serviceaccount" + + KUBERNETES_CLIENT_TOKEN_FILE=${KUBERNETES_CLIENT_TOKEN_DIR}/token + + mkdir -p ${KUBERNETES_CLIENT_TOKEN_DIR} + echo -n "$KUBERNETES_CLIENT_TOKEN_CONTENT" > ${KUBERNETES_CLIENT_TOKEN_FILE} + + echo 'KUBERNETES_SERVICE_HOST=127.0.0.1' + echo 'KUBERNETES_SERVICE_PORT=6443' + echo 'KUBERNETES_CLIENT_TOKEN='"${KUBERNETES_CLIENT_TOKEN_CONTENT}" + echo 'KUBERNETES_CLIENT_TOKEN_FILE='${KUBERNETES_CLIENT_TOKEN_FILE} + + kubectl proxy -p 6445 & + + - name: Linux Install + run: | + sudo ./ci/${{ matrix.os_name }}_runner.sh before_install + sudo --preserve-env=OPENRESTY_VERSION ./ci/${{ matrix.os_name }}_runner.sh do_install + + - name: Run test cases + run: | + ./ci/kubernetes-ci.sh run_case diff --git a/.github/workflows/license-checker.yml b/.github/workflows/license-checker.yml new file mode 100644 index 000000000000..2122e0db8fbc --- /dev/null +++ b/.github/workflows/license-checker.yml @@ -0,0 +1,37 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor 
license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +# +name: License checker + +on: + push: + branches: [master, 'release/**'] + pull_request: + branches: [master, 'release/**'] + +jobs: + check-license: + runs-on: ubuntu-latest + timeout-minutes: 3 + + steps: + - uses: actions/checkout@v3.2.0 + - name: Check License Header + uses: apache/skywalking-eyes@v0.4.0 + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml new file mode 100644 index 000000000000..66f898b4201b --- /dev/null +++ b/.github/workflows/lint.yml @@ -0,0 +1,46 @@ +name: ❄️ Lint + +on: [push, pull_request] + +permissions: + contents: read + +jobs: + misc: + name: misc checker + runs-on: ubuntu-latest + steps: + - name: Check out code. + uses: actions/checkout@v3.2.0 + - name: spell check + run: | + pip install codespell==2.1.0 + # codespell considers some repo name in go.sum are misspelled + git grep --cached -l '' | grep -v go.sum |xargs codespell --ignore-words=.ignore_words + - name: Merge conflict + run: | + bash ./utils/check-merge-conflict.sh + - name: Plugin Code + run: | + bash ./utils/check-plugins-code.sh + + ci-eclint: + runs-on: ubuntu-latest + timeout-minutes: 5 + + steps: + - name: Check out code + uses: actions/checkout@v3.2.0 + + - name: Setup Nodejs env + uses: actions/setup-node@v3.5.1 + with: + node-version: '12' + + - name: Install eclint + run: | + sudo npm install -g eclint + + - name: Run eclint + run: | + eclint check diff --git a/.github/workflows/performance.yml b/.github/workflows/performance.yml new file mode 100644 index 000000000000..c054303ccd74 --- /dev/null +++ b/.github/workflows/performance.yml @@ -0,0 +1,65 @@ +name: Performance Test + +on: + pull_request: + branches: [master, 'release/**'] + paths-ignore: + - 'docs/**' + - '**/*.md' + +permissions: + contents: read + +jobs: + performance: + if: ${{ startsWith(github.event.pull_request.title, 'perf:') }} + runs-on: ubuntu-20.04 + timeout-minutes: 45 + + steps: + - name: Check out code + uses: actions/checkout@v3.2.0 + with: + submodules: recursive + + - name: Cache deps + uses: actions/cache@v3 + env: + cache-name: cache-deps + with: + path: deps + key: ${{ runner.os }}-${{ env.cache-name }}-${{ matrix.os_name }}-${{ hashFiles('rockspec/apisix-master-0.rockspec') }} + + - name: Install Dependencies + run: sudo ./ci/performance_test.sh install_dependencies + + - name: Install wrk2 + run: sudo ./ci/performance_test.sh install_wrk2 + + - name: Install SystemTap Tools + run: sudo ./ci/performance_test.sh install_stap_tools + + - name: Perf Test + run: ./ci/performance_test.sh run_performance_test + + - name: Upload Performance Test Result + uses: actions/upload-artifact@v3 + with: + name: perf.txt + path: | + output/performance.txt + retention-days: 3 + + - name: Upload flamegrpah + uses: actions/upload-artifact@v3 + with: + name: 
flamegraph.svg + path: | + output/flamegraph.svg + retention-days: 3 + +# you can view the generated flamegraph by +# 1. open https://github.com/apache/apisix/actions +# 2. click the "Performance Test" button +# 3. choose the workflows that belong to your commits +# we need a way to have this address automatically appear in the comments of the PR running this workflow. diff --git a/.github/workflows/semantic.yml b/.github/workflows/semantic.yml new file mode 100644 index 000000000000..a2b606667fad --- /dev/null +++ b/.github/workflows/semantic.yml @@ -0,0 +1,35 @@ +name: "PR Lint" + +on: + pull_request_target: + types: + - opened + - edited + - synchronize + +jobs: + main: + name: Validate PR title + runs-on: ubuntu-latest + steps: + - name: Check out repository code + uses: actions/checkout@v3.2.0 + with: + submodules: recursive + - uses: ./.github/actions/action-semantic-pull-request + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + with: + types: | + feat + fix + docs + style + refactor + perf + test + build + ci + chore + revert + change diff --git a/.github/workflows/stale.yml b/.github/workflows/stale.yml new file mode 100644 index 000000000000..4f751e7e962f --- /dev/null +++ b/.github/workflows/stale.yml @@ -0,0 +1,52 @@ +name: Stable Test + +on: + workflow_dispatch: + schedule: + - cron: '0 10 * * *' + +permissions: + contents: read + +jobs: + prune_stale: + permissions: + issues: write # for actions/stale to close stale issues + pull-requests: write # for actions/stale to close stale PRs + name: Prune Stale + runs-on: ubuntu-latest + timeout-minutes: 10 + + steps: + - name: Prune Stale + uses: actions/stale@v7 + with: + days-before-issue-stale: 350 + days-before-issue-close: 14 + stale-issue-message: > + This issue has been marked as stale due to 350 days of inactivity. + It will be closed in 2 weeks if no further activity occurs. If this issue is still + relevant, please simply write any comment. Even if closed, you can still revive the + issue at any time or discuss it on the dev@apisix.apache.org list. + Thank you for your contributions. + close-issue-message: > + This issue has been closed due to lack of activity. If you think that + is incorrect, or the issue requires additional review, you can revive the issue at + any time. + days-before-pr-stale: 60 + days-before-pr-close: 28 + stale-pr-message: > + This pull request has been marked as stale due to 60 days of inactivity. + It will be closed in 4 weeks if no further activity occurs. If you think + that's incorrect or this pull request should instead be reviewed, please simply + write any comment. Even if closed, you can still revive the PR at any time or + discuss it on the dev@apisix.apache.org list. + Thank you for your contributions. + close-pr-message: > + This pull request/issue has been closed due to lack of activity. If you think that + is incorrect, or the pull request requires review, you can revive the PR at any time. + # Issues with these labels will never be considered stale. 
+ exempt-issue-labels: 'bug,enhancement,good first issue' + stale-issue-label: 'stale' + stale-pr-label: 'stale' + ascending: true diff --git a/.github/workflows/tars-ci.yml b/.github/workflows/tars-ci.yml new file mode 100644 index 000000000000..aa4c1b6e45af --- /dev/null +++ b/.github/workflows/tars-ci.yml @@ -0,0 +1,56 @@ +name: CI Tars + +on: + push: + branches: [ master, 'release/**' ] + paths-ignore: + - 'docs/**' + - '**/*.md' + pull_request: + branches: [ master, 'release/**' ] + paths-ignore: + - 'docs/**' + - '**/*.md' + +concurrency: + group: ${{ github.workflow }}-${{ github.ref == 'refs/heads/master' && github.run_number || github.ref }} + cancel-in-progress: true + +permissions: + contents: read + +jobs: + tars: + strategy: + fail-fast: false + matrix: + platform: + - ubuntu-20.04 + os_name: + - linux_openresty + - linux_openresty_1_19 + + runs-on: ${{ matrix.platform }} + timeout-minutes: 15 + env: + SERVER_NAME: ${{ matrix.os_name }} + OPENRESTY_VERSION: default + + steps: + - name: Check out code + uses: actions/checkout@v3.2.0 + with: + submodules: recursive + + - name: Setup Tars MySql + run: | + docker run -d -p 3306:3306 -v $PWD/t/tars/conf/tars.sql:/docker-entrypoint-initdb.d/tars.sql -e MYSQL_ROOT_PASSWORD=tars2022 mysql:5.7 + + - name: Linux Install + run: | + sudo ./ci/${{ matrix.os_name }}_runner.sh before_install + sudo --preserve-env=OPENRESTY_VERSION ./ci/${{ matrix.os_name }}_runner.sh do_install + + - name: Run test cases + run: | + ./ci/tars-ci.sh run_case From 85ef20a88ec83551810558a944ae9e15dc2f893f Mon Sep 17 00:00:00 2001 From: levy liu Date: Fri, 10 Mar 2023 18:36:44 +0800 Subject: [PATCH 7/9] refactor: minor tweaks --- apisix/http/router/radixtree_host_uri.lua | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/apisix/http/router/radixtree_host_uri.lua b/apisix/http/router/radixtree_host_uri.lua index 7006d6f1a1f4..532576e53d4a 100644 --- a/apisix/http/router/radixtree_host_uri.lua +++ b/apisix/http/router/radixtree_host_uri.lua @@ -105,8 +105,9 @@ local function create_radixtree_router(routes) local host_routes = {} local only_uri_routes = {} host_router = nil + routes = routes or {} - for _, route in ipairs(routes or {}) do + for _, route in ipairs(routes) do local status = core.table.try_read_attr(route, "value", "status") -- check the status if not status or status == 1 then From 82fc3af16fff682439a3bfc6d7e7a7eccd97a5e9 Mon Sep 17 00:00:00 2001 From: levy liu Date: Mon, 13 Mar 2023 13:00:46 +0800 Subject: [PATCH 8/9] refactor: minor tweaks --- docs/en/latest/terminology/router.md | 2 +- docs/zh/latest/terminology/router.md | 2 +- t/config-center-yaml/route.t | 12 ------- t/node/route-status.t | 11 ------ t/router/radixtree-host-uri.t | 22 ------------ t/router/radixtree-host-uri2.t | 16 ++++----- t/router/radixtree-host-uri3.t | 9 ----- t/router/radixtree-method.t | 11 ++++++ t/router/radixtree-sni.t | 47 +++++-------------------- t/router/radixtree-sni2.t | 37 +++++-------------- t/router/radixtree-uri-host.t | 15 ++++++++ t/router/radixtree-uri-keep-end-slash.t | 13 +++++-- t/router/radixtree-uri-multiple.t | 15 ++++++++ t/router/radixtree-uri-priority.t | 14 ++++++++ t/router/radixtree-uri-sanity.t | 15 ++++++++ t/router/radixtree-uri-vars.t | 16 +++++++++ 16 files changed, 124 insertions(+), 133 deletions(-) diff --git a/docs/en/latest/terminology/router.md b/docs/en/latest/terminology/router.md index eb077743446e..0eb49f117159 100644 --- a/docs/en/latest/terminology/router.md +++ b/docs/en/latest/terminology/router.md @@ -48,7 
+48,7 @@ A Router can have the following configurations: :::note -In version 3.2 and earlier, APISIX used `radixtree_uri` as the default Router. `radixtree_uri` has better performance than `radixtree_host_uri`, so if you have higher performance requirements and can live with the fact that `radixtree_uri` only matches uri, consider continuing to use `radixtree_uri` as the default Router. +In version 3.2 and earlier, APISIX used `radixtree_uri` as the default Router. `radixtree_uri` has better performance than `radixtree_host_uri`, so if you have higher performance requirements and can accept that `radixtree_uri` only uses the `uri` as the primary index, consider continuing to use `radixtree_uri` as the default Router. ::: diff --git a/docs/zh/latest/terminology/router.md b/docs/zh/latest/terminology/router.md index f831ce1923f9..43cf809c0ce9 100644 --- a/docs/zh/latest/terminology/router.md +++ b/docs/zh/latest/terminology/router.md @@ -48,7 +48,7 @@ Router 具有以下配置: ::: 注意 -在3.2及之前版本,APISIX 使用 radixtree_uri 作为默认路由,radixtree_uri 比 radixtree_host_uri 拥有更好的性能,如果你对性能有更高的要求,并且能够接受 radixtree_uri 只匹配 uri 的特点,可以考虑继续使用 radixtree_uri 作为默认路由 +在 3.2 及之前版本,APISIX 使用 `radixtree_uri` 作为默认路由,`radixtree_uri` 比 `radixtree_host_uri` 拥有更好的性能,如果你对性能有更高的要求,并且能够接受 `radixtree_uri` 只使用 `uri` 作为主索引的特点,可以考虑继续使用 `radixtree_uri` 作为默认路由。 ::: diff --git a/t/config-center-yaml/route.t b/t/config-center-yaml/route.t index fa2fc671353a..2da1397dabbb 100644 --- a/t/config-center-yaml/route.t +++ b/t/config-center-yaml/route.t @@ -145,10 +145,6 @@ hello world === TEST 6: route with bad plugin, radixtree_host_uri --- yaml_config -apisix: - node_listen: 1984 - router: - http: "radixtree_host_uri" deployment: role: data_plane role_data_plane: @@ -176,10 +172,6 @@ property "uri" validation failed === TEST 7: fix route with default value --- yaml_config -apisix: - node_listen: 1984 - router: - http: "radixtree_host_uri" deployment: role: data_plane role_data_plane: @@ -206,10 +198,6 @@ GET /hello === TEST 8: invalid route, bad vars operator --- yaml_config -apisix: - node_listen: 1984 - router: - http: "radixtree_host_uri" deployment: role: data_plane role_data_plane: diff --git a/t/node/route-status.t b/t/node/route-status.t index 1f14a02634f6..434afa07d488 100644 --- a/t/node/route-status.t +++ b/t/node/route-status.t @@ -22,13 +22,6 @@ worker_connections(256); no_root_location(); no_shuffle(); -our $yaml_config = <<_EOC_; -apisix: - node_listen: 1984 - router: - http: 'radixtree_host_uri' -_EOC_ - run_tests(); __DATA__ @@ -133,7 +126,6 @@ GET /hello ngx.say(body) } } ---- yaml_config eval: $::yaml_config --- request GET /t --- response_body @@ -144,7 +136,6 @@ passed === TEST 6: hit route --- request GET /hello ---- yaml_config eval: $::yaml_config --- more_headers Host: foo.com --- response_body @@ -172,7 +163,6 @@ hello world ngx.say(body) } } ---- yaml_config eval: $::yaml_config --- request GET /t --- response_body @@ -183,7 +173,6 @@ passed === TEST 8: route not found, failed by disable --- request GET /hello ---- yaml_config eval: $::yaml_config --- more_headers Host: foo.com --- error_code: 404 diff --git a/t/router/radixtree-host-uri.t b/t/router/radixtree-host-uri.t index a65df25a3a09..8ab19ae034ab 100644 --- a/t/router/radixtree-host-uri.t +++ b/t/router/radixtree-host-uri.t @@ -22,13 +22,6 @@ worker_connections(256); no_root_location(); no_shuffle(); -our $yaml_config = <<_EOC_; -apisix: - node_listen: 1984 - router: - http: 'radixtree_host_uri' -_EOC_ - run_tests(); __DATA__ @@ -58,7 +51,6 @@ __DATA__
ngx.say(body) } } ---- yaml_config eval: $::yaml_config --- request GET /t --- response_body @@ -69,7 +61,6 @@ passed === TEST 2: /not_found --- request GET /not_found ---- yaml_config eval: $::yaml_config --- error_code: 404 --- response_body {"error_msg":"404 Route Not Found"} @@ -79,7 +70,6 @@ GET /not_found === TEST 3: /not_found --- request GET /hello ---- yaml_config eval: $::yaml_config --- error_code: 404 --- response_body {"error_msg":"404 Route Not Found"} @@ -89,7 +79,6 @@ GET /hello === TEST 4: /not_found --- request GET /hello ---- yaml_config eval: $::yaml_config --- more_headers Host: not_found.com --- error_code: 404 @@ -101,7 +90,6 @@ Host: not_found.com === TEST 5: hit routes --- request GET /hello ---- yaml_config eval: $::yaml_config --- more_headers Host: foo.com --- response_body @@ -112,7 +100,6 @@ hello world === TEST 6: hit routes --- request GET /hello ---- yaml_config eval: $::yaml_config --- more_headers Host: foo.com --- response_body @@ -144,7 +131,6 @@ hello world ngx.say(body) } } ---- yaml_config eval: $::yaml_config --- request GET /t --- response_body @@ -155,7 +141,6 @@ passed === TEST 8: /not_found --- request GET /hello ---- yaml_config eval: $::yaml_config --- error_code: 404 --- response_body {"error_msg":"404 Route Not Found"} @@ -165,7 +150,6 @@ GET /hello === TEST 9: hit routes --- request GET /server_port ---- yaml_config eval: $::yaml_config --- more_headers Host: anydomain.com --- response_body_like eval @@ -197,7 +181,6 @@ qr/1981/ ngx.say(body) } } ---- yaml_config eval: $::yaml_config --- request GET /t --- response_body @@ -208,7 +191,6 @@ passed === TEST 11: /not_found --- request GET /hello2 ---- yaml_config eval: $::yaml_config --- error_code: 404 --- response_body {"error_msg":"404 Route Not Found"} @@ -218,7 +200,6 @@ GET /hello2 === TEST 12: hit routes --- request GET /hello ---- yaml_config eval: $::yaml_config --- more_headers Host: anydomain.com --- response_body @@ -241,7 +222,6 @@ hello world ngx.say(body) } } ---- yaml_config eval: $::yaml_config --- request GET /t --- response_body @@ -274,7 +254,6 @@ passed ngx.say(body) } } ---- yaml_config eval: $::yaml_config --- request GET /t --- response_body @@ -285,7 +264,6 @@ passed === TEST 15: hit routes --- request GET /hello ---- yaml_config eval: $::yaml_config --- more_headers Host: www.foo.com --- response_body diff --git a/t/router/radixtree-host-uri2.t b/t/router/radixtree-host-uri2.t index 7be88f82d7f2..2a6aa42f0a9e 100644 --- a/t/router/radixtree-host-uri2.t +++ b/t/router/radixtree-host-uri2.t @@ -34,12 +34,19 @@ deployment: config_provider: yaml _EOC_ +add_block_preprocessor(sub { + my ($block) = @_; + + if (!defined $block->yaml_config) { + $block->set_value("yaml_config", $yaml_config); + } +}); + run_tests(); __DATA__ === TEST 1: test.com ---- yaml_config eval: $::yaml_config --- apisix_yaml routes: - @@ -69,7 +76,6 @@ use config_provider: yaml === TEST 2: *.test.com + uri ---- yaml_config eval: $::yaml_config --- apisix_yaml routes: - @@ -99,7 +105,6 @@ use config_provider: yaml === TEST 3: *.test.com + /* ---- yaml_config eval: $::yaml_config --- apisix_yaml routes: - @@ -129,7 +134,6 @@ use config_provider: yaml === TEST 4: filter_func(not match) ---- yaml_config eval: $::yaml_config --- apisix_yaml routes: - @@ -160,7 +164,6 @@ use config_provider: yaml === TEST 5: filter_func(match) ---- yaml_config eval: $::yaml_config --- apisix_yaml routes: - @@ -318,7 +321,6 @@ Host: t.com === TEST 12: request host with uppercase ---- yaml_config eval: $::yaml_config --- 
apisix_yaml routes: - @@ -337,7 +339,6 @@ Host: tEst.com === TEST 13: configure host with uppercase ---- yaml_config eval: $::yaml_config --- apisix_yaml routes: - @@ -356,7 +357,6 @@ Host: test.com === TEST 14: inherit hosts from services ---- yaml_config eval: $::yaml_config --- apisix_yaml services: - id: 1 diff --git a/t/router/radixtree-host-uri3.t b/t/router/radixtree-host-uri3.t index 2db4bb437c1e..5b8df17720ac 100644 --- a/t/router/radixtree-host-uri3.t +++ b/t/router/radixtree-host-uri3.t @@ -16,18 +16,9 @@ # use t::APISIX 'no_plan'; -our $yaml_config = <<_EOC_; -apisix: - node_listen: 1984 - router: - http: 'radixtree_host_uri' -_EOC_ - add_block_preprocessor(sub { my ($block) = @_; - $block->set_value("yaml_config", $yaml_config); - if (!$block->request) { $block->set_value("request", "GET /t"); } diff --git a/t/router/radixtree-method.t b/t/router/radixtree-method.t index 30a51c21861a..f17826e57ed7 100644 --- a/t/router/radixtree-method.t +++ b/t/router/radixtree-method.t @@ -16,9 +16,20 @@ # use t::APISIX 'no_plan'; +our $yaml_config = <<_EOC_; +apisix: + node_listen: 1984 + router: + http: 'radixtree_uri' +_EOC_ + add_block_preprocessor(sub { my ($block) = @_; + if (!defined $block->yaml_config) { + $block->set_value("yaml_config", $yaml_config); + } + if (!defined $block->request) { $block->set_value("request", "GET /t"); } diff --git a/t/router/radixtree-sni.t b/t/router/radixtree-sni.t index 57f9ed360762..9a28e3d78f78 100644 --- a/t/router/radixtree-sni.t +++ b/t/router/radixtree-sni.t @@ -21,6 +21,15 @@ no_root_location(); $ENV{TEST_NGINX_HTML_DIR} ||= html_dir(); +add_block_preprocessor(sub { + my ($block) = @_; + + if (!$block->request) { + $block->set_value("request", "GET /t"); + } + +}); + run_tests; __DATA__ @@ -51,8 +60,6 @@ location /t { ngx.say(body) } } ---- request -GET /t --- response_body passed @@ -82,8 +89,6 @@ passed ngx.say(body) } } ---- request -GET /t --- response_body passed @@ -144,8 +149,6 @@ location /t { -- collectgarbage() } } ---- request -GET /t --- response_body eval qr{connected: 1 ssl handshake: true @@ -195,8 +198,6 @@ location /t { end } } ---- request -GET /t --- response_body connected: 1 failed to do SSL handshake: handshake failed @@ -231,8 +232,6 @@ location /t { ngx.say(body) } } ---- request -GET /t --- response_body passed @@ -293,8 +292,6 @@ location /t { -- collectgarbage() } } ---- request -GET /t --- response_body eval qr{connected: 1 ssl handshake: true @@ -340,8 +337,6 @@ location /t { ngx.say(body) } } ---- request -GET /t --- response_body passed @@ -402,8 +397,6 @@ location /t { -- collectgarbage() } } ---- request -GET /t --- response_body eval qr{connected: 1 ssl handshake: true @@ -449,8 +442,6 @@ location /t { ngx.say(body) } } ---- request -GET /t --- response_body passed @@ -489,8 +480,6 @@ location /t { -- collectgarbage() } } ---- request -GET /t --- response_body_like connected: 1 failed to do SSL handshake: 18: self[- ]signed certificate @@ -535,8 +524,6 @@ location /t { -- collectgarbage() } } ---- request -GET /t --- response_body connected: 1 failed to do SSL handshake: handshake failed @@ -572,8 +559,6 @@ location /t { ngx.say(body) } } ---- request -GET /t --- response_body passed @@ -612,8 +597,6 @@ location /t { -- collectgarbage() } } ---- request -GET /t --- response_body connected: 1 failed to do SSL handshake: handshake failed @@ -648,8 +631,6 @@ location /t { ngx.say(body) } } ---- request -GET /t --- response_body passed @@ -688,8 +669,6 @@ location /t { -- collectgarbage() } } ---- request -GET /t 
--- response_body_like connected: 1 failed to do SSL handshake: 18: self[- ]signed certificate @@ -727,8 +706,6 @@ location /t { ngx.say(body) } } ---- request -GET /t --- response_body passed @@ -767,8 +744,6 @@ location /t { -- collectgarbage() } } ---- request -GET /t --- response_body_like connected: 1 failed to do SSL handshake: 18: self[- ]signed certificate @@ -813,8 +788,6 @@ location /t { -- collectgarbage() } } ---- request -GET /t --- response_body connected: 1 failed to do SSL handshake: handshake failed @@ -846,8 +819,6 @@ location /t { ngx.print(body) } } ---- request -GET /t --- response_body {"error_msg":"failed to decrypt previous encrypted key"} --- error_code: 400 diff --git a/t/router/radixtree-sni2.t b/t/router/radixtree-sni2.t index 1c910198a0a8..b34d0b725a3e 100644 --- a/t/router/radixtree-sni2.t +++ b/t/router/radixtree-sni2.t @@ -21,6 +21,15 @@ no_root_location(); $ENV{TEST_NGINX_HTML_DIR} ||= html_dir(); +add_block_preprocessor(sub { + my ($block) = @_; + + if (!$block->request) { + $block->set_value("request", "GET /t"); + } + +}); + run_tests; __DATA__ @@ -59,8 +68,6 @@ location /t { ngx.say(body) } } ---- request -GET /t --- response_body passed @@ -100,8 +107,6 @@ location /t { -- collectgarbage() } } ---- request -GET /t --- response_body connected: 1 ssl handshake: true @@ -143,8 +148,6 @@ location /t { -- collectgarbage() } } ---- request -GET /t --- response_body connected: 1 ssl handshake: true @@ -177,8 +180,6 @@ location /t { ngx.say(body) } } ---- request -GET /t --- response_body passed @@ -222,8 +223,6 @@ location /t { -- collectgarbage() } } ---- request -GET /t --- response_body eval qr{connected: 1 ssl handshake: true @@ -269,8 +268,6 @@ location /t { ngx.print(body) } } ---- request -GET /t --- error_code: 400 --- response_body {"error_msg":"failed to handle cert-key pair[1]: failed to decrypt previous encrypted key"} @@ -299,8 +296,6 @@ location /t { ngx.print(body) } } ---- request -GET /t --- response_body {"error_msg":"failed to parse cert: PEM_read_bio_X509_AUX() failed"} --- error_code: 400 @@ -338,8 +333,6 @@ location /t { -- collectgarbage() } } ---- request -GET /t --- response_body failed to do SSL handshake: handshake failed --- error_log @@ -384,8 +377,6 @@ location /t { -- collectgarbage() } } ---- request -GET /t --- response_body ssl handshake: true @@ -411,8 +402,6 @@ location /t { ngx.say(body) } } ---- request -GET /t --- response_body passed @@ -445,8 +434,6 @@ location /t { -- collectgarbage() } } ---- request -GET /t --- response_body ssl handshake: true @@ -472,8 +459,6 @@ location /t { ngx.say(body) } } ---- request -GET /t --- response_body passed @@ -506,8 +491,6 @@ location /t { -- collectgarbage() } } ---- request -GET /t --- response_body ssl handshake: true @@ -553,8 +536,6 @@ location /t { ngx.log(ngx.WARN, "release table ", name) return old_release(name, ...) 
end ---- request -GET /t --- response_body ssl handshake: true --- grep_error_log eval diff --git a/t/router/radixtree-uri-host.t b/t/router/radixtree-uri-host.t index e6a16a6c7038..b9f290141f80 100644 --- a/t/router/radixtree-uri-host.t +++ b/t/router/radixtree-uri-host.t @@ -22,6 +22,21 @@ worker_connections(256); no_root_location(); no_shuffle(); +our $yaml_config = <<_EOC_; +apisix: + node_listen: 1984 + router: + http: 'radixtree_uri' +_EOC_ + +add_block_preprocessor(sub { + my ($block) = @_; + + if (!defined $block->yaml_config) { + $block->set_value("yaml_config", $yaml_config); + } +}); + run_tests(); __DATA__ diff --git a/t/router/radixtree-uri-keep-end-slash.t b/t/router/radixtree-uri-keep-end-slash.t index 533b480f0290..910ef99b04b8 100644 --- a/t/router/radixtree-uri-keep-end-slash.t +++ b/t/router/radixtree-uri-keep-end-slash.t @@ -25,9 +25,19 @@ no_shuffle(); our $yaml_config = <<_EOC_; apisix: node_listen: 1984 + router: + http: 'radixtree_uri' delete_uri_tail_slash: true _EOC_ +add_block_preprocessor(sub { + my ($block) = @_; + + if (!defined $block->yaml_config) { + $block->set_value("yaml_config", $yaml_config); + } +}); + run_tests(); __DATA__ @@ -56,7 +66,6 @@ __DATA__ ngx.say(body) } } ---- yaml_config eval: $::yaml_config --- request GET /t --- response_body @@ -67,7 +76,6 @@ passed === TEST 2: hit route --- request GET /hello ---- yaml_config eval: $::yaml_config --- response_body hello world @@ -76,5 +84,4 @@ hello world === TEST 3: hit route --- request GET /hello/ ---- yaml_config eval: $::yaml_config --- error_code: 404 diff --git a/t/router/radixtree-uri-multiple.t b/t/router/radixtree-uri-multiple.t index 001d970808c0..8f124fe44a34 100644 --- a/t/router/radixtree-uri-multiple.t +++ b/t/router/radixtree-uri-multiple.t @@ -22,6 +22,21 @@ worker_connections(256); no_root_location(); no_shuffle(); +our $yaml_config = <<_EOC_; +apisix: + node_listen: 1984 + router: + http: 'radixtree_uri' +_EOC_ + +add_block_preprocessor(sub { + my ($block) = @_; + + if (!defined $block->yaml_config) { + $block->set_value("yaml_config", $yaml_config); + } +}); + run_tests(); __DATA__ diff --git a/t/router/radixtree-uri-priority.t b/t/router/radixtree-uri-priority.t index 865b341f64ce..93c2464ca0a1 100644 --- a/t/router/radixtree-uri-priority.t +++ b/t/router/radixtree-uri-priority.t @@ -22,6 +22,20 @@ worker_connections(256); no_root_location(); no_shuffle(); +our $yaml_config = <<_EOC_; +apisix: + node_listen: 1984 + router: + http: 'radixtree_uri' +_EOC_ + +add_block_preprocessor(sub { + my ($block) = @_; + + if (!defined $block->yaml_config) { + $block->set_value("yaml_config", $yaml_config); + } +}); run_tests(); diff --git a/t/router/radixtree-uri-sanity.t b/t/router/radixtree-uri-sanity.t index c61721df821a..12f1580c888a 100644 --- a/t/router/radixtree-uri-sanity.t +++ b/t/router/radixtree-uri-sanity.t @@ -28,6 +28,21 @@ apisix: normalize_uri_like_servlet: true _EOC_ +our $yaml_config = <<_EOC_; +apisix: + node_listen: 1984 + router: + http: 'radixtree_uri' +_EOC_ + +add_block_preprocessor(sub { + my ($block) = @_; + + if (!defined $block->yaml_config) { + $block->set_value("yaml_config", $yaml_config); + } +}); + run_tests(); __DATA__ diff --git a/t/router/radixtree-uri-vars.t b/t/router/radixtree-uri-vars.t index f9b3e2af1e65..6e4b78712f6d 100644 --- a/t/router/radixtree-uri-vars.t +++ b/t/router/radixtree-uri-vars.t @@ -22,6 +22,22 @@ worker_connections(256); no_root_location(); no_shuffle(); +our $yaml_config = <<_EOC_; +apisix: + node_listen: 1984 + router: + http: 
'radixtree_uri' +_EOC_ + +add_block_preprocessor(sub { + my ($block) = @_; + + if (!defined $block->yaml_config) { + $block->set_value("yaml_config", $yaml_config); + } + +}); + run_tests(); __DATA__ From cc967d7839be320888682e0b99ea0de80d83c6f1 Mon Sep 17 00:00:00 2001 From: levy liu Date: Mon, 13 Mar 2023 14:34:29 +0800 Subject: [PATCH 9/9] refactor: minor tweaks --- t/router/radixtree-host-uri-priority.t | 12 ++++++++---- t/router/radixtree-host-uri.t | 15 +++++++++++++++ t/router/radixtree-host-uri3.t | 11 +++++++++++ t/router/radixtree-uri-sanity.t | 2 ++ 4 files changed, 36 insertions(+), 4 deletions(-) diff --git a/t/router/radixtree-host-uri-priority.t b/t/router/radixtree-host-uri-priority.t index 50e5ff81e159..a2bb56c85f52 100644 --- a/t/router/radixtree-host-uri-priority.t +++ b/t/router/radixtree-host-uri-priority.t @@ -34,12 +34,19 @@ deployment: config_provider: yaml _EOC_ +add_block_preprocessor(sub { + my ($block) = @_; + + if (!defined $block->yaml_config) { + $block->set_value("yaml_config", $yaml_config); + } +}); + run_tests(); __DATA__ === TEST 1: hit routes(priority: 1 + priority: 2) ---- yaml_config eval: $::yaml_config --- apisix_yaml routes: - @@ -72,7 +79,6 @@ use config_provider: yaml === TEST 2: hit routes(priority: 2 + priority: 1) ---- yaml_config eval: $::yaml_config --- apisix_yaml routes: - @@ -105,7 +111,6 @@ use config_provider: yaml === TEST 3: hit routes(priority: default_value + priority: 1) ---- yaml_config eval: $::yaml_config --- apisix_yaml routes: - @@ -137,7 +142,6 @@ use config_provider: yaml === TEST 4: hit routes(priority: 1 + priority: default_value) ---- yaml_config eval: $::yaml_config --- apisix_yaml routes: - diff --git a/t/router/radixtree-host-uri.t b/t/router/radixtree-host-uri.t index 8ab19ae034ab..be29464d5ce8 100644 --- a/t/router/radixtree-host-uri.t +++ b/t/router/radixtree-host-uri.t @@ -22,6 +22,21 @@ worker_connections(256); no_root_location(); no_shuffle(); +our $yaml_config = <<_EOC_; +apisix: + node_listen: 1984 + router: + http: 'radixtree_host_uri' +_EOC_ + +add_block_preprocessor(sub { + my ($block) = @_; + + if (!defined $block->yaml_config) { + $block->set_value("yaml_config", $yaml_config); + } +}); + run_tests(); __DATA__ diff --git a/t/router/radixtree-host-uri3.t b/t/router/radixtree-host-uri3.t index 5b8df17720ac..0d57788a2b25 100644 --- a/t/router/radixtree-host-uri3.t +++ b/t/router/radixtree-host-uri3.t @@ -16,6 +16,13 @@ # use t::APISIX 'no_plan'; +our $yaml_config = <<_EOC_; +apisix: + node_listen: 1984 + router: + http: 'radixtree_host_uri' +_EOC_ + add_block_preprocessor(sub { my ($block) = @_; @@ -23,6 +30,10 @@ add_block_preprocessor(sub { $block->set_value("request", "GET /t"); } + if (!defined $block->yaml_config) { + $block->set_value("yaml_config", $yaml_config); + } + if (!$block->error_log && !$block->no_error_log && (defined $block->error_code && $block->error_code != 502)) { diff --git a/t/router/radixtree-uri-sanity.t b/t/router/radixtree-uri-sanity.t index 12f1580c888a..5d36a3db1f78 100644 --- a/t/router/radixtree-uri-sanity.t +++ b/t/router/radixtree-uri-sanity.t @@ -25,6 +25,8 @@ no_shuffle(); our $servlet_yaml_config = <<_EOC_; apisix: node_listen: 1984 + router: + http: 'radixtree_uri' normalize_uri_like_servlet: true _EOC_
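As the router.md note above suggests, a deployment that prefers the previous behaviour can pin the Router explicitly rather than rely on the new default. A minimal sketch of that override in a user-managed conf/config.yaml, assuming the usual merge over conf/config-default.yaml; the test fixtures in this series set the same value through their inline yaml_config:

    apisix:
      router:
        # illustrative only: keep the previous default router (radixtree_uri)
        # instead of the new default radixtree_host_uri
        http: radixtree_uri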