Skip to content

Performance tests #274

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Open
wants to merge 9 commits into
base: main
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
37 changes: 32 additions & 5 deletions .github/workflows/build.yml
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,23 @@ on:
types: [published]

jobs:
# Gate job: decides whether the rest of the build pipeline should be
# skipped because the PR carries the PERF-TEST-ONLY label. Downstream
# jobs consume the `should-skip` output instead of re-evaluating the
# label expression themselves.
check-skip:
  runs-on: ubuntu-latest
  outputs:
    # 'true' when this is a pull_request event AND the PR has the
    # PERF-TEST-ONLY label; 'false' otherwise. Job outputs are always
    # strings, so consumers compare against the string 'true'.
    should-skip: ${{ github.event_name == 'pull_request' && contains(github.event.pull_request.labels.*.name, 'PERF-TEST-ONLY') }}
  steps:
    # Diagnostic only: logs which branch was taken so the skip decision
    # is visible in the run log. The job-level output above is what
    # downstream jobs actually read.
    # NOTE(review): `id: check` sets no step outputs and appears unused.
    - name: Check for PERF-TEST-ONLY label
      id: check
      run: |
        if [[ "${{ github.event_name == 'pull_request' && contains(github.event.pull_request.labels.*.name, 'PERF-TEST-ONLY') }}" == "true" ]]; then
          echo "Label PERF-TEST-ONLY is present, skipping build"
        else
          echo "Proceeding with build"
        fi

check-access:
needs: check-skip
if: needs.check-skip.outputs.should-skip != 'true'
runs-on: ubuntu-latest
outputs:
has-token-access: ${{ steps.check.outputs.has-token-access }}
Expand Down Expand Up @@ -634,10 +650,11 @@ jobs:

install-deps:
needs:
- check-skip
- build-demoapp
- merge-router
runs-on: ubuntu-latest
if: ${{ !failure() && (needs.build-demoapp.result == 'skipped' || needs.build-demoapp.result == 'success') && (needs.merge-router.result == 'skipped' || needs.merge-router.result == 'success') }}
if: ${{ !failure() && needs.check-skip.outputs.should-skip != 'true' && (needs.build-demoapp.result == 'skipped' || needs.build-demoapp.result == 'success') && (needs.merge-router.result == 'skipped' || needs.merge-router.result == 'success') }}
steps:
- uses: actions/checkout@v4
- uses: ./.github/actions/configure-nodejs
Expand All @@ -659,7 +676,7 @@ jobs:
outputs:
stack-url-suffix: ${{ steps.getDeployUrl.outputs.stack-url-suffix }}
url: https://lambdadispatch${{ steps.getDeployUrl.outputs.stack-url-suffix }}.ghpublic.pwrdrvr.com
if: ${{ !cancelled() && needs.install-deps.result != 'failed' }}
if: ${{ needs.install-deps.result == 'success' }}
env:
DEMO_APP_REGISTRY_IMAGE: public.ecr.aws/pwrdrvr/lambda-dispatch-demo-app${{ github.event_name == 'pull_request' && '-dev' || '' }}
ROUTER_REGISTRY_IMAGE: public.ecr.aws/pwrdrvr/lambda-dispatch-router${{ github.event_name == 'pull_request' && '-dev' || '' }}
Expand Down Expand Up @@ -706,7 +723,7 @@ jobs:
smoke-test:
needs: [deploy]
runs-on: ubuntu-latest
if: ${{ !cancelled() && needs.deploy.result != 'failed' }}
if: ${{ needs.deploy.result == 'success' }}
steps:
- name: Smoke Test
run: |
Expand All @@ -723,10 +740,20 @@ jobs:
curl ${BASE_URL}/ping
done

# Runs the reusable performance-test workflow against the PR stack once
# the smoke test has confirmed the deployment is reachable. PR-only:
# the base (main) deployment is not load-tested from this pipeline.
performance-test:
  needs:
    - smoke-test
    - deploy
  # This expression uses no status-check function, so GitHub implicitly
  # prepends success() — all `needs` must also have succeeded.
  if: ${{ needs.smoke-test.result == 'success' && github.event_name == 'pull_request' }}
  uses: ./.github/workflows/performance-test.yml
  with:
    # PR number (coerced to the workflow_call `string` input); used by
    # the called workflow to construct the per-PR test URL.
    pr_number: ${{ github.event.pull_request.number }}

create-status-checks:
needs: [deploy, smoke-test]
needs: [deploy, smoke-test, performance-test]
runs-on: ubuntu-latest
if: ${{ !cancelled() && needs.deploy.result != 'failed' && github.event_name == 'pull_request' }}
if: ${{ needs.deploy.result == 'success' && github.event_name == 'pull_request' }}
steps:
- name: Generate URLs
id: generate-urls
Expand Down
27 changes: 27 additions & 0 deletions .github/workflows/perf-test-only.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,27 @@
# Standalone trigger workflow: lets a PR labeled PERF-TEST-ONLY run just
# the performance test (the main build workflow skips itself when it
# sees that label on a pull request).
name: Performance Test Only

on:
  pull_request:
    # `labeled` is included so that adding the label to an existing PR
    # (re)triggers this workflow without a new push.
    types: [opened, synchronize, reopened, labeled]

jobs:
  check-label:
    runs-on: ubuntu-latest
    outputs:
      # 'true' when the PR currently carries the PERF-TEST-ONLY label.
      # Job outputs are strings, so the consumer compares against 'true'.
      should-run: ${{ contains(github.event.pull_request.labels.*.name, 'PERF-TEST-ONLY') }}
    steps:
      # Diagnostic log only; the job-level output above is what gates
      # the perf-test job.
      # NOTE(review): `id: check` sets no outputs and appears unused.
      - name: Check for PERF-TEST-ONLY label
        id: check
        run: |
          if [[ "${{ contains(github.event.pull_request.labels.*.name, 'PERF-TEST-ONLY') }}" == "true" ]]; then
            echo "Label PERF-TEST-ONLY is present"
          else
            echo "Label PERF-TEST-ONLY is not present"
          fi

  perf-test:
    needs: check-label
    # String comparison because job outputs are always strings.
    if: needs.check-label.outputs.should-run == 'true'
    uses: ./.github/workflows/performance-test.yml
    with:
      # Coerced to the called workflow's `string` input; used to build
      # the per-PR test URL.
      pr_number: ${{ github.event.pull_request.number }}
95 changes: 95 additions & 0 deletions .github/workflows/performance-test.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,95 @@
# Reusable load-test workflow: runs the `oha` HTTP load generator
# against a deployed stack and posts (or updates) a results table as a
# comment on the PR.
#
# Fix in this revision: a run that records zero HTTP 200 responses made
# `statusCodeDistribution["200"]` undefined, so the posted table showed
# "NaN" for Total Success. Both the formatter and the success lookup now
# fall back to 0.
name: Performance Test

on:
  workflow_call:
    inputs:
      pr_number:
        required: true
        type: string
        description: 'PR number, used to construct the test URL'

jobs:
  performance-test:
    runs-on: ubuntu-latest
    steps:
      # Install the oha load generator from its prebuilt release binary.
      - name: Install oha
        run: |
          wget https://github.com/hatoo/oha/releases/download/v0.5.8/oha-linux-amd64 -O oha
          chmod +x oha
          sudo mv oha /usr/local/bin/

      # Build the base URL: the per-PR stack when a PR number is given,
      # otherwise the main stack.
      - name: Get Test URL
        id: get-url
        run: |
          if [ "${{ inputs.pr_number }}" != "" ]; then
            echo "url=https://lambdadispatch-pr-${{ inputs.pr_number }}.ghpublic.pwrdrvr.com" >> $GITHUB_OUTPUT
          else
            echo "url=https://lambdadispatch.ghpublic.pwrdrvr.com" >> $GITHUB_OUTPUT
          fi

      - name: Run Performance Tests
        id: perf-test
        run: |
          # Create results directory
          mkdir -p test-results

          # Run both load profiles; `-j` makes oha emit JSON, which we
          # also echo into the log for debugging.
          echo "Running latency test (20 concurrent, 60s)..."
          oha --no-tui -j -c 20 -z 60s ${{ steps.get-url.outputs.url }}/ping > test-results/latency.json
          cat test-results/latency.json

          echo "Running throughput test (100 concurrent, 60s)..."
          oha --no-tui -j -c 100 -z 60s ${{ steps.get-url.outputs.url }}/ping > test-results/throughput.json
          cat test-results/throughput.json

          # Parse results and create a markdown table. The script is
          # single-quoted for the shell; ${...} template literals pass
          # through GitHub expression expansion untouched (only ${{ }}
          # is expanded).
          node -e '
          const fs = require("fs");
          const latencyResults = JSON.parse(fs.readFileSync("test-results/latency.json"));
          const throughputResults = JSON.parse(fs.readFileSync("test-results/throughput.json"));

          // Guard against undefined/missing fields so the table never
          // renders "NaN".
          const formatNumber = (num) => Number(num ?? 0).toLocaleString(undefined, { maximumFractionDigits: 2 });

          // A run with zero 200 responses has no "200" key at all.
          const successes = (r) => r.statusCodeDistribution["200"] ?? 0;

          const table = [
            "### 🚀 Performance Test Results",
            "",
            "| Metric | Latency Test (20 concurrent) | Throughput Test (100 concurrent) |",
            "|--------|----------------------------|--------------------------------|",
            `| Duration | ${formatNumber(latencyResults.summary.total)} | ${formatNumber(throughputResults.summary.total)} |`,
            // Approximation: duration x requests/sec (the oha summary
            // carries rates, not a raw request count).
            `| Total Requests | ${formatNumber(latencyResults.summary.total * latencyResults.summary.requestsPerSec)} | ${formatNumber(throughputResults.summary.total * throughputResults.summary.requestsPerSec)} |`,
            `| Total Success | ${formatNumber(successes(latencyResults))} | ${formatNumber(successes(throughputResults))} |`,
            `| Requests/sec | ${formatNumber(latencyResults.summary.requestsPerSec)} | ${formatNumber(throughputResults.summary.requestsPerSec)} |`,
            `| Mean Latency | ${formatNumber(latencyResults.summary.average * 1000)}ms | ${formatNumber(throughputResults.summary.average * 1000)}ms |`,
            `| p95 Latency | ${formatNumber(latencyResults.latencyPercentiles.p95 * 1000)}ms | ${formatNumber(throughputResults.latencyPercentiles.p95 * 1000)}ms |`,
            `| p99 Latency | ${formatNumber(latencyResults.latencyPercentiles.p99 * 1000)}ms | ${formatNumber(throughputResults.latencyPercentiles.p99 * 1000)}ms |`,
            `| Max Latency | ${formatNumber(latencyResults.summary.slowest * 1000)}ms | ${formatNumber(throughputResults.summary.slowest * 1000)}ms |`,
            "",
            "_Note: Tests run against the /ping endpoint for 60 seconds each._",
            "",
            `*Last updated: ${new Date().toISOString()}*`,
            "" // Add empty line at the end
          ].join("\n");

          fs.writeFileSync("test-results/table.md", table);
          '

          # Expose the table as a multiline step output (heredoc syntax).
          echo "performance_results<<EOF" >> $GITHUB_OUTPUT
          cat test-results/table.md >> $GITHUB_OUTPUT
          echo "EOF" >> $GITHUB_OUTPUT

      # Locate a previous results comment (if any) so we update in place
      # rather than spamming the PR with new comments.
      - name: Find Performance Results Comment
        uses: peter-evans/find-comment@v3
        id: find-comment
        with:
          issue-number: ${{ inputs.pr_number }}
          comment-author: 'github-actions[bot]'
          body-includes: '### 🚀 Performance Test Results'

      # An empty comment-id (no prior comment found) makes this action
      # create a fresh comment; otherwise it replaces the existing body.
      - name: Post Performance Results Comment
        uses: peter-evans/create-or-update-comment@v4
        with:
          comment-id: ${{ steps.find-comment.outputs.comment-id }}
          edit-mode: replace
          issue-number: ${{ inputs.pr_number }}
          body: ${{ steps.perf-test.outputs.performance_results }}
Loading