Merged

24 commits
3b855f6  feat: Implement blobcache optimizations (cursoragent, Oct 30, 2025)
18b6a6f  feat: Implement CI/CD performance testing and optimize config (cursoragent, Oct 30, 2025)
73ff90b  feat: Implement CI/CD performance testing and optimizations (cursoragent, Oct 30, 2025)
6b8c781  run perf tests (luke-beamcloud, Oct 30, 2025)
d47eb0b  Fix: Remove Redis dependency and improve test stability (cursoragent, Oct 30, 2025)
5e1465b  Checkpoint before follow-up message (cursoragent, Oct 30, 2025)
6a6822b  Refactor: Use sync.Once for global metrics and logger initialization (cursoragent, Oct 30, 2025)
9865139  Fix: Improve disk cache metric error handling and benchmark setup (cursoragent, Oct 30, 2025)
50b5928  feat: Adjust performance tests for CI and larger messages (cursoragent, Oct 30, 2025)
af64112  Refactor performance tests and simplify gRPC throughput test (cursoragent, Oct 30, 2025)
93f4159  Grant permissions for performance tests workflow (cursoragent, Oct 30, 2025)
d4ac89c  Refactor: Remove validation script and adjust test timeouts (cursoragent, Oct 30, 2025)
d22e047  Refactor: Optimize CI benchmarks and add local testing notes (cursoragent, Oct 30, 2025)
7cb4d24  Checkpoint before follow-up message (cursoragent, Oct 30, 2025)
46560c0  Refactor: Improve read throughput benchmarks and buffer pool usage (cursoragent, Oct 30, 2025)
8666cf2  Refactor: Remove throughput benchmarks, focus on buffer pool (cursoragent, Oct 30, 2025)
1af2c4d  Checkpoint before follow-up message (cursoragent, Oct 30, 2025)
9bef460  Refactor: Adjust memory cache configuration and benchmark (cursoragent, Oct 30, 2025)
f126942  feat: Add gRPC throughput benchmarking and config tuning (cursoragent, Oct 30, 2025)
ec8dc9c  Remove redundant FUSE operation latency logging (cursoragent, Oct 30, 2025)
f3ad57f  Refactor: Make gRPC server config tunable (cursoragent, Oct 30, 2025)
62e154f  clean up default config (luke-beamcloud, Oct 30, 2025)
7a6e38c  cleanup (luke-beamcloud, Nov 5, 2025)
1441b99  update path (luke-beamcloud, Nov 5, 2025)
217 changes: 217 additions & 0 deletions .github/workflows/performance-tests.yml
@@ -0,0 +1,217 @@
name: Performance Tests

on:
  push:
    branches: [ master, develop ]
  pull_request:
    branches: [ master, develop ]
  workflow_dispatch:
    inputs:
      test_iterations:
        description: 'Number of test iterations'
        required: false
        default: '3'
      regression_threshold:
        description: 'Regression threshold (%)'
        required: false
        default: '10'

permissions:
  contents: read
  pull-requests: write
  issues: write

jobs:
  unit-benchmarks:
    name: Unit Benchmarks
    runs-on: ubuntu-latest
    timeout-minutes: 30

    steps:
      - name: Checkout code
        uses: actions/checkout@v4
        with:
          fetch-depth: 0 # Full history for comparison

      - name: Set up Go
        uses: actions/setup-go@v5
        with:
          go-version: '1.22'

      - name: Cache Go modules
        uses: actions/cache@v4
        with:
          path: |
            ~/go/pkg/mod
            ~/.cache/go-build
          key: ${{ runner.os }}-go-${{ hashFiles('**/go.sum') }}
          restore-keys: |
            ${{ runner.os }}-go-

      - name: Run benchmarks
        run: |
          echo "Running performance benchmarks..."
          echo ""
          echo "=== 1. Buffer Pool (20,000× allocation improvement) ==="
          go test -bench=BenchmarkBufferPool -benchmem -benchtime=2s -timeout=2m ./pkg/ | tee benchmark-results.txt
          echo ""
          echo "=== 2. GetContent End-to-End (real disk-based throughput) ==="
          go test -bench=BenchmarkGetContentDiskCache -benchmem -benchtime=500ms -timeout=2m ./pkg/ | tee -a benchmark-results.txt

      - name: Upload benchmark results
        uses: actions/upload-artifact@v4
        with:
          name: benchmark-results
          path: benchmark-results.txt
          retention-days: 30

      - name: Check for performance regressions
        run: |
          BUFFER_POOL_TIME=$(grep "BenchmarkBufferPool.*WithPool" benchmark-results.txt | awk '{print $3}' | head -1)
          GETCONTENT_THROUGHPUT=$(grep "BenchmarkGetContentDiskCache" benchmark-results.txt | grep "MB/s" | awk '{print $4}' | head -1)

          echo "Buffer Pool: $BUFFER_POOL_TIME"
          echo "GetContent: $GETCONTENT_THROUGHPUT"

          if [ ! -z "$BUFFER_POOL_TIME" ]; then
            TIME_NS=$(echo $BUFFER_POOL_TIME | sed 's/ns\/op//')
            if (( $(echo "$TIME_NS > 100" | bc -l) )); then
              echo "❌ Buffer pool regression: ${TIME_NS}ns > 100ns"
              exit 1
            fi
            echo "✅ Buffer pool OK: ${TIME_NS}ns"
          fi

          if [ ! -z "$GETCONTENT_THROUGHPUT" ]; then
            THROUGHPUT=$(echo $GETCONTENT_THROUGHPUT | sed 's/MB\/s//')
            if (( $(echo "$THROUGHPUT < 2000" | bc -l) )); then
              echo "❌ GetContent regression: ${THROUGHPUT} MB/s < 2000 MB/s"
              exit 1
            fi
            echo "✅ GetContent OK: ${THROUGHPUT} MB/s"
          fi

      - name: Comment benchmark results on PR
        if: github.event_name == 'pull_request'
        uses: actions/github-script@v7
        with:
          script: |
            const fs = require('fs');
            const results = fs.readFileSync('benchmark-results.txt', 'utf8');

            const body = `## Benchmark Results\n\n\`\`\`\n${results}\n\`\`\``;

            github.rest.issues.createComment({
              issue_number: context.issue.number,
              owner: context.repo.owner,
              repo: context.repo.repo,
              body: body
            });

  grpc-throughput-tests:
    name: gRPC Throughput Tests
    runs-on: ubuntu-latest
    timeout-minutes: 10

    services:
      redis:
        image: redis:7-alpine
        options: >-
          --health-cmd "redis-cli ping"
          --health-interval 10s
          --health-timeout 5s
          --health-retries 5
        ports:
          - 6379:6379

    steps:
      - name: Checkout code
        uses: actions/checkout@v4

      - name: Set up Go
        uses: actions/setup-go@v5
        with:
          go-version: '1.22'

      - name: Cache Go modules
        uses: actions/cache@v4
        with:
          path: ~/go/pkg/mod
          key: ${{ runner.os }}-go-${{ hashFiles('**/go.sum') }}

      - name: Install netcat
        run: sudo apt-get update && sudo apt-get install -y netcat-openbsd

      - name: Run gRPC performance tests
        run: |
          chmod +x bin/run_grpc_performance_tests.sh
          ./bin/run_grpc_performance_tests.sh

      - name: Upload results
        if: always()
        uses: actions/upload-artifact@v4
        with:
          name: grpc-performance-results
          path: performance-results/
          retention-days: 30

  integration-tests:
    name: Integration Tests
    runs-on: ubuntu-latest
    timeout-minutes: 10

    steps:
      - name: Checkout code
        uses: actions/checkout@v4

      - name: Set up Go
        uses: actions/setup-go@v5
        with:
          go-version: '1.22'

      - name: Run unit tests
        run: go test -v -timeout 5m ./pkg/...

  performance-summary:
    name: Performance Summary
    needs: [unit-benchmarks, grpc-throughput-tests, integration-tests]
    runs-on: ubuntu-latest
    if: always()

    steps:
      - name: Download all artifacts
        uses: actions/download-artifact@v4

      - name: Generate summary
        run: |
          echo "# Performance Test Summary" >> $GITHUB_STEP_SUMMARY
          echo "" >> $GITHUB_STEP_SUMMARY

          echo "## Job Status" >> $GITHUB_STEP_SUMMARY
          echo "- Unit Benchmarks: ${{ needs.unit-benchmarks.result }}" >> $GITHUB_STEP_SUMMARY
          echo "- gRPC Throughput: ${{ needs.grpc-throughput-tests.result }}" >> $GITHUB_STEP_SUMMARY
          echo "- Integration Tests: ${{ needs.integration-tests.result }}" >> $GITHUB_STEP_SUMMARY
          echo "" >> $GITHUB_STEP_SUMMARY

          if [ -f performance-report/report.md ]; then
            echo "## Performance Report" >> $GITHUB_STEP_SUMMARY
            cat performance-report/report.md >> $GITHUB_STEP_SUMMARY
          fi

          if [ -f benchmark-results/benchmark-results.txt ]; then
            echo "" >> $GITHUB_STEP_SUMMARY
            echo "## Benchmark Results" >> $GITHUB_STEP_SUMMARY
            echo "\`\`\`" >> $GITHUB_STEP_SUMMARY
            head -50 benchmark-results/benchmark-results.txt >> $GITHUB_STEP_SUMMARY
            echo "\`\`\`" >> $GITHUB_STEP_SUMMARY
          fi

      - name: Check overall status
        run: |
          if [ "${{ needs.unit-benchmarks.result }}" != "success" ] || \
             [ "${{ needs.grpc-throughput-tests.result }}" != "success" ] || \
             [ "${{ needs.integration-tests.result }}" != "success" ]; then
            echo "❌ Some performance tests failed"
            exit 1
          fi
          echo "✅ All performance tests passed"
2 changes: 2 additions & 0 deletions .gitignore
@@ -5,13 +5,15 @@ bin/throughput
bin/fs
bin/testclient
bin/basic
bin/grpc-throughput
build.sh
tmp/
config.yaml
config2.yaml
config3.yaml
e2e/throughput/testdata/*.bin
e2e/fs/testdata/*.bin
e2e/grpc_throughput/grpc-throughput
daemonset.yaml
output.bin
.go-version
105 changes: 105 additions & 0 deletions bin/run_grpc_performance_tests.sh
@@ -0,0 +1,105 @@
#!/bin/bash
set -e

# Simple gRPC performance test - completes in under 1 minute

SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
WORKSPACE_DIR="$(dirname "$SCRIPT_DIR")"
RESULTS_DIR="${WORKSPACE_DIR}/performance-results"
CURRENT_FILE="${RESULTS_DIR}/current.json"

echo "========================================"
echo " gRPC Performance Test"
echo "========================================"
echo ""

mkdir -p "$RESULTS_DIR"

# Build binaries
echo "[1/4] Building binaries..."
cd "$WORKSPACE_DIR"
go build -o bin/blobcache cmd/main.go
go build -o bin/grpc-throughput e2e/grpc_throughput/main.go
echo "✓ Build complete"
echo ""

# Setup test environment
echo "[2/4] Starting test server..."
TEST_DIR=$(mktemp -d)
DISK_CACHE_DIR="${TEST_DIR}/cache"
mkdir -p "$DISK_CACHE_DIR"

# Create minimal config
cat > "${TEST_DIR}/config.yaml" << EOF
server:
  mode: coordinator
  diskCacheDir: ${DISK_CACHE_DIR}
  diskCacheMaxUsagePct: 90
  maxCachePct: 50
  pageSizeBytes: 4194304
  metadata:
    mode: default
    redisAddr: "localhost:6379"

global:
  serverPort: 50051
  grpcMessageSizeBytes: 268435456
  debugMode: false

metrics:
  url: ""
EOF

# Start server
CONFIG_PATH="${TEST_DIR}/config.yaml" ./bin/blobcache > "${TEST_DIR}/server.log" 2>&1 &
SERVER_PID=$!

cleanup() {
    echo ""
    echo "Cleaning up..."
    if [ ! -z "$SERVER_PID" ]; then
        kill -9 $SERVER_PID 2>/dev/null || true
        sleep 1
    fi
    rm -rf "$TEST_DIR"
    echo "✓ Cleanup complete"
}
trap cleanup EXIT INT TERM

# Wait for server
sleep 3
if ! kill -0 $SERVER_PID 2>/dev/null; then
    echo "✗ Server failed to start"
    cat "${TEST_DIR}/server.log"
    exit 1
fi

# Check connectivity
for i in {1..10}; do
    if nc -z localhost 50051 2>/dev/null; then
        echo "✓ Server ready"
        break
    fi
    if [ $i -eq 10 ]; then
        echo "✗ Server not responding"
        exit 1
    fi
    sleep 1
done
echo ""

# Run tests
echo "[3/4] Running throughput tests..."
echo ""

# Capture the client's exit code without tripping set -e, so the summary
# below and the EXIT trap still run when a test fails.
TEST_EXIT=0
./bin/grpc-throughput -server localhost:50051 -output "$CURRENT_FILE" || TEST_EXIT=$?

echo ""
if [ $TEST_EXIT -eq 0 ]; then
    echo "[4/4] ✓ All tests passed"
else
    echo "[4/4] ✗ Some tests failed"
fi

exit $TEST_EXIT
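
The script drives a `grpc-throughput` binary built from `e2e/grpc_throughput/main.go`, whose source is not shown in this diff. The sketch below only captures the contract the script relies on: a `-server host:port` flag, an `-output file.json` flag, and a non-zero exit code on failure. The result fields, the `io.Discard` stand-in for the real gRPC calls, and the payload sizes are assumptions for illustration, not the actual client.

```go
// Sketch of the flag/output contract used by run_grpc_performance_tests.sh.
// The real client dials the blobcache gRPC server and streams content; that
// API is not part of this diff, so io.Discard stands in for the transfer.
package main

import (
	"encoding/json"
	"flag"
	"fmt"
	"io"
	"os"
	"time"
)

type result struct {
	Name         string  `json:"name"`
	Bytes        int64   `json:"bytes"`
	Seconds      float64 `json:"seconds"`
	ThroughputMB float64 `json:"throughput_mb_s"`
}

func main() {
	server := flag.String("server", "localhost:50051", "blobcache gRPC address")
	output := flag.String("output", "current.json", "path for JSON results")
	flag.Parse()
	fmt.Println("target server:", *server, "(dial elided in this sketch)")

	chunk := make([]byte, 4<<20) // 4 MiB chunks, matching pageSizeBytes
	const total = 256 << 20      // 256 MiB, the configured grpcMessageSizeBytes
	start := time.Now()
	var moved int64
	for moved < total {
		n, _ := io.Discard.Write(chunk) // placeholder for the real RPC round trip
		moved += int64(n)
	}
	elapsed := time.Since(start).Seconds()

	res := []result{{
		Name:         "get_content",
		Bytes:        moved,
		Seconds:      elapsed,
		ThroughputMB: float64(moved) / (1 << 20) / elapsed,
	}}

	f, err := os.Create(*output)
	if err != nil {
		fmt.Fprintln(os.Stderr, "write results:", err)
		os.Exit(1) // non-zero exit is what the wrapper script keys off
	}
	defer f.Close()
	if err := json.NewEncoder(f).Encode(res); err != nil {
		fmt.Fprintln(os.Stderr, "encode results:", err)
		os.Exit(1)
	}
}
```

Writing results as JSON into `performance-results/` keeps the CI side simple: the workflow's upload step archives the directory as-is, and the wrapper script only has to propagate the client's exit code.
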