# .github/workflows/performance-benchmarking.yml
# (Extraction note: removed GitHub page chrome and the copied line-number
# gutter that preceded the workflow content — they were not part of the file.)
---
# Runs the project's performance benchmark suite, enforces SLO gates,
# comments results on PRs, and (on main) refreshes the stored baseline.
name: Performance Benchmarking

on:
  # Manual runs may tune iteration count and the baseline to compare against.
  workflow_dispatch:
    inputs:
      iterations:
        description: 'Number of benchmark iterations'
        required: false
        default: '1000'
        type: string
      baseline_ref:
        description: 'Git reference for baseline comparison (branch/tag/commit)'
        required: false
        default: 'main'
        type: string
  # Only benchmark-relevant paths trigger CI runs.
  pull_request:
    paths:
      - 'crates/terraphim_*/src/**'
      - 'terraphim_server/src/**'
      - 'scripts/run-performance-benchmarks.sh'
      - '.github/workflows/performance-benchmarking.yml'
  push:
    branches: [main, develop]
    paths:
      - 'crates/terraphim_*/src/**'
      - 'terraphim_server/src/**'
      - 'scripts/run-performance-benchmarks.sh'

env:
  CARGO_TERM_COLOR: always
  RUST_BACKTRACE: 1
jobs:
  performance-benchmarks:
    name: Performance Benchmarks
    runs-on: ubuntu-latest
    timeout-minutes: 30
    # FIX: downstream jobs read needs.performance-benchmarks.outputs.*;
    # without this outputs mapping (and the step id it references) those
    # expressions silently evaluate to empty strings.
    outputs:
      slo-compliance: ${{ steps.perf-gates.outputs.slo-compliance }}
      performance-gates-passed: ${{ steps.perf-gates.outputs.performance-gates-passed }}
    steps:
      - name: Checkout code
        uses: actions/checkout@v4
        with:
          fetch-depth: 0  # Full history so baseline refs can be resolved

      - name: Set up Rust
        uses: dtolnay/rust-toolchain@stable

      - name: Cache Rust dependencies
        uses: actions/cache@v4
        with:
          path: |
            ~/.cargo/registry
            ~/.cargo/git
            target
          key: ${{ runner.os }}-cargo-${{ hashFiles('**/Cargo.lock') }}
          restore-keys: |
            ${{ runner.os }}-cargo-

      - name: Install system dependencies
        run: |
          sudo apt-get update
          sudo apt-get install -y curl jq bc wrk

      - name: Download baseline (if comparing)
        if: github.event.inputs.baseline_ref || github.event_name == 'pull_request'
        run: |
          if [ -n "${{ github.event.inputs.baseline_ref }}" ]; then
            BASELINE_REF="${{ github.event.inputs.baseline_ref }}"
          elif [ "${{ github.event_name }}" == "pull_request" ]; then
            BASELINE_REF="${{ github.event.pull_request.base.ref }}"
          else
            BASELINE_REF="main"
          fi
          echo "Downloading baseline from ref: $BASELINE_REF"
          # Placeholder: real baselines would be fetched from stored artifacts
          # or a separate repo. Create an empty baseline if none exists.
          mkdir -p benchmark-results
          echo '{"timestamp":"2024-01-01T00:00:00Z","results":{}}' > benchmark-results/baseline.json

      - name: Start Terraphim server
        run: |
          # Build and start the server in the background.
          cargo build --release --package terraphim_server
          ./target/release/terraphim_server &
          SERVER_PID=$!
          # FIX: previously the loop could time out without failing the job,
          # letting benchmarks run against a dead server.
          STARTED=false
          for i in {1..30}; do
            if curl -s http://localhost:3000/health > /dev/null; then
              echo "Server started successfully"
              STARTED=true
              break
            fi
            sleep 2
          done
          if [ "$STARTED" != "true" ]; then
            echo "Server failed to start within timeout"
            kill $SERVER_PID || true
            exit 1
          fi
          # Store PID for cleanup in the stop step.
          echo $SERVER_PID > server.pid

      - name: Run performance benchmarks
        run: |
          export TERRAPHIM_BENCH_ITERATIONS="${{ github.event.inputs.iterations || '1000' }}"
          export TERRAPHIM_SERVER_URL="http://localhost:3000"
          chmod +x scripts/run-performance-benchmarks.sh
          ./scripts/run-performance-benchmarks.sh --verbose

      - name: Stop Terraphim server
        if: always()
        run: |
          if [ -f server.pid ]; then
            kill $(cat server.pid) || true
            rm server.pid
          fi

      - name: Upload benchmark results
        uses: actions/upload-artifact@v4
        if: always()
        with:
          name: benchmark-results-${{ github.run_id }}
          path: benchmark-results/
          retention-days: 30

      - name: Generate performance report
        if: always()
        run: |
          # FIX: '[ -f "dir/*/file" ]' tests the literal string (globs do not
          # expand inside quotes), so the summary was never written. Use find.
          REPORT_FILE=$(find benchmark-results -name "benchmark_report.md" 2>/dev/null | head -1)
          if [ -n "$REPORT_FILE" ]; then
            echo "## Performance Benchmark Report" >> "$GITHUB_STEP_SUMMARY"
            echo "" >> "$GITHUB_STEP_SUMMARY"
            cat "$REPORT_FILE" >> "$GITHUB_STEP_SUMMARY"
          fi

      - name: Check performance gates
        # FIX: the id is required for the job-level outputs mapping above.
        id: perf-gates
        run: |
          # FIX: same quoted-glob bug as the report step — locate via find.
          RESULTS_FILE=$(find benchmark-results -name "benchmark_results.json" 2>/dev/null | head -1)
          if [ -n "$RESULTS_FILE" ]; then
            # Extract SLO compliance percentage.
            SLO_COMPLIANCE=$(jq -r '.slo_compliance.overall_compliance // 0' "$RESULTS_FILE")
            echo "SLO Compliance: ${SLO_COMPLIANCE}%"
            echo "slo-compliance=${SLO_COMPLIANCE}" >> "$GITHUB_OUTPUT"
            # Check critical violations.
            CRITICAL_VIOLATIONS=$(jq -r '.slo_compliance.critical_violations | length' "$RESULTS_FILE")
            if [ "$CRITICAL_VIOLATIONS" -gt 0 ]; then
              echo "❌ Critical performance violations detected!"
              jq -r '.slo_compliance.critical_violations[] | "🚨 \(.metric): \(.actual_value) (threshold: \(.threshold_value))"' "$RESULTS_FILE"
              echo "performance-gates-passed=false" >> "$GITHUB_OUTPUT"
              exit 1
            else
              echo "✅ All performance gates passed"
              echo "performance-gates-passed=true" >> "$GITHUB_OUTPUT"
            fi
          else
            echo "No benchmark results found"
            echo "slo-compliance=0" >> "$GITHUB_OUTPUT"
            echo "performance-gates-passed=false" >> "$GITHUB_OUTPUT"
            exit 1
          fi

      - name: Comment on PR (if applicable)
        if: github.event_name == 'pull_request' && always()
        uses: actions/github-script@v8
        with:
          script: |
            const fs = require('fs');
            const path = require('path');
            // FIX: the 'glob' package is not bundled with github-script and
            // require('glob') threw; walk the directory with fs instead.
            const findReport = (dir) => {
              if (!fs.existsSync(dir)) return null;
              for (const entry of fs.readdirSync(dir, { withFileTypes: true })) {
                const full = path.join(dir, entry.name);
                if (entry.isDirectory()) {
                  const found = findReport(full);
                  if (found) return found;
                } else if (entry.name === 'benchmark_report.md') {
                  return full;
                }
              }
              return null;
            };
            const reportPath = findReport('benchmark-results');
            if (reportPath) {
              const report = fs.readFileSync(reportPath, 'utf8');
              // Extract key metrics for the comment.
              const sloMatch = report.match(/SLO Compliance: (\d+\.?\d*)%/);
              const sloCompliance = sloMatch ? sloMatch[1] : 'N/A';
              // FIX: join("\\n") produced a literal backslash-n in the comment
              // body; "\n" yields real newlines.
              const comment = [
                "## 🚀 Performance Benchmark Results",
                "",
                `**SLO Compliance:** ${sloCompliance}%`,
                "",
                "### Key Findings:",
                report.includes('violations')
                  ? '⚠️ Some performance thresholds were not met'
                  : '✅ All performance requirements satisfied',
                "",
                `[View full report](${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }})`,
              ].join("\n");
              await github.rest.issues.createComment({
                issue_number: context.issue.number,
                owner: context.repo.owner,
                repo: context.repo.repo,
                body: comment
              });
            }
performance-regression-check:
name: Performance Regression Check
runs-on: ubuntu-latest
needs: performance-benchmarks
if: always() && needs.performance-benchmarks.result == 'success'
steps:
- name: Check for regressions
run: |
# Compare current results with baseline
# This is a simplified check - in practice you'd want more sophisticated analysis
if [ "${{ needs.performance-benchmarks.outputs.performance-gates-passed }}" == "false" ]; then
echo "Performance regression detected!"
exit 1
else
echo "No performance regressions detected"
fi
update-baseline:
name: Update Performance Baseline
runs-on: ubuntu-latest
needs: [performance-benchmarks, performance-regression-check]
if: github.ref == 'refs/heads/main' && needs.performance-regression-check.result == 'success'
steps:
- name: Checkout code
uses: actions/checkout@v4
- name: Download benchmark results
uses: actions/download-artifact@v4
with:
name: benchmark-results-${{ github.run_id }}
- name: Update baseline
run: |
# Copy latest results as new baseline
if [ -f "benchmark-results/*/benchmark_results.json" ]; then
RESULTS_FILE=$(find benchmark-results -name "benchmark_results.json" | head -1)
cp "$RESULTS_FILE" "benchmark-results/baseline.json"
echo "Updated performance baseline"
fi
- name: Commit baseline update
run: |
git config --global user.name 'github-actions[bot]'
git config --global user.email 'github-actions[bot]@users.noreply.github.com'
git add benchmark-results/baseline.json
git commit -m "chore: update performance baseline
Auto-updated from CI run: ${{ github.run_id }}
SLO Compliance: ${{ needs.performance-benchmarks.outputs.slo-compliance }}%" || echo "No changes to commit"
git push origin main