-
Notifications
You must be signed in to change notification settings - Fork 0
211 lines (189 loc) · 6.58 KB
/
performance-tests.yml
File metadata and controls
211 lines (189 loc) · 6.58 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
---
# CI workflow: runs Locust load + stress tests against the FastAPI backend,
# analyzes the results, and fails the build on performance regressions.
name: Performance Tests

on:
  push:
    branches: [main, develop]
    paths:
      - 'flet-multiplatform-app/src/**'
      - 'flet-multiplatform-app/tests/performance/**'
      - '.github/workflows/performance-tests.yml'
  pull_request:
    branches: [main, develop]
    paths:
      - 'flet-multiplatform-app/src/**'
      - 'flet-multiplatform-app/tests/performance/**'
      - '.github/workflows/performance-tests.yml'
  # Manual runs may override the load parameters below.
  workflow_dispatch:
    inputs:
      users:
        description: 'Number of concurrent users'
        required: false
        default: '10'
      spawn_rate:
        description: 'Spawn rate (users per second)'
        required: false
        default: '2'
      duration:
        description: 'Test duration (e.g., 5m, 1h)'
        required: false
        default: '5m'

jobs:
  performance-test:
    name: Performance Test
    runs-on: ubuntu-latest

    env:
      DATABASE_URL: postgresql://postgres:postgres@localhost:5432/test_db
      TEST_ENV: ci
      PYTHONPATH: ${{ github.workspace }}
      # Fall back to the defaults when not triggered via workflow_dispatch.
      PERF_TEST_USERS: ${{ github.event.inputs.users || '10' }}
      PERF_TEST_SPAWN_RATE: ${{ github.event.inputs.spawn_rate || '2' }}
      PERF_TEST_DURATION: ${{ github.event.inputs.duration || '5m' }}
      PERF_TEST_CONFIG: flet-multiplatform-app/tests/performance/config/performance_config.yaml

    services:
      postgres:
        image: postgres:14
        env:
          POSTGRES_USER: postgres
          POSTGRES_PASSWORD: postgres
          POSTGRES_DB: test_db
        ports:
          # Quoted to avoid YAML 1.1 sexagesimal/number parsing of "host:container".
          - "5432:5432"
        options: >-
          --health-cmd pg_isready
          --health-interval 10s
          --health-timeout 5s
          --health-retries 5

    steps:
      - name: Checkout code
        uses: actions/checkout@v4

      - name: Set up Python
        uses: actions/setup-python@v5
        with:
          # Quoted: an unquoted 3.10 would be parsed as the float 3.1.
          python-version: '3.10'

      - name: Install system dependencies
        run: |
          sudo apt-get update
          sudo apt-get install -y python3-dev python3-pip python3-venv libpq-dev

      - name: Create and activate virtual environment
        run: |
          python -m venv .venv
          echo "$GITHUB_WORKSPACE/.venv/bin" >> $GITHUB_PATH

      - name: Install project dependencies
        run: |
          . .venv/bin/activate
          python -m pip install --upgrade pip
          pip install -r flet-multiplatform-app/requirements.txt
          pip install -r flet-multiplatform-app/requirements-dev.txt
          pip install -r flet-multiplatform-app/tests/performance/requirements-test.txt

      - name: Create required directories
        run: |
          mkdir -p flet-multiplatform-app/test_data/perf
          mkdir -p flet-multiplatform-app/results/performance
          mkdir -p flet-multiplatform-app/reports/performance/plots
          mkdir -p flet-multiplatform-app/test-results/performance

      - name: Wait for PostgreSQL
        run: |
          for i in {1..10}; do
            if pg_isready -h localhost -p 5432 -U postgres -d test_db; then
              echo "PostgreSQL is ready"
              exit 0
            fi
            echo "Waiting for PostgreSQL to be ready..."
            sleep 3
          done
          echo "Failed to connect to PostgreSQL"
          exit 1

      - name: Generate test data
        run: |
          . .venv/bin/activate
          cd flet-multiplatform-app
          python scripts/generate_perf_test_data.py

      - name: Generate sample metrics (for testing)
        run: |
          . .venv/bin/activate
          cd flet-multiplatform-app
          python scripts/generate_sample_metrics.py
          cp test_data/performance/baseline_metrics.json test-results/performance/

      - name: Start application in background
        run: |
          . .venv/bin/activate
          cd flet-multiplatform-app
          nohup uvicorn src.backend.app.main:app --host 0.0.0.0 --port 8000 > app.log 2>&1 &
          echo $! > app.pid
          echo "Waiting for application to start..."
          sleep 5

      - name: Run load test
        run: |
          . .venv/bin/activate
          cd flet-multiplatform-app
          set -x
          # Run load test with Locust
          locust -f tests/performance/test_load.py \
            --headless \
            -u $PERF_TEST_USERS \
            -r $PERF_TEST_SPAWN_RATE \
            -t $PERF_TEST_DURATION \
            --csv=results/performance/load_test \
            --html=results/performance/load_test_report.html \
            --logfile=results/performance/locust_load.log \
            --loglevel=INFO

      - name: Run stress test
        run: |
          . .venv/bin/activate
          cd flet-multiplatform-app
          # Run stress test with higher load (5x users, 2x spawn rate)
          locust -f tests/performance/test_stress.py \
            --headless \
            -u $(($PERF_TEST_USERS * 5)) \
            -r $(($PERF_TEST_SPAWN_RATE * 2)) \
            -t $PERF_TEST_DURATION \
            --csv=results/performance/stress_test \
            --html=results/performance/stress_test_report.html \
            --logfile=results/performance/locust_stress.log \
            --loglevel=INFO

      - name: Analyze performance results
        run: |
          . .venv/bin/activate
          cd flet-multiplatform-app
          python scripts/analyze_performance.py \
            --input-dir results/performance \
            --output-dir test-results/performance \
            --format html,json \
            --generate-plots

      - name: Check for performance regressions
        id: check_perf
        # Allow the step to fail so the next step can surface a clear error.
        continue-on-error: true
        run: |
          . .venv/bin/activate
          cd flet-multiplatform-app
          python scripts/check_performance.py \
            --baseline test-results/performance/baseline_metrics.json \
            --current test-results/performance/current_metrics.json \
            --threshold 0.15 # flag regressions of 15% or worse

      - name: Fail on performance regression
        if: steps.check_perf.outcome == 'failure'
        run: |
          echo "::error::Performance regression detected! Check the performance report for details."
          exit 1

      - name: Upload performance report
        uses: actions/upload-artifact@v4
        if: always()
        with:
          name: performance-report
          path: |
            flet-multiplatform-app/results/performance/**
            flet-multiplatform-app/reports/performance/**
            flet-multiplatform-app/test-results/performance/**
          retention-days: 7

      - name: Upload performance metrics
        uses: actions/upload-artifact@v4
        if: always()
        with:
          name: performance-metrics
          path: flet-multiplatform-app/test-results/performance/*.json
          retention-days: 30

      - name: Stop application
        if: always()
        run: |
          if [ -f flet-multiplatform-app/app.pid ]; then
            kill $(cat flet-multiplatform-app/app.pid) || true
          fi