
Commit 5ccf33e

test: loadtest with varying reqs and batch_size
Checks performance and CPU/MEM usage across different pg_net.batch_size values and numbers of requests. The results are shown in the CI job summary and can be reproduced locally the same way.
1 parent 1d714e1 · commit 5ccf33e

5 files changed (+56, -17 lines)
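To reproduce a CI run locally, invoke the loadtest script from the repo's nix shell, for example (the request count and batch size here are illustrative; any of the matrix values below works, and both arguments are optional):

  nix-shell --run "net-loadtest 20000 400"

The first argument is the number of requests passed to wait_for_many_gets(), the second is applied as the pg_net.batch_size setting; the script prints its results as markdown on stdout.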

.github/workflows/main.yml

Lines changed: 4 additions & 2 deletions
@@ -55,6 +55,9 @@ jobs:
 
   loadtest:
     runs-on: ubuntu-latest
+    strategy:
+      matrix:
+        params: [ {reqs: 10000, batch: 200}, {reqs: 20000, batch: 400}, {reqs: 40000, batch: 800} ]
 
     steps:
     - uses: actions/checkout@v4
@@ -72,8 +75,7 @@ jobs:
 
     - name: Run load test
       run: |
-        nix-shell --run "net-loadtest"
-        cat psrecord.md >> "$GITHUB_STEP_SUMMARY"
+        nix-shell --run "net-loadtest ${{ matrix.params.reqs }} ${{ matrix.params.batch }}" >> "$GITHUB_STEP_SUMMARY"
 
   coverage:
 
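With this matrix, the load test fans out into three independent jobs, which respectively run (a sketch of the expanded step, written out for clarity):

  nix-shell --run "net-loadtest 10000 200" >> "$GITHUB_STEP_SUMMARY"
  nix-shell --run "net-loadtest 20000 400" >> "$GITHUB_STEP_SUMMARY"
  nix-shell --run "net-loadtest 40000 800" >> "$GITHUB_STEP_SUMMARY"

Since the script now writes markdown to stdout, the previous cat psrecord.md step is no longer needed.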

.gitignore

Lines changed: 0 additions & 1 deletion
@@ -19,6 +19,5 @@ coverage_html
 nginx.pid
 tags
 net_worker.pid
-psrecord.*
 pg_net--*.sql
 !pg_net--*--*.sql

nix/loadtest.nix

Lines changed: 44 additions & 7 deletions
@@ -1,7 +1,7 @@
 { writeShellScriptBin, psrecord, writers, python3Packages } :
 
 let
-  toMarkdown =
+  psrecordToMd =
     writers.writePython3 "psrecord-to-md"
     {
       libraries = [ python3Packages.pandas python3Packages.tabulate ];
@@ -35,22 +35,59 @@ let
 
       df.to_markdown(sys.stdout, index=False, tablefmt="github")
     '';
+
+  csvToMd =
+    writers.writePython3 "csv-to-md"
+    {
+      libraries = [ python3Packages.pandas python3Packages.tabulate ];
+    }
+    ''
+    import sys
+    import pandas as pd
+
+    pd.read_csv(sys.stdin) \
+        .fillna("") \
+        .convert_dtypes() \
+        .to_markdown(sys.stdout, index=False, floatfmt='.0f')
+    '';
+
 in
 
 writeShellScriptBin "net-loadtest" ''
   set -euo pipefail
 
-  net-with-nginx xpg psql -c "call wait_for_many_gets()" > /dev/null &
+  reqs=""
+  batch_size_opt=""
+
+  load_dir=test/load
+  mkdir -p $load_dir
+  echo "*" >> $load_dir/.gitignore
+
+  record_result=$load_dir/psrecord.md
+  query_result=$load_dir/query_out.md
+
+  query_csv=$load_dir/query.csv
+  record_log=$load_dir/psrecord.log
+
+  if [ -n "''${1:-}" ]; then
+    reqs="$1"
+  fi
+
+  if [ -n "''${2:-}" ]; then
+    batch_size_opt="-c pg_net.batch_size=$2"
+  fi
+
+  net-with-nginx xpg --options "-c log_min_messages=WARNING $batch_size_opt" \
+    psql -c "call wait_for_many_gets($reqs)" -c "\pset format csv" -c "\o $query_csv" -c "select * from pg_net_stats" > /dev/null &
 
   # wait for process to start so we can capture it with psrecord
   sleep 2
 
-  record_log=psrecord.log
-  record_result=psrecord.md
-
   ${psrecord}/bin/psrecord $(cat build-17/bgworker.pid) --interval 1 --log "$record_log" > /dev/null
 
-  cat $record_log | ${toMarkdown} > $record_result
+  echo -e "## Loadtest results\n"
+  cat $query_csv | ${csvToMd}
 
-  echo "generated $record_result"
+  echo -e "\n\n## Loadtest elapsed seconds vs CPU/MEM\n"
+  cat $record_log | ${psrecordToMd}
 ''
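Stripped of the nix plumbing, a single measurement amounts to the following (a simplified sketch of what the script does; it assumes an already-running instance with pg_net and the loadtest SQL loaded, and reuses the script's own paths):

  # drive the requests and dump pg_net_stats as CSV, in the background
  psql -c "call wait_for_many_gets(10000)" \
       -c "\pset format csv" -c "\o test/load/query.csv" \
       -c "select * from pg_net_stats" > /dev/null &

  sleep 2  # give the call time to start before attaching psrecord

  # sample the pg_net background worker's CPU/MEM once per second;
  # psrecord keeps logging until the monitored process exits
  psrecord $(cat build-17/bgworker.pid) --interval 1 --log test/load/psrecord.log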

nix/xpg.nix

Lines changed: 2 additions & 2 deletions
@@ -3,8 +3,8 @@ let
   dep = fetchFromGitHub {
     owner = "steve-chavez";
     repo = "xpg";
-    rev = "v1.4.1";
-    sha256 = "sha256-OI9g78KbguLh+ynOnRmnMM4lVOgNRAWkiI/YMmcMs+k=";
+    rev = "v1.6.0";
+    sha256 = "sha256-NsdAmsYIRH/DWIZp93AHGYdPiJOztUIUSYcPikeebvw=";
   };
   xpg = import dep;
 in

test/utils/loadtest.sql

Lines changed: 6 additions & 5 deletions
@@ -1,8 +1,11 @@
+create table time_taken (time_taken interval);
+
 create view pg_net_stats as
 select
   count(*) filter (where error_msg is null) as request_successes,
   count(*) filter (where error_msg is not null) as request_failures,
-  (select error_msg from net._http_response where error_msg is not null order by id desc limit 1) as last_failure_error
+  (select error_msg from net._http_response where error_msg is not null order by id desc limit 1) as last_failure_error,
+  (select time_taken from time_taken limit 1) as time_taken
 from net._http_response;
 
 -- loadtest using many gets, used to be called `repro_timeouts`
@@ -24,16 +27,14 @@ begin
 
   commit;
 
-  raise notice 'Waiting until % requests complete', number_of_requests;
+  raise notice 'Waiting until % requests complete, using a pg_net.batch_size of %', number_of_requests, current_setting('pg_net.batch_size')::text;
 
   perform net._await_response(last_id);
 
   select clock_timestamp() into second_time;
 
   select age(second_time, first_time) into time_taken;
 
-  raise notice 'Stats: %', (select to_json(x) from pg_net_stats x limit 1);
-
-  raise notice 'Time taken: %', time_taken;
+  insert into time_taken values (time_taken);
 end;
 $$ language plpgsql;
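Since the stats and the elapsed time are no longer raised as notices but persisted, they can be read back after a run with a plain query against the view, for example (assuming the same database the procedure ran in):

  psql -c "select request_successes, request_failures, last_failure_error, time_taken from pg_net_stats"

which mirrors the select * from pg_net_stats that net-loadtest renders as a markdown table in the CI summary.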
