forked from ml-in-barcelona/server-reason-react
-
Notifications
You must be signed in to change notification settings - Fork 0
97 lines (79 loc) · 2.6 KB
/
benchmark.yml
File metadata and controls
97 lines (79 loc) · 2.6 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
---
# CI workflow: benchmarks the native (OCaml) server against JS frameworks.
# Runs on version tags (v*) or manually via workflow_dispatch with optional
# framework/scenario filters.
name: Framework Comparison

# NOTE: `on` is a YAML 1.1 truthy key; GitHub's loader handles it correctly,
# so yamllint's `truthy` warning can be suppressed here.
on:
  push:
    tags:
      - 'v*'
  workflow_dispatch:
    inputs:
      frameworks:
        description: 'Frameworks to test (comma-separated, or "all")'
        required: false
        default: 'all'
      scenarios:
        description: 'Scenarios to run (comma-separated, or "all")'
        required: false
        default: 'trivial,table100,table500'

jobs:
  benchmark-frameworks:
    name: Compare Frameworks
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4

      - name: Setup OCaml
        uses: ocaml/setup-ocaml@v3
        with:
          ocaml-compiler: 5.2.x
          dune-cache: true

      - name: Setup Node.js
        uses: actions/setup-node@v4
        with:
          node-version: '20'

      - name: Setup Bun
        uses: oven-sh/setup-bun@v1

      # wrk is the HTTP load generator used by the benchmark runner.
      - name: Install wrk
        run: |
          sudo apt-get update
          sudo apt-get install -y wrk

      - name: Install OCaml dependencies
        run: opam install . --deps-only -y

      - name: Build native server
        run: opam exec -- dune build benchmark/native/server.exe --profile=release

      - name: Install JS framework dependencies
        working-directory: benchmark/frameworks
        run: npm install

      - name: Install runner dependencies
        working-directory: benchmark/runner
        run: npm install

      - name: Start native server
        # NOTE(review): fixed sleep assumes the server binds within 2s;
        # a readiness poll would be more robust, but the server's port is
        # not visible here — confirm before changing.
        run: |
          opam exec -- _build/default/benchmark/native/server.exe &
          sleep 2

      - name: Run framework comparison
        working-directory: benchmark/runner
        # SECURITY: workflow_dispatch inputs are attacker-influenced text.
        # Interpolating ${{ }} directly inside `run:` allows shell injection,
        # so inputs are passed through `env:` and read as shell variables.
        env:
          FRAMEWORKS: ${{ github.event.inputs.frameworks || 'all' }}
          SCENARIOS: ${{ github.event.inputs.scenarios || 'trivial,table100,table500' }}
        run: |
          ARGS=""
          if [ "$FRAMEWORKS" != "all" ]; then
            ARGS="$ARGS --frameworks $FRAMEWORKS"
          fi
          if [ "$SCENARIOS" != "all" ]; then
            ARGS="$ARGS --scenarios $SCENARIOS"
          fi
          # $ARGS is deliberately unquoted: it holds multiple CLI words.
          node runner.mjs $ARGS

      - name: Upload results
        uses: actions/upload-artifact@v4
        with:
          name: framework-comparison-${{ github.sha }}
          path: benchmark/runner/results/
          retention-days: 90

      - name: Add results to summary
        # Append the newest markdown report to the job summary; quote the
        # $GITHUB_STEP_SUMMARY redirection targets defensively.
        run: |
          echo "## Framework Comparison Results" >> "$GITHUB_STEP_SUMMARY"
          echo "" >> "$GITHUB_STEP_SUMMARY"
          LATEST=$(ls -t benchmark/runner/results/*.md 2>/dev/null | head -1)
          if [ -n "$LATEST" ]; then
            cat "$LATEST" >> "$GITHUB_STEP_SUMMARY"
          else
            echo "No results generated" >> "$GITHUB_STEP_SUMMARY"
          fi