Skip to content

Commit d067316

Browse files
committed
Run performance benchmarks as CI job
To guard against performance regressions introduced by changes, run a fixed set of benchmarks (with this commit: the LLBMC benchmark suite) on each pull request.
1 parent e0afa1f commit d067316

File tree

2 files changed

+165
-0
lines changed

2 files changed

+165
-0
lines changed

Diff for: .github/workflows/performance-compare.py

+79
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,79 @@
1+
#!/usr/bin/env python3
2+
3+
from collections import defaultdict
4+
5+
import csv
6+
import sys
7+
8+
9+
def compare(results):
    """Compare base and PR benchmark results and report differences.

    results maps a benchmark name to a dict with (up to) two entries,
    "base" and "pr", each a CSV row (dict) containing at least the
    columns "Result", "usertime" and "maxmem".

    A benchmark counts as different when a result is missing on either
    side, the verification results disagree, or usertime/maxmem change
    by more than a fixed absolute threshold.  Returns the list of
    differing benchmark names; each name appears at most once.
    """
    differences = []

    for bm, entry in results.items():
        base_row = entry.get("base")
        pr_row = entry.get("pr")

        if not base_row:
            print("Missing base result for " + bm)
            differences.append(bm)
            continue
        if not pr_row:
            print("Missing pr result for " + bm)
            differences.append(bm)
            continue

        if base_row["Result"] != pr_row["Result"]:
            print("Verification results differ for " + bm)
            differences.append(bm)
            continue

        changed = False

        ut_base = float(base_row["usertime"])
        ut_pr = float(pr_row["usertime"])
        ut_diff = ut_pr - ut_base
        # Only report absolute changes above 1.5s to filter out noise.
        if abs(ut_diff) > 1.5:
            change = "improvement" if ut_diff < 0 else "regression"
            # Guard against a zero base time (fix: the original raised
            # ZeroDivisionError here).
            ut_rel = ut_diff / ut_base * 100.0 if ut_base else float("inf")
            print("Usertime {} for {}: {}s ({}%)".format(
                change, bm, round(ut_diff, 2), round(ut_rel, 2)))
            changed = True

        # maxmem carries a two-character unit suffix that is stripped
        # before conversion (presumably "kB" as emitted by the benchmark
        # harness -- TODO confirm); the /1024.0 turns the kB delta into MB.
        mem_base = int(base_row["maxmem"][:-2])
        mem_pr = int(pr_row["maxmem"][:-2])
        mem_diff = (mem_pr - mem_base) / 1024.0
        if abs(mem_diff) > 5.0:
            change = "improvement" if mem_diff < 0 else "regression"
            # Same zero-base guard as for usertime.
            mem_rel = mem_diff / mem_base * 100.0 if mem_base else float("inf")
            print("Maxmem {} for {}: {}MB ({}%)".format(
                change, bm, round(mem_diff, 2), round(mem_rel, 2)))
            changed = True

        # Fix: the original appended bm once per triggered threshold, so a
        # benchmark regressing in both time and memory was listed twice.
        if changed:
            differences.append(bm)

    return differences
51+
52+
53+
def main():
    """Compare two benchmark-result CSV files given on the command line.

    sys.argv[1] is the CSV with base-branch results, sys.argv[2] the CSV
    with pull-request results; both must contain a "Benchmark" column.
    Returns 1 if any differences were found, 0 otherwise, and 2 on a
    usage error.
    """
    # Fix: the original indexed sys.argv unconditionally and died with an
    # IndexError instead of a usage message when arguments were missing.
    if len(sys.argv) != 3:
        print("Usage: {} base.csv pr.csv".format(sys.argv[0]))
        return 2
    base = sys.argv[1]
    pr = sys.argv[2]

    # Benchmark name -> {"base": row, "pr": row}; a plain dict factory is
    # all that is needed (the original used defaultdict(defaultdict)).
    results = defaultdict(dict)

    with open(base) as base_file, open(pr) as pr_file:
        for b in csv.DictReader(base_file):
            results[b["Benchmark"]]["base"] = b
        for p in csv.DictReader(pr_file):
            results[p["Benchmark"]]["pr"] = p

    differences = compare(results)

    for d in differences:
        # Fix: use .get -- the original's results[d]["base"] inserted an
        # empty entry into the defaultdict (mutating results while
        # reporting) whenever one side was missing.
        print("base: " + str(results[d].get("base")))
        print("pr: " + str(results[d].get("pr")))

    return 1 if differences else 0
75+
76+
77+
# Script entry point: propagate main()'s return code as the exit status.
if __name__ == "__main__":
    sys.exit(main())

Diff for: .github/workflows/performance.yaml

+86
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,86 @@
1+
# Run a fixed set of performance benchmarks (the LLBMC benchmark suite)
# on each pull request to guard against performance regressions.
name: Performance benchmarking

on:
  pull_request:
    branches: [ develop ]

jobs:
  run-llbmc-benchmarks:
    runs-on: ubuntu-20.04
    steps:
      # Full history (fetch-depth: 0) so the base branch can be checked
      # out and built in the "Build base with make" step below.
      - uses: actions/checkout@v2
        with:
          submodules: recursive
          fetch-depth: 0
      - name: Fetch dependencies
        env:
          # This is needed in addition to -yq to prevent apt-get from asking for
          # user input
          DEBIAN_FRONTEND: noninteractive
        run: |
          sudo apt-get install --no-install-recommends -y build-essential flex bison maven ccache
          sudo apt-get install --no-install-recommends -y bmt poppler-utils texlive-latex-base texlive-pictures
          make -C src minisat2-download
      # Restore the compiler cache; keys fall back from exact SHA to
      # branch to any run on this OS.
      - name: Prepare ccache
        uses: actions/cache@v2
        with:
          path: .ccache
          key: ${{ runner.os }}-20.04-make-${{ github.ref }}-${{ github.sha }}-Performance
          restore-keys: |
            ${{ runner.os }}-20.04-make-${{ github.ref }}
            ${{ runner.os }}-20.04-make
      - name: ccache environment
        run: |
          echo "CCACHE_BASEDIR=$PWD" >> $GITHUB_ENV
          echo "CCACHE_DIR=$PWD/.ccache" >> $GITHUB_ENV
      - name: Zero ccache stats and limit in size
        run: ccache -z --max-size=500M
      # Build the pull-request revision and stash its binaries (plus the
      # comparison script) in pr-bin/, exported via PR_BIN.
      - name: Build with make
        run: |
          make -C src CXX='ccache /usr/bin/g++' cbmc.dir goto-cc.dir -j2
          mkdir pr-bin
          cp src/cbmc/cbmc pr-bin/
          cp src/goto-cc/goto-cc pr-bin/
          cp .github/workflows/performance-compare.py pr-bin/
          echo "PR_BIN=$PWD/pr-bin" >> $GITHUB_ENV
      # Build the PR's base branch (${{ github.base_ref }}) and stash its
      # binaries in base-bin/, exported via BASE_BIN.
      # NOTE(review): the first checkout creates a branch named after
      # github.ref before switching away -- presumably to keep the PR
      # head revision reachable by name; confirm this is intentional.
      - name: Build base with make
        run: |
          git checkout -b ${{ github.ref }}
          git checkout ${{ github.base_ref }}
          git checkout -b base ${{ github.base_ref }}
          make -C src CXX='ccache /usr/bin/g++' cbmc.dir goto-cc.dir -j2
          mkdir base-bin
          cp src/cbmc/cbmc base-bin/
          cp src/goto-cc/goto-cc base-bin/
          echo "BASE_BIN=$PWD/base-bin" >> $GITHUB_ENV
      - name: Print ccache stats
        run: ccache -s
      # Run the benchmark suite once per configuration by putting the
      # matching binaries first in PATH, then render a base-vs-pr
      # scatter plot and convert it to PNG for the artifact upload.
      - name: Run LLBMC benchmarks
        run: |
          git clone --depth=1 https://github.com/tautschnig/bmt.git bmt.git
          cd bmt.git/pkgs
          tar czf ../llbmc-bench.cprover-bm.tar.gz llbmc-bench
          cd ..
          cpbm unpack orig-src/llbmc-bench-vstte-2012.tgz llbmc-bench.cprover-bm.tar.gz
          cd llbmc-bench
          sed -i 's/\(ex36.*\)/\1 --unwind 11/' cprover/cbmc_opts
          export PATH=$BASE_BIN:$PATH
          cprover/rules -j2 table CONFIG=cbmc.base
          export PATH=$PR_BIN:$PATH
          rm -rf build
          cprover/rules -j2 table CONFIG=cbmc.pr
          # avoid downloading PGF as we use the Ubuntu package
          mkdir -p base pgfplots
          cpbm graph -s cprover/results.cbmc.base.csv cprover/results.cbmc.pr.csv
          pdftoppm -png results.cbmc.base_results.cbmc.pr-scatter.pdf scatter
      - uses: actions/upload-artifact@v2
        with:
          name: scatter.png
          path: bmt.git/llbmc-bench/scatter-1.png
          if-no-files-found: error
      # Reduce both result CSVs to the columns the comparison script
      # reads, then fail the job (non-zero exit) on significant
      # differences.
      - name: Compare results
        run: |
          cd bmt.git/llbmc-bench
          cut -d, -f2,3,5,10,13,17 cprover/results.cbmc.base.csv > base.csv
          cut -d, -f2,3,5,10,13,17 cprover/results.cbmc.pr.csv > pr.csv
          $PR_BIN/performance-compare.py base.csv pr.csv

0 commit comments

Comments
 (0)