diff --git a/.github/workflows/benchmark.yml b/.github/workflows/benchmark.yml
new file mode 100644
index 000000000..249d4a689
--- /dev/null
+++ b/.github/workflows/benchmark.yml
@@ -0,0 +1,70 @@
+name: snmalloc Benchmarking CI
+
+# Ensure that only a single workflow run is in flight for each PR;
+# in-progress runs for pushes to the main branch are never cancelled.
+concurrency:
+  group: benchmarking${{ github.ref }}
+  cancel-in-progress: ${{ github.ref != 'refs/heads/main' }}
+
+# Controls when the workflow will run
+on:
+  # Triggers the workflow on push or pull request events, but only for the main branch
+  push:
+    branches: [ main ]
+  pull_request:
+    branches: [ main ]
+
+  # Allows you to run this workflow manually from the Actions tab
+  workflow_dispatch:
+
+jobs:
+  benchmark:
+    runs-on: [self-hosted, 1ES.Pool=snmalloc-perf]
+
+    steps:
+      # Checks out the repository under $GITHUB_WORKSPACE so the job can access it
+      - name: Checkout
+        uses: actions/checkout@v3
+
+      # Set up Docker Buildx for the cached image build below
+      - name: Setup Docker Buildx
+        uses: docker/setup-buildx-action@v2
+
+      - name: Build Docker Image
+        id: docker_build
+        uses: docker/build-push-action@v4
+        with:
+          context: .
+          file: benchmark/Dockerfile
+          push: false
+          load: true
+          tags: snmalloc-bench
+          build-args: |
+            benchs=cfrac
+            repeats=1
+          cache-from: type=gha
+          cache-to: type=gha,mode=max
+
+      # Extracts the benchmark results from the built image
+      - name: Extract Benchmark Results
+        run: |
+          docker cp $(docker create ${{ steps.docker_build.outputs.imageid }}):/results.json .
+
+      # Uploads the benchmark results as an artifact
+      - name: Upload Benchmark Results
+        uses: actions/upload-artifact@v4
+        with:
+          name: benchmark-results
+          path: results.json
+
+      # Upload to the Bencher graphing service
+      - uses: bencherdev/bencher@main
+      - name: Upload benchmark results to Bencher
+        run: |
+          bencher run \
+            --project snmalloc \
+            --token '${{ secrets.BENCHER_DEV_API_TOKEN }}' \
+            --branch ${{ github.ref_name }} \
+            --adapter json \
+            --err \
+            --file results.json
\ No newline at end of file
diff --git a/benchmark/Dockerfile b/benchmark/Dockerfile
new file mode 100644
index 000000000..2e70c974f
--- /dev/null
+++ b/benchmark/Dockerfile
@@ -0,0 +1,44 @@
+FROM ubuntu:24.04
+
+# Pull mimalloc-bench at a pinned commit
+RUN apt-get update && apt-get install -y --no-install-recommends git gpg ca-certificates python3-numpy
+RUN git clone https://github.com/daanx/mimalloc-bench && \
+    cd mimalloc-bench && \
+    git reset --hard ffa530dbbe046532dfcb4a1b58ffc06e144aee60
+
+WORKDIR /mimalloc-bench
+# Install dependencies
+RUN ./build-bench-env.sh packages
+
+# Tidy up the apt cache
+RUN apt-get clean && rm -rf /var/lib/apt/lists/*
+
+# Build benchmarks, deleting intermediate objects to keep layers small
+RUN ./build-bench-env.sh bench
+
+RUN ./build-bench-env.sh redis
+
+RUN ./build-bench-env.sh rocksdb \
+    && find /mimalloc-bench/extern/rocksdb-8.1.1 -name "*.o" -delete
+
+RUN ./build-bench-env.sh lean \
+    && find /mimalloc-bench/extern/lean -name "*.o" -delete
+
+RUN echo "sn /snmalloc/build/libsnmallocshim.so" > /allocs.txt
+
+# Build the snmalloc shim library used by the benchmarks
+RUN mkdir -p /snmalloc
+COPY . /snmalloc
+
+RUN mkdir -p /snmalloc/build
+WORKDIR /snmalloc/build
+RUN cmake -G Ninja -DCMAKE_BUILD_TYPE=Release ..
+RUN ninja libsnmallocshim.so
+
+# Run benchmarks
+ARG benchs=allt
+ARG repeats=1
+WORKDIR /mimalloc-bench/out/bench
+RUN ../../bench.sh --external=/allocs.txt $benchs -r=$repeats
+
+RUN python3 /snmalloc/benchmark/bencher.dev.py /mimalloc-bench/out/bench/benchres.csv > /results.json
\ No newline at end of file
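For local runs outside CI, the same image can be built and the results pulled out by hand. The following is a minimal sketch assuming Docker is installed; the tag, build args, and `/results.json` path come from the diffs above, and the commands are not part of this PR:

```sh
# Build the benchmark image from the repository root, mirroring the CI step.
docker build -f benchmark/Dockerfile \
  --build-arg benchs=cfrac --build-arg repeats=1 \
  -t snmalloc-bench .

# Copy the converted results out of a created (never started) container.
docker cp "$(docker create snmalloc-bench)":/results.json .
```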
diff --git a/benchmark/bencher.dev.py b/benchmark/bencher.dev.py
new file mode 100644
index 000000000..ba6bb3046
--- /dev/null
+++ b/benchmark/bencher.dev.py
@@ -0,0 +1,75 @@
+# This script is adapted from the mimalloc-bench project.
+# It converts the benchmark outputs to the format required by bencher.dev.
+
+import json
+import re
+import sys
+try:
+    import numpy as np
+except ImportError:
+    print('You need to install numpy.')
+    sys.exit(1)
+
+if len(sys.argv) != 2:
+    print('Usage: %s benchres.csv' % sys.argv[0])
+    print('where benchres.csv is the output of the benchmark script, i.e.')
+    print('  mimalloc-bench/out/bench/benchres.csv')
+    print()
+    print('The script generates a single file for submission to bencher.dev.')
+    sys.exit(1)
+
+# Each row is: benchmark name, allocator name, elapsed time, peak memory.
+parse_line = re.compile('^([^ ]+) +([^ ]+) +([0-9:.]+) +([0-9]+)')
+data = []
+test_names = set()
+
+# Read in the data, converting mm:ss.ss times to seconds.
+with open(sys.argv[1]) as f:
+    for line in f:
+        match = parse_line.search(line)
+        if not match:
+            continue
+        test_name, alloc_name, time_string, memory = match.groups()
+        time_split = time_string.split(':')
+        time_taken = 0
+        test_names.add(test_name)
+        if len(time_split) == 2:
+            time_taken = int(time_split[0]) * 60 + float(time_split[1])
+        else:
+            time_taken = float(time_split[0])
+        data.append({"Benchmark": test_name, "Allocator": alloc_name, "Time": time_taken, "Memory": int(memory)})
+
+# Output data in JSON of the form
+#
+# {
+#   "<test_name>": {
+#     "memory": {
+#       "value": <mean>,
+#       "high-value": <max>,
+#       "low-value": <min>
+#     },
+#     "time": {
+#       "value": <mean>,
+#       "high-value": <max>,
+#       "low-value": <min>
+#     }
+#   }
+# }
+
+# Aggregate mean/max/min across the repeats of each benchmark.
+output = {}
+for test_name in test_names:
+    output[test_name] = {
+        "memory": {
+            "value": float(np.mean([d["Memory"] for d in data if d["Benchmark"] == test_name])),
+            "high-value": float(np.max([d["Memory"] for d in data if d["Benchmark"] == test_name])),
+            "low-value": float(np.min([d["Memory"] for d in data if d["Benchmark"] == test_name])),
+        },
+        "time": {
+            "value": float(np.mean([d["Time"] for d in data if d["Benchmark"] == test_name])),
+            "high-value": float(np.max([d["Time"] for d in data if d["Benchmark"] == test_name])),
+            "low-value": float(np.min([d["Time"] for d in data if d["Benchmark"] == test_name])),
        }
+    }
+
+print(json.dumps(output, indent=2))
\ No newline at end of file
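The converter can be sanity-checked in isolation with a hand-written input. A minimal sketch follows; the two rows are hypothetical (the column layout of benchmark, allocator, elapsed time, and peak memory is inferred from the regex in `bencher.dev.py`), and the exact floating-point digits in the output may differ:

```sh
# Two hypothetical repeats of cfrac under the snmalloc shim ("sn").
printf 'cfrac sn 06.76 4528\ncfrac sn 06.81 4532\n' > benchres.csv
python3 benchmark/bencher.dev.py benchres.csv
# Expected shape of the output:
# {
#   "cfrac": {
#     "memory": { "value": 4530.0, "high-value": 4532.0, "low-value": 4528.0 },
#     "time":   { "value": 6.785, "high-value": 6.81, "low-value": 6.76 }
#   }
# }
```

Times in mm:ss form, such as `1:23.45`, take the other branch of the parser and convert to 83.45 seconds.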