Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
45 changes: 40 additions & 5 deletions .github/actions/build-fixtures/action.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,15 @@ inputs:
release_name:
description: "Name of the fixture release"
required: true
from_fork:
description: "Fill from this fork (inclusive). Empty for unsplit builds."
default: ""
until_fork:
description: "Fill until this fork (inclusive). Empty for unsplit builds."
default: ""
split_label:
description: "Label for this fork-range split. Empty for unsplit builds."
default: ""
runs:
using: "composite"
steps:
Expand All @@ -25,28 +34,54 @@ runs:
with:
type: ${{ steps.properties.outputs.evm-type }}
- name: Install pigz for parallel tarball compression
if: inputs.split_label == ''
shell: bash
run: sudo apt-get install -y pigz
- name: Generate fixtures using fill
shell: bash
run: |
IS_SPLIT="${{ inputs.split_label }}"

if [ -n "$IS_SPLIT" ]; then
OUTPUT_ARG="--output=fixtures_${{ inputs.release_name }}"
FORK_ARGS="--generate-all-formats --from=${{ inputs.from_fork }} --until=${{ inputs.until_fork }}"
else
OUTPUT_ARG="--output=fixtures_${{ inputs.release_name }}.tar.gz"
FORK_ARGS=""
fi

# Allow exit code 5 (NO_TESTS_COLLECTED) for fork ranges with no tests.
EXIT_CODE=0
if [ "${{ steps.evm-builder.outputs.impl }}" = "eels" ]; then
uv run fill -n ${{ steps.evm-builder.outputs.x-dist }} ${{ steps.properties.outputs.fill-params }} --output=fixtures_${{ inputs.release_name }}.tar.gz --build-name ${{ inputs.release_name }} --no-html --durations=100 --log-level=DEBUG
uv run fill -n ${{ steps.evm-builder.outputs.x-dist }} ${{ steps.properties.outputs.fill-params }} $FORK_ARGS $OUTPUT_ARG --build-name ${{ inputs.release_name }} --no-html --durations=100 --log-level=DEBUG || EXIT_CODE=$?
else
uv run fill -n ${{ steps.evm-builder.outputs.x-dist }} --evm-bin=${{ steps.evm-builder.outputs.evm-bin }} ${{ steps.properties.outputs.fill-params }} --output=fixtures_${{ inputs.release_name }}.tar.gz --build-name ${{ inputs.release_name }} --no-html --durations=100 --log-level=DEBUG
uv run fill -n ${{ steps.evm-builder.outputs.x-dist }} --evm-bin=${{ steps.evm-builder.outputs.evm-bin }} ${{ steps.properties.outputs.fill-params }} $FORK_ARGS $OUTPUT_ARG --build-name ${{ inputs.release_name }} --no-html --durations=100 --log-level=DEBUG || EXIT_CODE=$?
fi
if [ "$EXIT_CODE" -ne 0 ] && [ "$EXIT_CODE" -ne 5 ]; then
exit "$EXIT_CODE"
fi
- name: Generate Benchmark Genesis Files
if: contains(inputs.release_name, 'benchmark')
if: inputs.split_label == '' && contains(inputs.release_name, 'benchmark')
uses: ./.github/actions/build-benchmark-genesis
with:
fixtures_path: fixtures_${{ inputs.release_name }}.tar.gz
- name: Upload Benchmark Genesis Artifact
if: contains(inputs.release_name, 'benchmark')
if: inputs.split_label == '' && contains(inputs.release_name, 'benchmark')
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
with:
name: benchmark_genesis_${{ inputs.release_name }}
path: benchmark_genesis.tar.gz
- uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
- name: Upload fixture tarball (unsplit)
if: inputs.split_label == ''
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
with:
name: fixtures_${{ inputs.release_name }}
path: fixtures_${{ inputs.release_name }}.tar.gz
- name: Upload fixture directory (split)
if: inputs.split_label != ''
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
with:
name: fixtures__${{ inputs.split_label }}
include-hidden-files: true
path: fixtures_${{ inputs.release_name }}/
if-no-files-found: ignore
6 changes: 1 addition & 5 deletions .github/configs/evm.yaml
Original file line number Diff line number Diff line change
@@ -1,8 +1,4 @@
stable:
impl: eels
repo: null
ref: null
develop:
eels:
impl: eels
repo: null
ref: null
Expand Down
15 changes: 6 additions & 9 deletions .github/configs/feature.yaml
Original file line number Diff line number Diff line change
@@ -1,22 +1,19 @@
# Unless filling for special features, all features should fill for previous forks (starting from Frontier) too
stable:
evm-type: stable
fill-params: --until=Prague --fill-static-tests --ignore=tests/static/state_tests/stQuadraticComplexityTest

develop:
evm-type: develop
fill-params: --until=BPO4 --fill-static-tests --ignore=tests/static/state_tests/stQuadraticComplexityTest
mainnet:
evm-type: eels
fill-params: --until=BPO2

benchmark:
evm-type: benchmark
fill-params: --fork=Osaka --gas-benchmark-values 1,5,10,30,60,100,150 ./tests/benchmark/compute --maxprocesses=30 --dist=worksteal
feature_only: true

benchmark_fast:
evm-type: benchmark
fill-params: --fork=Osaka --gas-benchmark-values 100 ./tests/benchmark/compute
feature_only: true

bal:
evm-type: develop
fill-params: --fork=Amsterdam --fill-static-tests
evm-type: eels
fill-params: --fork=Amsterdam
feature_only: true
21 changes: 21 additions & 0 deletions .github/configs/fork-ranges.yaml
Original file line number Diff line number Diff line change
@@ -0,0 +1,21 @@
# Shared fork ranges for splitting multi-fork releases across parallel runners.
# Features using --until are automatically split using applicable ranges.
# Features using --fork (single fork) are never split.
- label: pre-cancun
from: Frontier
until: Shanghai
- label: cancun
from: Cancun
until: Cancun
- label: prague
from: Prague
until: Prague
- label: osaka
from: Osaka
until: Osaka
- label: bpo
from: BPO1
until: BPO2
- label: amsterdam
from: Amsterdam
until: Amsterdam
84 changes: 84 additions & 0 deletions .github/scripts/create_release_tarball.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,84 @@
#!/usr/bin/env -S uv run --script
#
# /// script
# requires-python = ">=3.12"
# dependencies = []
# ///
"""
Create a release tarball from a merged fixture directory.

Archive all ``.json`` and ``.ini`` files under a ``fixtures/`` prefix,
matching the structure produced by
``execution_testing.cli.pytest_commands.plugins.shared.fixture_output``.

Use ``pigz`` for parallel compression when available, otherwise fall
back to Python's built-in gzip.
"""

import shutil
import subprocess
import sys
import tarfile
import warnings
from pathlib import Path


def create_tarball_with_pigz(source_dir: Path, output_path: Path) -> None:
    """Write an uncompressed tar, then compress it in parallel with pigz.

    Only ``.json`` and ``.ini`` files are archived, each stored under a
    ``fixtures/`` prefix relative to *source_dir*.  The intermediate
    archive is *output_path* minus its ``.gz`` suffix; ``pigz -f``
    replaces it with the final ``.tar.gz``.
    """
    uncompressed = output_path.with_suffix("")  # e.g. out.tar.gz -> out.tar

    with tarfile.open(uncompressed, "w") as archive:
        for path in sorted(source_dir.rglob("*")):
            if not path.is_file() or path.suffix not in {".json", ".ini"}:
                continue
            member = Path("fixtures") / path.relative_to(source_dir)
            archive.add(path, arcname=str(member))

    # -f overwrites any stale .gz; output is captured so pigz noise does
    # not pollute the log (a failure is surfaced via check=True).
    subprocess.run(
        ["pigz", "-f", str(uncompressed)],
        check=True,
        capture_output=True,
    )


def create_tarball_standard(source_dir: Path, output_path: Path) -> None:
    """Create a gzip tarball with Python's tarfile (single-threaded fallback).

    Archives only ``.json`` and ``.ini`` files, each stored under a
    ``fixtures/`` prefix relative to *source_dir*.
    """
    with tarfile.open(output_path, "w:gz") as archive:
        for path in sorted(source_dir.rglob("*")):
            if not path.is_file():
                continue
            if path.suffix not in {".json", ".ini"}:
                continue
            member = Path("fixtures") / path.relative_to(source_dir)
            archive.add(path, arcname=str(member))


def main() -> None:
    """CLI entry point: validate arguments, then build the release tarball.

    Exits with status 1 on a usage error or when the source directory
    does not exist.  Prefers pigz for parallel compression, falling back
    to the pure-Python gzip path when pigz is absent or fails.
    """
    args = sys.argv[1:]
    if len(args) != 2:
        print(
            "Usage: create_release_tarball.py <source_dir> <output.tar.gz>",
            file=sys.stderr,
        )
        sys.exit(1)

    source_dir = Path(args[0])
    output_path = Path(args[1])

    if not source_dir.is_dir():
        print(f"Error: '{source_dir}' is not a directory.", file=sys.stderr)
        sys.exit(1)

    if shutil.which("pigz") is None:
        create_tarball_standard(source_dir, output_path)
    else:
        try:
            create_tarball_with_pigz(source_dir, output_path)
        except (subprocess.CalledProcessError, OSError) as e:
            # Best-effort: a pigz failure is not fatal, retry with gzip.
            warnings.warn(
                f"pigz failed ({type(e).__name__}: {e}), falling back to gzip",
                stacklevel=2,
            )
            create_tarball_standard(source_dir, output_path)

    print(f"Created {output_path}")


if __name__ == "__main__":
main()
160 changes: 160 additions & 0 deletions .github/scripts/generate_build_matrix.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,160 @@
#!/usr/bin/env -S uv run --script
#
# /// script
# requires-python = ">=3.12"
# dependencies = [
# "pyyaml",
# ]
# ///
"""
Generate the build matrix for release fixture workflows.

Read `.github/configs/feature.yaml` and emit a flat JSON build matrix
suitable for ``strategy.matrix`` in GitHub Actions.

Features whose ``fill-params`` contain ``--until`` are split across the
shared fork ranges defined in `.github/configs/fork-ranges.yaml`.
Features using ``--fork`` (single fork) produce a single unsplit entry.
"""

import json
import re
import sys
from pathlib import Path

import yaml

# Config paths are relative — NOTE(review): assumes the script is run
# from the repository root (as the workflow does); confirm for local use.
FEATURE_CONFIG = Path(".github/configs/feature.yaml")
FORK_RANGES_CONFIG = Path(".github/configs/fork-ranges.yaml")

# Canonical fork ordering used to filter fork ranges per feature.
FORK_ORDER = [
    "Frontier",
    "Homestead",
    "DAOFork",
    "TangerineWhistle",
    "SpuriousDragon",
    "Byzantium",
    "Constantinople",
    "Istanbul",
    "MuirGlacier",
    "Berlin",
    "London",
    "ArrowGlacier",
    "GrayGlacier",
    "Paris",
    "Shanghai",
    "Cancun",
    "Prague",
    "Osaka",
    "BPO1",
    "BPO2",
    "Amsterdam",
]

# Fork name -> position in FORK_ORDER, for cheap ordering comparisons.
FORK_INDEX = {name: i for i, name in enumerate(FORK_ORDER)}


def load_config(path: Path) -> dict:
    """Parse *path* as YAML and return the resulting mapping."""
    return yaml.safe_load(path.read_text())


def parse_until_fork(fill_params: str) -> str | None:
"""
Extract the ``--until`` value from fill-params.

Return ``None`` when ``--fork`` is used instead (single-fork
feature that should not be split).
"""
if re.search(r"--fork\b", fill_params):
return None
m = re.search(r"--until[=\s]+(\S+)", fill_params)
return m.group(1) if m else None


def applicable_ranges(fork_ranges: list[dict], until_fork: str) -> list[dict]:
    """
    Select the fork ranges that start at or before *until_fork*.

    Any selected range whose ``until`` lies beyond *until_fork* is
    clamped down to it, so a split never fills past the feature's
    declared boundary.  Input entries are copied, never mutated.
    """
    limit = FORK_INDEX[until_fork]
    selected: list[dict] = []
    for fork_range in fork_ranges:
        if FORK_INDEX[fork_range["from"]] > limit:
            continue
        clamped = dict(fork_range)
        if FORK_INDEX[clamped["until"]] > limit:
            clamped["until"] = until_fork
        selected.append(clamped)
    return selected


def build_matrix(
    feature: dict, name: str, fork_ranges: list[dict]
) -> tuple[list[dict], str]:
    """
    Build the strategy-matrix entries for a single feature.

    Returns ``(build_entries, combine_labels)``.  A feature filled with
    ``--until`` across more than one applicable fork range is split into
    one entry per range, with a space-separated label string for the
    combine step.  Single-fork features, or features with at most one
    applicable range, yield a single unsplit entry with empty labels.
    """
    until_fork = parse_until_fork(feature["fill-params"])

    if until_fork and fork_ranges:
        ranges = applicable_ranges(fork_ranges, until_fork)
        if len(ranges) > 1:
            entries = []
            for fork_range in ranges:
                entries.append(
                    {
                        "feature": name,
                        "label": fork_range["label"],
                        "from_fork": fork_range["from"],
                        "until_fork": fork_range["until"],
                    }
                )
            combine_labels = " ".join(entry["label"] for entry in entries)
            return entries, combine_labels

    unsplit_entry = {
        "feature": name,
        "label": "",
        "from_fork": "",
        "until_fork": "",
    }
    return [unsplit_entry], ""


def main() -> None:
    """CLI entry point: emit GitHub Actions output lines for one feature.

    Exits with status 1 on a usage error or an unknown feature name.
    """
    if len(sys.argv) != 2:
        print(
            "Usage: generate_build_matrix.py <feature>",
            file=sys.stderr,
        )
        sys.exit(1)

    name = sys.argv[1]
    config = load_config(FEATURE_CONFIG)
    fork_ranges = load_config(FORK_RANGES_CONFIG) or []

    if name not in config or not isinstance(config[name], dict):
        print(
            f"Error: feature '{name}' not found in {FEATURE_CONFIG}.",
            file=sys.stderr,
        )
        sys.exit(1)

    build, labels = build_matrix(config[name], name, fork_ranges)

    # "key=value" lines, consumed by the workflow via $GITHUB_OUTPUT.
    print(f"build_matrix={json.dumps(build)}")
    print(f"feature_name={name}")
    print(f"combine_labels={labels}")


if __name__ == "__main__":
main()
Loading
Loading