Skip to content
Open
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
188 changes: 188 additions & 0 deletions scripts/run-hive-local.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,188 @@
#!/usr/bin/env python3
"""Local hive test runner - parses scenarios from .github/workflows/hive.yml

Usage:
./scripts/run-hive-local.py # List available scenarios
./scripts/run-hive-local.py --all # Run all scenarios
./scripts/run-hive-local.py smoke/genesis # Run specific scenario
./scripts/run-hive-local.py --skip-build smoke/genesis # Skip docker build
"""

import argparse
import os
import subprocess
import sys
import time
from pathlib import Path

import yaml

# Repository root (this script lives in <repo>/scripts/).
BERA_RETH_DIR = Path(__file__).parent.resolve().parent
# Workflow file whose scenario matrix this script mirrors locally.
HIVE_WORKFLOW = BERA_RETH_DIR / ".github/workflows/hive.yml"
# Hive checkout location. Defaults to a sibling "hive" directory, but can be
# overridden via the HIVE_DIR environment variable (the prerequisite-check
# error message already instructs users to set it, so honor it here).
HIVE_DIR = Path(os.environ.get("HIVE_DIR", BERA_RETH_DIR.parent / "hive"))


def load_scenarios():
    """Parse the hive workflow YAML and return its scenario matrix list."""
    workflow = yaml.safe_load(HIVE_WORKFLOW.read_text())
    return workflow["jobs"]["test"]["strategy"]["matrix"]["scenario"]


def get_filter(scenario):
    """Build the hive --sim.limit filter string for a scenario entry.

    Combines the optional "limit" prefix with the optional "include" test
    names ("|"-joined) exactly as the CI workflow does.
    """
    limit = scenario.get("limit", "")
    joined = "|".join(scenario.get("include", []))

    if limit and joined:
        return f"{limit}/{joined}"
    if limit:
        return limit
    if joined:
        return f"/{joined}"
    return ""


def list_scenarios(scenarios):
    """Print the scenarios parsed from hive.yml, then basic usage help."""
    print("Available scenarios (from hive.yml):")
    for scenario in scenarios:
        line = f"  {scenario['sim']}"
        limit = scenario.get("limit", "")
        if limit:
            line += f"\t{limit}"
        print(line)
    print()
    print("Usage: run-hive-local.py [--skip-build] [--all | <sim> [limit]]")


def find_scenario(scenarios, sim, limit_arg):
    """Return the first scenario matching sim and limit, or None.

    An empty/None limit_arg matches only scenarios that have no limit.
    """
    wanted_limit = limit_arg or ""
    return next(
        (
            entry
            for entry in scenarios
            if entry["sim"] == sim and entry.get("limit", "") == wanted_limit
        ),
        None,
    )


def run_scenario(sim, filter_str):
    """Run one hive simulator and validate its JSON results.

    Returns True only when the run produced result files and every failure
    is expected per .github/assets/hive/expected_failures.yaml.

    NOTE: changes the process working directory to HIVE_DIR.
    """
    print()
    print(f"==> Running: {sim}" + (f" (filter: {filter_str})" if filter_str else ""))

    os.chdir(HIVE_DIR)
    args = ["./hive", "--sim", sim, "--client", "bera-reth", "--sim.parallelism", "8"]
    if filter_str:
        args.extend(["--sim.limit", filter_str])

    start_time = time.time()

    # The hive process returns non-zero exit code when tests fail, even on expected
    # failures so we need to parse the JSON to check if failures are expected
    result = subprocess.run(args)

    # Find JSON files created after we started
    logs_dir = HIVE_DIR / "workspace/logs"
    json_files = [f for f in logs_dir.glob("*.json") if f.name != "hive.json" and f.stat().st_mtime > start_time]

    # If hive failed and no results generated, it crashed
    if result.returncode != 0 and not json_files:
        print(f"Hive crashed with exit code {result.returncode}")
        return False

    # No results at all means validation against expected_failures.yaml /
    # ignored_tests.yaml was skipped entirely (e.g. wrong logs path). Treat
    # that as a failure so the misconfiguration is surfaced rather than
    # silently reported as a pass.
    if not json_files:
        print("No JSON results found")
        return False

    # Get the newest json file (note: don't run this script in parallel!)
    json_file = max(json_files, key=lambda p: p.stat().st_mtime)
    print(f"Validating: {json_file.name}")

    hive_assets = BERA_RETH_DIR / ".github/assets/hive"
    result = subprocess.run(
        [
            "python3",
            str(hive_assets / "parse.py"),
            str(json_file),
            "--exclusion",
            str(hive_assets / "expected_failures.yaml"),
            "--ignored",
            str(hive_assets / "ignored_tests.yaml"),
        ]
    )
    return result.returncode == 0


def main():
    """CLI entry point: list scenarios, or build hive/Docker and run them.

    Returns a process exit code (0 on success, 1 on any failure), suitable
    for passing to sys.exit().
    """
    parser = argparse.ArgumentParser(description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter)
    parser.add_argument("--skip-build", action="store_true", help="Skip Docker build")
    parser.add_argument("--all", action="store_true", help="Run all scenarios")
    parser.add_argument("sim", nargs="?", help="Simulator to run")
    parser.add_argument("limit", nargs="?", help="Limit filter")
    args = parser.parse_args()

    scenarios = load_scenarios()

    # List scenarios mode: no sim and no --all means just print what exists.
    if not args.sim and not args.all:
        list_scenarios(scenarios)
        return 0

    # Check prerequisites: a hive source checkout must be present.
    print("==> Checking prerequisites...")
    if not (HIVE_DIR / "hive.go").exists():
        print(f"Error: Hive not found at {HIVE_DIR}")
        print("Set HIVE_DIR or clone hive there")
        return 1

    # Build hive if the binary does not exist
    if not (HIVE_DIR / "hive").exists():
        print("==> Building hive...")
        subprocess.run(["go", "build", "-o", "hive", "."], cwd=HIVE_DIR, check=True)

    # Build the bera-reth Docker image (tag matches what hive's client
    # configuration pulls), unless the user asked to skip it.
    if not args.skip_build:
        print("==> Building bera-reth Docker image...")
        subprocess.run(
            [
                "docker",
                "build",
                "-t",
                "ghcr.io/berachain/bera-reth:nightly",
                "-f",
                str(BERA_RETH_DIR / ".github/assets/hive/Dockerfile"),
                "--build-arg",
                "CARGO_BIN=bera-reth",
                "--build-arg",
                "BUILD_PROFILE=hivetests",
                str(BERA_RETH_DIR),
            ],
            check=True,
        )

    # Run scenarios; keep going after a failure so every result is reported.
    failed = False
    if args.all:
        print("==> Running all scenarios...")
        for s in scenarios:
            filter_str = get_filter(s)
            if not run_scenario(s["sim"], filter_str):
                failed = True
    else:
        scenario = find_scenario(scenarios, args.sim, args.limit)
        if not scenario:
            print(f"Error: Scenario not found: {args.sim} {args.limit or ''}")
            return 1
        filter_str = get_filter(scenario)
        if not run_scenario(args.sim, filter_str):
            failed = True

    print()
    if failed:
        print("==> FAILED: Some scenarios had unexpected failures")
        return 1
    print("==> All scenarios passed!")
    return 0


# Script entry: exit the process with main()'s return code.
if __name__ == "__main__":
    raise SystemExit(main())
Loading