6 changes: 0 additions & 6 deletions tests/conftest.py
@@ -6,12 +6,6 @@


def pytest_addoption(parser):
parser.addoption(
"--upd-fail",
action="store_true",
default=False,
help="Update marks for failing tests",
)
parser.addoption(
"--gen-diagram",
action="store_true",
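With `--upd-fail` gone, `tests/conftest.py` only registers the `--gen-diagram` flag. A minimal sketch of the remaining wiring is shown below, pairing the option with the session fixture that consumes it in `tests/scxml/conftest.py`; the flag's help text is an assumption, since the diff truncates before it.

```py
# Sketch of the option wiring left after this change. The --gen-diagram help
# text is assumed; the diff is truncated before it.
import pytest


def pytest_addoption(parser):
    parser.addoption(
        "--gen-diagram",
        action="store_true",
        default=False,
        help="Generate debug diagrams for SCXML testcases",  # assumed wording
    )


@pytest.fixture(scope="session")
def should_generate_debug_diagram(request):
    # Same fixture kept in tests/scxml/conftest.py below.
    return request.config.getoption("--gen-diagram")
```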
80 changes: 63 additions & 17 deletions tests/scxml/conftest.py
@@ -5,45 +5,91 @@
CURRENT_DIR = Path(__file__).parent
TESTCASES_DIR = CURRENT_DIR

# xfail sets — all tests currently fail identically on both engines
XFAIL_BOTH = {
# mandatory — invoke-related
"test191",
"test192",
"test207",
"test215",
"test216",
"test220",
"test223",
"test224",
"test225",
"test226",
"test228",
"test229",
"test232",
"test233",
"test234",
"test235",
"test236",
"test239",
"test240",
"test241",
"test243",
"test244",
"test245",
"test247",
"test253",
"test276",
"test338",
"test347",
"test422",
"test530",
# optional
"test201",
"test446",
"test509",
"test510",
"test518",
"test519",
"test520",
"test522",
"test531",
"test532",
"test534",
"test557",
"test558",
"test561",
"test567",
"test577",
}
XFAIL_SYNC_ONLY: set[str] = set()
XFAIL_ASYNC_ONLY: set[str] = set()

@pytest.fixture(scope="session")
def update_fail_mark(request):
return request.config.getoption("--upd-fail")
XFAIL_SYNC = XFAIL_BOTH | XFAIL_SYNC_ONLY
XFAIL_ASYNC = XFAIL_BOTH | XFAIL_ASYNC_ONLY


@pytest.fixture(scope="session")
def should_generate_debug_diagram(request):
return request.config.getoption("--gen-diagram")


@pytest.fixture()
def processor(testcase_path: Path):
"""
Construct a StateMachine class from the SCXML file
"""
return processor


def compute_testcase_marks(testcase_path: Path) -> list[pytest.MarkDecorator]:
marks = [pytest.mark.scxml]
if testcase_path.with_name(f"{testcase_path.stem}.fail.md").exists():
def compute_testcase_marks(testcase_path: Path, is_async: bool) -> list[pytest.MarkDecorator]:
marks: list[pytest.MarkDecorator] = [pytest.mark.scxml]
test_id = testcase_path.stem
xfail_set = XFAIL_ASYNC if is_async else XFAIL_SYNC
if test_id in xfail_set:
marks.append(pytest.mark.xfail)
if testcase_path.with_name(f"{testcase_path.stem}.skip.md").exists():
marks.append(pytest.mark.skip)
return marks


def pytest_generate_tests(metafunc):
if "testcase_path" not in metafunc.fixturenames:
return

is_async = "async" in metafunc.function.__name__

metafunc.parametrize(
"testcase_path",
[
pytest.param(
testcase_path,
id=str(testcase_path.relative_to(TESTCASES_DIR)),
marks=compute_testcase_marks(testcase_path),
marks=compute_testcase_marks(testcase_path, is_async),
)
for testcase_path in TESTCASES_DIR.glob("**/*.scxml")
if "sub" not in testcase_path.name
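The per-test `.fail.md` marker files are replaced by the static `XFAIL_*` sets above: `pytest_generate_tests` parametrizes each test over the SCXML files and attaches `pytest.mark.xfail` when the testcase id is in the set for the selected engine. A minimal, self-contained sketch of that marking pattern follows; `KNOWN_FAILURES`, the ids, and the toy assertion are illustrative only, not taken from the suite.

```py
# Toy version of the static-xfail pattern used above; the ids and the
# failure set are illustrative, not taken from the SCXML suite.
import pytest

KNOWN_FAILURES = {"case_b"}


def pytest_generate_tests(metafunc):
    if "case_id" not in metafunc.fixturenames:
        return
    metafunc.parametrize(
        "case_id",
        [
            pytest.param(
                case_id,
                id=case_id,
                marks=[pytest.mark.xfail] if case_id in KNOWN_FAILURES else [],
            )
            for case_id in ("case_a", "case_b")
        ],
    )


def test_case(case_id):
    # case_a passes; case_b fails but is reported as xfail, not as a failure.
    assert case_id != "case_b"
```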
176 changes: 19 additions & 157 deletions tests/scxml/test_scxml_cases.py
@@ -1,14 +1,8 @@
import traceback
from dataclasses import dataclass
from dataclasses import field
from pathlib import Path
from typing import Any

import pytest
from statemachine.event import Event
from statemachine.io.scxml.processor import SCXMLProcessor

from statemachine import State
from statemachine import StateChart

"""
@@ -22,45 +16,6 @@
""" # noqa: E501


@dataclass(frozen=True, unsafe_hash=True)
class OnTransition:
source: str
event: str
data: str
target: str


@dataclass(frozen=True, unsafe_hash=True)
class OnEnterState:
state: str
event: str
data: str


@dataclass(frozen=True, unsafe_hash=True)
class DebugListener:
events: list[Any] = field(default_factory=list)

def on_transition(self, event: Event, source: State, target: State, event_data):
self.events.append(
OnTransition(
source=f"{source and source.id}",
event=f"{event and event.id}",
data=f"{event_data.trigger_data.kwargs}",
target=f"{target and target.id}",
)
)

def on_enter_state(self, event: Event, state: State, event_data):
self.events.append(
OnEnterState(
state=f"{state.id}",
event=f"{event and event.id}",
data=f"{event_data.trigger_data.kwargs}",
)
)


class AsyncListener:
"""No-op async listener to trigger AsyncEngine selection."""

@@ -69,77 +24,9 @@ async def on_enter_state(
): ... # No-op: presence of async callback triggers AsyncEngine selection


@dataclass
class FailedMark:
reason: str
events: list[OnTransition]
is_assertion_error: bool
exception: Exception
logs: str
configuration: list[str] = field(default_factory=list)

@staticmethod
def _get_header(report: str) -> str:
header_end_index = report.find("---")
return report[:header_end_index]

def write_fail_markdown(self, testcase_path: Path):
fail_file_path = testcase_path.with_suffix(".fail.md")
if not self.is_assertion_error:
exception_traceback = "".join(
traceback.format_exception(
type(self.exception), self.exception, self.exception.__traceback__
)
)
else:
exception_traceback = "Assertion of the testcase failed."

report = """# Testcase: {testcase_path.stem}

{reason}

Final configuration: `{configuration}`

---

## Logs
```py
{logs}
```

## "On transition" events
```py
{events}
```

## Traceback
```py
{exception_traceback}
```
""".format(
testcase_path=testcase_path,
reason=self.reason,
configuration=self.configuration if self.configuration else "No configuration",
logs=self.logs if self.logs else "No logs",
events="\n".join(map(repr, self.events)) if self.events else "No events",
exception_traceback=exception_traceback,
)

if fail_file_path.exists():
last_report = fail_file_path.read_text()

if self._get_header(report) == self._get_header(last_report):
return

with fail_file_path.open("w") as fail_file:
fail_file.write(report)


def _run_scxml_testcase(
testcase_path: Path,
update_fail_mark,
should_generate_debug_diagram,
caplog,
*,
async_mode: bool = False,
) -> StateChart:
@@ -150,65 +37,40 @@ def _run_scxml_testcase(
"""
from statemachine.contrib.diagram import DotGraphMachine

sm: "StateChart | None" = None
try:
debug = DebugListener()
listeners: list = [debug]
if async_mode:
listeners.append(AsyncListener())
processor = SCXMLProcessor()
processor.parse_scxml_file(testcase_path)

sm = processor.start(listeners=listeners)
if should_generate_debug_diagram:
DotGraphMachine(sm).get_graph().write_png(
testcase_path.parent / f"{testcase_path.stem}.png"
)
assert sm is not None
return sm
except Exception as e:
if update_fail_mark:
reason = f"{e.__class__.__name__}: {e.__class__.__doc__}"
is_assertion_error = isinstance(e, AssertionError)
fail_mark = FailedMark(
reason=reason,
is_assertion_error=is_assertion_error,
events=debug.events,
exception=e,
logs=caplog.text,
configuration=[s.id for s in sm.configuration] if sm else [],
)
fail_mark.write_fail_markdown(testcase_path)
raise


def _assert_passed(sm: StateChart, debug: "DebugListener | None" = None):
listeners: list = []
if async_mode:
listeners.append(AsyncListener())
processor = SCXMLProcessor()
processor.parse_scxml_file(testcase_path)

sm = processor.start(listeners=listeners)
if should_generate_debug_diagram:
DotGraphMachine(sm).get_graph().write_png(
testcase_path.parent / f"{testcase_path.stem}.png"
)
assert isinstance(sm, StateChart)
return sm


def _assert_passed(sm: StateChart):
assert isinstance(sm, StateChart)
assert "pass" in {s.id for s in sm.configuration}, debug
assert "pass" in {s.id for s in sm.configuration}


def test_scxml_usecase_sync(
testcase_path: Path, update_fail_mark, should_generate_debug_diagram, caplog
):
def test_scxml_usecase_sync(testcase_path: Path, should_generate_debug_diagram, caplog):
sm = _run_scxml_testcase(
testcase_path,
update_fail_mark,
should_generate_debug_diagram,
caplog,
async_mode=False,
)
_assert_passed(sm)


@pytest.mark.asyncio()
async def test_scxml_usecase_async(
testcase_path: Path, update_fail_mark, should_generate_debug_diagram, caplog
):
async def test_scxml_usecase_async(testcase_path: Path, should_generate_debug_diagram, caplog):
sm = _run_scxml_testcase(
testcase_path,
update_fail_mark,
should_generate_debug_diagram,
caplog,
async_mode=True,
)
# In async context, the engine only queued __initial__ during __init__.
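Stripped of the `FailedMark` reporting, the helper now just parses the SCXML file, starts the chart, and lets `_assert_passed` check that the final configuration reached the `pass` state; adding a listener that defines an async callback (the no-op `AsyncListener` above) is what switches the run to the AsyncEngine. A standalone sketch using only calls visible in this diff is below; the testcase path is a placeholder, not a reference to a specific file.

```py
# Standalone sketch of running a single W3C SCXML testcase the way the
# trimmed helper does. The path is a placeholder.
from pathlib import Path

from statemachine import StateChart
from statemachine.io.scxml.processor import SCXMLProcessor

testcase_path = Path("tests/scxml/w3c/mandatory/some_test.scxml")  # placeholder

processor = SCXMLProcessor()
processor.parse_scxml_file(testcase_path)

# Passing a listener that defines an async callback here would select the
# AsyncEngine instead of the sync one.
sm = processor.start(listeners=[])
assert isinstance(sm, StateChart)
assert "pass" in {s.id for s in sm.configuration}
```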
31 changes: 0 additions & 31 deletions tests/scxml/w3c/mandatory/test191.fail.md

This file was deleted.
