Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
3 changes: 3 additions & 0 deletions CLAUDE.md
Original file line number Diff line number Diff line change
Expand Up @@ -37,6 +37,9 @@ When agents need external documentation (Claude hooks docs, Cursor rules docs, e
# Cache a page (prints the file path to stdout)
skilllint docs fetch URL

# Cache all unique normalized rule authority URLs
skilllint docs fetch-authorities

# List sections in a cached file
skilllint docs sections FILE

Expand Down
22 changes: 22 additions & 0 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -286,6 +286,7 @@ Usage: skilllint docs [OPTIONS] COMMAND [ARGS]...

Commands:
fetch Fetch a documentation page or return a cached copy.
fetch-authorities Fetch documentation for all normalized rule authority URLs.
latest Find the most recent cached file for a page name.
sections Print a table of sections in a cached markdown file.
section Extract the text of a named section from a cached markdown file.
Expand All @@ -311,6 +312,24 @@ Options:
Prints the cached file path to stdout. Status messages go to stderr. Exits 1 when no
cache exists and the network is unavailable.

#### docs fetch-authorities

```
Usage: skilllint docs fetch-authorities [OPTIONS]

Cache Options:
--ttl FLOAT Cache time-to-live in hours before a refresh is attempted. [default: 4.0]
--force Skip the freshness check and always attempt a network fetch.

Options:
--help Show this message and exit.
```

Fetches every unique authority URL declared by the rule registry after normalizing
origin-relative references against each rule authority origin. Prints one cached file
path per successful fetch. Exits 1 if any URL cannot be fetched and no stale cache is
available.

#### docs latest

```
Expand Down Expand Up @@ -402,6 +421,9 @@ sidecar that records the SHA-256 digest, byte count, source URL, and fetch times
# Cache a documentation page (default TTL: 4 hours)
skilllint docs fetch https://docs.anthropic.com/en/docs/claude-code/settings.md

# Pre-fetch all normalized rule authority URLs
skilllint docs fetch-authorities

# Force a network refresh regardless of TTL
skilllint docs fetch https://docs.anthropic.com/en/docs/claude-code/settings.md --force

Expand Down
59 changes: 58 additions & 1 deletion packages/skilllint/cli_docs.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,13 +7,14 @@

from __future__ import annotations

from pathlib import Path # noqa: TC003
from pathlib import Path
from typing import Annotated

import typer
from rich.console import Console
from rich.panel import Panel

from skilllint.rule_registry import iter_authority_urls
from skilllint.vendor_cache import (
CacheStatus,
IntegrityStatus,
Expand Down Expand Up @@ -96,6 +97,62 @@ def fetch(
console.print(result.path)


# ---------------------------------------------------------------------------
# fetch-authorities
# ---------------------------------------------------------------------------


@docs_app.command("fetch-authorities")
def fetch_authorities(
    ttl: Annotated[
        float,
        typer.Option(
            "--ttl",
            help="Cache time-to-live in hours before a refresh is attempted.",
            rich_help_panel="Cache Options",
        ),
    ] = 4.0,
    force: Annotated[
        bool,
        typer.Option(
            "--force",
            help="Skip the freshness check and always attempt a network fetch.",
            rich_help_panel="Cache Options",
        ),
    ] = False,
) -> None:
    """Fetch cached documentation for all normalized rule authority URLs.

    Prints one cached file path per successfully fetched authority URL on
    stdout; status messages (NEW/FRESH/STALE/FAILED) go to stderr so stdout
    stays a clean, machine-consumable list of paths.

    Raises:
        typer.Exit: Exit code 1 when one or more authority URLs cannot be
            fetched and no stale cache can be served.
    """
    authority_urls = list(iter_authority_urls(unique=True))
    if not authority_urls:
        # Nothing to fetch is not an error: warn and exit 0.
        err_console.print(":warning: [yellow]No authority URLs found in the rule registry[/yellow]")
        return

    had_failure = False
    for url in authority_urls:
        try:
            result = fetch_or_cached(url, ttl_hours=ttl, force=force)
        except NoCacheError as exc:
            # Keep processing remaining URLs so one unreachable page does not
            # block the rest; remember the failure for the final exit code.
            had_failure = True
            err_console.print(f":cross_mark: [red]FAILED[/red] {exc.url} ({exc.reason})")
            continue

        if result.status is CacheStatus.STALE:
            err_console.print(f":warning: [yellow]STALE[/yellow] {url} — serving stale cache")
        else:
            status_label = result.status.value.upper()
            err_console.print(f":white_check_mark: [green]{status_label}[/green] {url}")

        console.print(result.path)

    if had_failure:
        raise typer.Exit(code=1)


# ---------------------------------------------------------------------------
# latest
# ---------------------------------------------------------------------------
Expand Down
48 changes: 47 additions & 1 deletion packages/skilllint/rule_registry.py
Original file line number Diff line number Diff line change
Expand Up @@ -23,7 +23,9 @@ def check_name_field(frontmatter: dict, path: Path) -> list[ValidationIssue]:

from __future__ import annotations

from collections.abc import Iterator
from typing import TYPE_CHECKING, Annotated, Any, Literal
from urllib.parse import urljoin

from pydantic import BaseModel, ConfigDict, Field

Expand Down Expand Up @@ -159,4 +161,48 @@ def list_rules(
return sorted(rules, key=lambda r: r.id)


__all__ = ["RULE_REGISTRY", "RuleAuthority", "RuleEntry", "get_rule", "list_rules", "skilllint_rule"]
def iter_authority_urls(*, unique: bool = True) -> Iterator[str]:
    """Iterate normalized authority documentation URLs from registered rules.

    Rules without an authority, or with an empty reference, are skipped.
    Origin-relative references are resolved against the rule's authority
    origin; a bare origin without a scheme defaults to HTTPS. Relative
    references whose origin is blank are skipped entirely.

    Args:
        unique: When True, yield each normalized URL at most once while preserving
            first-seen order (by sorted rule ID). When False, include duplicates.

    Yields:
        Absolute authority documentation URLs.
    """
    seen: set[str] = set()
    for rule in list_rules():
        if rule.authority is None:
            continue

        reference = rule.authority.reference
        if not reference:
            continue

        normalized = reference
        if not normalized.startswith(("https://", "http://")):
            origin = rule.authority.origin.strip()
            if not origin:
                # A relative reference with no origin cannot be resolved.
                continue
            if "://" not in origin:
                origin = f"https://{origin}"
            # Normalize to exactly one trailing slash so urljoin treats the
            # origin as a base, resolving both "/path" and "path" references.
            normalized = urljoin(f"{origin.rstrip('/')}/", normalized)

        if unique:
            if normalized in seen:
                continue
            seen.add(normalized)

        yield normalized


# Public API of the rule registry module (kept alphabetically sorted).
__all__ = [
    "RULE_REGISTRY",
    "RuleAuthority",
    "RuleEntry",
    "get_rule",
    "iter_authority_urls",
    "list_rules",
    "skilllint_rule",
]
56 changes: 56 additions & 0 deletions packages/skilllint/tests/test_cli_docs.py
Original file line number Diff line number Diff line change
Expand Up @@ -31,8 +31,10 @@
# ---------------------------------------------------------------------------

_TEST_URL = "https://docs.example.com/en/docs/settings.md"
_TEST_URL_2 = "https://docs.example.com/en/docs/hooks.md"
_TEST_PAGE = "settings"
_TEST_PATH = Path("/tmp/settings-2024-01-01-0000.md")
_TEST_PATH_2 = Path("/tmp/hooks-2024-01-01-0000.md")


def _cache_result(status: CacheStatus, path: Path = _TEST_PATH) -> CacheResult:
Expand Down Expand Up @@ -244,6 +246,60 @@ def test_default_force_is_false(self, cli_runner: CliRunner, mocker: MockerFixtu
assert kwargs.get("force") is False


# ---------------------------------------------------------------------------
# docs fetch-authorities
# ---------------------------------------------------------------------------


class TestDocsFetchAuthorities:
    """Tests for the ``docs fetch-authorities`` subcommand."""

    def test_fetches_all_authority_urls(self, cli_runner: CliRunner, mocker: MockerFixture) -> None:
        """Fetches each authority URL and prints one path per success."""
        mock_iter = mocker.patch("skilllint.cli_docs.iter_authority_urls")
        mock_iter.return_value = iter([_TEST_URL, _TEST_URL_2])

        mock_fetch = mocker.patch("skilllint.cli_docs.fetch_or_cached")
        mock_fetch.side_effect = [
            _cache_result(CacheStatus.FRESH, path=_TEST_PATH),
            _cache_result(CacheStatus.NEW, path=_TEST_PATH_2),
        ]

        result = cli_runner.invoke(plugin_validator.app, ["docs", "fetch-authorities"])

        assert result.exit_code == 0
        # The command must request deduplicated URLs from the registry.
        mock_iter.assert_called_once_with(unique=True)
        assert mock_fetch.call_count == 2
        assert str(_TEST_PATH) in result.output
        assert str(_TEST_PATH_2) in result.output

    def test_forwards_ttl_and_force_options(self, cli_runner: CliRunner, mocker: MockerFixture) -> None:
        """Forwards --ttl/--force values to fetch_or_cached for each URL."""
        mocker.patch("skilllint.cli_docs.iter_authority_urls", return_value=iter([_TEST_URL]))
        mock_fetch = mocker.patch("skilllint.cli_docs.fetch_or_cached")
        mock_fetch.return_value = _cache_result(CacheStatus.FRESH)

        result = cli_runner.invoke(plugin_validator.app, ["docs", "fetch-authorities", "--ttl", "12", "--force"])

        assert result.exit_code == 0
        mock_fetch.assert_called_once_with(_TEST_URL, ttl_hours=pytest.approx(12.0), force=True)

    def test_exits_one_when_any_authority_fetch_fails(self, cli_runner: CliRunner, mocker: MockerFixture) -> None:
        """Continues remaining URLs but exits 1 if any URL has no available cache."""
        mocker.patch("skilllint.cli_docs.iter_authority_urls", return_value=iter([_TEST_URL, _TEST_URL_2]))
        mock_fetch = mocker.patch("skilllint.cli_docs.fetch_or_cached")
        mock_fetch.side_effect = [
            NoCacheError(url=_TEST_URL, reason="network down"),
            _cache_result(CacheStatus.STALE, path=_TEST_PATH_2),
        ]

        result = cli_runner.invoke(plugin_validator.app, ["docs", "fetch-authorities"])

        assert result.exit_code == 1
        # Both URLs must still be attempted despite the first failure.
        assert mock_fetch.call_count == 2
        assert str(_TEST_PATH_2) in result.output


# ---------------------------------------------------------------------------
# docs latest
# ---------------------------------------------------------------------------
Expand Down
63 changes: 63 additions & 0 deletions packages/skilllint/tests/test_rule_registry.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,63 @@
"""Tests for rule registry helper iteration APIs."""

from __future__ import annotations

from skilllint.rule_registry import RULE_REGISTRY, RuleAuthority, RuleEntry, iter_authority_urls


def _entry(rule_id: str, reference: str | None, origin: str = "example.test") -> RuleEntry:
    """Build a minimal RuleEntry; attach an authority unless *reference* is None."""
    if reference is None:
        authority = None
    else:
        authority = RuleAuthority(origin=origin, reference=reference)
    return RuleEntry(
        id=rule_id,
        fn=lambda: None,
        severity="info",
        category="test",
        platforms=["agentskills"],
        docstring=f"Rule {rule_id}",
        authority=authority,
    )


def test_iter_authority_urls_unique_filters_empty_and_dedupes() -> None:
    """Default unique iteration dedupes while preserving first-seen order."""
    RULE_REGISTRY.clear()
    entries = [
        _entry("AA001", "https://example.test/spec#a"),
        _entry("AA002", "https://example.test/spec#a"),
        _entry("AA003", None),
        _entry("AA004", ""),
        _entry("AA005", "https://example.test/spec#b"),
    ]
    RULE_REGISTRY.update({entry.id: entry for entry in entries})

    assert list(iter_authority_urls()) == [
        "https://example.test/spec#a",
        "https://example.test/spec#b",
    ]


def test_iter_authority_urls_non_unique_keeps_duplicates() -> None:
    """unique=False includes duplicated references from multiple rules."""
    RULE_REGISTRY.clear()
    for entry in (
        _entry("AA001", "https://example.test/spec#a"),
        _entry("AA002", "https://example.test/spec#a"),
        _entry("AA003", "https://example.test/spec#b"),
    ):
        RULE_REGISTRY[entry.id] = entry

    collected = list(iter_authority_urls(unique=False))

    assert collected == [
        "https://example.test/spec#a",
        "https://example.test/spec#a",
        "https://example.test/spec#b",
    ]


def test_iter_authority_urls_resolves_relative_references() -> None:
    """Relative references are resolved against origin and deduped when normalized."""
    RULE_REGISTRY.clear()
    for entry in (
        _entry("AA001", "/specification#name"),
        _entry("AA002", "specification#name"),
        _entry("AA003", "https://example.test/specification#limits"),
        _entry("AA004", "/ignored", origin=""),
    ):
        RULE_REGISTRY[entry.id] = entry

    resolved = list(iter_authority_urls())

    assert resolved == [
        "https://example.test/specification#name",
        "https://example.test/specification#limits",
    ]
2 changes: 1 addition & 1 deletion scripts/fetch_doc_source.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,7 @@

from __future__ import annotations

from pathlib import Path # noqa: TC003
from pathlib import Path
from typing import Annotated

import typer
Expand Down