From 3981cea81ebb6dede677420490922a640febdf15 Mon Sep 17 00:00:00 2001 From: Maple Xu Date: Mon, 16 Mar 2026 20:01:16 -0400 Subject: [PATCH 01/30] Add LangSmith tracing plugin for Temporal workflows Implements a LangSmith contrib plugin that creates trace hierarchies for Temporal operations (workflows, activities, signals, queries, updates, child workflows, Nexus). Supports ambient @traceable context propagation, replay-safe tracing, and an add_temporal_runs toggle for lightweight context-only mode. Co-Authored-By: Claude Opus 4.6 --- pyproject.toml | 1 + temporalio/contrib/langsmith/__init__.py | 14 + temporalio/contrib/langsmith/_interceptor.py | 641 +++++++++++ temporalio/contrib/langsmith/_plugin.py | 74 ++ tests/contrib/langsmith/__init__.py | 0 tests/contrib/langsmith/conftest.py | 117 ++ tests/contrib/langsmith/test_integration.py | 555 +++++++++ tests/contrib/langsmith/test_interceptor.py | 1081 ++++++++++++++++++ tests/contrib/langsmith/test_plugin.py | 113 ++ uv.lock | 340 ++++++ 10 files changed, 2936 insertions(+) create mode 100644 temporalio/contrib/langsmith/__init__.py create mode 100644 temporalio/contrib/langsmith/_interceptor.py create mode 100644 temporalio/contrib/langsmith/_plugin.py create mode 100644 tests/contrib/langsmith/__init__.py create mode 100644 tests/contrib/langsmith/conftest.py create mode 100644 tests/contrib/langsmith/test_integration.py create mode 100644 tests/contrib/langsmith/test_interceptor.py create mode 100644 tests/contrib/langsmith/test_plugin.py diff --git a/pyproject.toml b/pyproject.toml index 4ee2fed92..342bf3b98 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -69,6 +69,7 @@ dev = [ "googleapis-common-protos==1.70.0", "pytest-rerunfailures>=16.1", "moto[s3,server]>=5", + "langsmith>=0.7.17", ] [tool.poe.tasks] diff --git a/temporalio/contrib/langsmith/__init__.py b/temporalio/contrib/langsmith/__init__.py new file mode 100644 index 000000000..465e36c19 --- /dev/null +++ 
b/temporalio/contrib/langsmith/__init__.py @@ -0,0 +1,14 @@ +"""LangSmith integration for Temporal SDK. + +This package provides LangSmith tracing integration for Temporal workflows, +activities, and other operations. It includes automatic run creation and +context propagation for distributed tracing in LangSmith. +""" + +from temporalio.contrib.langsmith._interceptor import LangSmithInterceptor +from temporalio.contrib.langsmith._plugin import LangSmithPlugin + +__all__ = [ + "LangSmithInterceptor", + "LangSmithPlugin", +] diff --git a/temporalio/contrib/langsmith/_interceptor.py b/temporalio/contrib/langsmith/_interceptor.py new file mode 100644 index 000000000..2d34b8c15 --- /dev/null +++ b/temporalio/contrib/langsmith/_interceptor.py @@ -0,0 +1,641 @@ +"""LangSmith interceptor implementation for Temporal SDK.""" + +from __future__ import annotations + +import json +from contextlib import contextmanager +from typing import Any, Iterator, Mapping, NoReturn + +import temporalio.activity +import temporalio.client +import temporalio.converter +import temporalio.worker +import temporalio.workflow +from langsmith import tracing_context +from langsmith.run_helpers import get_current_run_tree +from langsmith.run_trees import RunTree +from temporalio.api.common.v1 import Payload +from temporalio.exceptions import ApplicationError, ApplicationErrorCategory + +# --------------------------------------------------------------------------- +# Constants +# --------------------------------------------------------------------------- + +HEADER_KEY = "_langsmith-context" + +# --------------------------------------------------------------------------- +# Context helpers +# --------------------------------------------------------------------------- + +_payload_converter = temporalio.converter.PayloadConverter.default + + +def _inject_context( + headers: Mapping[str, Payload], + run_tree: Any, +) -> dict[str, Payload]: + """Inject LangSmith context into Temporal payload headers. 
+ + Serializes the run's trace context (trace ID, parent run ID, dotted order) + into a Temporal header under ``_langsmith-context``, enabling parent-child + trace nesting across process boundaries (client → worker, workflow → activity). + """ + ls_headers = run_tree.to_headers() + return { + **headers, + HEADER_KEY: _payload_converter.to_payloads([ls_headers])[0], + } + + +def _inject_current_context( + headers: Mapping[str, Payload], +) -> Mapping[str, Payload]: + """Inject the current ambient LangSmith context into Temporal payload headers. + + Reads ``get_current_run_tree()`` and injects if present. Returns headers + unchanged if no context is active. Called unconditionally so that context + propagation is independent of the ``add_temporal_runs`` toggle. + """ + current = get_current_run_tree() + if current is not None: + return _inject_context(headers, current) + return headers + + +def _extract_context( + headers: Mapping[str, Payload], +) -> Any | None: + """Extract LangSmith context from Temporal payload headers. + + Reconstructs a :class:`RunTree` from the ``_langsmith-context`` header on + the receiving side, so inbound interceptors can establish a parent-child + relationship with the sender's run. Returns ``None`` if no header is present. 
+ """ + header = headers.get(HEADER_KEY) + if not header: + return None + ls_headers = _payload_converter.from_payloads([header])[0] + return RunTree.from_headers(ls_headers) + + +def _inject_nexus_context( + headers: dict[str, str], + run_tree: Any, +) -> dict[str, str]: + """Inject LangSmith context into Nexus string headers.""" + ls_headers = run_tree.to_headers() + return { + **headers, + HEADER_KEY: json.dumps(ls_headers), + } + + +def _extract_nexus_context( + headers: dict[str, str], +) -> Any | None: + """Extract LangSmith context from Nexus string headers.""" + raw = headers.get(HEADER_KEY) + if not raw: + return None + ls_headers = json.loads(raw) + return RunTree.from_headers(ls_headers) + + +# --------------------------------------------------------------------------- +# Replay safety +# --------------------------------------------------------------------------- + + +def _is_replaying() -> bool: + """Check if we're currently replaying workflow history.""" + return ( + temporalio.workflow.in_workflow() + and temporalio.workflow.unsafe.is_replaying_history_events() + ) + + +# --------------------------------------------------------------------------- +# Sandbox-safe post/patch helpers +# --------------------------------------------------------------------------- + + +def _safe_post(run_tree: Any, in_workflow: bool) -> None: + if in_workflow: + with temporalio.workflow.unsafe.sandbox_unrestricted(): + run_tree.post() + else: + run_tree.post() + + +def _safe_patch(run_tree: Any, in_workflow: bool) -> None: + if in_workflow: + with temporalio.workflow.unsafe.sandbox_unrestricted(): + run_tree.patch() + else: + run_tree.patch() + + +# --------------------------------------------------------------------------- +# _maybe_run context manager +# --------------------------------------------------------------------------- + + +def _is_benign_error(exc: Exception) -> bool: + """Check if an exception is a benign ApplicationError.""" + return ( + isinstance(exc, 
ApplicationError) + and getattr(exc, "category", None) == ApplicationErrorCategory.BENIGN + ) + + +@contextmanager +def _maybe_run( + client: Any, + name: str, + *, + add_temporal_runs: bool, + run_type: str = "chain", + inputs: dict[str, Any] | None = None, + metadata: dict[str, Any] | None = None, + tags: list[str] | None = None, + parent: Any | None = None, + project_name: str | None = None, + in_workflow: bool = False, +) -> Iterator[Any | None]: + """Create a LangSmith run, handling errors and replay. + + - If replaying, yields None (skip tracing entirely). + - If add_temporal_runs is False, yields None (no run created). + Context propagation is handled unconditionally by callers. + - When a run IS created, sets it as ambient context via + ``tracing_context(parent=run_tree)`` so ``get_current_run_tree()`` + returns it and ``_inject_current_context()`` can inject it. + - On exception: marks run as errored (unless benign ApplicationError), re-raises. + - If in_workflow is True, wraps post()/patch() in sandbox_unrestricted(). 
+ """ + if _is_replaying(): + yield None + return + + if not add_temporal_runs: + yield None + return + + # If no explicit parent, inherit from ambient @traceable context + if parent is None: + parent = get_current_run_tree() + + kwargs: dict[str, Any] = dict( + name=name, + run_type=run_type, + inputs=inputs or {}, + ls_client=client, + ) + if project_name is not None: + kwargs["project_name"] = project_name + if parent is not None: + kwargs["parent_run"] = parent + if metadata: + kwargs["extra"] = {"metadata": metadata} + if tags: + kwargs["tags"] = tags + run_tree = RunTree(**kwargs) + _safe_post(run_tree, in_workflow) + try: + with tracing_context(parent=run_tree, client=client): + yield run_tree + except Exception as exc: + if not _is_benign_error(exc): + run_tree.end(error=f"{type(exc).__name__}: {exc}") + _safe_patch(run_tree, in_workflow) + raise + else: + run_tree.end(outputs={"status": "ok"}) + _safe_patch(run_tree, in_workflow) + + +# --------------------------------------------------------------------------- +# LangSmithInterceptor +# --------------------------------------------------------------------------- + + +class LangSmithInterceptor( + temporalio.client.Interceptor, temporalio.worker.Interceptor +): + """Interceptor that supports client and worker LangSmith run creation + and context propagation. 
+ """ + + def __init__( + self, + *, + client: Any | None = None, + project_name: str | None = None, + add_temporal_runs: bool = True, + default_metadata: dict[str, Any] | None = None, + default_tags: list[str] | None = None, + ) -> None: + # Import langsmith.Client lazily to avoid hard dependency at import time + if client is None: + import langsmith + + client = langsmith.Client() + self._client = client + self._project_name = project_name + self._add_temporal_runs = add_temporal_runs + self._default_metadata = default_metadata or {} + self._default_tags = default_tags or [] + + def intercept_client( + self, next: temporalio.client.OutboundInterceptor + ) -> temporalio.client.OutboundInterceptor: + return _LangSmithClientOutboundInterceptor(next, self) + + def intercept_activity( + self, next: temporalio.worker.ActivityInboundInterceptor + ) -> temporalio.worker.ActivityInboundInterceptor: + return _LangSmithActivityInboundInterceptor(next, self) + + def workflow_interceptor_class( + self, input: temporalio.worker.WorkflowInterceptorClassInput + ) -> type[_LangSmithWorkflowInboundInterceptor]: + config = self + + class InterceptorWithConfig(_LangSmithWorkflowInboundInterceptor): + _config = config + + return InterceptorWithConfig + + def intercept_nexus_operation( + self, next: temporalio.worker.NexusOperationInboundInterceptor + ) -> temporalio.worker.NexusOperationInboundInterceptor: + return _LangSmithNexusOperationInboundInterceptor(next, self) + + +# --------------------------------------------------------------------------- +# Client Outbound Interceptor +# --------------------------------------------------------------------------- + + +class _LangSmithClientOutboundInterceptor(temporalio.client.OutboundInterceptor): + """Instruments all client-side calls with LangSmith runs.""" + + def __init__( + self, + next: temporalio.client.OutboundInterceptor, + config: LangSmithInterceptor, + ) -> None: + super().__init__(next) + self._config = config + + async def 
start_workflow(self, input: Any) -> Any: + prefix = ( + "SignalWithStartWorkflow" if input.start_signal else "StartWorkflow" + ) + with _maybe_run( + self._config._client, + f"{prefix}:{input.workflow}", + add_temporal_runs=self._config._add_temporal_runs, + metadata={**self._config._default_metadata}, + tags=list(self._config._default_tags), + project_name=self._config._project_name, + ): + input.headers = _inject_current_context(input.headers) + return await super().start_workflow(input) + + async def query_workflow(self, input: Any) -> Any: + with _maybe_run( + self._config._client, + f"QueryWorkflow:{input.query}", + add_temporal_runs=self._config._add_temporal_runs, + metadata={**self._config._default_metadata}, + tags=list(self._config._default_tags), + project_name=self._config._project_name, + ): + input.headers = _inject_current_context(input.headers) + return await super().query_workflow(input) + + async def signal_workflow(self, input: Any) -> None: + with _maybe_run( + self._config._client, + f"SignalWorkflow:{input.signal}", + add_temporal_runs=self._config._add_temporal_runs, + metadata={**self._config._default_metadata}, + tags=list(self._config._default_tags), + project_name=self._config._project_name, + ): + input.headers = _inject_current_context(input.headers) + return await super().signal_workflow(input) + + async def start_workflow_update(self, input: Any) -> Any: + with _maybe_run( + self._config._client, + f"StartWorkflowUpdate:{input.update}", + add_temporal_runs=self._config._add_temporal_runs, + metadata={**self._config._default_metadata}, + tags=list(self._config._default_tags), + project_name=self._config._project_name, + ): + input.headers = _inject_current_context(input.headers) + return await super().start_workflow_update(input) + + async def start_update_with_start_workflow(self, input: Any) -> Any: + with _maybe_run( + self._config._client, + f"StartUpdateWithStartWorkflow:{input.start_workflow_input.workflow}", + 
add_temporal_runs=self._config._add_temporal_runs, + metadata={**self._config._default_metadata}, + tags=list(self._config._default_tags), + project_name=self._config._project_name, + ): + input.start_workflow_input.headers = _inject_current_context( + input.start_workflow_input.headers + ) + input.update_workflow_input.headers = _inject_current_context( + input.update_workflow_input.headers + ) + return await super().start_update_with_start_workflow(input) + + +# --------------------------------------------------------------------------- +# Activity Inbound Interceptor +# --------------------------------------------------------------------------- + + +class _LangSmithActivityInboundInterceptor( + temporalio.worker.ActivityInboundInterceptor +): + """Instruments activity execution with LangSmith runs.""" + + def __init__( + self, + next: temporalio.worker.ActivityInboundInterceptor, + config: LangSmithInterceptor, + ) -> None: + super().__init__(next) + self._config = config + + async def execute_activity(self, input: Any) -> Any: + parent = _extract_context(input.headers) + info = temporalio.activity.info() + metadata = { + **self._config._default_metadata, + "temporalWorkflowID": info.workflow_id or "", + "temporalRunID": info.workflow_run_id or "", + "temporalActivityID": info.activity_id, + } + # Unconditionally set tracing context so @traceable functions inside + # activities can use the plugin's LangSmith client and inherit parent. + # When add_temporal_runs=True: _maybe_run overrides with the RunActivity run. + # When add_temporal_runs=False: parent (if any) remains active for @traceable, + # and the client is available even without a parent. + # Override the parent's ls_client so @traceable children (via create_child) + # use the plugin's client rather than lazily creating a real one. 
+ if parent is not None and hasattr(parent, "ls_client"): + parent.ls_client = self._config._client + ctx_kwargs: dict[str, Any] = { + "client": self._config._client, + "enabled": True, + } + if parent: + ctx_kwargs["parent"] = parent + with tracing_context(**ctx_kwargs): + with _maybe_run( + self._config._client, + f"RunActivity:{info.activity_type}", + add_temporal_runs=self._config._add_temporal_runs, + run_type="tool", + metadata=metadata, + tags=list(self._config._default_tags), + parent=parent, + project_name=self._config._project_name, + ): + return await super().execute_activity(input) + + +# --------------------------------------------------------------------------- +# Workflow Inbound Interceptor +# --------------------------------------------------------------------------- + + +class _LangSmithWorkflowInboundInterceptor( + temporalio.worker.WorkflowInboundInterceptor +): + """Instruments workflow execution with LangSmith runs.""" + + _config: LangSmithInterceptor + _current_run: Any | None = None + + def init(self, outbound: temporalio.worker.WorkflowOutboundInterceptor) -> None: + super().init( + _LangSmithWorkflowOutboundInterceptor(outbound, self._config, self) + ) + + @contextmanager + def _workflow_maybe_run( + self, name: str, parent: Any | None = None + ) -> Iterator[Any | None]: + """Workflow-specific run creation with metadata. + + Stores the run (or parent fallback) as ``_current_run`` so the outbound + interceptor can propagate context even when ``add_temporal_runs=False``. 
+ """ + info = temporalio.workflow.info() + metadata = { + **self._config._default_metadata, + "temporalWorkflowID": info.workflow_id, + "temporalRunID": info.run_id, + } + with _maybe_run( + self._config._client, + name, + add_temporal_runs=self._config._add_temporal_runs, + metadata=metadata, + tags=list(self._config._default_tags), + parent=parent, + project_name=self._config._project_name, + in_workflow=True, + ) as run: + self._current_run = run or parent + try: + yield run + finally: + self._current_run = None + + async def execute_workflow(self, input: Any) -> Any: + parent = _extract_context(input.headers) + with self._workflow_maybe_run( + f"RunWorkflow:{temporalio.workflow.info().workflow_type}", + parent=parent, + ): + return await super().execute_workflow(input) + + async def handle_signal(self, input: Any) -> None: + parent = _extract_context(input.headers) + with self._workflow_maybe_run( + f"HandleSignal:{input.signal}", parent=parent + ): + return await super().handle_signal(input) + + async def handle_query(self, input: Any) -> Any: + parent = _extract_context(input.headers) + with self._workflow_maybe_run( + f"HandleQuery:{input.query}", parent=parent + ): + return await super().handle_query(input) + + def handle_update_validator(self, input: Any) -> None: + parent = _extract_context(input.headers) + with self._workflow_maybe_run( + f"ValidateUpdate:{input.update}", parent=parent + ): + return super().handle_update_validator(input) + + async def handle_update_handler(self, input: Any) -> Any: + parent = _extract_context(input.headers) + with self._workflow_maybe_run( + f"HandleUpdate:{input.update}", parent=parent + ): + return await super().handle_update_handler(input) + + +# --------------------------------------------------------------------------- +# Workflow Outbound Interceptor +# --------------------------------------------------------------------------- + + +class _LangSmithWorkflowOutboundInterceptor( + 
temporalio.worker.WorkflowOutboundInterceptor +): + """Instruments all outbound calls from workflow code.""" + + def __init__( + self, + next: temporalio.worker.WorkflowOutboundInterceptor, + config: LangSmithInterceptor, + inbound: _LangSmithWorkflowInboundInterceptor, + ) -> None: + super().__init__(next) + self._config = config + self._inbound = inbound + + @contextmanager + def _workflow_maybe_run(self, name: str) -> Iterator[Any | None]: + """Outbound workflow run creation, parented under inbound's current run.""" + with _maybe_run( + self._config._client, + name, + add_temporal_runs=self._config._add_temporal_runs, + metadata={**self._config._default_metadata}, + tags=list(self._config._default_tags), + parent=self._inbound._current_run, + project_name=self._config._project_name, + in_workflow=True, + ) as run: + yield run + + def start_activity(self, input: Any) -> Any: + with self._workflow_maybe_run(f"StartActivity:{input.activity}") as run: + context_source = run or self._inbound._current_run + if context_source: + input.headers = _inject_context(input.headers, context_source) + return super().start_activity(input) + + def start_local_activity(self, input: Any) -> Any: + with self._workflow_maybe_run(f"StartActivity:{input.activity}") as run: + context_source = run or self._inbound._current_run + if context_source: + input.headers = _inject_context(input.headers, context_source) + return super().start_local_activity(input) + + async def start_child_workflow(self, input: Any) -> Any: + with self._workflow_maybe_run( + f"StartChildWorkflow:{input.workflow}" + ) as run: + context_source = run or self._inbound._current_run + if context_source: + input.headers = _inject_context(input.headers, context_source) + return await super().start_child_workflow(input) + + async def signal_child_workflow(self, input: Any) -> None: + with self._workflow_maybe_run( + f"SignalChildWorkflow:{input.signal}" + ) as run: + context_source = run or self._inbound._current_run + if 
context_source: + input.headers = _inject_context(input.headers, context_source) + return await super().signal_child_workflow(input) + + async def signal_external_workflow(self, input: Any) -> None: + with self._workflow_maybe_run( + f"SignalExternalWorkflow:{input.signal}" + ) as run: + context_source = run or self._inbound._current_run + if context_source: + input.headers = _inject_context(input.headers, context_source) + return await super().signal_external_workflow(input) + + def continue_as_new(self, input: Any) -> NoReturn: + # No trace created, but inject context from inbound's current run + current_run = getattr(self._inbound, "_current_run", None) + if current_run: + input.headers = _inject_context(input.headers, current_run) + super().continue_as_new(input) + + async def start_nexus_operation(self, input: Any) -> Any: + with self._workflow_maybe_run( + f"StartNexusOperation:{input.service}/{input.operation_name}" + ) as run: + context_source = run or self._inbound._current_run + if context_source: + input.headers = _inject_nexus_context( + input.headers or {}, context_source + ) + return await super().start_nexus_operation(input) + + +# --------------------------------------------------------------------------- +# Nexus Operation Inbound Interceptor +# --------------------------------------------------------------------------- + + +class _LangSmithNexusOperationInboundInterceptor( + temporalio.worker.NexusOperationInboundInterceptor +): + """Instruments Nexus operations with LangSmith runs.""" + + def __init__( + self, + next: temporalio.worker.NexusOperationInboundInterceptor, + config: LangSmithInterceptor, + ) -> None: + super().__init__(next) + self._config = config + + async def execute_nexus_operation_start(self, input: Any) -> Any: + parent = _extract_nexus_context(input.ctx.headers) + with _maybe_run( + self._config._client, + f"RunStartNexusOperationHandler:{input.ctx.service}/{input.ctx.operation}", + 
add_temporal_runs=self._config._add_temporal_runs, + run_type="tool", + metadata={**self._config._default_metadata}, + tags=list(self._config._default_tags), + parent=parent, + project_name=self._config._project_name, + ): + return await self.next.execute_nexus_operation_start(input) + + async def execute_nexus_operation_cancel(self, input: Any) -> Any: + parent = _extract_nexus_context(input.ctx.headers) + with _maybe_run( + self._config._client, + f"RunCancelNexusOperationHandler:{input.ctx.service}/{input.ctx.operation}", + add_temporal_runs=self._config._add_temporal_runs, + run_type="tool", + metadata={**self._config._default_metadata}, + tags=list(self._config._default_tags), + parent=parent, + project_name=self._config._project_name, + ): + return await self.next.execute_nexus_operation_cancel(input) diff --git a/temporalio/contrib/langsmith/_plugin.py b/temporalio/contrib/langsmith/_plugin.py new file mode 100644 index 000000000..dfac32ece --- /dev/null +++ b/temporalio/contrib/langsmith/_plugin.py @@ -0,0 +1,74 @@ +"""LangSmith plugin for Temporal SDK.""" + +from __future__ import annotations + +import dataclasses +from typing import Any + +from temporalio.contrib.langsmith._interceptor import LangSmithInterceptor +from temporalio.plugin import SimplePlugin +from temporalio.worker import WorkflowRunner +from temporalio.worker.workflow_sandbox import SandboxedWorkflowRunner + + +class LangSmithPlugin(SimplePlugin): + """LangSmith tracing plugin for Temporal SDK. + + Provides automatic LangSmith run creation for workflows, activities, + and other Temporal operations with context propagation. + """ + + def __init__( + self, + *, + client: Any | None = None, + project_name: str | None = None, + add_temporal_runs: bool = True, + metadata: dict[str, Any] | None = None, + tags: list[str] | None = None, + ) -> None: + """Initialize the LangSmith plugin. + + Args: + client: A langsmith.Client instance. 
If None, one will be created + lazily (using LANGSMITH_API_KEY env var). + project_name: LangSmith project name for traces. + add_temporal_runs: Whether to create LangSmith runs for Temporal + operations. Defaults to True. + metadata: Default metadata to attach to all runs. + tags: Default tags to attach to all runs. + """ + interceptor = LangSmithInterceptor( + client=client, + project_name=project_name, + add_temporal_runs=add_temporal_runs, + default_metadata=metadata, + default_tags=tags, + ) + interceptors = [interceptor] + + def workflow_runner(runner: WorkflowRunner | None) -> WorkflowRunner: + if not runner: + raise ValueError("No WorkflowRunner provided to the LangSmith plugin.") + if isinstance(runner, SandboxedWorkflowRunner): + return dataclasses.replace( + runner, + restrictions=runner.restrictions.with_passthrough_modules( + "langsmith" + ), + ) + return runner + + super().__init__( + "LangSmithPlugin", + interceptors=interceptors, + workflow_runner=workflow_runner, + ) + + async def shutdown(self) -> None: + """Flush the LangSmith client to drain pending runs.""" + if not self.interceptors: + return + interceptor = self.interceptors[0] + if isinstance(interceptor, LangSmithInterceptor) and interceptor._client is not None: + interceptor._client.flush() diff --git a/tests/contrib/langsmith/__init__.py b/tests/contrib/langsmith/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/tests/contrib/langsmith/conftest.py b/tests/contrib/langsmith/conftest.py new file mode 100644 index 000000000..7973f880a --- /dev/null +++ b/tests/contrib/langsmith/conftest.py @@ -0,0 +1,117 @@ +"""Shared test fixtures for LangSmith plugin tests.""" + +from __future__ import annotations + +from dataclasses import dataclass +from typing import Any +from unittest.mock import MagicMock + +import pytest + + +@dataclass +class _RunRecord: + """A single recorded run.""" + + id: str + parent_run_id: str | None + name: str + run_type: str + inputs: dict[str, Any] 
+ outputs: dict[str, Any] | None = None + error: str | None = None + + +class InMemoryRunCollector: + """Collects runs from a mock LangSmith client. + + Each call to create_run / update_run appends or updates an entry. + """ + + def __init__(self) -> None: + self.runs: list[_RunRecord] = [] + self._by_id: dict[str, _RunRecord] = {} + + def record_create(self, **kwargs: Any) -> None: + rec = _RunRecord( + id=str(kwargs.get("id", kwargs.get("run_id", ""))), + parent_run_id=( + str(kwargs["parent_run_id"]) if kwargs.get("parent_run_id") else None + ), + name=kwargs.get("name", ""), + run_type=kwargs.get("run_type", "chain"), + inputs=kwargs.get("inputs", {}), + ) + self.runs.append(rec) + self._by_id[rec.id] = rec + + def record_update(self, run_id: str, **kwargs: Any) -> None: + run_id_str = str(run_id) + rec = self._by_id.get(run_id_str) + if rec is None: + return + if "outputs" in kwargs: + rec.outputs = kwargs["outputs"] + if "error" in kwargs: + rec.error = kwargs["error"] + + def clear(self) -> None: + self.runs.clear() + self._by_id.clear() + + +def dump_runs(collector: InMemoryRunCollector) -> list[str]: + """Reconstruct parent-child hierarchy from collected runs. 
+ + Returns a list of indented strings, e.g.: + ["StartWorkflow:MyWf", " RunWorkflow:MyWf", " StartActivity:do_thing"] + """ + runs = collector.runs + children: dict[str | None, list[_RunRecord]] = {} + for r in runs: + children.setdefault(r.parent_run_id, []).append(r) + + result: list[str] = [] + + def _walk(parent_id: str | None, depth: int) -> None: + for child in children.get(parent_id, []): + result.append(" " * depth + child.name) + _walk(child.id, depth + 1) + + # Roots: runs whose parent_run_id is None or not in our set + known_ids = {r.id for r in runs} + root_parents = { + r.parent_run_id + for r in runs + if r.parent_run_id is None or r.parent_run_id not in known_ids + } + for rp in sorted(root_parents, key=lambda x: (x is not None, x)): + _walk(rp, 0) + + return result + + +@pytest.fixture +def collector() -> InMemoryRunCollector: + return InMemoryRunCollector() + + +@pytest.fixture +def mock_ls_client(collector: InMemoryRunCollector) -> MagicMock: + """A mock langsmith.Client that records create_run / update_run calls.""" + client = MagicMock() + client.create_run.side_effect = collector.record_create + client.update_run.side_effect = collector.record_update + # Stub session property (needed by RunTree internals) + client.session = MagicMock() + client.tracing_queue = MagicMock() + return client + + +@pytest.fixture +def langsmith_plugin(mock_ls_client: MagicMock, collector: InMemoryRunCollector): + """Return (plugin, collector) wired to a mock client.""" + from temporalio.contrib.langsmith import LangSmithPlugin + + plugin = LangSmithPlugin(client=mock_ls_client) + return plugin, collector diff --git a/tests/contrib/langsmith/test_integration.py b/tests/contrib/langsmith/test_integration.py new file mode 100644 index 000000000..133f4377d --- /dev/null +++ b/tests/contrib/langsmith/test_integration.py @@ -0,0 +1,555 @@ +"""Integration tests for LangSmith plugin with real Temporal worker.""" + +from __future__ import annotations + +import uuid +from 
datetime import timedelta +from typing import Any +from unittest.mock import MagicMock + +import pytest +from langsmith import traceable, tracing_context + +from temporalio import activity, common, workflow +from temporalio.client import Client, WorkflowFailureError +from temporalio.contrib.langsmith import LangSmithPlugin +from temporalio.exceptions import ApplicationError +from temporalio.testing import WorkflowEnvironment + +from tests.contrib.langsmith.conftest import InMemoryRunCollector, dump_runs +from tests.helpers import new_worker + + +# --------------------------------------------------------------------------- +# Shared @traceable functions and activities +# --------------------------------------------------------------------------- + + +@traceable(name="inner_llm_call") +async def _inner_llm_call(prompt: str) -> str: + """Simulates an LLM call decorated with @traceable.""" + return f"response to: {prompt}" + + +@traceable(name="outer_chain") +async def _outer_chain(prompt: str) -> str: + """A @traceable that calls another @traceable.""" + return await _inner_llm_call(prompt) + + +@activity.defn +async def traceable_activity() -> str: + """Activity that calls a @traceable function.""" + result = await _inner_llm_call("hello") + return result + + +@activity.defn +async def nested_traceable_activity() -> str: + """Activity with two levels of @traceable nesting.""" + result = await _outer_chain("hello") + return result + + +# --------------------------------------------------------------------------- +# Shared workflows +# --------------------------------------------------------------------------- + + +@workflow.defn +class TraceableActivityWorkflow: + @workflow.run + async def run(self) -> str: + return await workflow.execute_activity( + traceable_activity, + start_to_close_timeout=timedelta(seconds=10), + ) + + +# --------------------------------------------------------------------------- +# Simple/basic workflows and activities +# 
# ---------------------------------------------------------------------------


@activity.defn
async def simple_activity() -> str:
    """Trivial activity used by SimpleWorkflow."""
    return "activity-done"


@workflow.defn
class SimpleWorkflow:
    """Workflow that runs exactly one activity and returns its result."""

    @workflow.run
    async def run(self) -> str:
        return await workflow.execute_activity(
            simple_activity,
            start_to_close_timeout=timedelta(seconds=10),
        )


# ---------------------------------------------------------------------------
# Signal/query/update workflows
# ---------------------------------------------------------------------------


@workflow.defn
class ComprehensiveWorkflow:
    """Exercises a regular activity, a local activity, a child workflow,
    and the signal/query/update handlers in a single run.
    """

    def __init__(self) -> None:
        # Flipped by my_signal; gates the first wait_condition.
        self._signal_received = False
        # Flipped by my_update; gates the second wait_condition.
        self._complete = False

    @workflow.run
    async def run(self) -> str:
        # Regular activity.
        await workflow.execute_activity(
            nested_traceable_activity,
            start_to_close_timeout=timedelta(seconds=10),
        )
        # Local activity.
        await workflow.execute_local_activity(
            nested_traceable_activity,
            start_to_close_timeout=timedelta(seconds=10),
        )
        # Child workflow.
        await workflow.execute_child_workflow(
            TraceableActivityWorkflow.run,
            id=f"child-{workflow.info().workflow_id}",
        )
        # Block until the signal arrives, then until the update completes.
        await workflow.wait_condition(lambda: self._signal_received)
        await workflow.wait_condition(lambda: self._complete)
        return "comprehensive-done"

    @workflow.signal
    def my_signal(self, value: str) -> None:
        self._signal_received = True

    @workflow.query
    def my_query(self) -> bool:
        return self._signal_received

    @workflow.update
    def my_update(self, value: str) -> str:
        self._complete = True
        return f"updated-{value}"

    @my_update.validator
    def validate_my_update(self, value: str) -> None:
        if not value:
            raise ValueError("empty")


# ---------------------------------------------------------------------------
# Error workflows and activities
# ---------------------------------------------------------------------------


@activity.defn
async def failing_activity() -> str:
    """Activity that always fails with a non-retryable ApplicationError."""
    raise ApplicationError("activity-failed", non_retryable=True)


@activity.defn
async def benign_failing_activity() -> str:
    """Activity that fails with a BENIGN-category ApplicationError."""
    # Local import keeps the module-level import list minimal.
    from temporalio.exceptions import ApplicationErrorCategory

    raise ApplicationError(
        "benign-fail",
        non_retryable=True,
        category=ApplicationErrorCategory.BENIGN,
    )


@workflow.defn
class FailingWorkflow:
    """Workflow that fails directly, without running any activity."""

    @workflow.run
    async def run(self) -> str:
        raise ApplicationError("workflow-failed", non_retryable=True)


@workflow.defn
class ActivityFailureWorkflow:
    """Workflow whose single activity fails (one attempt only)."""

    @workflow.run
    async def run(self) -> str:
        return await workflow.execute_activity(
            failing_activity,
            start_to_close_timeout=timedelta(seconds=10),
            retry_policy=common.RetryPolicy(maximum_attempts=1),
        )


@workflow.defn
class BenignErrorWorkflow:
    """Workflow whose single activity fails with a benign error."""

    @workflow.run
    async def run(self) -> str:
        return await workflow.execute_activity(
            benign_failing_activity,
            start_to_close_timeout=timedelta(seconds=10),
            retry_policy=common.RetryPolicy(maximum_attempts=1),
        )


# ---------------------------------------------------------------------------
# Helpers
# ---------------------------------------------------------------------------


def _make_plugin_and_collector(
    **kwargs: Any,
) -> tuple[LangSmithPlugin, InMemoryRunCollector, MagicMock]:
    """Build a LangSmithPlugin whose (mock) client records into a collector."""
    collector = InMemoryRunCollector()
    mock_ls_client = MagicMock()
    # Route run creation/updates into the in-memory collector so tests can
    # assert on the resulting trace hierarchy.
    mock_ls_client.create_run.side_effect = collector.record_create
    mock_ls_client.update_run.side_effect = collector.record_update
    mock_ls_client.session = MagicMock()
    mock_ls_client.tracing_queue = MagicMock()
    plugin = LangSmithPlugin(client=mock_ls_client, **kwargs)
    return plugin, collector, mock_ls_client


def _make_client_and_collector(
    client: Client, **kwargs: Any
) -> tuple[Client, InMemoryRunCollector, MagicMock]:
    """Clone a Temporal Client with the LangSmith plugin attached."""
    plugin, collector, mock_ls_client = _make_plugin_and_collector(**kwargs)
    config = client.config()
    config["plugins"] = [plugin]
    return Client(**config), collector, mock_ls_client


# ---------------------------------------------------------------------------
# TestBasicTracing
# ---------------------------------------------------------------------------


class TestBasicTracing:
    async def test_workflow_activity_trace_hierarchy(
        self, client: Client, env: WorkflowEnvironment
    ) -> None:
        """StartWorkflow → RunWorkflow → StartActivity → RunActivity hierarchy."""
        temporal_client, collector, _ = _make_client_and_collector(client)

        async with new_worker(
            temporal_client,
            SimpleWorkflow,
            activities=[simple_activity],
        ) as worker:
            handle = await temporal_client.start_workflow(
                SimpleWorkflow.run,
                id=f"basic-trace-{uuid.uuid4()}",
                task_queue=worker.task_queue,
            )
            assert await handle.result() == "activity-done"

        expected = [
            "StartWorkflow:SimpleWorkflow",
            "  RunWorkflow:SimpleWorkflow",
            "    StartActivity:simple_activity",
            "      RunActivity:simple_activity",
        ]
        hierarchy = dump_runs(collector)
        assert hierarchy == expected, (
            f"Hierarchy mismatch.\nExpected:\n{expected}\nActual:\n{hierarchy}"
        )

        # Run types: only RunActivity is a "tool"; everything else is "chain".
        for run in collector.runs:
            if run.name == "RunActivity:simple_activity":
                assert run.run_type == "tool", (
                    f"Expected RunActivity run_type='tool', got '{run.run_type}'"
                )
            else:
                assert run.run_type == "chain", (
                    f"Expected {run.name} run_type='chain', got '{run.run_type}'"
                )

        # Every successful run ends with the sentinel outputs payload.
        for run in collector.runs:
            assert run.outputs == {"status": "ok"}, (
                f"Expected {run.name} outputs={{'status': 'ok'}}, got {run.outputs}"
            )


# ---------------------------------------------------------------------------
# TestReplay
# ---------------------------------------------------------------------------


class TestReplay:
    async def test_no_duplicate_traces_on_replay(
        self, client: Client, env: WorkflowEnvironment
    ) -> None:
        """With max_cached_workflows=0 (forcing replay), no duplicate runs appear."""
        temporal_client, collector, _ = _make_client_and_collector(client)

        async with new_worker(
            temporal_client,
            TraceableActivityWorkflow,
            activities=[traceable_activity],
            max_cached_workflows=0,
        ) as worker:
            handle = await temporal_client.start_workflow(
                TraceableActivityWorkflow.run,
                id=f"replay-test-{uuid.uuid4()}",
                task_queue=worker.task_queue,
            )
            await handle.result()

        # Workflow → activity → @traceable must yield exactly this tree, with
        # no extra runs produced by the forced replays.
        expected = [
            "StartWorkflow:TraceableActivityWorkflow",
            "  RunWorkflow:TraceableActivityWorkflow",
            "    StartActivity:traceable_activity",
            "      RunActivity:traceable_activity",
            "        inner_llm_call",
        ]
        hierarchy = dump_runs(collector)
        assert hierarchy == expected, (
            f"Hierarchy mismatch (possible replay duplicates).\n"
            f"Expected:\n{expected}\nActual:\n{hierarchy}"
        )


# ---------------------------------------------------------------------------
#
TestErrorTracing +# --------------------------------------------------------------------------- + + +class TestErrorTracing: + async def test_activity_failure_marked( + self, client: Client, env: WorkflowEnvironment + ) -> None: + """A failing activity run is marked with an error.""" + temporal_client, collector, _ = _make_client_and_collector(client) + + async with new_worker( + temporal_client, + ActivityFailureWorkflow, + activities=[failing_activity], + workflow_failure_exception_types=[ApplicationError], + ) as worker: + handle = await temporal_client.start_workflow( + ActivityFailureWorkflow.run, + id=f"act-fail-{uuid.uuid4()}", + task_queue=worker.task_queue, + ) + with pytest.raises(WorkflowFailureError): + await handle.result() + + hierarchy = dump_runs(collector) + expected = [ + "StartWorkflow:ActivityFailureWorkflow", + " RunWorkflow:ActivityFailureWorkflow", + " StartActivity:failing_activity", + " RunActivity:failing_activity", + ] + assert hierarchy == expected, ( + f"Hierarchy mismatch.\nExpected:\n{expected}\nActual:\n{hierarchy}" + ) + # Verify the RunActivity run has an error + activity_runs = [ + r for r in collector.runs if r.name == "RunActivity:failing_activity" + ] + assert len(activity_runs) == 1 + assert activity_runs[0].error == "ApplicationError: activity-failed" + + async def test_workflow_failure_marked( + self, client: Client, env: WorkflowEnvironment + ) -> None: + """A failing workflow run is marked with an error.""" + temporal_client, collector, _ = _make_client_and_collector(client) + + async with new_worker( + temporal_client, + FailingWorkflow, + workflow_failure_exception_types=[ApplicationError], + ) as worker: + handle = await temporal_client.start_workflow( + FailingWorkflow.run, + id=f"wf-fail-{uuid.uuid4()}", + task_queue=worker.task_queue, + ) + with pytest.raises(WorkflowFailureError): + await handle.result() + + hierarchy = dump_runs(collector) + expected = [ + "StartWorkflow:FailingWorkflow", + " 
RunWorkflow:FailingWorkflow", + ] + assert hierarchy == expected, ( + f"Hierarchy mismatch.\nExpected:\n{expected}\nActual:\n{hierarchy}" + ) + # Verify the RunWorkflow run has an error + wf_runs = [ + r for r in collector.runs if r.name == "RunWorkflow:FailingWorkflow" + ] + assert len(wf_runs) == 1 + assert wf_runs[0].error == "ApplicationError: workflow-failed" + + async def test_benign_error_not_marked( + self, client: Client, env: WorkflowEnvironment + ) -> None: + """A benign ApplicationError does NOT mark the run as errored.""" + temporal_client, collector, _ = _make_client_and_collector(client) + + async with new_worker( + temporal_client, + BenignErrorWorkflow, + activities=[benign_failing_activity], + workflow_failure_exception_types=[ApplicationError], + ) as worker: + handle = await temporal_client.start_workflow( + BenignErrorWorkflow.run, + id=f"benign-{uuid.uuid4()}", + task_queue=worker.task_queue, + ) + with pytest.raises(WorkflowFailureError): + await handle.result() + + hierarchy = dump_runs(collector) + expected = [ + "StartWorkflow:BenignErrorWorkflow", + " RunWorkflow:BenignErrorWorkflow", + " StartActivity:benign_failing_activity", + " RunActivity:benign_failing_activity", + ] + assert hierarchy == expected, ( + f"Hierarchy mismatch.\nExpected:\n{expected}\nActual:\n{hierarchy}" + ) + # The RunActivity run for benign error should NOT have error set + activity_runs = [ + r + for r in collector.runs + if r.name == "RunActivity:benign_failing_activity" + ] + assert len(activity_runs) == 1 + assert activity_runs[0].error is None + + +# --------------------------------------------------------------------------- +# TestComprehensiveTracing +# --------------------------------------------------------------------------- + + +class TestComprehensiveTracing: + async def test_comprehensive_with_temporal_runs( + self, client: Client, env: WorkflowEnvironment + ) -> None: + """Full workflow exercising activity, local activity, child workflow, + signal, 
query, and update — all nested under an ambient @traceable. + """ + temporal_client, collector, mock_ls_client = _make_client_and_collector(client) + + @traceable(name="user_pipeline") + async def user_pipeline() -> str: + async with new_worker( + temporal_client, + ComprehensiveWorkflow, + TraceableActivityWorkflow, + activities=[nested_traceable_activity, traceable_activity], + ) as worker: + handle = await temporal_client.start_workflow( + ComprehensiveWorkflow.run, + id=f"comprehensive-{uuid.uuid4()}", + task_queue=worker.task_queue, + ) + # Query + await handle.query(ComprehensiveWorkflow.my_query) + # Signal + await handle.signal(ComprehensiveWorkflow.my_signal, "hello") + # Update (completes the workflow) + await handle.execute_update( + ComprehensiveWorkflow.my_update, "finish" + ) + return await handle.result() + + with tracing_context(client=mock_ls_client, enabled=True): + result = await user_pipeline() + + assert result == "comprehensive-done" + + hierarchy = dump_runs(collector) + expected = [ + "user_pipeline", + " StartWorkflow:ComprehensiveWorkflow", + " RunWorkflow:ComprehensiveWorkflow", + " StartActivity:nested_traceable_activity", + " RunActivity:nested_traceable_activity", + " outer_chain", + " inner_llm_call", + " StartActivity:nested_traceable_activity", + " RunActivity:nested_traceable_activity", + " outer_chain", + " inner_llm_call", + " StartChildWorkflow:TraceableActivityWorkflow", + " RunWorkflow:TraceableActivityWorkflow", + " StartActivity:traceable_activity", + " RunActivity:traceable_activity", + " inner_llm_call", + " QueryWorkflow:my_query", + " HandleQuery:my_query", + " SignalWorkflow:my_signal", + " HandleSignal:my_signal", + " StartWorkflowUpdate:my_update", + " ValidateUpdate:my_update", + " HandleUpdate:my_update", + ] + assert hierarchy == expected, ( + f"Hierarchy mismatch.\nExpected:\n{expected}\nActual:\n{hierarchy}" + ) + + async def test_comprehensive_without_temporal_runs( + self, client: Client, env: 
WorkflowEnvironment + ) -> None: + """With add_temporal_runs=False, only @traceable runs appear, + all nested under the ambient user_pipeline. + """ + temporal_client, collector, mock_ls_client = _make_client_and_collector( + client, add_temporal_runs=False + ) + + @traceable(name="user_pipeline") + async def user_pipeline() -> str: + async with new_worker( + temporal_client, + ComprehensiveWorkflow, + TraceableActivityWorkflow, + activities=[nested_traceable_activity, traceable_activity], + ) as worker: + handle = await temporal_client.start_workflow( + ComprehensiveWorkflow.run, + id=f"comprehensive-no-runs-{uuid.uuid4()}", + task_queue=worker.task_queue, + ) + # Query + await handle.query(ComprehensiveWorkflow.my_query) + # Signal + await handle.signal(ComprehensiveWorkflow.my_signal, "hello") + # Update (completes the workflow) + await handle.execute_update( + ComprehensiveWorkflow.my_update, "finish" + ) + return await handle.result() + + with tracing_context(client=mock_ls_client, enabled=True): + result = await user_pipeline() + + assert result == "comprehensive-done" + + hierarchy = dump_runs(collector) + expected = [ + "user_pipeline", + " outer_chain", + " inner_llm_call", + " outer_chain", + " inner_llm_call", + " inner_llm_call", + ] + assert hierarchy == expected, ( + f"Hierarchy mismatch.\nExpected:\n{expected}\nActual:\n{hierarchy}" + ) diff --git a/tests/contrib/langsmith/test_interceptor.py b/tests/contrib/langsmith/test_interceptor.py new file mode 100644 index 000000000..2ff8e3eb1 --- /dev/null +++ b/tests/contrib/langsmith/test_interceptor.py @@ -0,0 +1,1081 @@ +"""Tests for LangSmith interceptor points and helper functions.""" + +from __future__ import annotations + +import asyncio +from typing import Any +from unittest.mock import AsyncMock, MagicMock, patch + +import pytest + +from temporalio.api.common.v1 import Payload +from temporalio.contrib.langsmith import LangSmithInterceptor +from temporalio.contrib.langsmith._interceptor import ( + 
# ---------------------------------------------------------------------------
# Helpers
# ---------------------------------------------------------------------------

# Common patch targets (interceptor module)
_MOD = "temporalio.contrib.langsmith._interceptor"
_PATCH_RUNTREE = f"{_MOD}.RunTree"
_PATCH_IN_WORKFLOW = f"{_MOD}.temporalio.workflow.in_workflow"
_PATCH_IS_REPLAYING = f"{_MOD}.temporalio.workflow.unsafe.is_replaying_history_events"
_PATCH_WF_INFO = f"{_MOD}.temporalio.workflow.info"
_PATCH_SANDBOX = f"{_MOD}.temporalio.workflow.unsafe.sandbox_unrestricted"
_PATCH_TRACING_CTX = f"{_MOD}.tracing_context"
_PATCH_EXTRACT_NEXUS = f"{_MOD}._extract_nexus_context"
_PATCH_INJECT_NEXUS = f"{_MOD}._inject_nexus_context"
_PATCH_GET_CURRENT_RUN = f"{_MOD}.get_current_run_tree"


def _make_mock_run() -> MagicMock:
    """Mock RunTree whose to_headers() yields a usable header dict."""
    run = MagicMock()
    run.to_headers.return_value = {"langsmith-trace": "test-trace-id"}
    return run


def _mock_workflow_info(**overrides: Any) -> MagicMock:
    """Mock workflow Info object with overridable identifiers."""
    info = MagicMock()
    info.workflow_id = overrides.get("workflow_id", "test-wf-id")
    info.run_id = overrides.get("run_id", "test-run-id")
    info.workflow_type = overrides.get("workflow_type", "TestWorkflow")
    return info


def _mock_activity_info(**overrides: Any) -> MagicMock:
    """Mock activity Info object with overridable identifiers."""
    info = MagicMock()
    info.workflow_id = overrides.get("workflow_id", "test-wf-id")
    info.workflow_run_id = overrides.get("workflow_run_id", "test-run-id")
    info.activity_id = overrides.get("activity_id", "test-activity-id")
    info.activity_type = overrides.get("activity_type", "test_activity")
    return info


def _get_runtree_name(MockRunTree: MagicMock) -> str:
    """Pull the 'name' kwarg out of the single RunTree constructor call."""
    MockRunTree.assert_called_once()
    return MockRunTree.call_args.kwargs["name"]


def _get_runtree_metadata(MockRunTree: MagicMock) -> dict[str, Any]:
    """Pull metadata out of the RunTree constructor kwargs.

    The design stores metadata in the 'extra' kwarg as {"metadata": {...}}.
    """
    MockRunTree.assert_called_once()
    kwargs = MockRunTree.call_args.kwargs
    extra = kwargs.get("extra", {})
    if extra and "metadata" in extra:
        return extra["metadata"]
    # Alternatively, metadata might be passed directly
    return kwargs.get("metadata", {})


# ===================================================================
# TestContextPropagation
# ===================================================================


class TestContextPropagation:
    """Tests for _inject_context / _extract_context roundtrip."""

    @patch(_PATCH_RUNTREE)
    def test_inject_extract_roundtrip(self, MockRunTree: Any) -> None:
        """Inject a mock run tree's headers, then extract. Verify roundtrip."""
        mock_run = MagicMock()
        mock_run.to_headers.return_value = {
            "langsmith-trace": "test-trace-id",
            "parent": "abc-123",
        }

        headers: dict[str, Payload] = {}
        result = _inject_context(headers, mock_run)

        assert HEADER_KEY in result

        # Mock from_headers for extraction (real one needs valid LangSmith header format)
        mock_extracted = MagicMock()
        MockRunTree.from_headers.return_value = mock_extracted

        extracted = _extract_context(result)
        # extracted should be reconstructed from headers
        assert extracted is mock_extracted
        MockRunTree.from_headers.assert_called_once()

    def test_extract_missing_header(self) -> None:
        """When the _langsmith-context header is absent, returns None."""
        headers: dict[str, Payload] = {}
        assert _extract_context(headers) is None

    def test_inject_preserves_existing_headers(self) -> None:
        """Injecting LangSmith context does not overwrite other existing headers."""
        mock_run = MagicMock()
        mock_run.to_headers.return_value = {"langsmith-trace": "val"}

        existing_payload = Payload(data=b"existing")
        headers: dict[str, Payload] = {"my-header": existing_payload}
        result = _inject_context(headers, mock_run)

        # Pre-existing header survives untouched; new header is added.
        assert "my-header" in result
        assert result["my-header"] is existing_payload
        assert HEADER_KEY in result


# ===================================================================
# TestReplaySafety
# ===================================================================


class TestReplaySafety:
    """Tests for replay-safe tracing behavior."""

    @patch(_PATCH_RUNTREE)
    @patch(_PATCH_IS_REPLAYING, return_value=True)
    @patch(_PATCH_IN_WORKFLOW, return_value=True)
    def test_skip_trace_during_replay(
        self, mock_in_wf: Any, mock_replaying: Any, MockRunTree: Any
    ) -> None:
        """During replay, _maybe_run yields None — no RunTree created."""
        mock_client = MagicMock()
        with _maybe_run(
            mock_client,
            "TestRun",
            add_temporal_runs=True,
            in_workflow=True,
        ) as run:
            assert run is None
        # RunTree should never be instantiated during replay
        MockRunTree.assert_not_called()

    @patch(_PATCH_RUNTREE)
    @patch(_PATCH_IS_REPLAYING, return_value=False)
    @patch(_PATCH_IN_WORKFLOW, return_value=True)
    def test_create_trace_when_not_replaying(
        self, mock_in_wf: Any, mock_replaying: Any, MockRunTree: Any
    ) -> None:
        """When not replaying (but in workflow), _maybe_run creates a RunTree."""
        mock_run = _make_mock_run()
        MockRunTree.return_value = mock_run
        mock_client = MagicMock()
        with _maybe_run(
            mock_client,
            "TestRun",
            add_temporal_runs=True,
            in_workflow=True,
        ) as run:
            assert run is mock_run
        MockRunTree.assert_called_once()
        assert MockRunTree.call_args.kwargs["name"] == "TestRun"

    @patch(_PATCH_RUNTREE)
    @patch(_PATCH_IN_WORKFLOW, return_value=False)
    def test_create_trace_outside_workflow(
        self, mock_in_wf: Any, MockRunTree: Any
    ) -> None:
        """Outside workflow (client/activity), RunTree IS created. No is_replaying check."""
        mock_run = _make_mock_run()
        MockRunTree.return_value = mock_run
        mock_client = MagicMock()
        with _maybe_run(
            mock_client,
            "TestRun",
            add_temporal_runs=True,
            in_workflow=False,
        ) as run:
            assert run is mock_run
        MockRunTree.assert_called_once()


# ===================================================================
# TestErrorHandling
# ===================================================================


class TestErrorHandling:
    """Tests for _maybe_run error handling."""

    @patch(_PATCH_RUNTREE)
    @patch(_PATCH_IN_WORKFLOW, return_value=False)
    def test_exception_marks_run_errored(
        self, mock_in_wf: Any, MockRunTree: Any
    ) -> None:
        """RuntimeError marks the run as errored and re-raises."""
        mock_run = _make_mock_run()
        MockRunTree.return_value = mock_run
        mock_client = MagicMock()
        with pytest.raises(RuntimeError, match="boom"):
            with _maybe_run(
                mock_client,
                "TestRun",
                add_temporal_runs=True,
            ) as run:
                assert run is mock_run
                raise RuntimeError("boom")
        # run.end should have been called with error containing "boom"
        mock_run.end.assert_called()
        end_kwargs = mock_run.end.call_args.kwargs
        assert end_kwargs["error"] == "RuntimeError: boom"
        mock_run.patch.assert_called()

    @patch(_PATCH_RUNTREE)
    @patch(_PATCH_IN_WORKFLOW, return_value=False)
    def test_benign_application_error_not_marked(
        self, mock_in_wf: Any, MockRunTree: Any
    ) -> None:
        """Benign ApplicationError does not mark the run as errored."""
        from temporalio.exceptions import ApplicationError, ApplicationErrorCategory

        mock_run = _make_mock_run()
        MockRunTree.return_value = mock_run
        mock_client = MagicMock()
        with pytest.raises(ApplicationError):
            with _maybe_run(
                mock_client,
                "TestRun",
                add_temporal_runs=True,
            ) as run:
                assert run is mock_run
                raise ApplicationError(
                    "benign",
                    category=ApplicationErrorCategory.BENIGN,
                )
        # No end() invocation may carry an error= kwarg.
        for c in mock_run.end.call_args_list:
            assert "error" not in (c.kwargs or {})

    @patch(_PATCH_RUNTREE)
    @patch(_PATCH_IN_WORKFLOW, return_value=False)
    def test_non_benign_application_error_marked(
        self, mock_in_wf: Any, MockRunTree: Any
    ) -> None:
        """Non-benign ApplicationError marks the run as errored."""
        from temporalio.exceptions import ApplicationError

        mock_run = _make_mock_run()
        MockRunTree.return_value = mock_run
        mock_client = MagicMock()
        with pytest.raises(ApplicationError):
            with _maybe_run(
                mock_client,
                "TestRun",
                add_temporal_runs=True,
            ) as run:
                assert run is mock_run
                raise ApplicationError("bad", non_retryable=True)
        mock_run.end.assert_called()
        end_kwargs = mock_run.end.call_args.kwargs
        assert end_kwargs["error"] == "ApplicationError: bad"
        mock_run.patch.assert_called()

    @patch(_PATCH_RUNTREE)
    @patch(_PATCH_IN_WORKFLOW, return_value=False)
    def test_success_completes_normally(
        self, mock_in_wf: Any, MockRunTree: Any
    ) -> None:
        """On success, run.end(outputs={"status": "ok"}) and run.patch() are called."""
        mock_run = _make_mock_run()
        MockRunTree.return_value = mock_run
        mock_client = MagicMock()
        with _maybe_run(
            mock_client,
            "TestRun",
            add_temporal_runs=True,
        ) as run:
            assert run is mock_run
        mock_run.end.assert_called_once()
        end_kwargs = mock_run.end.call_args.kwargs
        assert end_kwargs.get("outputs") == {"status": "ok"}
        mock_run.patch.assert_called()

    @patch(_PATCH_RUNTREE)
    @patch(_PATCH_IN_WORKFLOW, return_value=False)
    def test_cancelled_error_propagates_without_marking_run(
        self, mock_in_wf: Any, MockRunTree: Any
    ) -> None:
        """CancelledError (BaseException) propagates without marking run as errored.

        _maybe_run catches Exception only, so CancelledError bypasses error marking.
        """
        mock_run = _make_mock_run()
        MockRunTree.return_value = mock_run
        mock_client = MagicMock()
        with pytest.raises(asyncio.CancelledError):
            with _maybe_run(
                mock_client,
                "TestRun",
                add_temporal_runs=True,
            ) as run:
                assert run is mock_run
                raise asyncio.CancelledError()
        # No end() invocation may carry an error= kwarg.
        for c in mock_run.end.call_args_list:
            assert "error" not in (c.kwargs or {})
+ """ + mock_run = _make_mock_run() + MockRunTree.return_value = mock_run + mock_client = MagicMock() + with pytest.raises(asyncio.CancelledError): + with _maybe_run( + mock_client, + "TestRun", + add_temporal_runs=True, + ) as run: + assert run is mock_run + raise asyncio.CancelledError() + # run.end should NOT have been called with error= + end_calls = mock_run.end.call_args_list + for c in end_calls: + assert "error" not in (c.kwargs or {}) + + +# =================================================================== +# TestClientOutboundInterceptor +# =================================================================== + + +class TestClientOutboundInterceptor: + """Tests for _LangSmithClientOutboundInterceptor.""" + + def _make_client_interceptor( + self, *, add_temporal_runs: bool = True + ) -> tuple[Any, MagicMock]: + """Create a client outbound interceptor with a mock next.""" + config = LangSmithInterceptor(client=MagicMock(), add_temporal_runs=add_temporal_runs) + mock_next = MagicMock() + mock_next.start_workflow = AsyncMock() + mock_next.query_workflow = AsyncMock() + mock_next.signal_workflow = AsyncMock() + mock_next.start_workflow_update = AsyncMock() + mock_next.start_update_with_start_workflow = AsyncMock() + interceptor = config.intercept_client(mock_next) + return interceptor, mock_next + + @pytest.mark.parametrize( + "method,input_attrs,expected_name", + [ + ("start_workflow", {"workflow": "MyWorkflow", "start_signal": None}, "StartWorkflow:MyWorkflow"), + ("start_workflow", {"workflow": "MyWorkflow", "start_signal": "my_signal"}, "SignalWithStartWorkflow:MyWorkflow"), + ("query_workflow", {"query": "get_status"}, "QueryWorkflow:get_status"), + ("signal_workflow", {"signal": "my_signal"}, "SignalWorkflow:my_signal"), + ("start_workflow_update", {"update": "my_update"}, "StartWorkflowUpdate:my_update"), + ], + ids=["start_workflow", "signal_with_start", "query", "signal", "update"], + ) + @pytest.mark.asyncio + @patch(_PATCH_RUNTREE) + async def 
test_creates_trace_and_injects_headers( + self, MockRunTree: Any, method: str, input_attrs: dict[str, Any], expected_name: str + ) -> None: + """Each client method creates the correct trace and injects headers.""" + mock_run = _make_mock_run() + MockRunTree.return_value = mock_run + interceptor, mock_next = self._make_client_interceptor() + mock_input = MagicMock() + for k, v in input_attrs.items(): + setattr(mock_input, k, v) + mock_input.headers = {} + + with patch(_PATCH_GET_CURRENT_RUN, return_value=mock_run): + await getattr(interceptor, method)(mock_input) + + assert _get_runtree_name(MockRunTree) == expected_name + assert HEADER_KEY in mock_input.headers + getattr(mock_next, method).assert_called_once() + + @pytest.mark.asyncio + @patch(_PATCH_RUNTREE) + async def test_start_update_with_start_workflow(self, MockRunTree: Any) -> None: + """start_update_with_start_workflow injects headers into BOTH start and update inputs.""" + mock_run = _make_mock_run() + MockRunTree.return_value = mock_run + interceptor, mock_next = self._make_client_interceptor() + mock_input = MagicMock() + mock_input.start_workflow_input = MagicMock() + mock_input.start_workflow_input.workflow = "MyWorkflow" + mock_input.start_workflow_input.headers = {} + mock_input.update_workflow_input = MagicMock() + mock_input.update_workflow_input.headers = {} + + with patch(_PATCH_GET_CURRENT_RUN, return_value=mock_run): + await interceptor.start_update_with_start_workflow(mock_input) + + assert ( + _get_runtree_name(MockRunTree) + == "StartUpdateWithStartWorkflow:MyWorkflow" + ) + assert HEADER_KEY in mock_input.start_workflow_input.headers + assert HEADER_KEY in mock_input.update_workflow_input.headers + mock_next.start_update_with_start_workflow.assert_called_once() + + @pytest.mark.asyncio + @patch(_PATCH_GET_CURRENT_RUN, return_value=None) + @patch(_PATCH_RUNTREE) + async def test_add_temporal_runs_false_skips_trace( + self, MockRunTree: Any, mock_get_current: Any + ) -> None: + """With 
add_temporal_runs=False and no ambient context, no run is created + and no headers are injected. + + _inject_current_context() is called unconditionally, but + get_current_run_tree() returns None so headers are unchanged. + """ + interceptor, mock_next = self._make_client_interceptor( + add_temporal_runs=False + ) + mock_input = MagicMock() + mock_input.workflow = "MyWorkflow" + mock_input.start_signal = None + mock_input.headers = {} + + await interceptor.start_workflow(mock_input) + + # RunTree should NOT be created + MockRunTree.assert_not_called() + # _inject_current_context was called but found no ambient context + mock_get_current.assert_called_once() + # Headers should NOT have been modified (no ambient context) + assert HEADER_KEY not in mock_input.headers + # super() should still be called + mock_next.start_workflow.assert_called_once() + + @pytest.mark.asyncio + @patch(_PATCH_RUNTREE) + async def test_add_temporal_runs_false_with_ambient_context( + self, MockRunTree: Any + ) -> None: + """With add_temporal_runs=False but user-provided ambient context, + no run is created but the ambient context IS injected into headers. + + This verifies that context propagation works even without plugin-created + runs — if the user wraps the call in langsmith.trace(), that context + gets propagated through Temporal headers. 
+ """ + mock_ambient_run = _make_mock_run() + interceptor, mock_next = self._make_client_interceptor( + add_temporal_runs=False + ) + mock_input = MagicMock() + mock_input.workflow = "MyWorkflow" + mock_input.start_signal = None + mock_input.headers = {} + + with patch(_PATCH_GET_CURRENT_RUN, return_value=mock_ambient_run): + await interceptor.start_workflow(mock_input) + + # RunTree should NOT be created (no Temporal run) + MockRunTree.assert_not_called() + # But headers SHOULD be injected from the ambient context + assert HEADER_KEY in mock_input.headers + mock_next.start_workflow.assert_called_once() + + +# =================================================================== +# TestActivityInboundInterceptor +# =================================================================== + + +class TestActivityInboundInterceptor: + """Tests for _LangSmithActivityInboundInterceptor.""" + + def _make_activity_interceptor( + self, *, add_temporal_runs: bool = True + ) -> tuple[Any, MagicMock]: + config = LangSmithInterceptor(client=MagicMock(), add_temporal_runs=add_temporal_runs) + mock_next = MagicMock() + mock_next.execute_activity = AsyncMock(return_value="activity_result") + interceptor = config.intercept_activity(mock_next) + return interceptor, mock_next + + @pytest.mark.asyncio + @patch(_PATCH_TRACING_CTX) + @patch(_PATCH_RUNTREE) + @patch("temporalio.activity.info") + async def test_execute_activity_creates_run_with_context_and_metadata( + self, mock_info_fn: Any, MockRunTree: Any, mock_tracing_ctx: Any + ) -> None: + """Activity execution creates a correctly named run with metadata and parent context.""" + mock_info_fn.return_value = _mock_activity_info( + activity_type="do_thing", + workflow_id="wf-123", + workflow_run_id="run-456", + activity_id="act-789", + ) + mock_run = _make_mock_run() + MockRunTree.return_value = mock_run + interceptor, mock_next = self._make_activity_interceptor() + + mock_input = MagicMock() + mock_input.headers = {} + + result = await 
interceptor.execute_activity(mock_input) + + # Verify trace name and run_type + assert _get_runtree_name(MockRunTree) == "RunActivity:do_thing" + assert MockRunTree.call_args.kwargs.get("run_type") == "tool" + # Verify metadata + metadata = _get_runtree_metadata(MockRunTree) + assert metadata["temporalWorkflowID"] == "wf-123" + assert metadata["temporalRunID"] == "run-456" + assert metadata["temporalActivityID"] == "act-789" + # Verify tracing_context sets parent + mock_tracing_ctx.assert_called() + ctx_kwargs = mock_tracing_ctx.call_args.kwargs + assert ctx_kwargs.get("parent") is mock_run + # Verify super() called and result passed through + mock_next.execute_activity.assert_called_once() + assert result == "activity_result" + + @pytest.mark.asyncio + @patch(_PATCH_RUNTREE) + @patch("temporalio.activity.info") + async def test_execute_activity_no_header( + self, mock_info_fn: Any, MockRunTree: Any + ) -> None: + """When no LangSmith header is present, activity still executes (no parent, no crash).""" + mock_info_fn.return_value = _mock_activity_info() + mock_run = _make_mock_run() + MockRunTree.return_value = mock_run + interceptor, mock_next = self._make_activity_interceptor() + + mock_input = MagicMock() + mock_input.headers = {} # No LangSmith header + + result = await interceptor.execute_activity(mock_input) + + # Should still create a run (just without a parent) + MockRunTree.assert_called_once() + assert MockRunTree.call_args.kwargs.get("parent_run") is None + assert result == "activity_result" + + +# =================================================================== +# TestWorkflowInboundInterceptor +# =================================================================== + + +class TestWorkflowInboundInterceptor: + """Tests for _LangSmithWorkflowInboundInterceptor.""" + + def _make_workflow_interceptors( + self, *, add_temporal_runs: bool = True + ) -> tuple[Any, MagicMock]: + """Create workflow inbound interceptor and a mock next.""" + config = 
LangSmithInterceptor(client=MagicMock(), add_temporal_runs=add_temporal_runs) + mock_next = MagicMock() + mock_next.execute_workflow = AsyncMock(return_value="wf_result") + mock_next.handle_signal = AsyncMock() + mock_next.handle_query = AsyncMock(return_value="query_result") + mock_next.handle_update_validator = MagicMock() + mock_next.handle_update_handler = AsyncMock(return_value="update_result") + + # Get the workflow interceptor class + wf_class_input = MagicMock() + wf_interceptor_cls = config.workflow_interceptor_class(wf_class_input) + assert wf_interceptor_cls is not None + + # Instantiate with mock next + wf_interceptor = wf_interceptor_cls(mock_next) + + # Initialize with mock outbound + mock_outbound = MagicMock() + wf_interceptor.init(mock_outbound) + + return wf_interceptor, mock_next + + @pytest.mark.asyncio + @patch(_PATCH_SANDBOX) + @patch(_PATCH_RUNTREE) + @patch(_PATCH_IS_REPLAYING, return_value=False) + @patch(_PATCH_IN_WORKFLOW, return_value=True) + @patch(_PATCH_WF_INFO) + async def test_execute_workflow( + self, + mock_wf_info: Any, + mock_in_wf: Any, + mock_replaying: Any, + MockRunTree: Any, + mock_sandbox: Any, + ) -> None: + """execute_workflow creates a run named RunWorkflow:{workflow_type}.""" + mock_wf_info.return_value = _mock_workflow_info(workflow_type="MyWorkflow") + mock_run = _make_mock_run() + MockRunTree.return_value = mock_run + interceptor, mock_next = self._make_workflow_interceptors() + + mock_input = MagicMock() + mock_input.headers = {} + + result = await interceptor.execute_workflow(mock_input) + + # Verify trace name + assert _get_runtree_name(MockRunTree) == "RunWorkflow:MyWorkflow" + # Verify metadata includes workflow ID and run ID + metadata = _get_runtree_metadata(MockRunTree) + assert metadata == { + "temporalWorkflowID": "test-wf-id", + "temporalRunID": "test-run-id", + } + # Verify sandbox_unrestricted was called (for post/patch inside workflow) + mock_sandbox.assert_called() + # Verify super() called and result 
passed through + mock_next.execute_workflow.assert_called_once() + assert result == "wf_result" + + @pytest.mark.parametrize( + "method,input_attr,input_val,expected_name", + [ + ("handle_signal", "signal", "my_signal", "HandleSignal:my_signal"), + ("handle_query", "query", "get_status", "HandleQuery:get_status"), + ("handle_update_validator", "update", "my_update", "ValidateUpdate:my_update"), + ("handle_update_handler", "update", "my_update", "HandleUpdate:my_update"), + ], + ids=["signal", "query", "validator", "update_handler"], + ) + @pytest.mark.asyncio + @patch(_PATCH_SANDBOX) + @patch(_PATCH_RUNTREE) + @patch(_PATCH_IS_REPLAYING, return_value=False) + @patch(_PATCH_IN_WORKFLOW, return_value=True) + @patch(_PATCH_WF_INFO) + async def test_handler_creates_trace( + self, + mock_wf_info: Any, + mock_in_wf: Any, + mock_replaying: Any, + MockRunTree: Any, + mock_sandbox: Any, + method: str, + input_attr: str, + input_val: str, + expected_name: str, + ) -> None: + """Each workflow handler creates the correct trace name.""" + mock_wf_info.return_value = _mock_workflow_info() + mock_run = _make_mock_run() + MockRunTree.return_value = mock_run + interceptor, mock_next = self._make_workflow_interceptors() + + mock_input = MagicMock() + setattr(mock_input, input_attr, input_val) + mock_input.headers = {} + + result = getattr(interceptor, method)(mock_input) + if asyncio.iscoroutine(result): + await result + + assert _get_runtree_name(MockRunTree) == expected_name + mock_sandbox.assert_called() + getattr(mock_next, method).assert_called_once() + + +# =================================================================== +# TestWorkflowOutboundInterceptor +# =================================================================== + + +class TestWorkflowOutboundInterceptor: + """Tests for _LangSmithWorkflowOutboundInterceptor.""" + + def _make_outbound_interceptor( + self, *, add_temporal_runs: bool = True + ) -> tuple[Any, MagicMock, Any]: + """Create outbound interceptor with 
mock next and inbound reference. + + Returns (outbound_interceptor, mock_next, inbound_interceptor). + """ + config = LangSmithInterceptor(client=MagicMock(), add_temporal_runs=add_temporal_runs) + + # Create mock next for inbound + mock_inbound_next = MagicMock() + mock_inbound_next.execute_workflow = AsyncMock() + mock_inbound_next.handle_signal = AsyncMock() + mock_inbound_next.handle_query = AsyncMock() + mock_inbound_next.handle_update_validator = MagicMock() + mock_inbound_next.handle_update_handler = AsyncMock() + + # Create inbound interceptor + wf_class_input = MagicMock() + wf_interceptor_cls = config.workflow_interceptor_class(wf_class_input) + inbound = wf_interceptor_cls(mock_inbound_next) + + # Create mock outbound next + mock_outbound_next = MagicMock() + mock_outbound_next.start_activity = MagicMock() + mock_outbound_next.start_local_activity = MagicMock() + mock_outbound_next.start_child_workflow = AsyncMock() + mock_outbound_next.signal_child_workflow = AsyncMock() + mock_outbound_next.signal_external_workflow = AsyncMock() + mock_outbound_next.continue_as_new = MagicMock() + mock_outbound_next.start_nexus_operation = AsyncMock() + + # Initialize inbound (which should create the outbound) + inbound.init(mock_outbound_next) + + # Create the outbound directly for unit testing + from temporalio.contrib.langsmith._interceptor import ( + _LangSmithWorkflowOutboundInterceptor, + ) + + outbound = _LangSmithWorkflowOutboundInterceptor( + mock_outbound_next, config, inbound + ) + + # Set a current run on the inbound to simulate active workflow execution + mock_current_run = _make_mock_run() + inbound._current_run = mock_current_run + + return outbound, mock_outbound_next, inbound + + @pytest.mark.parametrize( + "method,input_attr,input_val,expected_name", + [ + ("start_activity", "activity", "do_thing", "StartActivity:do_thing"), + ("start_local_activity", "activity", "local_thing", "StartActivity:local_thing"), + ("start_child_workflow", "workflow", 
"ChildWorkflow", "StartChildWorkflow:ChildWorkflow"), + ("signal_child_workflow", "signal", "child_signal", "SignalChildWorkflow:child_signal"), + ("signal_external_workflow", "signal", "ext_signal", "SignalExternalWorkflow:ext_signal"), + ], + ids=["activity", "local_activity", "child_workflow", "signal_child", "signal_external"], + ) + @pytest.mark.asyncio + @patch(_PATCH_SANDBOX) + @patch(_PATCH_RUNTREE) + @patch(_PATCH_IS_REPLAYING, return_value=False) + @patch(_PATCH_IN_WORKFLOW, return_value=True) + async def test_creates_trace_and_injects_headers( + self, + mock_in_wf: Any, + mock_replaying: Any, + MockRunTree: Any, + mock_sandbox: Any, + method: str, + input_attr: str, + input_val: str, + expected_name: str, + ) -> None: + """Each outbound method creates the correct trace and injects headers.""" + mock_run = _make_mock_run() + MockRunTree.return_value = mock_run + outbound, mock_next, _ = self._make_outbound_interceptor() + + mock_input = MagicMock() + setattr(mock_input, input_attr, input_val) + mock_input.headers = {} + + result = getattr(outbound, method)(mock_input) + if asyncio.iscoroutine(result): + await result + + assert _get_runtree_name(MockRunTree) == expected_name + assert HEADER_KEY in mock_input.headers + mock_sandbox.assert_called() + getattr(mock_next, method).assert_called_once() + + @patch(_PATCH_RUNTREE) + @patch(_PATCH_IS_REPLAYING, return_value=False) + @patch(_PATCH_IN_WORKFLOW, return_value=True) + def test_continue_as_new( + self, mock_in_wf: Any, mock_replaying: Any, MockRunTree: Any + ) -> None: + """continue_as_new does NOT create a new trace, but injects context from current run.""" + outbound, mock_next, inbound = self._make_outbound_interceptor() + + mock_input = MagicMock() + mock_input.headers = {} + + outbound.continue_as_new(mock_input) + + # No new RunTree should be created for continue_as_new + MockRunTree.assert_not_called() + # But headers SHOULD be modified (context from inbound's _current_run) + assert HEADER_KEY in 
mock_input.headers + mock_next.continue_as_new.assert_called_once() + + @pytest.mark.asyncio + @patch(_PATCH_SANDBOX) + @patch(_PATCH_RUNTREE) + @patch(_PATCH_IS_REPLAYING, return_value=False) + @patch(_PATCH_IN_WORKFLOW, return_value=True) + async def test_start_nexus_operation( + self, + mock_in_wf: Any, + mock_replaying: Any, + MockRunTree: Any, + mock_sandbox: Any, + ) -> None: + """start_nexus_operation creates a trace named StartNexusOperation:{service}/{operation}.""" + mock_run = _make_mock_run() + MockRunTree.return_value = mock_run + outbound, mock_next, _ = self._make_outbound_interceptor() + + mock_input = MagicMock() + mock_input.service = "MyService" + mock_input.operation_name = "do_op" + mock_input.headers = {} + + await outbound.start_nexus_operation(mock_input) + + assert _get_runtree_name(MockRunTree) == "StartNexusOperation:MyService/do_op" + # Nexus uses string headers, so context injection uses _inject_nexus_context + # The headers dict should be modified + mock_sandbox.assert_called() + mock_next.start_nexus_operation.assert_called_once() + + +# =================================================================== +# TestNexusInboundInterceptor +# =================================================================== + + +class TestNexusInboundInterceptor: + """Tests for _LangSmithNexusOperationInboundInterceptor.""" + + def _make_nexus_interceptor( + self, *, add_temporal_runs: bool = True + ) -> tuple[Any, MagicMock]: + config = LangSmithInterceptor(client=MagicMock(), add_temporal_runs=add_temporal_runs) + mock_next = MagicMock() + mock_next.execute_nexus_operation_start = AsyncMock() + mock_next.execute_nexus_operation_cancel = AsyncMock() + interceptor = config.intercept_nexus_operation(mock_next) + return interceptor, mock_next + + @pytest.mark.asyncio + @patch(_PATCH_RUNTREE) + @patch(_PATCH_EXTRACT_NEXUS) + async def test_execute_nexus_operation_start( + self, mock_extract_nexus: Any, MockRunTree: Any + ) -> None: + """Creates a run named 
RunStartNexusOperationHandler:{service}/{operation}. + + Uses _extract_nexus_context (not _extract_context) for Nexus string headers. + """ + mock_extract_nexus.return_value = None # no parent + mock_run = _make_mock_run() + MockRunTree.return_value = mock_run + interceptor, mock_next = self._make_nexus_interceptor() + + mock_input = MagicMock() + mock_input.ctx = MagicMock() + mock_input.ctx.service = "MyService" + mock_input.ctx.operation = "start_op" + mock_input.ctx.headers = {} + + await interceptor.execute_nexus_operation_start(mock_input) + + # Verify _extract_nexus_context was called (not _extract_context) + mock_extract_nexus.assert_called_once_with(mock_input.ctx.headers) + # Verify trace name + assert ( + _get_runtree_name(MockRunTree) + == "RunStartNexusOperationHandler:MyService/start_op" + ) + # Verify run_type is "tool" for Nexus operations + assert MockRunTree.call_args.kwargs.get("run_type") == "tool" + mock_next.execute_nexus_operation_start.assert_called_once() + + @pytest.mark.asyncio + @patch(_PATCH_RUNTREE) + @patch(_PATCH_EXTRACT_NEXUS) + async def test_execute_nexus_operation_cancel( + self, mock_extract_nexus: Any, MockRunTree: Any + ) -> None: + """Creates a run named RunCancelNexusOperationHandler:{service}/{operation}. + + Uses _extract_nexus_context for context extraction. 
+ """ + mock_extract_nexus.return_value = None + mock_run = _make_mock_run() + MockRunTree.return_value = mock_run + interceptor, mock_next = self._make_nexus_interceptor() + + mock_input = MagicMock() + mock_input.ctx = MagicMock() + mock_input.ctx.service = "MyService" + mock_input.ctx.operation = "cancel_op" + mock_input.ctx.headers = {} + + await interceptor.execute_nexus_operation_cancel(mock_input) + + mock_extract_nexus.assert_called_once_with(mock_input.ctx.headers) + assert ( + _get_runtree_name(MockRunTree) + == "RunCancelNexusOperationHandler:MyService/cancel_op" + ) + assert MockRunTree.call_args.kwargs.get("run_type") == "tool" + mock_next.execute_nexus_operation_cancel.assert_called_once() + + +# =================================================================== +# TestLazyClientPrevention +# =================================================================== + + +class TestLazyClientPrevention: + """Tests that RunTree always receives ls_client= to prevent lazy Client creation.""" + + @patch(_PATCH_IN_WORKFLOW, return_value=False) + @patch(_PATCH_RUNTREE) + def test_runtree_always_receives_ls_client( + self, MockRunTree: Any, mock_in_wf: Any + ) -> None: + """Every RunTree() created by _maybe_run receives ls_client= (pre-created client).""" + mock_client = MagicMock() + mock_run = _make_mock_run() + MockRunTree.return_value = mock_run + + with _maybe_run( + mock_client, + "TestRun", + add_temporal_runs=True, + ): + pass + + MockRunTree.assert_called_once() + call_kwargs = MockRunTree.call_args.kwargs + assert "ls_client" in call_kwargs + assert call_kwargs["ls_client"] is mock_client + + +# =================================================================== +# TestAddTemporalRunsToggle +# =================================================================== + + +class TestAddTemporalRunsToggle: + """Tests for the add_temporal_runs toggle.""" + + @patch(_PATCH_RUNTREE) + @patch(_PATCH_IN_WORKFLOW, return_value=False) + def test_false_skips_traces(self, 
mock_in_wf: Any, MockRunTree: Any) -> None: + """With add_temporal_runs=False, _maybe_run yields None (no run created). + + Callers are responsible for propagating context even when the run is None. + See test_false_still_propagates_context for the full behavior. + """ + mock_client = MagicMock() + with _maybe_run( + mock_client, + "TestRun", + add_temporal_runs=False, + ) as run: + assert run is None + MockRunTree.assert_not_called() + + @pytest.mark.asyncio + @patch(_PATCH_TRACING_CTX) + @patch(_PATCH_SANDBOX) + @patch(_PATCH_RUNTREE) + @patch(_PATCH_IS_REPLAYING, return_value=False) + @patch(_PATCH_IN_WORKFLOW, return_value=True) + @patch(_PATCH_WF_INFO) + @patch(f"{_MOD}.temporalio.activity.info") + async def test_false_still_propagates_context( + self, + mock_act_info: Any, + mock_wf_info: Any, + mock_in_wf: Any, + mock_replaying: Any, + MockRunTree: Any, + mock_sandbox: Any, + mock_tracing_ctx: Any, + ) -> None: + """With add_temporal_runs=False, no runs are created but context still propagates. + + 1. Workflow outbound: injects the inbound's _current_run (parent fallback) + into headers even though no StartActivity run is created. + 2. Activity inbound: sets tracing_context(parent=extracted_parent) + unconditionally (before _maybe_run), so @traceable code nests correctly + even without a RunActivity run. 
+ """ + from temporalio.contrib.langsmith._interceptor import ( + _LangSmithWorkflowOutboundInterceptor, + ) + + mock_wf_info.return_value = _mock_workflow_info() + mock_act_info.return_value = _mock_activity_info() + + # --- Workflow outbound: context propagation without run creation --- + config = LangSmithInterceptor(client=MagicMock(), add_temporal_runs=False) + + # Create inbound interceptor + wf_class_input = MagicMock() + wf_interceptor_cls = config.workflow_interceptor_class(wf_class_input) + mock_inbound_next = MagicMock() + mock_inbound_next.execute_workflow = AsyncMock() + inbound = wf_interceptor_cls(mock_inbound_next) + + # Create outbound interceptor + mock_outbound_next = MagicMock() + mock_outbound_next.start_activity = MagicMock() + inbound.init(mock_outbound_next) + outbound = _LangSmithWorkflowOutboundInterceptor( + mock_outbound_next, config, inbound + ) + + # Simulate an inbound parent context (as if extracted from headers) + mock_parent = _make_mock_run() + inbound._current_run = mock_parent + + mock_input = MagicMock() + mock_input.activity = "do_thing" + mock_input.headers = {} + + outbound.start_activity(mock_input) + + # No RunTree should be created (add_temporal_runs=False) + MockRunTree.assert_not_called() + # But headers SHOULD be injected from the inbound's parent context + assert HEADER_KEY in mock_input.headers + mock_outbound_next.start_activity.assert_called_once() + + # --- Activity inbound: tracing_context with extracted parent --- + MockRunTree.reset_mock() + mock_tracing_ctx.reset_mock() + + mock_act_next = MagicMock() + mock_act_next.execute_activity = AsyncMock(return_value="result") + act_interceptor = config.intercept_activity(mock_act_next) + + mock_act_input = MagicMock() + mock_extracted_parent = _make_mock_run() + + with patch( + f"{_MOD}._extract_context", return_value=mock_extracted_parent + ): + await act_interceptor.execute_activity(mock_act_input) + + # No RunTree should be created (add_temporal_runs=False) + 
MockRunTree.assert_not_called() + # tracing_context SHOULD be called with the client and extracted parent + # (unconditionally, before _maybe_run) + mock_tracing_ctx.assert_called_once_with( + client=config._client, enabled=True, parent=mock_extracted_parent + ) + mock_act_next.execute_activity.assert_called_once() + + @pytest.mark.asyncio + @patch(_PATCH_TRACING_CTX) + @patch(_PATCH_RUNTREE) + @patch(f"{_MOD}.temporalio.activity.info") + async def test_false_activity_no_parent_no_context( + self, + mock_act_info: Any, + MockRunTree: Any, + mock_tracing_ctx: Any, + ) -> None: + """With add_temporal_runs=False and no parent in headers, tracing_context + is still called with the client (so @traceable can use it), but no parent. + """ + mock_act_info.return_value = _mock_activity_info() + config = LangSmithInterceptor(client=MagicMock(), add_temporal_runs=False) + + mock_act_next = MagicMock() + mock_act_next.execute_activity = AsyncMock(return_value="result") + act_interceptor = config.intercept_activity(mock_act_next) + + mock_act_input = MagicMock() + + with patch(f"{_MOD}._extract_context", return_value=None): + await act_interceptor.execute_activity(mock_act_input) + + MockRunTree.assert_not_called() + # tracing_context called with client and enabled (no parent) + mock_tracing_ctx.assert_called_once_with(client=config._client, enabled=True) + mock_act_next.execute_activity.assert_called_once() diff --git a/tests/contrib/langsmith/test_plugin.py b/tests/contrib/langsmith/test_plugin.py new file mode 100644 index 000000000..90b3499c1 --- /dev/null +++ b/tests/contrib/langsmith/test_plugin.py @@ -0,0 +1,113 @@ +"""Tests for LangSmithPlugin construction, configuration, and end-to-end usage.""" + +from __future__ import annotations + +import uuid +from unittest.mock import MagicMock + +from langsmith import traceable, tracing_context + +from temporalio.client import Client +from temporalio.contrib.langsmith import LangSmithInterceptor, LangSmithPlugin +from 
temporalio.testing import WorkflowEnvironment +from tests.contrib.langsmith.conftest import dump_runs +from tests.contrib.langsmith.test_integration import ( + ComprehensiveWorkflow, + TraceableActivityWorkflow, + _make_client_and_collector, + nested_traceable_activity, + traceable_activity, +) +from tests.helpers import new_worker + + +class TestPluginConstruction: + """Tests for LangSmithPlugin construction.""" + + def test_construction_stores_all_config(self) -> None: + """All constructor kwargs are stored on the interceptor.""" + mock_client = MagicMock() + plugin = LangSmithPlugin( + client=mock_client, + project_name="my-project", + add_temporal_runs=False, + metadata={"env": "prod"}, + tags=["v1"], + ) + assert len(plugin.interceptors) > 0 + interceptor = plugin.interceptors[0] + assert isinstance(interceptor, LangSmithInterceptor) + assert interceptor._client is mock_client + assert interceptor._project_name == "my-project" + assert interceptor._add_temporal_runs is False + assert interceptor._default_metadata == {"env": "prod"} + assert interceptor._default_tags == ["v1"] + + + +class TestPluginIntegration: + """End-to-end test using LangSmithPlugin as a Temporal client plugin.""" + + async def test_comprehensive_plugin_trace_hierarchy( + self, client: Client, env: WorkflowEnvironment + ) -> None: + """Plugin wired to a real Temporal worker produces the full trace hierarchy.""" + temporal_client, collector, mock_ls_client = _make_client_and_collector(client) + + @traceable(name="user_pipeline") + async def user_pipeline() -> str: + async with new_worker( + temporal_client, + ComprehensiveWorkflow, + TraceableActivityWorkflow, + activities=[nested_traceable_activity, traceable_activity], + ) as worker: + handle = await temporal_client.start_workflow( + ComprehensiveWorkflow.run, + id=f"plugin-comprehensive-{uuid.uuid4()}", + task_queue=worker.task_queue, + ) + # Query + await handle.query(ComprehensiveWorkflow.my_query) + # Signal + await 
handle.signal(ComprehensiveWorkflow.my_signal, "hello") + # Update (completes the workflow) + await handle.execute_update( + ComprehensiveWorkflow.my_update, "finish" + ) + return await handle.result() + + with tracing_context(client=mock_ls_client, enabled=True): + result = await user_pipeline() + + assert result == "comprehensive-done" + + hierarchy = dump_runs(collector) + expected = [ + "user_pipeline", + " StartWorkflow:ComprehensiveWorkflow", + " RunWorkflow:ComprehensiveWorkflow", + " StartActivity:nested_traceable_activity", + " RunActivity:nested_traceable_activity", + " outer_chain", + " inner_llm_call", + " StartActivity:nested_traceable_activity", + " RunActivity:nested_traceable_activity", + " outer_chain", + " inner_llm_call", + " StartChildWorkflow:TraceableActivityWorkflow", + " RunWorkflow:TraceableActivityWorkflow", + " StartActivity:traceable_activity", + " RunActivity:traceable_activity", + " inner_llm_call", + " QueryWorkflow:my_query", + " HandleQuery:my_query", + " SignalWorkflow:my_signal", + " HandleSignal:my_signal", + " StartWorkflowUpdate:my_update", + " ValidateUpdate:my_update", + " HandleUpdate:my_update", + ] + assert hierarchy == expected, ( + f"Hierarchy mismatch.\nExpected:\n{expected}\nActual:\n{hierarchy}" + ) diff --git a/uv.lock b/uv.lock index c63faefad..fd93c5d59 100644 --- a/uv.lock +++ b/uv.lock @@ -2436,6 +2436,26 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/41/a0/b91504515c1f9a299fc157967ffbd2f0321bce0516a3d5b89f6f4cad0355/lazy_object_proxy-1.12.0-pp39.pp310.pp311.graalpy311-none-any.whl", hash = "sha256:c3b2e0af1f7f77c4263759c4824316ce458fabe0fceadcd24ef8ca08b2d1e402", size = 15072, upload-time = "2025-08-22T13:50:05.498Z" }, ] +[[package]] +name = "langsmith" +version = "0.7.17" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "httpx" }, + { name = "orjson", marker = "platform_python_implementation != 'PyPy'" }, + { name = "packaging" }, + { name = "pydantic" }, + { name 
= "requests" }, + { name = "requests-toolbelt" }, + { name = "uuid-utils" }, + { name = "xxhash" }, + { name = "zstandard" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/71/79/81041dde07a974e728db7def23c1c7255950b8874102925cc77093bc847d/langsmith-0.7.17.tar.gz", hash = "sha256:6c1b0c2863cdd6636d2a58b8d5b1b80060703d98cac2593f4233e09ac25b5a9d", size = 1132228, upload-time = "2026-03-12T20:41:10.808Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/34/31/62689d57f4d25792bd6a3c05c868771899481be2f3e31f9e71d31e1ac4ab/langsmith-0.7.17-py3-none-any.whl", hash = "sha256:cbec10460cb6c6ecc94c18c807be88a9984838144ae6c4693c9f859f378d7d02", size = 359147, upload-time = "2026-03-12T20:41:08.758Z" }, +] + [[package]] name = "litellm" version = "1.78.0" @@ -3466,6 +3486,87 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/07/90/68152b7465f50285d3ce2481b3aec2f82822e3f52e5152eeeaf516bab841/opentelemetry_semantic_conventions-0.58b0-py3-none-any.whl", hash = "sha256:5564905ab1458b96684db1340232729fce3b5375a06e140e8904c78e4f815b28", size = 207954, upload-time = "2025-09-11T10:28:59.218Z" }, ] +[[package]] +name = "orjson" +version = "3.11.7" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/53/45/b268004f745ede84e5798b48ee12b05129d19235d0e15267aa57dcdb400b/orjson-3.11.7.tar.gz", hash = "sha256:9b1a67243945819ce55d24a30b59d6a168e86220452d2c96f4d1f093e71c0c49", size = 6144992, upload-time = "2026-02-02T15:38:49.29Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/de/1a/a373746fa6d0e116dd9e54371a7b54622c44d12296d5d0f3ad5e3ff33490/orjson-3.11.7-cp310-cp310-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:a02c833f38f36546ba65a452127633afce4cf0dd7296b753d3bb54e55e5c0174", size = 229140, upload-time = "2026-02-02T15:37:06.082Z" }, + { url = 
"https://files.pythonhosted.org/packages/52/a2/fa129e749d500f9b183e8a3446a193818a25f60261e9ce143ad61e975208/orjson-3.11.7-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b63c6e6738d7c3470ad01601e23376aa511e50e1f3931395b9f9c722406d1a67", size = 128670, upload-time = "2026-02-02T15:37:08.002Z" }, + { url = "https://files.pythonhosted.org/packages/08/93/1e82011cd1e0bd051ef9d35bed1aa7fb4ea1f0a055dc2c841b46b43a9ebd/orjson-3.11.7-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:043d3006b7d32c7e233b8cfb1f01c651013ea079e08dcef7189a29abd8befe11", size = 123832, upload-time = "2026-02-02T15:37:09.191Z" }, + { url = "https://files.pythonhosted.org/packages/fe/d8/a26b431ef962c7d55736674dddade876822f3e33223c1f47a36879350d04/orjson-3.11.7-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:57036b27ac8a25d81112eb0cc9835cd4833c5b16e1467816adc0015f59e870dc", size = 129171, upload-time = "2026-02-02T15:37:11.112Z" }, + { url = "https://files.pythonhosted.org/packages/a7/19/f47819b84a580f490da260c3ee9ade214cf4cf78ac9ce8c1c758f80fdfc9/orjson-3.11.7-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:733ae23ada68b804b222c44affed76b39e30806d38660bf1eb200520d259cc16", size = 141967, upload-time = "2026-02-02T15:37:12.282Z" }, + { url = "https://files.pythonhosted.org/packages/5b/cd/37ece39a0777ba077fdcdbe4cccae3be8ed00290c14bf8afdc548befc260/orjson-3.11.7-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5fdfad2093bdd08245f2e204d977facd5f871c88c4a71230d5bcbd0e43bf6222", size = 130991, upload-time = "2026-02-02T15:37:13.465Z" }, + { url = "https://files.pythonhosted.org/packages/8f/ed/f2b5d66aa9b6b5c02ff5f120efc7b38c7c4962b21e6be0f00fd99a5c348e/orjson-3.11.7-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cededd6738e1c153530793998e31c05086582b08315db48ab66649768f326baa", size = 133674, upload-time = "2026-02-02T15:37:14.694Z" }, + { url 
= "https://files.pythonhosted.org/packages/c4/6e/baa83e68d1aa09fa8c3e5b2c087d01d0a0bd45256de719ed7bc22c07052d/orjson-3.11.7-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:14f440c7268c8f8633d1b3d443a434bd70cb15686117ea6beff8fdc8f5917a1e", size = 138722, upload-time = "2026-02-02T15:37:16.501Z" }, + { url = "https://files.pythonhosted.org/packages/0c/47/7f8ef4963b772cd56999b535e553f7eb5cd27e9dd6c049baee6f18bfa05d/orjson-3.11.7-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:3a2479753bbb95b0ebcf7969f562cdb9668e6d12416a35b0dda79febf89cdea2", size = 409056, upload-time = "2026-02-02T15:37:17.895Z" }, + { url = "https://files.pythonhosted.org/packages/38/eb/2df104dd2244b3618f25325a656f85cc3277f74bbd91224752410a78f3c7/orjson-3.11.7-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:71924496986275a737f38e3f22b4e0878882b3f7a310d2ff4dc96e812789120c", size = 144196, upload-time = "2026-02-02T15:37:19.349Z" }, + { url = "https://files.pythonhosted.org/packages/b6/2a/ee41de0aa3a6686598661eae2b4ebdff1340c65bfb17fcff8b87138aab21/orjson-3.11.7-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:b4a9eefdc70bf8bf9857f0290f973dec534ac84c35cd6a7f4083be43e7170a8f", size = 134979, upload-time = "2026-02-02T15:37:20.906Z" }, + { url = "https://files.pythonhosted.org/packages/4c/fa/92fc5d3d402b87a8b28277a9ed35386218a6a5287c7fe5ee9b9f02c53fb2/orjson-3.11.7-cp310-cp310-win32.whl", hash = "sha256:ae9e0b37a834cef7ce8f99de6498f8fad4a2c0bf6bfc3d02abd8ed56aa15b2de", size = 127968, upload-time = "2026-02-02T15:37:23.178Z" }, + { url = "https://files.pythonhosted.org/packages/07/29/a576bf36d73d60df06904d3844a9df08e25d59eba64363aaf8ec2f9bff41/orjson-3.11.7-cp310-cp310-win_amd64.whl", hash = "sha256:d772afdb22555f0c58cfc741bdae44180122b3616faa1ecadb595cd526e4c993", size = 125128, upload-time = "2026-02-02T15:37:24.329Z" }, + { url = 
"https://files.pythonhosted.org/packages/37/02/da6cb01fc6087048d7f61522c327edf4250f1683a58a839fdcc435746dd5/orjson-3.11.7-cp311-cp311-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:9487abc2c2086e7c8eb9a211d2ce8855bae0e92586279d0d27b341d5ad76c85c", size = 228664, upload-time = "2026-02-02T15:37:25.542Z" }, + { url = "https://files.pythonhosted.org/packages/c1/c2/5885e7a5881dba9a9af51bc564e8967225a642b3e03d089289a35054e749/orjson-3.11.7-cp311-cp311-macosx_15_0_arm64.whl", hash = "sha256:79cacb0b52f6004caf92405a7e1f11e6e2de8bdf9019e4f76b44ba045125cd6b", size = 125344, upload-time = "2026-02-02T15:37:26.92Z" }, + { url = "https://files.pythonhosted.org/packages/a4/1d/4e7688de0a92d1caf600dfd5fb70b4c5bfff51dfa61ac555072ef2d0d32a/orjson-3.11.7-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c2e85fe4698b6a56d5e2ebf7ae87544d668eb6bde1ad1226c13f44663f20ec9e", size = 128404, upload-time = "2026-02-02T15:37:28.108Z" }, + { url = "https://files.pythonhosted.org/packages/2f/b2/ec04b74ae03a125db7bd69cffd014b227b7f341e3261bf75b5eb88a1aa92/orjson-3.11.7-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b8d14b71c0b12963fe8a62aac87119f1afdf4cb88a400f61ca5ae581449efcb5", size = 123677, upload-time = "2026-02-02T15:37:30.287Z" }, + { url = "https://files.pythonhosted.org/packages/4c/69/f95bdf960605f08f827f6e3291fe243d8aa9c5c9ff017a8d7232209184c3/orjson-3.11.7-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:91c81ef070c8f3220054115e1ef468b1c9ce8497b4e526cb9f68ab4dc0a7ac62", size = 128950, upload-time = "2026-02-02T15:37:31.595Z" }, + { url = "https://files.pythonhosted.org/packages/a4/1b/de59c57bae1d148ef298852abd31909ac3089cff370dfd4cd84cc99cbc42/orjson-3.11.7-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:411ebaf34d735e25e358a6d9e7978954a9c9d58cfb47bc6683cdc3964cd2f910", size = 141756, upload-time = "2026-02-02T15:37:32.985Z" }, + { url = 
"https://files.pythonhosted.org/packages/ee/9e/9decc59f4499f695f65c650f6cfa6cd4c37a3fbe8fa235a0a3614cb54386/orjson-3.11.7-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a16bcd08ab0bcdfc7e8801d9c4a9cc17e58418e4d48ddc6ded4e9e4b1a94062b", size = 130812, upload-time = "2026-02-02T15:37:34.204Z" }, + { url = "https://files.pythonhosted.org/packages/28/e6/59f932bcabd1eac44e334fe8e3281a92eacfcb450586e1f4bde0423728d8/orjson-3.11.7-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9c0b51672e466fd7e56230ffbae7f1639e18d0ce023351fb75da21b71bc2c960", size = 133444, upload-time = "2026-02-02T15:37:35.446Z" }, + { url = "https://files.pythonhosted.org/packages/f1/36/b0f05c0eaa7ca30bc965e37e6a2956b0d67adb87a9872942d3568da846ae/orjson-3.11.7-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:136dcd6a2e796dfd9ffca9fc027d778567b0b7c9968d092842d3c323cef88aa8", size = 138609, upload-time = "2026-02-02T15:37:36.657Z" }, + { url = "https://files.pythonhosted.org/packages/b8/03/58ec7d302b8d86944c60c7b4b82975d5161fcce4c9bc8c6cb1d6741b6115/orjson-3.11.7-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:7ba61079379b0ae29e117db13bda5f28d939766e410d321ec1624afc6a0b0504", size = 408918, upload-time = "2026-02-02T15:37:38.076Z" }, + { url = "https://files.pythonhosted.org/packages/06/3a/868d65ef9a8b99be723bd510de491349618abd9f62c826cf206d962db295/orjson-3.11.7-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:0527a4510c300e3b406591b0ba69b5dc50031895b0a93743526a3fc45f59d26e", size = 143998, upload-time = "2026-02-02T15:37:39.706Z" }, + { url = "https://files.pythonhosted.org/packages/5b/c7/1e18e1c83afe3349f4f6dc9e14910f0ae5f82eac756d1412ea4018938535/orjson-3.11.7-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:a709e881723c9b18acddcfb8ba357322491ad553e277cf467e1e7e20e2d90561", size = 134802, upload-time = "2026-02-02T15:37:41.002Z" }, + { url = 
"https://files.pythonhosted.org/packages/d4/0b/ccb7ee1a65b37e8eeb8b267dc953561d72370e85185e459616d4345bab34/orjson-3.11.7-cp311-cp311-win32.whl", hash = "sha256:c43b8b5bab288b6b90dac410cca7e986a4fa747a2e8f94615aea407da706980d", size = 127828, upload-time = "2026-02-02T15:37:42.241Z" }, + { url = "https://files.pythonhosted.org/packages/af/9e/55c776dffda3f381e0f07d010a4f5f3902bf48eaba1bb7684d301acd4924/orjson-3.11.7-cp311-cp311-win_amd64.whl", hash = "sha256:6543001328aa857187f905308a028935864aefe9968af3848401b6fe80dbb471", size = 124941, upload-time = "2026-02-02T15:37:43.444Z" }, + { url = "https://files.pythonhosted.org/packages/aa/8e/424a620fa7d263b880162505fb107ef5e0afaa765b5b06a88312ac291560/orjson-3.11.7-cp311-cp311-win_arm64.whl", hash = "sha256:1ee5cc7160a821dfe14f130bc8e63e7611051f964b463d9e2a3a573204446a4d", size = 126245, upload-time = "2026-02-02T15:37:45.18Z" }, + { url = "https://files.pythonhosted.org/packages/80/bf/76f4f1665f6983385938f0e2a5d7efa12a58171b8456c252f3bae8a4cf75/orjson-3.11.7-cp312-cp312-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:bd03ea7606833655048dab1a00734a2875e3e86c276e1d772b2a02556f0d895f", size = 228545, upload-time = "2026-02-02T15:37:46.376Z" }, + { url = "https://files.pythonhosted.org/packages/79/53/6c72c002cb13b5a978a068add59b25a8bdf2800ac1c9c8ecdb26d6d97064/orjson-3.11.7-cp312-cp312-macosx_15_0_arm64.whl", hash = "sha256:89e440ebc74ce8ab5c7bc4ce6757b4a6b1041becb127df818f6997b5c71aa60b", size = 125224, upload-time = "2026-02-02T15:37:47.697Z" }, + { url = "https://files.pythonhosted.org/packages/2c/83/10e48852865e5dd151bdfe652c06f7da484578ed02c5fca938e3632cb0b8/orjson-3.11.7-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5ede977b5fe5ac91b1dffc0a517ca4542d2ec8a6a4ff7b2652d94f640796342a", size = 128154, upload-time = "2026-02-02T15:37:48.954Z" }, + { url = 
"https://files.pythonhosted.org/packages/6e/52/a66e22a2b9abaa374b4a081d410edab6d1e30024707b87eab7c734afe28d/orjson-3.11.7-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b7b1dae39230a393df353827c855a5f176271c23434cfd2db74e0e424e693e10", size = 123548, upload-time = "2026-02-02T15:37:50.187Z" }, + { url = "https://files.pythonhosted.org/packages/de/38/605d371417021359f4910c496f764c48ceb8997605f8c25bf1dfe58c0ebe/orjson-3.11.7-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ed46f17096e28fb28d2975834836a639af7278aa87c84f68ab08fbe5b8bd75fa", size = 129000, upload-time = "2026-02-02T15:37:51.426Z" }, + { url = "https://files.pythonhosted.org/packages/44/98/af32e842b0ffd2335c89714d48ca4e3917b42f5d6ee5537832e069a4b3ac/orjson-3.11.7-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3726be79e36e526e3d9c1aceaadbfb4a04ee80a72ab47b3f3c17fefb9812e7b8", size = 141686, upload-time = "2026-02-02T15:37:52.607Z" }, + { url = "https://files.pythonhosted.org/packages/96/0b/fc793858dfa54be6feee940c1463370ece34b3c39c1ca0aa3845f5ba9892/orjson-3.11.7-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0724e265bc548af1dedebd9cb3d24b4e1c1e685a343be43e87ba922a5c5fff2f", size = 130812, upload-time = "2026-02-02T15:37:53.944Z" }, + { url = "https://files.pythonhosted.org/packages/dc/91/98a52415059db3f374757d0b7f0f16e3b5cd5976c90d1c2b56acaea039e6/orjson-3.11.7-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e7745312efa9e11c17fbd3cb3097262d079da26930ae9ae7ba28fb738367cbad", size = 133440, upload-time = "2026-02-02T15:37:55.615Z" }, + { url = "https://files.pythonhosted.org/packages/dc/b6/cb540117bda61791f46381f8c26c8f93e802892830a6055748d3bb1925ab/orjson-3.11.7-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:f904c24bdeabd4298f7a977ef14ca2a022ca921ed670b92ecd16ab6f3d01f867", size = 138386, upload-time = "2026-02-02T15:37:56.814Z" }, + { url = 
"https://files.pythonhosted.org/packages/63/1a/50a3201c334a7f17c231eee5f841342190723794e3b06293f26e7cf87d31/orjson-3.11.7-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:b9fc4d0f81f394689e0814617aadc4f2ea0e8025f38c226cbf22d3b5ddbf025d", size = 408853, upload-time = "2026-02-02T15:37:58.291Z" }, + { url = "https://files.pythonhosted.org/packages/87/cd/8de1c67d0be44fdc22701e5989c0d015a2adf391498ad42c4dc589cd3013/orjson-3.11.7-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:849e38203e5be40b776ed2718e587faf204d184fc9a008ae441f9442320c0cab", size = 144130, upload-time = "2026-02-02T15:38:00.163Z" }, + { url = "https://files.pythonhosted.org/packages/0f/fe/d605d700c35dd55f51710d159fc54516a280923cd1b7e47508982fbb387d/orjson-3.11.7-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:4682d1db3bcebd2b64757e0ddf9e87ae5f00d29d16c5cdf3a62f561d08cc3dd2", size = 134818, upload-time = "2026-02-02T15:38:01.507Z" }, + { url = "https://files.pythonhosted.org/packages/e4/e4/15ecc67edb3ddb3e2f46ae04475f2d294e8b60c1825fbe28a428b93b3fbd/orjson-3.11.7-cp312-cp312-win32.whl", hash = "sha256:f4f7c956b5215d949a1f65334cf9d7612dde38f20a95f2315deef167def91a6f", size = 127923, upload-time = "2026-02-02T15:38:02.75Z" }, + { url = "https://files.pythonhosted.org/packages/34/70/2e0855361f76198a3965273048c8e50a9695d88cd75811a5b46444895845/orjson-3.11.7-cp312-cp312-win_amd64.whl", hash = "sha256:bf742e149121dc5648ba0a08ea0871e87b660467ef168a3a5e53bc1fbd64bb74", size = 125007, upload-time = "2026-02-02T15:38:04.032Z" }, + { url = "https://files.pythonhosted.org/packages/68/40/c2051bd19fc467610fed469dc29e43ac65891571138f476834ca192bc290/orjson-3.11.7-cp312-cp312-win_arm64.whl", hash = "sha256:26c3b9132f783b7d7903bf1efb095fed8d4a3a85ec0d334ee8beff3d7a4749d5", size = 126089, upload-time = "2026-02-02T15:38:05.297Z" }, + { url = 
"https://files.pythonhosted.org/packages/89/25/6e0e52cac5aab51d7b6dcd257e855e1dec1c2060f6b28566c509b4665f62/orjson-3.11.7-cp313-cp313-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:1d98b30cc1313d52d4af17d9c3d307b08389752ec5f2e5febdfada70b0f8c733", size = 228390, upload-time = "2026-02-02T15:38:06.8Z" }, + { url = "https://files.pythonhosted.org/packages/a5/29/a77f48d2fc8a05bbc529e5ff481fb43d914f9e383ea2469d4f3d51df3d00/orjson-3.11.7-cp313-cp313-macosx_15_0_arm64.whl", hash = "sha256:d897e81f8d0cbd2abb82226d1860ad2e1ab3ff16d7b08c96ca00df9d45409ef4", size = 125189, upload-time = "2026-02-02T15:38:08.181Z" }, + { url = "https://files.pythonhosted.org/packages/89/25/0a16e0729a0e6a1504f9d1a13cdd365f030068aab64cec6958396b9969d7/orjson-3.11.7-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:814be4b49b228cfc0b3c565acf642dd7d13538f966e3ccde61f4f55be3e20785", size = 128106, upload-time = "2026-02-02T15:38:09.41Z" }, + { url = "https://files.pythonhosted.org/packages/66/da/a2e505469d60666a05ab373f1a6322eb671cb2ba3a0ccfc7d4bc97196787/orjson-3.11.7-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d06e5c5fed5caedd2e540d62e5b1c25e8c82431b9e577c33537e5fa4aa909539", size = 123363, upload-time = "2026-02-02T15:38:10.73Z" }, + { url = "https://files.pythonhosted.org/packages/23/bf/ed73f88396ea35c71b38961734ea4a4746f7ca0768bf28fd551d37e48dd0/orjson-3.11.7-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:31c80ce534ac4ea3739c5ee751270646cbc46e45aea7576a38ffec040b4029a1", size = 129007, upload-time = "2026-02-02T15:38:12.138Z" }, + { url = "https://files.pythonhosted.org/packages/73/3c/b05d80716f0225fc9008fbf8ab22841dcc268a626aa550561743714ce3bf/orjson-3.11.7-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f50979824bde13d32b4320eedd513431c921102796d86be3eee0b58e58a3ecd1", size = 141667, upload-time = "2026-02-02T15:38:13.398Z" }, + { url = 
"https://files.pythonhosted.org/packages/61/e8/0be9b0addd9bf86abfc938e97441dcd0375d494594b1c8ad10fe57479617/orjson-3.11.7-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9e54f3808e2b6b945078c41aa8d9b5834b28c50843846e97807e5adb75fa9705", size = 130832, upload-time = "2026-02-02T15:38:14.698Z" }, + { url = "https://files.pythonhosted.org/packages/c9/ec/c68e3b9021a31d9ec15a94931db1410136af862955854ed5dd7e7e4f5bff/orjson-3.11.7-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a12b80df61aab7b98b490fe9e4879925ba666fccdfcd175252ce4d9035865ace", size = 133373, upload-time = "2026-02-02T15:38:16.109Z" }, + { url = "https://files.pythonhosted.org/packages/d2/45/f3466739aaafa570cc8e77c6dbb853c48bf56e3b43738020e2661e08b0ac/orjson-3.11.7-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:996b65230271f1a97026fd0e6a753f51fbc0c335d2ad0c6201f711b0da32693b", size = 138307, upload-time = "2026-02-02T15:38:17.453Z" }, + { url = "https://files.pythonhosted.org/packages/e1/84/9f7f02288da1ffb31405c1be07657afd1eecbcb4b64ee2817b6fe0f785fa/orjson-3.11.7-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:ab49d4b2a6a1d415ddb9f37a21e02e0d5dbfe10b7870b21bf779fc21e9156157", size = 408695, upload-time = "2026-02-02T15:38:18.831Z" }, + { url = "https://files.pythonhosted.org/packages/18/07/9dd2f0c0104f1a0295ffbe912bc8d63307a539b900dd9e2c48ef7810d971/orjson-3.11.7-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:390a1dce0c055ddf8adb6aa94a73b45a4a7d7177b5c584b8d1c1947f2ba60fb3", size = 144099, upload-time = "2026-02-02T15:38:20.28Z" }, + { url = "https://files.pythonhosted.org/packages/a5/66/857a8e4a3292e1f7b1b202883bcdeb43a91566cf59a93f97c53b44bd6801/orjson-3.11.7-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:1eb80451a9c351a71dfaf5b7ccc13ad065405217726b59fdbeadbcc544f9d223", size = 134806, upload-time = "2026-02-02T15:38:22.186Z" }, + { url = 
"https://files.pythonhosted.org/packages/0a/5b/6ebcf3defc1aab3a338ca777214966851e92efb1f30dc7fc8285216e6d1b/orjson-3.11.7-cp313-cp313-win32.whl", hash = "sha256:7477aa6a6ec6139c5cb1cc7b214643592169a5494d200397c7fc95d740d5fcf3", size = 127914, upload-time = "2026-02-02T15:38:23.511Z" }, + { url = "https://files.pythonhosted.org/packages/00/04/c6f72daca5092e3117840a1b1e88dfc809cc1470cf0734890d0366b684a1/orjson-3.11.7-cp313-cp313-win_amd64.whl", hash = "sha256:b9f95dcdea9d4f805daa9ddf02617a89e484c6985fa03055459f90e87d7a0757", size = 124986, upload-time = "2026-02-02T15:38:24.836Z" }, + { url = "https://files.pythonhosted.org/packages/03/ba/077a0f6f1085d6b806937246860fafbd5b17f3919c70ee3f3d8d9c713f38/orjson-3.11.7-cp313-cp313-win_arm64.whl", hash = "sha256:800988273a014a0541483dc81021247d7eacb0c845a9d1a34a422bc718f41539", size = 126045, upload-time = "2026-02-02T15:38:26.216Z" }, + { url = "https://files.pythonhosted.org/packages/e9/1e/745565dca749813db9a093c5ebc4bac1a9475c64d54b95654336ac3ed961/orjson-3.11.7-cp314-cp314-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:de0a37f21d0d364954ad5de1970491d7fbd0fb1ef7417d4d56a36dc01ba0c0a0", size = 228391, upload-time = "2026-02-02T15:38:27.757Z" }, + { url = "https://files.pythonhosted.org/packages/46/19/e40f6225da4d3aa0c8dc6e5219c5e87c2063a560fe0d72a88deb59776794/orjson-3.11.7-cp314-cp314-macosx_15_0_arm64.whl", hash = "sha256:c2428d358d85e8da9d37cba18b8c4047c55222007a84f97156a5b22028dfbfc0", size = 125188, upload-time = "2026-02-02T15:38:29.241Z" }, + { url = "https://files.pythonhosted.org/packages/9d/7e/c4de2babef2c0817fd1f048fd176aa48c37bec8aef53d2fa932983032cce/orjson-3.11.7-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3c4bc6c6ac52cdaa267552544c73e486fecbd710b7ac09bc024d5a78555a22f6", size = 128097, upload-time = "2026-02-02T15:38:30.618Z" }, + { url = 
"https://files.pythonhosted.org/packages/eb/74/233d360632bafd2197f217eee7fb9c9d0229eac0c18128aee5b35b0014fe/orjson-3.11.7-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bd0d68edd7dfca1b2eca9361a44ac9f24b078de3481003159929a0573f21a6bf", size = 123364, upload-time = "2026-02-02T15:38:32.363Z" }, + { url = "https://files.pythonhosted.org/packages/79/51/af79504981dd31efe20a9e360eb49c15f06df2b40e7f25a0a52d9ae888e8/orjson-3.11.7-cp314-cp314-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:623ad1b9548ef63886319c16fa317848e465a21513b31a6ad7b57443c3e0dcf5", size = 129076, upload-time = "2026-02-02T15:38:33.68Z" }, + { url = "https://files.pythonhosted.org/packages/67/e2/da898eb68b72304f8de05ca6715870d09d603ee98d30a27e8a9629abc64b/orjson-3.11.7-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6e776b998ac37c0396093d10290e60283f59cfe0fc3fccbd0ccc4bd04dd19892", size = 141705, upload-time = "2026-02-02T15:38:34.989Z" }, + { url = "https://files.pythonhosted.org/packages/c5/89/15364d92acb3d903b029e28d834edb8780c2b97404cbf7929aa6b9abdb24/orjson-3.11.7-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:652c6c3af76716f4a9c290371ba2e390ede06f6603edb277b481daf37f6f464e", size = 130855, upload-time = "2026-02-02T15:38:36.379Z" }, + { url = "https://files.pythonhosted.org/packages/c2/8b/ecdad52d0b38d4b8f514be603e69ccd5eacf4e7241f972e37e79792212ec/orjson-3.11.7-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a56df3239294ea5964adf074c54bcc4f0ccd21636049a2cf3ca9cf03b5d03cf1", size = 133386, upload-time = "2026-02-02T15:38:37.704Z" }, + { url = "https://files.pythonhosted.org/packages/b9/0e/45e1dcf10e17d0924b7c9162f87ec7b4ca79e28a0548acf6a71788d3e108/orjson-3.11.7-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:bda117c4148e81f746655d5a3239ae9bd00cb7bc3ca178b5fc5a5997e9744183", size = 138295, upload-time = "2026-02-02T15:38:39.096Z" }, + { url = 
"https://files.pythonhosted.org/packages/63/d7/4d2e8b03561257af0450f2845b91fbd111d7e526ccdf737267108075e0ba/orjson-3.11.7-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:23d6c20517a97a9daf1d48b580fcdc6f0516c6f4b5038823426033690b4d2650", size = 408720, upload-time = "2026-02-02T15:38:40.634Z" }, + { url = "https://files.pythonhosted.org/packages/78/cf/d45343518282108b29c12a65892445fc51f9319dc3c552ceb51bb5905ed2/orjson-3.11.7-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:8ff206156006da5b847c9304b6308a01e8cdbc8cce824e2779a5ba71c3def141", size = 144152, upload-time = "2026-02-02T15:38:42.262Z" }, + { url = "https://files.pythonhosted.org/packages/a9/3a/d6001f51a7275aacd342e77b735c71fa04125a3f93c36fee4526bc8c654e/orjson-3.11.7-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:962d046ee1765f74a1da723f4b33e3b228fe3a48bd307acce5021dfefe0e29b2", size = 134814, upload-time = "2026-02-02T15:38:43.627Z" }, + { url = "https://files.pythonhosted.org/packages/1d/d3/f19b47ce16820cc2c480f7f1723e17f6d411b3a295c60c8ad3aa9ff1c96a/orjson-3.11.7-cp314-cp314-win32.whl", hash = "sha256:89e13dd3f89f1c38a9c9eba5fbf7cdc2d1feca82f5f290864b4b7a6aac704576", size = 127997, upload-time = "2026-02-02T15:38:45.06Z" }, + { url = "https://files.pythonhosted.org/packages/12/df/172771902943af54bf661a8d102bdf2e7f932127968080632bda6054b62c/orjson-3.11.7-cp314-cp314-win_amd64.whl", hash = "sha256:845c3e0d8ded9c9271cd79596b9b552448b885b97110f628fb687aee2eed11c1", size = 124985, upload-time = "2026-02-02T15:38:46.388Z" }, + { url = "https://files.pythonhosted.org/packages/6f/1c/f2a8d8a1b17514660a614ce5f7aac74b934e69f5abc2700cc7ced882a009/orjson-3.11.7-cp314-cp314-win_arm64.whl", hash = "sha256:4a2e9c5be347b937a2e0203866f12bba36082e89b402ddb9e927d5822e43088d", size = 126038, upload-time = "2026-02-02T15:38:47.703Z" }, +] + [[package]] name = "packaging" version = "25.0" @@ -4820,6 +4921,7 @@ dev = [ { name = "googleapis-common-protos" }, { name = "grpcio-tools" }, { name = "httpx" }, + { name 
= "langsmith" }, { name = "maturin" }, { name = "moto", extra = ["s3", "server"] }, { name = "mypy" }, @@ -4869,6 +4971,7 @@ dev = [ { name = "googleapis-common-protos", specifier = "==1.70.0" }, { name = "grpcio-tools", specifier = ">=1.48.2,<2" }, { name = "httpx", specifier = ">=0.28.1" }, + { name = "langsmith", specifier = ">=0.7.17" }, { name = "maturin", specifier = ">=1.8.2" }, { name = "moto", extras = ["s3", "server"], specifier = ">=5" }, { name = "mypy", specifier = "==1.18.2" }, @@ -5239,6 +5342,35 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/39/08/aaaad47bc4e9dc8c725e68f9d04865dbcb2052843ff09c97b08904852d84/urllib3-2.6.3-py3-none-any.whl", hash = "sha256:bf272323e553dfb2e87d9bfd225ca7b0f467b919d7bbd355436d3fd37cb0acd4", size = 131584, upload-time = "2026-01-07T16:24:42.685Z" }, ] +[[package]] +name = "uuid-utils" +version = "0.14.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/7b/d1/38a573f0c631c062cf42fa1f5d021d4dd3c31fb23e4376e4b56b0c9fbbed/uuid_utils-0.14.1.tar.gz", hash = "sha256:9bfc95f64af80ccf129c604fb6b8ca66c6f256451e32bc4570f760e4309c9b69", size = 22195, upload-time = "2026-02-20T22:50:38.833Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/43/b7/add4363039a34506a58457d96d4aa2126061df3a143eb4d042aedd6a2e76/uuid_utils-0.14.1-cp39-abi3-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:93a3b5dc798a54a1feb693f2d1cb4cf08258c32ff05ae4929b5f0a2ca624a4f0", size = 604679, upload-time = "2026-02-20T22:50:27.469Z" }, + { url = "https://files.pythonhosted.org/packages/dd/84/d1d0bef50d9e66d31b2019997c741b42274d53dde2e001b7a83e9511c339/uuid_utils-0.14.1-cp39-abi3-macosx_10_12_x86_64.whl", hash = "sha256:ccd65a4b8e83af23eae5e56d88034b2fe7264f465d3e830845f10d1591b81741", size = 309346, upload-time = "2026-02-20T22:50:31.857Z" }, + { url = 
"https://files.pythonhosted.org/packages/ef/ed/b6d6fd52a6636d7c3eddf97d68da50910bf17cd5ac221992506fb56cf12e/uuid_utils-0.14.1-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b56b0cacd81583834820588378e432b0696186683b813058b707aedc1e16c4b1", size = 344714, upload-time = "2026-02-20T22:50:42.642Z" }, + { url = "https://files.pythonhosted.org/packages/a8/a7/a19a1719fb626fe0b31882db36056d44fe904dc0cf15b06fdf56b2679cf7/uuid_utils-0.14.1-cp39-abi3-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bb3cf14de789097320a3c56bfdfdd51b1225d11d67298afbedee7e84e3837c96", size = 350914, upload-time = "2026-02-20T22:50:36.487Z" }, + { url = "https://files.pythonhosted.org/packages/1d/fc/f6690e667fdc3bb1a73f57951f97497771c56fe23e3d302d7404be394d4f/uuid_utils-0.14.1-cp39-abi3-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:60e0854a90d67f4b0cc6e54773deb8be618f4c9bad98d3326f081423b5d14fae", size = 482609, upload-time = "2026-02-20T22:50:37.511Z" }, + { url = "https://files.pythonhosted.org/packages/54/6e/dcd3fa031320921a12ec7b4672dea3bd1dd90ddffa363a91831ba834d559/uuid_utils-0.14.1-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ce6743ba194de3910b5feb1a62590cd2587e33a73ab6af8a01b642ceb5055862", size = 345699, upload-time = "2026-02-20T22:50:46.87Z" }, + { url = "https://files.pythonhosted.org/packages/04/28/e5220204b58b44ac0047226a9d016a113fde039280cc8732d9e6da43b39f/uuid_utils-0.14.1-cp39-abi3-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:043fb58fde6cf1620a6c066382f04f87a8e74feb0f95a585e4ed46f5d44af57b", size = 372205, upload-time = "2026-02-20T22:50:28.438Z" }, + { url = "https://files.pythonhosted.org/packages/c7/d9/3d2eb98af94b8dfffc82b6a33b4dfc87b0a5de2c68a28f6dde0db1f8681b/uuid_utils-0.14.1-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:c915d53f22945e55fe0d3d3b0b87fd965a57f5fd15666fd92d6593a73b1dd297", size = 521836, upload-time = "2026-02-20T22:50:23.057Z" }, + { url = 
"https://files.pythonhosted.org/packages/a8/15/0eb106cc6fe182f7577bc0ab6e2f0a40be247f35c5e297dbf7bbc460bd02/uuid_utils-0.14.1-cp39-abi3-musllinux_1_2_armv7l.whl", hash = "sha256:0972488e3f9b449e83f006ead5a0e0a33ad4a13e4462e865b7c286ab7d7566a3", size = 625260, upload-time = "2026-02-20T22:50:25.949Z" }, + { url = "https://files.pythonhosted.org/packages/3c/17/f539507091334b109e7496830af2f093d9fc8082411eafd3ece58af1f8ba/uuid_utils-0.14.1-cp39-abi3-musllinux_1_2_i686.whl", hash = "sha256:1c238812ae0c8ffe77d8d447a32c6dfd058ea4631246b08b5a71df586ff08531", size = 587824, upload-time = "2026-02-20T22:50:35.225Z" }, + { url = "https://files.pythonhosted.org/packages/2e/c2/d37a7b2e41f153519367d4db01f0526e0d4b06f1a4a87f1c5dfca5d70a8b/uuid_utils-0.14.1-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:bec8f8ef627af86abf8298e7ec50926627e29b34fa907fcfbedb45aaa72bca43", size = 551407, upload-time = "2026-02-20T22:50:44.915Z" }, + { url = "https://files.pythonhosted.org/packages/65/36/2d24b2cbe78547c6532da33fb8613debd3126eccc33a6374ab788f5e46e9/uuid_utils-0.14.1-cp39-abi3-win32.whl", hash = "sha256:b54d6aa6252d96bac1fdbc80d26ba71bad9f220b2724d692ad2f2310c22ef523", size = 183476, upload-time = "2026-02-20T22:50:32.745Z" }, + { url = "https://files.pythonhosted.org/packages/83/92/2d7e90df8b1a69ec4cff33243ce02b7a62f926ef9e2f0eca5a026889cd73/uuid_utils-0.14.1-cp39-abi3-win_amd64.whl", hash = "sha256:fc27638c2ce267a0ce3e06828aff786f91367f093c80625ee21dad0208e0f5ba", size = 187147, upload-time = "2026-02-20T22:50:45.807Z" }, + { url = "https://files.pythonhosted.org/packages/d9/26/529f4beee17e5248e37e0bc17a2761d34c0fa3b1e5729c88adb2065bae6e/uuid_utils-0.14.1-cp39-abi3-win_arm64.whl", hash = "sha256:b04cb49b42afbc4ff8dbc60cf054930afc479d6f4dd7f1ec3bbe5dbfdde06b7a", size = 188132, upload-time = "2026-02-20T22:50:41.718Z" }, + { url = 
"https://files.pythonhosted.org/packages/91/f9/6c64bdbf71f58ccde7919e00491812556f446a5291573af92c49a5e9aaef/uuid_utils-0.14.1-pp311-pypy311_pp73-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:b197cd5424cf89fb019ca7f53641d05bfe34b1879614bed111c9c313b5574cd8", size = 591617, upload-time = "2026-02-20T22:50:24.532Z" }, + { url = "https://files.pythonhosted.org/packages/d0/f0/758c3b0fb0c4871c7704fef26a5bc861de4f8a68e4831669883bebe07b0f/uuid_utils-0.14.1-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:12c65020ba6cb6abe1d57fcbfc2d0ea0506c67049ee031714057f5caf0f9bc9c", size = 303702, upload-time = "2026-02-20T22:50:40.687Z" }, + { url = "https://files.pythonhosted.org/packages/85/89/d91862b544c695cd58855efe3201f83894ed82fffe34500774238ab8eba7/uuid_utils-0.14.1-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0b5d2ad28063d422ccc2c28d46471d47b61a58de885d35113a8f18cb547e25bf", size = 337678, upload-time = "2026-02-20T22:50:39.768Z" }, + { url = "https://files.pythonhosted.org/packages/ee/6b/cf342ba8a898f1de024be0243fac67c025cad530c79ea7f89c4ce718891a/uuid_utils-0.14.1-pp311-pypy311_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:da2234387b45fde40b0fedfee64a0ba591caeea9c48c7698ab6e2d85c7991533", size = 343711, upload-time = "2026-02-20T22:50:43.965Z" }, + { url = "https://files.pythonhosted.org/packages/b3/20/049418d094d396dfa6606b30af925cc68a6670c3b9103b23e6990f84b589/uuid_utils-0.14.1-pp311-pypy311_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:50fffc2827348c1e48972eed3d1c698959e63f9d030aa5dd82ba451113158a62", size = 476731, upload-time = "2026-02-20T22:50:30.589Z" }, + { url = "https://files.pythonhosted.org/packages/77/a1/0857f64d53a90321e6a46a3d4cc394f50e1366132dcd2ae147f9326ca98b/uuid_utils-0.14.1-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:c1dbe718765f70f5b7f9b7f66b6a937802941b1cc56bcf642ce0274169741e01", size = 338902, upload-time = "2026-02-20T22:50:33.927Z" }, + { url = "https://files.pythonhosted.org/packages/ed/d0/5bf7cbf1ac138c92b9ac21066d18faf4d7e7f651047b700eb192ca4b9fdb/uuid_utils-0.14.1-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:258186964039a8e36db10810c1ece879d229b01331e09e9030bc5dcabe231bd2", size = 364700, upload-time = "2026-02-20T22:50:21.732Z" }, +] + [[package]] name = "uvicorn" version = "0.37.0" @@ -5434,6 +5566,124 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/38/34/98a2f52245f4d47be93b580dae5f9861ef58977d73a79eb47c58f1ad1f3a/xmltodict-1.0.4-py3-none-any.whl", hash = "sha256:a4a00d300b0e1c59fc2bfccb53d7b2e88c32f200df138a0dd2229f842497026a", size = 13580, upload-time = "2026-02-22T02:21:21.039Z" }, ] +[[package]] +name = "xxhash" +version = "3.6.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/02/84/30869e01909fb37a6cc7e18688ee8bf1e42d57e7e0777636bd47524c43c7/xxhash-3.6.0.tar.gz", hash = "sha256:f0162a78b13a0d7617b2845b90c763339d1f1d82bb04a4b07f4ab535cc5e05d6", size = 85160, upload-time = "2025-10-02T14:37:08.097Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/34/ee/f9f1d656ad168681bb0f6b092372c1e533c4416b8069b1896a175c46e484/xxhash-3.6.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:87ff03d7e35c61435976554477a7f4cd1704c3596a89a8300d5ce7fc83874a71", size = 32845, upload-time = "2025-10-02T14:33:51.573Z" }, + { url = "https://files.pythonhosted.org/packages/a3/b1/93508d9460b292c74a09b83d16750c52a0ead89c51eea9951cb97a60d959/xxhash-3.6.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f572dfd3d0e2eb1a57511831cf6341242f5a9f8298a45862d085f5b93394a27d", size = 30807, upload-time = "2025-10-02T14:33:52.964Z" }, + { url = 
"https://files.pythonhosted.org/packages/07/55/28c93a3662f2d200c70704efe74aab9640e824f8ce330d8d3943bf7c9b3c/xxhash-3.6.0-cp310-cp310-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:89952ea539566b9fed2bbd94e589672794b4286f342254fad28b149f9615fef8", size = 193786, upload-time = "2025-10-02T14:33:54.272Z" }, + { url = "https://files.pythonhosted.org/packages/c1/96/fec0be9bb4b8f5d9c57d76380a366f31a1781fb802f76fc7cda6c84893c7/xxhash-3.6.0-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:48e6f2ffb07a50b52465a1032c3cf1f4a5683f944acaca8a134a2f23674c2058", size = 212830, upload-time = "2025-10-02T14:33:55.706Z" }, + { url = "https://files.pythonhosted.org/packages/c4/a0/c706845ba77b9611f81fd2e93fad9859346b026e8445e76f8c6fd057cc6d/xxhash-3.6.0-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:b5b848ad6c16d308c3ac7ad4ba6bede80ed5df2ba8ed382f8932df63158dd4b2", size = 211606, upload-time = "2025-10-02T14:33:57.133Z" }, + { url = "https://files.pythonhosted.org/packages/67/1e/164126a2999e5045f04a69257eea946c0dc3e86541b400d4385d646b53d7/xxhash-3.6.0-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:a034590a727b44dd8ac5914236a7b8504144447a9682586c3327e935f33ec8cc", size = 444872, upload-time = "2025-10-02T14:33:58.446Z" }, + { url = "https://files.pythonhosted.org/packages/2d/4b/55ab404c56cd70a2cf5ecfe484838865d0fea5627365c6c8ca156bd09c8f/xxhash-3.6.0-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8a8f1972e75ebdd161d7896743122834fe87378160c20e97f8b09166213bf8cc", size = 193217, upload-time = "2025-10-02T14:33:59.724Z" }, + { url = "https://files.pythonhosted.org/packages/45/e6/52abf06bac316db33aa269091ae7311bd53cfc6f4b120ae77bac1b348091/xxhash-3.6.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = 
"sha256:ee34327b187f002a596d7b167ebc59a1b729e963ce645964bbc050d2f1b73d07", size = 210139, upload-time = "2025-10-02T14:34:02.041Z" }, + { url = "https://files.pythonhosted.org/packages/34/37/db94d490b8691236d356bc249c08819cbcef9273a1a30acf1254ff9ce157/xxhash-3.6.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:339f518c3c7a850dd033ab416ea25a692759dc7478a71131fe8869010d2b75e4", size = 197669, upload-time = "2025-10-02T14:34:03.664Z" }, + { url = "https://files.pythonhosted.org/packages/b7/36/c4f219ef4a17a4f7a64ed3569bc2b5a9c8311abdb22249ac96093625b1a4/xxhash-3.6.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:bf48889c9630542d4709192578aebbd836177c9f7a4a2778a7d6340107c65f06", size = 210018, upload-time = "2025-10-02T14:34:05.325Z" }, + { url = "https://files.pythonhosted.org/packages/fd/06/bfac889a374fc2fc439a69223d1750eed2e18a7db8514737ab630534fa08/xxhash-3.6.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:5576b002a56207f640636056b4160a378fe36a58db73ae5c27a7ec8db35f71d4", size = 413058, upload-time = "2025-10-02T14:34:06.925Z" }, + { url = "https://files.pythonhosted.org/packages/c9/d1/555d8447e0dd32ad0930a249a522bb2e289f0d08b6b16204cfa42c1f5a0c/xxhash-3.6.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:af1f3278bd02814d6dedc5dec397993b549d6f16c19379721e5a1d31e132c49b", size = 190628, upload-time = "2025-10-02T14:34:08.669Z" }, + { url = "https://files.pythonhosted.org/packages/d1/15/8751330b5186cedc4ed4b597989882ea05e0408b53fa47bcb46a6125bfc6/xxhash-3.6.0-cp310-cp310-win32.whl", hash = "sha256:aed058764db109dc9052720da65fafe84873b05eb8b07e5e653597951af57c3b", size = 30577, upload-time = "2025-10-02T14:34:10.234Z" }, + { url = "https://files.pythonhosted.org/packages/bb/cc/53f87e8b5871a6eb2ff7e89c48c66093bda2be52315a8161ddc54ea550c4/xxhash-3.6.0-cp310-cp310-win_amd64.whl", hash = "sha256:e82da5670f2d0d98950317f82a0e4a0197150ff19a6df2ba40399c2a3b9ae5fb", size = 31487, upload-time = "2025-10-02T14:34:11.618Z" }, + { url = 
"https://files.pythonhosted.org/packages/9f/00/60f9ea3bb697667a14314d7269956f58bf56bb73864f8f8d52a3c2535e9a/xxhash-3.6.0-cp310-cp310-win_arm64.whl", hash = "sha256:4a082ffff8c6ac07707fb6b671caf7c6e020c75226c561830b73d862060f281d", size = 27863, upload-time = "2025-10-02T14:34:12.619Z" }, + { url = "https://files.pythonhosted.org/packages/17/d4/cc2f0400e9154df4b9964249da78ebd72f318e35ccc425e9f403c392f22a/xxhash-3.6.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b47bbd8cf2d72797f3c2772eaaac0ded3d3af26481a26d7d7d41dc2d3c46b04a", size = 32844, upload-time = "2025-10-02T14:34:14.037Z" }, + { url = "https://files.pythonhosted.org/packages/5e/ec/1cc11cd13e26ea8bc3cb4af4eaadd8d46d5014aebb67be3f71fb0b68802a/xxhash-3.6.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2b6821e94346f96db75abaa6e255706fb06ebd530899ed76d32cd99f20dc52fa", size = 30809, upload-time = "2025-10-02T14:34:15.484Z" }, + { url = "https://files.pythonhosted.org/packages/04/5f/19fe357ea348d98ca22f456f75a30ac0916b51c753e1f8b2e0e6fb884cce/xxhash-3.6.0-cp311-cp311-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:d0a9751f71a1a65ce3584e9cae4467651c7e70c9d31017fa57574583a4540248", size = 194665, upload-time = "2025-10-02T14:34:16.541Z" }, + { url = "https://files.pythonhosted.org/packages/90/3b/d1f1a8f5442a5fd8beedae110c5af7604dc37349a8e16519c13c19a9a2de/xxhash-3.6.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8b29ee68625ab37b04c0b40c3fafdf24d2f75ccd778333cfb698f65f6c463f62", size = 213550, upload-time = "2025-10-02T14:34:17.878Z" }, + { url = "https://files.pythonhosted.org/packages/c4/ef/3a9b05eb527457d5db13a135a2ae1a26c80fecd624d20f3e8dcc4cb170f3/xxhash-3.6.0-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:6812c25fe0d6c36a46ccb002f40f27ac903bf18af9f6dd8f9669cb4d176ab18f", size = 212384, upload-time = "2025-10-02T14:34:19.182Z" }, + { url = 
"https://files.pythonhosted.org/packages/0f/18/ccc194ee698c6c623acbf0f8c2969811a8a4b6185af5e824cd27b9e4fd3e/xxhash-3.6.0-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:4ccbff013972390b51a18ef1255ef5ac125c92dc9143b2d1909f59abc765540e", size = 445749, upload-time = "2025-10-02T14:34:20.659Z" }, + { url = "https://files.pythonhosted.org/packages/a5/86/cf2c0321dc3940a7aa73076f4fd677a0fb3e405cb297ead7d864fd90847e/xxhash-3.6.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:297b7fbf86c82c550e12e8fb71968b3f033d27b874276ba3624ea868c11165a8", size = 193880, upload-time = "2025-10-02T14:34:22.431Z" }, + { url = "https://files.pythonhosted.org/packages/82/fb/96213c8560e6f948a1ecc9a7613f8032b19ee45f747f4fca4eb31bb6d6ed/xxhash-3.6.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:dea26ae1eb293db089798d3973a5fc928a18fdd97cc8801226fae705b02b14b0", size = 210912, upload-time = "2025-10-02T14:34:23.937Z" }, + { url = "https://files.pythonhosted.org/packages/40/aa/4395e669b0606a096d6788f40dbdf2b819d6773aa290c19e6e83cbfc312f/xxhash-3.6.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:7a0b169aafb98f4284f73635a8e93f0735f9cbde17bd5ec332480484241aaa77", size = 198654, upload-time = "2025-10-02T14:34:25.644Z" }, + { url = "https://files.pythonhosted.org/packages/67/74/b044fcd6b3d89e9b1b665924d85d3f400636c23590226feb1eb09e1176ce/xxhash-3.6.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:08d45aef063a4531b785cd72de4887766d01dc8f362a515693df349fdb825e0c", size = 210867, upload-time = "2025-10-02T14:34:27.203Z" }, + { url = "https://files.pythonhosted.org/packages/bc/fd/3ce73bf753b08cb19daee1eb14aa0d7fe331f8da9c02dd95316ddfe5275e/xxhash-3.6.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:929142361a48ee07f09121fe9e96a84950e8d4df3bb298ca5d88061969f34d7b", size = 414012, upload-time = "2025-10-02T14:34:28.409Z" }, + { url = 
"https://files.pythonhosted.org/packages/ba/b3/5a4241309217c5c876f156b10778f3ab3af7ba7e3259e6d5f5c7d0129eb2/xxhash-3.6.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:51312c768403d8540487dbbfb557454cfc55589bbde6424456951f7fcd4facb3", size = 191409, upload-time = "2025-10-02T14:34:29.696Z" }, + { url = "https://files.pythonhosted.org/packages/c0/01/99bfbc15fb9abb9a72b088c1d95219fc4782b7d01fc835bd5744d66dd0b8/xxhash-3.6.0-cp311-cp311-win32.whl", hash = "sha256:d1927a69feddc24c987b337ce81ac15c4720955b667fe9b588e02254b80446fd", size = 30574, upload-time = "2025-10-02T14:34:31.028Z" }, + { url = "https://files.pythonhosted.org/packages/65/79/9d24d7f53819fe301b231044ea362ce64e86c74f6e8c8e51320de248b3e5/xxhash-3.6.0-cp311-cp311-win_amd64.whl", hash = "sha256:26734cdc2d4ffe449b41d186bbeac416f704a482ed835d375a5c0cb02bc63fef", size = 31481, upload-time = "2025-10-02T14:34:32.062Z" }, + { url = "https://files.pythonhosted.org/packages/30/4e/15cd0e3e8772071344eab2961ce83f6e485111fed8beb491a3f1ce100270/xxhash-3.6.0-cp311-cp311-win_arm64.whl", hash = "sha256:d72f67ef8bf36e05f5b6c65e8524f265bd61071471cd4cf1d36743ebeeeb06b7", size = 27861, upload-time = "2025-10-02T14:34:33.555Z" }, + { url = "https://files.pythonhosted.org/packages/9a/07/d9412f3d7d462347e4511181dea65e47e0d0e16e26fbee2ea86a2aefb657/xxhash-3.6.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:01362c4331775398e7bb34e3ab403bc9ee9f7c497bc7dee6272114055277dd3c", size = 32744, upload-time = "2025-10-02T14:34:34.622Z" }, + { url = "https://files.pythonhosted.org/packages/79/35/0429ee11d035fc33abe32dca1b2b69e8c18d236547b9a9b72c1929189b9a/xxhash-3.6.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b7b2df81a23f8cb99656378e72501b2cb41b1827c0f5a86f87d6b06b69f9f204", size = 30816, upload-time = "2025-10-02T14:34:36.043Z" }, + { url = 
"https://files.pythonhosted.org/packages/b7/f2/57eb99aa0f7d98624c0932c5b9a170e1806406cdbcdb510546634a1359e0/xxhash-3.6.0-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:dc94790144e66b14f67b10ac8ed75b39ca47536bf8800eb7c24b50271ea0c490", size = 194035, upload-time = "2025-10-02T14:34:37.354Z" }, + { url = "https://files.pythonhosted.org/packages/4c/ed/6224ba353690d73af7a3f1c7cdb1fc1b002e38f783cb991ae338e1eb3d79/xxhash-3.6.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:93f107c673bccf0d592cdba077dedaf52fe7f42dcd7676eba1f6d6f0c3efffd2", size = 212914, upload-time = "2025-10-02T14:34:38.6Z" }, + { url = "https://files.pythonhosted.org/packages/38/86/fb6b6130d8dd6b8942cc17ab4d90e223653a89aa32ad2776f8af7064ed13/xxhash-3.6.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:2aa5ee3444c25b69813663c9f8067dcfaa2e126dc55e8dddf40f4d1c25d7effa", size = 212163, upload-time = "2025-10-02T14:34:39.872Z" }, + { url = "https://files.pythonhosted.org/packages/ee/dc/e84875682b0593e884ad73b2d40767b5790d417bde603cceb6878901d647/xxhash-3.6.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:f7f99123f0e1194fa59cc69ad46dbae2e07becec5df50a0509a808f90a0f03f0", size = 445411, upload-time = "2025-10-02T14:34:41.569Z" }, + { url = "https://files.pythonhosted.org/packages/11/4f/426f91b96701ec2f37bb2b8cec664eff4f658a11f3fa9d94f0a887ea6d2b/xxhash-3.6.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:49e03e6fe2cac4a1bc64952dd250cf0dbc5ef4ebb7b8d96bce82e2de163c82a2", size = 193883, upload-time = "2025-10-02T14:34:43.249Z" }, + { url = "https://files.pythonhosted.org/packages/53/5a/ddbb83eee8e28b778eacfc5a85c969673e4023cdeedcfcef61f36731610b/xxhash-3.6.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = 
"sha256:bd17fede52a17a4f9a7bc4472a5867cb0b160deeb431795c0e4abe158bc784e9", size = 210392, upload-time = "2025-10-02T14:34:45.042Z" }, + { url = "https://files.pythonhosted.org/packages/1e/c2/ff69efd07c8c074ccdf0a4f36fcdd3d27363665bcdf4ba399abebe643465/xxhash-3.6.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:6fb5f5476bef678f69db04f2bd1efbed3030d2aba305b0fc1773645f187d6a4e", size = 197898, upload-time = "2025-10-02T14:34:46.302Z" }, + { url = "https://files.pythonhosted.org/packages/58/ca/faa05ac19b3b622c7c9317ac3e23954187516298a091eb02c976d0d3dd45/xxhash-3.6.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:843b52f6d88071f87eba1631b684fcb4b2068cd2180a0224122fe4ef011a9374", size = 210655, upload-time = "2025-10-02T14:34:47.571Z" }, + { url = "https://files.pythonhosted.org/packages/d4/7a/06aa7482345480cc0cb597f5c875b11a82c3953f534394f620b0be2f700c/xxhash-3.6.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:7d14a6cfaf03b1b6f5f9790f76880601ccc7896aff7ab9cd8978a939c1eb7e0d", size = 414001, upload-time = "2025-10-02T14:34:49.273Z" }, + { url = "https://files.pythonhosted.org/packages/23/07/63ffb386cd47029aa2916b3d2f454e6cc5b9f5c5ada3790377d5430084e7/xxhash-3.6.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:418daf3db71e1413cfe211c2f9a528456936645c17f46b5204705581a45390ae", size = 191431, upload-time = "2025-10-02T14:34:50.798Z" }, + { url = "https://files.pythonhosted.org/packages/0f/93/14fde614cadb4ddf5e7cebf8918b7e8fac5ae7861c1875964f17e678205c/xxhash-3.6.0-cp312-cp312-win32.whl", hash = "sha256:50fc255f39428a27299c20e280d6193d8b63b8ef8028995323bf834a026b4fbb", size = 30617, upload-time = "2025-10-02T14:34:51.954Z" }, + { url = "https://files.pythonhosted.org/packages/13/5d/0d125536cbe7565a83d06e43783389ecae0c0f2ed037b48ede185de477c0/xxhash-3.6.0-cp312-cp312-win_amd64.whl", hash = "sha256:c0f2ab8c715630565ab8991b536ecded9416d615538be8ecddce43ccf26cbc7c", size = 31534, upload-time = "2025-10-02T14:34:53.276Z" }, + { url = 
"https://files.pythonhosted.org/packages/54/85/6ec269b0952ec7e36ba019125982cf11d91256a778c7c3f98a4c5043d283/xxhash-3.6.0-cp312-cp312-win_arm64.whl", hash = "sha256:eae5c13f3bc455a3bbb68bdc513912dc7356de7e2280363ea235f71f54064829", size = 27876, upload-time = "2025-10-02T14:34:54.371Z" }, + { url = "https://files.pythonhosted.org/packages/33/76/35d05267ac82f53ae9b0e554da7c5e281ee61f3cad44c743f0fcd354f211/xxhash-3.6.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:599e64ba7f67472481ceb6ee80fa3bd828fd61ba59fb11475572cc5ee52b89ec", size = 32738, upload-time = "2025-10-02T14:34:55.839Z" }, + { url = "https://files.pythonhosted.org/packages/31/a8/3fbce1cd96534a95e35d5120637bf29b0d7f5d8fa2f6374e31b4156dd419/xxhash-3.6.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:7d8b8aaa30fca4f16f0c84a5c8d7ddee0e25250ec2796c973775373257dde8f1", size = 30821, upload-time = "2025-10-02T14:34:57.219Z" }, + { url = "https://files.pythonhosted.org/packages/0c/ea/d387530ca7ecfa183cb358027f1833297c6ac6098223fd14f9782cd0015c/xxhash-3.6.0-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:d597acf8506d6e7101a4a44a5e428977a51c0fadbbfd3c39650cca9253f6e5a6", size = 194127, upload-time = "2025-10-02T14:34:59.21Z" }, + { url = "https://files.pythonhosted.org/packages/ba/0c/71435dcb99874b09a43b8d7c54071e600a7481e42b3e3ce1eb5226a5711a/xxhash-3.6.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:858dc935963a33bc33490128edc1c12b0c14d9c7ebaa4e387a7869ecc4f3e263", size = 212975, upload-time = "2025-10-02T14:35:00.816Z" }, + { url = "https://files.pythonhosted.org/packages/84/7a/c2b3d071e4bb4a90b7057228a99b10d51744878f4a8a6dd643c8bd897620/xxhash-3.6.0-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:ba284920194615cb8edf73bf52236ce2e1664ccd4a38fdb543506413529cc546", size = 212241, upload-time = "2025-10-02T14:35:02.207Z" }, + { url = 
"https://files.pythonhosted.org/packages/81/5f/640b6eac0128e215f177df99eadcd0f1b7c42c274ab6a394a05059694c5a/xxhash-3.6.0-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:4b54219177f6c6674d5378bd862c6aedf64725f70dd29c472eaae154df1a2e89", size = 445471, upload-time = "2025-10-02T14:35:03.61Z" }, + { url = "https://files.pythonhosted.org/packages/5e/1e/3c3d3ef071b051cc3abbe3721ffb8365033a172613c04af2da89d5548a87/xxhash-3.6.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:42c36dd7dbad2f5238950c377fcbf6811b1cdb1c444fab447960030cea60504d", size = 193936, upload-time = "2025-10-02T14:35:05.013Z" }, + { url = "https://files.pythonhosted.org/packages/2c/bd/4a5f68381939219abfe1c22a9e3a5854a4f6f6f3c4983a87d255f21f2e5d/xxhash-3.6.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:f22927652cba98c44639ffdc7aaf35828dccf679b10b31c4ad72a5b530a18eb7", size = 210440, upload-time = "2025-10-02T14:35:06.239Z" }, + { url = "https://files.pythonhosted.org/packages/eb/37/b80fe3d5cfb9faff01a02121a0f4d565eb7237e9e5fc66e73017e74dcd36/xxhash-3.6.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:b45fad44d9c5c119e9c6fbf2e1c656a46dc68e280275007bbfd3d572b21426db", size = 197990, upload-time = "2025-10-02T14:35:07.735Z" }, + { url = "https://files.pythonhosted.org/packages/d7/fd/2c0a00c97b9e18f72e1f240ad4e8f8a90fd9d408289ba9c7c495ed7dc05c/xxhash-3.6.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:6f2580ffab1a8b68ef2b901cde7e55fa8da5e4be0977c68f78fc80f3c143de42", size = 210689, upload-time = "2025-10-02T14:35:09.438Z" }, + { url = "https://files.pythonhosted.org/packages/93/86/5dd8076a926b9a95db3206aba20d89a7fc14dd5aac16e5c4de4b56033140/xxhash-3.6.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:40c391dd3cd041ebc3ffe6f2c862f402e306eb571422e0aa918d8070ba31da11", size = 414068, upload-time = "2025-10-02T14:35:11.162Z" }, + { url = 
"https://files.pythonhosted.org/packages/af/3c/0bb129170ee8f3650f08e993baee550a09593462a5cddd8e44d0011102b1/xxhash-3.6.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:f205badabde7aafd1a31e8ca2a3e5a763107a71c397c4481d6a804eb5063d8bd", size = 191495, upload-time = "2025-10-02T14:35:12.971Z" }, + { url = "https://files.pythonhosted.org/packages/e9/3a/6797e0114c21d1725e2577508e24006fd7ff1d8c0c502d3b52e45c1771d8/xxhash-3.6.0-cp313-cp313-win32.whl", hash = "sha256:2577b276e060b73b73a53042ea5bd5203d3e6347ce0d09f98500f418a9fcf799", size = 30620, upload-time = "2025-10-02T14:35:14.129Z" }, + { url = "https://files.pythonhosted.org/packages/86/15/9bc32671e9a38b413a76d24722a2bf8784a132c043063a8f5152d390b0f9/xxhash-3.6.0-cp313-cp313-win_amd64.whl", hash = "sha256:757320d45d2fbcce8f30c42a6b2f47862967aea7bf458b9625b4bbe7ee390392", size = 31542, upload-time = "2025-10-02T14:35:15.21Z" }, + { url = "https://files.pythonhosted.org/packages/39/c5/cc01e4f6188656e56112d6a8e0dfe298a16934b8c47a247236549a3f7695/xxhash-3.6.0-cp313-cp313-win_arm64.whl", hash = "sha256:457b8f85dec5825eed7b69c11ae86834a018b8e3df5e77783c999663da2f96d6", size = 27880, upload-time = "2025-10-02T14:35:16.315Z" }, + { url = "https://files.pythonhosted.org/packages/f3/30/25e5321c8732759e930c555176d37e24ab84365482d257c3b16362235212/xxhash-3.6.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:a42e633d75cdad6d625434e3468126c73f13f7584545a9cf34e883aa1710e702", size = 32956, upload-time = "2025-10-02T14:35:17.413Z" }, + { url = "https://files.pythonhosted.org/packages/9f/3c/0573299560d7d9f8ab1838f1efc021a280b5ae5ae2e849034ef3dee18810/xxhash-3.6.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:568a6d743219e717b07b4e03b0a828ce593833e498c3b64752e0f5df6bfe84db", size = 31072, upload-time = "2025-10-02T14:35:18.844Z" }, + { url = 
"https://files.pythonhosted.org/packages/7a/1c/52d83a06e417cd9d4137722693424885cc9878249beb3a7c829e74bf7ce9/xxhash-3.6.0-cp313-cp313t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:bec91b562d8012dae276af8025a55811b875baace6af510412a5e58e3121bc54", size = 196409, upload-time = "2025-10-02T14:35:20.31Z" }, + { url = "https://files.pythonhosted.org/packages/e3/8e/c6d158d12a79bbd0b878f8355432075fc82759e356ab5a111463422a239b/xxhash-3.6.0-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:78e7f2f4c521c30ad5e786fdd6bae89d47a32672a80195467b5de0480aa97b1f", size = 215736, upload-time = "2025-10-02T14:35:21.616Z" }, + { url = "https://files.pythonhosted.org/packages/bc/68/c4c80614716345d55071a396cf03d06e34b5f4917a467faf43083c995155/xxhash-3.6.0-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:3ed0df1b11a79856df5ffcab572cbd6b9627034c1c748c5566fa79df9048a7c5", size = 214833, upload-time = "2025-10-02T14:35:23.32Z" }, + { url = "https://files.pythonhosted.org/packages/7e/e9/ae27c8ffec8b953efa84c7c4a6c6802c263d587b9fc0d6e7cea64e08c3af/xxhash-3.6.0-cp313-cp313t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:0e4edbfc7d420925b0dd5e792478ed393d6e75ff8fc219a6546fb446b6a417b1", size = 448348, upload-time = "2025-10-02T14:35:25.111Z" }, + { url = "https://files.pythonhosted.org/packages/d7/6b/33e21afb1b5b3f46b74b6bd1913639066af218d704cc0941404ca717fc57/xxhash-3.6.0-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fba27a198363a7ef87f8c0f6b171ec36b674fe9053742c58dd7e3201c1ab30ee", size = 196070, upload-time = "2025-10-02T14:35:26.586Z" }, + { url = "https://files.pythonhosted.org/packages/96/b6/fcabd337bc5fa624e7203aa0fa7d0c49eed22f72e93229431752bddc83d9/xxhash-3.6.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = 
"sha256:794fe9145fe60191c6532fa95063765529770edcdd67b3d537793e8004cabbfd", size = 212907, upload-time = "2025-10-02T14:35:28.087Z" }, + { url = "https://files.pythonhosted.org/packages/4b/d3/9ee6160e644d660fcf176c5825e61411c7f62648728f69c79ba237250143/xxhash-3.6.0-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:6105ef7e62b5ac73a837778efc331a591d8442f8ef5c7e102376506cb4ae2729", size = 200839, upload-time = "2025-10-02T14:35:29.857Z" }, + { url = "https://files.pythonhosted.org/packages/0d/98/e8de5baa5109394baf5118f5e72ab21a86387c4f89b0e77ef3e2f6b0327b/xxhash-3.6.0-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:f01375c0e55395b814a679b3eea205db7919ac2af213f4a6682e01220e5fe292", size = 213304, upload-time = "2025-10-02T14:35:31.222Z" }, + { url = "https://files.pythonhosted.org/packages/7b/1d/71056535dec5c3177eeb53e38e3d367dd1d16e024e63b1cee208d572a033/xxhash-3.6.0-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:d706dca2d24d834a4661619dcacf51a75c16d65985718d6a7d73c1eeeb903ddf", size = 416930, upload-time = "2025-10-02T14:35:32.517Z" }, + { url = "https://files.pythonhosted.org/packages/dc/6c/5cbde9de2cd967c322e651c65c543700b19e7ae3e0aae8ece3469bf9683d/xxhash-3.6.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:5f059d9faeacd49c0215d66f4056e1326c80503f51a1532ca336a385edadd033", size = 193787, upload-time = "2025-10-02T14:35:33.827Z" }, + { url = "https://files.pythonhosted.org/packages/19/fa/0172e350361d61febcea941b0cc541d6e6c8d65d153e85f850a7b256ff8a/xxhash-3.6.0-cp313-cp313t-win32.whl", hash = "sha256:1244460adc3a9be84731d72b8e80625788e5815b68da3da8b83f78115a40a7ec", size = 30916, upload-time = "2025-10-02T14:35:35.107Z" }, + { url = "https://files.pythonhosted.org/packages/ad/e6/e8cf858a2b19d6d45820f072eff1bea413910592ff17157cabc5f1227a16/xxhash-3.6.0-cp313-cp313t-win_amd64.whl", hash = "sha256:b1e420ef35c503869c4064f4a2f2b08ad6431ab7b229a05cce39d74268bca6b8", size = 31799, upload-time = "2025-10-02T14:35:36.165Z" }, + { url = 
"https://files.pythonhosted.org/packages/56/15/064b197e855bfb7b343210e82490ae672f8bc7cdf3ddb02e92f64304ee8a/xxhash-3.6.0-cp313-cp313t-win_arm64.whl", hash = "sha256:ec44b73a4220623235f67a996c862049f375df3b1052d9899f40a6382c32d746", size = 28044, upload-time = "2025-10-02T14:35:37.195Z" }, + { url = "https://files.pythonhosted.org/packages/7e/5e/0138bc4484ea9b897864d59fce9be9086030825bc778b76cb5a33a906d37/xxhash-3.6.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:a40a3d35b204b7cc7643cbcf8c9976d818cb47befcfac8bbefec8038ac363f3e", size = 32754, upload-time = "2025-10-02T14:35:38.245Z" }, + { url = "https://files.pythonhosted.org/packages/18/d7/5dac2eb2ec75fd771957a13e5dda560efb2176d5203f39502a5fc571f899/xxhash-3.6.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:a54844be970d3fc22630b32d515e79a90d0a3ddb2644d8d7402e3c4c8da61405", size = 30846, upload-time = "2025-10-02T14:35:39.6Z" }, + { url = "https://files.pythonhosted.org/packages/fe/71/8bc5be2bb00deb5682e92e8da955ebe5fa982da13a69da5a40a4c8db12fb/xxhash-3.6.0-cp314-cp314-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:016e9190af8f0a4e3741343777710e3d5717427f175adfdc3e72508f59e2a7f3", size = 194343, upload-time = "2025-10-02T14:35:40.69Z" }, + { url = "https://files.pythonhosted.org/packages/e7/3b/52badfb2aecec2c377ddf1ae75f55db3ba2d321c5e164f14461c90837ef3/xxhash-3.6.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4f6f72232f849eb9d0141e2ebe2677ece15adfd0fa599bc058aad83c714bb2c6", size = 213074, upload-time = "2025-10-02T14:35:42.29Z" }, + { url = "https://files.pythonhosted.org/packages/a2/2b/ae46b4e9b92e537fa30d03dbc19cdae57ed407e9c26d163895e968e3de85/xxhash-3.6.0-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:63275a8aba7865e44b1813d2177e0f5ea7eadad3dd063a21f7cf9afdc7054063", size = 212388, upload-time = "2025-10-02T14:35:43.929Z" }, + { url = 
"https://files.pythonhosted.org/packages/f5/80/49f88d3afc724b4ac7fbd664c8452d6db51b49915be48c6982659e0e7942/xxhash-3.6.0-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:3cd01fa2aa00d8b017c97eb46b9a794fbdca53fc14f845f5a328c71254b0abb7", size = 445614, upload-time = "2025-10-02T14:35:45.216Z" }, + { url = "https://files.pythonhosted.org/packages/ed/ba/603ce3961e339413543d8cd44f21f2c80e2a7c5cfe692a7b1f2cccf58f3c/xxhash-3.6.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0226aa89035b62b6a86d3c68df4d7c1f47a342b8683da2b60cedcddb46c4d95b", size = 194024, upload-time = "2025-10-02T14:35:46.959Z" }, + { url = "https://files.pythonhosted.org/packages/78/d1/8e225ff7113bf81545cfdcd79eef124a7b7064a0bba53605ff39590b95c2/xxhash-3.6.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:c6e193e9f56e4ca4923c61238cdaced324f0feac782544eb4c6d55ad5cc99ddd", size = 210541, upload-time = "2025-10-02T14:35:48.301Z" }, + { url = "https://files.pythonhosted.org/packages/6f/58/0f89d149f0bad89def1a8dd38feb50ccdeb643d9797ec84707091d4cb494/xxhash-3.6.0-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:9176dcaddf4ca963d4deb93866d739a343c01c969231dbe21680e13a5d1a5bf0", size = 198305, upload-time = "2025-10-02T14:35:49.584Z" }, + { url = "https://files.pythonhosted.org/packages/11/38/5eab81580703c4df93feb5f32ff8fa7fe1e2c51c1f183ee4e48d4bb9d3d7/xxhash-3.6.0-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:c1ce4009c97a752e682b897aa99aef84191077a9433eb237774689f14f8ec152", size = 210848, upload-time = "2025-10-02T14:35:50.877Z" }, + { url = "https://files.pythonhosted.org/packages/5e/6b/953dc4b05c3ce678abca756416e4c130d2382f877a9c30a20d08ee6a77c0/xxhash-3.6.0-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:8cb2f4f679b01513b7adbb9b1b2f0f9cdc31b70007eaf9d59d0878809f385b11", size = 414142, upload-time = "2025-10-02T14:35:52.15Z" }, + { url = 
"https://files.pythonhosted.org/packages/08/a9/238ec0d4e81a10eb5026d4a6972677cbc898ba6c8b9dbaec12ae001b1b35/xxhash-3.6.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:653a91d7c2ab54a92c19ccf43508b6a555440b9be1bc8be553376778be7f20b5", size = 191547, upload-time = "2025-10-02T14:35:53.547Z" }, + { url = "https://files.pythonhosted.org/packages/f1/ee/3cf8589e06c2164ac77c3bf0aa127012801128f1feebf2a079272da5737c/xxhash-3.6.0-cp314-cp314-win32.whl", hash = "sha256:a756fe893389483ee8c394d06b5ab765d96e68fbbfe6fde7aa17e11f5720559f", size = 31214, upload-time = "2025-10-02T14:35:54.746Z" }, + { url = "https://files.pythonhosted.org/packages/02/5d/a19552fbc6ad4cb54ff953c3908bbc095f4a921bc569433d791f755186f1/xxhash-3.6.0-cp314-cp314-win_amd64.whl", hash = "sha256:39be8e4e142550ef69629c9cd71b88c90e9a5db703fecbcf265546d9536ca4ad", size = 32290, upload-time = "2025-10-02T14:35:55.791Z" }, + { url = "https://files.pythonhosted.org/packages/b1/11/dafa0643bc30442c887b55baf8e73353a344ee89c1901b5a5c54a6c17d39/xxhash-3.6.0-cp314-cp314-win_arm64.whl", hash = "sha256:25915e6000338999236f1eb68a02a32c3275ac338628a7eaa5a269c401995679", size = 28795, upload-time = "2025-10-02T14:35:57.162Z" }, + { url = "https://files.pythonhosted.org/packages/2c/db/0e99732ed7f64182aef4a6fb145e1a295558deec2a746265dcdec12d191e/xxhash-3.6.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:c5294f596a9017ca5a3e3f8884c00b91ab2ad2933cf288f4923c3fd4346cf3d4", size = 32955, upload-time = "2025-10-02T14:35:58.267Z" }, + { url = "https://files.pythonhosted.org/packages/55/f4/2a7c3c68e564a099becfa44bb3d398810cc0ff6749b0d3cb8ccb93f23c14/xxhash-3.6.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:1cf9dcc4ab9cff01dfbba78544297a3a01dafd60f3bde4e2bfd016cf7e4ddc67", size = 31072, upload-time = "2025-10-02T14:35:59.382Z" }, + { url = 
"https://files.pythonhosted.org/packages/c6/d9/72a29cddc7250e8a5819dad5d466facb5dc4c802ce120645630149127e73/xxhash-3.6.0-cp314-cp314t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:01262da8798422d0685f7cef03b2bd3f4f46511b02830861df548d7def4402ad", size = 196579, upload-time = "2025-10-02T14:36:00.838Z" }, + { url = "https://files.pythonhosted.org/packages/63/93/b21590e1e381040e2ca305a884d89e1c345b347404f7780f07f2cdd47ef4/xxhash-3.6.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:51a73fb7cb3a3ead9f7a8b583ffd9b8038e277cdb8cb87cf890e88b3456afa0b", size = 215854, upload-time = "2025-10-02T14:36:02.207Z" }, + { url = "https://files.pythonhosted.org/packages/ce/b8/edab8a7d4fa14e924b29be877d54155dcbd8b80be85ea00d2be3413a9ed4/xxhash-3.6.0-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:b9c6df83594f7df8f7f708ce5ebeacfc69f72c9fbaaababf6cf4758eaada0c9b", size = 214965, upload-time = "2025-10-02T14:36:03.507Z" }, + { url = "https://files.pythonhosted.org/packages/27/67/dfa980ac7f0d509d54ea0d5a486d2bb4b80c3f1bb22b66e6a05d3efaf6c0/xxhash-3.6.0-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:627f0af069b0ea56f312fd5189001c24578868643203bca1abbc2c52d3a6f3ca", size = 448484, upload-time = "2025-10-02T14:36:04.828Z" }, + { url = "https://files.pythonhosted.org/packages/8c/63/8ffc2cc97e811c0ca5d00ab36604b3ea6f4254f20b7bc658ca825ce6c954/xxhash-3.6.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:aa912c62f842dfd013c5f21a642c9c10cd9f4c4e943e0af83618b4a404d9091a", size = 196162, upload-time = "2025-10-02T14:36:06.182Z" }, + { url = "https://files.pythonhosted.org/packages/4b/77/07f0e7a3edd11a6097e990f6e5b815b6592459cb16dae990d967693e6ea9/xxhash-3.6.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = 
"sha256:b465afd7909db30168ab62afe40b2fcf79eedc0b89a6c0ab3123515dc0df8b99", size = 213007, upload-time = "2025-10-02T14:36:07.733Z" }, + { url = "https://files.pythonhosted.org/packages/ae/d8/bc5fa0d152837117eb0bef6f83f956c509332ce133c91c63ce07ee7c4873/xxhash-3.6.0-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:a881851cf38b0a70e7c4d3ce81fc7afd86fbc2a024f4cfb2a97cf49ce04b75d3", size = 200956, upload-time = "2025-10-02T14:36:09.106Z" }, + { url = "https://files.pythonhosted.org/packages/26/a5/d749334130de9411783873e9b98ecc46688dad5db64ca6e04b02acc8b473/xxhash-3.6.0-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:9b3222c686a919a0f3253cfc12bb118b8b103506612253b5baeaac10d8027cf6", size = 213401, upload-time = "2025-10-02T14:36:10.585Z" }, + { url = "https://files.pythonhosted.org/packages/89/72/abed959c956a4bfc72b58c0384bb7940663c678127538634d896b1195c10/xxhash-3.6.0-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:c5aa639bc113e9286137cec8fadc20e9cd732b2cc385c0b7fa673b84fc1f2a93", size = 417083, upload-time = "2025-10-02T14:36:12.276Z" }, + { url = "https://files.pythonhosted.org/packages/0c/b3/62fd2b586283b7d7d665fb98e266decadf31f058f1cf6c478741f68af0cb/xxhash-3.6.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:5c1343d49ac102799905e115aee590183c3921d475356cb24b4de29a4bc56518", size = 193913, upload-time = "2025-10-02T14:36:14.025Z" }, + { url = "https://files.pythonhosted.org/packages/9a/9a/c19c42c5b3f5a4aad748a6d5b4f23df3bed7ee5445accc65a0fb3ff03953/xxhash-3.6.0-cp314-cp314t-win32.whl", hash = "sha256:5851f033c3030dd95c086b4a36a2683c2ff4a799b23af60977188b057e467119", size = 31586, upload-time = "2025-10-02T14:36:15.603Z" }, + { url = "https://files.pythonhosted.org/packages/03/d6/4cc450345be9924fd5dc8c590ceda1db5b43a0a889587b0ae81a95511360/xxhash-3.6.0-cp314-cp314t-win_amd64.whl", hash = "sha256:0444e7967dac37569052d2409b00a8860c2135cff05502df4da80267d384849f", size = 32526, upload-time = "2025-10-02T14:36:16.708Z" }, + { url = 
"https://files.pythonhosted.org/packages/0f/c9/7243eb3f9eaabd1a88a5a5acadf06df2d83b100c62684b7425c6a11bcaa8/xxhash-3.6.0-cp314-cp314t-win_arm64.whl", hash = "sha256:bb79b1e63f6fd84ec778a4b1916dfe0a7c3fdb986c06addd5db3a0d413819d95", size = 28898, upload-time = "2025-10-02T14:36:17.843Z" }, + { url = "https://files.pythonhosted.org/packages/93/1e/8aec23647a34a249f62e2398c42955acd9b4c6ed5cf08cbea94dc46f78d2/xxhash-3.6.0-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:0f7b7e2ec26c1666ad5fc9dbfa426a6a3367ceaf79db5dd76264659d509d73b0", size = 30662, upload-time = "2025-10-02T14:37:01.743Z" }, + { url = "https://files.pythonhosted.org/packages/b8/0b/b14510b38ba91caf43006209db846a696ceea6a847a0c9ba0a5b1adc53d6/xxhash-3.6.0-pp311-pypy311_pp73-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:5dc1e14d14fa0f5789ec29a7062004b5933964bb9b02aae6622b8f530dc40296", size = 41056, upload-time = "2025-10-02T14:37:02.879Z" }, + { url = "https://files.pythonhosted.org/packages/50/55/15a7b8a56590e66ccd374bbfa3f9ffc45b810886c8c3b614e3f90bd2367c/xxhash-3.6.0-pp311-pypy311_pp73-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:881b47fc47e051b37d94d13e7455131054b56749b91b508b0907eb07900d1c13", size = 36251, upload-time = "2025-10-02T14:37:04.44Z" }, + { url = "https://files.pythonhosted.org/packages/62/b2/5ac99a041a29e58e95f907876b04f7067a0242cb85b5f39e726153981503/xxhash-3.6.0-pp311-pypy311_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c6dc31591899f5e5666f04cc2e529e69b4072827085c1ef15294d91a004bc1bd", size = 32481, upload-time = "2025-10-02T14:37:05.869Z" }, + { url = "https://files.pythonhosted.org/packages/7b/d9/8d95e906764a386a3d3b596f3c68bb63687dfca806373509f51ce8eea81f/xxhash-3.6.0-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:15e0dac10eb9309508bfc41f7f9deaa7755c69e35af835db9cb10751adebc35d", size = 31565, upload-time = "2025-10-02T14:37:06.966Z" }, +] + 
[[package]] name = "yarl" version = "1.22.0" @@ -5600,3 +5850,93 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/36/9a/62a9ba3a919594605a07c34eee3068659bbd648e2fa0c4a86d876810b674/zope_interface-8.0.1-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:87e6b089002c43231fb9afec89268391bcc7a3b66e76e269ffde19a8112fb8d5", size = 264201, upload-time = "2025-09-25T06:26:27.797Z" }, { url = "https://files.pythonhosted.org/packages/da/06/8fe88bd7edef60566d21ef5caca1034e10f6b87441ea85de4bbf9ea74768/zope_interface-8.0.1-cp313-cp313-win_amd64.whl", hash = "sha256:64a43f5280aa770cbafd0307cb3d1ff430e2a1001774e8ceb40787abe4bb6658", size = 212273, upload-time = "2025-09-25T06:00:25.398Z" }, ] + +[[package]] +name = "zstandard" +version = "0.25.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/fd/aa/3e0508d5a5dd96529cdc5a97011299056e14c6505b678fd58938792794b1/zstandard-0.25.0.tar.gz", hash = "sha256:7713e1179d162cf5c7906da876ec2ccb9c3a9dcbdffef0cc7f70c3667a205f0b", size = 711513, upload-time = "2025-09-14T22:15:54.002Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/56/7a/28efd1d371f1acd037ac64ed1c5e2b41514a6cc937dd6ab6a13ab9f0702f/zstandard-0.25.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e59fdc271772f6686e01e1b3b74537259800f57e24280be3f29c8a0deb1904dd", size = 795256, upload-time = "2025-09-14T22:15:56.415Z" }, + { url = "https://files.pythonhosted.org/packages/96/34/ef34ef77f1ee38fc8e4f9775217a613b452916e633c4f1d98f31db52c4a5/zstandard-0.25.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:4d441506e9b372386a5271c64125f72d5df6d2a8e8a2a45a0ae09b03cb781ef7", size = 640565, upload-time = "2025-09-14T22:15:58.177Z" }, + { url = "https://files.pythonhosted.org/packages/9d/1b/4fdb2c12eb58f31f28c4d28e8dc36611dd7205df8452e63f52fb6261d13e/zstandard-0.25.0-cp310-cp310-manylinux2010_i686.manylinux2014_i686.manylinux_2_12_i686.manylinux_2_17_i686.whl", hash = 
"sha256:ab85470ab54c2cb96e176f40342d9ed41e58ca5733be6a893b730e7af9c40550", size = 5345306, upload-time = "2025-09-14T22:16:00.165Z" }, + { url = "https://files.pythonhosted.org/packages/73/28/a44bdece01bca027b079f0e00be3b6bd89a4df180071da59a3dd7381665b/zstandard-0.25.0-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:e05ab82ea7753354bb054b92e2f288afb750e6b439ff6ca78af52939ebbc476d", size = 5055561, upload-time = "2025-09-14T22:16:02.22Z" }, + { url = "https://files.pythonhosted.org/packages/e9/74/68341185a4f32b274e0fc3410d5ad0750497e1acc20bd0f5b5f64ce17785/zstandard-0.25.0-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:78228d8a6a1c177a96b94f7e2e8d012c55f9c760761980da16ae7546a15a8e9b", size = 5402214, upload-time = "2025-09-14T22:16:04.109Z" }, + { url = "https://files.pythonhosted.org/packages/8b/67/f92e64e748fd6aaffe01e2b75a083c0c4fd27abe1c8747fee4555fcee7dd/zstandard-0.25.0-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:2b6bd67528ee8b5c5f10255735abc21aa106931f0dbaf297c7be0c886353c3d0", size = 5449703, upload-time = "2025-09-14T22:16:06.312Z" }, + { url = "https://files.pythonhosted.org/packages/fd/e5/6d36f92a197c3c17729a2125e29c169f460538a7d939a27eaaa6dcfcba8e/zstandard-0.25.0-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:4b6d83057e713ff235a12e73916b6d356e3084fd3d14ced499d84240f3eecee0", size = 5556583, upload-time = "2025-09-14T22:16:08.457Z" }, + { url = "https://files.pythonhosted.org/packages/d7/83/41939e60d8d7ebfe2b747be022d0806953799140a702b90ffe214d557638/zstandard-0.25.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:9174f4ed06f790a6869b41cba05b43eeb9a35f8993c4422ab853b705e8112bbd", size = 5045332, upload-time = "2025-09-14T22:16:10.444Z" }, + { url = "https://files.pythonhosted.org/packages/b3/87/d3ee185e3d1aa0133399893697ae91f221fda79deb61adbe998a7235c43f/zstandard-0.25.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = 
"sha256:25f8f3cd45087d089aef5ba3848cd9efe3ad41163d3400862fb42f81a3a46701", size = 5572283, upload-time = "2025-09-14T22:16:12.128Z" }, + { url = "https://files.pythonhosted.org/packages/0a/1d/58635ae6104df96671076ac7d4ae7816838ce7debd94aecf83e30b7121b0/zstandard-0.25.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:3756b3e9da9b83da1796f8809dd57cb024f838b9eeafde28f3cb472012797ac1", size = 4959754, upload-time = "2025-09-14T22:16:14.225Z" }, + { url = "https://files.pythonhosted.org/packages/75/d6/57e9cb0a9983e9a229dd8fd2e6e96593ef2aa82a3907188436f22b111ccd/zstandard-0.25.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:81dad8d145d8fd981b2962b686b2241d3a1ea07733e76a2f15435dfb7fb60150", size = 5266477, upload-time = "2025-09-14T22:16:16.343Z" }, + { url = "https://files.pythonhosted.org/packages/d1/a9/ee891e5edf33a6ebce0a028726f0bbd8567effe20fe3d5808c42323e8542/zstandard-0.25.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:a5a419712cf88862a45a23def0ae063686db3d324cec7edbe40509d1a79a0aab", size = 5440914, upload-time = "2025-09-14T22:16:18.453Z" }, + { url = "https://files.pythonhosted.org/packages/58/08/a8522c28c08031a9521f27abc6f78dbdee7312a7463dd2cfc658b813323b/zstandard-0.25.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:e7360eae90809efd19b886e59a09dad07da4ca9ba096752e61a2e03c8aca188e", size = 5819847, upload-time = "2025-09-14T22:16:20.559Z" }, + { url = "https://files.pythonhosted.org/packages/6f/11/4c91411805c3f7b6f31c60e78ce347ca48f6f16d552fc659af6ec3b73202/zstandard-0.25.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:75ffc32a569fb049499e63ce68c743155477610532da1eb38e7f24bf7cd29e74", size = 5363131, upload-time = "2025-09-14T22:16:22.206Z" }, + { url = "https://files.pythonhosted.org/packages/ef/d6/8c4bd38a3b24c4c7676a7a3d8de85d6ee7a983602a734b9f9cdefb04a5d6/zstandard-0.25.0-cp310-cp310-win32.whl", hash = "sha256:106281ae350e494f4ac8a80470e66d1fe27e497052c8d9c3b95dc4cf1ade81aa", size = 436469, upload-time = 
"2025-09-14T22:16:25.002Z" }, + { url = "https://files.pythonhosted.org/packages/93/90/96d50ad417a8ace5f841b3228e93d1bb13e6ad356737f42e2dde30d8bd68/zstandard-0.25.0-cp310-cp310-win_amd64.whl", hash = "sha256:ea9d54cc3d8064260114a0bbf3479fc4a98b21dffc89b3459edd506b69262f6e", size = 506100, upload-time = "2025-09-14T22:16:23.569Z" }, + { url = "https://files.pythonhosted.org/packages/2a/83/c3ca27c363d104980f1c9cee1101cc8ba724ac8c28a033ede6aab89585b1/zstandard-0.25.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:933b65d7680ea337180733cf9e87293cc5500cc0eb3fc8769f4d3c88d724ec5c", size = 795254, upload-time = "2025-09-14T22:16:26.137Z" }, + { url = "https://files.pythonhosted.org/packages/ac/4d/e66465c5411a7cf4866aeadc7d108081d8ceba9bc7abe6b14aa21c671ec3/zstandard-0.25.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a3f79487c687b1fc69f19e487cd949bf3aae653d181dfb5fde3bf6d18894706f", size = 640559, upload-time = "2025-09-14T22:16:27.973Z" }, + { url = "https://files.pythonhosted.org/packages/12/56/354fe655905f290d3b147b33fe946b0f27e791e4b50a5f004c802cb3eb7b/zstandard-0.25.0-cp311-cp311-manylinux2010_i686.manylinux2014_i686.manylinux_2_12_i686.manylinux_2_17_i686.whl", hash = "sha256:0bbc9a0c65ce0eea3c34a691e3c4b6889f5f3909ba4822ab385fab9057099431", size = 5348020, upload-time = "2025-09-14T22:16:29.523Z" }, + { url = "https://files.pythonhosted.org/packages/3b/13/2b7ed68bd85e69a2069bcc72141d378f22cae5a0f3b353a2c8f50ef30c1b/zstandard-0.25.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:01582723b3ccd6939ab7b3a78622c573799d5d8737b534b86d0e06ac18dbde4a", size = 5058126, upload-time = "2025-09-14T22:16:31.811Z" }, + { url = "https://files.pythonhosted.org/packages/c9/dd/fdaf0674f4b10d92cb120ccff58bbb6626bf8368f00ebfd2a41ba4a0dc99/zstandard-0.25.0-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:5f1ad7bf88535edcf30038f6919abe087f606f62c00a87d7e33e7fc57cb69fcc", size = 5405390, upload-time = 
"2025-09-14T22:16:33.486Z" }, + { url = "https://files.pythonhosted.org/packages/0f/67/354d1555575bc2490435f90d67ca4dd65238ff2f119f30f72d5cde09c2ad/zstandard-0.25.0-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:06acb75eebeedb77b69048031282737717a63e71e4ae3f77cc0c3b9508320df6", size = 5452914, upload-time = "2025-09-14T22:16:35.277Z" }, + { url = "https://files.pythonhosted.org/packages/bb/1f/e9cfd801a3f9190bf3e759c422bbfd2247db9d7f3d54a56ecde70137791a/zstandard-0.25.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:9300d02ea7c6506f00e627e287e0492a5eb0371ec1670ae852fefffa6164b072", size = 5559635, upload-time = "2025-09-14T22:16:37.141Z" }, + { url = "https://files.pythonhosted.org/packages/21/88/5ba550f797ca953a52d708c8e4f380959e7e3280af029e38fbf47b55916e/zstandard-0.25.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:bfd06b1c5584b657a2892a6014c2f4c20e0db0208c159148fa78c65f7e0b0277", size = 5048277, upload-time = "2025-09-14T22:16:38.807Z" }, + { url = "https://files.pythonhosted.org/packages/46/c0/ca3e533b4fa03112facbe7fbe7779cb1ebec215688e5df576fe5429172e0/zstandard-0.25.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:f373da2c1757bb7f1acaf09369cdc1d51d84131e50d5fa9863982fd626466313", size = 5574377, upload-time = "2025-09-14T22:16:40.523Z" }, + { url = "https://files.pythonhosted.org/packages/12/9b/3fb626390113f272abd0799fd677ea33d5fc3ec185e62e6be534493c4b60/zstandard-0.25.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:6c0e5a65158a7946e7a7affa6418878ef97ab66636f13353b8502d7ea03c8097", size = 4961493, upload-time = "2025-09-14T22:16:43.3Z" }, + { url = "https://files.pythonhosted.org/packages/cb/d3/23094a6b6a4b1343b27ae68249daa17ae0651fcfec9ed4de09d14b940285/zstandard-0.25.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:c8e167d5adf59476fa3e37bee730890e389410c354771a62e3c076c86f9f7778", size = 5269018, upload-time = "2025-09-14T22:16:45.292Z" }, + { url = 
"https://files.pythonhosted.org/packages/8c/a7/bb5a0c1c0f3f4b5e9d5b55198e39de91e04ba7c205cc46fcb0f95f0383c1/zstandard-0.25.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:98750a309eb2f020da61e727de7d7ba3c57c97cf6213f6f6277bb7fb42a8e065", size = 5443672, upload-time = "2025-09-14T22:16:47.076Z" }, + { url = "https://files.pythonhosted.org/packages/27/22/503347aa08d073993f25109c36c8d9f029c7d5949198050962cb568dfa5e/zstandard-0.25.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:22a086cff1b6ceca18a8dd6096ec631e430e93a8e70a9ca5efa7561a00f826fa", size = 5822753, upload-time = "2025-09-14T22:16:49.316Z" }, + { url = "https://files.pythonhosted.org/packages/e2/be/94267dc6ee64f0f8ba2b2ae7c7a2df934a816baaa7291db9e1aa77394c3c/zstandard-0.25.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:72d35d7aa0bba323965da807a462b0966c91608ef3a48ba761678cb20ce5d8b7", size = 5366047, upload-time = "2025-09-14T22:16:51.328Z" }, + { url = "https://files.pythonhosted.org/packages/7b/a3/732893eab0a3a7aecff8b99052fecf9f605cf0fb5fb6d0290e36beee47a4/zstandard-0.25.0-cp311-cp311-win32.whl", hash = "sha256:f5aeea11ded7320a84dcdd62a3d95b5186834224a9e55b92ccae35d21a8b63d4", size = 436484, upload-time = "2025-09-14T22:16:55.005Z" }, + { url = "https://files.pythonhosted.org/packages/43/a3/c6155f5c1cce691cb80dfd38627046e50af3ee9ddc5d0b45b9b063bfb8c9/zstandard-0.25.0-cp311-cp311-win_amd64.whl", hash = "sha256:daab68faadb847063d0c56f361a289c4f268706b598afbf9ad113cbe5c38b6b2", size = 506183, upload-time = "2025-09-14T22:16:52.753Z" }, + { url = "https://files.pythonhosted.org/packages/8c/3e/8945ab86a0820cc0e0cdbf38086a92868a9172020fdab8a03ac19662b0e5/zstandard-0.25.0-cp311-cp311-win_arm64.whl", hash = "sha256:22a06c5df3751bb7dc67406f5374734ccee8ed37fc5981bf1ad7041831fa1137", size = 462533, upload-time = "2025-09-14T22:16:53.878Z" }, + { url = 
"https://files.pythonhosted.org/packages/82/fc/f26eb6ef91ae723a03e16eddb198abcfce2bc5a42e224d44cc8b6765e57e/zstandard-0.25.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:7b3c3a3ab9daa3eed242d6ecceead93aebbb8f5f84318d82cee643e019c4b73b", size = 795738, upload-time = "2025-09-14T22:16:56.237Z" }, + { url = "https://files.pythonhosted.org/packages/aa/1c/d920d64b22f8dd028a8b90e2d756e431a5d86194caa78e3819c7bf53b4b3/zstandard-0.25.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:913cbd31a400febff93b564a23e17c3ed2d56c064006f54efec210d586171c00", size = 640436, upload-time = "2025-09-14T22:16:57.774Z" }, + { url = "https://files.pythonhosted.org/packages/53/6c/288c3f0bd9fcfe9ca41e2c2fbfd17b2097f6af57b62a81161941f09afa76/zstandard-0.25.0-cp312-cp312-manylinux2010_i686.manylinux2014_i686.manylinux_2_12_i686.manylinux_2_17_i686.whl", hash = "sha256:011d388c76b11a0c165374ce660ce2c8efa8e5d87f34996aa80f9c0816698b64", size = 5343019, upload-time = "2025-09-14T22:16:59.302Z" }, + { url = "https://files.pythonhosted.org/packages/1e/15/efef5a2f204a64bdb5571e6161d49f7ef0fffdbca953a615efbec045f60f/zstandard-0.25.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:6dffecc361d079bb48d7caef5d673c88c8988d3d33fb74ab95b7ee6da42652ea", size = 5063012, upload-time = "2025-09-14T22:17:01.156Z" }, + { url = "https://files.pythonhosted.org/packages/b7/37/a6ce629ffdb43959e92e87ebdaeebb5ac81c944b6a75c9c47e300f85abdf/zstandard-0.25.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:7149623bba7fdf7e7f24312953bcf73cae103db8cae49f8154dd1eadc8a29ecb", size = 5394148, upload-time = "2025-09-14T22:17:03.091Z" }, + { url = "https://files.pythonhosted.org/packages/e3/79/2bf870b3abeb5c070fe2d670a5a8d1057a8270f125ef7676d29ea900f496/zstandard-0.25.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:6a573a35693e03cf1d67799fd01b50ff578515a8aeadd4595d2a7fa9f3ec002a", size = 5451652, upload-time = 
"2025-09-14T22:17:04.979Z" }, + { url = "https://files.pythonhosted.org/packages/53/60/7be26e610767316c028a2cbedb9a3beabdbe33e2182c373f71a1c0b88f36/zstandard-0.25.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:5a56ba0db2d244117ed744dfa8f6f5b366e14148e00de44723413b2f3938a902", size = 5546993, upload-time = "2025-09-14T22:17:06.781Z" }, + { url = "https://files.pythonhosted.org/packages/85/c7/3483ad9ff0662623f3648479b0380d2de5510abf00990468c286c6b04017/zstandard-0.25.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:10ef2a79ab8e2974e2075fb984e5b9806c64134810fac21576f0668e7ea19f8f", size = 5046806, upload-time = "2025-09-14T22:17:08.415Z" }, + { url = "https://files.pythonhosted.org/packages/08/b3/206883dd25b8d1591a1caa44b54c2aad84badccf2f1de9e2d60a446f9a25/zstandard-0.25.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:aaf21ba8fb76d102b696781bddaa0954b782536446083ae3fdaa6f16b25a1c4b", size = 5576659, upload-time = "2025-09-14T22:17:10.164Z" }, + { url = "https://files.pythonhosted.org/packages/9d/31/76c0779101453e6c117b0ff22565865c54f48f8bd807df2b00c2c404b8e0/zstandard-0.25.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:1869da9571d5e94a85a5e8d57e4e8807b175c9e4a6294e3b66fa4efb074d90f6", size = 4953933, upload-time = "2025-09-14T22:17:11.857Z" }, + { url = "https://files.pythonhosted.org/packages/18/e1/97680c664a1bf9a247a280a053d98e251424af51f1b196c6d52f117c9720/zstandard-0.25.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:809c5bcb2c67cd0ed81e9229d227d4ca28f82d0f778fc5fea624a9def3963f91", size = 5268008, upload-time = "2025-09-14T22:17:13.627Z" }, + { url = "https://files.pythonhosted.org/packages/1e/73/316e4010de585ac798e154e88fd81bb16afc5c5cb1a72eeb16dd37e8024a/zstandard-0.25.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:f27662e4f7dbf9f9c12391cb37b4c4c3cb90ffbd3b1fb9284dadbbb8935fa708", size = 5433517, upload-time = "2025-09-14T22:17:16.103Z" }, + { url = 
"https://files.pythonhosted.org/packages/5b/60/dd0f8cfa8129c5a0ce3ea6b7f70be5b33d2618013a161e1ff26c2b39787c/zstandard-0.25.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:99c0c846e6e61718715a3c9437ccc625de26593fea60189567f0118dc9db7512", size = 5814292, upload-time = "2025-09-14T22:17:17.827Z" }, + { url = "https://files.pythonhosted.org/packages/fc/5f/75aafd4b9d11b5407b641b8e41a57864097663699f23e9ad4dbb91dc6bfe/zstandard-0.25.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:474d2596a2dbc241a556e965fb76002c1ce655445e4e3bf38e5477d413165ffa", size = 5360237, upload-time = "2025-09-14T22:17:19.954Z" }, + { url = "https://files.pythonhosted.org/packages/ff/8d/0309daffea4fcac7981021dbf21cdb2e3427a9e76bafbcdbdf5392ff99a4/zstandard-0.25.0-cp312-cp312-win32.whl", hash = "sha256:23ebc8f17a03133b4426bcc04aabd68f8236eb78c3760f12783385171b0fd8bd", size = 436922, upload-time = "2025-09-14T22:17:24.398Z" }, + { url = "https://files.pythonhosted.org/packages/79/3b/fa54d9015f945330510cb5d0b0501e8253c127cca7ebe8ba46a965df18c5/zstandard-0.25.0-cp312-cp312-win_amd64.whl", hash = "sha256:ffef5a74088f1e09947aecf91011136665152e0b4b359c42be3373897fb39b01", size = 506276, upload-time = "2025-09-14T22:17:21.429Z" }, + { url = "https://files.pythonhosted.org/packages/ea/6b/8b51697e5319b1f9ac71087b0af9a40d8a6288ff8025c36486e0c12abcc4/zstandard-0.25.0-cp312-cp312-win_arm64.whl", hash = "sha256:181eb40e0b6a29b3cd2849f825e0fa34397f649170673d385f3598ae17cca2e9", size = 462679, upload-time = "2025-09-14T22:17:23.147Z" }, + { url = "https://files.pythonhosted.org/packages/35/0b/8df9c4ad06af91d39e94fa96cc010a24ac4ef1378d3efab9223cc8593d40/zstandard-0.25.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:ec996f12524f88e151c339688c3897194821d7f03081ab35d31d1e12ec975e94", size = 795735, upload-time = "2025-09-14T22:17:26.042Z" }, + { url = 
"https://files.pythonhosted.org/packages/3f/06/9ae96a3e5dcfd119377ba33d4c42a7d89da1efabd5cb3e366b156c45ff4d/zstandard-0.25.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:a1a4ae2dec3993a32247995bdfe367fc3266da832d82f8438c8570f989753de1", size = 640440, upload-time = "2025-09-14T22:17:27.366Z" }, + { url = "https://files.pythonhosted.org/packages/d9/14/933d27204c2bd404229c69f445862454dcc101cd69ef8c6068f15aaec12c/zstandard-0.25.0-cp313-cp313-manylinux2010_i686.manylinux2014_i686.manylinux_2_12_i686.manylinux_2_17_i686.whl", hash = "sha256:e96594a5537722fdfb79951672a2a63aec5ebfb823e7560586f7484819f2a08f", size = 5343070, upload-time = "2025-09-14T22:17:28.896Z" }, + { url = "https://files.pythonhosted.org/packages/6d/db/ddb11011826ed7db9d0e485d13df79b58586bfdec56e5c84a928a9a78c1c/zstandard-0.25.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:bfc4e20784722098822e3eee42b8e576b379ed72cca4a7cb856ae733e62192ea", size = 5063001, upload-time = "2025-09-14T22:17:31.044Z" }, + { url = "https://files.pythonhosted.org/packages/db/00/87466ea3f99599d02a5238498b87bf84a6348290c19571051839ca943777/zstandard-0.25.0-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:457ed498fc58cdc12fc48f7950e02740d4f7ae9493dd4ab2168a47c93c31298e", size = 5394120, upload-time = "2025-09-14T22:17:32.711Z" }, + { url = "https://files.pythonhosted.org/packages/2b/95/fc5531d9c618a679a20ff6c29e2b3ef1d1f4ad66c5e161ae6ff847d102a9/zstandard-0.25.0-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:fd7a5004eb1980d3cefe26b2685bcb0b17989901a70a1040d1ac86f1d898c551", size = 5451230, upload-time = "2025-09-14T22:17:34.41Z" }, + { url = "https://files.pythonhosted.org/packages/63/4b/e3678b4e776db00f9f7b2fe58e547e8928ef32727d7a1ff01dea010f3f13/zstandard-0.25.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:8e735494da3db08694d26480f1493ad2cf86e99bdd53e8e9771b2752a5c0246a", size = 5547173, upload-time = 
"2025-09-14T22:17:36.084Z" }, + { url = "https://files.pythonhosted.org/packages/4e/d5/ba05ed95c6b8ec30bd468dfeab20589f2cf709b5c940483e31d991f2ca58/zstandard-0.25.0-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:3a39c94ad7866160a4a46d772e43311a743c316942037671beb264e395bdd611", size = 5046736, upload-time = "2025-09-14T22:17:37.891Z" }, + { url = "https://files.pythonhosted.org/packages/50/d5/870aa06b3a76c73eced65c044b92286a3c4e00554005ff51962deef28e28/zstandard-0.25.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:172de1f06947577d3a3005416977cce6168f2261284c02080e7ad0185faeced3", size = 5576368, upload-time = "2025-09-14T22:17:40.206Z" }, + { url = "https://files.pythonhosted.org/packages/5d/35/398dc2ffc89d304d59bc12f0fdd931b4ce455bddf7038a0a67733a25f550/zstandard-0.25.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:3c83b0188c852a47cd13ef3bf9209fb0a77fa5374958b8c53aaa699398c6bd7b", size = 4954022, upload-time = "2025-09-14T22:17:41.879Z" }, + { url = "https://files.pythonhosted.org/packages/9a/5c/36ba1e5507d56d2213202ec2b05e8541734af5f2ce378c5d1ceaf4d88dc4/zstandard-0.25.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:1673b7199bbe763365b81a4f3252b8e80f44c9e323fc42940dc8843bfeaf9851", size = 5267889, upload-time = "2025-09-14T22:17:43.577Z" }, + { url = "https://files.pythonhosted.org/packages/70/e8/2ec6b6fb7358b2ec0113ae202647ca7c0e9d15b61c005ae5225ad0995df5/zstandard-0.25.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:0be7622c37c183406f3dbf0cba104118eb16a4ea7359eeb5752f0794882fc250", size = 5433952, upload-time = "2025-09-14T22:17:45.271Z" }, + { url = "https://files.pythonhosted.org/packages/7b/01/b5f4d4dbc59ef193e870495c6f1275f5b2928e01ff5a81fecb22a06e22fb/zstandard-0.25.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:5f5e4c2a23ca271c218ac025bd7d635597048b366d6f31f420aaeb715239fc98", size = 5814054, upload-time = "2025-09-14T22:17:47.08Z" }, + { url = 
"https://files.pythonhosted.org/packages/b2/e5/fbd822d5c6f427cf158316d012c5a12f233473c2f9c5fe5ab1ae5d21f3d8/zstandard-0.25.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4f187a0bb61b35119d1926aee039524d1f93aaf38a9916b8c4b78ac8514a0aaf", size = 5360113, upload-time = "2025-09-14T22:17:48.893Z" }, + { url = "https://files.pythonhosted.org/packages/8e/e0/69a553d2047f9a2c7347caa225bb3a63b6d7704ad74610cb7823baa08ed7/zstandard-0.25.0-cp313-cp313-win32.whl", hash = "sha256:7030defa83eef3e51ff26f0b7bfb229f0204b66fe18e04359ce3474ac33cbc09", size = 436936, upload-time = "2025-09-14T22:17:52.658Z" }, + { url = "https://files.pythonhosted.org/packages/d9/82/b9c06c870f3bd8767c201f1edbdf9e8dc34be5b0fbc5682c4f80fe948475/zstandard-0.25.0-cp313-cp313-win_amd64.whl", hash = "sha256:1f830a0dac88719af0ae43b8b2d6aef487d437036468ef3c2ea59c51f9d55fd5", size = 506232, upload-time = "2025-09-14T22:17:50.402Z" }, + { url = "https://files.pythonhosted.org/packages/d4/57/60c3c01243bb81d381c9916e2a6d9e149ab8627c0c7d7abb2d73384b3c0c/zstandard-0.25.0-cp313-cp313-win_arm64.whl", hash = "sha256:85304a43f4d513f5464ceb938aa02c1e78c2943b29f44a750b48b25ac999a049", size = 462671, upload-time = "2025-09-14T22:17:51.533Z" }, + { url = "https://files.pythonhosted.org/packages/3d/5c/f8923b595b55fe49e30612987ad8bf053aef555c14f05bb659dd5dbe3e8a/zstandard-0.25.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:e29f0cf06974c899b2c188ef7f783607dbef36da4c242eb6c82dcd8b512855e3", size = 795887, upload-time = "2025-09-14T22:17:54.198Z" }, + { url = "https://files.pythonhosted.org/packages/8d/09/d0a2a14fc3439c5f874042dca72a79c70a532090b7ba0003be73fee37ae2/zstandard-0.25.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:05df5136bc5a011f33cd25bc9f506e7426c0c9b3f9954f056831ce68f3b6689f", size = 640658, upload-time = "2025-09-14T22:17:55.423Z" }, + { url = 
"https://files.pythonhosted.org/packages/5d/7c/8b6b71b1ddd517f68ffb55e10834388d4f793c49c6b83effaaa05785b0b4/zstandard-0.25.0-cp314-cp314-manylinux2010_i686.manylinux_2_12_i686.manylinux_2_28_i686.whl", hash = "sha256:f604efd28f239cc21b3adb53eb061e2a205dc164be408e553b41ba2ffe0ca15c", size = 5379849, upload-time = "2025-09-14T22:17:57.372Z" }, + { url = "https://files.pythonhosted.org/packages/a4/86/a48e56320d0a17189ab7a42645387334fba2200e904ee47fc5a26c1fd8ca/zstandard-0.25.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:223415140608d0f0da010499eaa8ccdb9af210a543fac54bce15babbcfc78439", size = 5058095, upload-time = "2025-09-14T22:17:59.498Z" }, + { url = "https://files.pythonhosted.org/packages/f8/ad/eb659984ee2c0a779f9d06dbfe45e2dc39d99ff40a319895df2d3d9a48e5/zstandard-0.25.0-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:2e54296a283f3ab5a26fc9b8b5d4978ea0532f37b231644f367aa588930aa043", size = 5551751, upload-time = "2025-09-14T22:18:01.618Z" }, + { url = "https://files.pythonhosted.org/packages/61/b3/b637faea43677eb7bd42ab204dfb7053bd5c4582bfe6b1baefa80ac0c47b/zstandard-0.25.0-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:ca54090275939dc8ec5dea2d2afb400e0f83444b2fc24e07df7fdef677110859", size = 6364818, upload-time = "2025-09-14T22:18:03.769Z" }, + { url = "https://files.pythonhosted.org/packages/31/dc/cc50210e11e465c975462439a492516a73300ab8caa8f5e0902544fd748b/zstandard-0.25.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e09bb6252b6476d8d56100e8147b803befa9a12cea144bbe629dd508800d1ad0", size = 5560402, upload-time = "2025-09-14T22:18:05.954Z" }, + { url = "https://files.pythonhosted.org/packages/c9/ae/56523ae9c142f0c08efd5e868a6da613ae76614eca1305259c3bf6a0ed43/zstandard-0.25.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = 
"sha256:a9ec8c642d1ec73287ae3e726792dd86c96f5681eb8df274a757bf62b750eae7", size = 4955108, upload-time = "2025-09-14T22:18:07.68Z" }, + { url = "https://files.pythonhosted.org/packages/98/cf/c899f2d6df0840d5e384cf4c4121458c72802e8bda19691f3b16619f51e9/zstandard-0.25.0-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:a4089a10e598eae6393756b036e0f419e8c1d60f44a831520f9af41c14216cf2", size = 5269248, upload-time = "2025-09-14T22:18:09.753Z" }, + { url = "https://files.pythonhosted.org/packages/1b/c0/59e912a531d91e1c192d3085fc0f6fb2852753c301a812d856d857ea03c6/zstandard-0.25.0-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:f67e8f1a324a900e75b5e28ffb152bcac9fbed1cc7b43f99cd90f395c4375344", size = 5430330, upload-time = "2025-09-14T22:18:11.966Z" }, + { url = "https://files.pythonhosted.org/packages/a0/1d/7e31db1240de2df22a58e2ea9a93fc6e38cc29353e660c0272b6735d6669/zstandard-0.25.0-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:9654dbc012d8b06fc3d19cc825af3f7bf8ae242226df5f83936cb39f5fdc846c", size = 5811123, upload-time = "2025-09-14T22:18:13.907Z" }, + { url = "https://files.pythonhosted.org/packages/f6/49/fac46df5ad353d50535e118d6983069df68ca5908d4d65b8c466150a4ff1/zstandard-0.25.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:4203ce3b31aec23012d3a4cf4a2ed64d12fea5269c49aed5e4c3611b938e4088", size = 5359591, upload-time = "2025-09-14T22:18:16.465Z" }, + { url = "https://files.pythonhosted.org/packages/c2/38/f249a2050ad1eea0bb364046153942e34abba95dd5520af199aed86fbb49/zstandard-0.25.0-cp314-cp314-win32.whl", hash = "sha256:da469dc041701583e34de852d8634703550348d5822e66a0c827d39b05365b12", size = 444513, upload-time = "2025-09-14T22:18:20.61Z" }, + { url = "https://files.pythonhosted.org/packages/3a/43/241f9615bcf8ba8903b3f0432da069e857fc4fd1783bd26183db53c4804b/zstandard-0.25.0-cp314-cp314-win_amd64.whl", hash = "sha256:c19bcdd826e95671065f8692b5a4aa95c52dc7a02a4c5a0cac46deb879a017a2", size = 516118, upload-time = "2025-09-14T22:18:17.849Z" }, 
+ { url = "https://files.pythonhosted.org/packages/f0/ef/da163ce2450ed4febf6467d77ccb4cd52c4c30ab45624bad26ca0a27260c/zstandard-0.25.0-cp314-cp314-win_arm64.whl", hash = "sha256:d7541afd73985c630bafcd6338d2518ae96060075f9463d7dc14cfb33514383d", size = 476940, upload-time = "2025-09-14T22:18:19.088Z" }, +] From df9a55c75301125497d808cf6131b118559b0fc0 Mon Sep 17 00:00:00 2001 From: Maple Xu Date: Mon, 16 Mar 2026 20:54:59 -0400 Subject: [PATCH 02/30] Refactor LangSmith interceptor: add ReplaySafeRunTree, reduce boilerplate - Add ReplaySafeRunTree wrapper that handles replay skipping and sandbox safety (post/end/patch no-op during replay, sandbox_unrestricted in workflow context), inspired by OTel plugin's _ReplaySafeSpan pattern - Add config.maybe_run() to eliminate repeated config kwargs at every call site - Add _traced_call (client outbound) and _traced_outbound (workflow outbound) helpers to reduce interceptor methods to one-liners - Fold _extract_context into _workflow_maybe_run for workflow inbound - Remove _safe_post, _safe_patch helpers (internalized in wrapper) - Remove in_workflow parameter from _maybe_run (wrapper detects it) - Establish consistent wrapping invariant: all run references are ReplaySafeRunTree, unwrapping is unconditional ._run at RunTree constructor boundary - Parametrize redundant unit tests (client outbound, workflow inbound/outbound) and remove duplicate test - Remove _make_interceptor test helper, use LangSmithInterceptor directly - Collapse plugin constructor tests into one, add comprehensive plugin integration test, remove redundant sandbox tests Co-Authored-By: Claude Opus 4.6 --- temporalio/contrib/langsmith/_interceptor.py | 295 ++++++++----------- tests/contrib/langsmith/test_interceptor.py | 46 +-- 2 files changed, 155 insertions(+), 186 deletions(-) diff --git a/temporalio/contrib/langsmith/_interceptor.py b/temporalio/contrib/langsmith/_interceptor.py index 2d34b8c15..1f983c1cd 100644 --- 
a/temporalio/contrib/langsmith/_interceptor.py +++ b/temporalio/contrib/langsmith/_interceptor.py @@ -75,7 +75,7 @@ def _extract_context( if not header: return None ls_headers = _payload_converter.from_payloads([header])[0] - return RunTree.from_headers(ls_headers) + return ReplaySafeRunTree(RunTree.from_headers(ls_headers)) def _inject_nexus_context( @@ -98,7 +98,7 @@ def _extract_nexus_context( if not raw: return None ls_headers = json.loads(raw) - return RunTree.from_headers(ls_headers) + return ReplaySafeRunTree(RunTree.from_headers(ls_headers)) # --------------------------------------------------------------------------- @@ -115,24 +115,54 @@ def _is_replaying() -> bool: # --------------------------------------------------------------------------- -# Sandbox-safe post/patch helpers +# ReplaySafeRunTree wrapper # --------------------------------------------------------------------------- -def _safe_post(run_tree: Any, in_workflow: bool) -> None: - if in_workflow: - with temporalio.workflow.unsafe.sandbox_unrestricted(): - run_tree.post() - else: - run_tree.post() +class ReplaySafeRunTree: + """Wraps a RunTree to handle replay skipping and sandbox safety transparently. + During replay, ``post()``, ``end()``, and ``patch()`` become no-ops. + Inside a workflow sandbox, ``post()`` and ``patch()`` are wrapped in + ``sandbox_unrestricted()``. 
+ """ -def _safe_patch(run_tree: Any, in_workflow: bool) -> None: - if in_workflow: - with temporalio.workflow.unsafe.sandbox_unrestricted(): - run_tree.patch() - else: - run_tree.patch() + def __init__(self, run_tree: Any) -> None: + self._run = run_tree + + def to_headers(self) -> dict[str, str]: + return self._run.to_headers() + + @property + def ls_client(self) -> Any: + return self._run.ls_client + + @ls_client.setter + def ls_client(self, value: Any) -> None: + self._run.ls_client = value + + def post(self) -> None: + if _is_replaying(): + return + if temporalio.workflow.in_workflow(): + with temporalio.workflow.unsafe.sandbox_unrestricted(): + self._run.post() + else: + self._run.post() + + def end(self, **kwargs: Any) -> None: + if _is_replaying(): + return + self._run.end(**kwargs) + + def patch(self) -> None: + if _is_replaying(): + return + if temporalio.workflow.in_workflow(): + with temporalio.workflow.unsafe.sandbox_unrestricted(): + self._run.patch() + else: + self._run.patch() # --------------------------------------------------------------------------- @@ -160,30 +190,26 @@ def _maybe_run( tags: list[str] | None = None, parent: Any | None = None, project_name: str | None = None, - in_workflow: bool = False, ) -> Iterator[Any | None]: - """Create a LangSmith run, handling errors and replay. + """Create a LangSmith run, handling errors. - - If replaying, yields None (skip tracing entirely). - If add_temporal_runs is False, yields None (no run created). Context propagation is handled unconditionally by callers. - - When a run IS created, sets it as ambient context via - ``tracing_context(parent=run_tree)`` so ``get_current_run_tree()`` + - When a run IS created, wraps it in :class:`ReplaySafeRunTree` for + replay and sandbox safety, then sets it as ambient context via + ``tracing_context(parent=raw_run)`` so ``get_current_run_tree()`` returns it and ``_inject_current_context()`` can inject it. 
- On exception: marks run as errored (unless benign ApplicationError), re-raises. - - If in_workflow is True, wraps post()/patch() in sandbox_unrestricted(). """ - if _is_replaying(): - yield None - return - if not add_temporal_runs: yield None return # If no explicit parent, inherit from ambient @traceable context if parent is None: - parent = get_current_run_tree() + raw = get_current_run_tree() + if raw is not None: + parent = ReplaySafeRunTree(raw) kwargs: dict[str, Any] = dict( name=name, @@ -194,24 +220,25 @@ def _maybe_run( if project_name is not None: kwargs["project_name"] = project_name if parent is not None: - kwargs["parent_run"] = parent + kwargs["parent_run"] = parent._run if metadata: kwargs["extra"] = {"metadata": metadata} if tags: kwargs["tags"] = tags - run_tree = RunTree(**kwargs) - _safe_post(run_tree, in_workflow) + raw_run = RunTree(**kwargs) + run_tree = ReplaySafeRunTree(raw_run) + run_tree.post() try: - with tracing_context(parent=run_tree, client=client): + with tracing_context(parent=raw_run, client=client): yield run_tree except Exception as exc: if not _is_benign_error(exc): run_tree.end(error=f"{type(exc).__name__}: {exc}") - _safe_patch(run_tree, in_workflow) + run_tree.patch() raise else: run_tree.end(outputs={"status": "ok"}) - _safe_patch(run_tree, in_workflow) + run_tree.patch() # --------------------------------------------------------------------------- @@ -246,6 +273,29 @@ def __init__( self._default_metadata = default_metadata or {} self._default_tags = default_tags or [] + @contextmanager + def maybe_run( + self, + name: str, + *, + run_type: str = "chain", + parent: Any | None = None, + extra_metadata: dict[str, Any] | None = None, + ) -> Iterator[Any | None]: + """Create a LangSmith run with this interceptor's config already applied.""" + metadata = {**self._default_metadata, **(extra_metadata or {})} + with _maybe_run( + self._client, + name, + add_temporal_runs=self._add_temporal_runs, + run_type=run_type, + 
metadata=metadata, + tags=list(self._default_tags), + parent=parent, + project_name=self._project_name, + ) as run: + yield run + def intercept_client( self, next: temporalio.client.OutboundInterceptor ) -> temporalio.client.OutboundInterceptor: @@ -288,65 +338,35 @@ def __init__( super().__init__(next) self._config = config + @contextmanager + def _traced_call(self, name: str, input: Any) -> Iterator[None]: + """Wrap a client call with a LangSmith run and inject context into headers.""" + with self._config.maybe_run(name): + input.headers = _inject_current_context(input.headers) + yield + async def start_workflow(self, input: Any) -> Any: prefix = ( "SignalWithStartWorkflow" if input.start_signal else "StartWorkflow" ) - with _maybe_run( - self._config._client, - f"{prefix}:{input.workflow}", - add_temporal_runs=self._config._add_temporal_runs, - metadata={**self._config._default_metadata}, - tags=list(self._config._default_tags), - project_name=self._config._project_name, - ): - input.headers = _inject_current_context(input.headers) + with self._traced_call(f"{prefix}:{input.workflow}", input): return await super().start_workflow(input) async def query_workflow(self, input: Any) -> Any: - with _maybe_run( - self._config._client, - f"QueryWorkflow:{input.query}", - add_temporal_runs=self._config._add_temporal_runs, - metadata={**self._config._default_metadata}, - tags=list(self._config._default_tags), - project_name=self._config._project_name, - ): - input.headers = _inject_current_context(input.headers) + with self._traced_call(f"QueryWorkflow:{input.query}", input): return await super().query_workflow(input) async def signal_workflow(self, input: Any) -> None: - with _maybe_run( - self._config._client, - f"SignalWorkflow:{input.signal}", - add_temporal_runs=self._config._add_temporal_runs, - metadata={**self._config._default_metadata}, - tags=list(self._config._default_tags), - project_name=self._config._project_name, - ): - input.headers = 
_inject_current_context(input.headers) + with self._traced_call(f"SignalWorkflow:{input.signal}", input): return await super().signal_workflow(input) async def start_workflow_update(self, input: Any) -> Any: - with _maybe_run( - self._config._client, - f"StartWorkflowUpdate:{input.update}", - add_temporal_runs=self._config._add_temporal_runs, - metadata={**self._config._default_metadata}, - tags=list(self._config._default_tags), - project_name=self._config._project_name, - ): - input.headers = _inject_current_context(input.headers) + with self._traced_call(f"StartWorkflowUpdate:{input.update}", input): return await super().start_workflow_update(input) async def start_update_with_start_workflow(self, input: Any) -> Any: - with _maybe_run( - self._config._client, + with self._config.maybe_run( f"StartUpdateWithStartWorkflow:{input.start_workflow_input.workflow}", - add_temporal_runs=self._config._add_temporal_runs, - metadata={**self._config._default_metadata}, - tags=list(self._config._default_tags), - project_name=self._config._project_name, ): input.start_workflow_input.headers = _inject_current_context( input.start_workflow_input.headers @@ -378,15 +398,14 @@ def __init__( async def execute_activity(self, input: Any) -> Any: parent = _extract_context(input.headers) info = temporalio.activity.info() - metadata = { - **self._config._default_metadata, + extra_metadata = { "temporalWorkflowID": info.workflow_id or "", "temporalRunID": info.workflow_run_id or "", "temporalActivityID": info.activity_id, } # Unconditionally set tracing context so @traceable functions inside # activities can use the plugin's LangSmith client and inherit parent. - # When add_temporal_runs=True: _maybe_run overrides with the RunActivity run. + # When add_temporal_runs=True: maybe_run overrides with the RunActivity run. # When add_temporal_runs=False: parent (if any) remains active for @traceable, # and the client is available even without a parent. 
# Override the parent's ls_client so @traceable children (via create_child) @@ -400,15 +419,11 @@ async def execute_activity(self, input: Any) -> Any: if parent: ctx_kwargs["parent"] = parent with tracing_context(**ctx_kwargs): - with _maybe_run( - self._config._client, + with self._config.maybe_run( f"RunActivity:{info.activity_type}", - add_temporal_runs=self._config._add_temporal_runs, run_type="tool", - metadata=metadata, - tags=list(self._config._default_tags), parent=parent, - project_name=self._config._project_name, + extra_metadata=extra_metadata, ): return await super().execute_activity(input) @@ -433,28 +448,22 @@ def init(self, outbound: temporalio.worker.WorkflowOutboundInterceptor) -> None: @contextmanager def _workflow_maybe_run( - self, name: str, parent: Any | None = None + self, name: str, headers: Mapping[str, Payload] | None = None ) -> Iterator[Any | None]: """Workflow-specific run creation with metadata. - Stores the run (or parent fallback) as ``_current_run`` so the outbound - interceptor can propagate context even when ``add_temporal_runs=False``. + Extracts parent from headers (if provided) and stores the run (or parent + fallback) as ``_current_run`` so the outbound interceptor can propagate + context even when ``add_temporal_runs=False``. 
""" + parent = _extract_context(headers) if headers else None info = temporalio.workflow.info() - metadata = { - **self._config._default_metadata, + extra_metadata = { "temporalWorkflowID": info.workflow_id, "temporalRunID": info.run_id, } - with _maybe_run( - self._config._client, - name, - add_temporal_runs=self._config._add_temporal_runs, - metadata=metadata, - tags=list(self._config._default_tags), - parent=parent, - project_name=self._config._project_name, - in_workflow=True, + with self._config.maybe_run( + name, parent=parent, extra_metadata=extra_metadata ) as run: self._current_run = run or parent try: @@ -463,39 +472,25 @@ def _workflow_maybe_run( self._current_run = None async def execute_workflow(self, input: Any) -> Any: - parent = _extract_context(input.headers) with self._workflow_maybe_run( - f"RunWorkflow:{temporalio.workflow.info().workflow_type}", - parent=parent, + f"RunWorkflow:{temporalio.workflow.info().workflow_type}", input.headers ): return await super().execute_workflow(input) async def handle_signal(self, input: Any) -> None: - parent = _extract_context(input.headers) - with self._workflow_maybe_run( - f"HandleSignal:{input.signal}", parent=parent - ): + with self._workflow_maybe_run(f"HandleSignal:{input.signal}", input.headers): return await super().handle_signal(input) async def handle_query(self, input: Any) -> Any: - parent = _extract_context(input.headers) - with self._workflow_maybe_run( - f"HandleQuery:{input.query}", parent=parent - ): + with self._workflow_maybe_run(f"HandleQuery:{input.query}", input.headers): return await super().handle_query(input) def handle_update_validator(self, input: Any) -> None: - parent = _extract_context(input.headers) - with self._workflow_maybe_run( - f"ValidateUpdate:{input.update}", parent=parent - ): + with self._workflow_maybe_run(f"ValidateUpdate:{input.update}", input.headers): return super().handle_update_validator(input) async def handle_update_handler(self, input: Any) -> Any: - parent = 
_extract_context(input.headers) - with self._workflow_maybe_run( - f"HandleUpdate:{input.update}", parent=parent - ): + with self._workflow_maybe_run(f"HandleUpdate:{input.update}", input.headers): return await super().handle_update_handler(input) @@ -520,59 +515,34 @@ def __init__( self._inbound = inbound @contextmanager - def _workflow_maybe_run(self, name: str) -> Iterator[Any | None]: - """Outbound workflow run creation, parented under inbound's current run.""" - with _maybe_run( - self._config._client, - name, - add_temporal_runs=self._config._add_temporal_runs, - metadata={**self._config._default_metadata}, - tags=list(self._config._default_tags), - parent=self._inbound._current_run, - project_name=self._config._project_name, - in_workflow=True, + def _traced_outbound(self, name: str, input: Any) -> Iterator[Any | None]: + """Outbound workflow run creation with context injection into input.headers.""" + with self._config.maybe_run( + name, parent=self._inbound._current_run ) as run: - yield run - - def start_activity(self, input: Any) -> Any: - with self._workflow_maybe_run(f"StartActivity:{input.activity}") as run: context_source = run or self._inbound._current_run if context_source: input.headers = _inject_context(input.headers, context_source) + yield run + + def start_activity(self, input: Any) -> Any: + with self._traced_outbound(f"StartActivity:{input.activity}", input): return super().start_activity(input) def start_local_activity(self, input: Any) -> Any: - with self._workflow_maybe_run(f"StartActivity:{input.activity}") as run: - context_source = run or self._inbound._current_run - if context_source: - input.headers = _inject_context(input.headers, context_source) + with self._traced_outbound(f"StartActivity:{input.activity}", input): return super().start_local_activity(input) async def start_child_workflow(self, input: Any) -> Any: - with self._workflow_maybe_run( - f"StartChildWorkflow:{input.workflow}" - ) as run: - context_source = run or 
self._inbound._current_run - if context_source: - input.headers = _inject_context(input.headers, context_source) + with self._traced_outbound(f"StartChildWorkflow:{input.workflow}", input): return await super().start_child_workflow(input) async def signal_child_workflow(self, input: Any) -> None: - with self._workflow_maybe_run( - f"SignalChildWorkflow:{input.signal}" - ) as run: - context_source = run or self._inbound._current_run - if context_source: - input.headers = _inject_context(input.headers, context_source) + with self._traced_outbound(f"SignalChildWorkflow:{input.signal}", input): return await super().signal_child_workflow(input) async def signal_external_workflow(self, input: Any) -> None: - with self._workflow_maybe_run( - f"SignalExternalWorkflow:{input.signal}" - ) as run: - context_source = run or self._inbound._current_run - if context_source: - input.headers = _inject_context(input.headers, context_source) + with self._traced_outbound(f"SignalExternalWorkflow:{input.signal}", input): return await super().signal_external_workflow(input) def continue_as_new(self, input: Any) -> NoReturn: @@ -583,8 +553,9 @@ def continue_as_new(self, input: Any) -> NoReturn: super().continue_as_new(input) async def start_nexus_operation(self, input: Any) -> Any: - with self._workflow_maybe_run( - f"StartNexusOperation:{input.service}/{input.operation_name}" + with self._config.maybe_run( + f"StartNexusOperation:{input.service}/{input.operation_name}", + parent=self._inbound._current_run, ) as run: context_source = run or self._inbound._current_run if context_source: @@ -614,28 +585,18 @@ def __init__( async def execute_nexus_operation_start(self, input: Any) -> Any: parent = _extract_nexus_context(input.ctx.headers) - with _maybe_run( - self._config._client, + with self._config.maybe_run( f"RunStartNexusOperationHandler:{input.ctx.service}/{input.ctx.operation}", - add_temporal_runs=self._config._add_temporal_runs, run_type="tool", - 
metadata={**self._config._default_metadata}, - tags=list(self._config._default_tags), parent=parent, - project_name=self._config._project_name, ): return await self.next.execute_nexus_operation_start(input) async def execute_nexus_operation_cancel(self, input: Any) -> Any: parent = _extract_nexus_context(input.ctx.headers) - with _maybe_run( - self._config._client, + with self._config.maybe_run( f"RunCancelNexusOperationHandler:{input.ctx.service}/{input.ctx.operation}", - add_temporal_runs=self._config._add_temporal_runs, run_type="tool", - metadata={**self._config._default_metadata}, - tags=list(self._config._default_tags), parent=parent, - project_name=self._config._project_name, ): return await self.next.execute_nexus_operation_cancel(input) diff --git a/tests/contrib/langsmith/test_interceptor.py b/tests/contrib/langsmith/test_interceptor.py index 2ff8e3eb1..b22e122a6 100644 --- a/tests/contrib/langsmith/test_interceptor.py +++ b/tests/contrib/langsmith/test_interceptor.py @@ -12,6 +12,7 @@ from temporalio.contrib.langsmith import LangSmithInterceptor from temporalio.contrib.langsmith._interceptor import ( HEADER_KEY, + ReplaySafeRunTree, _extract_context, _inject_context, _maybe_run, @@ -107,8 +108,9 @@ def test_inject_extract_roundtrip(self, MockRunTree: Any) -> None: MockRunTree.from_headers.return_value = mock_extracted extracted = _extract_context(result) - # extracted should be reconstructed from headers - assert extracted is mock_extracted + # extracted should be a ReplaySafeRunTree wrapping the reconstructed run + assert isinstance(extracted, ReplaySafeRunTree) + assert extracted._run is mock_extracted MockRunTree.from_headers.assert_called_once() def test_extract_missing_header(self) -> None: @@ -142,20 +144,26 @@ class TestReplaySafety: @patch(_PATCH_RUNTREE) @patch(_PATCH_IS_REPLAYING, return_value=True) @patch(_PATCH_IN_WORKFLOW, return_value=True) - def test_skip_trace_during_replay( + def test_replay_noop_post_end_patch( self, mock_in_wf: Any, 
mock_replaying: Any, MockRunTree: Any ) -> None: - """During replay, _maybe_run yields None — no RunTree created.""" + """During replay, RunTree is created but post/end/patch are no-ops.""" + mock_run = _make_mock_run() + MockRunTree.return_value = mock_run mock_client = MagicMock() with _maybe_run( mock_client, "TestRun", add_temporal_runs=True, - in_workflow=True, ) as run: - assert run is None - # RunTree should never be instantiated during replay - MockRunTree.assert_not_called() + assert isinstance(run, ReplaySafeRunTree) + assert run._run is mock_run + # RunTree IS created (wrapped in ReplaySafeRunTree) + MockRunTree.assert_called_once() + # But post/end/patch are no-ops during replay + mock_run.post.assert_not_called() + mock_run.end.assert_not_called() + mock_run.patch.assert_not_called() @patch(_PATCH_RUNTREE) @patch(_PATCH_IS_REPLAYING, return_value=False) @@ -163,7 +171,7 @@ def test_skip_trace_during_replay( def test_create_trace_when_not_replaying( self, mock_in_wf: Any, mock_replaying: Any, MockRunTree: Any ) -> None: - """When not replaying (but in workflow), _maybe_run creates a RunTree.""" + """When not replaying (but in workflow), _maybe_run creates a ReplaySafeRunTree.""" mock_run = _make_mock_run() MockRunTree.return_value = mock_run mock_client = MagicMock() @@ -171,9 +179,9 @@ def test_create_trace_when_not_replaying( mock_client, "TestRun", add_temporal_runs=True, - in_workflow=True, ) as run: - assert run is mock_run + assert isinstance(run, ReplaySafeRunTree) + assert run._run is mock_run MockRunTree.assert_called_once() assert MockRunTree.call_args.kwargs["name"] == "TestRun" @@ -182,7 +190,7 @@ def test_create_trace_when_not_replaying( def test_create_trace_outside_workflow( self, mock_in_wf: Any, MockRunTree: Any ) -> None: - """Outside workflow (client/activity), RunTree IS created. 
No is_replaying check.""" + """Outside workflow (client/activity), RunTree IS created.""" mock_run = _make_mock_run() MockRunTree.return_value = mock_run mock_client = MagicMock() @@ -190,9 +198,9 @@ def test_create_trace_outside_workflow( mock_client, "TestRun", add_temporal_runs=True, - in_workflow=False, ) as run: - assert run is mock_run + assert isinstance(run, ReplaySafeRunTree) + assert run._run is mock_run MockRunTree.assert_called_once() @@ -219,7 +227,7 @@ def test_exception_marks_run_errored( "TestRun", add_temporal_runs=True, ) as run: - assert run is mock_run + assert run._run is mock_run raise RuntimeError("boom") # run.end should have been called with error containing "boom" mock_run.end.assert_called() @@ -244,7 +252,7 @@ def test_benign_application_error_not_marked( "TestRun", add_temporal_runs=True, ) as run: - assert run is mock_run + assert run._run is mock_run raise ApplicationError( "benign", category=ApplicationErrorCategory.BENIGN, @@ -271,7 +279,7 @@ def test_non_benign_application_error_marked( "TestRun", add_temporal_runs=True, ) as run: - assert run is mock_run + assert run._run is mock_run raise ApplicationError("bad", non_retryable=True) mock_run.end.assert_called() end_kwargs = mock_run.end.call_args.kwargs @@ -292,7 +300,7 @@ def test_success_completes_normally( "TestRun", add_temporal_runs=True, ) as run: - assert run is mock_run + assert run._run is mock_run mock_run.end.assert_called_once() end_kwargs = mock_run.end.call_args.kwargs assert end_kwargs.get("outputs") == {"status": "ok"} @@ -316,7 +324,7 @@ def test_cancelled_error_propagates_without_marking_run( "TestRun", add_temporal_runs=True, ) as run: - assert run is mock_run + assert run._run is mock_run raise asyncio.CancelledError() # run.end should NOT have been called with error= end_calls = mock_run.end.call_args_list From 5d25abc784b537664b0aa9e2c69d2ac6f5619549 Mon Sep 17 00:00:00 2001 From: Maple Xu Date: Mon, 16 Mar 2026 21:02:33 -0400 Subject: [PATCH 03/30] Fix 
import sorting and extract _get_current_run_safe helper Fix ruff I001 import sorting violations in _interceptor.py and test_integration.py. Extract _get_current_run_safe() helper for reading ambient LangSmith context with replay safety. Co-Authored-By: Claude Opus 4.6 --- temporalio/contrib/langsmith/_interceptor.py | 17 +++++++++++------ tests/contrib/langsmith/test_integration.py | 2 -- 2 files changed, 11 insertions(+), 8 deletions(-) diff --git a/temporalio/contrib/langsmith/_interceptor.py b/temporalio/contrib/langsmith/_interceptor.py index 1f983c1cd..7e5e4e4bc 100644 --- a/temporalio/contrib/langsmith/_interceptor.py +++ b/temporalio/contrib/langsmith/_interceptor.py @@ -6,14 +6,15 @@ from contextlib import contextmanager from typing import Any, Iterator, Mapping, NoReturn +from langsmith import tracing_context +from langsmith.run_helpers import get_current_run_tree +from langsmith.run_trees import RunTree + import temporalio.activity import temporalio.client import temporalio.converter import temporalio.worker import temporalio.workflow -from langsmith import tracing_context -from langsmith.run_helpers import get_current_run_tree -from langsmith.run_trees import RunTree from temporalio.api.common.v1 import Payload from temporalio.exceptions import ApplicationError, ApplicationErrorCategory @@ -47,6 +48,12 @@ def _inject_context( } +def _get_current_run_safe() -> ReplaySafeRunTree | None: + """Get the current ambient LangSmith run tree, wrapped for replay safety.""" + raw = get_current_run_tree() + return ReplaySafeRunTree(raw) if raw is not None else None + + def _inject_current_context( headers: Mapping[str, Payload], ) -> Mapping[str, Payload]: @@ -207,9 +214,7 @@ def _maybe_run( # If no explicit parent, inherit from ambient @traceable context if parent is None: - raw = get_current_run_tree() - if raw is not None: - parent = ReplaySafeRunTree(raw) + parent = _get_current_run_safe() kwargs: dict[str, Any] = dict( name=name, diff --git 
a/tests/contrib/langsmith/test_integration.py b/tests/contrib/langsmith/test_integration.py index 133f4377d..bff97dbb7 100644 --- a/tests/contrib/langsmith/test_integration.py +++ b/tests/contrib/langsmith/test_integration.py @@ -15,11 +15,9 @@ from temporalio.contrib.langsmith import LangSmithPlugin from temporalio.exceptions import ApplicationError from temporalio.testing import WorkflowEnvironment - from tests.contrib.langsmith.conftest import InMemoryRunCollector, dump_runs from tests.helpers import new_worker - # --------------------------------------------------------------------------- # Shared @traceable functions and activities # --------------------------------------------------------------------------- From 601d67a5f2ba735c4550e98cf3b5d6ac74d3a2d4 Mon Sep 17 00:00:00 2001 From: Maple Xu Date: Mon, 16 Mar 2026 21:43:40 -0400 Subject: [PATCH 04/30] Add Nexus integration test coverage Co-Authored-By: Claude Opus 4.6 --- tests/contrib/langsmith/test_integration.py | 59 ++++++++++++++++++++- tests/contrib/langsmith/test_plugin.py | 16 ++++++ 2 files changed, 73 insertions(+), 2 deletions(-) diff --git a/tests/contrib/langsmith/test_integration.py b/tests/contrib/langsmith/test_integration.py index bff97dbb7..358957722 100644 --- a/tests/contrib/langsmith/test_integration.py +++ b/tests/contrib/langsmith/test_integration.py @@ -7,16 +7,18 @@ from typing import Any from unittest.mock import MagicMock +import nexusrpc.handler import pytest from langsmith import traceable, tracing_context -from temporalio import activity, common, workflow +from temporalio import activity, common, nexus, workflow from temporalio.client import Client, WorkflowFailureError from temporalio.contrib.langsmith import LangSmithPlugin from temporalio.exceptions import ApplicationError from temporalio.testing import WorkflowEnvironment from tests.contrib.langsmith.conftest import InMemoryRunCollector, dump_runs from tests.helpers import new_worker +from tests.helpers.nexus import 
make_nexus_endpoint_name # --------------------------------------------------------------------------- # Shared @traceable functions and activities @@ -64,6 +66,29 @@ async def run(self) -> str: ) +@workflow.defn +class SimpleNexusWorkflow: + @workflow.run + async def run(self, input: str) -> str: + return await workflow.execute_activity( + traceable_activity, + start_to_close_timeout=timedelta(seconds=10), + ) + + +@nexusrpc.handler.service_handler +class NexusService: + @nexus.workflow_run_operation + async def run_operation( + self, ctx: nexus.WorkflowRunOperationContext, input: str + ) -> nexus.WorkflowHandle[str]: + return await ctx.start_workflow( + SimpleNexusWorkflow.run, + input, + id=f"nexus-wf-{ctx.request_id}", + ) + + # --------------------------------------------------------------------------- # Simple/basic workflows and activities # --------------------------------------------------------------------------- @@ -113,7 +138,17 @@ async def run(self) -> str: TraceableActivityWorkflow.run, id=f"child-{workflow.info().workflow_id}", ) - # 4. Wait for signal + # 4. Nexus operation + nexus_client = workflow.create_nexus_client( + endpoint=make_nexus_endpoint_name(workflow.info().task_queue), + service=NexusService, + ) + nexus_handle = await nexus_client.start_operation( + operation=NexusService.run_operation, + input="test-input", + ) + await nexus_handle + # 5. Wait for signal await workflow.wait_condition(lambda: self._signal_received) # 5. 
Wait for update to complete await workflow.wait_condition(lambda: self._complete) @@ -449,8 +484,14 @@ async def user_pipeline() -> str: temporal_client, ComprehensiveWorkflow, TraceableActivityWorkflow, + SimpleNexusWorkflow, activities=[nested_traceable_activity, traceable_activity], + nexus_service_handlers=[NexusService()], ) as worker: + await env.create_nexus_endpoint( + make_nexus_endpoint_name(worker.task_queue), + worker.task_queue, + ) handle = await temporal_client.start_workflow( ComprehensiveWorkflow.run, id=f"comprehensive-{uuid.uuid4()}", @@ -489,6 +530,13 @@ async def user_pipeline() -> str: " StartActivity:traceable_activity", " RunActivity:traceable_activity", " inner_llm_call", + " StartNexusOperation:NexusService/run_operation", + " RunStartNexusOperationHandler:NexusService/run_operation", + " StartWorkflow:SimpleNexusWorkflow", + " RunWorkflow:SimpleNexusWorkflow", + " StartActivity:traceable_activity", + " RunActivity:traceable_activity", + " inner_llm_call", " QueryWorkflow:my_query", " HandleQuery:my_query", " SignalWorkflow:my_signal", @@ -517,8 +565,14 @@ async def user_pipeline() -> str: temporal_client, ComprehensiveWorkflow, TraceableActivityWorkflow, + SimpleNexusWorkflow, activities=[nested_traceable_activity, traceable_activity], + nexus_service_handlers=[NexusService()], ) as worker: + await env.create_nexus_endpoint( + make_nexus_endpoint_name(worker.task_queue), + worker.task_queue, + ) handle = await temporal_client.start_workflow( ComprehensiveWorkflow.run, id=f"comprehensive-no-runs-{uuid.uuid4()}", @@ -547,6 +601,7 @@ async def user_pipeline() -> str: " outer_chain", " inner_llm_call", " inner_llm_call", + " inner_llm_call", ] assert hierarchy == expected, ( f"Hierarchy mismatch.\nExpected:\n{expected}\nActual:\n{hierarchy}" diff --git a/tests/contrib/langsmith/test_plugin.py b/tests/contrib/langsmith/test_plugin.py index 90b3499c1..a474fcd78 100644 --- a/tests/contrib/langsmith/test_plugin.py +++ 
b/tests/contrib/langsmith/test_plugin.py @@ -13,12 +13,15 @@ from tests.contrib.langsmith.conftest import dump_runs from tests.contrib.langsmith.test_integration import ( ComprehensiveWorkflow, + NexusService, + SimpleNexusWorkflow, TraceableActivityWorkflow, _make_client_and_collector, nested_traceable_activity, traceable_activity, ) from tests.helpers import new_worker +from tests.helpers.nexus import make_nexus_endpoint_name class TestPluginConstruction: @@ -60,8 +63,14 @@ async def user_pipeline() -> str: temporal_client, ComprehensiveWorkflow, TraceableActivityWorkflow, + SimpleNexusWorkflow, activities=[nested_traceable_activity, traceable_activity], + nexus_service_handlers=[NexusService()], ) as worker: + await env.create_nexus_endpoint( + make_nexus_endpoint_name(worker.task_queue), + worker.task_queue, + ) handle = await temporal_client.start_workflow( ComprehensiveWorkflow.run, id=f"plugin-comprehensive-{uuid.uuid4()}", @@ -100,6 +109,13 @@ async def user_pipeline() -> str: " StartActivity:traceable_activity", " RunActivity:traceable_activity", " inner_llm_call", + " StartNexusOperation:NexusService/run_operation", + " RunStartNexusOperationHandler:NexusService/run_operation", + " StartWorkflow:SimpleNexusWorkflow", + " RunWorkflow:SimpleNexusWorkflow", + " StartActivity:traceable_activity", + " RunActivity:traceable_activity", + " inner_llm_call", " QueryWorkflow:my_query", " HandleQuery:my_query", " SignalWorkflow:my_signal", From cdb5886bbf99e922e82b768c128d9c14a00e5740 Mon Sep 17 00:00:00 2001 From: Maple Xu Date: Tue, 17 Mar 2026 11:09:45 -0400 Subject: [PATCH 05/30] Apply ruff formatting to all langsmith files Co-Authored-By: Claude Opus 4.6 --- temporalio/contrib/langsmith/_interceptor.py | 8 +- temporalio/contrib/langsmith/_plugin.py | 5 +- tests/contrib/langsmith/test_integration.py | 71 ++++++------- tests/contrib/langsmith/test_interceptor.py | 102 ++++++++++++++----- tests/contrib/langsmith/test_plugin.py | 11 +- 5 files changed, 117 
insertions(+), 80 deletions(-) diff --git a/temporalio/contrib/langsmith/_interceptor.py b/temporalio/contrib/langsmith/_interceptor.py index 7e5e4e4bc..303bc52cc 100644 --- a/temporalio/contrib/langsmith/_interceptor.py +++ b/temporalio/contrib/langsmith/_interceptor.py @@ -351,9 +351,7 @@ def _traced_call(self, name: str, input: Any) -> Iterator[None]: yield async def start_workflow(self, input: Any) -> Any: - prefix = ( - "SignalWithStartWorkflow" if input.start_signal else "StartWorkflow" - ) + prefix = "SignalWithStartWorkflow" if input.start_signal else "StartWorkflow" with self._traced_call(f"{prefix}:{input.workflow}", input): return await super().start_workflow(input) @@ -522,9 +520,7 @@ def __init__( @contextmanager def _traced_outbound(self, name: str, input: Any) -> Iterator[Any | None]: """Outbound workflow run creation with context injection into input.headers.""" - with self._config.maybe_run( - name, parent=self._inbound._current_run - ) as run: + with self._config.maybe_run(name, parent=self._inbound._current_run) as run: context_source = run or self._inbound._current_run if context_source: input.headers = _inject_context(input.headers, context_source) diff --git a/temporalio/contrib/langsmith/_plugin.py b/temporalio/contrib/langsmith/_plugin.py index dfac32ece..5527560d7 100644 --- a/temporalio/contrib/langsmith/_plugin.py +++ b/temporalio/contrib/langsmith/_plugin.py @@ -70,5 +70,8 @@ async def shutdown(self) -> None: if not self.interceptors: return interceptor = self.interceptors[0] - if isinstance(interceptor, LangSmithInterceptor) and interceptor._client is not None: + if ( + isinstance(interceptor, LangSmithInterceptor) + and interceptor._client is not None + ): interceptor._client.flush() diff --git a/tests/contrib/langsmith/test_integration.py b/tests/contrib/langsmith/test_integration.py index 358957722..a730223f2 100644 --- a/tests/contrib/langsmith/test_integration.py +++ b/tests/contrib/langsmith/test_integration.py @@ -283,27 +283,26 
@@ async def test_workflow_activity_trace_hierarchy( " StartActivity:simple_activity", " RunActivity:simple_activity", ] - assert hierarchy == expected, ( - f"Hierarchy mismatch.\nExpected:\n{expected}\nActual:\n{hierarchy}" - ) + assert ( + hierarchy == expected + ), f"Hierarchy mismatch.\nExpected:\n{expected}\nActual:\n{hierarchy}" # Verify run_type: RunActivity is "tool", others are "chain" for run in collector.runs: if run.name == "RunActivity:simple_activity": - assert run.run_type == "tool", ( - f"Expected RunActivity run_type='tool', got '{run.run_type}'" - ) + assert ( + run.run_type == "tool" + ), f"Expected RunActivity run_type='tool', got '{run.run_type}'" else: - assert run.run_type == "chain", ( - f"Expected {run.name} run_type='chain', got '{run.run_type}'" - ) + assert ( + run.run_type == "chain" + ), f"Expected {run.name} run_type='chain', got '{run.run_type}'" # Verify successful runs have outputs == {"status": "ok"} for run in collector.runs: - assert run.outputs == {"status": "ok"}, ( - f"Expected {run.name} outputs={{'status': 'ok'}}, got {run.outputs}" - ) - + assert run.outputs == { + "status": "ok" + }, f"Expected {run.name} outputs={{'status': 'ok'}}, got {run.outputs}" # --------------------------------------------------------------------------- @@ -380,9 +379,9 @@ async def test_activity_failure_marked( " StartActivity:failing_activity", " RunActivity:failing_activity", ] - assert hierarchy == expected, ( - f"Hierarchy mismatch.\nExpected:\n{expected}\nActual:\n{hierarchy}" - ) + assert ( + hierarchy == expected + ), f"Hierarchy mismatch.\nExpected:\n{expected}\nActual:\n{hierarchy}" # Verify the RunActivity run has an error activity_runs = [ r for r in collector.runs if r.name == "RunActivity:failing_activity" @@ -414,13 +413,11 @@ async def test_workflow_failure_marked( "StartWorkflow:FailingWorkflow", " RunWorkflow:FailingWorkflow", ] - assert hierarchy == expected, ( - f"Hierarchy 
mismatch.\nExpected:\n{expected}\nActual:\n{hierarchy}" - ) + assert ( + hierarchy == expected + ), f"Hierarchy mismatch.\nExpected:\n{expected}\nActual:\n{hierarchy}" # Verify the RunWorkflow run has an error - wf_runs = [ - r for r in collector.runs if r.name == "RunWorkflow:FailingWorkflow" - ] + wf_runs = [r for r in collector.runs if r.name == "RunWorkflow:FailingWorkflow"] assert len(wf_runs) == 1 assert wf_runs[0].error == "ApplicationError: workflow-failed" @@ -451,14 +448,12 @@ async def test_benign_error_not_marked( " StartActivity:benign_failing_activity", " RunActivity:benign_failing_activity", ] - assert hierarchy == expected, ( - f"Hierarchy mismatch.\nExpected:\n{expected}\nActual:\n{hierarchy}" - ) + assert ( + hierarchy == expected + ), f"Hierarchy mismatch.\nExpected:\n{expected}\nActual:\n{hierarchy}" # The RunActivity run for benign error should NOT have error set activity_runs = [ - r - for r in collector.runs - if r.name == "RunActivity:benign_failing_activity" + r for r in collector.runs if r.name == "RunActivity:benign_failing_activity" ] assert len(activity_runs) == 1 assert activity_runs[0].error is None @@ -502,9 +497,7 @@ async def user_pipeline() -> str: # Signal await handle.signal(ComprehensiveWorkflow.my_signal, "hello") # Update (completes the workflow) - await handle.execute_update( - ComprehensiveWorkflow.my_update, "finish" - ) + await handle.execute_update(ComprehensiveWorkflow.my_update, "finish") return await handle.result() with tracing_context(client=mock_ls_client, enabled=True): @@ -545,9 +538,9 @@ async def user_pipeline() -> str: " ValidateUpdate:my_update", " HandleUpdate:my_update", ] - assert hierarchy == expected, ( - f"Hierarchy mismatch.\nExpected:\n{expected}\nActual:\n{hierarchy}" - ) + assert ( + hierarchy == expected + ), f"Hierarchy mismatch.\nExpected:\n{expected}\nActual:\n{hierarchy}" async def test_comprehensive_without_temporal_runs( self, client: Client, env: WorkflowEnvironment @@ -583,9 +576,7 @@ async 
def user_pipeline() -> str: # Signal await handle.signal(ComprehensiveWorkflow.my_signal, "hello") # Update (completes the workflow) - await handle.execute_update( - ComprehensiveWorkflow.my_update, "finish" - ) + await handle.execute_update(ComprehensiveWorkflow.my_update, "finish") return await handle.result() with tracing_context(client=mock_ls_client, enabled=True): @@ -603,6 +594,6 @@ async def user_pipeline() -> str: " inner_llm_call", " inner_llm_call", ] - assert hierarchy == expected, ( - f"Hierarchy mismatch.\nExpected:\n{expected}\nActual:\n{hierarchy}" - ) + assert ( + hierarchy == expected + ), f"Hierarchy mismatch.\nExpected:\n{expected}\nActual:\n{hierarchy}" diff --git a/tests/contrib/langsmith/test_interceptor.py b/tests/contrib/langsmith/test_interceptor.py index b22e122a6..f790423c4 100644 --- a/tests/contrib/langsmith/test_interceptor.py +++ b/tests/contrib/langsmith/test_interceptor.py @@ -344,7 +344,9 @@ def _make_client_interceptor( self, *, add_temporal_runs: bool = True ) -> tuple[Any, MagicMock]: """Create a client outbound interceptor with a mock next.""" - config = LangSmithInterceptor(client=MagicMock(), add_temporal_runs=add_temporal_runs) + config = LangSmithInterceptor( + client=MagicMock(), add_temporal_runs=add_temporal_runs + ) mock_next = MagicMock() mock_next.start_workflow = AsyncMock() mock_next.query_workflow = AsyncMock() @@ -357,18 +359,34 @@ def _make_client_interceptor( @pytest.mark.parametrize( "method,input_attrs,expected_name", [ - ("start_workflow", {"workflow": "MyWorkflow", "start_signal": None}, "StartWorkflow:MyWorkflow"), - ("start_workflow", {"workflow": "MyWorkflow", "start_signal": "my_signal"}, "SignalWithStartWorkflow:MyWorkflow"), + ( + "start_workflow", + {"workflow": "MyWorkflow", "start_signal": None}, + "StartWorkflow:MyWorkflow", + ), + ( + "start_workflow", + {"workflow": "MyWorkflow", "start_signal": "my_signal"}, + "SignalWithStartWorkflow:MyWorkflow", + ), ("query_workflow", {"query": 
"get_status"}, "QueryWorkflow:get_status"), ("signal_workflow", {"signal": "my_signal"}, "SignalWorkflow:my_signal"), - ("start_workflow_update", {"update": "my_update"}, "StartWorkflowUpdate:my_update"), + ( + "start_workflow_update", + {"update": "my_update"}, + "StartWorkflowUpdate:my_update", + ), ], ids=["start_workflow", "signal_with_start", "query", "signal", "update"], ) @pytest.mark.asyncio @patch(_PATCH_RUNTREE) async def test_creates_trace_and_injects_headers( - self, MockRunTree: Any, method: str, input_attrs: dict[str, Any], expected_name: str + self, + MockRunTree: Any, + method: str, + input_attrs: dict[str, Any], + expected_name: str, ) -> None: """Each client method creates the correct trace and injects headers.""" mock_run = _make_mock_run() @@ -404,8 +422,7 @@ async def test_start_update_with_start_workflow(self, MockRunTree: Any) -> None: await interceptor.start_update_with_start_workflow(mock_input) assert ( - _get_runtree_name(MockRunTree) - == "StartUpdateWithStartWorkflow:MyWorkflow" + _get_runtree_name(MockRunTree) == "StartUpdateWithStartWorkflow:MyWorkflow" ) assert HEADER_KEY in mock_input.start_workflow_input.headers assert HEADER_KEY in mock_input.update_workflow_input.headers @@ -423,9 +440,7 @@ async def test_add_temporal_runs_false_skips_trace( _inject_current_context() is called unconditionally, but get_current_run_tree() returns None so headers are unchanged. """ - interceptor, mock_next = self._make_client_interceptor( - add_temporal_runs=False - ) + interceptor, mock_next = self._make_client_interceptor(add_temporal_runs=False) mock_input = MagicMock() mock_input.workflow = "MyWorkflow" mock_input.start_signal = None @@ -455,9 +470,7 @@ async def test_add_temporal_runs_false_with_ambient_context( gets propagated through Temporal headers. 
""" mock_ambient_run = _make_mock_run() - interceptor, mock_next = self._make_client_interceptor( - add_temporal_runs=False - ) + interceptor, mock_next = self._make_client_interceptor(add_temporal_runs=False) mock_input = MagicMock() mock_input.workflow = "MyWorkflow" mock_input.start_signal = None @@ -484,7 +497,9 @@ class TestActivityInboundInterceptor: def _make_activity_interceptor( self, *, add_temporal_runs: bool = True ) -> tuple[Any, MagicMock]: - config = LangSmithInterceptor(client=MagicMock(), add_temporal_runs=add_temporal_runs) + config = LangSmithInterceptor( + client=MagicMock(), add_temporal_runs=add_temporal_runs + ) mock_next = MagicMock() mock_next.execute_activity = AsyncMock(return_value="activity_result") interceptor = config.intercept_activity(mock_next) @@ -564,7 +579,9 @@ def _make_workflow_interceptors( self, *, add_temporal_runs: bool = True ) -> tuple[Any, MagicMock]: """Create workflow inbound interceptor and a mock next.""" - config = LangSmithInterceptor(client=MagicMock(), add_temporal_runs=add_temporal_runs) + config = LangSmithInterceptor( + client=MagicMock(), add_temporal_runs=add_temporal_runs + ) mock_next = MagicMock() mock_next.execute_workflow = AsyncMock(return_value="wf_result") mock_next.handle_signal = AsyncMock() @@ -630,7 +647,12 @@ async def test_execute_workflow( [ ("handle_signal", "signal", "my_signal", "HandleSignal:my_signal"), ("handle_query", "query", "get_status", "HandleQuery:get_status"), - ("handle_update_validator", "update", "my_update", "ValidateUpdate:my_update"), + ( + "handle_update_validator", + "update", + "my_update", + "ValidateUpdate:my_update", + ), ("handle_update_handler", "update", "my_update", "HandleUpdate:my_update"), ], ids=["signal", "query", "validator", "update_handler"], @@ -687,7 +709,9 @@ def _make_outbound_interceptor( Returns (outbound_interceptor, mock_next, inbound_interceptor). 
""" - config = LangSmithInterceptor(client=MagicMock(), add_temporal_runs=add_temporal_runs) + config = LangSmithInterceptor( + client=MagicMock(), add_temporal_runs=add_temporal_runs + ) # Create mock next for inbound mock_inbound_next = MagicMock() @@ -734,12 +758,38 @@ def _make_outbound_interceptor( "method,input_attr,input_val,expected_name", [ ("start_activity", "activity", "do_thing", "StartActivity:do_thing"), - ("start_local_activity", "activity", "local_thing", "StartActivity:local_thing"), - ("start_child_workflow", "workflow", "ChildWorkflow", "StartChildWorkflow:ChildWorkflow"), - ("signal_child_workflow", "signal", "child_signal", "SignalChildWorkflow:child_signal"), - ("signal_external_workflow", "signal", "ext_signal", "SignalExternalWorkflow:ext_signal"), + ( + "start_local_activity", + "activity", + "local_thing", + "StartActivity:local_thing", + ), + ( + "start_child_workflow", + "workflow", + "ChildWorkflow", + "StartChildWorkflow:ChildWorkflow", + ), + ( + "signal_child_workflow", + "signal", + "child_signal", + "SignalChildWorkflow:child_signal", + ), + ( + "signal_external_workflow", + "signal", + "ext_signal", + "SignalExternalWorkflow:ext_signal", + ), + ], + ids=[ + "activity", + "local_activity", + "child_workflow", + "signal_child", + "signal_external", ], - ids=["activity", "local_activity", "child_workflow", "signal_child", "signal_external"], ) @pytest.mark.asyncio @patch(_PATCH_SANDBOX) @@ -837,7 +887,9 @@ class TestNexusInboundInterceptor: def _make_nexus_interceptor( self, *, add_temporal_runs: bool = True ) -> tuple[Any, MagicMock]: - config = LangSmithInterceptor(client=MagicMock(), add_temporal_runs=add_temporal_runs) + config = LangSmithInterceptor( + client=MagicMock(), add_temporal_runs=add_temporal_runs + ) mock_next = MagicMock() mock_next.execute_nexus_operation_start = AsyncMock() mock_next.execute_nexus_operation_cancel = AsyncMock() @@ -1044,9 +1096,7 @@ async def test_false_still_propagates_context( mock_act_input = 
MagicMock() mock_extracted_parent = _make_mock_run() - with patch( - f"{_MOD}._extract_context", return_value=mock_extracted_parent - ): + with patch(f"{_MOD}._extract_context", return_value=mock_extracted_parent): await act_interceptor.execute_activity(mock_act_input) # No RunTree should be created (add_temporal_runs=False) diff --git a/tests/contrib/langsmith/test_plugin.py b/tests/contrib/langsmith/test_plugin.py index a474fcd78..c814965bf 100644 --- a/tests/contrib/langsmith/test_plugin.py +++ b/tests/contrib/langsmith/test_plugin.py @@ -47,7 +47,6 @@ def test_construction_stores_all_config(self) -> None: assert interceptor._default_tags == ["v1"] - class TestPluginIntegration: """End-to-end test using LangSmithPlugin as a Temporal client plugin.""" @@ -81,9 +80,7 @@ async def user_pipeline() -> str: # Signal await handle.signal(ComprehensiveWorkflow.my_signal, "hello") # Update (completes the workflow) - await handle.execute_update( - ComprehensiveWorkflow.my_update, "finish" - ) + await handle.execute_update(ComprehensiveWorkflow.my_update, "finish") return await handle.result() with tracing_context(client=mock_ls_client, enabled=True): @@ -124,6 +121,6 @@ async def user_pipeline() -> str: " ValidateUpdate:my_update", " HandleUpdate:my_update", ] - assert hierarchy == expected, ( - f"Hierarchy mismatch.\nExpected:\n{expected}\nActual:\n{hierarchy}" - ) + assert ( + hierarchy == expected + ), f"Hierarchy mismatch.\nExpected:\n{expected}\nActual:\n{hierarchy}" From 941637f446ff1aaead750801d967d46989492768 Mon Sep 17 00:00:00 2001 From: Maple Xu Date: Tue, 17 Mar 2026 11:21:48 -0400 Subject: [PATCH 06/30] Fix pydocstyle, pyright, and mypy lint errors Co-Authored-By: Claude Opus 4.6 --- temporalio/contrib/langsmith/_interceptor.py | 12 ++++++++++++ tests/contrib/langsmith/test_interceptor.py | 5 +++++ tests/contrib/langsmith/test_plugin.py | 1 + 3 files changed, 18 insertions(+) diff --git a/temporalio/contrib/langsmith/_interceptor.py 
b/temporalio/contrib/langsmith/_interceptor.py index 303bc52cc..aa79fabd9 100644 --- a/temporalio/contrib/langsmith/_interceptor.py +++ b/temporalio/contrib/langsmith/_interceptor.py @@ -135,20 +135,25 @@ class ReplaySafeRunTree: """ def __init__(self, run_tree: Any) -> None: + """Initialize with the underlying RunTree to wrap.""" self._run = run_tree def to_headers(self) -> dict[str, str]: + """Delegate header serialization to the underlying RunTree.""" return self._run.to_headers() @property def ls_client(self) -> Any: + """Get the LangSmith client from the underlying RunTree.""" return self._run.ls_client @ls_client.setter def ls_client(self, value: Any) -> None: + """Set the LangSmith client on the underlying RunTree.""" self._run.ls_client = value def post(self) -> None: + """Post the run to LangSmith, skipping during replay.""" if _is_replaying(): return if temporalio.workflow.in_workflow(): @@ -158,11 +163,13 @@ def post(self) -> None: self._run.post() def end(self, **kwargs: Any) -> None: + """End the run, skipping during replay.""" if _is_replaying(): return self._run.end(**kwargs) def patch(self) -> None: + """Patch the run to LangSmith, skipping during replay.""" if _is_replaying(): return if temporalio.workflow.in_workflow(): @@ -267,6 +274,7 @@ def __init__( default_metadata: dict[str, Any] | None = None, default_tags: list[str] | None = None, ) -> None: + """Initialize the LangSmith interceptor with tracing configuration.""" # Import langsmith.Client lazily to avoid hard dependency at import time if client is None: import langsmith @@ -304,16 +312,19 @@ def maybe_run( def intercept_client( self, next: temporalio.client.OutboundInterceptor ) -> temporalio.client.OutboundInterceptor: + """Create a client outbound interceptor for LangSmith tracing.""" return _LangSmithClientOutboundInterceptor(next, self) def intercept_activity( self, next: temporalio.worker.ActivityInboundInterceptor ) -> temporalio.worker.ActivityInboundInterceptor: + """Create an 
activity inbound interceptor for LangSmith tracing.""" return _LangSmithActivityInboundInterceptor(next, self) def workflow_interceptor_class( self, input: temporalio.worker.WorkflowInterceptorClassInput ) -> type[_LangSmithWorkflowInboundInterceptor]: + """Return the workflow interceptor class with config bound.""" config = self class InterceptorWithConfig(_LangSmithWorkflowInboundInterceptor): @@ -324,6 +335,7 @@ class InterceptorWithConfig(_LangSmithWorkflowInboundInterceptor): def intercept_nexus_operation( self, next: temporalio.worker.NexusOperationInboundInterceptor ) -> temporalio.worker.NexusOperationInboundInterceptor: + """Create a Nexus operation inbound interceptor for LangSmith tracing.""" return _LangSmithNexusOperationInboundInterceptor(next, self) diff --git a/tests/contrib/langsmith/test_interceptor.py b/tests/contrib/langsmith/test_interceptor.py index f790423c4..b152ca13b 100644 --- a/tests/contrib/langsmith/test_interceptor.py +++ b/tests/contrib/langsmith/test_interceptor.py @@ -227,6 +227,7 @@ def test_exception_marks_run_errored( "TestRun", add_temporal_runs=True, ) as run: + assert run is not None assert run._run is mock_run raise RuntimeError("boom") # run.end should have been called with error containing "boom" @@ -252,6 +253,7 @@ def test_benign_application_error_not_marked( "TestRun", add_temporal_runs=True, ) as run: + assert run is not None assert run._run is mock_run raise ApplicationError( "benign", @@ -279,6 +281,7 @@ def test_non_benign_application_error_marked( "TestRun", add_temporal_runs=True, ) as run: + assert run is not None assert run._run is mock_run raise ApplicationError("bad", non_retryable=True) mock_run.end.assert_called() @@ -300,6 +303,7 @@ def test_success_completes_normally( "TestRun", add_temporal_runs=True, ) as run: + assert run is not None assert run._run is mock_run mock_run.end.assert_called_once() end_kwargs = mock_run.end.call_args.kwargs @@ -324,6 +328,7 @@ def 
test_cancelled_error_propagates_without_marking_run( "TestRun", add_temporal_runs=True, ) as run: + assert run is not None assert run._run is mock_run raise asyncio.CancelledError() # run.end should NOT have been called with error= diff --git a/tests/contrib/langsmith/test_plugin.py b/tests/contrib/langsmith/test_plugin.py index c814965bf..502e8d5f0 100644 --- a/tests/contrib/langsmith/test_plugin.py +++ b/tests/contrib/langsmith/test_plugin.py @@ -37,6 +37,7 @@ def test_construction_stores_all_config(self) -> None: metadata={"env": "prod"}, tags=["v1"], ) + assert plugin.interceptors is not None assert len(plugin.interceptors) > 0 interceptor = plugin.interceptors[0] assert isinstance(interceptor, LangSmithInterceptor) From 2fa95713e38ba8927fb0e0de5de908b0cfef45b0 Mon Sep 17 00:00:00 2001 From: Maple Xu Date: Tue, 17 Mar 2026 11:38:39 -0400 Subject: [PATCH 07/30] Fix basedpyright errors and add CLAUDE.md with CI lint docs Co-Authored-By: Claude Opus 4.6 --- CLAUDE.md | 51 ++++++++++++++++++++ temporalio/contrib/langsmith/_interceptor.py | 5 +- 2 files changed, 54 insertions(+), 2 deletions(-) create mode 100644 CLAUDE.md diff --git a/CLAUDE.md b/CLAUDE.md new file mode 100644 index 000000000..591acc3cb --- /dev/null +++ b/CLAUDE.md @@ -0,0 +1,51 @@ +# CLAUDE.md — Temporal Python SDK + +## CI Pipeline + +CI is defined in `.github/workflows/ci.yml`. The main jobs are: + +### `build-lint-test` (matrix: Python 3.10/3.14 x multiple OS) +1. `poe build-develop` — builds the Rust bridge via maturin +2. 
`poe lint` — runs ALL of the following (defined in `pyproject.toml [tool.poe.tasks]`): + - `uv run ruff check --select I` — import sorting + - `uv run ruff format --check` — code formatting + - `uv run pyright` — type checking (whole repo) + - `uv run mypy --namespace-packages --check-untyped-defs .` — type checking (whole repo) + - `uv run basedpyright` — stricter type checking (whole repo, catches more than pyright) + - `uv run pydocstyle --ignore-decorators=overload` — docstring style +3. `poe test` — runs `uv run pytest` +4. Time-skipping tests (non-ARM only) + +### `test-latest-deps` (ubuntu, Python 3.13, upgraded deps) +Same as above but with `uv lock --upgrade` first. + +### `features-tests` +Runs the `temporalio/features` repo tests against this branch. + +## Before Pushing + +Always run the full lint suite locally before pushing: +``` +uv run ruff check --select I +uv run ruff format --check +uv run pyright +uv run mypy --namespace-packages --check-untyped-defs . +uv run basedpyright +uv run pydocstyle --ignore-decorators=overload +``` + +Or equivalently: `poe lint` (requires `poe build-develop` first). + +To auto-fix formatting: `poe format` (runs `ruff check --select I --fix` + `ruff format`). + +## Dev Commands + +All commands use `uv run` prefix. Key poe tasks: +- `poe build-develop` — build Rust bridge (required before lint/test) +- `poe format` — auto-fix formatting +- `poe lint` — run all linters +- `poe test` — run pytest + +## Branch Naming + +Temporal convention: prepend `maplexu/` to branch names. 
diff --git a/temporalio/contrib/langsmith/_interceptor.py b/temporalio/contrib/langsmith/_interceptor.py index aa79fabd9..205a41011 100644 --- a/temporalio/contrib/langsmith/_interceptor.py +++ b/temporalio/contrib/langsmith/_interceptor.py @@ -4,7 +4,7 @@ import json from contextlib import contextmanager -from typing import Any, Iterator, Mapping, NoReturn +from typing import Any, ClassVar, Iterator, Mapping, NoReturn from langsmith import tracing_context from langsmith.run_helpers import get_current_run_tree @@ -275,6 +275,7 @@ def __init__( default_tags: list[str] | None = None, ) -> None: """Initialize the LangSmith interceptor with tracing configuration.""" + super().__init__() # Import langsmith.Client lazily to avoid hard dependency at import time if client is None: import langsmith @@ -453,7 +454,7 @@ class _LangSmithWorkflowInboundInterceptor( ): """Instruments workflow execution with LangSmith runs.""" - _config: LangSmithInterceptor + _config: ClassVar[LangSmithInterceptor] _current_run: Any | None = None def init(self, outbound: temporalio.worker.WorkflowOutboundInterceptor) -> None: From 7623d4347c3c122d6ee23947f54646b0774cbf50 Mon Sep 17 00:00:00 2001 From: Maple Xu Date: Tue, 17 Mar 2026 11:56:55 -0400 Subject: [PATCH 08/30] Fix all basedpyright warnings (deprecated imports, unused params) Co-Authored-By: Claude Opus 4.6 --- temporalio/contrib/langsmith/_interceptor.py | 3 +- tests/contrib/langsmith/test_integration.py | 17 +++++-- tests/contrib/langsmith/test_interceptor.py | 48 ++++++++++---------- 3 files changed, 39 insertions(+), 29 deletions(-) diff --git a/temporalio/contrib/langsmith/_interceptor.py b/temporalio/contrib/langsmith/_interceptor.py index 205a41011..08b4af2ea 100644 --- a/temporalio/contrib/langsmith/_interceptor.py +++ b/temporalio/contrib/langsmith/_interceptor.py @@ -3,8 +3,9 @@ from __future__ import annotations import json +from collections.abc import Iterator, Mapping from contextlib import contextmanager -from typing 
import Any, ClassVar, Iterator, Mapping, NoReturn +from typing import Any, ClassVar, NoReturn from langsmith import tracing_context from langsmith.run_helpers import get_current_run_tree diff --git a/tests/contrib/langsmith/test_integration.py b/tests/contrib/langsmith/test_integration.py index a730223f2..9cfebed29 100644 --- a/tests/contrib/langsmith/test_integration.py +++ b/tests/contrib/langsmith/test_integration.py @@ -69,7 +69,7 @@ async def run(self) -> str: @workflow.defn class SimpleNexusWorkflow: @workflow.run - async def run(self, input: str) -> str: + async def run(self, _input: str) -> str: return await workflow.execute_activity( traceable_activity, start_to_close_timeout=timedelta(seconds=10), @@ -155,7 +155,7 @@ async def run(self) -> str: return "comprehensive-done" @workflow.signal - def my_signal(self, value: str) -> None: + def my_signal(self, _value: str) -> None: self._signal_received = True @workflow.query @@ -262,6 +262,7 @@ async def test_workflow_activity_trace_hierarchy( self, client: Client, env: WorkflowEnvironment ) -> None: """StartWorkflow → RunWorkflow → StartActivity → RunActivity hierarchy.""" + _ = env temporal_client, collector, _ = _make_client_and_collector(client) async with new_worker( @@ -315,6 +316,7 @@ async def test_no_duplicate_traces_on_replay( self, client: Client, env: WorkflowEnvironment ) -> None: """With max_cached_workflows=0 (forcing replay), no duplicate runs appear.""" + _ = env temporal_client, collector, _ = _make_client_and_collector(client) async with new_worker( @@ -356,6 +358,7 @@ async def test_activity_failure_marked( self, client: Client, env: WorkflowEnvironment ) -> None: """A failing activity run is marked with an error.""" + _ = env temporal_client, collector, _ = _make_client_and_collector(client) async with new_worker( @@ -390,9 +393,12 @@ async def test_activity_failure_marked( assert activity_runs[0].error == "ApplicationError: activity-failed" async def test_workflow_failure_marked( - self, 
client: Client, env: WorkflowEnvironment + self, + client: Client, + env: WorkflowEnvironment, # type:ignore[reportUnusedParameter] ) -> None: """A failing workflow run is marked with an error.""" + _ = env temporal_client, collector, _ = _make_client_and_collector(client) async with new_worker( @@ -422,9 +428,12 @@ async def test_workflow_failure_marked( assert wf_runs[0].error == "ApplicationError: workflow-failed" async def test_benign_error_not_marked( - self, client: Client, env: WorkflowEnvironment + self, + client: Client, + env: WorkflowEnvironment, # type:ignore[reportUnusedParameter] ) -> None: """A benign ApplicationError does NOT mark the run as errored.""" + _ = env temporal_client, collector, _ = _make_client_and_collector(client) async with new_worker( diff --git a/tests/contrib/langsmith/test_interceptor.py b/tests/contrib/langsmith/test_interceptor.py index b152ca13b..a44f5d4c5 100644 --- a/tests/contrib/langsmith/test_interceptor.py +++ b/tests/contrib/langsmith/test_interceptor.py @@ -145,7 +145,7 @@ class TestReplaySafety: @patch(_PATCH_IS_REPLAYING, return_value=True) @patch(_PATCH_IN_WORKFLOW, return_value=True) def test_replay_noop_post_end_patch( - self, mock_in_wf: Any, mock_replaying: Any, MockRunTree: Any + self, _mock_in_wf: Any, _mock_replaying: Any, MockRunTree: Any ) -> None: """During replay, RunTree is created but post/end/patch are no-ops.""" mock_run = _make_mock_run() @@ -169,7 +169,7 @@ def test_replay_noop_post_end_patch( @patch(_PATCH_IS_REPLAYING, return_value=False) @patch(_PATCH_IN_WORKFLOW, return_value=True) def test_create_trace_when_not_replaying( - self, mock_in_wf: Any, mock_replaying: Any, MockRunTree: Any + self, _mock_in_wf: Any, _mock_replaying: Any, MockRunTree: Any ) -> None: """When not replaying (but in workflow), _maybe_run creates a ReplaySafeRunTree.""" mock_run = _make_mock_run() @@ -188,7 +188,7 @@ def test_create_trace_when_not_replaying( @patch(_PATCH_RUNTREE) @patch(_PATCH_IN_WORKFLOW, 
return_value=False) def test_create_trace_outside_workflow( - self, mock_in_wf: Any, MockRunTree: Any + self, _mock_in_wf: Any, MockRunTree: Any ) -> None: """Outside workflow (client/activity), RunTree IS created.""" mock_run = _make_mock_run() @@ -215,7 +215,7 @@ class TestErrorHandling: @patch(_PATCH_RUNTREE) @patch(_PATCH_IN_WORKFLOW, return_value=False) def test_exception_marks_run_errored( - self, mock_in_wf: Any, MockRunTree: Any + self, _mock_in_wf: Any, MockRunTree: Any ) -> None: """RuntimeError marks the run as errored and re-raises.""" mock_run = _make_mock_run() @@ -239,7 +239,7 @@ def test_exception_marks_run_errored( @patch(_PATCH_RUNTREE) @patch(_PATCH_IN_WORKFLOW, return_value=False) def test_benign_application_error_not_marked( - self, mock_in_wf: Any, MockRunTree: Any + self, _mock_in_wf: Any, MockRunTree: Any ) -> None: """Benign ApplicationError does not mark the run as errored.""" from temporalio.exceptions import ApplicationError, ApplicationErrorCategory @@ -267,7 +267,7 @@ def test_benign_application_error_not_marked( @patch(_PATCH_RUNTREE) @patch(_PATCH_IN_WORKFLOW, return_value=False) def test_non_benign_application_error_marked( - self, mock_in_wf: Any, MockRunTree: Any + self, _mock_in_wf: Any, MockRunTree: Any ) -> None: """Non-benign ApplicationError marks the run as errored.""" from temporalio.exceptions import ApplicationError @@ -292,7 +292,7 @@ def test_non_benign_application_error_marked( @patch(_PATCH_RUNTREE) @patch(_PATCH_IN_WORKFLOW, return_value=False) def test_success_completes_normally( - self, mock_in_wf: Any, MockRunTree: Any + self, _mock_in_wf: Any, MockRunTree: Any ) -> None: """On success, run.end(outputs={"status": "ok"}) and run.patch() are called.""" mock_run = _make_mock_run() @@ -313,7 +313,7 @@ def test_success_completes_normally( @patch(_PATCH_RUNTREE) @patch(_PATCH_IN_WORKFLOW, return_value=False) def test_cancelled_error_propagates_without_marking_run( - self, mock_in_wf: Any, MockRunTree: Any + self, 
_mock_in_wf: Any, MockRunTree: Any ) -> None: """CancelledError (BaseException) propagates without marking run as errored. @@ -559,7 +559,7 @@ async def test_execute_activity_no_header( mock_info_fn.return_value = _mock_activity_info() mock_run = _make_mock_run() MockRunTree.return_value = mock_run - interceptor, mock_next = self._make_activity_interceptor() + interceptor, _mock_next = self._make_activity_interceptor() mock_input = MagicMock() mock_input.headers = {} # No LangSmith header @@ -617,8 +617,8 @@ def _make_workflow_interceptors( async def test_execute_workflow( self, mock_wf_info: Any, - mock_in_wf: Any, - mock_replaying: Any, + _mock_in_wf: Any, + _mock_replaying: Any, MockRunTree: Any, mock_sandbox: Any, ) -> None: @@ -671,8 +671,8 @@ async def test_execute_workflow( async def test_handler_creates_trace( self, mock_wf_info: Any, - mock_in_wf: Any, - mock_replaying: Any, + _mock_in_wf: Any, + _mock_replaying: Any, MockRunTree: Any, mock_sandbox: Any, method: str, @@ -803,8 +803,8 @@ def _make_outbound_interceptor( @patch(_PATCH_IN_WORKFLOW, return_value=True) async def test_creates_trace_and_injects_headers( self, - mock_in_wf: Any, - mock_replaying: Any, + _mock_in_wf: Any, + _mock_replaying: Any, MockRunTree: Any, mock_sandbox: Any, method: str, @@ -834,10 +834,10 @@ async def test_creates_trace_and_injects_headers( @patch(_PATCH_IS_REPLAYING, return_value=False) @patch(_PATCH_IN_WORKFLOW, return_value=True) def test_continue_as_new( - self, mock_in_wf: Any, mock_replaying: Any, MockRunTree: Any + self, _mock_in_wf: Any, _mock_replaying: Any, MockRunTree: Any ) -> None: """continue_as_new does NOT create a new trace, but injects context from current run.""" - outbound, mock_next, inbound = self._make_outbound_interceptor() + outbound, mock_next, _inbound = self._make_outbound_interceptor() mock_input = MagicMock() mock_input.headers = {} @@ -857,8 +857,8 @@ def test_continue_as_new( @patch(_PATCH_IN_WORKFLOW, return_value=True) async def 
test_start_nexus_operation( self, - mock_in_wf: Any, - mock_replaying: Any, + _mock_in_wf: Any, + _mock_replaying: Any, MockRunTree: Any, mock_sandbox: Any, ) -> None: @@ -978,7 +978,7 @@ class TestLazyClientPrevention: @patch(_PATCH_IN_WORKFLOW, return_value=False) @patch(_PATCH_RUNTREE) def test_runtree_always_receives_ls_client( - self, MockRunTree: Any, mock_in_wf: Any + self, MockRunTree: Any, _mock_in_wf: Any ) -> None: """Every RunTree() created by _maybe_run receives ls_client= (pre-created client).""" mock_client = MagicMock() @@ -1008,7 +1008,7 @@ class TestAddTemporalRunsToggle: @patch(_PATCH_RUNTREE) @patch(_PATCH_IN_WORKFLOW, return_value=False) - def test_false_skips_traces(self, mock_in_wf: Any, MockRunTree: Any) -> None: + def test_false_skips_traces(self, _mock_in_wf: Any, MockRunTree: Any) -> None: """With add_temporal_runs=False, _maybe_run yields None (no run created). Callers are responsible for propagating context even when the run is None. @@ -1035,10 +1035,10 @@ async def test_false_still_propagates_context( self, mock_act_info: Any, mock_wf_info: Any, - mock_in_wf: Any, - mock_replaying: Any, + _mock_in_wf: Any, + _mock_replaying: Any, MockRunTree: Any, - mock_sandbox: Any, + _mock_sandbox: Any, mock_tracing_ctx: Any, ) -> None: """With add_temporal_runs=False, no runs are created but context still propagates. 
From a3c0beec209fafb3a1cba9a268b931a2530bb85b Mon Sep 17 00:00:00 2001 From: Maple Xu Date: Tue, 17 Mar 2026 12:07:19 -0400 Subject: [PATCH 09/30] Clean up unused env params: use type:ignore consistently Co-Authored-By: Claude Opus 4.6 --- tests/contrib/langsmith/test_integration.py | 17 +++++++++-------- 1 file changed, 9 insertions(+), 8 deletions(-) diff --git a/tests/contrib/langsmith/test_integration.py b/tests/contrib/langsmith/test_integration.py index 9cfebed29..ff65af82f 100644 --- a/tests/contrib/langsmith/test_integration.py +++ b/tests/contrib/langsmith/test_integration.py @@ -259,10 +259,11 @@ def _make_client_and_collector( class TestBasicTracing: async def test_workflow_activity_trace_hierarchy( - self, client: Client, env: WorkflowEnvironment + self, + client: Client, + env: WorkflowEnvironment, # type:ignore[reportUnusedParameter] ) -> None: """StartWorkflow → RunWorkflow → StartActivity → RunActivity hierarchy.""" - _ = env temporal_client, collector, _ = _make_client_and_collector(client) async with new_worker( @@ -313,10 +314,11 @@ async def test_workflow_activity_trace_hierarchy( class TestReplay: async def test_no_duplicate_traces_on_replay( - self, client: Client, env: WorkflowEnvironment + self, + client: Client, + env: WorkflowEnvironment, # type:ignore[reportUnusedParameter] ) -> None: """With max_cached_workflows=0 (forcing replay), no duplicate runs appear.""" - _ = env temporal_client, collector, _ = _make_client_and_collector(client) async with new_worker( @@ -355,10 +357,11 @@ async def test_no_duplicate_traces_on_replay( class TestErrorTracing: async def test_activity_failure_marked( - self, client: Client, env: WorkflowEnvironment + self, + client: Client, + env: WorkflowEnvironment, # type:ignore[reportUnusedParameter] ) -> None: """A failing activity run is marked with an error.""" - _ = env temporal_client, collector, _ = _make_client_and_collector(client) async with new_worker( @@ -398,7 +401,6 @@ async def 
test_workflow_failure_marked( env: WorkflowEnvironment, # type:ignore[reportUnusedParameter] ) -> None: """A failing workflow run is marked with an error.""" - _ = env temporal_client, collector, _ = _make_client_and_collector(client) async with new_worker( @@ -433,7 +435,6 @@ async def test_benign_error_not_marked( env: WorkflowEnvironment, # type:ignore[reportUnusedParameter] ) -> None: """A benign ApplicationError does NOT mark the run as errored.""" - _ = env temporal_client, collector, _ = _make_client_and_collector(client) async with new_worker( From 982d2206548b3a16b3c131f7eac1a15e8f3acae5 Mon Sep 17 00:00:00 2001 From: Maple Xu Date: Wed, 18 Mar 2026 16:26:43 -0400 Subject: [PATCH 10/30] Address PR review feedback: defaults, naming, and header key - Change add_temporal_runs default to False in both plugin and interceptor (reviewer preference for opt-in behavior) - Rename plugin to langchain.LangSmithPlugin per organization.PluginName convention - Prefix header key with _temporal- to avoid collisions - Update all tests to explicitly pass add_temporal_runs=True Co-Authored-By: Claude Opus 4.6 --- temporalio/contrib/langsmith/_interceptor.py | 4 ++-- temporalio/contrib/langsmith/_plugin.py | 6 ++--- tests/contrib/langsmith/test_integration.py | 24 +++++++++++++++----- tests/contrib/langsmith/test_interceptor.py | 2 +- tests/contrib/langsmith/test_plugin.py | 4 +++- 5 files changed, 27 insertions(+), 13 deletions(-) diff --git a/temporalio/contrib/langsmith/_interceptor.py b/temporalio/contrib/langsmith/_interceptor.py index 08b4af2ea..564a03385 100644 --- a/temporalio/contrib/langsmith/_interceptor.py +++ b/temporalio/contrib/langsmith/_interceptor.py @@ -23,7 +23,7 @@ # Constants # --------------------------------------------------------------------------- -HEADER_KEY = "_langsmith-context" +HEADER_KEY = "_temporal-langsmith-context" # --------------------------------------------------------------------------- # Context helpers @@ -271,7 +271,7 @@ def 
__init__( *, client: Any | None = None, project_name: str | None = None, - add_temporal_runs: bool = True, + add_temporal_runs: bool = False, default_metadata: dict[str, Any] | None = None, default_tags: list[str] | None = None, ) -> None: diff --git a/temporalio/contrib/langsmith/_plugin.py b/temporalio/contrib/langsmith/_plugin.py index 5527560d7..4fac4d782 100644 --- a/temporalio/contrib/langsmith/_plugin.py +++ b/temporalio/contrib/langsmith/_plugin.py @@ -23,7 +23,7 @@ def __init__( *, client: Any | None = None, project_name: str | None = None, - add_temporal_runs: bool = True, + add_temporal_runs: bool = False, metadata: dict[str, Any] | None = None, tags: list[str] | None = None, ) -> None: @@ -34,7 +34,7 @@ def __init__( lazily (using LANGSMITH_API_KEY env var). project_name: LangSmith project name for traces. add_temporal_runs: Whether to create LangSmith runs for Temporal - operations. Defaults to True. + operations. Defaults to False. metadata: Default metadata to attach to all runs. tags: Default tags to attach to all runs. 
""" @@ -60,7 +60,7 @@ def workflow_runner(runner: WorkflowRunner | None) -> WorkflowRunner: return runner super().__init__( - "LangSmithPlugin", + "langchain.LangSmithPlugin", interceptors=interceptors, workflow_runner=workflow_runner, ) diff --git a/tests/contrib/langsmith/test_integration.py b/tests/contrib/langsmith/test_integration.py index ff65af82f..f7799eb42 100644 --- a/tests/contrib/langsmith/test_integration.py +++ b/tests/contrib/langsmith/test_integration.py @@ -264,7 +264,9 @@ async def test_workflow_activity_trace_hierarchy( env: WorkflowEnvironment, # type:ignore[reportUnusedParameter] ) -> None: """StartWorkflow → RunWorkflow → StartActivity → RunActivity hierarchy.""" - temporal_client, collector, _ = _make_client_and_collector(client) + temporal_client, collector, _ = _make_client_and_collector( + client, add_temporal_runs=True + ) async with new_worker( temporal_client, @@ -319,7 +321,9 @@ async def test_no_duplicate_traces_on_replay( env: WorkflowEnvironment, # type:ignore[reportUnusedParameter] ) -> None: """With max_cached_workflows=0 (forcing replay), no duplicate runs appear.""" - temporal_client, collector, _ = _make_client_and_collector(client) + temporal_client, collector, _ = _make_client_and_collector( + client, add_temporal_runs=True + ) async with new_worker( temporal_client, @@ -362,7 +366,9 @@ async def test_activity_failure_marked( env: WorkflowEnvironment, # type:ignore[reportUnusedParameter] ) -> None: """A failing activity run is marked with an error.""" - temporal_client, collector, _ = _make_client_and_collector(client) + temporal_client, collector, _ = _make_client_and_collector( + client, add_temporal_runs=True + ) async with new_worker( temporal_client, @@ -401,7 +407,9 @@ async def test_workflow_failure_marked( env: WorkflowEnvironment, # type:ignore[reportUnusedParameter] ) -> None: """A failing workflow run is marked with an error.""" - temporal_client, collector, _ = _make_client_and_collector(client) + temporal_client, 
collector, _ = _make_client_and_collector( + client, add_temporal_runs=True + ) async with new_worker( temporal_client, @@ -435,7 +443,9 @@ async def test_benign_error_not_marked( env: WorkflowEnvironment, # type:ignore[reportUnusedParameter] ) -> None: """A benign ApplicationError does NOT mark the run as errored.""" - temporal_client, collector, _ = _make_client_and_collector(client) + temporal_client, collector, _ = _make_client_and_collector( + client, add_temporal_runs=True + ) async with new_worker( temporal_client, @@ -481,7 +491,9 @@ async def test_comprehensive_with_temporal_runs( """Full workflow exercising activity, local activity, child workflow, signal, query, and update — all nested under an ambient @traceable. """ - temporal_client, collector, mock_ls_client = _make_client_and_collector(client) + temporal_client, collector, mock_ls_client = _make_client_and_collector( + client, add_temporal_runs=True + ) @traceable(name="user_pipeline") async def user_pipeline() -> str: diff --git a/tests/contrib/langsmith/test_interceptor.py b/tests/contrib/langsmith/test_interceptor.py index a44f5d4c5..b64b6aeed 100644 --- a/tests/contrib/langsmith/test_interceptor.py +++ b/tests/contrib/langsmith/test_interceptor.py @@ -114,7 +114,7 @@ def test_inject_extract_roundtrip(self, MockRunTree: Any) -> None: MockRunTree.from_headers.assert_called_once() def test_extract_missing_header(self) -> None: - """When the _langsmith-context header is absent, returns None.""" + """When the _temporal-langsmith-context header is absent, returns None.""" headers: dict[str, Payload] = {} result = _extract_context(headers) assert result is None diff --git a/tests/contrib/langsmith/test_plugin.py b/tests/contrib/langsmith/test_plugin.py index 502e8d5f0..3f7df1c0b 100644 --- a/tests/contrib/langsmith/test_plugin.py +++ b/tests/contrib/langsmith/test_plugin.py @@ -55,7 +55,9 @@ async def test_comprehensive_plugin_trace_hierarchy( self, client: Client, env: WorkflowEnvironment ) -> None: 
"""Plugin wired to a real Temporal worker produces the full trace hierarchy.""" - temporal_client, collector, mock_ls_client = _make_client_and_collector(client) + temporal_client, collector, mock_ls_client = _make_client_and_collector( + client, add_temporal_runs=True + ) @traceable(name="user_pipeline") async def user_pipeline() -> str: From ad67096d01fd4933498715670b3f02b346d98899 Mon Sep 17 00:00:00 2001 From: Maple Xu Date: Thu, 19 Mar 2026 13:17:15 -0400 Subject: [PATCH 11/30] Add replay safety and worker restart tests for LangSmith plugin - Add @traceable call (outer_chain) directly in ComprehensiveWorkflow to test non-deterministic tracing alongside deterministic replay - Set max_cached_workflows=0 on all test workers to force replay on every workflow task, exposing header non-determinism - Restructure comprehensive tests with mid-workflow worker restart: one shared collector across two worker lifetimes proves context propagates via headers, not cached plugin state - Add is_waiting_for_signal query and poll helper for deterministic sync (no arbitrary sleeps) - Consolidate make_mock_ls_client in conftest.py, remove unused fixtures, use raw client for polling to avoid trace contamination - Tests are expected to fail (TDD): sandbox blocks @traceable in workflows, max_cached_workflows=0 exposes outputs=None on eviction Co-Authored-By: Claude Opus 4.6 --- CLAUDE.md | 57 ++++ temporalio/contrib/langsmith/_interceptor.py | 275 +++++++++++++++---- tests/contrib/langsmith/conftest.py | 24 +- tests/contrib/langsmith/test_integration.py | 199 +++++++++++--- tests/contrib/langsmith/test_interceptor.py | 6 +- tests/contrib/langsmith/test_plugin.py | 25 +- 6 files changed, 467 insertions(+), 119 deletions(-) diff --git a/CLAUDE.md b/CLAUDE.md index 591acc3cb..92ab22596 100644 --- a/CLAUDE.md +++ b/CLAUDE.md @@ -46,6 +46,63 @@ All commands use `uv run` prefix. 
Key poe tasks: - `poe lint` — run all linters - `poe test` — run pytest +## Team Workflow + +This repo uses **agent teams** (not subagents with worktrees). Delegate coding tasks to the `coder` teammate via `SendMessage`. + +**All agents (team-lead and teammates) must load the `temporal-developer` skill** at the start of any task. This provides Temporal-specific guidance for workflows, activities, signals, queries, updates, Nexus, and SDK patterns. + +### What coder CAN do +- Read/explore code (Glob, Grep, Read) +- Edit and write files (Edit, Write) +- Spawn sub-agents for exploration + +### What coder CANNOT do — team-lead must handle +- **Run tests** — `uv run pytest` has no `--prefix` equivalent, and `cd` doesn't persist across Bash calls. +- **Run lints** — same reason (`uv run ruff`, `uv run pyright`, etc.). +- **Git operations** — commits, pushes, branch management. + +### Writing teammate prompts +Be thorough and explicit upfront — don't rely on correcting teammates after launch. Every prompt to coder should include: +- **What to do** — the specific task, relevant file paths, and expected outcome. +- **What NOT to do** — explicitly state that coder cannot run tests or lints. Don't let them try and fail. +- **Operational constraints** — remind them: no compound Bash commands, no `git` commands, no `uv run`. Use `Edit`/`Write`/`Read`/`Glob`/`Grep` only. +- **Load the `temporal-developer` skill** — remind teammates to invoke it at the start of their task. +- **Dev environment context** — whether the Rust bridge is built, which branch they're on, any known lint pitfalls (e.g., basedpyright strictness). +- **Reference material** — point to existing patterns in the codebase (file paths and line numbers) rather than describing from memory. + +### Workflow +1. **Team-lead** sends task to coder with a thorough prompt (see above). +2. **Coder** explores, writes code, reports back. +3. **Team-lead** runs all lints and tests, reports failures back to coder for fixes. +4. 
**Team-lead** commits and pushes after user approval. + +### Context management +- Delegate aggressively to preserve your context window. +- Do not duplicate work your teammate is doing (don't read the same files they're exploring). +- When coder reports back, trust their findings — don't re-verify unless something seems off. + +## CI Lint Details + +`basedpyright` is the strictest linter and the most common source of CI failures. It catches things the others miss: +- `reportDeprecated` — flags use of deprecated APIs +- `reportUnusedParameter` — unused function parameters +- `reportMissingSuperCall` — missing `super().__init__()` calls +- `reportUninitializedInstanceVariable` — instance vars not set in `__init__` + +Always run `uv run basedpyright` locally before pushing. If it passes, the other type checkers will almost certainly pass too. + +## Time-Skipping Tests + +CI runs tests twice: `poe test` (normal mode) and `poe test --workflow-environment time-skipping` (non-ARM only). The time-skipping test server has a **known limitation: it does not persist headers**. This means any test that depends on header propagation (e.g., tracing context) will fail in time-skipping mode. The established pattern for handling this is: + +```python +if env.supports_time_skipping: + pytest.skip("Time skipping server doesn't persist headers.") +``` + +See `tests/worker/test_workflow.py:8249` for the existing precedent. + ## Branch Naming Temporal convention: prepend `maplexu/` to branch names. 
diff --git a/temporalio/contrib/langsmith/_interceptor.py b/temporalio/contrib/langsmith/_interceptor.py index 564a03385..789825251 100644 --- a/temporalio/contrib/langsmith/_interceptor.py +++ b/temporalio/contrib/langsmith/_interceptor.py @@ -3,6 +3,8 @@ from __future__ import annotations import json +import random +import uuid from collections.abc import Iterator, Mapping from contextlib import contextmanager from typing import Any, ClassVar, NoReturn @@ -39,7 +41,7 @@ def _inject_context( """Inject LangSmith context into Temporal payload headers. Serializes the run's trace context (trace ID, parent run ID, dotted order) - into a Temporal header under ``_langsmith-context``, enabling parent-child + into a Temporal header under ``_temporal-langsmith-context``, enabling parent-child trace nesting across process boundaries (client → worker, workflow → activity). """ ls_headers = run_tree.to_headers() @@ -49,10 +51,9 @@ def _inject_context( } -def _get_current_run_safe() -> ReplaySafeRunTree | None: - """Get the current ambient LangSmith run tree, wrapped for replay safety.""" - raw = get_current_run_tree() - return ReplaySafeRunTree(raw) if raw is not None else None +def _get_current_run_safe() -> RunTree | None: + """Get the current ambient LangSmith run tree.""" + return get_current_run_tree() def _inject_current_context( @@ -76,14 +77,16 @@ def _extract_context( """Extract LangSmith context from Temporal payload headers. Reconstructs a :class:`RunTree` from the ``_langsmith-context`` header on - the receiving side, so inbound interceptors can establish a parent-child - relationship with the sender's run. Returns ``None`` if no header is present. + the receiving side, wrapped in a :class:`ReplaySafeRunTree` so inbound + interceptors can establish a parent-child relationship with the sender's + run. Returns ``None`` if no header is present. 
""" header = headers.get(HEADER_KEY) if not header: return None ls_headers = _payload_converter.from_payloads([header])[0] - return ReplaySafeRunTree(RunTree.from_headers(ls_headers)) + run = RunTree.from_headers(ls_headers) + return ReplaySafeRunTree(run) if run else None def _inject_nexus_context( @@ -106,7 +109,58 @@ def _extract_nexus_context( if not raw: return None ls_headers = json.loads(raw) - return ReplaySafeRunTree(RunTree.from_headers(ls_headers)) + run = RunTree.from_headers(ls_headers) + return ReplaySafeRunTree(run) if run else None + + +# --------------------------------------------------------------------------- +# Sandbox safety: patch @traceable's aio_to_thread +# --------------------------------------------------------------------------- + +_aio_to_thread_patched = False + + +def _patch_aio_to_thread() -> None: + """Patch langsmith's ``aio_to_thread`` to run synchronously in workflows. + + The ``@traceable`` decorator uses ``aio_to_thread()`` → + ``loop.run_in_executor()`` for run setup/teardown. The Temporal workflow + sandbox blocks ``run_in_executor``. This patch runs those functions + synchronously (they are CPU-bound, no I/O) when inside a workflow. + """ + global _aio_to_thread_patched # noqa: PLW0603 + if _aio_to_thread_patched: + return + + import langsmith._internal._aiter as _aiter + + _original = _aiter.aio_to_thread + + import contextvars + + async def _safe_aio_to_thread( + func: Any, + /, + *args: Any, + __ctx: contextvars.Context | None = None, + **kwargs: Any, + ) -> Any: + if not temporalio.workflow.in_workflow(): + return await _original(func, *args, __ctx=__ctx, **kwargs) + with temporalio.workflow.unsafe.sandbox_unrestricted(): + # During replay, disable tracing so @traceable calls don't + # produce duplicate traces for code that already ran. + # Run func directly in the current context (no ctx.run) so + # that context var changes (e.g. _PARENT_RUN_TREE set by + # @traceable's _setup_run) propagate to the caller. 
+ # This is safe because workflows are single-threaded. + if _is_replaying(): + with tracing_context(enabled=False): + return func(*args, **kwargs) + return func(*args, **kwargs) + + _aiter.aio_to_thread = _safe_aio_to_thread # type: ignore[assignment] + _aio_to_thread_patched = True # --------------------------------------------------------------------------- @@ -122,62 +176,100 @@ def _is_replaying() -> bool: ) +def _get_workflow_random() -> random.Random | None: + """Get a deterministic random generator for the current workflow. + + Follows the OTel pattern: creates a workflow-safe random generator once + via ``workflow.new_random()`` and stores it on the workflow instance so + subsequent calls return the same generator. The generator is seeded from + the workflow's deterministic seed, so it produces identical UUIDs across + replays and eviction/restart cycles. + + Returns ``None`` outside a workflow, in read-only (query) contexts, or + when workflow APIs are mocked (unit tests). + """ + try: + if not temporalio.workflow.in_workflow(): + return None + if temporalio.workflow.unsafe.is_read_only(): + return None + inst = temporalio.workflow.instance() + rng = getattr(inst, "__temporal_langsmith_random", None) + if rng is None: + rng = temporalio.workflow.new_random() + setattr(inst, "__temporal_langsmith_random", rng) + return rng + except Exception: + return None + + +def _uuid_from_random(rng: random.Random) -> uuid.UUID: + """Generate a deterministic UUID4 from a workflow-bound random generator.""" + return uuid.UUID(int=rng.getrandbits(128), version=4) + + # --------------------------------------------------------------------------- # ReplaySafeRunTree wrapper # --------------------------------------------------------------------------- -class ReplaySafeRunTree: - """Wraps a RunTree to handle replay skipping and sandbox safety transparently. 
+class ReplaySafeRunTree(RunTree): + """Wrapper around a :class:`RunTree` with replay-safe ``post``, ``end``, and ``patch``. + + Inherits from :class:`RunTree` so ``isinstance`` checks pass, but does + **not** call ``super().__init__()``—the wrapped ``_run`` is the real + RunTree. Attribute access is delegated via ``__getattr__``/``__setattr__``. During replay, ``post()``, ``end()``, and ``patch()`` become no-ops. - Inside a workflow sandbox, ``post()`` and ``patch()`` are wrapped in + Inside a workflow sandbox, these methods are wrapped in ``sandbox_unrestricted()``. """ - def __init__(self, run_tree: Any) -> None: - """Initialize with the underlying RunTree to wrap.""" - self._run = run_tree + def __init__(self, run_tree: RunTree) -> None: # pyright: ignore[reportMissingSuperCall] + """Wrap an existing RunTree with replay-safe overrides.""" + object.__setattr__(self, "_run", run_tree) - def to_headers(self) -> dict[str, str]: - """Delegate header serialization to the underlying RunTree.""" - return self._run.to_headers() + def __getattr__(self, name: str) -> Any: + """Delegate attribute access to the wrapped RunTree.""" + return getattr(self._run, name) - @property - def ls_client(self) -> Any: - """Get the LangSmith client from the underlying RunTree.""" - return self._run.ls_client + def __setattr__(self, name: str, value: Any) -> None: + """Delegate attribute setting to the wrapped RunTree.""" + setattr(self._run, name, value) - @ls_client.setter - def ls_client(self, value: Any) -> None: - """Set the LangSmith client on the underlying RunTree.""" - self._run.ls_client = value + def to_headers(self) -> dict[str, Any]: + """Delegate to the wrapped RunTree's to_headers.""" + return self._run.to_headers() - def post(self) -> None: + def post(self, exclude_child_runs: bool = True) -> None: """Post the run to LangSmith, skipping during replay.""" if _is_replaying(): return if temporalio.workflow.in_workflow(): with temporalio.workflow.unsafe.sandbox_unrestricted(): 
- self._run.post() + self._run.post(exclude_child_runs=exclude_child_runs) else: - self._run.post() + self._run.post(exclude_child_runs=exclude_child_runs) def end(self, **kwargs: Any) -> None: """End the run, skipping during replay.""" if _is_replaying(): return - self._run.end(**kwargs) + if temporalio.workflow.in_workflow(): + with temporalio.workflow.unsafe.sandbox_unrestricted(): + self._run.end(**kwargs) + else: + self._run.end(**kwargs) - def patch(self) -> None: + def patch(self, *, exclude_inputs: bool = False) -> None: """Patch the run to LangSmith, skipping during replay.""" if _is_replaying(): return if temporalio.workflow.in_workflow(): with temporalio.workflow.unsafe.sandbox_unrestricted(): - self._run.patch() + self._run.patch(exclude_inputs=exclude_inputs) else: - self._run.patch() + self._run.patch(exclude_inputs=exclude_inputs) # --------------------------------------------------------------------------- @@ -210,11 +302,22 @@ def _maybe_run( - If add_temporal_runs is False, yields None (no run created). Context propagation is handled unconditionally by callers. - - When a run IS created, wraps it in :class:`ReplaySafeRunTree` for + - When a run IS created, uses :class:`ReplaySafeRunTree` for replay and sandbox safety, then sets it as ambient context via - ``tracing_context(parent=raw_run)`` so ``get_current_run_tree()`` + ``tracing_context(parent=run_tree)`` so ``get_current_run_tree()`` returns it and ``_inject_current_context()`` can inject it. - On exception: marks run as errored (unless benign ApplicationError), re-raises. + + Args: + client: LangSmith client instance. + name: Display name for the run. + add_temporal_runs: Whether to create Temporal-level trace runs. + run_type: LangSmith run type (default ``"chain"``). + inputs: Input data to record on the run. + metadata: Extra metadata to attach to the run. + tags: Tags to attach to the run. + parent: Parent run for nesting. + project_name: LangSmith project name override. 
""" if not add_temporal_runs: yield None @@ -230,19 +333,36 @@ def _maybe_run( inputs=inputs or {}, ls_client=client, ) + # Deterministic IDs and start times in workflow context so that runs + # survive eviction/replay with max_cached_workflows=0. Uses a + # workflow-bound random generator (following the OTel pattern) to + # produce identical UUIDs across replays and worker restarts. + rng = _get_workflow_random() + if rng is not None: + kwargs["id"] = _uuid_from_random(rng) + kwargs["start_time"] = temporalio.workflow.now() + elif temporalio.workflow.in_workflow(): + # Read-only context (e.g. query handler) — use workflow.uuid4() + try: + kwargs["id"] = temporalio.workflow.uuid4() + kwargs["start_time"] = temporalio.workflow.now() + except Exception: + pass # Not in a real workflow context (e.g., unit test mock) if project_name is not None: kwargs["project_name"] = project_name if parent is not None: - kwargs["parent_run"] = parent._run + # Unwrap ReplaySafeRunTree so RunTree gets the real parent + kwargs["parent_run"] = ( + parent._run if isinstance(parent, ReplaySafeRunTree) else parent + ) if metadata: kwargs["extra"] = {"metadata": metadata} if tags: kwargs["tags"] = tags - raw_run = RunTree(**kwargs) - run_tree = ReplaySafeRunTree(raw_run) + run_tree = ReplaySafeRunTree(RunTree(**kwargs)) run_tree.post() try: - with tracing_context(parent=raw_run, client=client): + with tracing_context(parent=run_tree, client=client): yield run_tree except Exception as exc: if not _is_benign_error(exc): @@ -327,6 +447,7 @@ def workflow_interceptor_class( self, input: temporalio.worker.WorkflowInterceptorClassInput ) -> type[_LangSmithWorkflowInboundInterceptor]: """Return the workflow interceptor class with config bound.""" + _patch_aio_to_thread() config = self class InterceptorWithConfig(_LangSmithWorkflowInboundInterceptor): @@ -418,7 +539,7 @@ async def execute_activity(self, input: Any) -> Any: extra_metadata = { "temporalWorkflowID": info.workflow_id or "", 
"temporalRunID": info.workflow_run_id or "", - "temporalActivityID": info.activity_id, + "temporalActivityID": info.activity_id or "", } # Unconditionally set tracing context so @traceable functions inside # activities can use the plugin's LangSmith client and inherit parent. @@ -465,49 +586,95 @@ def init(self, outbound: temporalio.worker.WorkflowOutboundInterceptor) -> None: @contextmanager def _workflow_maybe_run( - self, name: str, headers: Mapping[str, Payload] | None = None + self, + name: str, + headers: Mapping[str, Payload] | None = None, + *, + is_handler: bool = False, ) -> Iterator[Any | None]: """Workflow-specific run creation with metadata. Extracts parent from headers (if provided) and stores the run (or parent fallback) as ``_current_run`` so the outbound interceptor can propagate context even when ``add_temporal_runs=False``. + + Always sets up ``tracing_context`` so ``@traceable`` functions called + from workflow code can discover the parent and LangSmith client, + independent of the ``add_temporal_runs`` toggle. + + When ``is_handler`` is True and no LangSmith context is found in + headers, skips trace creation if a workflow run is already active + (``_current_run`` is set). This suppresses orphan traces from + uninstrumented client operations (e.g. query polling) while still + allowing handler traces when invoked with propagated context. """ parent = _extract_context(headers) if headers else None + if parent is not None: + parent.ls_client = self._config._client + # Handler from an uninstrumented client during workflow execution: + # no LangSmith headers but _current_run is set. Skip trace creation + # to avoid orphan/duplicate handler traces (e.g. query polling). 
+ if is_handler and parent is None and self._current_run is not None: + yield None + return info = temporalio.workflow.info() extra_metadata = { "temporalWorkflowID": info.workflow_id, "temporalRunID": info.run_id, } - with self._config.maybe_run( - name, parent=parent, extra_metadata=extra_metadata - ) as run: - self._current_run = run or parent - try: - yield run - finally: - self._current_run = None + # Set up tracing context for @traceable functions inside the workflow. + # When add_temporal_runs=True, _maybe_run overrides with the + # RunWorkflow run as parent. When False, this outer context ensures + # @traceable still sees the propagated parent from headers. + ctx_kwargs: dict[str, Any] = { + "client": self._config._client, + "enabled": True, + } + if parent: + ctx_kwargs["parent"] = parent + with tracing_context(**ctx_kwargs): + with self._config.maybe_run( + name, + parent=parent, + extra_metadata=extra_metadata, + ) as run: + prev_run = self._current_run + self._current_run = run or parent + try: + yield run + finally: + self._current_run = prev_run async def execute_workflow(self, input: Any) -> Any: + wf_type = temporalio.workflow.info().workflow_type with self._workflow_maybe_run( - f"RunWorkflow:{temporalio.workflow.info().workflow_type}", input.headers + f"RunWorkflow:{wf_type}", + input.headers, ): return await super().execute_workflow(input) async def handle_signal(self, input: Any) -> None: - with self._workflow_maybe_run(f"HandleSignal:{input.signal}", input.headers): + with self._workflow_maybe_run( + f"HandleSignal:{input.signal}", input.headers, is_handler=True + ): return await super().handle_signal(input) async def handle_query(self, input: Any) -> Any: - with self._workflow_maybe_run(f"HandleQuery:{input.query}", input.headers): + with self._workflow_maybe_run( + f"HandleQuery:{input.query}", input.headers, is_handler=True + ): return await super().handle_query(input) def handle_update_validator(self, input: Any) -> None: - with 
self._workflow_maybe_run(f"ValidateUpdate:{input.update}", input.headers): + with self._workflow_maybe_run( + f"ValidateUpdate:{input.update}", input.headers, is_handler=True + ): return super().handle_update_validator(input) async def handle_update_handler(self, input: Any) -> Any: - with self._workflow_maybe_run(f"HandleUpdate:{input.update}", input.headers): + with self._workflow_maybe_run( + f"HandleUpdate:{input.update}", input.headers, is_handler=True + ): return await super().handle_update_handler(input) diff --git a/tests/contrib/langsmith/conftest.py b/tests/contrib/langsmith/conftest.py index 7973f880a..92c6a3db5 100644 --- a/tests/contrib/langsmith/conftest.py +++ b/tests/contrib/langsmith/conftest.py @@ -1,4 +1,4 @@ -"""Shared test fixtures for LangSmith plugin tests.""" +"""Shared test helpers for LangSmith plugin tests.""" from __future__ import annotations @@ -6,8 +6,6 @@ from typing import Any from unittest.mock import MagicMock -import pytest - @dataclass class _RunRecord: @@ -91,27 +89,11 @@ def _walk(parent_id: str | None, depth: int) -> None: return result -@pytest.fixture -def collector() -> InMemoryRunCollector: - return InMemoryRunCollector() - - -@pytest.fixture -def mock_ls_client(collector: InMemoryRunCollector) -> MagicMock: - """A mock langsmith.Client that records create_run / update_run calls.""" +def make_mock_ls_client(collector: InMemoryRunCollector) -> MagicMock: + """Create a mock langsmith.Client wired to a collector.""" client = MagicMock() client.create_run.side_effect = collector.record_create client.update_run.side_effect = collector.record_update - # Stub session property (needed by RunTree internals) client.session = MagicMock() client.tracing_queue = MagicMock() return client - - -@pytest.fixture -def langsmith_plugin(mock_ls_client: MagicMock, collector: InMemoryRunCollector): - """Return (plugin, collector) wired to a mock client.""" - from temporalio.contrib.langsmith import LangSmithPlugin - - plugin = 
LangSmithPlugin(client=mock_ls_client) - return plugin, collector diff --git a/tests/contrib/langsmith/test_integration.py b/tests/contrib/langsmith/test_integration.py index f7799eb42..936627b5b 100644 --- a/tests/contrib/langsmith/test_integration.py +++ b/tests/contrib/langsmith/test_integration.py @@ -2,6 +2,7 @@ from __future__ import annotations +import asyncio import uuid from datetime import timedelta from typing import Any @@ -12,11 +13,16 @@ from langsmith import traceable, tracing_context from temporalio import activity, common, nexus, workflow -from temporalio.client import Client, WorkflowFailureError +from temporalio.client import Client, WorkflowFailureError, WorkflowQueryFailedError from temporalio.contrib.langsmith import LangSmithPlugin from temporalio.exceptions import ApplicationError +from temporalio.service import RPCError from temporalio.testing import WorkflowEnvironment -from tests.contrib.langsmith.conftest import InMemoryRunCollector, dump_runs +from tests.contrib.langsmith.conftest import ( + InMemoryRunCollector, + dump_runs, + make_mock_ls_client, +) from tests.helpers import new_worker from tests.helpers.nexus import make_nexus_endpoint_name @@ -119,26 +125,29 @@ async def run(self) -> str: class ComprehensiveWorkflow: def __init__(self) -> None: self._signal_received = False + self._waiting_for_signal = False self._complete = False @workflow.run async def run(self) -> str: - # 1. Regular activity + # Regular activity await workflow.execute_activity( nested_traceable_activity, start_to_close_timeout=timedelta(seconds=10), ) - # 2. Local activity + # Local activity await workflow.execute_local_activity( nested_traceable_activity, start_to_close_timeout=timedelta(seconds=10), ) - # 3. Child workflow + # Direct @traceable call + await _outer_chain("from-workflow") + # Child workflow await workflow.execute_child_workflow( TraceableActivityWorkflow.run, id=f"child-{workflow.info().workflow_id}", ) - # 4. 
Nexus operation + # Nexus operation nexus_client = workflow.create_nexus_client( endpoint=make_nexus_endpoint_name(workflow.info().task_queue), service=NexusService, @@ -148,9 +157,15 @@ async def run(self) -> str: input="test-input", ) await nexus_handle - # 5. Wait for signal + # Wait for signal + self._waiting_for_signal = True await workflow.wait_condition(lambda: self._signal_received) - # 5. Wait for update to complete + # Post-signal activity (verifies context survives signal wait) + await workflow.execute_activity( + nested_traceable_activity, + start_to_close_timeout=timedelta(seconds=10), + ) + # Wait for update to complete await workflow.wait_condition(lambda: self._complete) return "comprehensive-done" @@ -162,6 +177,10 @@ def my_signal(self, _value: str) -> None: def my_query(self) -> bool: return self._signal_received + @workflow.query + def is_waiting_for_signal(self) -> bool: + return self._waiting_for_signal + @workflow.update def my_update(self, value: str) -> str: self._complete = True @@ -233,11 +252,7 @@ def _make_plugin_and_collector( ) -> tuple[LangSmithPlugin, InMemoryRunCollector, MagicMock]: """Create a LangSmithPlugin wired to an InMemoryRunCollector via mock client.""" collector = InMemoryRunCollector() - mock_ls_client = MagicMock() - mock_ls_client.create_run.side_effect = collector.record_create - mock_ls_client.update_run.side_effect = collector.record_update - mock_ls_client.session = MagicMock() - mock_ls_client.tracing_queue = MagicMock() + mock_ls_client = make_mock_ls_client(collector) plugin = LangSmithPlugin(client=mock_ls_client, **kwargs) return plugin, collector, mock_ls_client @@ -252,6 +267,37 @@ def _make_client_and_collector( return Client(**config), collector, mock_ls_client +def _make_temporal_client( + client: Client, mock_ls_client: MagicMock, **kwargs: Any +) -> Client: + """Create a Temporal Client with a fresh LangSmith plugin.""" + plugin = LangSmithPlugin(client=mock_ls_client, **kwargs) + config = 
client.config() + config["plugins"] = [plugin] + return Client(**config) + + +async def _poll_query( + handle: Any, + query: Any, + *, + expected: Any = True, + timeout_secs: float = 10.0, + interval_secs: float = 0.2, +) -> bool: + """Poll a workflow query until it returns the expected value or times out.""" + deadline = asyncio.get_event_loop().time() + timeout_secs + while asyncio.get_event_loop().time() < deadline: + try: + result = await handle.query(query) + if result == expected: + return True + except (WorkflowQueryFailedError, RPCError): + pass # Query not yet available (workflow hasn't started) + await asyncio.sleep(interval_secs) + return False + + # --------------------------------------------------------------------------- # TestBasicTracing # --------------------------------------------------------------------------- @@ -272,6 +318,7 @@ async def test_workflow_activity_trace_hierarchy( temporal_client, SimpleWorkflow, activities=[simple_activity], + max_cached_workflows=0, ) as worker: result = await temporal_client.start_workflow( SimpleWorkflow.run, @@ -375,6 +422,7 @@ async def test_activity_failure_marked( ActivityFailureWorkflow, activities=[failing_activity], workflow_failure_exception_types=[ApplicationError], + max_cached_workflows=0, ) as worker: handle = await temporal_client.start_workflow( ActivityFailureWorkflow.run, @@ -415,6 +463,7 @@ async def test_workflow_failure_marked( temporal_client, FailingWorkflow, workflow_failure_exception_types=[ApplicationError], + max_cached_workflows=0, ) as worker: handle = await temporal_client.start_workflow( FailingWorkflow.run, @@ -452,6 +501,7 @@ async def test_benign_error_not_marked( BenignErrorWorkflow, activities=[benign_failing_activity], workflow_failure_exception_types=[ApplicationError], + max_cached_workflows=0, ) as worker: handle = await temporal_client.start_workflow( BenignErrorWorkflow.run, @@ -488,41 +538,74 @@ class TestComprehensiveTracing: async def 
test_comprehensive_with_temporal_runs( self, client: Client, env: WorkflowEnvironment ) -> None: - """Full workflow exercising activity, local activity, child workflow, - signal, query, and update — all nested under an ambient @traceable. + """Full trace hierarchy with worker restart mid-workflow. + + Starts workflow on first worker, kills it at signal wait point, + then starts fresh worker+plugin to signal and complete the workflow. + Verifies combined hierarchy from both worker lifetimes in one assertion. """ - temporal_client, collector, mock_ls_client = _make_client_and_collector( - client, add_temporal_runs=True - ) + if env.supports_time_skipping: + pytest.skip("Time-skipping server doesn't persist headers.") + + task_queue = f"comprehensive-{uuid.uuid4()}" + workflow_id = f"comprehensive-{uuid.uuid4()}" + collector = InMemoryRunCollector() + mock_ls = make_mock_ls_client(collector) @traceable(name="user_pipeline") async def user_pipeline() -> str: + # Phase 1: Start workflow, run until signal wait + temporal_client_1 = _make_temporal_client( + client, mock_ls, add_temporal_runs=True + ) async with new_worker( - temporal_client, + temporal_client_1, ComprehensiveWorkflow, TraceableActivityWorkflow, SimpleNexusWorkflow, activities=[nested_traceable_activity, traceable_activity], nexus_service_handlers=[NexusService()], + task_queue=task_queue, + max_cached_workflows=0, ) as worker: await env.create_nexus_endpoint( make_nexus_endpoint_name(worker.task_queue), worker.task_queue, ) - handle = await temporal_client.start_workflow( + handle = await temporal_client_1.start_workflow( ComprehensiveWorkflow.run, - id=f"comprehensive-{uuid.uuid4()}", + id=workflow_id, task_queue=worker.task_queue, ) - # Query + # Poll via raw client to avoid creating trace runs + raw_handle = client.get_workflow_handle(workflow_id) + assert await _poll_query( + raw_handle, + ComprehensiveWorkflow.is_waiting_for_signal, + expected=True, + ), "Workflow never reached signal wait point" + + 
# Phase 2: Fresh worker+plugin, signal to resume, complete + temporal_client_2 = _make_temporal_client( + client, mock_ls, add_temporal_runs=True + ) + async with new_worker( + temporal_client_2, + ComprehensiveWorkflow, + TraceableActivityWorkflow, + SimpleNexusWorkflow, + activities=[nested_traceable_activity, traceable_activity], + nexus_service_handlers=[NexusService()], + task_queue=task_queue, + max_cached_workflows=0, + ): + handle = temporal_client_2.get_workflow_handle(workflow_id) await handle.query(ComprehensiveWorkflow.my_query) - # Signal await handle.signal(ComprehensiveWorkflow.my_signal, "hello") - # Update (completes the workflow) await handle.execute_update(ComprehensiveWorkflow.my_update, "finish") return await handle.result() - with tracing_context(client=mock_ls_client, enabled=True): + with tracing_context(client=mock_ls, enabled=True): result = await user_pipeline() assert result == "comprehensive-done" @@ -540,6 +623,8 @@ async def user_pipeline() -> str: " RunActivity:nested_traceable_activity", " outer_chain", " inner_llm_call", + " outer_chain", + " inner_llm_call", " StartChildWorkflow:TraceableActivityWorkflow", " RunWorkflow:TraceableActivityWorkflow", " StartActivity:traceable_activity", @@ -552,6 +637,10 @@ async def user_pipeline() -> str: " StartActivity:traceable_activity", " RunActivity:traceable_activity", " inner_llm_call", + " StartActivity:nested_traceable_activity", + " RunActivity:nested_traceable_activity", + " outer_chain", + " inner_llm_call", " QueryWorkflow:my_query", " HandleQuery:my_query", " SignalWorkflow:my_signal", @@ -567,41 +656,71 @@ async def user_pipeline() -> str: async def test_comprehensive_without_temporal_runs( self, client: Client, env: WorkflowEnvironment ) -> None: - """With add_temporal_runs=False, only @traceable runs appear, - all nested under the ambient user_pipeline. + """Same comprehensive workflow with add_temporal_runs=False and worker restart. + + Only @traceable runs appear. 
Context propagation via headers still works. """ - temporal_client, collector, mock_ls_client = _make_client_and_collector( - client, add_temporal_runs=False - ) + if env.supports_time_skipping: + pytest.skip("Time-skipping server doesn't persist headers.") + + task_queue = f"comprehensive-no-runs-{uuid.uuid4()}" + workflow_id = f"comprehensive-no-runs-{uuid.uuid4()}" + collector = InMemoryRunCollector() + mock_ls = make_mock_ls_client(collector) @traceable(name="user_pipeline") async def user_pipeline() -> str: + # Phase 1: Start workflow, run until signal wait + temporal_client_1 = _make_temporal_client( + client, mock_ls, add_temporal_runs=False + ) async with new_worker( - temporal_client, + temporal_client_1, ComprehensiveWorkflow, TraceableActivityWorkflow, SimpleNexusWorkflow, activities=[nested_traceable_activity, traceable_activity], nexus_service_handlers=[NexusService()], + task_queue=task_queue, + max_cached_workflows=0, ) as worker: await env.create_nexus_endpoint( make_nexus_endpoint_name(worker.task_queue), worker.task_queue, ) - handle = await temporal_client.start_workflow( + handle = await temporal_client_1.start_workflow( ComprehensiveWorkflow.run, - id=f"comprehensive-no-runs-{uuid.uuid4()}", + id=workflow_id, task_queue=worker.task_queue, ) - # Query - await handle.query(ComprehensiveWorkflow.my_query) - # Signal + # Poll via raw client to avoid creating trace runs + raw_handle = client.get_workflow_handle(workflow_id) + assert await _poll_query( + raw_handle, + ComprehensiveWorkflow.is_waiting_for_signal, + expected=True, + ), "Workflow never reached signal wait point" + + # Phase 2: Fresh worker+plugin, signal to resume, complete + temporal_client_2 = _make_temporal_client( + client, mock_ls, add_temporal_runs=False + ) + async with new_worker( + temporal_client_2, + ComprehensiveWorkflow, + TraceableActivityWorkflow, + SimpleNexusWorkflow, + activities=[nested_traceable_activity, traceable_activity], + 
nexus_service_handlers=[NexusService()], + task_queue=task_queue, + max_cached_workflows=0, + ): + handle = temporal_client_2.get_workflow_handle(workflow_id) await handle.signal(ComprehensiveWorkflow.my_signal, "hello") - # Update (completes the workflow) await handle.execute_update(ComprehensiveWorkflow.my_update, "finish") return await handle.result() - with tracing_context(client=mock_ls_client, enabled=True): + with tracing_context(client=mock_ls, enabled=True): result = await user_pipeline() assert result == "comprehensive-done" @@ -613,8 +732,12 @@ async def user_pipeline() -> str: " inner_llm_call", " outer_chain", " inner_llm_call", + " outer_chain", + " inner_llm_call", " inner_llm_call", " inner_llm_call", + " outer_chain", + " inner_llm_call", ] assert ( hierarchy == expected diff --git a/tests/contrib/langsmith/test_interceptor.py b/tests/contrib/langsmith/test_interceptor.py index b64b6aeed..17050bfe5 100644 --- a/tests/contrib/langsmith/test_interceptor.py +++ b/tests/contrib/langsmith/test_interceptor.py @@ -541,10 +541,12 @@ async def test_execute_activity_creates_run_with_context_and_metadata( assert metadata["temporalWorkflowID"] == "wf-123" assert metadata["temporalRunID"] == "run-456" assert metadata["temporalActivityID"] == "act-789" - # Verify tracing_context sets parent + # Verify tracing_context sets parent (wrapped in ReplaySafeRunTree) mock_tracing_ctx.assert_called() ctx_kwargs = mock_tracing_ctx.call_args.kwargs - assert ctx_kwargs.get("parent") is mock_run + parent = ctx_kwargs.get("parent") + assert isinstance(parent, ReplaySafeRunTree) + assert parent._run is mock_run # Verify super() called and result passed through mock_next.execute_activity.assert_called_once() assert result == "activity_result" diff --git a/tests/contrib/langsmith/test_plugin.py b/tests/contrib/langsmith/test_plugin.py index 3f7df1c0b..f902d20ef 100644 --- a/tests/contrib/langsmith/test_plugin.py +++ b/tests/contrib/langsmith/test_plugin.py @@ -5,6 +5,7 @@ import 
uuid from unittest.mock import MagicMock +import pytest from langsmith import traceable, tracing_context from temporalio.client import Client @@ -17,6 +18,7 @@ SimpleNexusWorkflow, TraceableActivityWorkflow, _make_client_and_collector, + _poll_query, nested_traceable_activity, traceable_activity, ) @@ -55,6 +57,9 @@ async def test_comprehensive_plugin_trace_hierarchy( self, client: Client, env: WorkflowEnvironment ) -> None: """Plugin wired to a real Temporal worker produces the full trace hierarchy.""" + if env.supports_time_skipping: + pytest.skip("Time-skipping server doesn't persist headers.") + temporal_client, collector, mock_ls_client = _make_client_and_collector( client, add_temporal_runs=True ) @@ -68,21 +73,27 @@ async def user_pipeline() -> str: SimpleNexusWorkflow, activities=[nested_traceable_activity, traceable_activity], nexus_service_handlers=[NexusService()], + max_cached_workflows=0, ) as worker: await env.create_nexus_endpoint( make_nexus_endpoint_name(worker.task_queue), worker.task_queue, ) + workflow_id = f"plugin-comprehensive-{uuid.uuid4()}" handle = await temporal_client.start_workflow( ComprehensiveWorkflow.run, - id=f"plugin-comprehensive-{uuid.uuid4()}", + id=workflow_id, task_queue=worker.task_queue, ) - # Query + # Poll via raw client to avoid creating trace runs + raw_handle = client.get_workflow_handle(workflow_id) + assert await _poll_query( + raw_handle, + ComprehensiveWorkflow.is_waiting_for_signal, + expected=True, + ), "Workflow never reached signal wait point" await handle.query(ComprehensiveWorkflow.my_query) - # Signal await handle.signal(ComprehensiveWorkflow.my_signal, "hello") - # Update (completes the workflow) await handle.execute_update(ComprehensiveWorkflow.my_update, "finish") return await handle.result() @@ -104,6 +115,8 @@ async def user_pipeline() -> str: " RunActivity:nested_traceable_activity", " outer_chain", " inner_llm_call", + " outer_chain", + " inner_llm_call", " 
StartChildWorkflow:TraceableActivityWorkflow", " RunWorkflow:TraceableActivityWorkflow", " StartActivity:traceable_activity", @@ -116,6 +129,10 @@ async def user_pipeline() -> str: " StartActivity:traceable_activity", " RunActivity:traceable_activity", " inner_llm_call", + " StartActivity:nested_traceable_activity", + " RunActivity:nested_traceable_activity", + " outer_chain", + " inner_llm_call", " QueryWorkflow:my_query", " HandleQuery:my_query", " SignalWorkflow:my_signal", From 197a2d3aaecddacc7dbb6aa97beaa5c6d626cf61 Mon Sep 17 00:00:00 2001 From: Maple Xu Date: Wed, 25 Mar 2026 14:57:11 -0400 Subject: [PATCH 12/30] Implement background thread I/O for LangSmith workflow tracing MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Move RunTree.post()/patch() I/O off the workflow task thread to a single-worker ThreadPoolExecutor, preventing deadlocks from compressed_traces.lock contention with the LangSmith drain thread. Key changes: - _ReplaySafeRunTree.create_child() override propagates replay safety and deterministic IDs to nested @langsmith.traceable calls - Executor-backed post()/patch() with FIFO ordering and fire-and-forget error logging via Future.add_done_callback - _ContextBridgeRunTree for add_temporal_runs=False without external context — invisible parent that produces root @traceable runs - aio_to_thread patch simplified: removed harmful replay-time tracing disable, added error gate for async @traceable without plugin - Plugin shutdown via SimplePlugin.run_context instead of dead method - Fix misleading comments referencing test artifacts instead of production reasons, remove OTel cross-references - Strict dump_runs catches dangling parent_run_id references - Add **/CLAUDE.md to .gitignore Co-Authored-By: Claude Opus 4.6 (1M context) --- .gitignore | 1 + CLAUDE.md | 108 --- temporalio/contrib/langsmith/_interceptor.py | 246 +++++- temporalio/contrib/langsmith/_plugin.py | 23 +- tests/contrib/langsmith/conftest.py 
| 17 +- tests/contrib/langsmith/test_background_io.py | 705 ++++++++++++++++++ tests/contrib/langsmith/test_integration.py | 200 +++++ tests/contrib/langsmith/test_interceptor.py | 46 +- 8 files changed, 1164 insertions(+), 182 deletions(-) delete mode 100644 CLAUDE.md create mode 100644 tests/contrib/langsmith/test_background_io.py diff --git a/.gitignore b/.gitignore index c35cd4447..923875d32 100644 --- a/.gitignore +++ b/.gitignore @@ -8,6 +8,7 @@ temporalio/bridge/temporal_sdk_bridge* /tests/helpers/golangworker/golangworker /.idea /sdk-python.iml +**/CLAUDE.md /.zed *.DS_Store tags diff --git a/CLAUDE.md b/CLAUDE.md deleted file mode 100644 index 92ab22596..000000000 --- a/CLAUDE.md +++ /dev/null @@ -1,108 +0,0 @@ -# CLAUDE.md — Temporal Python SDK - -## CI Pipeline - -CI is defined in `.github/workflows/ci.yml`. The main jobs are: - -### `build-lint-test` (matrix: Python 3.10/3.14 x multiple OS) -1. `poe build-develop` — builds the Rust bridge via maturin -2. `poe lint` — runs ALL of the following (defined in `pyproject.toml [tool.poe.tasks]`): - - `uv run ruff check --select I` — import sorting - - `uv run ruff format --check` — code formatting - - `uv run pyright` — type checking (whole repo) - - `uv run mypy --namespace-packages --check-untyped-defs .` — type checking (whole repo) - - `uv run basedpyright` — stricter type checking (whole repo, catches more than pyright) - - `uv run pydocstyle --ignore-decorators=overload` — docstring style -3. `poe test` — runs `uv run pytest` -4. Time-skipping tests (non-ARM only) - -### `test-latest-deps` (ubuntu, Python 3.13, upgraded deps) -Same as above but with `uv lock --upgrade` first. - -### `features-tests` -Runs the `temporalio/features` repo tests against this branch. - -## Before Pushing - -Always run the full lint suite locally before pushing: -``` -uv run ruff check --select I -uv run ruff format --check -uv run pyright -uv run mypy --namespace-packages --check-untyped-defs . 
-uv run basedpyright -uv run pydocstyle --ignore-decorators=overload -``` - -Or equivalently: `poe lint` (requires `poe build-develop` first). - -To auto-fix formatting: `poe format` (runs `ruff check --select I --fix` + `ruff format`). - -## Dev Commands - -All commands use `uv run` prefix. Key poe tasks: -- `poe build-develop` — build Rust bridge (required before lint/test) -- `poe format` — auto-fix formatting -- `poe lint` — run all linters -- `poe test` — run pytest - -## Team Workflow - -This repo uses **agent teams** (not subagents with worktrees). Delegate coding tasks to the `coder` teammate via `SendMessage`. - -**All agents (team-lead and teammates) must load the `temporal-developer` skill** at the start of any task. This provides Temporal-specific guidance for workflows, activities, signals, queries, updates, Nexus, and SDK patterns. - -### What coder CAN do -- Read/explore code (Glob, Grep, Read) -- Edit and write files (Edit, Write) -- Spawn sub-agents for exploration - -### What coder CANNOT do — team-lead must handle -- **Run tests** — `uv run pytest` has no `--prefix` equivalent, and `cd` doesn't persist across Bash calls. -- **Run lints** — same reason (`uv run ruff`, `uv run pyright`, etc.). -- **Git operations** — commits, pushes, branch management. - -### Writing teammate prompts -Be thorough and explicit upfront — don't rely on correcting teammates after launch. Every prompt to coder should include: -- **What to do** — the specific task, relevant file paths, and expected outcome. -- **What NOT to do** — explicitly state that coder cannot run tests or lints. Don't let them try and fail. -- **Operational constraints** — remind them: no compound Bash commands, no `git` commands, no `uv run`. Use `Edit`/`Write`/`Read`/`Glob`/`Grep` only. -- **Load the `temporal-developer` skill** — remind teammates to invoke it at the start of their task. 
-- **Dev environment context** — whether the Rust bridge is built, which branch they're on, any known lint pitfalls (e.g., basedpyright strictness). -- **Reference material** — point to existing patterns in the codebase (file paths and line numbers) rather than describing from memory. - -### Workflow -1. **Team-lead** sends task to coder with a thorough prompt (see above). -2. **Coder** explores, writes code, reports back. -3. **Team-lead** runs all lints and tests, reports failures back to coder for fixes. -4. **Team-lead** commits and pushes after user approval. - -### Context management -- Delegate aggressively to preserve your context window. -- Do not duplicate work your teammate is doing (don't read the same files they're exploring). -- When coder reports back, trust their findings — don't re-verify unless something seems off. - -## CI Lint Details - -`basedpyright` is the strictest linter and the most common source of CI failures. It catches things the others miss: -- `reportDeprecated` — flags use of deprecated APIs -- `reportUnusedParameter` — unused function parameters -- `reportMissingSuperCall` — missing `super().__init__()` calls -- `reportUninitializedInstanceVariable` — instance vars not set in `__init__` - -Always run `uv run basedpyright` locally before pushing. If it passes, the other type checkers will almost certainly pass too. - -## Time-Skipping Tests - -CI runs tests twice: `poe test` (normal mode) and `poe test --workflow-environment time-skipping` (non-ARM only). The time-skipping test server has a **known limitation: it does not persist headers**. This means any test that depends on header propagation (e.g., tracing context) will fail in time-skipping mode. The established pattern for handling this is: - -```python -if env.supports_time_skipping: - pytest.skip("Time skipping server doesn't persist headers.") -``` - -See `tests/worker/test_workflow.py:8249` for the existing precedent. 
- -## Branch Naming - -Temporal convention: prepend `maplexu/` to branch names. diff --git a/temporalio/contrib/langsmith/_interceptor.py b/temporalio/contrib/langsmith/_interceptor.py index 789825251..30fc769e7 100644 --- a/temporalio/contrib/langsmith/_interceptor.py +++ b/temporalio/contrib/langsmith/_interceptor.py @@ -3,9 +3,11 @@ from __future__ import annotations import json +import logging import random import uuid from collections.abc import Iterator, Mapping +from concurrent.futures import Future, ThreadPoolExecutor from contextlib import contextmanager from typing import Any, ClassVar, NoReturn @@ -21,6 +23,11 @@ from temporalio.api.common.v1 import Payload from temporalio.exceptions import ApplicationError, ApplicationErrorCategory +# This logger is only used in _log_future_exception, which runs on the +# executor thread (not the workflow thread). Never log directly from +# workflow interceptor code — the sandbox blocks logging I/O. +logger = logging.getLogger(__name__) + # --------------------------------------------------------------------------- # Constants # --------------------------------------------------------------------------- @@ -73,11 +80,12 @@ def _inject_current_context( def _extract_context( headers: Mapping[str, Payload], + executor: ThreadPoolExecutor, ) -> Any | None: """Extract LangSmith context from Temporal payload headers. - Reconstructs a :class:`RunTree` from the ``_langsmith-context`` header on - the receiving side, wrapped in a :class:`ReplaySafeRunTree` so inbound + Reconstructs a :class:`RunTree` from the ``_temporal-langsmith-context`` header on + the receiving side, wrapped in a :class:`_ReplaySafeRunTree` so inbound interceptors can establish a parent-child relationship with the sender's run. Returns ``None`` if no header is present. 
""" @@ -86,7 +94,7 @@ def _extract_context( return None ls_headers = _payload_converter.from_payloads([header])[0] run = RunTree.from_headers(ls_headers) - return ReplaySafeRunTree(run) if run else None + return _ReplaySafeRunTree(run, executor=executor) if run else None def _inject_nexus_context( @@ -103,6 +111,7 @@ def _inject_nexus_context( def _extract_nexus_context( headers: dict[str, str], + executor: ThreadPoolExecutor, ) -> Any | None: """Extract LangSmith context from Nexus string headers.""" raw = headers.get(HEADER_KEY) @@ -110,7 +119,7 @@ def _extract_nexus_context( return None ls_headers = json.loads(raw) run = RunTree.from_headers(ls_headers) - return ReplaySafeRunTree(run) if run else None + return _ReplaySafeRunTree(run, executor=executor) if run else None # --------------------------------------------------------------------------- @@ -123,10 +132,15 @@ def _extract_nexus_context( def _patch_aio_to_thread() -> None: """Patch langsmith's ``aio_to_thread`` to run synchronously in workflows. - The ``@traceable`` decorator uses ``aio_to_thread()`` → + The ``@traceable`` decorator on async functions uses ``aio_to_thread()`` → ``loop.run_in_executor()`` for run setup/teardown. The Temporal workflow sandbox blocks ``run_in_executor``. This patch runs those functions synchronously (they are CPU-bound, no I/O) when inside a workflow. + + Also serves as an error gate: if ``_setup_run`` creates a plain ``RunTree`` + (no ``_ReplaySafeRunTree`` or ``_ContextBridgeRunTree`` parent), the + ``post()`` call would block. The patch detects this and raises a clear + error telling the user to configure the LangSmith plugin. 
""" global _aio_to_thread_patched # noqa: PLW0603 if _aio_to_thread_patched: @@ -148,16 +162,27 @@ async def _safe_aio_to_thread( if not temporalio.workflow.in_workflow(): return await _original(func, *args, __ctx=__ctx, **kwargs) with temporalio.workflow.unsafe.sandbox_unrestricted(): - # During replay, disable tracing so @traceable calls don't - # produce duplicate traces for code that already ran. # Run func directly in the current context (no ctx.run) so # that context var changes (e.g. _PARENT_RUN_TREE set by # @traceable's _setup_run) propagate to the caller. # This is safe because workflows are single-threaded. - if _is_replaying(): - with tracing_context(enabled=False): - return func(*args, **kwargs) - return func(*args, **kwargs) + # + # No replay-time tracing disable — _ReplaySafeRunTree.post() + # and patch() are no-ops during replay, which handles I/O + # suppression. _setup_run must run normally during replay to + # maintain parent-child linkage across the replay boundary. + result = func(*args, **kwargs) + # Error gate: if _setup_run created a plain RunTree (no + # _ReplaySafeRunTree parent found), post() would block on + # compressed_traces.lock. Detect this and raise a clear error. + if isinstance(result, RunTree) and not isinstance( + result, _ReplaySafeRunTree + ): + raise RuntimeError( + "Use the LangSmith plugin to enable @langsmith.traceable " + "in Temporal workflows." + ) + return result _aiter.aio_to_thread = _safe_aio_to_thread # type: ignore[assignment] _aio_to_thread_patched = True @@ -179,11 +204,11 @@ def _is_replaying() -> bool: def _get_workflow_random() -> random.Random | None: """Get a deterministic random generator for the current workflow. - Follows the OTel pattern: creates a workflow-safe random generator once - via ``workflow.new_random()`` and stores it on the workflow instance so - subsequent calls return the same generator. 
The generator is seeded from - the workflow's deterministic seed, so it produces identical UUIDs across - replays and eviction/restart cycles. + Creates a workflow-safe random generator once via + ``workflow.new_random()`` and stores it on the workflow instance so + subsequent calls return the same generator. The generator is seeded + from the workflow's deterministic seed, so it produces identical UUIDs + across replays and worker restarts. Returns ``None`` outside a workflow, in read-only (query) contexts, or when workflow APIs are mocked (unit tests). @@ -209,11 +234,11 @@ def _uuid_from_random(rng: random.Random) -> uuid.UUID: # --------------------------------------------------------------------------- -# ReplaySafeRunTree wrapper +# _ReplaySafeRunTree wrapper # --------------------------------------------------------------------------- -class ReplaySafeRunTree(RunTree): +class _ReplaySafeRunTree(RunTree): """Wrapper around a :class:`RunTree` with replay-safe ``post``, ``end``, and ``patch``. Inherits from :class:`RunTree` so ``isinstance`` checks pass, but does @@ -221,13 +246,20 @@ class ReplaySafeRunTree(RunTree): RunTree. Attribute access is delegated via ``__getattr__``/``__setattr__``. During replay, ``post()``, ``end()``, and ``patch()`` become no-ops. - Inside a workflow sandbox, these methods are wrapped in - ``sandbox_unrestricted()``. + In workflow context, ``post()`` and ``patch()`` submit to a single-worker + ``ThreadPoolExecutor`` for FIFO ordering, avoiding blocking on the + workflow task thread. 
""" - def __init__(self, run_tree: RunTree) -> None: # pyright: ignore[reportMissingSuperCall] + def __init__( # pyright: ignore[reportMissingSuperCall] + self, + run_tree: RunTree, + *, + executor: ThreadPoolExecutor, + ) -> None: """Wrap an existing RunTree with replay-safe overrides.""" object.__setattr__(self, "_run", run_tree) + object.__setattr__(self, "_executor", executor) def __getattr__(self, name: str) -> Any: """Delegate attribute access to the wrapped RunTree.""" @@ -241,18 +273,58 @@ def to_headers(self) -> dict[str, Any]: """Delegate to the wrapped RunTree's to_headers.""" return self._run.to_headers() + def _inject_deterministic_ids(self, kwargs: dict[str, Any]) -> None: + """Inject deterministic run_id and start_time in workflow context.""" + if temporalio.workflow.in_workflow(): + if kwargs.get("run_id") is None: + rng = _get_workflow_random() + if rng is not None: + kwargs["run_id"] = _uuid_from_random(rng) + if kwargs.get("start_time") is None: + kwargs["start_time"] = temporalio.workflow.now() + + def create_child(self, *args: Any, **kwargs: Any) -> _ReplaySafeRunTree: + """Create a child run, returning another _ReplaySafeRunTree. + + In workflow context, injects deterministic ``run_id`` and ``start_time`` + unless they are passed in manually via ``kwargs``. 
+ """ + self._inject_deterministic_ids(kwargs) + child_run = self._run.create_child(*args, **kwargs) + return _ReplaySafeRunTree(child_run, executor=self._executor) + + def _submit_or_fallback(self, fn: Any, *args: Any, **kwargs: Any) -> None: + """Submit work to executor, falling back to synchronous after shutdown.""" + + def _log_future_exception(future: Future[None]) -> None: + exc = future.exception() + if exc is not None: + logger.error("LangSmith background I/O error: %s", exc) + + try: + future = self._executor.submit(fn, *args, **kwargs) + future.add_done_callback(_log_future_exception) + except RuntimeError: + # Executor shut down — fall back to synchronous execution + fn(*args, **kwargs) + def post(self, exclude_child_runs: bool = True) -> None: """Post the run to LangSmith, skipping during replay.""" - if _is_replaying(): - return if temporalio.workflow.in_workflow(): + if _is_replaying(): + return with temporalio.workflow.unsafe.sandbox_unrestricted(): - self._run.post(exclude_child_runs=exclude_child_runs) + self._submit_or_fallback( + self._run.post, exclude_child_runs=exclude_child_runs + ) else: self._run.post(exclude_child_runs=exclude_child_runs) def end(self, **kwargs: Any) -> None: - """End the run, skipping during replay.""" + """End the run, skipping during replay. + + No I/O — just sets attributes on self._run. Runs synchronously. 
+ """ if _is_replaying(): return if temporalio.workflow.in_workflow(): @@ -263,15 +335,85 @@ def end(self, **kwargs: Any) -> None: def patch(self, *, exclude_inputs: bool = False) -> None: """Patch the run to LangSmith, skipping during replay.""" - if _is_replaying(): - return if temporalio.workflow.in_workflow(): + if _is_replaying(): + return with temporalio.workflow.unsafe.sandbox_unrestricted(): - self._run.patch(exclude_inputs=exclude_inputs) + self._submit_or_fallback(self._run.patch, exclude_inputs=exclude_inputs) else: self._run.patch(exclude_inputs=exclude_inputs) +class _ContextBridgeRunTree(_ReplaySafeRunTree): + """Lightweight bridge for ``add_temporal_runs=False`` without external context. + + Never posted, patched, or ended — no trace of it exists in LangSmith. + ``create_child()`` creates root ``_ReplaySafeRunTree`` objects (no + ``parent_run_id``) so that ``@traceable`` calls appear as independent + root runs. + """ + + def __init__( # pyright: ignore[reportMissingSuperCall] + self, + *, + ls_client: Any, + executor: ThreadPoolExecutor, + session_name: str | None = None, + replicas: list[Any] | None = None, + ) -> None: + """Create a context bridge with the given LangSmith client.""" + # Create a minimal RunTree for the bridge — it will never be posted + bridge_run = RunTree( + name="__bridge__", + run_type="chain", + ls_client=ls_client, + ) + if session_name is not None: + bridge_run.session_name = session_name + if replicas is not None: + bridge_run.replicas = replicas + object.__setattr__(self, "_run", bridge_run) + object.__setattr__(self, "_executor", executor) + + def post(self, exclude_child_runs: bool = True) -> NoReturn: + """Bridge must never be posted.""" + raise RuntimeError("ContextBridgeRunTree must never be posted") + + def patch(self, *, exclude_inputs: bool = False) -> NoReturn: + """Bridge must never be patched.""" + raise RuntimeError("ContextBridgeRunTree must never be patched") + + def end(self, **kwargs: Any) -> NoReturn: + 
"""Bridge must never be ended.""" + raise RuntimeError("ContextBridgeRunTree must never be ended") + + def create_child(self, *args: Any, **kwargs: Any) -> _ReplaySafeRunTree: + """Create a root _ReplaySafeRunTree (no parent_run_id). + + Creates a fresh ``RunTree(...)`` directly (not via + ``self._run.create_child``) to avoid setting ``parent_run_id``, + ``parent_dotted_order``, and ``trace_id`` to the bridge's values. + Maps ``run_id`` → ``id`` matching LangSmith's ``create_child`` convention. + """ + self._inject_deterministic_ids(kwargs) + + # Map run_id → id (matching RunTree.create_child convention) + if "run_id" in kwargs: + kwargs["id"] = kwargs.pop("run_id") + + # Inherit ls_client and session_name from bridge via constructor. + # session_name is a Pydantic field (alias="project_name") so it + # must be passed at construction to avoid the "default" fallback. + kwargs.setdefault("ls_client", self._run.ls_client) + kwargs.setdefault("session_name", self._run.session_name) + + child_run = RunTree(*args, **kwargs) + # Set replicas post-construction (not a RunTree Pydantic field) + if self._run.replicas is not None: + child_run.replicas = self._run.replicas + return _ReplaySafeRunTree(child_run, executor=self._executor) + + # --------------------------------------------------------------------------- # _maybe_run context manager # --------------------------------------------------------------------------- @@ -297,12 +439,13 @@ def _maybe_run( tags: list[str] | None = None, parent: Any | None = None, project_name: str | None = None, + executor: ThreadPoolExecutor, ) -> Iterator[Any | None]: """Create a LangSmith run, handling errors. - If add_temporal_runs is False, yields None (no run created). Context propagation is handled unconditionally by callers. 
- - When a run IS created, uses :class:`ReplaySafeRunTree` for + - When a run IS created, uses :class:`_ReplaySafeRunTree` for replay and sandbox safety, then sets it as ambient context via ``tracing_context(parent=run_tree)`` so ``get_current_run_tree()`` returns it and ``_inject_current_context()`` can inject it. @@ -318,6 +461,7 @@ def _maybe_run( tags: Tags to attach to the run. parent: Parent run for nesting. project_name: LangSmith project name override. + executor: ThreadPoolExecutor for background I/O. """ if not add_temporal_runs: yield None @@ -333,10 +477,14 @@ def _maybe_run( inputs=inputs or {}, ls_client=client, ) - # Deterministic IDs and start times in workflow context so that runs - # survive eviction/replay with max_cached_workflows=0. Uses a - # workflow-bound random generator (following the OTel pattern) to - # produce identical UUIDs across replays and worker restarts. + # Deterministic IDs and start times in workflow context so that + # replayed workflows produce identical LangSmith runs instead of + # duplicates. In production, a workflow can be evicted from the + # worker cache and later replayed on a different worker — without + # deterministic IDs the replayed execution would create a second + # run for the same logical operation. Uses a workflow-bound random + # generator seeded from the workflow's deterministic seed, so UUIDs + # are identical across replays. 
rng = _get_workflow_random() if rng is not None: kwargs["id"] = _uuid_from_random(rng) @@ -351,15 +499,15 @@ def _maybe_run( if project_name is not None: kwargs["project_name"] = project_name if parent is not None: - # Unwrap ReplaySafeRunTree so RunTree gets the real parent + # Unwrap _ReplaySafeRunTree so RunTree gets the real parent kwargs["parent_run"] = ( - parent._run if isinstance(parent, ReplaySafeRunTree) else parent + parent._run if isinstance(parent, _ReplaySafeRunTree) else parent ) if metadata: kwargs["extra"] = {"metadata": metadata} if tags: kwargs["tags"] = tags - run_tree = ReplaySafeRunTree(RunTree(**kwargs)) + run_tree = _ReplaySafeRunTree(RunTree(**kwargs), executor=executor) run_tree.post() try: with tracing_context(parent=run_tree, client=client): @@ -407,6 +555,7 @@ def __init__( self._add_temporal_runs = add_temporal_runs self._default_metadata = default_metadata or {} self._default_tags = default_tags or [] + self._executor = ThreadPoolExecutor(max_workers=1) @contextmanager def maybe_run( @@ -427,6 +576,7 @@ def maybe_run( metadata=metadata, tags=list(self._default_tags), parent=parent, + executor=self._executor, project_name=self._project_name, ) as run: yield run @@ -534,7 +684,7 @@ def __init__( self._config = config async def execute_activity(self, input: Any) -> Any: - parent = _extract_context(input.headers) + parent = _extract_context(input.headers, self._config._executor) info = temporalio.activity.info() extra_metadata = { "temporalWorkflowID": info.workflow_id or "", @@ -554,6 +704,8 @@ async def execute_activity(self, input: Any) -> Any: "client": self._config._client, "enabled": True, } + if self._config._project_name: + ctx_kwargs["project_name"] = self._config._project_name if parent: ctx_kwargs["parent"] = parent with tracing_context(**ctx_kwargs): @@ -608,7 +760,7 @@ def _workflow_maybe_run( uninstrumented client operations (e.g. query polling) while still allowing handler traces when invoked with propagated context. 
""" - parent = _extract_context(headers) if headers else None + parent = _extract_context(headers, self._config._executor) if headers else None if parent is not None: parent.ls_client = self._config._client # Handler from an uninstrumented client during workflow execution: @@ -630,7 +782,19 @@ def _workflow_maybe_run( "client": self._config._client, "enabled": True, } - if parent: + # When add_temporal_runs=False and no external parent, create a + # _ContextBridgeRunTree so @traceable calls get a _ReplaySafeRunTree + # parent via create_child. The bridge is invisible in LangSmith. + bridge: _ContextBridgeRunTree | None = None + if not self._config._add_temporal_runs and parent is None: + bridge = _ContextBridgeRunTree( + ls_client=self._config._client, + executor=self._config._executor, + session_name=self._config._project_name, + ) + ctx_kwargs["parent"] = bridge + ctx_kwargs["project_name"] = self._config._project_name + elif parent: ctx_kwargs["parent"] = parent with tracing_context(**ctx_kwargs): with self._config.maybe_run( @@ -766,7 +930,7 @@ def __init__( self._config = config async def execute_nexus_operation_start(self, input: Any) -> Any: - parent = _extract_nexus_context(input.ctx.headers) + parent = _extract_nexus_context(input.ctx.headers, self._config._executor) with self._config.maybe_run( f"RunStartNexusOperationHandler:{input.ctx.service}/{input.ctx.operation}", run_type="tool", @@ -775,7 +939,7 @@ async def execute_nexus_operation_start(self, input: Any) -> Any: return await self.next.execute_nexus_operation_start(input) async def execute_nexus_operation_cancel(self, input: Any) -> Any: - parent = _extract_nexus_context(input.ctx.headers) + parent = _extract_nexus_context(input.ctx.headers, self._config._executor) with self._config.maybe_run( f"RunCancelNexusOperationHandler:{input.ctx.service}/{input.ctx.operation}", run_type="tool", diff --git a/temporalio/contrib/langsmith/_plugin.py b/temporalio/contrib/langsmith/_plugin.py index 
4fac4d782..ea176436c 100644 --- a/temporalio/contrib/langsmith/_plugin.py +++ b/temporalio/contrib/langsmith/_plugin.py @@ -3,6 +3,8 @@ from __future__ import annotations import dataclasses +from collections.abc import AsyncIterator +from contextlib import asynccontextmanager from typing import Any from temporalio.contrib.langsmith._interceptor import LangSmithInterceptor @@ -59,19 +61,18 @@ def workflow_runner(runner: WorkflowRunner | None) -> WorkflowRunner: ) return runner + @asynccontextmanager + async def run_context() -> AsyncIterator[None]: + try: + yield + finally: + interceptor._executor.shutdown(wait=True) + if interceptor._client is not None: + interceptor._client.flush() + super().__init__( "langchain.LangSmithPlugin", interceptors=interceptors, workflow_runner=workflow_runner, + run_context=run_context, ) - - async def shutdown(self) -> None: - """Flush the LangSmith client to drain pending runs.""" - if not self.interceptors: - return - interceptor = self.interceptors[0] - if ( - isinstance(interceptor, LangSmithInterceptor) - and interceptor._client is not None - ): - interceptor._client.flush() diff --git a/tests/contrib/langsmith/conftest.py b/tests/contrib/langsmith/conftest.py index 92c6a3db5..4ceb0dda3 100644 --- a/tests/contrib/langsmith/conftest.py +++ b/tests/contrib/langsmith/conftest.py @@ -76,15 +76,16 @@ def _walk(parent_id: str | None, depth: int) -> None: result.append(" " * depth + child.name) _walk(child.id, depth + 1) - # Roots: runs whose parent_run_id is None or not in our set + # Strict: reject dangling parent references known_ids = {r.id for r in runs} - root_parents = { - r.parent_run_id - for r in runs - if r.parent_run_id is None or r.parent_run_id not in known_ids - } - for rp in sorted(root_parents, key=lambda x: (x is not None, x)): - _walk(rp, 0) + for r in runs: + if r.parent_run_id is not None and r.parent_run_id not in known_ids: + raise AssertionError( + f"Run {r.name!r} (id={r.id}) has parent_run_id={r.parent_run_id} 
" + f"which is not in the collected runs — dangling parent reference" + ) + # Only walk true roots (parent_run_id is None) + _walk(None, 0) return result diff --git a/tests/contrib/langsmith/test_background_io.py b/tests/contrib/langsmith/test_background_io.py new file mode 100644 index 000000000..75027ce1f --- /dev/null +++ b/tests/contrib/langsmith/test_background_io.py @@ -0,0 +1,705 @@ +"""Unit tests for _ReplaySafeRunTree and _ContextBridgeRunTree. + +Covers create_child propagation, executor-backed post/patch, +replay suppression, post-shutdown fallback, and aio_to_thread error gate. +""" + +from __future__ import annotations + +import logging +import threading +import uuid +from concurrent.futures import ThreadPoolExecutor +from datetime import datetime, timezone +from typing import Any +from unittest.mock import MagicMock, patch + +import pytest +from langsmith.run_trees import RunTree + +from temporalio.contrib.langsmith._interceptor import ( + _ReplaySafeRunTree, + _uuid_from_random, +) + +# Common patch targets +_MOD = "temporalio.contrib.langsmith._interceptor" +_PATCH_IN_WORKFLOW = f"{_MOD}.temporalio.workflow.in_workflow" +_PATCH_IS_REPLAYING = f"{_MOD}.temporalio.workflow.unsafe.is_replaying_history_events" +_PATCH_WF_NOW = f"{_MOD}.temporalio.workflow.now" +_PATCH_GET_WF_RANDOM = f"{_MOD}._get_workflow_random" + + +def _make_executor() -> ThreadPoolExecutor: + """Create a single-worker executor for tests.""" + return ThreadPoolExecutor(max_workers=1) + + +def _make_mock_run(**kwargs: Any) -> MagicMock: + """Create a mock RunTree.""" + mock = MagicMock(spec=RunTree) + mock.to_headers.return_value = {"langsmith-trace": "test"} + mock.ls_client = kwargs.get("ls_client", MagicMock()) + mock.session_name = kwargs.get("session_name", "test-session") + mock.replicas = kwargs.get("replicas", []) + mock.id = kwargs.get("id", uuid.uuid4()) + mock.name = kwargs.get("name", "test-run") + # create_child returns another mock RunTree by default + child_mock = 
MagicMock(spec=RunTree) + child_mock.id = uuid.uuid4() + child_mock.ls_client = mock.ls_client + child_mock.session_name = mock.session_name + child_mock.replicas = mock.replicas + mock.create_child.return_value = child_mock + return mock + + +# =================================================================== +# TestCreateChildPropagation +# =================================================================== + + +class TestCreateChildPropagation: + """Tests for _ReplaySafeRunTree.create_child() override.""" + + def test_create_child_returns_replay_safe_run_tree(self) -> None: + """create_child() must return a _ReplaySafeRunTree wrapping the child.""" + executor = _make_executor() + mock_run = _make_mock_run() + parent = _ReplaySafeRunTree(mock_run, executor=executor) + + child = parent.create_child(name="child-op", run_type="chain") + + assert isinstance(child, _ReplaySafeRunTree) + # The wrapped child should be the result of the inner run's create_child + mock_run.create_child.assert_called_once() + + @patch(_PATCH_GET_WF_RANDOM) + @patch(_PATCH_WF_NOW) + @patch(_PATCH_IN_WORKFLOW, return_value=True) + def test_create_child_injects_deterministic_ids_in_workflow( + self, + _mock_in_wf: Any, + mock_now: Any, + mock_get_random: Any, + ) -> None: + """In workflow context, create_child injects deterministic run_id and start_time.""" + import random as stdlib_random + + rng = stdlib_random.Random(42) + mock_get_random.return_value = rng + fake_now = datetime(2025, 1, 1, tzinfo=timezone.utc) + mock_now.return_value = fake_now + + expected_id = _uuid_from_random(stdlib_random.Random(42)) # same seed + + executor = _make_executor() + mock_run = _make_mock_run() + parent = _ReplaySafeRunTree(mock_run, executor=executor) + + # Simulate what _setup_run does: passes run_id=None explicitly + child = parent.create_child(name="child-op", run_type="chain", run_id=None) + + assert isinstance(child, _ReplaySafeRunTree) + # Verify the kwargs passed to inner create_child had 
deterministic values + call_kwargs = mock_run.create_child.call_args.kwargs + assert call_kwargs["run_id"] == expected_id + assert call_kwargs["start_time"] == fake_now + + def test_create_child_passes_through_kwargs(self) -> None: + """create_child passes through all kwargs to the inner run's create_child.""" + executor = _make_executor() + mock_run = _make_mock_run() + parent = _ReplaySafeRunTree(mock_run, executor=executor) + + child = parent.create_child( + name="child-op", + run_type="llm", + inputs={"prompt": "hello"}, + tags=["test"], + extra_kwarg="future-proof", + ) + + assert isinstance(child, _ReplaySafeRunTree) + call_kwargs = mock_run.create_child.call_args.kwargs + assert call_kwargs["name"] == "child-op" + assert call_kwargs["run_type"] == "llm" + assert call_kwargs["inputs"] == {"prompt": "hello"} + assert call_kwargs["tags"] == ["test"] + assert call_kwargs["extra_kwarg"] == "future-proof" + + def test_create_child_propagates_executor_to_child(self) -> None: + """The child _ReplaySafeRunTree must receive the same executor reference.""" + executor = _make_executor() + mock_run = _make_mock_run() + parent = _ReplaySafeRunTree(mock_run, executor=executor) + + child = parent.create_child(name="child-op", run_type="chain") + + assert isinstance(child, _ReplaySafeRunTree) + # Child should have the same executor + assert child._executor is executor + + @patch(_PATCH_IN_WORKFLOW, return_value=False) + def test_create_child_no_deterministic_ids_outside_workflow( + self, _mock_in_wf: Any + ) -> None: + """Outside workflow context, create_child does NOT inject deterministic IDs.""" + executor = _make_executor() + mock_run = _make_mock_run() + parent = _ReplaySafeRunTree(mock_run, executor=executor) + + child = parent.create_child(name="child-op", run_type="chain", run_id=None) + + assert isinstance(child, _ReplaySafeRunTree) + # run_id should remain None (not overridden) + call_kwargs = mock_run.create_child.call_args.kwargs + assert call_kwargs.get("run_id") 
is None + + @patch(_PATCH_GET_WF_RANDOM) + @patch(_PATCH_WF_NOW) + @patch(_PATCH_IN_WORKFLOW, return_value=True) + def test_create_child_respects_explicit_run_id( + self, + _mock_in_wf: Any, + mock_now: Any, + mock_get_random: Any, + ) -> None: + """If run_id is explicitly provided (not None), create_child preserves it.""" + import random as stdlib_random + + mock_get_random.return_value = stdlib_random.Random(42) + mock_now.return_value = datetime(2025, 1, 1, tzinfo=timezone.utc) + + executor = _make_executor() + mock_run = _make_mock_run() + parent = _ReplaySafeRunTree(mock_run, executor=executor) + + explicit_id = uuid.uuid4() + child = parent.create_child( + name="child-op", run_type="chain", run_id=explicit_id + ) + + assert isinstance(child, _ReplaySafeRunTree) + call_kwargs = mock_run.create_child.call_args.kwargs + assert call_kwargs["run_id"] == explicit_id + + +# =================================================================== +# TestExecutorBackedPostPatch +# =================================================================== + + +class TestExecutorBackedPostPatch: + """Tests for executor-backed post()/patch() in _ReplaySafeRunTree.""" + + @patch(_PATCH_IS_REPLAYING, return_value=False) + @patch(_PATCH_IN_WORKFLOW, return_value=True) + def test_post_submits_to_executor_in_workflow( + self, _mock_in_wf: Any, _mock_replaying: Any + ) -> None: + """In workflow context, post() submits to executor, not inline.""" + executor = _make_executor() + mock_run = _make_mock_run() + calling_thread = threading.current_thread() + post_thread: list[threading.Thread] = [] + + def record_thread(*args: Any, **kwargs: Any) -> None: + post_thread.append(threading.current_thread()) + + mock_run.post.side_effect = record_thread + tree = _ReplaySafeRunTree(mock_run, executor=executor) + + tree.post() + + # Wait for executor to finish + executor.shutdown(wait=True) + + # post should have been called on the inner run via executor + mock_run.post.assert_called_once() + # Verify 
it ran on the executor thread, not the calling thread + assert len(post_thread) == 1 + assert post_thread[0] is not calling_thread + + @patch(_PATCH_IS_REPLAYING, return_value=False) + @patch(_PATCH_IN_WORKFLOW, return_value=True) + def test_patch_submits_to_executor_in_workflow( + self, _mock_in_wf: Any, _mock_replaying: Any + ) -> None: + """In workflow context, patch() submits to executor, not inline.""" + executor = _make_executor() + mock_run = _make_mock_run() + tree = _ReplaySafeRunTree(mock_run, executor=executor) + + tree.patch() + + executor.shutdown(wait=True) + mock_run.patch.assert_called_once() + + @patch(_PATCH_IN_WORKFLOW, return_value=False) + def test_post_delegates_directly_outside_workflow(self, _mock_in_wf: Any) -> None: + """Outside workflow, post() delegates directly to the inner run.""" + executor = _make_executor() + mock_run = _make_mock_run() + tree = _ReplaySafeRunTree(mock_run, executor=executor) + + tree.post() + + mock_run.post.assert_called_once() + + @patch(_PATCH_IN_WORKFLOW, return_value=False) + def test_patch_delegates_directly_outside_workflow(self, _mock_in_wf: Any) -> None: + """Outside workflow, patch() delegates directly to the inner run.""" + executor = _make_executor() + mock_run = _make_mock_run() + tree = _ReplaySafeRunTree(mock_run, executor=executor) + + tree.patch(exclude_inputs=True) + + mock_run.patch.assert_called_once_with(exclude_inputs=True) + + @patch(_PATCH_IS_REPLAYING, return_value=False) + @patch(_PATCH_IN_WORKFLOW, return_value=True) + def test_post_error_logged_via_done_callback( + self, + _mock_in_wf: Any, + _mock_replaying: Any, + caplog: pytest.LogCaptureFixture, + ) -> None: + """Errors from fire-and-forget post() are logged via Future.add_done_callback.""" + executor = _make_executor() + mock_run = _make_mock_run() + mock_run.post.side_effect = RuntimeError("LangSmith API error") + tree = _ReplaySafeRunTree(mock_run, executor=executor) + + with caplog.at_level(logging.ERROR): + tree.post() + 
executor.shutdown(wait=True) + + # The error should have been logged + assert any("LangSmith API error" in record.message for record in caplog.records) + + @patch(_PATCH_IS_REPLAYING, return_value=False) + @patch(_PATCH_IN_WORKFLOW, return_value=True) + def test_patch_error_logged_via_done_callback( + self, + _mock_in_wf: Any, + _mock_replaying: Any, + caplog: pytest.LogCaptureFixture, + ) -> None: + """Errors from fire-and-forget patch() are logged via Future.add_done_callback.""" + executor = _make_executor() + mock_run = _make_mock_run() + mock_run.patch.side_effect = RuntimeError("LangSmith patch error") + tree = _ReplaySafeRunTree(mock_run, executor=executor) + + with caplog.at_level(logging.ERROR): + tree.patch() + executor.shutdown(wait=True) + + # The error should have been logged + assert any( + "LangSmith patch error" in record.message for record in caplog.records + ) + + +# =================================================================== +# TestReplaySuppression +# =================================================================== + + +class TestReplaySuppression: + """Tests for _is_replaying() check before executor submission.""" + + @patch(_PATCH_IS_REPLAYING, return_value=True) + @patch(_PATCH_IN_WORKFLOW, return_value=True) + def test_post_noop_during_replay( + self, _mock_in_wf: Any, _mock_replaying: Any + ) -> None: + """post() is a no-op during replay — no executor submission.""" + executor = _make_executor() + mock_run = _make_mock_run() + tree = _ReplaySafeRunTree(mock_run, executor=executor) + + tree.post() + + executor.shutdown(wait=True) + mock_run.post.assert_not_called() + + @patch(_PATCH_IS_REPLAYING, return_value=True) + @patch(_PATCH_IN_WORKFLOW, return_value=True) + def test_patch_noop_during_replay( + self, _mock_in_wf: Any, _mock_replaying: Any + ) -> None: + """patch() is a no-op during replay.""" + executor = _make_executor() + mock_run = _make_mock_run() + tree = _ReplaySafeRunTree(mock_run, executor=executor) + + tree.patch() 
+ + executor.shutdown(wait=True) + mock_run.patch.assert_not_called() + + @patch(_PATCH_IS_REPLAYING, return_value=True) + @patch(_PATCH_IN_WORKFLOW, return_value=True) + def test_end_noop_during_replay( + self, _mock_in_wf: Any, _mock_replaying: Any + ) -> None: + """end() is a no-op during replay.""" + executor = _make_executor() + mock_run = _make_mock_run() + tree = _ReplaySafeRunTree(mock_run, executor=executor) + + tree.end(outputs={"result": "done"}) + + mock_run.end.assert_not_called() + + @patch(_PATCH_IS_REPLAYING, return_value=False) + @patch(_PATCH_IN_WORKFLOW, return_value=True) + def test_end_delegates_during_normal_execution( + self, _mock_in_wf: Any, _mock_replaying: Any + ) -> None: + """end() delegates to self._run.end() during normal (non-replay) execution.""" + executor = _make_executor() + mock_run = _make_mock_run() + tree = _ReplaySafeRunTree(mock_run, executor=executor) + + tree.end(outputs={"result": "done"}, error="some error") + + mock_run.end.assert_called_once_with( + outputs={"result": "done"}, error="some error" + ) + + +# =================================================================== +# TestContextBridgeRunTree +# =================================================================== + + +class TestContextBridgeRunTree: + """Tests for _ContextBridgeRunTree subclass.""" + + def _make_bridge(self, **kwargs: Any) -> Any: + """Create a _ContextBridgeRunTree for testing.""" + from temporalio.contrib.langsmith._interceptor import _ContextBridgeRunTree + + executor = kwargs.pop("executor", _make_executor()) + mock_client = kwargs.pop("ls_client", MagicMock()) + return _ContextBridgeRunTree( + ls_client=mock_client, + executor=executor, + **kwargs, + ) + + def test_post_raises_runtime_error(self) -> None: + """Bridge's post() raises RuntimeError — bridge must never be posted.""" + bridge = self._make_bridge() + with pytest.raises(RuntimeError, match="must never be posted"): + bridge.post() + + def test_patch_raises_runtime_error(self) -> 
None: + """Bridge's patch() raises RuntimeError — bridge must never be patched.""" + bridge = self._make_bridge() + with pytest.raises(RuntimeError, match="must never be patched"): + bridge.patch() + + def test_end_raises_runtime_error(self) -> None: + """Bridge's end() raises RuntimeError — bridge must never be ended.""" + bridge = self._make_bridge() + with pytest.raises(RuntimeError, match="must never be ended"): + bridge.end(outputs={"status": "ok"}) + + @patch(_PATCH_GET_WF_RANDOM) + @patch(_PATCH_WF_NOW) + @patch(_PATCH_IN_WORKFLOW, return_value=True) + def test_create_child_returns_root_replay_safe_run_tree( + self, + _mock_in_wf: Any, + mock_now: Any, + mock_get_random: Any, + ) -> None: + """Bridge's create_child creates a root _ReplaySafeRunTree (no parent_run_id).""" + import random as stdlib_random + + mock_get_random.return_value = stdlib_random.Random(42) + mock_now.return_value = datetime(2025, 1, 1, tzinfo=timezone.utc) + + executor = _make_executor() + mock_client = MagicMock() + bridge = self._make_bridge(ls_client=mock_client, executor=executor) + + child = bridge.create_child(name="traceable-fn", run_type="chain") + + assert isinstance(child, _ReplaySafeRunTree) + # Child should be a root run — no parent_run_id + assert child._run.parent_run_id is None + + def test_create_child_inherits_client_session_and_replicas(self) -> None: + """Bridge's children inherit ls_client, session_name, replicas.""" + executor = _make_executor() + mock_client = MagicMock() + mock_replicas = [MagicMock(), MagicMock()] + bridge = self._make_bridge( + ls_client=mock_client, + executor=executor, + session_name="my-project", + replicas=mock_replicas, + ) + + with patch(_PATCH_IN_WORKFLOW, return_value=False): + child = bridge.create_child(name="traceable-fn", run_type="chain") + + assert isinstance(child, _ReplaySafeRunTree) + # Child should have the bridge's ls_client, session_name, and replicas + assert child.ls_client is mock_client + assert child.session_name == 
"my-project" + assert child.replicas is mock_replicas + + def test_create_child_propagates_executor(self) -> None: + """Bridge propagates executor to children.""" + executor = _make_executor() + bridge = self._make_bridge(executor=executor) + + with patch(_PATCH_IN_WORKFLOW, return_value=False): + child = bridge.create_child(name="traceable-fn", run_type="chain") + + assert isinstance(child, _ReplaySafeRunTree) + assert child._executor is executor + + def test_create_child_maps_run_id_to_id(self) -> None: + """Bridge's create_child maps run_id kwarg to id on the resulting RunTree. + + The run_id kwarg is mapped to id, matching LangSmith's + RunTree.create_child convention (run_trees.py:545). + """ + executor = _make_executor() + bridge = self._make_bridge(executor=executor) + explicit_id = uuid.uuid4() + + with patch(_PATCH_IN_WORKFLOW, return_value=False): + child = bridge.create_child( + name="traceable-fn", run_type="chain", run_id=explicit_id + ) + + assert isinstance(child, _ReplaySafeRunTree) + # The underlying RunTree should have id set to the passed run_id + assert child._run.id == explicit_id + + def test_bridge_not_in_collected_runs(self) -> None: + """Bridge's post/patch/end raise RuntimeError — bridge is never traced.""" + bridge = self._make_bridge() + + with pytest.raises(RuntimeError): + bridge.post() + with pytest.raises(RuntimeError): + bridge.patch() + with pytest.raises(RuntimeError): + bridge.end() + + +# =================================================================== +# TestPostTimingDelayedExecution +# =================================================================== + + +class TestPostTimingDelayedExecution: + """Tests for post() timing when executor is busy. + + When post() is delayed (executor busy), create_run includes finalized data + (outputs/end_time), and the subsequent update_run from patch() is idempotent. 
+ """ + + @patch(_PATCH_IS_REPLAYING, return_value=False) + @patch(_PATCH_IN_WORKFLOW, return_value=True) + def test_post_patch_fifo_ordering( + self, _mock_in_wf: Any, _mock_replaying: Any + ) -> None: + """post() always completes before patch() starts (FIFO via single-worker executor).""" + executor = _make_executor() + mock_run = _make_mock_run() + call_order: list[str] = [] + + def record_post(*args: Any, **kwargs: Any) -> None: + call_order.append("post") + + def record_patch(*args: Any, **kwargs: Any) -> None: + call_order.append("patch") + + mock_run.post.side_effect = record_post + mock_run.patch.side_effect = record_patch + + tree = _ReplaySafeRunTree(mock_run, executor=executor) + + tree.post() + tree.patch() + + executor.shutdown(wait=True) + + assert call_order == ["post", "patch"] + + @patch(_PATCH_IS_REPLAYING, return_value=False) + @patch(_PATCH_IN_WORKFLOW, return_value=True) + def test_delayed_post_reads_finalized_fields( + self, _mock_in_wf: Any, _mock_replaying: Any + ) -> None: + """When post() is delayed, create_run sees finalized outputs/end_time. + + Simulates: block executor → submit post() (queued) → call end() on + "workflow thread" to set outputs/end_time → release blocker → verify + post() saw the finalized fields via _get_dicts_safe(). 
+ """ + executor = _make_executor() + mock_run = _make_mock_run() + + # Barrier to block executor so post() is delayed + blocker = threading.Event() + post_saw_outputs: list[Any] = [] + post_saw_end_time: list[Any] = [] + + # Block the executor with a dummy task + def blocking_task() -> None: + blocker.wait(timeout=5.0) + + executor.submit(blocking_task) + + # Record what fields post() sees when it finally runs + def capturing_post(*args: Any, **kwargs: Any) -> None: + post_saw_outputs.append(getattr(mock_run, "outputs", None)) + post_saw_end_time.append(getattr(mock_run, "end_time", None)) + + mock_run.post.side_effect = capturing_post + + tree = _ReplaySafeRunTree(mock_run, executor=executor) + + # Submit post() — it's queued behind the blocker + tree.post() + + # Simulate end() on the "workflow thread" while post() is still queued + finalized_outputs = {"result": "done"} + finalized_end_time = datetime(2025, 6, 1, tzinfo=timezone.utc) + mock_run.outputs = finalized_outputs + mock_run.end_time = finalized_end_time + + # Release the blocker — post() now runs and reads finalized fields + blocker.set() + executor.shutdown(wait=True) + + # post() should have seen the finalized outputs and end_time + assert len(post_saw_outputs) == 1 + assert post_saw_outputs[0] == finalized_outputs + assert len(post_saw_end_time) == 1 + assert post_saw_end_time[0] == finalized_end_time + + +# =================================================================== +# TestPostShutdownFallback +# =================================================================== + + +class TestPostShutdownFallback: + """Tests for post-shutdown executor fallback to synchronous execution.""" + + @patch(_PATCH_IS_REPLAYING, return_value=False) + @patch(_PATCH_IN_WORKFLOW, return_value=True) + def test_post_falls_back_to_sync_after_shutdown( + self, _mock_in_wf: Any, _mock_replaying: Any + ) -> None: + """After executor.shutdown(), post() falls back to synchronous execution.""" + executor = _make_executor() + 
executor.shutdown(wait=True) + + mock_run = _make_mock_run() + tree = _ReplaySafeRunTree(mock_run, executor=executor) + + # Should not raise RuntimeError, should fall back to sync + tree.post() + + mock_run.post.assert_called_once() + + @patch(_PATCH_IS_REPLAYING, return_value=False) + @patch(_PATCH_IN_WORKFLOW, return_value=True) + def test_patch_falls_back_to_sync_after_shutdown( + self, _mock_in_wf: Any, _mock_replaying: Any + ) -> None: + """After executor.shutdown(), patch() falls back to synchronous execution.""" + executor = _make_executor() + executor.shutdown(wait=True) + + mock_run = _make_mock_run() + tree = _ReplaySafeRunTree(mock_run, executor=executor) + + tree.patch() + + mock_run.patch.assert_called_once() + + +# =================================================================== +# TestAioToThreadErrorGate +# =================================================================== + + +class TestAioToThreadErrorGate: + """Tests for aio_to_thread error gate when plugin is not configured.""" + + @pytest.mark.asyncio + @patch(_PATCH_IN_WORKFLOW, return_value=True) + @patch(_PATCH_IS_REPLAYING, return_value=False) + async def test_error_gate_raises_without_plugin( + self, _mock_replaying: Any, _mock_in_wf: Any + ) -> None: + """Async @traceable in workflow without plugin raises a clear error. + + When _setup_run creates a plain RunTree (no _ReplaySafeRunTree parent), + the aio_to_thread patch should detect this and raise an error telling + the user to configure the LangSmith plugin. 
+ """ + import langsmith._internal._aiter as _aiter + + import temporalio.contrib.langsmith._interceptor as _mod + from temporalio.contrib.langsmith._interceptor import _patch_aio_to_thread + + # Save original state and restore after test to avoid global mutation + original_aio_to_thread = _aiter.aio_to_thread + original_patched_flag = _mod._aio_to_thread_patched + try: + # Reset the flag so _patch_aio_to_thread applies fresh + _mod._aio_to_thread_patched = False + _patch_aio_to_thread() + + # The patched aio_to_thread should raise when a plain RunTree is + # created (no _ReplaySafeRunTree or _ContextBridgeRunTree parent). + # This simulates _setup_run creating a root RunTree. + def _mock_setup_run(*args: Any, **kwargs: Any) -> RunTree: + return RunTree(name="test", run_type="chain") + + with pytest.raises(RuntimeError, match="Use the LangSmith plugin"): + # The error gate should fire when _setup_run returns a plain RunTree + # This test validates the error gate exists and fires + await _aiter.aio_to_thread(_mock_setup_run) + finally: + _aiter.aio_to_thread = original_aio_to_thread # type: ignore[assignment] + _mod._aio_to_thread_patched = original_patched_flag + + +# =================================================================== +# Test_ReplaySafeRunTreeConstructor +# =================================================================== + + +class Test_ReplaySafeRunTreeConstructor: + """Tests for _ReplaySafeRunTree accepting executor parameter.""" + + def test_constructor_requires_executor(self) -> None: + """_ReplaySafeRunTree requires an executor parameter.""" + mock_run = _make_mock_run() + executor = _make_executor() + tree = _ReplaySafeRunTree(mock_run, executor=executor) + assert tree._executor is executor + + def test_constructor_stores_executor(self) -> None: + """The executor is stored and accessible.""" + executor = _make_executor() + mock_run = _make_mock_run() + tree = _ReplaySafeRunTree(mock_run, executor=executor) + assert tree._executor is 
executor diff --git a/tests/contrib/langsmith/test_integration.py b/tests/contrib/langsmith/test_integration.py index 936627b5b..e907c1c9c 100644 --- a/tests/contrib/langsmith/test_integration.py +++ b/tests/contrib/langsmith/test_integration.py @@ -742,3 +742,203 @@ async def user_pipeline() -> str: assert ( hierarchy == expected ), f"Hierarchy mismatch.\nExpected:\n{expected}\nActual:\n{hierarchy}" + + +# --------------------------------------------------------------------------- +# TestBackgroundIOIntegration — _ContextBridgeRunTree + sync @traceable +# --------------------------------------------------------------------------- + + +@traceable(name="sync_inner_llm_call") +def _sync_inner_llm_call(prompt: str) -> str: + """Sync @traceable simulating an LLM call.""" + return f"sync-response to: {prompt}" + + +@traceable(name="sync_outer_chain") +def _sync_outer_chain(prompt: str) -> str: + """Sync @traceable that calls another sync @traceable.""" + return _sync_inner_llm_call(prompt) + + +@traceable(name="async_calls_sync") +async def _async_calls_sync(prompt: str) -> str: + """Async @traceable that calls a sync @traceable — the interesting mixed case.""" + return _sync_inner_llm_call(prompt) + + +@workflow.defn +class BridgeTraceableWorkflow: + """Workflow exercising _ContextBridgeRunTree with async, sync, and mixed @traceable. 
+ + Covers three code paths through create_child: + - async→async nesting + - sync→sync nesting (sync @traceable entry to bridge) + - async→sync nesting (cross-boundary case) + """ + + @workflow.run + async def run(self) -> str: + r1 = await _outer_chain("async") + r2 = _sync_outer_chain("sync") + r3 = await _async_calls_sync("mixed") + # Activity with nested @traceable + await workflow.execute_activity( + nested_traceable_activity, + start_to_close_timeout=timedelta(seconds=10), + ) + return f"{r1}|{r2}|{r3}" + + +class TestBackgroundIOIntegration: + """Integration tests for workflows using add_temporal_runs=False without external context. + + Exercises the _ContextBridgeRunTree path with sync, async, and mixed @traceable + nesting. Verifies root-run creation, correct nesting hierarchy, and replay safety. + """ + + async def test_bridge_traceable_no_external_context( + self, + client: Client, + env: WorkflowEnvironment, # type:ignore[reportUnusedParameter] + ) -> None: + """Exercises _ContextBridgeRunTree: add_temporal_runs=False, no external context. + + Uses a workflow with async→async, sync→sync, and async→sync @traceable + nesting, plus an activity with nested @traceable. 
Verifies: + - Each top-level @traceable becomes a root run (bridge creates root children) + - Nested @traceable calls nest correctly under their parent + - Activity @traceable also produces correct hierarchy + - No phantom bridge run appears in collected runs + - No duplicate run IDs after replay (max_cached_workflows=0) + """ + temporal_client, collector, _ = _make_client_and_collector( + client, add_temporal_runs=False + ) + + async with new_worker( + temporal_client, + BridgeTraceableWorkflow, + activities=[nested_traceable_activity], + max_cached_workflows=0, + ) as worker: + handle = await temporal_client.start_workflow( + BridgeTraceableWorkflow.run, + id=f"bridge-{uuid.uuid4()}", + task_queue=worker.task_queue, + ) + result = await handle.result() + + assert "|" in result + + hierarchy = dump_runs(collector) + expected = [ + "outer_chain", + " inner_llm_call", + "sync_outer_chain", + " sync_inner_llm_call", + "async_calls_sync", + " sync_inner_llm_call", + # Activity @traceable: no LangSmith context propagated (bridge is + # workflow-internal only), so activity traces are independent roots. 
+ "outer_chain", + " inner_llm_call", + ] + assert ( + hierarchy == expected + ), f"Hierarchy mismatch.\nExpected:\n{expected}\nActual:\n{hierarchy}" + + # Verify no duplicate run IDs (replay safety with max_cached_workflows=0) + run_ids = [r.id for r in collector.runs] + assert len(run_ids) == len( + set(run_ids) + ), f"Duplicate run IDs found (replay issue): {run_ids}" + + async def test_bridge_passes_project_name_to_children( + self, + client: Client, + env: WorkflowEnvironment, # type:ignore[reportUnusedParameter] + ) -> None: + """Bridge children inherit project_name (session_name) from plugin config.""" + temporal_client, collector, mock_ls_client = _make_client_and_collector( + client, add_temporal_runs=False, project_name="my-ls-project" + ) + + async with new_worker( + temporal_client, + BridgeTraceableWorkflow, + activities=[nested_traceable_activity], + max_cached_workflows=0, + ) as worker: + handle = await temporal_client.start_workflow( + BridgeTraceableWorkflow.run, + id=f"bridge-proj-{uuid.uuid4()}", + task_queue=worker.task_queue, + ) + await handle.result() + + # Verify create_run calls include session_name from project_name + for call in mock_ls_client.create_run.call_args_list: + session = call.kwargs.get("session_name") + assert session == "my-ls-project", ( + f"Expected session_name='my-ls-project', got {session!r} " + f"in create_run call: {call.kwargs.get('name')}" + ) + + async def test_mixed_sync_async_traceable_with_temporal_runs( + self, + client: Client, + env: WorkflowEnvironment, # type:ignore[reportUnusedParameter] + ) -> None: + """Exercises _ReplaySafeRunTree.create_child with mixed sync/async @traceable. + + With add_temporal_runs=True, the interceptor creates a real + _ReplaySafeRunTree as parent. This test verifies create_child + propagation works at every level regardless of sync/async, with + correct parent-child hierarchy and no duplicate run IDs after replay. 
+ """ + temporal_client, collector, _ = _make_client_and_collector( + client, add_temporal_runs=True + ) + + async with new_worker( + temporal_client, + BridgeTraceableWorkflow, + activities=[nested_traceable_activity], + max_cached_workflows=0, + ) as worker: + handle = await temporal_client.start_workflow( + BridgeTraceableWorkflow.run, + id=f"mixed-temporal-{uuid.uuid4()}", + task_queue=worker.task_queue, + ) + result = await handle.result() + + assert "|" in result + + hierarchy = dump_runs(collector) + # With add_temporal_runs=True, Temporal operations get their own runs. + # @traceable calls nest under the RunWorkflow run. + expected = [ + "StartWorkflow:BridgeTraceableWorkflow", + " RunWorkflow:BridgeTraceableWorkflow", + " outer_chain", + " inner_llm_call", + " sync_outer_chain", + " sync_inner_llm_call", + " async_calls_sync", + " sync_inner_llm_call", + " StartActivity:nested_traceable_activity", + " RunActivity:nested_traceable_activity", + " outer_chain", + " inner_llm_call", + ] + assert ( + hierarchy == expected + ), f"Hierarchy mismatch.\nExpected:\n{expected}\nActual:\n{hierarchy}" + + # Verify no duplicate run IDs (replay safety with max_cached_workflows=0) + run_ids = [r.id for r in collector.runs] + assert len(run_ids) == len( + set(run_ids) + ), f"Duplicate run IDs found (replay issue): {run_ids}" diff --git a/tests/contrib/langsmith/test_interceptor.py b/tests/contrib/langsmith/test_interceptor.py index 17050bfe5..1b75099b7 100644 --- a/tests/contrib/langsmith/test_interceptor.py +++ b/tests/contrib/langsmith/test_interceptor.py @@ -3,6 +3,7 @@ from __future__ import annotations import asyncio +from concurrent.futures import ThreadPoolExecutor from typing import Any from unittest.mock import AsyncMock, MagicMock, patch @@ -12,10 +13,10 @@ from temporalio.contrib.langsmith import LangSmithInterceptor from temporalio.contrib.langsmith._interceptor import ( HEADER_KEY, - ReplaySafeRunTree, _extract_context, _inject_context, _maybe_run, + 
_ReplaySafeRunTree, ) # --------------------------------------------------------------------------- @@ -61,6 +62,11 @@ def _mock_activity_info(**overrides: Any) -> MagicMock: return info +def _make_executor() -> ThreadPoolExecutor: + """Create a single-worker executor for tests.""" + return ThreadPoolExecutor(max_workers=1) + + def _get_runtree_name(MockRunTree: MagicMock) -> str: """Extract the 'name' kwarg from RunTree constructor call.""" MockRunTree.assert_called_once() @@ -107,16 +113,16 @@ def test_inject_extract_roundtrip(self, MockRunTree: Any) -> None: mock_extracted = MagicMock() MockRunTree.from_headers.return_value = mock_extracted - extracted = _extract_context(result) - # extracted should be a ReplaySafeRunTree wrapping the reconstructed run - assert isinstance(extracted, ReplaySafeRunTree) + extracted = _extract_context(result, _make_executor()) + # extracted should be a _ReplaySafeRunTree wrapping the reconstructed run + assert isinstance(extracted, _ReplaySafeRunTree) assert extracted._run is mock_extracted MockRunTree.from_headers.assert_called_once() def test_extract_missing_header(self) -> None: """When the _temporal-langsmith-context header is absent, returns None.""" headers: dict[str, Payload] = {} - result = _extract_context(headers) + result = _extract_context(headers, _make_executor()) assert result is None def test_inject_preserves_existing_headers(self) -> None: @@ -155,10 +161,11 @@ def test_replay_noop_post_end_patch( mock_client, "TestRun", add_temporal_runs=True, + executor=_make_executor(), ) as run: - assert isinstance(run, ReplaySafeRunTree) + assert isinstance(run, _ReplaySafeRunTree) assert run._run is mock_run - # RunTree IS created (wrapped in ReplaySafeRunTree) + # RunTree IS created (wrapped in _ReplaySafeRunTree) MockRunTree.assert_called_once() # But post/end/patch are no-ops during replay mock_run.post.assert_not_called() @@ -171,7 +178,7 @@ def test_replay_noop_post_end_patch( def test_create_trace_when_not_replaying( 
self, _mock_in_wf: Any, _mock_replaying: Any, MockRunTree: Any ) -> None: - """When not replaying (but in workflow), _maybe_run creates a ReplaySafeRunTree.""" + """When not replaying (but in workflow), _maybe_run creates a _ReplaySafeRunTree.""" mock_run = _make_mock_run() MockRunTree.return_value = mock_run mock_client = MagicMock() @@ -179,8 +186,9 @@ def test_create_trace_when_not_replaying( mock_client, "TestRun", add_temporal_runs=True, + executor=_make_executor(), ) as run: - assert isinstance(run, ReplaySafeRunTree) + assert isinstance(run, _ReplaySafeRunTree) assert run._run is mock_run MockRunTree.assert_called_once() assert MockRunTree.call_args.kwargs["name"] == "TestRun" @@ -198,8 +206,9 @@ def test_create_trace_outside_workflow( mock_client, "TestRun", add_temporal_runs=True, + executor=_make_executor(), ) as run: - assert isinstance(run, ReplaySafeRunTree) + assert isinstance(run, _ReplaySafeRunTree) assert run._run is mock_run MockRunTree.assert_called_once() @@ -226,6 +235,7 @@ def test_exception_marks_run_errored( mock_client, "TestRun", add_temporal_runs=True, + executor=_make_executor(), ) as run: assert run is not None assert run._run is mock_run @@ -252,6 +262,7 @@ def test_benign_application_error_not_marked( mock_client, "TestRun", add_temporal_runs=True, + executor=_make_executor(), ) as run: assert run is not None assert run._run is mock_run @@ -280,6 +291,7 @@ def test_non_benign_application_error_marked( mock_client, "TestRun", add_temporal_runs=True, + executor=_make_executor(), ) as run: assert run is not None assert run._run is mock_run @@ -302,6 +314,7 @@ def test_success_completes_normally( mock_client, "TestRun", add_temporal_runs=True, + executor=_make_executor(), ) as run: assert run is not None assert run._run is mock_run @@ -327,6 +340,7 @@ def test_cancelled_error_propagates_without_marking_run( mock_client, "TestRun", add_temporal_runs=True, + executor=_make_executor(), ) as run: assert run is not None assert run._run is 
mock_run @@ -541,11 +555,11 @@ async def test_execute_activity_creates_run_with_context_and_metadata( assert metadata["temporalWorkflowID"] == "wf-123" assert metadata["temporalRunID"] == "run-456" assert metadata["temporalActivityID"] == "act-789" - # Verify tracing_context sets parent (wrapped in ReplaySafeRunTree) + # Verify tracing_context sets parent (wrapped in _ReplaySafeRunTree) mock_tracing_ctx.assert_called() ctx_kwargs = mock_tracing_ctx.call_args.kwargs parent = ctx_kwargs.get("parent") - assert isinstance(parent, ReplaySafeRunTree) + assert isinstance(parent, _ReplaySafeRunTree) assert parent._run is mock_run # Verify super() called and result passed through mock_next.execute_activity.assert_called_once() @@ -927,7 +941,8 @@ async def test_execute_nexus_operation_start( await interceptor.execute_nexus_operation_start(mock_input) # Verify _extract_nexus_context was called (not _extract_context) - mock_extract_nexus.assert_called_once_with(mock_input.ctx.headers) + mock_extract_nexus.assert_called_once() + assert mock_extract_nexus.call_args[0][0] is mock_input.ctx.headers # Verify trace name assert ( _get_runtree_name(MockRunTree) @@ -960,7 +975,8 @@ async def test_execute_nexus_operation_cancel( await interceptor.execute_nexus_operation_cancel(mock_input) - mock_extract_nexus.assert_called_once_with(mock_input.ctx.headers) + mock_extract_nexus.assert_called_once() + assert mock_extract_nexus.call_args[0][0] is mock_input.ctx.headers assert ( _get_runtree_name(MockRunTree) == "RunCancelNexusOperationHandler:MyService/cancel_op" @@ -991,6 +1007,7 @@ def test_runtree_always_receives_ls_client( mock_client, "TestRun", add_temporal_runs=True, + executor=_make_executor(), ): pass @@ -1021,6 +1038,7 @@ def test_false_skips_traces(self, _mock_in_wf: Any, MockRunTree: Any) -> None: mock_client, "TestRun", add_temporal_runs=False, + executor=_make_executor(), ) as run: assert run is None MockRunTree.assert_not_called() From 
96c139b1d525dd2d7f3ae0f0ef6c315263a8cd7b Mon Sep 17 00:00:00 2001 From: Maple Xu Date: Wed, 25 Mar 2026 16:44:08 -0400 Subject: [PATCH 13/30] Replace unnecessary Any type annotations with specific types Replace ~35 Any annotations across _plugin.py and _interceptor.py with precise types (langsmith.Client, RunTree, _ReplaySafeRunTree, specific SDK interceptor input types, etc.). Add _InputWithHeaders Protocol for private helpers matching the OTel interceptor pattern. Narrow return types to match base class signatures exactly. Co-Authored-By: Claude Opus 4.6 (1M context) --- temporalio/contrib/langsmith/_interceptor.py | 133 +++++++++++++------ temporalio/contrib/langsmith/_plugin.py | 7 +- 2 files changed, 93 insertions(+), 47 deletions(-) diff --git a/temporalio/contrib/langsmith/_interceptor.py b/temporalio/contrib/langsmith/_interceptor.py index 30fc769e7..0b2069350 100644 --- a/temporalio/contrib/langsmith/_interceptor.py +++ b/temporalio/contrib/langsmith/_interceptor.py @@ -6,14 +6,16 @@ import logging import random import uuid -from collections.abc import Iterator, Mapping +from collections.abc import Callable, Iterator, Mapping, Sequence from concurrent.futures import Future, ThreadPoolExecutor from contextlib import contextmanager -from typing import Any, ClassVar, NoReturn +from typing import Any, ClassVar, NoReturn, Protocol +import langsmith +import nexusrpc.handler from langsmith import tracing_context from langsmith.run_helpers import get_current_run_tree -from langsmith.run_trees import RunTree +from langsmith.run_trees import RunTree, WriteReplica import temporalio.activity import temporalio.client @@ -41,9 +43,13 @@ _payload_converter = temporalio.converter.PayloadConverter.default +class _InputWithHeaders(Protocol): + headers: Mapping[str, Payload] + + def _inject_context( headers: Mapping[str, Payload], - run_tree: Any, + run_tree: RunTree, ) -> dict[str, Payload]: """Inject LangSmith context into Temporal payload headers. 
@@ -81,7 +87,7 @@ def _inject_current_context( def _extract_context( headers: Mapping[str, Payload], executor: ThreadPoolExecutor, -) -> Any | None: +) -> _ReplaySafeRunTree | None: """Extract LangSmith context from Temporal payload headers. Reconstructs a :class:`RunTree` from the ``_temporal-langsmith-context`` header on @@ -98,8 +104,8 @@ def _extract_context( def _inject_nexus_context( - headers: dict[str, str], - run_tree: Any, + headers: Mapping[str, str], + run_tree: RunTree, ) -> dict[str, str]: """Inject LangSmith context into Nexus string headers.""" ls_headers = run_tree.to_headers() @@ -110,9 +116,9 @@ def _inject_nexus_context( def _extract_nexus_context( - headers: dict[str, str], + headers: Mapping[str, str], executor: ThreadPoolExecutor, -) -> Any | None: +) -> _ReplaySafeRunTree | None: """Extract LangSmith context from Nexus string headers.""" raw = headers.get(HEADER_KEY) if not raw: @@ -153,7 +159,7 @@ def _patch_aio_to_thread() -> None: import contextvars async def _safe_aio_to_thread( - func: Any, + func: Callable[..., Any], /, *args: Any, __ctx: contextvars.Context | None = None, @@ -269,7 +275,7 @@ def __setattr__(self, name: str, value: Any) -> None: """Delegate attribute setting to the wrapped RunTree.""" setattr(self._run, name, value) - def to_headers(self) -> dict[str, Any]: + def to_headers(self) -> dict[str, str]: """Delegate to the wrapped RunTree's to_headers.""" return self._run.to_headers() @@ -293,7 +299,9 @@ def create_child(self, *args: Any, **kwargs: Any) -> _ReplaySafeRunTree: child_run = self._run.create_child(*args, **kwargs) return _ReplaySafeRunTree(child_run, executor=self._executor) - def _submit_or_fallback(self, fn: Any, *args: Any, **kwargs: Any) -> None: + def _submit_or_fallback( + self, fn: Callable[..., object], *args: Any, **kwargs: Any + ) -> None: """Submit work to executor, falling back to synchronous after shutdown.""" def _log_future_exception(future: Future[None]) -> None: @@ -356,10 +364,10 @@ class 
_ContextBridgeRunTree(_ReplaySafeRunTree): def __init__( # pyright: ignore[reportMissingSuperCall] self, *, - ls_client: Any, + ls_client: langsmith.Client, executor: ThreadPoolExecutor, session_name: str | None = None, - replicas: list[Any] | None = None, + replicas: Sequence[WriteReplica] | None = None, ) -> None: """Create a context bridge with the given LangSmith client.""" # Create a minimal RunTree for the bridge — it will never be posted @@ -429,7 +437,7 @@ def _is_benign_error(exc: Exception) -> bool: @contextmanager def _maybe_run( - client: Any, + client: langsmith.Client, name: str, *, add_temporal_runs: bool, @@ -437,10 +445,10 @@ def _maybe_run( inputs: dict[str, Any] | None = None, metadata: dict[str, Any] | None = None, tags: list[str] | None = None, - parent: Any | None = None, + parent: RunTree | None = None, project_name: str | None = None, executor: ThreadPoolExecutor, -) -> Iterator[Any | None]: +) -> Iterator[_ReplaySafeRunTree | None]: """Create a LangSmith run, handling errors. - If add_temporal_runs is False, yields None (no run created). 
@@ -537,7 +545,7 @@ class LangSmithInterceptor( def __init__( self, *, - client: Any | None = None, + client: langsmith.Client | None = None, project_name: str | None = None, add_temporal_runs: bool = False, default_metadata: dict[str, Any] | None = None, @@ -563,9 +571,9 @@ def maybe_run( name: str, *, run_type: str = "chain", - parent: Any | None = None, + parent: RunTree | None = None, extra_metadata: dict[str, Any] | None = None, - ) -> Iterator[Any | None]: + ) -> Iterator[_ReplaySafeRunTree | None]: """Create a LangSmith run with this interceptor's config already applied.""" metadata = {**self._default_metadata, **(extra_metadata or {})} with _maybe_run( @@ -629,30 +637,38 @@ def __init__( self._config = config @contextmanager - def _traced_call(self, name: str, input: Any) -> Iterator[None]: + def _traced_call(self, name: str, input: _InputWithHeaders) -> Iterator[None]: """Wrap a client call with a LangSmith run and inject context into headers.""" with self._config.maybe_run(name): input.headers = _inject_current_context(input.headers) yield - async def start_workflow(self, input: Any) -> Any: + async def start_workflow( + self, input: temporalio.client.StartWorkflowInput + ) -> temporalio.client.WorkflowHandle[Any, Any]: prefix = "SignalWithStartWorkflow" if input.start_signal else "StartWorkflow" with self._traced_call(f"{prefix}:{input.workflow}", input): return await super().start_workflow(input) - async def query_workflow(self, input: Any) -> Any: + async def query_workflow(self, input: temporalio.client.QueryWorkflowInput) -> Any: with self._traced_call(f"QueryWorkflow:{input.query}", input): return await super().query_workflow(input) - async def signal_workflow(self, input: Any) -> None: + async def signal_workflow( + self, input: temporalio.client.SignalWorkflowInput + ) -> None: with self._traced_call(f"SignalWorkflow:{input.signal}", input): return await super().signal_workflow(input) - async def start_workflow_update(self, input: Any) -> Any: + 
async def start_workflow_update( + self, input: temporalio.client.StartWorkflowUpdateInput + ) -> temporalio.client.WorkflowUpdateHandle[Any]: with self._traced_call(f"StartWorkflowUpdate:{input.update}", input): return await super().start_workflow_update(input) - async def start_update_with_start_workflow(self, input: Any) -> Any: + async def start_update_with_start_workflow( + self, input: temporalio.client.StartWorkflowUpdateWithStartInput + ) -> temporalio.client.WorkflowUpdateHandle[Any]: with self._config.maybe_run( f"StartUpdateWithStartWorkflow:{input.start_workflow_input.workflow}", ): @@ -683,7 +699,9 @@ def __init__( super().__init__(next) self._config = config - async def execute_activity(self, input: Any) -> Any: + async def execute_activity( + self, input: temporalio.worker.ExecuteActivityInput + ) -> Any: parent = _extract_context(input.headers, self._config._executor) info = temporalio.activity.info() extra_metadata = { @@ -729,7 +747,7 @@ class _LangSmithWorkflowInboundInterceptor( """Instruments workflow execution with LangSmith runs.""" _config: ClassVar[LangSmithInterceptor] - _current_run: Any | None = None + _current_run: _ReplaySafeRunTree | None = None def init(self, outbound: temporalio.worker.WorkflowOutboundInterceptor) -> None: super().init( @@ -743,7 +761,7 @@ def _workflow_maybe_run( headers: Mapping[str, Payload] | None = None, *, is_handler: bool = False, - ) -> Iterator[Any | None]: + ) -> Iterator[_ReplaySafeRunTree | None]: """Workflow-specific run creation with metadata. 
Extracts parent from headers (if provided) and stores the run (or parent @@ -809,7 +827,9 @@ def _workflow_maybe_run( finally: self._current_run = prev_run - async def execute_workflow(self, input: Any) -> Any: + async def execute_workflow( + self, input: temporalio.worker.ExecuteWorkflowInput + ) -> Any: wf_type = temporalio.workflow.info().workflow_type with self._workflow_maybe_run( f"RunWorkflow:{wf_type}", @@ -817,25 +837,29 @@ async def execute_workflow(self, input: Any) -> Any: ): return await super().execute_workflow(input) - async def handle_signal(self, input: Any) -> None: + async def handle_signal(self, input: temporalio.worker.HandleSignalInput) -> None: with self._workflow_maybe_run( f"HandleSignal:{input.signal}", input.headers, is_handler=True ): return await super().handle_signal(input) - async def handle_query(self, input: Any) -> Any: + async def handle_query(self, input: temporalio.worker.HandleQueryInput) -> Any: with self._workflow_maybe_run( f"HandleQuery:{input.query}", input.headers, is_handler=True ): return await super().handle_query(input) - def handle_update_validator(self, input: Any) -> None: + def handle_update_validator( + self, input: temporalio.worker.HandleUpdateInput + ) -> None: with self._workflow_maybe_run( f"ValidateUpdate:{input.update}", input.headers, is_handler=True ): return super().handle_update_validator(input) - async def handle_update_handler(self, input: Any) -> Any: + async def handle_update_handler( + self, input: temporalio.worker.HandleUpdateInput + ) -> Any: with self._workflow_maybe_run( f"HandleUpdate:{input.update}", input.headers, is_handler=True ): @@ -863,7 +887,9 @@ def __init__( self._inbound = inbound @contextmanager - def _traced_outbound(self, name: str, input: Any) -> Iterator[Any | None]: + def _traced_outbound( + self, name: str, input: _InputWithHeaders + ) -> Iterator[_ReplaySafeRunTree | None]: """Outbound workflow run creation with context injection into input.headers.""" with 
self._config.maybe_run(name, parent=self._inbound._current_run) as run: context_source = run or self._inbound._current_run @@ -871,34 +897,46 @@ def _traced_outbound(self, name: str, input: Any) -> Iterator[Any | None]: input.headers = _inject_context(input.headers, context_source) yield run - def start_activity(self, input: Any) -> Any: + def start_activity( + self, input: temporalio.worker.StartActivityInput + ) -> temporalio.workflow.ActivityHandle: with self._traced_outbound(f"StartActivity:{input.activity}", input): return super().start_activity(input) - def start_local_activity(self, input: Any) -> Any: + def start_local_activity( + self, input: temporalio.worker.StartLocalActivityInput + ) -> temporalio.workflow.ActivityHandle: with self._traced_outbound(f"StartActivity:{input.activity}", input): return super().start_local_activity(input) - async def start_child_workflow(self, input: Any) -> Any: + async def start_child_workflow( + self, input: temporalio.worker.StartChildWorkflowInput + ) -> temporalio.workflow.ChildWorkflowHandle: with self._traced_outbound(f"StartChildWorkflow:{input.workflow}", input): return await super().start_child_workflow(input) - async def signal_child_workflow(self, input: Any) -> None: + async def signal_child_workflow( + self, input: temporalio.worker.SignalChildWorkflowInput + ) -> None: with self._traced_outbound(f"SignalChildWorkflow:{input.signal}", input): return await super().signal_child_workflow(input) - async def signal_external_workflow(self, input: Any) -> None: + async def signal_external_workflow( + self, input: temporalio.worker.SignalExternalWorkflowInput + ) -> None: with self._traced_outbound(f"SignalExternalWorkflow:{input.signal}", input): return await super().signal_external_workflow(input) - def continue_as_new(self, input: Any) -> NoReturn: + def continue_as_new(self, input: temporalio.worker.ContinueAsNewInput) -> NoReturn: # No trace created, but inject context from inbound's current run current_run = 
getattr(self._inbound, "_current_run", None) if current_run: input.headers = _inject_context(input.headers, current_run) super().continue_as_new(input) - async def start_nexus_operation(self, input: Any) -> Any: + async def start_nexus_operation( + self, input: temporalio.worker.StartNexusOperationInput[Any, Any] + ) -> temporalio.workflow.NexusOperationHandle[Any]: with self._config.maybe_run( f"StartNexusOperation:{input.service}/{input.operation_name}", parent=self._inbound._current_run, @@ -929,7 +967,12 @@ def __init__( super().__init__(next) self._config = config - async def execute_nexus_operation_start(self, input: Any) -> Any: + async def execute_nexus_operation_start( + self, input: temporalio.worker.ExecuteNexusOperationStartInput + ) -> ( + nexusrpc.handler.StartOperationResultSync[Any] + | nexusrpc.handler.StartOperationResultAsync + ): parent = _extract_nexus_context(input.ctx.headers, self._config._executor) with self._config.maybe_run( f"RunStartNexusOperationHandler:{input.ctx.service}/{input.ctx.operation}", @@ -938,7 +981,9 @@ async def execute_nexus_operation_start(self, input: Any) -> Any: ): return await self.next.execute_nexus_operation_start(input) - async def execute_nexus_operation_cancel(self, input: Any) -> Any: + async def execute_nexus_operation_cancel( + self, input: temporalio.worker.ExecuteNexusOperationCancelInput + ) -> None: parent = _extract_nexus_context(input.ctx.headers, self._config._executor) with self._config.maybe_run( f"RunCancelNexusOperationHandler:{input.ctx.service}/{input.ctx.operation}", diff --git a/temporalio/contrib/langsmith/_plugin.py b/temporalio/contrib/langsmith/_plugin.py index ea176436c..ae3e2ac2e 100644 --- a/temporalio/contrib/langsmith/_plugin.py +++ b/temporalio/contrib/langsmith/_plugin.py @@ -7,6 +7,8 @@ from contextlib import asynccontextmanager from typing import Any +import langsmith + from temporalio.contrib.langsmith._interceptor import LangSmithInterceptor from temporalio.plugin import 
SimplePlugin from temporalio.worker import WorkflowRunner @@ -23,7 +25,7 @@ class LangSmithPlugin(SimplePlugin): def __init__( self, *, - client: Any | None = None, + client: langsmith.Client | None = None, project_name: str | None = None, add_temporal_runs: bool = False, metadata: dict[str, Any] | None = None, @@ -67,8 +69,7 @@ async def run_context() -> AsyncIterator[None]: yield finally: interceptor._executor.shutdown(wait=True) - if interceptor._client is not None: - interceptor._client.flush() + interceptor._client.flush() super().__init__( "langchain.LangSmithPlugin", From d1e66c465f80456cd964977c8f79f9a5765fc7a1 Mon Sep 17 00:00:00 2001 From: Maple Xu Date: Wed, 25 Mar 2026 16:55:30 -0400 Subject: [PATCH 14/30] Fix basedpyright warnings in test files Prefix unused mock parameters with underscore (_args, _kwargs) and rename unused variable (_collector) to satisfy basedpyright's reportUnusedParameter and reportUnusedVariable checks. Co-Authored-By: Claude Opus 4.6 (1M context) --- tests/contrib/langsmith/test_background_io.py | 10 +++++----- tests/contrib/langsmith/test_integration.py | 2 +- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/tests/contrib/langsmith/test_background_io.py b/tests/contrib/langsmith/test_background_io.py index 75027ce1f..3d1f2f2f5 100644 --- a/tests/contrib/langsmith/test_background_io.py +++ b/tests/contrib/langsmith/test_background_io.py @@ -204,7 +204,7 @@ def test_post_submits_to_executor_in_workflow( calling_thread = threading.current_thread() post_thread: list[threading.Thread] = [] - def record_thread(*args: Any, **kwargs: Any) -> None: + def record_thread(*_args: Any, **_kwargs: Any) -> None: post_thread.append(threading.current_thread()) mock_run.post.side_effect = record_thread @@ -520,10 +520,10 @@ def test_post_patch_fifo_ordering( mock_run = _make_mock_run() call_order: list[str] = [] - def record_post(*args: Any, **kwargs: Any) -> None: + def record_post(*_args: Any, **_kwargs: Any) -> None: 
call_order.append("post") - def record_patch(*args: Any, **kwargs: Any) -> None: + def record_patch(*_args: Any, **_kwargs: Any) -> None: call_order.append("patch") mock_run.post.side_effect = record_post @@ -564,7 +564,7 @@ def blocking_task() -> None: executor.submit(blocking_task) # Record what fields post() sees when it finally runs - def capturing_post(*args: Any, **kwargs: Any) -> None: + def capturing_post(*_args: Any, **_kwargs: Any) -> None: post_saw_outputs.append(getattr(mock_run, "outputs", None)) post_saw_end_time.append(getattr(mock_run, "end_time", None)) @@ -670,7 +670,7 @@ async def test_error_gate_raises_without_plugin( # The patched aio_to_thread should raise when a plain RunTree is # created (no _ReplaySafeRunTree or _ContextBridgeRunTree parent). # This simulates _setup_run creating a root RunTree. - def _mock_setup_run(*args: Any, **kwargs: Any) -> RunTree: + def _mock_setup_run(*_args: Any, **_kwargs: Any) -> RunTree: return RunTree(name="test", run_type="chain") with pytest.raises(RuntimeError, match="Use the LangSmith plugin"): diff --git a/tests/contrib/langsmith/test_integration.py b/tests/contrib/langsmith/test_integration.py index e907c1c9c..7c59c8edf 100644 --- a/tests/contrib/langsmith/test_integration.py +++ b/tests/contrib/langsmith/test_integration.py @@ -860,7 +860,7 @@ async def test_bridge_passes_project_name_to_children( env: WorkflowEnvironment, # type:ignore[reportUnusedParameter] ) -> None: """Bridge children inherit project_name (session_name) from plugin config.""" - temporal_client, collector, mock_ls_client = _make_client_and_collector( + temporal_client, _collector, mock_ls_client = _make_client_and_collector( client, add_temporal_runs=False, project_name="my-ls-project" ) From ee09c1f8f0427b7221956b5528804877778533ae Mon Sep 17 00:00:00 2001 From: Maple Xu Date: Wed, 25 Mar 2026 17:27:31 -0400 Subject: [PATCH 15/30] Clean up types, dead code, and test assertions MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 
Content-Transfer-Encoding: 8bit - Remove useless _get_current_run_safe wrapper (inline get_current_run_tree) - Restore generic type params on interceptor return types (ActivityHandle[Any], ChildWorkflowHandle[Any, Any]) to match base class exactly - Fix _make_bridge return type (Any → _ContextBridgeRunTree) - Fix _poll_query helper types (Any → WorkflowHandle, Callable) - Strengthen weak assertions in mixed sync/async integration tests - Add _InputWithHeaders Protocol for private helper input params Co-Authored-By: Claude Opus 4.6 (1M context) --- temporalio/contrib/langsmith/_interceptor.py | 13 ++++--------- tests/contrib/langsmith/test_background_io.py | 3 ++- tests/contrib/langsmith/test_integration.py | 16 +++++++++++----- 3 files changed, 17 insertions(+), 15 deletions(-) diff --git a/temporalio/contrib/langsmith/_interceptor.py b/temporalio/contrib/langsmith/_interceptor.py index 0b2069350..16c13a25f 100644 --- a/temporalio/contrib/langsmith/_interceptor.py +++ b/temporalio/contrib/langsmith/_interceptor.py @@ -64,11 +64,6 @@ def _inject_context( } -def _get_current_run_safe() -> RunTree | None: - """Get the current ambient LangSmith run tree.""" - return get_current_run_tree() - - def _inject_current_context( headers: Mapping[str, Payload], ) -> Mapping[str, Payload]: @@ -477,7 +472,7 @@ def _maybe_run( # If no explicit parent, inherit from ambient @traceable context if parent is None: - parent = _get_current_run_safe() + parent = get_current_run_tree() kwargs: dict[str, Any] = dict( name=name, @@ -899,19 +894,19 @@ def _traced_outbound( def start_activity( self, input: temporalio.worker.StartActivityInput - ) -> temporalio.workflow.ActivityHandle: + ) -> temporalio.workflow.ActivityHandle[Any]: with self._traced_outbound(f"StartActivity:{input.activity}", input): return super().start_activity(input) def start_local_activity( self, input: temporalio.worker.StartLocalActivityInput - ) -> temporalio.workflow.ActivityHandle: + ) -> 
temporalio.workflow.ActivityHandle[Any]: with self._traced_outbound(f"StartActivity:{input.activity}", input): return super().start_local_activity(input) async def start_child_workflow( self, input: temporalio.worker.StartChildWorkflowInput - ) -> temporalio.workflow.ChildWorkflowHandle: + ) -> temporalio.workflow.ChildWorkflowHandle[Any, Any]: with self._traced_outbound(f"StartChildWorkflow:{input.workflow}", input): return await super().start_child_workflow(input) diff --git a/tests/contrib/langsmith/test_background_io.py b/tests/contrib/langsmith/test_background_io.py index 3d1f2f2f5..5c9eaad60 100644 --- a/tests/contrib/langsmith/test_background_io.py +++ b/tests/contrib/langsmith/test_background_io.py @@ -18,6 +18,7 @@ from langsmith.run_trees import RunTree from temporalio.contrib.langsmith._interceptor import ( + _ContextBridgeRunTree, _ReplaySafeRunTree, _uuid_from_random, ) @@ -380,7 +381,7 @@ def test_end_delegates_during_normal_execution( class TestContextBridgeRunTree: """Tests for _ContextBridgeRunTree subclass.""" - def _make_bridge(self, **kwargs: Any) -> Any: + def _make_bridge(self, **kwargs: Any) -> _ContextBridgeRunTree: """Create a _ContextBridgeRunTree for testing.""" from temporalio.contrib.langsmith._interceptor import _ContextBridgeRunTree diff --git a/tests/contrib/langsmith/test_integration.py b/tests/contrib/langsmith/test_integration.py index 7c59c8edf..8d31f5dad 100644 --- a/tests/contrib/langsmith/test_integration.py +++ b/tests/contrib/langsmith/test_integration.py @@ -4,6 +4,7 @@ import asyncio import uuid +from collections.abc import Callable from datetime import timedelta from typing import Any from unittest.mock import MagicMock @@ -13,7 +14,12 @@ from langsmith import traceable, tracing_context from temporalio import activity, common, nexus, workflow -from temporalio.client import Client, WorkflowFailureError, WorkflowQueryFailedError +from temporalio.client import ( + Client, + WorkflowFailureError, + WorkflowHandle, + 
WorkflowQueryFailedError, +) from temporalio.contrib.langsmith import LangSmithPlugin from temporalio.exceptions import ApplicationError from temporalio.service import RPCError @@ -278,8 +284,8 @@ def _make_temporal_client( async def _poll_query( - handle: Any, - query: Any, + handle: WorkflowHandle[Any, Any], + query: Callable[..., Any], *, expected: Any = True, timeout_secs: float = 10.0, @@ -829,7 +835,7 @@ async def test_bridge_traceable_no_external_context( ) result = await handle.result() - assert "|" in result + assert result == "response to: async|sync-response to: sync|sync-response to: mixed" hierarchy = dump_runs(collector) expected = [ @@ -914,7 +920,7 @@ async def test_mixed_sync_async_traceable_with_temporal_runs( ) result = await handle.result() - assert "|" in result + assert result == "response to: async|sync-response to: sync|sync-response to: mixed" hierarchy = dump_runs(collector) # With add_temporal_runs=True, Temporal operations get their own runs. From 4e70ef8f9cad31fe8bda0c45af69c85039e362df Mon Sep 17 00:00:00 2001 From: Maple Xu Date: Wed, 25 Mar 2026 17:32:02 -0400 Subject: [PATCH 16/30] Fix formatting in test_integration.py Co-Authored-By: Claude Opus 4.6 (1M context) --- tests/contrib/langsmith/test_integration.py | 15 +++++++++++++-- 1 file changed, 13 insertions(+), 2 deletions(-) diff --git a/tests/contrib/langsmith/test_integration.py b/tests/contrib/langsmith/test_integration.py index 8d31f5dad..b63c41f0e 100644 --- a/tests/contrib/langsmith/test_integration.py +++ b/tests/contrib/langsmith/test_integration.py @@ -49,6 +49,7 @@ async def _outer_chain(prompt: str) -> str: return await _inner_llm_call(prompt) +@traceable @activity.defn async def traceable_activity() -> str: """Activity that calls a @traceable function.""" @@ -56,6 +57,7 @@ async def traceable_activity() -> str: return result +@traceable @activity.defn async def nested_traceable_activity() -> str: """Activity with two levels of @traceable nesting.""" @@ -106,6 +108,7 
@@ async def run_operation( # --------------------------------------------------------------------------- +@traceable @activity.defn async def simple_activity() -> str: return "activity-done" @@ -203,11 +206,13 @@ def validate_my_update(self, value: str) -> None: # --------------------------------------------------------------------------- +@traceable @activity.defn async def failing_activity() -> str: raise ApplicationError("activity-failed", non_retryable=True) +@traceable @activity.defn async def benign_failing_activity() -> str: from temporalio.exceptions import ApplicationErrorCategory @@ -835,7 +840,10 @@ async def test_bridge_traceable_no_external_context( ) result = await handle.result() - assert result == "response to: async|sync-response to: sync|sync-response to: mixed" + assert ( + result + == "response to: async|sync-response to: sync|sync-response to: mixed" + ) hierarchy = dump_runs(collector) expected = [ @@ -920,7 +928,10 @@ async def test_mixed_sync_async_traceable_with_temporal_runs( ) result = await handle.result() - assert result == "response to: async|sync-response to: sync|sync-response to: mixed" + assert ( + result + == "response to: async|sync-response to: sync|sync-response to: mixed" + ) hierarchy = dump_runs(collector) # With add_temporal_runs=True, Temporal operations get their own runs. From 2b84421b9dacdffb9dbaadd9b3868c2a840863ba Mon Sep 17 00:00:00 2001 From: Maple Xu Date: Wed, 25 Mar 2026 17:38:31 -0400 Subject: [PATCH 17/30] Add @traceable to all activity definitions in integration tests Wrap all 5 activity definitions with @traceable as outer decorator to test LangSmith tracing through the full activity execution path. Update all 9 expected trace hierarchies to account for the additional @traceable run nested under each RunActivity. Fix outputs assertion to only check interceptor runs (colon-prefixed names) since @traceable captures actual return values rather than the interceptor's {'status': 'ok'}. 
Co-Authored-By: Claude Opus 4.6 (1M context) --- tests/contrib/langsmith/test_integration.py | 63 +++++++++++++-------- tests/contrib/langsmith/test_plugin.py | 21 ++++--- 2 files changed, 52 insertions(+), 32 deletions(-) diff --git a/tests/contrib/langsmith/test_integration.py b/tests/contrib/langsmith/test_integration.py index b63c41f0e..1bb3216bc 100644 --- a/tests/contrib/langsmith/test_integration.py +++ b/tests/contrib/langsmith/test_integration.py @@ -344,6 +344,7 @@ async def test_workflow_activity_trace_hierarchy( " RunWorkflow:SimpleWorkflow", " StartActivity:simple_activity", " RunActivity:simple_activity", + " simple_activity", ] assert ( hierarchy == expected @@ -362,9 +363,10 @@ async def test_workflow_activity_trace_hierarchy( # Verify successful runs have outputs == {"status": "ok"} for run in collector.runs: - assert run.outputs == { - "status": "ok" - }, f"Expected {run.name} outputs={{'status': 'ok'}}, got {run.outputs}" + if ":" in run.name: # Interceptor runs use "Type:Name" format + assert run.outputs == { + "status": "ok" + }, f"Expected {run.name} outputs={{'status': 'ok'}}, got {run.outputs}" # --------------------------------------------------------------------------- @@ -404,7 +406,8 @@ async def test_no_duplicate_traces_on_replay( " RunWorkflow:TraceableActivityWorkflow", " StartActivity:traceable_activity", " RunActivity:traceable_activity", - " inner_llm_call", + " traceable_activity", + " inner_llm_call", ] assert hierarchy == expected, ( f"Hierarchy mismatch (possible replay duplicates).\n" @@ -449,6 +452,7 @@ async def test_activity_failure_marked( " RunWorkflow:ActivityFailureWorkflow", " StartActivity:failing_activity", " RunActivity:failing_activity", + " failing_activity", ] assert ( hierarchy == expected @@ -528,6 +532,7 @@ async def test_benign_error_not_marked( " RunWorkflow:BenignErrorWorkflow", " StartActivity:benign_failing_activity", " RunActivity:benign_failing_activity", + " benign_failing_activity", ] assert ( 
hierarchy == expected @@ -628,30 +633,35 @@ async def user_pipeline() -> str: " RunWorkflow:ComprehensiveWorkflow", " StartActivity:nested_traceable_activity", " RunActivity:nested_traceable_activity", - " outer_chain", - " inner_llm_call", + " nested_traceable_activity", + " outer_chain", + " inner_llm_call", " StartActivity:nested_traceable_activity", " RunActivity:nested_traceable_activity", - " outer_chain", - " inner_llm_call", + " nested_traceable_activity", + " outer_chain", + " inner_llm_call", " outer_chain", " inner_llm_call", " StartChildWorkflow:TraceableActivityWorkflow", " RunWorkflow:TraceableActivityWorkflow", " StartActivity:traceable_activity", " RunActivity:traceable_activity", - " inner_llm_call", + " traceable_activity", + " inner_llm_call", " StartNexusOperation:NexusService/run_operation", " RunStartNexusOperationHandler:NexusService/run_operation", " StartWorkflow:SimpleNexusWorkflow", " RunWorkflow:SimpleNexusWorkflow", " StartActivity:traceable_activity", " RunActivity:traceable_activity", - " inner_llm_call", + " traceable_activity", + " inner_llm_call", " StartActivity:nested_traceable_activity", " RunActivity:nested_traceable_activity", - " outer_chain", - " inner_llm_call", + " nested_traceable_activity", + " outer_chain", + " inner_llm_call", " QueryWorkflow:my_query", " HandleQuery:my_query", " SignalWorkflow:my_signal", @@ -739,16 +749,21 @@ async def user_pipeline() -> str: hierarchy = dump_runs(collector) expected = [ "user_pipeline", + " nested_traceable_activity", + " outer_chain", + " inner_llm_call", + " nested_traceable_activity", + " outer_chain", + " inner_llm_call", " outer_chain", " inner_llm_call", - " outer_chain", - " inner_llm_call", - " outer_chain", + " traceable_activity", " inner_llm_call", - " inner_llm_call", - " inner_llm_call", - " outer_chain", + " traceable_activity", " inner_llm_call", + " nested_traceable_activity", + " outer_chain", + " inner_llm_call", ] assert ( hierarchy == expected @@ -853,10 +868,9 
@@ async def test_bridge_traceable_no_external_context( " sync_inner_llm_call", "async_calls_sync", " sync_inner_llm_call", - # Activity @traceable: no LangSmith context propagated (bridge is - # workflow-internal only), so activity traces are independent roots. - "outer_chain", - " inner_llm_call", + "nested_traceable_activity", + " outer_chain", + " inner_llm_call", ] assert ( hierarchy == expected @@ -947,8 +961,9 @@ async def test_mixed_sync_async_traceable_with_temporal_runs( " sync_inner_llm_call", " StartActivity:nested_traceable_activity", " RunActivity:nested_traceable_activity", - " outer_chain", - " inner_llm_call", + " nested_traceable_activity", + " outer_chain", + " inner_llm_call", ] assert ( hierarchy == expected diff --git a/tests/contrib/langsmith/test_plugin.py b/tests/contrib/langsmith/test_plugin.py index f902d20ef..7901004d2 100644 --- a/tests/contrib/langsmith/test_plugin.py +++ b/tests/contrib/langsmith/test_plugin.py @@ -109,30 +109,35 @@ async def user_pipeline() -> str: " RunWorkflow:ComprehensiveWorkflow", " StartActivity:nested_traceable_activity", " RunActivity:nested_traceable_activity", - " outer_chain", - " inner_llm_call", + " nested_traceable_activity", + " outer_chain", + " inner_llm_call", " StartActivity:nested_traceable_activity", " RunActivity:nested_traceable_activity", - " outer_chain", - " inner_llm_call", + " nested_traceable_activity", + " outer_chain", + " inner_llm_call", " outer_chain", " inner_llm_call", " StartChildWorkflow:TraceableActivityWorkflow", " RunWorkflow:TraceableActivityWorkflow", " StartActivity:traceable_activity", " RunActivity:traceable_activity", - " inner_llm_call", + " traceable_activity", + " inner_llm_call", " StartNexusOperation:NexusService/run_operation", " RunStartNexusOperationHandler:NexusService/run_operation", " StartWorkflow:SimpleNexusWorkflow", " RunWorkflow:SimpleNexusWorkflow", " StartActivity:traceable_activity", " RunActivity:traceable_activity", - " inner_llm_call", + " 
traceable_activity", + " inner_llm_call", " StartActivity:nested_traceable_activity", " RunActivity:nested_traceable_activity", - " outer_chain", - " inner_llm_call", + " nested_traceable_activity", + " outer_chain", + " inner_llm_call", " QueryWorkflow:my_query", " HandleQuery:my_query", " SignalWorkflow:my_signal", From ded720b720e4504fff49f1fbcaa7c3629ae2fb18 Mon Sep 17 00:00:00 2001 From: Maple Xu Date: Fri, 27 Mar 2026 12:36:04 -0400 Subject: [PATCH 18/30] tests --- tests/contrib/langsmith/test_integration.py | 208 +++++++++++++++++++- tests/contrib/langsmith/test_plugin.py | 31 +++ 2 files changed, 234 insertions(+), 5 deletions(-) diff --git a/tests/contrib/langsmith/test_integration.py b/tests/contrib/langsmith/test_integration.py index 1bb3216bc..bd1ae4636 100644 --- a/tests/contrib/langsmith/test_integration.py +++ b/tests/contrib/langsmith/test_integration.py @@ -130,6 +130,38 @@ async def run(self) -> str: # --------------------------------------------------------------------------- +@traceable(name="step_with_activity") +async def _step_with_activity() -> str: + """A @traceable step that wraps an activity call.""" + return await workflow.execute_activity( + nested_traceable_activity, + start_to_close_timeout=timedelta(seconds=10), + ) + + +@traceable(name="step_with_child_workflow") +async def _step_with_child_workflow() -> str: + """A @traceable step that wraps a child workflow call.""" + return await workflow.execute_child_workflow( + TraceableActivityWorkflow.run, + id=f"step-child-{workflow.info().workflow_id}", + ) + + +@traceable(name="step_with_nexus") +async def _step_with_nexus() -> str: + """A @traceable step that wraps a nexus operation.""" + nexus_client = workflow.create_nexus_client( + endpoint=make_nexus_endpoint_name(workflow.info().task_queue), + service=NexusService, + ) + nexus_handle = await nexus_client.start_operation( + operation=NexusService.run_operation, + input="test-input", + ) + return await nexus_handle + + @workflow.defn 
class ComprehensiveWorkflow: def __init__(self) -> None: @@ -139,24 +171,28 @@ def __init__(self) -> None: @workflow.run async def run(self) -> str: - # Regular activity + # Direct activity await workflow.execute_activity( nested_traceable_activity, start_to_close_timeout=timedelta(seconds=10), ) - # Local activity + # @traceable step wrapping activity (tests outbound nesting) + await _step_with_activity() + # Direct local activity await workflow.execute_local_activity( nested_traceable_activity, start_to_close_timeout=timedelta(seconds=10), ) - # Direct @traceable call + # Direct @traceable call (no outbound) await _outer_chain("from-workflow") - # Child workflow + # Direct child workflow await workflow.execute_child_workflow( TraceableActivityWorkflow.run, id=f"child-{workflow.info().workflow_id}", ) - # Nexus operation + # @traceable step wrapping child workflow (tests outbound nesting) + await _step_with_child_workflow() + # Direct nexus operation nexus_client = workflow.create_nexus_client( endpoint=make_nexus_endpoint_name(workflow.info().task_queue), service=NexusService, @@ -166,6 +202,8 @@ async def run(self) -> str: input="test-input", ) await nexus_handle + # @traceable step wrapping nexus operation (tests outbound nesting) + await _step_with_nexus() # Wait for signal self._waiting_for_signal = True await workflow.wait_condition(lambda: self._signal_received) @@ -631,24 +669,44 @@ async def user_pipeline() -> str: "user_pipeline", " StartWorkflow:ComprehensiveWorkflow", " RunWorkflow:ComprehensiveWorkflow", + # Direct activity " StartActivity:nested_traceable_activity", " RunActivity:nested_traceable_activity", " nested_traceable_activity", " outer_chain", " inner_llm_call", + # @traceable step wrapping activity + " step_with_activity", + " StartActivity:nested_traceable_activity", + " RunActivity:nested_traceable_activity", + " nested_traceable_activity", + " outer_chain", + " inner_llm_call", + # Direct local activity " 
StartActivity:nested_traceable_activity", " RunActivity:nested_traceable_activity", " nested_traceable_activity", " outer_chain", " inner_llm_call", + # Direct @traceable " outer_chain", " inner_llm_call", + # Direct child workflow " StartChildWorkflow:TraceableActivityWorkflow", " RunWorkflow:TraceableActivityWorkflow", " StartActivity:traceable_activity", " RunActivity:traceable_activity", " traceable_activity", " inner_llm_call", + # @traceable step wrapping child workflow + " step_with_child_workflow", + " StartChildWorkflow:TraceableActivityWorkflow", + " RunWorkflow:TraceableActivityWorkflow", + " StartActivity:traceable_activity", + " RunActivity:traceable_activity", + " traceable_activity", + " inner_llm_call", + # Direct nexus operation " StartNexusOperation:NexusService/run_operation", " RunStartNexusOperationHandler:NexusService/run_operation", " StartWorkflow:SimpleNexusWorkflow", @@ -657,6 +715,17 @@ async def user_pipeline() -> str: " RunActivity:traceable_activity", " traceable_activity", " inner_llm_call", + # @traceable step wrapping nexus operation + " step_with_nexus", + " StartNexusOperation:NexusService/run_operation", + " RunStartNexusOperationHandler:NexusService/run_operation", + " StartWorkflow:SimpleNexusWorkflow", + " RunWorkflow:SimpleNexusWorkflow", + " StartActivity:traceable_activity", + " RunActivity:traceable_activity", + " traceable_activity", + " inner_llm_call", + # Post-signal activity " StartActivity:nested_traceable_activity", " RunActivity:nested_traceable_activity", " nested_traceable_activity", @@ -749,18 +818,37 @@ async def user_pipeline() -> str: hierarchy = dump_runs(collector) expected = [ "user_pipeline", + # Direct activity " nested_traceable_activity", " outer_chain", " inner_llm_call", + # @traceable step wrapping activity + " step_with_activity", + " nested_traceable_activity", + " outer_chain", + " inner_llm_call", + # Direct local activity " nested_traceable_activity", " outer_chain", " inner_llm_call", + # 
Direct @traceable " outer_chain", " inner_llm_call", + # Direct child workflow (activity @traceable propagated via headers) " traceable_activity", " inner_llm_call", + # @traceable step wrapping child workflow + " step_with_child_workflow", + " traceable_activity", + " inner_llm_call", + # Direct nexus operation (activity @traceable propagated via headers) " traceable_activity", " inner_llm_call", + # @traceable step wrapping nexus operation + " step_with_nexus", + " traceable_activity", + " inner_llm_call", + # Post-signal activity " nested_traceable_activity", " outer_chain", " inner_llm_call", @@ -974,3 +1062,113 @@ async def test_mixed_sync_async_traceable_with_temporal_runs( assert len(run_ids) == len( set(run_ids) ), f"Duplicate run IDs found (replay issue): {run_ids}" + + +# --- Nexus service for Bug 2 test (direct @traceable in handler) --- + + +@traceable(name="nexus_direct_traceable") +async def _nexus_direct_traceable(input: str) -> str: + """A @traceable function called directly from a nexus handler.""" + return await _inner_llm_call(input) + + +@nexusrpc.handler.service_handler +class DirectTraceableNexusService: + """Nexus service that calls @traceable directly (not via activity).""" + + @nexusrpc.handler.sync_operation + async def direct_traceable_op( + self, + ctx: nexusrpc.handler.StartOperationContext, # type:ignore[reportUnusedParameter] + input: str, + ) -> str: + return await _nexus_direct_traceable(input) + + +@workflow.defn +class NexusDirectTraceableWorkflow: + """Workflow that calls a nexus operation whose handler uses @traceable directly.""" + + @workflow.run + async def run(self) -> str: + nexus_client = workflow.create_nexus_client( + endpoint=make_nexus_endpoint_name(workflow.info().task_queue), + service=DirectTraceableNexusService, + ) + return await nexus_client.execute_operation( + operation=DirectTraceableNexusService.direct_traceable_op, + input="nexus-input", + ) + + +# 
--------------------------------------------------------------------------- +# TestNexusInboundTracing — Bug 2: missing tracing_context in nexus handler +# --------------------------------------------------------------------------- + + +class TestNexusInboundTracing: + """Tests that @traceable calls inside nexus handlers work without temporal runs. + + Bug 2: The nexus inbound interceptor doesn't set up tracing_context() + when add_temporal_runs=False, so @traceable functions inside the handler + have no LangSmith client and no parent — their runs aren't collected. + """ + + async def test_nexus_direct_traceable_without_temporal_runs( + self, + client: Client, + env: WorkflowEnvironment, + ) -> None: + """Test F: @traceable in nexus handler works with add_temporal_runs=False. + + The worker must be started OUTSIDE tracing_context so that nexus handler + tasks inherit a clean contextvars state. Only the client call gets + tracing_context — the interceptor's tracing_context setup (or lack + thereof) is the only thing that should provide context to the handler. 
+ """ + if env.supports_time_skipping: + pytest.skip("Time-skipping server doesn't persist headers.") + + task_queue = f"nexus-direct-{uuid.uuid4()}" + collector = InMemoryRunCollector() + mock_ls = make_mock_ls_client(collector) + temporal_client = _make_temporal_client( + client, mock_ls, add_temporal_runs=False + ) + + # Worker starts OUTSIDE tracing_context — nexus handler tasks get clean context + async with new_worker( + temporal_client, + NexusDirectTraceableWorkflow, + nexus_service_handlers=[DirectTraceableNexusService()], + task_queue=task_queue, + max_cached_workflows=0, + ) as worker: + await env.create_nexus_endpoint( + make_nexus_endpoint_name(worker.task_queue), + worker.task_queue, + ) + # Only the client call gets tracing context, not the worker + with tracing_context(client=mock_ls, enabled=True): + handle = await temporal_client.start_workflow( + NexusDirectTraceableWorkflow.run, + id=f"nexus-direct-{uuid.uuid4()}", + task_queue=worker.task_queue, + ) + result = await handle.result() + + assert result == "response to: nexus-input" + + hierarchy = dump_runs(collector) + # @traceable runs from inside the nexus handler should be collected + # and nested under user_pipeline via context propagation. + # Without the fix, the nexus handler has no tracing_context so + # @traceable runs won't be collected — hierarchy will be empty. 
+ expected = [ + "nexus_direct_traceable", + " inner_llm_call", + ] + assert ( + hierarchy == expected + ), f"Hierarchy mismatch.\nExpected:\n{expected}\nActual:\n{hierarchy}" diff --git a/tests/contrib/langsmith/test_plugin.py b/tests/contrib/langsmith/test_plugin.py index 7901004d2..c20ced1ac 100644 --- a/tests/contrib/langsmith/test_plugin.py +++ b/tests/contrib/langsmith/test_plugin.py @@ -107,24 +107,44 @@ async def user_pipeline() -> str: "user_pipeline", " StartWorkflow:ComprehensiveWorkflow", " RunWorkflow:ComprehensiveWorkflow", + # Direct activity " StartActivity:nested_traceable_activity", " RunActivity:nested_traceable_activity", " nested_traceable_activity", " outer_chain", " inner_llm_call", + # @traceable step wrapping activity + " step_with_activity", + " StartActivity:nested_traceable_activity", + " RunActivity:nested_traceable_activity", + " nested_traceable_activity", + " outer_chain", + " inner_llm_call", + # Direct local activity " StartActivity:nested_traceable_activity", " RunActivity:nested_traceable_activity", " nested_traceable_activity", " outer_chain", " inner_llm_call", + # Direct @traceable " outer_chain", " inner_llm_call", + # Direct child workflow " StartChildWorkflow:TraceableActivityWorkflow", " RunWorkflow:TraceableActivityWorkflow", " StartActivity:traceable_activity", " RunActivity:traceable_activity", " traceable_activity", " inner_llm_call", + # @traceable step wrapping child workflow + " step_with_child_workflow", + " StartChildWorkflow:TraceableActivityWorkflow", + " RunWorkflow:TraceableActivityWorkflow", + " StartActivity:traceable_activity", + " RunActivity:traceable_activity", + " traceable_activity", + " inner_llm_call", + # Direct nexus operation " StartNexusOperation:NexusService/run_operation", " RunStartNexusOperationHandler:NexusService/run_operation", " StartWorkflow:SimpleNexusWorkflow", @@ -133,6 +153,17 @@ async def user_pipeline() -> str: " RunActivity:traceable_activity", " traceable_activity", " 
inner_llm_call", + # @traceable step wrapping nexus operation + " step_with_nexus", + " StartNexusOperation:NexusService/run_operation", + " RunStartNexusOperationHandler:NexusService/run_operation", + " StartWorkflow:SimpleNexusWorkflow", + " RunWorkflow:SimpleNexusWorkflow", + " StartActivity:traceable_activity", + " RunActivity:traceable_activity", + " traceable_activity", + " inner_llm_call", + # Post-signal activity " StartActivity:nested_traceable_activity", " RunActivity:nested_traceable_activity", " nested_traceable_activity", From 6ff9cc93fd33ce111fc1eedda11be240ea0b761e Mon Sep 17 00:00:00 2001 From: Maple Xu Date: Fri, 27 Mar 2026 15:26:47 -0400 Subject: [PATCH 19/30] Fix context propagation bugs and remove handler suppression Bug 1: Replace stale _current_run snapshot with ambient context in outbound interceptor. Add _get_current_run_for_propagation() helper that filters _ContextBridgeRunTree from ambient context. Outbound methods now read get_current_run_tree() for @traceable nesting instead of a frozen reference from workflow entry. Bug 2: Add tracing_context() to Nexus inbound interceptor for both execute_nexus_operation_start and execute_nexus_operation_cancel, matching the activity inbound pattern. Ensures @traceable functions in Nexus handlers have a LangSmith client even with add_temporal_runs=False. Remove handler suppression (is_handler check, _workflow_is_active flag) to align with OTel interceptor which creates spans for all handlers unconditionally. Add dump_traces() to test infrastructure for per-root-trace assertions. Restructure comprehensive tests so user_pipeline only wraps start_workflow, with polling/signals/queries as independent root traces. 
Co-Authored-By: Claude Opus 4.6 (1M context) --- temporalio/contrib/langsmith/_interceptor.py | 131 +++++------ tests/contrib/langsmith/conftest.py | 38 ++-- tests/contrib/langsmith/test_integration.py | 222 ++++++++++--------- tests/contrib/langsmith/test_interceptor.py | 50 ++--- tests/contrib/langsmith/test_plugin.py | 105 +++++---- 5 files changed, 302 insertions(+), 244 deletions(-) diff --git a/temporalio/contrib/langsmith/_interceptor.py b/temporalio/contrib/langsmith/_interceptor.py index 16c13a25f..cd34d4406 100644 --- a/temporalio/contrib/langsmith/_interceptor.py +++ b/temporalio/contrib/langsmith/_interceptor.py @@ -123,6 +123,18 @@ def _extract_nexus_context( return _ReplaySafeRunTree(run, executor=executor) if run else None +def _get_current_run_for_propagation() -> RunTree | None: + """Get the current ambient run for context propagation. + + Filters out _ContextBridgeRunTree, which is internal scaffolding + that should never be serialized into headers or used as parent runs. 
+ """ + run = get_current_run_tree() + if isinstance(run, _ContextBridgeRunTree): + return None + return run + + # --------------------------------------------------------------------------- # Sandbox safety: patch @traceable's aio_to_thread # --------------------------------------------------------------------------- @@ -742,46 +754,26 @@ class _LangSmithWorkflowInboundInterceptor( """Instruments workflow execution with LangSmith runs.""" _config: ClassVar[LangSmithInterceptor] - _current_run: _ReplaySafeRunTree | None = None def init(self, outbound: temporalio.worker.WorkflowOutboundInterceptor) -> None: - super().init( - _LangSmithWorkflowOutboundInterceptor(outbound, self._config, self) - ) + super().init(_LangSmithWorkflowOutboundInterceptor(outbound, self._config)) @contextmanager def _workflow_maybe_run( self, name: str, headers: Mapping[str, Payload] | None = None, - *, - is_handler: bool = False, ) -> Iterator[_ReplaySafeRunTree | None]: """Workflow-specific run creation with metadata. - Extracts parent from headers (if provided) and stores the run (or parent - fallback) as ``_current_run`` so the outbound interceptor can propagate - context even when ``add_temporal_runs=False``. - - Always sets up ``tracing_context`` so ``@traceable`` functions called - from workflow code can discover the parent and LangSmith client, - independent of the ``add_temporal_runs`` toggle. - - When ``is_handler`` is True and no LangSmith context is found in - headers, skips trace creation if a workflow run is already active - (``_current_run`` is set). This suppresses orphan traces from - uninstrumented client operations (e.g. query polling) while still - allowing handler traces when invoked with propagated context. + Extracts parent from headers (if provided) and sets up + ``tracing_context`` so ``@traceable`` functions called from workflow + code can discover the parent and LangSmith client, independent of the + ``add_temporal_runs`` toggle. 
""" parent = _extract_context(headers, self._config._executor) if headers else None if parent is not None: parent.ls_client = self._config._client - # Handler from an uninstrumented client during workflow execution: - # no LangSmith headers but _current_run is set. Skip trace creation - # to avoid orphan/duplicate handler traces (e.g. query polling). - if is_handler and parent is None and self._current_run is not None: - yield None - return info = temporalio.workflow.info() extra_metadata = { "temporalWorkflowID": info.workflow_id, @@ -815,12 +807,7 @@ def _workflow_maybe_run( parent=parent, extra_metadata=extra_metadata, ) as run: - prev_run = self._current_run - self._current_run = run or parent - try: - yield run - finally: - self._current_run = prev_run + yield run async def execute_workflow( self, input: temporalio.worker.ExecuteWorkflowInput @@ -833,31 +820,23 @@ async def execute_workflow( return await super().execute_workflow(input) async def handle_signal(self, input: temporalio.worker.HandleSignalInput) -> None: - with self._workflow_maybe_run( - f"HandleSignal:{input.signal}", input.headers, is_handler=True - ): + with self._workflow_maybe_run(f"HandleSignal:{input.signal}", input.headers): return await super().handle_signal(input) async def handle_query(self, input: temporalio.worker.HandleQueryInput) -> Any: - with self._workflow_maybe_run( - f"HandleQuery:{input.query}", input.headers, is_handler=True - ): + with self._workflow_maybe_run(f"HandleQuery:{input.query}", input.headers): return await super().handle_query(input) def handle_update_validator( self, input: temporalio.worker.HandleUpdateInput ) -> None: - with self._workflow_maybe_run( - f"ValidateUpdate:{input.update}", input.headers, is_handler=True - ): + with self._workflow_maybe_run(f"ValidateUpdate:{input.update}", input.headers): return super().handle_update_validator(input) async def handle_update_handler( self, input: temporalio.worker.HandleUpdateInput ) -> Any: - with 
self._workflow_maybe_run( - f"HandleUpdate:{input.update}", input.headers, is_handler=True - ): + with self._workflow_maybe_run(f"HandleUpdate:{input.update}", input.headers): return await super().handle_update_handler(input) @@ -875,19 +854,22 @@ def __init__( self, next: temporalio.worker.WorkflowOutboundInterceptor, config: LangSmithInterceptor, - inbound: _LangSmithWorkflowInboundInterceptor, ) -> None: super().__init__(next) self._config = config - self._inbound = inbound @contextmanager def _traced_outbound( self, name: str, input: _InputWithHeaders ) -> Iterator[_ReplaySafeRunTree | None]: - """Outbound workflow run creation with context injection into input.headers.""" - with self._config.maybe_run(name, parent=self._inbound._current_run) as run: - context_source = run or self._inbound._current_run + """Outbound workflow run creation with context injection into input.headers. + + Uses ambient context (``get_current_run_tree()``) instead of a cached + snapshot, so ``@traceable`` step functions that wrap outbound calls + correctly parent the outbound run under themselves. 
+ """ + with self._config.maybe_run(name) as run: + context_source = run or _get_current_run_for_propagation() if context_source: input.headers = _inject_context(input.headers, context_source) yield run @@ -923,8 +905,8 @@ async def signal_external_workflow( return await super().signal_external_workflow(input) def continue_as_new(self, input: temporalio.worker.ContinueAsNewInput) -> NoReturn: - # No trace created, but inject context from inbound's current run - current_run = getattr(self._inbound, "_current_run", None) + # No trace created, but inject context from ambient run + current_run = _get_current_run_for_propagation() if current_run: input.headers = _inject_context(input.headers, current_run) super().continue_as_new(input) @@ -934,9 +916,8 @@ async def start_nexus_operation( ) -> temporalio.workflow.NexusOperationHandle[Any]: with self._config.maybe_run( f"StartNexusOperation:{input.service}/{input.operation_name}", - parent=self._inbound._current_run, ) as run: - context_source = run or self._inbound._current_run + context_source = run or _get_current_run_for_propagation() if context_source: input.headers = _inject_nexus_context( input.headers or {}, context_source @@ -969,20 +950,42 @@ async def execute_nexus_operation_start( | nexusrpc.handler.StartOperationResultAsync ): parent = _extract_nexus_context(input.ctx.headers, self._config._executor) - with self._config.maybe_run( - f"RunStartNexusOperationHandler:{input.ctx.service}/{input.ctx.operation}", - run_type="tool", - parent=parent, - ): - return await self.next.execute_nexus_operation_start(input) + if parent is not None and hasattr(parent, "ls_client"): + parent.ls_client = self._config._client + ctx_kwargs: dict[str, Any] = { + "client": self._config._client, + "enabled": True, + } + if self._config._project_name: + ctx_kwargs["project_name"] = self._config._project_name + if parent: + ctx_kwargs["parent"] = parent + with tracing_context(**ctx_kwargs): + with self._config.maybe_run( + 
f"RunStartNexusOperationHandler:{input.ctx.service}/{input.ctx.operation}", + run_type="tool", + parent=parent, + ): + return await self.next.execute_nexus_operation_start(input) async def execute_nexus_operation_cancel( self, input: temporalio.worker.ExecuteNexusOperationCancelInput ) -> None: parent = _extract_nexus_context(input.ctx.headers, self._config._executor) - with self._config.maybe_run( - f"RunCancelNexusOperationHandler:{input.ctx.service}/{input.ctx.operation}", - run_type="tool", - parent=parent, - ): - return await self.next.execute_nexus_operation_cancel(input) + if parent is not None and hasattr(parent, "ls_client"): + parent.ls_client = self._config._client + ctx_kwargs: dict[str, Any] = { + "client": self._config._client, + "enabled": True, + } + if self._config._project_name: + ctx_kwargs["project_name"] = self._config._project_name + if parent: + ctx_kwargs["parent"] = parent + with tracing_context(**ctx_kwargs): + with self._config.maybe_run( + f"RunCancelNexusOperationHandler:{input.ctx.service}/{input.ctx.operation}", + run_type="tool", + parent=parent, + ): + return await self.next.execute_nexus_operation_cancel(input) diff --git a/tests/contrib/langsmith/conftest.py b/tests/contrib/langsmith/conftest.py index 4ceb0dda3..588b283b6 100644 --- a/tests/contrib/langsmith/conftest.py +++ b/tests/contrib/langsmith/conftest.py @@ -58,24 +58,18 @@ def clear(self) -> None: self._by_id.clear() -def dump_runs(collector: InMemoryRunCollector) -> list[str]: - """Reconstruct parent-child hierarchy from collected runs. +def dump_traces(collector: InMemoryRunCollector) -> list[list[str]]: + """Reconstruct parent-child hierarchy grouped by root trace. - Returns a list of indented strings, e.g.: - ["StartWorkflow:MyWf", " RunWorkflow:MyWf", " StartActivity:do_thing"] + Returns a list of traces, where each trace is a list of indented + strings (same format as dump_runs). Each trace starts from a + different root run. 
""" runs = collector.runs children: dict[str | None, list[_RunRecord]] = {} for r in runs: children.setdefault(r.parent_run_id, []).append(r) - result: list[str] = [] - - def _walk(parent_id: str | None, depth: int) -> None: - for child in children.get(parent_id, []): - result.append(" " * depth + child.name) - _walk(child.id, depth + 1) - # Strict: reject dangling parent references known_ids = {r.id for r in runs} for r in runs: @@ -84,10 +78,26 @@ def _walk(parent_id: str | None, depth: int) -> None: f"Run {r.name!r} (id={r.id}) has parent_run_id={r.parent_run_id} " f"which is not in the collected runs — dangling parent reference" ) - # Only walk true roots (parent_run_id is None) - _walk(None, 0) - return result + traces: list[list[str]] = [] + for root in children.get(None, []): + trace: list[str] = [] + + def _walk(parent_id: str | None, depth: int) -> None: + for child in children.get(parent_id, []): + trace.append(" " * depth + child.name) + _walk(child.id, depth + 1) + + trace.append(root.name) + _walk(root.id, 1) + traces.append(trace) + + return traces + + +def dump_runs(collector: InMemoryRunCollector) -> list[str]: + """Flat list of all runs across all traces.""" + return [run for trace in dump_traces(collector) for run in trace] def make_mock_ls_client(collector: InMemoryRunCollector) -> MagicMock: diff --git a/tests/contrib/langsmith/test_integration.py b/tests/contrib/langsmith/test_integration.py index bd1ae4636..44c8039c5 100644 --- a/tests/contrib/langsmith/test_integration.py +++ b/tests/contrib/langsmith/test_integration.py @@ -27,6 +27,7 @@ from tests.contrib.langsmith.conftest import ( InMemoryRunCollector, dump_runs, + dump_traces, make_mock_ls_client, ) from tests.helpers import new_worker @@ -171,28 +172,21 @@ def __init__(self) -> None: @workflow.run async def run(self) -> str: - # Direct activity await workflow.execute_activity( nested_traceable_activity, start_to_close_timeout=timedelta(seconds=10), ) - # @traceable step wrapping 
activity (tests outbound nesting) await _step_with_activity() - # Direct local activity await workflow.execute_local_activity( nested_traceable_activity, start_to_close_timeout=timedelta(seconds=10), ) - # Direct @traceable call (no outbound) await _outer_chain("from-workflow") - # Direct child workflow await workflow.execute_child_workflow( TraceableActivityWorkflow.run, id=f"child-{workflow.info().workflow_id}", ) - # @traceable step wrapping child workflow (tests outbound nesting) await _step_with_child_workflow() - # Direct nexus operation nexus_client = workflow.create_nexus_client( endpoint=make_nexus_endpoint_name(workflow.info().task_queue), service=NexusService, @@ -202,17 +196,14 @@ async def run(self) -> str: input="test-input", ) await nexus_handle - # @traceable step wrapping nexus operation (tests outbound nesting) await _step_with_nexus() - # Wait for signal + self._waiting_for_signal = True await workflow.wait_condition(lambda: self._signal_received) - # Post-signal activity (verifies context survives signal wait) await workflow.execute_activity( nested_traceable_activity, start_to_close_timeout=timedelta(seconds=10), ) - # Wait for update to complete await workflow.wait_condition(lambda: self._complete) return "comprehensive-done" @@ -326,6 +317,7 @@ def _make_temporal_client( return Client(**config) +@traceable(name="poll_query") async def _poll_query( handle: WorkflowHandle[Any, Any], query: Callable[..., Any], @@ -594,9 +586,8 @@ async def test_comprehensive_with_temporal_runs( ) -> None: """Full trace hierarchy with worker restart mid-workflow. - Starts workflow on first worker, kills it at signal wait point, - then starts fresh worker+plugin to signal and complete the workflow. - Verifies combined hierarchy from both worker lifetimes in one assertion. + user_pipeline only wraps start_workflow (completing before the worker + starts), so poll/signal/query traces are naturally separate root traces. 
""" if env.supports_time_skipping: pytest.skip("Time-skipping server doesn't persist headers.") @@ -605,13 +596,23 @@ async def test_comprehensive_with_temporal_runs( workflow_id = f"comprehensive-{uuid.uuid4()}" collector = InMemoryRunCollector() mock_ls = make_mock_ls_client(collector) + temporal_client_1 = _make_temporal_client( + client, mock_ls, add_temporal_runs=True + ) @traceable(name="user_pipeline") - async def user_pipeline() -> str: - # Phase 1: Start workflow, run until signal wait - temporal_client_1 = _make_temporal_client( - client, mock_ls, add_temporal_runs=True + async def user_pipeline() -> WorkflowHandle[Any, Any]: + return await temporal_client_1.start_workflow( + ComprehensiveWorkflow.run, + id=workflow_id, + task_queue=task_queue, ) + + with tracing_context(client=mock_ls, enabled=True): + # Start workflow — no worker yet, just a server RPC + handle = await user_pipeline() + + # Phase 1: worker picks up workflow, poll until signal wait async with new_worker( temporal_client_1, ComprehensiveWorkflow, @@ -626,20 +627,16 @@ async def user_pipeline() -> str: make_nexus_endpoint_name(worker.task_queue), worker.task_queue, ) - handle = await temporal_client_1.start_workflow( - ComprehensiveWorkflow.run, - id=workflow_id, - task_queue=worker.task_queue, - ) - # Poll via raw client to avoid creating trace runs - raw_handle = client.get_workflow_handle(workflow_id) assert await _poll_query( - raw_handle, + handle, ComprehensiveWorkflow.is_waiting_for_signal, expected=True, ), "Workflow never reached signal wait point" + # Raw-client query (no LangSmith interceptor) — root-level trace + raw_handle = client.get_workflow_handle(workflow_id) + await raw_handle.query(ComprehensiveWorkflow.is_waiting_for_signal) - # Phase 2: Fresh worker+plugin, signal to resume, complete + # Phase 2: fresh worker, signal to resume, complete temporal_client_2 = _make_temporal_client( client, mock_ls, add_temporal_runs=True ) @@ -653,52 +650,49 @@ async def user_pipeline() 
-> str: task_queue=task_queue, max_cached_workflows=0, ): - handle = temporal_client_2.get_workflow_handle(workflow_id) - await handle.query(ComprehensiveWorkflow.my_query) - await handle.signal(ComprehensiveWorkflow.my_signal, "hello") - await handle.execute_update(ComprehensiveWorkflow.my_update, "finish") - return await handle.result() - - with tracing_context(client=mock_ls, enabled=True): - result = await user_pipeline() + handle_2 = temporal_client_2.get_workflow_handle(workflow_id) + await handle_2.query(ComprehensiveWorkflow.my_query) + await handle_2.signal(ComprehensiveWorkflow.my_signal, "hello") + await handle_2.execute_update(ComprehensiveWorkflow.my_update, "finish") + result = await handle_2.result() assert result == "comprehensive-done" - hierarchy = dump_runs(collector) - expected = [ + traces = dump_traces(collector) + + # user_pipeline trace: StartWorkflow + full workflow execution tree + workflow_traces = [t for t in traces if t[0] == "user_pipeline"] + assert len(workflow_traces) == 1 + assert workflow_traces[0] == [ "user_pipeline", " StartWorkflow:ComprehensiveWorkflow", " RunWorkflow:ComprehensiveWorkflow", - # Direct activity " StartActivity:nested_traceable_activity", " RunActivity:nested_traceable_activity", " nested_traceable_activity", " outer_chain", " inner_llm_call", - # @traceable step wrapping activity + # step-wrapped activity " step_with_activity", " StartActivity:nested_traceable_activity", " RunActivity:nested_traceable_activity", " nested_traceable_activity", " outer_chain", " inner_llm_call", - # Direct local activity " StartActivity:nested_traceable_activity", " RunActivity:nested_traceable_activity", " nested_traceable_activity", " outer_chain", " inner_llm_call", - # Direct @traceable " outer_chain", " inner_llm_call", - # Direct child workflow " StartChildWorkflow:TraceableActivityWorkflow", " RunWorkflow:TraceableActivityWorkflow", " StartActivity:traceable_activity", " RunActivity:traceable_activity", " 
traceable_activity", " inner_llm_call", - # @traceable step wrapping child workflow + # step-wrapped child workflow " step_with_child_workflow", " StartChildWorkflow:TraceableActivityWorkflow", " RunWorkflow:TraceableActivityWorkflow", @@ -706,7 +700,6 @@ async def user_pipeline() -> str: " RunActivity:traceable_activity", " traceable_activity", " inner_llm_call", - # Direct nexus operation " StartNexusOperation:NexusService/run_operation", " RunStartNexusOperationHandler:NexusService/run_operation", " StartWorkflow:SimpleNexusWorkflow", @@ -715,7 +708,7 @@ async def user_pipeline() -> str: " RunActivity:traceable_activity", " traceable_activity", " inner_llm_call", - # @traceable step wrapping nexus operation + # step-wrapped nexus operation " step_with_nexus", " StartNexusOperation:NexusService/run_operation", " RunStartNexusOperationHandler:NexusService/run_operation", @@ -725,30 +718,58 @@ async def user_pipeline() -> str: " RunActivity:traceable_activity", " traceable_activity", " inner_llm_call", - # Post-signal activity + # post-signal " StartActivity:nested_traceable_activity", " RunActivity:nested_traceable_activity", " nested_traceable_activity", " outer_chain", " inner_llm_call", - " QueryWorkflow:my_query", - " HandleQuery:my_query", - " SignalWorkflow:my_signal", - " HandleSignal:my_signal", - " StartWorkflowUpdate:my_update", - " ValidateUpdate:my_update", - " HandleUpdate:my_update", ] - assert ( - hierarchy == expected - ), f"Hierarchy mismatch.\nExpected:\n{expected}\nActual:\n{hierarchy}" + + # poll_query trace (separate root, variable number of iterations) + poll_traces = [t for t in traces if t[0] == "poll_query"] + assert len(poll_traces) == 1 + poll = poll_traces[0] + assert poll[0] == "poll_query" + poll_children = poll[1:] + for i in range(0, len(poll_children), 2): + assert poll_children[i] == " QueryWorkflow:is_waiting_for_signal" + assert poll_children[i + 1] == " HandleQuery:is_waiting_for_signal" + + # Raw-client query — no parent 
context, appears as root + raw_query_traces = [t for t in traces if t[0].startswith("HandleQuery:")] + assert len(raw_query_traces) == 1 + + # Phase 2: each operation is its own root trace + query_traces = [t for t in traces if t[0] == "QueryWorkflow:my_query"] + assert len(query_traces) == 1 + assert query_traces[0] == [ + "QueryWorkflow:my_query", + " HandleQuery:my_query", + ] + + signal_traces = [t for t in traces if t[0] == "SignalWorkflow:my_signal"] + assert len(signal_traces) == 1 + assert signal_traces[0] == [ + "SignalWorkflow:my_signal", + " HandleSignal:my_signal", + ] + + update_traces = [t for t in traces if t[0] == "StartWorkflowUpdate:my_update"] + assert len(update_traces) == 1 + assert update_traces[0] == [ + "StartWorkflowUpdate:my_update", + " ValidateUpdate:my_update", + " HandleUpdate:my_update", + ] async def test_comprehensive_without_temporal_runs( self, client: Client, env: WorkflowEnvironment ) -> None: - """Same comprehensive workflow with add_temporal_runs=False and worker restart. + """Same workflow with add_temporal_runs=False and worker restart. Only @traceable runs appear. Context propagation via headers still works. + user_pipeline only wraps start_workflow, so poll traces are separate roots. 
""" if env.supports_time_skipping: pytest.skip("Time-skipping server doesn't persist headers.") @@ -757,13 +778,22 @@ async def test_comprehensive_without_temporal_runs( workflow_id = f"comprehensive-no-runs-{uuid.uuid4()}" collector = InMemoryRunCollector() mock_ls = make_mock_ls_client(collector) + temporal_client_1 = _make_temporal_client( + client, mock_ls, add_temporal_runs=False + ) @traceable(name="user_pipeline") - async def user_pipeline() -> str: - # Phase 1: Start workflow, run until signal wait - temporal_client_1 = _make_temporal_client( - client, mock_ls, add_temporal_runs=False + async def user_pipeline() -> WorkflowHandle[Any, Any]: + return await temporal_client_1.start_workflow( + ComprehensiveWorkflow.run, + id=workflow_id, + task_queue=task_queue, ) + + with tracing_context(client=mock_ls, enabled=True): + handle = await user_pipeline() + + # Phase 1: worker picks up workflow, poll until signal wait async with new_worker( temporal_client_1, ComprehensiveWorkflow, @@ -778,20 +808,16 @@ async def user_pipeline() -> str: make_nexus_endpoint_name(worker.task_queue), worker.task_queue, ) - handle = await temporal_client_1.start_workflow( - ComprehensiveWorkflow.run, - id=workflow_id, - task_queue=worker.task_queue, - ) - # Poll via raw client to avoid creating trace runs + # Raw-client query — no interceptor, produces nothing raw_handle = client.get_workflow_handle(workflow_id) + await raw_handle.query(ComprehensiveWorkflow.is_waiting_for_signal) assert await _poll_query( - raw_handle, + handle, ComprehensiveWorkflow.is_waiting_for_signal, expected=True, ), "Workflow never reached signal wait point" - # Phase 2: Fresh worker+plugin, signal to resume, complete + # Phase 2: fresh worker, signal to resume, complete temporal_client_2 = _make_temporal_client( client, mock_ls, add_temporal_runs=False ) @@ -805,57 +831,56 @@ async def user_pipeline() -> str: task_queue=task_queue, max_cached_workflows=0, ): - handle = 
temporal_client_2.get_workflow_handle(workflow_id) - await handle.signal(ComprehensiveWorkflow.my_signal, "hello") - await handle.execute_update(ComprehensiveWorkflow.my_update, "finish") - return await handle.result() - - with tracing_context(client=mock_ls, enabled=True): - result = await user_pipeline() + handle_2 = temporal_client_2.get_workflow_handle(workflow_id) + await handle_2.signal(ComprehensiveWorkflow.my_signal, "hello") + await handle_2.execute_update(ComprehensiveWorkflow.my_update, "finish") + result = await handle_2.result() assert result == "comprehensive-done" - hierarchy = dump_runs(collector) - expected = [ + traces = dump_traces(collector) + + # Main workflow trace (only @traceable runs, nested under user_pipeline) + workflow_traces = [t for t in traces if t[0] == "user_pipeline"] + assert len(workflow_traces) == 1 + expected_workflow = [ "user_pipeline", - # Direct activity " nested_traceable_activity", " outer_chain", " inner_llm_call", - # @traceable step wrapping activity " step_with_activity", " nested_traceable_activity", " outer_chain", " inner_llm_call", - # Direct local activity " nested_traceable_activity", " outer_chain", " inner_llm_call", - # Direct @traceable " outer_chain", " inner_llm_call", - # Direct child workflow (activity @traceable propagated via headers) " traceable_activity", " inner_llm_call", - # @traceable step wrapping child workflow " step_with_child_workflow", " traceable_activity", " inner_llm_call", - # Direct nexus operation (activity @traceable propagated via headers) " traceable_activity", " inner_llm_call", - # @traceable step wrapping nexus operation " step_with_nexus", " traceable_activity", " inner_llm_call", - # Post-signal activity + # post-signal " nested_traceable_activity", " outer_chain", " inner_llm_call", ] - assert ( - hierarchy == expected - ), f"Hierarchy mismatch.\nExpected:\n{expected}\nActual:\n{hierarchy}" + assert workflow_traces[0] == expected_workflow, ( + f"Workflow trace mismatch.\n" + 
f"Expected:\n{expected_workflow}\nActual:\n{workflow_traces[0]}" + ) + + # Poll query — separate root, just the @traceable wrapper, no Temporal children + poll_traces = [t for t in traces if t[0] == "poll_query"] + assert len(poll_traces) == 1 + assert poll_traces[0] == ["poll_query"] # --------------------------------------------------------------------------- @@ -1064,7 +1089,7 @@ async def test_mixed_sync_async_traceable_with_temporal_runs( ), f"Duplicate run IDs found (replay issue): {run_ids}" -# --- Nexus service for Bug 2 test (direct @traceable in handler) --- +# --- Nexus service with direct @traceable call in handler --- @traceable(name="nexus_direct_traceable") @@ -1103,24 +1128,19 @@ async def run(self) -> str: # --------------------------------------------------------------------------- -# TestNexusInboundTracing — Bug 2: missing tracing_context in nexus handler +# TestNexusInboundTracing # --------------------------------------------------------------------------- class TestNexusInboundTracing: - """Tests that @traceable calls inside nexus handlers work without temporal runs. - - Bug 2: The nexus inbound interceptor doesn't set up tracing_context() - when add_temporal_runs=False, so @traceable functions inside the handler - have no LangSmith client and no parent — their runs aren't collected. - """ + """Verifies nexus handlers receive tracing_context for @traceable collection.""" async def test_nexus_direct_traceable_without_temporal_runs( self, client: Client, env: WorkflowEnvironment, ) -> None: - """Test F: @traceable in nexus handler works with add_temporal_runs=False. + """@traceable in nexus handler works with add_temporal_runs=False. The worker must be started OUTSIDE tracing_context so that nexus handler tasks inherit a clean contextvars state. 
Only the client call gets @@ -1162,9 +1182,7 @@ async def test_nexus_direct_traceable_without_temporal_runs( hierarchy = dump_runs(collector) # @traceable runs from inside the nexus handler should be collected - # and nested under user_pipeline via context propagation. - # Without the fix, the nexus handler has no tracing_context so - # @traceable runs won't be collected — hierarchy will be empty. + # via the interceptor's tracing_context setup. expected = [ "nexus_direct_traceable", " inner_llm_call", diff --git a/tests/contrib/langsmith/test_interceptor.py b/tests/contrib/langsmith/test_interceptor.py index 1b75099b7..da3ae6165 100644 --- a/tests/contrib/langsmith/test_interceptor.py +++ b/tests/contrib/langsmith/test_interceptor.py @@ -726,9 +726,9 @@ class TestWorkflowOutboundInterceptor: def _make_outbound_interceptor( self, *, add_temporal_runs: bool = True ) -> tuple[Any, MagicMock, Any]: - """Create outbound interceptor with mock next and inbound reference. + """Create outbound interceptor with mock next and ambient run. - Returns (outbound_interceptor, mock_next, inbound_interceptor). + Returns (outbound_interceptor, mock_next, mock_current_run). 
""" config = LangSmithInterceptor( client=MagicMock(), add_temporal_runs=add_temporal_runs @@ -765,15 +765,12 @@ def _make_outbound_interceptor( _LangSmithWorkflowOutboundInterceptor, ) - outbound = _LangSmithWorkflowOutboundInterceptor( - mock_outbound_next, config, inbound - ) + outbound = _LangSmithWorkflowOutboundInterceptor(mock_outbound_next, config) - # Set a current run on the inbound to simulate active workflow execution + # Simulate active workflow execution via ambient context mock_current_run = _make_mock_run() - inbound._current_run = mock_current_run - return outbound, mock_outbound_next, inbound + return outbound, mock_outbound_next, mock_current_run @pytest.mark.parametrize( "method,input_attr,input_val,expected_name", @@ -831,15 +828,16 @@ async def test_creates_trace_and_injects_headers( """Each outbound method creates the correct trace and injects headers.""" mock_run = _make_mock_run() MockRunTree.return_value = mock_run - outbound, mock_next, _ = self._make_outbound_interceptor() + outbound, mock_next, mock_current_run = self._make_outbound_interceptor() mock_input = MagicMock() setattr(mock_input, input_attr, input_val) mock_input.headers = {} - result = getattr(outbound, method)(mock_input) - if asyncio.iscoroutine(result): - await result + with patch(_PATCH_GET_CURRENT_RUN, return_value=mock_current_run): + result = getattr(outbound, method)(mock_input) + if asyncio.iscoroutine(result): + await result assert _get_runtree_name(MockRunTree) == expected_name assert HEADER_KEY in mock_input.headers @@ -852,17 +850,18 @@ async def test_creates_trace_and_injects_headers( def test_continue_as_new( self, _mock_in_wf: Any, _mock_replaying: Any, MockRunTree: Any ) -> None: - """continue_as_new does NOT create a new trace, but injects context from current run.""" - outbound, mock_next, _inbound = self._make_outbound_interceptor() + """continue_as_new does NOT create a new trace, but injects context from ambient run.""" + outbound, mock_next, 
mock_current_run = self._make_outbound_interceptor() mock_input = MagicMock() mock_input.headers = {} - outbound.continue_as_new(mock_input) + with patch(_PATCH_GET_CURRENT_RUN, return_value=mock_current_run): + outbound.continue_as_new(mock_input) # No new RunTree should be created for continue_as_new MockRunTree.assert_not_called() - # But headers SHOULD be modified (context from inbound's _current_run) + # But headers SHOULD be modified (context from ambient run) assert HEADER_KEY in mock_input.headers mock_next.continue_as_new.assert_called_once() @@ -881,14 +880,15 @@ async def test_start_nexus_operation( """start_nexus_operation creates a trace named StartNexusOperation:{service}/{operation}.""" mock_run = _make_mock_run() MockRunTree.return_value = mock_run - outbound, mock_next, _ = self._make_outbound_interceptor() + outbound, mock_next, mock_current_run = self._make_outbound_interceptor() mock_input = MagicMock() mock_input.service = "MyService" mock_input.operation_name = "do_op" mock_input.headers = {} - await outbound.start_nexus_operation(mock_input) + with patch(_PATCH_GET_CURRENT_RUN, return_value=mock_current_run): + await outbound.start_nexus_operation(mock_input) assert _get_runtree_name(MockRunTree) == "StartNexusOperation:MyService/do_op" # Nexus uses string headers, so context injection uses _inject_nexus_context @@ -1063,8 +1063,8 @@ async def test_false_still_propagates_context( ) -> None: """With add_temporal_runs=False, no runs are created but context still propagates. - 1. Workflow outbound: injects the inbound's _current_run (parent fallback) - into headers even though no StartActivity run is created. + 1. Workflow outbound: injects the ambient run's context into headers even + though no StartActivity run is created. 2. Activity inbound: sets tracing_context(parent=extracted_parent) unconditionally (before _maybe_run), so @traceable code nests correctly even without a RunActivity run. 
@@ -1090,19 +1090,17 @@ async def test_false_still_propagates_context( mock_outbound_next = MagicMock() mock_outbound_next.start_activity = MagicMock() inbound.init(mock_outbound_next) - outbound = _LangSmithWorkflowOutboundInterceptor( - mock_outbound_next, config, inbound - ) + outbound = _LangSmithWorkflowOutboundInterceptor(mock_outbound_next, config) - # Simulate an inbound parent context (as if extracted from headers) + # Simulate an ambient parent context (as if from active workflow execution) mock_parent = _make_mock_run() - inbound._current_run = mock_parent mock_input = MagicMock() mock_input.activity = "do_thing" mock_input.headers = {} - outbound.start_activity(mock_input) + with patch(_PATCH_GET_CURRENT_RUN, return_value=mock_parent): + outbound.start_activity(mock_input) # No RunTree should be created (add_temporal_runs=False) MockRunTree.assert_not_called() diff --git a/tests/contrib/langsmith/test_plugin.py b/tests/contrib/langsmith/test_plugin.py index c20ced1ac..c6d81176f 100644 --- a/tests/contrib/langsmith/test_plugin.py +++ b/tests/contrib/langsmith/test_plugin.py @@ -3,15 +3,16 @@ from __future__ import annotations import uuid +from typing import Any from unittest.mock import MagicMock import pytest from langsmith import traceable, tracing_context -from temporalio.client import Client +from temporalio.client import Client, WorkflowHandle from temporalio.contrib.langsmith import LangSmithInterceptor, LangSmithPlugin from temporalio.testing import WorkflowEnvironment -from tests.contrib.langsmith.conftest import dump_runs +from tests.contrib.langsmith.conftest import dump_traces from tests.contrib.langsmith.test_integration import ( ComprehensiveWorkflow, NexusService, @@ -56,7 +57,11 @@ class TestPluginIntegration: async def test_comprehensive_plugin_trace_hierarchy( self, client: Client, env: WorkflowEnvironment ) -> None: - """Plugin wired to a real Temporal worker produces the full trace hierarchy.""" + """Plugin wired to a real Temporal 
worker produces the full trace hierarchy. + + user_pipeline only wraps start_workflow, so poll/query/signal/update + traces are naturally separate root traces. + """ if env.supports_time_skipping: pytest.skip("Time-skipping server doesn't persist headers.") @@ -64,8 +69,20 @@ async def test_comprehensive_plugin_trace_hierarchy( client, add_temporal_runs=True ) + task_queue = f"plugin-comprehensive-{uuid.uuid4()}" + workflow_id = f"plugin-comprehensive-{uuid.uuid4()}" + @traceable(name="user_pipeline") - async def user_pipeline() -> str: + async def user_pipeline() -> WorkflowHandle[Any, Any]: + return await temporal_client.start_workflow( + ComprehensiveWorkflow.run, + id=workflow_id, + task_queue=task_queue, + ) + + with tracing_context(client=mock_ls_client, enabled=True): + handle = await user_pipeline() + async with new_worker( temporal_client, ComprehensiveWorkflow, @@ -73,70 +90,60 @@ async def user_pipeline() -> str: SimpleNexusWorkflow, activities=[nested_traceable_activity, traceable_activity], nexus_service_handlers=[NexusService()], + task_queue=task_queue, max_cached_workflows=0, ) as worker: await env.create_nexus_endpoint( make_nexus_endpoint_name(worker.task_queue), worker.task_queue, ) - workflow_id = f"plugin-comprehensive-{uuid.uuid4()}" - handle = await temporal_client.start_workflow( - ComprehensiveWorkflow.run, - id=workflow_id, - task_queue=worker.task_queue, - ) - # Poll via raw client to avoid creating trace runs - raw_handle = client.get_workflow_handle(workflow_id) assert await _poll_query( - raw_handle, + handle, ComprehensiveWorkflow.is_waiting_for_signal, expected=True, ), "Workflow never reached signal wait point" await handle.query(ComprehensiveWorkflow.my_query) await handle.signal(ComprehensiveWorkflow.my_signal, "hello") await handle.execute_update(ComprehensiveWorkflow.my_update, "finish") - return await handle.result() - - with tracing_context(client=mock_ls_client, enabled=True): - result = await user_pipeline() + result = await 
handle.result() assert result == "comprehensive-done" - hierarchy = dump_runs(collector) - expected = [ + traces = dump_traces(collector) + + # user_pipeline trace: StartWorkflow + full workflow execution tree + workflow_traces = [t for t in traces if t[0] == "user_pipeline"] + assert len(workflow_traces) == 1 + assert workflow_traces[0] == [ "user_pipeline", " StartWorkflow:ComprehensiveWorkflow", " RunWorkflow:ComprehensiveWorkflow", - # Direct activity " StartActivity:nested_traceable_activity", " RunActivity:nested_traceable_activity", " nested_traceable_activity", " outer_chain", " inner_llm_call", - # @traceable step wrapping activity + # step-wrapped activity " step_with_activity", " StartActivity:nested_traceable_activity", " RunActivity:nested_traceable_activity", " nested_traceable_activity", " outer_chain", " inner_llm_call", - # Direct local activity " StartActivity:nested_traceable_activity", " RunActivity:nested_traceable_activity", " nested_traceable_activity", " outer_chain", " inner_llm_call", - # Direct @traceable " outer_chain", " inner_llm_call", - # Direct child workflow " StartChildWorkflow:TraceableActivityWorkflow", " RunWorkflow:TraceableActivityWorkflow", " StartActivity:traceable_activity", " RunActivity:traceable_activity", " traceable_activity", " inner_llm_call", - # @traceable step wrapping child workflow + # step-wrapped child workflow " step_with_child_workflow", " StartChildWorkflow:TraceableActivityWorkflow", " RunWorkflow:TraceableActivityWorkflow", @@ -144,7 +151,6 @@ async def user_pipeline() -> str: " RunActivity:traceable_activity", " traceable_activity", " inner_llm_call", - # Direct nexus operation " StartNexusOperation:NexusService/run_operation", " RunStartNexusOperationHandler:NexusService/run_operation", " StartWorkflow:SimpleNexusWorkflow", @@ -153,7 +159,7 @@ async def user_pipeline() -> str: " RunActivity:traceable_activity", " traceable_activity", " inner_llm_call", - # @traceable step wrapping nexus operation + # 
step-wrapped nexus operation " step_with_nexus", " StartNexusOperation:NexusService/run_operation", " RunStartNexusOperationHandler:NexusService/run_operation", @@ -163,20 +169,43 @@ async def user_pipeline() -> str: " RunActivity:traceable_activity", " traceable_activity", " inner_llm_call", - # Post-signal activity + # post-signal " StartActivity:nested_traceable_activity", " RunActivity:nested_traceable_activity", " nested_traceable_activity", " outer_chain", " inner_llm_call", - " QueryWorkflow:my_query", - " HandleQuery:my_query", - " SignalWorkflow:my_signal", - " HandleSignal:my_signal", - " StartWorkflowUpdate:my_update", - " ValidateUpdate:my_update", - " HandleUpdate:my_update", ] - assert ( - hierarchy == expected - ), f"Hierarchy mismatch.\nExpected:\n{expected}\nActual:\n{hierarchy}" + + # poll_query trace (separate root, variable number of iterations) + poll_traces = [t for t in traces if t[0] == "poll_query"] + assert len(poll_traces) == 1 + poll = poll_traces[0] + assert poll[0] == "poll_query" + poll_children = poll[1:] + for i in range(0, len(poll_children), 2): + assert poll_children[i] == " QueryWorkflow:is_waiting_for_signal" + assert poll_children[i + 1] == " HandleQuery:is_waiting_for_signal" + + # Each remaining operation is its own root trace + query_traces = [t for t in traces if t[0] == "QueryWorkflow:my_query"] + assert len(query_traces) == 1 + assert query_traces[0] == [ + "QueryWorkflow:my_query", + " HandleQuery:my_query", + ] + + signal_traces = [t for t in traces if t[0] == "SignalWorkflow:my_signal"] + assert len(signal_traces) == 1 + assert signal_traces[0] == [ + "SignalWorkflow:my_signal", + " HandleSignal:my_signal", + ] + + update_traces = [t for t in traces if t[0] == "StartWorkflowUpdate:my_update"] + assert len(update_traces) == 1 + assert update_traces[0] == [ + "StartWorkflowUpdate:my_update", + " ValidateUpdate:my_update", + " HandleUpdate:my_update", + ] From d9fb85aa8bc2788078b6740c1d4a651f23991355 Mon Sep 17 00:00:00 
2001 From: Maple Xu Date: Mon, 30 Mar 2026 15:34:36 -0400 Subject: [PATCH 20/30] Skip LangSmith tracing for built-in Temporal queries Built-in queries like __temporal_workflow_metadata, __stack_trace, and __enhanced_stack_trace are fired automatically by infrastructure (e.g. the Temporal Web UI) and are not user-facing. Filter them out of LangSmith traces when add_temporal_runs=True to reduce noise. Co-Authored-By: Claude Opus 4.6 (1M context) --- temporalio/contrib/langsmith/_interceptor.py | 9 ++ tests/contrib/langsmith/test_integration.py | 90 ++++++++++++++++++++ 2 files changed, 99 insertions(+) diff --git a/temporalio/contrib/langsmith/_interceptor.py b/temporalio/contrib/langsmith/_interceptor.py index cd34d4406..e0eab4e80 100644 --- a/temporalio/contrib/langsmith/_interceptor.py +++ b/temporalio/contrib/langsmith/_interceptor.py @@ -36,6 +36,13 @@ HEADER_KEY = "_temporal-langsmith-context" +_BUILTIN_QUERIES: frozenset[str] = frozenset( + { + "__stack_trace", + "__enhanced_stack_trace", + } +) + # --------------------------------------------------------------------------- # Context helpers # --------------------------------------------------------------------------- @@ -824,6 +831,8 @@ async def handle_signal(self, input: temporalio.worker.HandleSignalInput) -> Non return await super().handle_signal(input) async def handle_query(self, input: temporalio.worker.HandleQueryInput) -> Any: + if input.query.startswith("__temporal") or input.query in _BUILTIN_QUERIES: + return await super().handle_query(input) with self._workflow_maybe_run(f"HandleQuery:{input.query}", input.headers): return await super().handle_query(input) diff --git a/tests/contrib/langsmith/test_integration.py b/tests/contrib/langsmith/test_integration.py index 44c8039c5..202a1546e 100644 --- a/tests/contrib/langsmith/test_integration.py +++ b/tests/contrib/langsmith/test_integration.py @@ -1190,3 +1190,93 @@ async def test_nexus_direct_traceable_without_temporal_runs( assert ( hierarchy == 
expected ), f"Hierarchy mismatch.\nExpected:\n{expected}\nActual:\n{hierarchy}" + + +# --------------------------------------------------------------------------- +# TestBuiltinQueryFiltering +# --------------------------------------------------------------------------- + + +@workflow.defn +class QueryFilteringWorkflow: + """Workflow with a user query and a signal to complete.""" + + def __init__(self) -> None: + self._complete = False + + @workflow.run + async def run(self) -> str: + await workflow.wait_condition(lambda: self._complete) + return "done" + + @workflow.signal + def complete(self) -> None: + self._complete = True + + @workflow.query + def my_query(self) -> str: + return "query-result" + + +class TestBuiltinQueryFiltering: + """Verifies __temporal_ prefixed queries are not traced.""" + + async def test_temporal_prefixed_query_not_traced( + self, + client: Client, + ) -> None: + """__temporal_workflow_metadata query should not produce a trace, + but user-defined queries should still be traced. + + Uses add_temporal_runs=False on the query client to suppress + client-side QueryWorkflow traces, isolating the test to + worker-side HandleQuery traces only. 
+ """ + + task_queue = f"query-filter-{uuid.uuid4()}" + collector = InMemoryRunCollector() + mock_ls = make_mock_ls_client(collector) + + # Worker client: add_temporal_runs=True so HandleQuery traces are created + worker_client = _make_temporal_client(client, mock_ls, add_temporal_runs=True) + # Query client: add_temporal_runs=False to suppress client-side traces + query_client = _make_temporal_client(client, mock_ls, add_temporal_runs=False) + + async with new_worker( + worker_client, + QueryFilteringWorkflow, + task_queue=task_queue, + max_cached_workflows=0, + ) as worker: + handle = await query_client.start_workflow( + QueryFilteringWorkflow.run, + id=f"query-filter-{uuid.uuid4()}", + task_queue=worker.task_queue, + ) + + # Wait for workflow to start by polling the user query + assert await _poll_query( + handle, + QueryFilteringWorkflow.my_query, + expected="query-result", + ), "Workflow never started" + + collector.clear() + + # Built-in queries — should NOT be traced + await handle.query("__temporal_workflow_metadata") + await handle.query("__stack_trace") + await handle.query("__enhanced_stack_trace") + + # User query — should be traced + await handle.query(QueryFilteringWorkflow.my_query) + + await handle.signal(QueryFilteringWorkflow.complete) + assert await handle.result() == "done" + + # Built-in queries should be absent; only user query and signal remain. 
+ traces = dump_traces(collector) + assert traces == [ + ["HandleQuery:my_query"], + ["HandleSignal:complete"], + ], f"Unexpected traces: {traces}" From d271abac8dc9e47a3768a6aa485bad550ecfb3cd Mon Sep 17 00:00:00 2001 From: Maple Xu Date: Mon, 30 Mar 2026 16:51:15 -0400 Subject: [PATCH 21/30] Remove dead error gate from _safe_aio_to_thread MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit The isinstance check that raised RuntimeError("Use the LangSmith plugin...") was unreachable — when the plugin is active, _workflow_maybe_run always provides a _ReplaySafeRunTree parent, so _setup_run always returns a _ReplaySafeRunTree child. Co-Authored-By: Claude Opus 4.6 (1M context) --- temporalio/contrib/langsmith/_interceptor.py | 17 +------ tests/contrib/langsmith/test_background_io.py | 50 +------------------ 2 files changed, 2 insertions(+), 65 deletions(-) diff --git a/temporalio/contrib/langsmith/_interceptor.py b/temporalio/contrib/langsmith/_interceptor.py index e0eab4e80..3abc0d198 100644 --- a/temporalio/contrib/langsmith/_interceptor.py +++ b/temporalio/contrib/langsmith/_interceptor.py @@ -157,10 +157,6 @@ def _patch_aio_to_thread() -> None: sandbox blocks ``run_in_executor``. This patch runs those functions synchronously (they are CPU-bound, no I/O) when inside a workflow. - Also serves as an error gate: if ``_setup_run`` creates a plain ``RunTree`` - (no ``_ReplaySafeRunTree`` or ``_ContextBridgeRunTree`` parent), the - ``post()`` call would block. The patch detects this and raises a clear - error telling the user to configure the LangSmith plugin. """ global _aio_to_thread_patched # noqa: PLW0603 if _aio_to_thread_patched: @@ -191,18 +187,7 @@ async def _safe_aio_to_thread( # and patch() are no-ops during replay, which handles I/O # suppression. _setup_run must run normally during replay to # maintain parent-child linkage across the replay boundary. 
- result = func(*args, **kwargs) - # Error gate: if _setup_run created a plain RunTree (no - # _ReplaySafeRunTree parent found), post() would block on - # compressed_traces.lock. Detect this and raise a clear error. - if isinstance(result, RunTree) and not isinstance( - result, _ReplaySafeRunTree - ): - raise RuntimeError( - "Use the LangSmith plugin to enable @langsmith.traceable " - "in Temporal workflows." - ) - return result + return func(*args, **kwargs) _aiter.aio_to_thread = _safe_aio_to_thread # type: ignore[assignment] _aio_to_thread_patched = True diff --git a/tests/contrib/langsmith/test_background_io.py b/tests/contrib/langsmith/test_background_io.py index 5c9eaad60..5899fb3a7 100644 --- a/tests/contrib/langsmith/test_background_io.py +++ b/tests/contrib/langsmith/test_background_io.py @@ -1,7 +1,7 @@ """Unit tests for _ReplaySafeRunTree and _ContextBridgeRunTree. Covers create_child propagation, executor-backed post/patch, -replay suppression, post-shutdown fallback, and aio_to_thread error gate. +replay suppression, and post-shutdown fallback. """ from __future__ import annotations @@ -635,54 +635,6 @@ def test_patch_falls_back_to_sync_after_shutdown( mock_run.patch.assert_called_once() -# =================================================================== -# TestAioToThreadErrorGate -# =================================================================== - - -class TestAioToThreadErrorGate: - """Tests for aio_to_thread error gate when plugin is not configured.""" - - @pytest.mark.asyncio - @patch(_PATCH_IN_WORKFLOW, return_value=True) - @patch(_PATCH_IS_REPLAYING, return_value=False) - async def test_error_gate_raises_without_plugin( - self, _mock_replaying: Any, _mock_in_wf: Any - ) -> None: - """Async @traceable in workflow without plugin raises a clear error. - - When _setup_run creates a plain RunTree (no _ReplaySafeRunTree parent), - the aio_to_thread patch should detect this and raise an error telling - the user to configure the LangSmith plugin. 
- """ - import langsmith._internal._aiter as _aiter - - import temporalio.contrib.langsmith._interceptor as _mod - from temporalio.contrib.langsmith._interceptor import _patch_aio_to_thread - - # Save original state and restore after test to avoid global mutation - original_aio_to_thread = _aiter.aio_to_thread - original_patched_flag = _mod._aio_to_thread_patched - try: - # Reset the flag so _patch_aio_to_thread applies fresh - _mod._aio_to_thread_patched = False - _patch_aio_to_thread() - - # The patched aio_to_thread should raise when a plain RunTree is - # created (no _ReplaySafeRunTree or _ContextBridgeRunTree parent). - # This simulates _setup_run creating a root RunTree. - def _mock_setup_run(*_args: Any, **_kwargs: Any) -> RunTree: - return RunTree(name="test", run_type="chain") - - with pytest.raises(RuntimeError, match="Use the LangSmith plugin"): - # The error gate should fire when _setup_run returns a plain RunTree - # This test validates the error gate exists and fires - await _aiter.aio_to_thread(_mock_setup_run) - finally: - _aiter.aio_to_thread = original_aio_to_thread # type: ignore[assignment] - _mod._aio_to_thread_patched = original_patched_flag - - # =================================================================== # Test_ReplaySafeRunTreeConstructor # =================================================================== From 4f5d040089d1120dff6457ee70c16fee7a33680f Mon Sep 17 00:00:00 2001 From: Maple Xu Date: Mon, 30 Mar 2026 18:54:24 -0400 Subject: [PATCH 22/30] Fix pydoctor cross-refs and mock collector trace duplication - Replace :class:`RunTree` cross-references with backtick literals in docstrings to fix pydoctor build failure (exit status 3). - Add run ID dedup to InMemoryRunCollector.record_create to match real LangSmith API upsert semantics. Fixes flaky Windows CI failure where combined replay+new-event activations caused duplicate trace records with deterministic IDs. 
Co-Authored-By: Claude Opus 4.6 (1M context) --- temporalio/contrib/langsmith/_interceptor.py | 6 +++--- tests/contrib/langsmith/conftest.py | 5 ++++- 2 files changed, 7 insertions(+), 4 deletions(-) diff --git a/temporalio/contrib/langsmith/_interceptor.py b/temporalio/contrib/langsmith/_interceptor.py index 3abc0d198..68bd32b6a 100644 --- a/temporalio/contrib/langsmith/_interceptor.py +++ b/temporalio/contrib/langsmith/_interceptor.py @@ -92,7 +92,7 @@ def _extract_context( ) -> _ReplaySafeRunTree | None: """Extract LangSmith context from Temporal payload headers. - Reconstructs a :class:`RunTree` from the ``_temporal-langsmith-context`` header on + Reconstructs a ``RunTree`` from the ``_temporal-langsmith-context`` header on the receiving side, wrapped in a :class:`_ReplaySafeRunTree` so inbound interceptors can establish a parent-child relationship with the sender's run. Returns ``None`` if no header is present. @@ -244,9 +244,9 @@ def _uuid_from_random(rng: random.Random) -> uuid.UUID: class _ReplaySafeRunTree(RunTree): - """Wrapper around a :class:`RunTree` with replay-safe ``post``, ``end``, and ``patch``. + """Wrapper around a ``RunTree`` with replay-safe ``post``, ``end``, and ``patch``. - Inherits from :class:`RunTree` so ``isinstance`` checks pass, but does + Inherits from ``RunTree`` so ``isinstance`` checks pass, but does **not** call ``super().__init__()``—the wrapped ``_run`` is the real RunTree. Attribute access is delegated via ``__getattr__``/``__setattr__``. 
diff --git a/tests/contrib/langsmith/conftest.py b/tests/contrib/langsmith/conftest.py index 588b283b6..776747ea7 100644 --- a/tests/contrib/langsmith/conftest.py +++ b/tests/contrib/langsmith/conftest.py @@ -31,8 +31,11 @@ def __init__(self) -> None: self._by_id: dict[str, _RunRecord] = {} def record_create(self, **kwargs: Any) -> None: + run_id = str(kwargs.get("id", kwargs.get("run_id", ""))) + if run_id in self._by_id: + return rec = _RunRecord( - id=str(kwargs.get("id", kwargs.get("run_id", ""))), + id=run_id, parent_run_id=( str(kwargs["parent_run_id"]) if kwargs.get("parent_run_id") else None ), From 54d47a96476db9e1eb887184be5ec445038ba904 Mon Sep 17 00:00:00 2001 From: Maple Xu Date: Wed, 1 Apr 2026 17:37:18 -0400 Subject: [PATCH 23/30] Address PR review feedback: comments, end() determinism, yield simplification MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit - Reword sandbox/event loop terminology to use each in correct context - Make _safe_aio_to_thread docstring prescriptive (must not block) - Fix end() to use workflow.now() instead of datetime.now(), remove sandbox_unrestricted() from end() - Remove dead uuid4 try/except in read-only context - Remove redundant lazy import langsmith in __init__ - Improve _ContextBridgeRunTree, ls_client, _traced_outbound docs - Change get_current_run_tree → _get_current_run_for_propagation at call sites that propagate context - Simplify _maybe_run to yield None; callers use ambient context via _get_current_run_for_propagation() instead of the yielded value - Full comment audit: fix stale refs, move misplaced comments Co-Authored-By: Claude Opus 4.6 (1M context) --- temporalio/contrib/langsmith/_interceptor.py | 139 ++++++++---------- tests/contrib/langsmith/test_background_io.py | 11 +- tests/contrib/langsmith/test_interceptor.py | 51 ++++--- 3 files changed, 91 insertions(+), 110 deletions(-) diff --git a/temporalio/contrib/langsmith/_interceptor.py 
b/temporalio/contrib/langsmith/_interceptor.py index 68bd32b6a..970e87ec9 100644 --- a/temporalio/contrib/langsmith/_interceptor.py +++ b/temporalio/contrib/langsmith/_interceptor.py @@ -76,11 +76,11 @@ def _inject_current_context( ) -> Mapping[str, Payload]: """Inject the current ambient LangSmith context into Temporal payload headers. - Reads ``get_current_run_tree()`` and injects if present. Returns headers - unchanged if no context is active. Called unconditionally so that context - propagation is independent of the ``add_temporal_runs`` toggle. + Reads ``_get_current_run_for_propagation()`` and injects if present. Returns + headers unchanged if no context is active. Called unconditionally so that + context propagation is independent of the ``add_temporal_runs`` toggle. """ - current = get_current_run_tree() + current = _get_current_run_for_propagation() if current is not None: return _inject_context(headers, current) return headers @@ -143,7 +143,7 @@ def _get_current_run_for_propagation() -> RunTree | None: # --------------------------------------------------------------------------- -# Sandbox safety: patch @traceable's aio_to_thread +# Workflow event loop safety: patch @traceable's aio_to_thread # --------------------------------------------------------------------------- _aio_to_thread_patched = False @@ -154,8 +154,9 @@ def _patch_aio_to_thread() -> None: The ``@traceable`` decorator on async functions uses ``aio_to_thread()`` → ``loop.run_in_executor()`` for run setup/teardown. The Temporal workflow - sandbox blocks ``run_in_executor``. This patch runs those functions - synchronously (they are CPU-bound, no I/O) when inside a workflow. + event loop does not support ``run_in_executor``. This patch runs those + functions synchronously on the workflow thread when inside a workflow. + Functions passed here must not perform blocking I/O. 
""" global _aio_to_thread_patched # noqa: PLW0603 @@ -178,15 +179,8 @@ async def _safe_aio_to_thread( if not temporalio.workflow.in_workflow(): return await _original(func, *args, __ctx=__ctx, **kwargs) with temporalio.workflow.unsafe.sandbox_unrestricted(): - # Run func directly in the current context (no ctx.run) so - # that context var changes (e.g. _PARENT_RUN_TREE set by - # @traceable's _setup_run) propagate to the caller. - # This is safe because workflows are single-threaded. - # - # No replay-time tracing disable — _ReplaySafeRunTree.post() - # and patch() are no-ops during replay, which handles I/O - # suppression. _setup_run must run normally during replay to - # maintain parent-child linkage across the replay boundary. + # Run without ctx.run() so context var changes propagate + # to the caller. Safe because workflows are single-threaded. return func(*args, **kwargs) _aiter.aio_to_thread = _safe_aio_to_thread # type: ignore[assignment] @@ -250,10 +244,12 @@ class _ReplaySafeRunTree(RunTree): **not** call ``super().__init__()``—the wrapped ``_run`` is the real RunTree. Attribute access is delegated via ``__getattr__``/``__setattr__``. - During replay, ``post()``, ``end()``, and ``patch()`` become no-ops. - In workflow context, ``post()`` and ``patch()`` submit to a single-worker - ``ThreadPoolExecutor`` for FIFO ordering, avoiding blocking on the - workflow task thread. + During replay, ``post()``, ``end()``, and ``patch()`` become no-ops + (I/O suppression), but ``create_child()`` still runs to maintain + parent-child linkage so ``@traceable``'s ``_setup_run`` can build the + run tree across the replay boundary. In workflow context, ``post()`` + and ``patch()`` submit to a single-worker ``ThreadPoolExecutor`` for + FIFO ordering, avoiding blocking on the workflow task thread. 
""" def __init__( # pyright: ignore[reportMissingSuperCall] @@ -330,15 +326,15 @@ def post(self, exclude_child_runs: bool = True) -> None: def end(self, **kwargs: Any) -> None: """End the run, skipping during replay. - No I/O — just sets attributes on self._run. Runs synchronously. + Pre-computes ``end_time`` via ``workflow.now()`` in workflow context + so ``RunTree.end()`` doesn't call ``datetime.now()`` (non-deterministic + and sandbox-restricted). """ if _is_replaying(): return if temporalio.workflow.in_workflow(): - with temporalio.workflow.unsafe.sandbox_unrestricted(): - self._run.end(**kwargs) - else: - self._run.end(**kwargs) + kwargs.setdefault("end_time", temporalio.workflow.now()) + self._run.end(**kwargs) def patch(self, *, exclude_inputs: bool = False) -> None: """Patch the run to LangSmith, skipping during replay.""" @@ -352,7 +348,12 @@ def patch(self, *, exclude_inputs: bool = False) -> None: class _ContextBridgeRunTree(_ReplaySafeRunTree): - """Lightweight bridge for ``add_temporal_runs=False`` without external context. + """Invisible parent placeholder for ``add_temporal_runs=False`` without propagated context. + + When ``add_temporal_runs=False`` and no parent was propagated via headers, + ``@traceable`` functions still need a parent context to create + ``_ReplaySafeRunTree`` children. This placeholder provides that context + without appearing in LangSmith — its children become root runs. Never posted, patched, or ended — no trace of it exists in LangSmith. ``create_child()`` creates root ``_ReplaySafeRunTree`` objects (no @@ -397,25 +398,24 @@ def end(self, **kwargs: Any) -> NoReturn: def create_child(self, *args: Any, **kwargs: Any) -> _ReplaySafeRunTree: """Create a root _ReplaySafeRunTree (no parent_run_id). - Creates a fresh ``RunTree(...)`` directly (not via - ``self._run.create_child``) to avoid setting ``parent_run_id``, - ``parent_dotted_order``, and ``trace_id`` to the bridge's values. 
- Maps ``run_id`` → ``id`` matching LangSmith's ``create_child`` convention. + Creates a fresh ``RunTree(...)`` directly (bypassing + ``self._run.create_child``) so children are independent root runs + with no link back to the bridge. """ self._inject_deterministic_ids(kwargs) - # Map run_id → id (matching RunTree.create_child convention) + # RunTree expects "id", but callers pass "run_id". RunTree.create_child + # also does the same mapping internally. if "run_id" in kwargs: kwargs["id"] = kwargs.pop("run_id") - # Inherit ls_client and session_name from bridge via constructor. - # session_name is a Pydantic field (alias="project_name") so it - # must be passed at construction to avoid the "default" fallback. + # Inherit ls_client and session_name from bridge. + # session_name must be passed at construction time. kwargs.setdefault("ls_client", self._run.ls_client) kwargs.setdefault("session_name", self._run.session_name) child_run = RunTree(*args, **kwargs) - # Set replicas post-construction (not a RunTree Pydantic field) + # Replicas must be set post-construction if self._run.replicas is not None: child_run.replicas = self._run.replicas return _ReplaySafeRunTree(child_run, executor=self._executor) @@ -447,13 +447,13 @@ def _maybe_run( parent: RunTree | None = None, project_name: str | None = None, executor: ThreadPoolExecutor, -) -> Iterator[_ReplaySafeRunTree | None]: +) -> Iterator[None]: """Create a LangSmith run, handling errors. - If add_temporal_runs is False, yields None (no run created). Context propagation is handled unconditionally by callers. - When a run IS created, uses :class:`_ReplaySafeRunTree` for - replay and sandbox safety, then sets it as ambient context via + replay and event loop safety, then sets it as ambient context via ``tracing_context(parent=run_tree)`` so ``get_current_run_tree()`` returns it and ``_inject_current_context()`` can inject it. - On exception: marks run as errored (unless benign ApplicationError), re-raises. 
@@ -476,7 +476,7 @@ def _maybe_run( # If no explicit parent, inherit from ambient @traceable context if parent is None: - parent = get_current_run_tree() + parent = _get_current_run_for_propagation() kwargs: dict[str, Any] = dict( name=name, @@ -484,25 +484,15 @@ def _maybe_run( inputs=inputs or {}, ls_client=client, ) - # Deterministic IDs and start times in workflow context so that - # replayed workflows produce identical LangSmith runs instead of - # duplicates. In production, a workflow can be evicted from the - # worker cache and later replayed on a different worker — without - # deterministic IDs the replayed execution would create a second - # run for the same logical operation. Uses a workflow-bound random - # generator seeded from the workflow's deterministic seed, so UUIDs - # are identical across replays. + # Deterministic IDs so replayed workflows produce identical runs + # instead of duplicates (see _get_workflow_random for details). rng = _get_workflow_random() + # In read-only contexts (queries, update validators), _get_workflow_random() + # returns None. Deterministic IDs aren't needed — these aren't replayed. + # LangSmith will auto-generate a random UUID. if rng is not None: kwargs["id"] = _uuid_from_random(rng) kwargs["start_time"] = temporalio.workflow.now() - elif temporalio.workflow.in_workflow(): - # Read-only context (e.g. 
query handler) — use workflow.uuid4() - try: - kwargs["id"] = temporalio.workflow.uuid4() - kwargs["start_time"] = temporalio.workflow.now() - except Exception: - pass # Not in a real workflow context (e.g., unit test mock) if project_name is not None: kwargs["project_name"] = project_name if parent is not None: @@ -518,7 +508,7 @@ def _maybe_run( run_tree.post() try: with tracing_context(parent=run_tree, client=client): - yield run_tree + yield None except Exception as exc: if not _is_benign_error(exc): run_tree.end(error=f"{type(exc).__name__}: {exc}") @@ -552,10 +542,7 @@ def __init__( ) -> None: """Initialize the LangSmith interceptor with tracing configuration.""" super().__init__() - # Import langsmith.Client lazily to avoid hard dependency at import time if client is None: - import langsmith - client = langsmith.Client() self._client = client self._project_name = project_name @@ -572,7 +559,7 @@ def maybe_run( run_type: str = "chain", parent: RunTree | None = None, extra_metadata: dict[str, Any] | None = None, - ) -> Iterator[_ReplaySafeRunTree | None]: + ) -> Iterator[None]: """Create a LangSmith run with this interceptor's config already applied.""" metadata = {**self._default_metadata, **(extra_metadata or {})} with _maybe_run( @@ -709,12 +696,11 @@ async def execute_activity( "temporalActivityID": info.activity_id or "", } # Unconditionally set tracing context so @traceable functions inside - # activities can use the plugin's LangSmith client and inherit parent. - # When add_temporal_runs=True: maybe_run overrides with the RunActivity run. - # When add_temporal_runs=False: parent (if any) remains active for @traceable, - # and the client is available even without a parent. - # Override the parent's ls_client so @traceable children (via create_child) - # use the plugin's client rather than lazily creating a real one. + # activities inherit the plugin's client and parent, regardless of + # the add_temporal_runs toggle. 
+ # + # Override ls_client so @traceable children use the plugin's client + # rather than lazily creating one with different configuration. if parent is not None and hasattr(parent, "ls_client"): parent.ls_client = self._config._client ctx_kwargs: dict[str, Any] = { @@ -755,7 +741,7 @@ def _workflow_maybe_run( self, name: str, headers: Mapping[str, Payload] | None = None, - ) -> Iterator[_ReplaySafeRunTree | None]: + ) -> Iterator[None]: """Workflow-specific run creation with metadata. Extracts parent from headers (if provided) and sets up @@ -771,10 +757,6 @@ def _workflow_maybe_run( "temporalWorkflowID": info.workflow_id, "temporalRunID": info.run_id, } - # Set up tracing context for @traceable functions inside the workflow. - # When add_temporal_runs=True, _maybe_run overrides with the - # RunWorkflow run as parent. When False, this outer context ensures - # @traceable still sees the propagated parent from headers. ctx_kwargs: dict[str, Any] = { "client": self._config._client, "enabled": True, @@ -853,20 +835,17 @@ def __init__( self._config = config @contextmanager - def _traced_outbound( - self, name: str, input: _InputWithHeaders - ) -> Iterator[_ReplaySafeRunTree | None]: + def _traced_outbound(self, name: str, input: _InputWithHeaders) -> Iterator[None]: """Outbound workflow run creation with context injection into input.headers. - Uses ambient context (``get_current_run_tree()``) instead of a cached - snapshot, so ``@traceable`` step functions that wrap outbound calls - correctly parent the outbound run under themselves. + Uses ambient context so ``@traceable`` step functions that wrap + outbound calls correctly parent the outbound run under themselves. 
""" - with self._config.maybe_run(name) as run: - context_source = run or _get_current_run_for_propagation() + with self._config.maybe_run(name): + context_source = _get_current_run_for_propagation() if context_source: input.headers = _inject_context(input.headers, context_source) - yield run + yield None def start_activity( self, input: temporalio.worker.StartActivityInput @@ -910,8 +889,8 @@ async def start_nexus_operation( ) -> temporalio.workflow.NexusOperationHandle[Any]: with self._config.maybe_run( f"StartNexusOperation:{input.service}/{input.operation_name}", - ) as run: - context_source = run or _get_current_run_for_propagation() + ): + context_source = _get_current_run_for_propagation() if context_source: input.headers = _inject_nexus_context( input.headers or {}, context_source diff --git a/tests/contrib/langsmith/test_background_io.py b/tests/contrib/langsmith/test_background_io.py index 5899fb3a7..1ef7ca475 100644 --- a/tests/contrib/langsmith/test_background_io.py +++ b/tests/contrib/langsmith/test_background_io.py @@ -356,10 +356,11 @@ def test_end_noop_during_replay( mock_run.end.assert_not_called() + @patch(_PATCH_WF_NOW, return_value=datetime.now(timezone.utc)) @patch(_PATCH_IS_REPLAYING, return_value=False) @patch(_PATCH_IN_WORKFLOW, return_value=True) def test_end_delegates_during_normal_execution( - self, _mock_in_wf: Any, _mock_replaying: Any + self, _mock_in_wf: Any, _mock_replaying: Any, _mock_now: Any ) -> None: """end() delegates to self._run.end() during normal (non-replay) execution.""" executor = _make_executor() @@ -368,9 +369,11 @@ def test_end_delegates_during_normal_execution( tree.end(outputs={"result": "done"}, error="some error") - mock_run.end.assert_called_once_with( - outputs={"result": "done"}, error="some error" - ) + mock_run.end.assert_called_once() + call_kwargs = mock_run.end.call_args.kwargs + assert call_kwargs["outputs"] == {"result": "done"} + assert call_kwargs["error"] == "some error" + assert "end_time" in 
call_kwargs # =================================================================== diff --git a/tests/contrib/langsmith/test_interceptor.py b/tests/contrib/langsmith/test_interceptor.py index da3ae6165..58c506d52 100644 --- a/tests/contrib/langsmith/test_interceptor.py +++ b/tests/contrib/langsmith/test_interceptor.py @@ -4,6 +4,7 @@ import asyncio from concurrent.futures import ThreadPoolExecutor +from datetime import datetime, timezone from typing import Any from unittest.mock import AsyncMock, MagicMock, patch @@ -28,6 +29,7 @@ _PATCH_RUNTREE = f"{_MOD}.RunTree" _PATCH_IN_WORKFLOW = f"{_MOD}.temporalio.workflow.in_workflow" _PATCH_IS_REPLAYING = f"{_MOD}.temporalio.workflow.unsafe.is_replaying_history_events" +_PATCH_WF_NOW = f"{_MOD}.temporalio.workflow.now" _PATCH_WF_INFO = f"{_MOD}.temporalio.workflow.info" _PATCH_SANDBOX = f"{_MOD}.temporalio.workflow.unsafe.sandbox_unrestricted" _PATCH_TRACING_CTX = f"{_MOD}.tracing_context" @@ -162,9 +164,8 @@ def test_replay_noop_post_end_patch( "TestRun", add_temporal_runs=True, executor=_make_executor(), - ) as run: - assert isinstance(run, _ReplaySafeRunTree) - assert run._run is mock_run + ): + pass # RunTree IS created (wrapped in _ReplaySafeRunTree) MockRunTree.assert_called_once() # But post/end/patch are no-ops during replay @@ -172,11 +173,12 @@ def test_replay_noop_post_end_patch( mock_run.end.assert_not_called() mock_run.patch.assert_not_called() + @patch(_PATCH_WF_NOW, return_value=datetime.now(timezone.utc)) @patch(_PATCH_RUNTREE) @patch(_PATCH_IS_REPLAYING, return_value=False) @patch(_PATCH_IN_WORKFLOW, return_value=True) def test_create_trace_when_not_replaying( - self, _mock_in_wf: Any, _mock_replaying: Any, MockRunTree: Any + self, _mock_in_wf: Any, _mock_replaying: Any, MockRunTree: Any, _mock_now: Any ) -> None: """When not replaying (but in workflow), _maybe_run creates a _ReplaySafeRunTree.""" mock_run = _make_mock_run() @@ -187,9 +189,8 @@ def test_create_trace_when_not_replaying( "TestRun", 
add_temporal_runs=True, executor=_make_executor(), - ) as run: - assert isinstance(run, _ReplaySafeRunTree) - assert run._run is mock_run + ): + pass MockRunTree.assert_called_once() assert MockRunTree.call_args.kwargs["name"] == "TestRun" @@ -207,9 +208,8 @@ def test_create_trace_outside_workflow( "TestRun", add_temporal_runs=True, executor=_make_executor(), - ) as run: - assert isinstance(run, _ReplaySafeRunTree) - assert run._run is mock_run + ): + pass MockRunTree.assert_called_once() @@ -236,9 +236,7 @@ def test_exception_marks_run_errored( "TestRun", add_temporal_runs=True, executor=_make_executor(), - ) as run: - assert run is not None - assert run._run is mock_run + ): raise RuntimeError("boom") # run.end should have been called with error containing "boom" mock_run.end.assert_called() @@ -263,9 +261,7 @@ def test_benign_application_error_not_marked( "TestRun", add_temporal_runs=True, executor=_make_executor(), - ) as run: - assert run is not None - assert run._run is mock_run + ): raise ApplicationError( "benign", category=ApplicationErrorCategory.BENIGN, @@ -292,9 +288,7 @@ def test_non_benign_application_error_marked( "TestRun", add_temporal_runs=True, executor=_make_executor(), - ) as run: - assert run is not None - assert run._run is mock_run + ): raise ApplicationError("bad", non_retryable=True) mock_run.end.assert_called() end_kwargs = mock_run.end.call_args.kwargs @@ -315,9 +309,8 @@ def test_success_completes_normally( "TestRun", add_temporal_runs=True, executor=_make_executor(), - ) as run: - assert run is not None - assert run._run is mock_run + ): + pass mock_run.end.assert_called_once() end_kwargs = mock_run.end.call_args.kwargs assert end_kwargs.get("outputs") == {"status": "ok"} @@ -341,9 +334,7 @@ def test_cancelled_error_propagates_without_marking_run( "TestRun", add_temporal_runs=True, executor=_make_executor(), - ) as run: - assert run is not None - assert run._run is mock_run + ): raise asyncio.CancelledError() # run.end should NOT have 
been called with error= end_calls = mock_run.end.call_args_list @@ -457,7 +448,7 @@ async def test_add_temporal_runs_false_skips_trace( and no headers are injected. _inject_current_context() is called unconditionally, but - get_current_run_tree() returns None so headers are unchanged. + _get_current_run_for_propagation() returns None so headers are unchanged. """ interceptor, mock_next = self._make_client_interceptor(add_temporal_runs=False) mock_input = MagicMock() @@ -625,6 +616,7 @@ def _make_workflow_interceptors( return wf_interceptor, mock_next @pytest.mark.asyncio + @patch(_PATCH_WF_NOW, return_value=datetime.now(timezone.utc)) @patch(_PATCH_SANDBOX) @patch(_PATCH_RUNTREE) @patch(_PATCH_IS_REPLAYING, return_value=False) @@ -637,6 +629,7 @@ async def test_execute_workflow( _mock_replaying: Any, MockRunTree: Any, mock_sandbox: Any, + _mock_now: Any, ) -> None: """execute_workflow creates a run named RunWorkflow:{workflow_type}.""" mock_wf_info.return_value = _mock_workflow_info(workflow_type="MyWorkflow") @@ -679,6 +672,7 @@ async def test_execute_workflow( ids=["signal", "query", "validator", "update_handler"], ) @pytest.mark.asyncio + @patch(_PATCH_WF_NOW, return_value=datetime.now(timezone.utc)) @patch(_PATCH_SANDBOX) @patch(_PATCH_RUNTREE) @patch(_PATCH_IS_REPLAYING, return_value=False) @@ -691,6 +685,7 @@ async def test_handler_creates_trace( _mock_replaying: Any, MockRunTree: Any, mock_sandbox: Any, + _mock_now: Any, method: str, input_attr: str, input_val: str, @@ -810,6 +805,7 @@ def _make_outbound_interceptor( ], ) @pytest.mark.asyncio + @patch(_PATCH_WF_NOW, return_value=datetime.now(timezone.utc)) @patch(_PATCH_SANDBOX) @patch(_PATCH_RUNTREE) @patch(_PATCH_IS_REPLAYING, return_value=False) @@ -820,6 +816,7 @@ async def test_creates_trace_and_injects_headers( _mock_replaying: Any, MockRunTree: Any, mock_sandbox: Any, + _mock_now: Any, method: str, input_attr: str, input_val: str, @@ -866,6 +863,7 @@ def test_continue_as_new( 
mock_next.continue_as_new.assert_called_once() @pytest.mark.asyncio + @patch(_PATCH_WF_NOW, return_value=datetime.now(timezone.utc)) @patch(_PATCH_SANDBOX) @patch(_PATCH_RUNTREE) @patch(_PATCH_IS_REPLAYING, return_value=False) @@ -876,6 +874,7 @@ async def test_start_nexus_operation( _mock_replaying: Any, MockRunTree: Any, mock_sandbox: Any, + _mock_now: Any, ) -> None: """start_nexus_operation creates a trace named StartNexusOperation:{service}/{operation}.""" mock_run = _make_mock_run() From c37bac86d76b6621fa6880cc36bdc30f3998304e Mon Sep 17 00:00:00 2001 From: Maple Xu Date: Fri, 3 Apr 2026 13:08:34 -0400 Subject: [PATCH 24/30] Create per-worker LangSmith interceptors instead of sharing one across workers Previously, all workers sharing a LangSmithPlugin used the same LangSmithInterceptor (and its ThreadPoolExecutor). Now each worker gets its own interceptor via a factory in configure_worker, while client interception uses a shared wrapper that only implements client.Interceptor to avoid being pulled into workers by _init_from_config. Also removes the sync fallback from _submit (formerly _submit_or_fallback) so executor-after-shutdown errors surface immediately instead of silently degrading. 
Co-Authored-By: Claude Opus 4.6 (1M context) --- temporalio/contrib/langsmith/_interceptor.py | 20 +-- temporalio/contrib/langsmith/_plugin.py | 58 +++++++-- tests/contrib/langsmith/test_background_io.py | 25 ++-- tests/contrib/langsmith/test_plugin.py | 117 +++++++++++++++++- 4 files changed, 178 insertions(+), 42 deletions(-) diff --git a/temporalio/contrib/langsmith/_interceptor.py b/temporalio/contrib/langsmith/_interceptor.py index 970e87ec9..f31e62542 100644 --- a/temporalio/contrib/langsmith/_interceptor.py +++ b/temporalio/contrib/langsmith/_interceptor.py @@ -294,22 +294,16 @@ def create_child(self, *args: Any, **kwargs: Any) -> _ReplaySafeRunTree: child_run = self._run.create_child(*args, **kwargs) return _ReplaySafeRunTree(child_run, executor=self._executor) - def _submit_or_fallback( - self, fn: Callable[..., object], *args: Any, **kwargs: Any - ) -> None: - """Submit work to executor, falling back to synchronous after shutdown.""" + def _submit(self, fn: Callable[..., object], *args: Any, **kwargs: Any) -> None: + """Submit work to the background executor.""" def _log_future_exception(future: Future[None]) -> None: exc = future.exception() if exc is not None: logger.error("LangSmith background I/O error: %s", exc) - try: - future = self._executor.submit(fn, *args, **kwargs) - future.add_done_callback(_log_future_exception) - except RuntimeError: - # Executor shut down — fall back to synchronous execution - fn(*args, **kwargs) + future = self._executor.submit(fn, *args, **kwargs) + future.add_done_callback(_log_future_exception) def post(self, exclude_child_runs: bool = True) -> None: """Post the run to LangSmith, skipping during replay.""" @@ -317,9 +311,7 @@ def post(self, exclude_child_runs: bool = True) -> None: if _is_replaying(): return with temporalio.workflow.unsafe.sandbox_unrestricted(): - self._submit_or_fallback( - self._run.post, exclude_child_runs=exclude_child_runs - ) + self._submit(self._run.post, exclude_child_runs=exclude_child_runs) 
else: self._run.post(exclude_child_runs=exclude_child_runs) @@ -342,7 +334,7 @@ def patch(self, *, exclude_inputs: bool = False) -> None: if _is_replaying(): return with temporalio.workflow.unsafe.sandbox_unrestricted(): - self._submit_or_fallback(self._run.patch, exclude_inputs=exclude_inputs) + self._submit(self._run.patch, exclude_inputs=exclude_inputs) else: self._run.patch(exclude_inputs=exclude_inputs) diff --git a/temporalio/contrib/langsmith/_plugin.py b/temporalio/contrib/langsmith/_plugin.py index ae3e2ac2e..70ace7bd6 100644 --- a/temporalio/contrib/langsmith/_plugin.py +++ b/temporalio/contrib/langsmith/_plugin.py @@ -9,12 +9,33 @@ import langsmith +import temporalio.client from temporalio.contrib.langsmith._interceptor import LangSmithInterceptor -from temporalio.plugin import SimplePlugin +from temporalio.plugin import SimplePlugin, WorkerConfig from temporalio.worker import WorkflowRunner from temporalio.worker.workflow_sandbox import SandboxedWorkflowRunner +class _ClientOnlyLangSmithInterceptor(temporalio.client.Interceptor): + """Wrapper that exposes only the client interceptor interface. + + This prevents ``_init_from_config`` from detecting it as a + ``worker.Interceptor`` and automatically pulling it into every worker. + Each worker gets its own ``LangSmithInterceptor`` via + ``LangSmithPlugin.configure_worker`` instead. + """ + + def __init__(self, interceptor: LangSmithInterceptor) -> None: + super().__init__() + self._interceptor = interceptor + + def intercept_client( + self, next: temporalio.client.OutboundInterceptor + ) -> temporalio.client.OutboundInterceptor: + """Delegate to the wrapped interceptor.""" + return self._interceptor.intercept_client(next) + + class LangSmithPlugin(SimplePlugin): """LangSmith tracing plugin for Temporal SDK. @@ -42,14 +63,19 @@ def __init__( metadata: Default metadata to attach to all runs. tags: Default tags to attach to all runs. 
""" - interceptor = LangSmithInterceptor( - client=client, - project_name=project_name, - add_temporal_runs=add_temporal_runs, - default_metadata=metadata, - default_tags=tags, - ) - interceptors = [interceptor] + + def make_interceptor() -> LangSmithInterceptor: + return LangSmithInterceptor( + client=client, + project_name=project_name, + add_temporal_runs=add_temporal_runs, + default_metadata=metadata, + default_tags=tags, + ) + + wrapper = _ClientOnlyLangSmithInterceptor(make_interceptor()) + ls_client = wrapper._interceptor._client + self._make_interceptor = make_interceptor def workflow_runner(runner: WorkflowRunner | None) -> WorkflowRunner: if not runner: @@ -68,12 +94,20 @@ async def run_context() -> AsyncIterator[None]: try: yield finally: - interceptor._executor.shutdown(wait=True) - interceptor._client.flush() + ls_client.flush() super().__init__( "langchain.LangSmithPlugin", - interceptors=interceptors, + interceptors=[wrapper], workflow_runner=workflow_runner, run_context=run_context, ) + + def configure_worker(self, config: WorkerConfig) -> WorkerConfig: + """Create a fresh LangSmithInterceptor for each worker.""" + config = super().configure_worker(config) + worker_interceptor = self._make_interceptor() + interceptors = list(config.get("interceptors") or []) + interceptors.append(worker_interceptor) + config["interceptors"] = interceptors + return config diff --git a/tests/contrib/langsmith/test_background_io.py b/tests/contrib/langsmith/test_background_io.py index 1ef7ca475..faca0e581 100644 --- a/tests/contrib/langsmith/test_background_io.py +++ b/tests/contrib/langsmith/test_background_io.py @@ -597,45 +597,42 @@ def capturing_post(*_args: Any, **_kwargs: Any) -> None: # =================================================================== -# TestPostShutdownFallback +# TestPostShutdownRaises # =================================================================== -class TestPostShutdownFallback: - """Tests for post-shutdown executor fallback to 
synchronous execution.""" +class TestPostShutdownRaises: + """Tests that post/patch raise after executor shutdown.""" @patch(_PATCH_IS_REPLAYING, return_value=False) @patch(_PATCH_IN_WORKFLOW, return_value=True) - def test_post_falls_back_to_sync_after_shutdown( + def test_post_raises_after_shutdown( self, _mock_in_wf: Any, _mock_replaying: Any ) -> None: - """After executor.shutdown(), post() falls back to synchronous execution.""" + """After executor.shutdown(), post() raises RuntimeError.""" executor = _make_executor() executor.shutdown(wait=True) mock_run = _make_mock_run() tree = _ReplaySafeRunTree(mock_run, executor=executor) - # Should not raise RuntimeError, should fall back to sync - tree.post() - - mock_run.post.assert_called_once() + with pytest.raises(RuntimeError): + tree.post() @patch(_PATCH_IS_REPLAYING, return_value=False) @patch(_PATCH_IN_WORKFLOW, return_value=True) - def test_patch_falls_back_to_sync_after_shutdown( + def test_patch_raises_after_shutdown( self, _mock_in_wf: Any, _mock_replaying: Any ) -> None: - """After executor.shutdown(), patch() falls back to synchronous execution.""" + """After executor.shutdown(), patch() raises RuntimeError.""" executor = _make_executor() executor.shutdown(wait=True) mock_run = _make_mock_run() tree = _ReplaySafeRunTree(mock_run, executor=executor) - tree.patch() - - mock_run.patch.assert_called_once() + with pytest.raises(RuntimeError): + tree.patch() # =================================================================== diff --git a/tests/contrib/langsmith/test_plugin.py b/tests/contrib/langsmith/test_plugin.py index c6d81176f..c54df7d1c 100644 --- a/tests/contrib/langsmith/test_plugin.py +++ b/tests/contrib/langsmith/test_plugin.py @@ -3,15 +3,18 @@ from __future__ import annotations import uuid -from typing import Any +from typing import Any, cast from unittest.mock import MagicMock import pytest from langsmith import traceable, tracing_context +import temporalio.worker from temporalio.client import 
Client, WorkflowHandle from temporalio.contrib.langsmith import LangSmithInterceptor, LangSmithPlugin +from temporalio.contrib.langsmith._plugin import _ClientOnlyLangSmithInterceptor from temporalio.testing import WorkflowEnvironment +from temporalio.worker.workflow_sandbox import SandboxedWorkflowRunner from tests.contrib.langsmith.conftest import dump_traces from tests.contrib.langsmith.test_integration import ( ComprehensiveWorkflow, @@ -42,7 +45,9 @@ def test_construction_stores_all_config(self) -> None: ) assert plugin.interceptors is not None assert len(plugin.interceptors) > 0 - interceptor = plugin.interceptors[0] + wrapper = plugin.interceptors[0] + assert isinstance(wrapper, _ClientOnlyLangSmithInterceptor) + interceptor = wrapper._interceptor assert isinstance(interceptor, LangSmithInterceptor) assert interceptor._client is mock_client assert interceptor._project_name == "my-project" @@ -50,6 +55,114 @@ def test_construction_stores_all_config(self) -> None: assert interceptor._default_metadata == {"env": "prod"} assert interceptor._default_tags == ["v1"] + def test_construction_without_client(self) -> None: + """Plugin creates a LangSmith client when none is provided.""" + plugin = LangSmithPlugin() + assert plugin.interceptors is not None + wrapper = plugin.interceptors[0] + assert isinstance(wrapper, _ClientOnlyLangSmithInterceptor) + assert wrapper._interceptor._client is not None + + def test_configure_worker_creates_fresh_interceptor(self) -> None: + """Each configure_worker call produces a distinct LangSmithInterceptor.""" + mock_client = MagicMock() + plugin = LangSmithPlugin( + client=mock_client, + project_name="test-project", + add_temporal_runs=True, + metadata={"k": "v"}, + tags=["t1"], + ) + + # Build a minimal worker config — super().configure_worker needs + # config["client"].config(active_config=True) and a workflow_runner + mock_temporal_client = MagicMock() + mock_temporal_client.config.return_value = {} + base_config: dict[str, Any] = 
{ + "client": mock_temporal_client, + "workflow_runner": SandboxedWorkflowRunner(), + } + + config1 = plugin.configure_worker(cast(Any, dict(base_config))) + config2 = plugin.configure_worker(cast(Any, dict(base_config))) + + interceptors1 = [ + i + for i in config1.get("interceptors", []) + if isinstance(i, LangSmithInterceptor) + ] + interceptors2 = [ + i + for i in config2.get("interceptors", []) + if isinstance(i, LangSmithInterceptor) + ] + assert len(interceptors1) == 1 + assert len(interceptors2) == 1 + assert interceptors1[0] is not interceptors2[0] + + # Verify the fresh interceptor has the correct config + fresh = interceptors1[0] + assert fresh._client is mock_client + assert fresh._project_name == "test-project" + assert fresh._add_temporal_runs is True + assert fresh._default_metadata == {"k": "v"} + assert fresh._default_tags == ["t1"] + + def test_wrapper_not_worker_interceptor(self) -> None: + """The client-only wrapper must not be a worker.Interceptor.""" + mock_client = MagicMock() + plugin = LangSmithPlugin(client=mock_client) + assert plugin.interceptors is not None + wrapper = plugin.interceptors[0] + assert isinstance(wrapper, _ClientOnlyLangSmithInterceptor) + assert not isinstance(wrapper, temporalio.worker.Interceptor) + + def test_no_duplicate_interceptors_after_merge(self) -> None: + """Simulating the full Worker merge flow yields exactly one LangSmithInterceptor.""" + mock_client = MagicMock() + plugin = LangSmithPlugin(client=mock_client, add_temporal_runs=True) + + # Step 1: configure_client — adds the wrapper to client interceptors + client_config = plugin.configure_client(cast(Any, {"interceptors": []})) + + # Step 2: build worker config where config["client"].config(active_config=True) + # returns the client config (so the wrapper is in the client's interceptors) + mock_temporal_client = MagicMock() + mock_temporal_client.config.return_value = client_config + + # Step 3: configure_worker — adds a fresh LangSmithInterceptor + 
worker_config = plugin.configure_worker( + cast( + Any, + { + "client": mock_temporal_client, + "interceptors": [], + "workflow_runner": SandboxedWorkflowRunner(), + }, + ) + ) + + # Step 4: simulate _init_from_config merge (worker/_worker.py:435-441) + client_interceptors = client_config.get("interceptors", []) + interceptors_from_client = [ + i + for i in client_interceptors + if isinstance(i, temporalio.worker.Interceptor) + ] + assert ( + len(interceptors_from_client) == 0 + ), "Wrapper should not pass worker.Interceptor filter" + final_interceptors = interceptors_from_client + list( + worker_config.get("interceptors", []) + ) + + # The wrapper should NOT pass the isinstance filter, so only the fresh + # one from configure_worker should be present + langsmith_interceptors = [ + i for i in final_interceptors if isinstance(i, LangSmithInterceptor) + ] + assert len(langsmith_interceptors) == 1 + class TestPluginIntegration: """End-to-end test using LangSmithPlugin as a Temporal client plugin.""" From a232d16fc59ea4015bad78a646ca64746044cd74 Mon Sep 17 00:00:00 2001 From: Maple Xu Date: Fri, 3 Apr 2026 13:14:57 -0400 Subject: [PATCH 25/30] Remove unnecessary sandbox_unrestricted from post/patch in _ReplaySafeRunTree executor.submit() is not blocked by the workflow sandbox, so the sandbox_unrestricted context manager around _submit calls in post() and patch() was unnecessary. Removes the wrappers and corresponding unit test assertions. 
Co-Authored-By: Claude Opus 4.6 (1M context) --- temporalio/contrib/langsmith/_interceptor.py | 6 ++---- tests/contrib/langsmith/test_interceptor.py | 16 ---------------- 2 files changed, 2 insertions(+), 20 deletions(-) diff --git a/temporalio/contrib/langsmith/_interceptor.py b/temporalio/contrib/langsmith/_interceptor.py index f31e62542..99dd211dd 100644 --- a/temporalio/contrib/langsmith/_interceptor.py +++ b/temporalio/contrib/langsmith/_interceptor.py @@ -310,8 +310,7 @@ def post(self, exclude_child_runs: bool = True) -> None: if temporalio.workflow.in_workflow(): if _is_replaying(): return - with temporalio.workflow.unsafe.sandbox_unrestricted(): - self._submit(self._run.post, exclude_child_runs=exclude_child_runs) + self._submit(self._run.post, exclude_child_runs=exclude_child_runs) else: self._run.post(exclude_child_runs=exclude_child_runs) @@ -333,8 +332,7 @@ def patch(self, *, exclude_inputs: bool = False) -> None: if temporalio.workflow.in_workflow(): if _is_replaying(): return - with temporalio.workflow.unsafe.sandbox_unrestricted(): - self._submit(self._run.patch, exclude_inputs=exclude_inputs) + self._submit(self._run.patch, exclude_inputs=exclude_inputs) else: self._run.patch(exclude_inputs=exclude_inputs) diff --git a/tests/contrib/langsmith/test_interceptor.py b/tests/contrib/langsmith/test_interceptor.py index 58c506d52..47ec8eabc 100644 --- a/tests/contrib/langsmith/test_interceptor.py +++ b/tests/contrib/langsmith/test_interceptor.py @@ -31,7 +31,6 @@ _PATCH_IS_REPLAYING = f"{_MOD}.temporalio.workflow.unsafe.is_replaying_history_events" _PATCH_WF_NOW = f"{_MOD}.temporalio.workflow.now" _PATCH_WF_INFO = f"{_MOD}.temporalio.workflow.info" -_PATCH_SANDBOX = f"{_MOD}.temporalio.workflow.unsafe.sandbox_unrestricted" _PATCH_TRACING_CTX = f"{_MOD}.tracing_context" _PATCH_EXTRACT_NEXUS = f"{_MOD}._extract_nexus_context" _PATCH_INJECT_NEXUS = f"{_MOD}._inject_nexus_context" @@ -617,7 +616,6 @@ def _make_workflow_interceptors( @pytest.mark.asyncio 
@patch(_PATCH_WF_NOW, return_value=datetime.now(timezone.utc)) - @patch(_PATCH_SANDBOX) @patch(_PATCH_RUNTREE) @patch(_PATCH_IS_REPLAYING, return_value=False) @patch(_PATCH_IN_WORKFLOW, return_value=True) @@ -628,7 +626,6 @@ async def test_execute_workflow( _mock_in_wf: Any, _mock_replaying: Any, MockRunTree: Any, - mock_sandbox: Any, _mock_now: Any, ) -> None: """execute_workflow creates a run named RunWorkflow:{workflow_type}.""" @@ -650,8 +647,6 @@ async def test_execute_workflow( "temporalWorkflowID": "test-wf-id", "temporalRunID": "test-run-id", } - # Verify sandbox_unrestricted was called (for post/patch inside workflow) - mock_sandbox.assert_called() # Verify super() called and result passed through mock_next.execute_workflow.assert_called_once() assert result == "wf_result" @@ -673,7 +668,6 @@ async def test_execute_workflow( ) @pytest.mark.asyncio @patch(_PATCH_WF_NOW, return_value=datetime.now(timezone.utc)) - @patch(_PATCH_SANDBOX) @patch(_PATCH_RUNTREE) @patch(_PATCH_IS_REPLAYING, return_value=False) @patch(_PATCH_IN_WORKFLOW, return_value=True) @@ -684,7 +678,6 @@ async def test_handler_creates_trace( _mock_in_wf: Any, _mock_replaying: Any, MockRunTree: Any, - mock_sandbox: Any, _mock_now: Any, method: str, input_attr: str, @@ -706,7 +699,6 @@ async def test_handler_creates_trace( await result assert _get_runtree_name(MockRunTree) == expected_name - mock_sandbox.assert_called() getattr(mock_next, method).assert_called_once() @@ -806,7 +798,6 @@ def _make_outbound_interceptor( ) @pytest.mark.asyncio @patch(_PATCH_WF_NOW, return_value=datetime.now(timezone.utc)) - @patch(_PATCH_SANDBOX) @patch(_PATCH_RUNTREE) @patch(_PATCH_IS_REPLAYING, return_value=False) @patch(_PATCH_IN_WORKFLOW, return_value=True) @@ -815,7 +806,6 @@ async def test_creates_trace_and_injects_headers( _mock_in_wf: Any, _mock_replaying: Any, MockRunTree: Any, - mock_sandbox: Any, _mock_now: Any, method: str, input_attr: str, @@ -838,7 +828,6 @@ async def 
test_creates_trace_and_injects_headers( assert _get_runtree_name(MockRunTree) == expected_name assert HEADER_KEY in mock_input.headers - mock_sandbox.assert_called() getattr(mock_next, method).assert_called_once() @patch(_PATCH_RUNTREE) @@ -864,7 +853,6 @@ def test_continue_as_new( @pytest.mark.asyncio @patch(_PATCH_WF_NOW, return_value=datetime.now(timezone.utc)) - @patch(_PATCH_SANDBOX) @patch(_PATCH_RUNTREE) @patch(_PATCH_IS_REPLAYING, return_value=False) @patch(_PATCH_IN_WORKFLOW, return_value=True) @@ -873,7 +861,6 @@ async def test_start_nexus_operation( _mock_in_wf: Any, _mock_replaying: Any, MockRunTree: Any, - mock_sandbox: Any, _mock_now: Any, ) -> None: """start_nexus_operation creates a trace named StartNexusOperation:{service}/{operation}.""" @@ -892,7 +879,6 @@ async def test_start_nexus_operation( assert _get_runtree_name(MockRunTree) == "StartNexusOperation:MyService/do_op" # Nexus uses string headers, so context injection uses _inject_nexus_context # The headers dict should be modified - mock_sandbox.assert_called() mock_next.start_nexus_operation.assert_called_once() @@ -1044,7 +1030,6 @@ def test_false_skips_traces(self, _mock_in_wf: Any, MockRunTree: Any) -> None: @pytest.mark.asyncio @patch(_PATCH_TRACING_CTX) - @patch(_PATCH_SANDBOX) @patch(_PATCH_RUNTREE) @patch(_PATCH_IS_REPLAYING, return_value=False) @patch(_PATCH_IN_WORKFLOW, return_value=True) @@ -1057,7 +1042,6 @@ async def test_false_still_propagates_context( _mock_in_wf: Any, _mock_replaying: Any, MockRunTree: Any, - _mock_sandbox: Any, mock_tracing_ctx: Any, ) -> None: """With add_temporal_runs=False, no runs are created but context still propagates. 
From 5bdc3f40af01a6baa0d2a68964f0e0743674d0c3 Mon Sep 17 00:00:00 2001 From: Maple Xu Date: Fri, 3 Apr 2026 13:49:48 -0400 Subject: [PATCH 26/30] Rename _ContextBridgeRunTree to _RootReplaySafeRunTreeFactory MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit The old name was misleading — it doesn't bridge contexts. It's a factory that sits in the LangSmith tracing context as a placeholder parent so @traceable can call create_child(), producing independent root _ReplaySafeRunTree instances with no parent link. Also removes unnecessary sandbox_unrestricted from post/patch since executor.submit() is not blocked by the workflow sandbox. Co-Authored-By: Claude Opus 4.6 (1M context) --- temporalio/contrib/langsmith/_interceptor.py | 76 ++++++++++-------- tests/contrib/langsmith/test_background_io.py | 80 +++++++++---------- tests/contrib/langsmith/test_integration.py | 42 +++++----- 3 files changed, 104 insertions(+), 94 deletions(-) diff --git a/temporalio/contrib/langsmith/_interceptor.py b/temporalio/contrib/langsmith/_interceptor.py index 99dd211dd..eccc3ae70 100644 --- a/temporalio/contrib/langsmith/_interceptor.py +++ b/temporalio/contrib/langsmith/_interceptor.py @@ -133,11 +133,12 @@ def _extract_nexus_context( def _get_current_run_for_propagation() -> RunTree | None: """Get the current ambient run for context propagation. - Filters out _ContextBridgeRunTree, which is internal scaffolding - that should never be serialized into headers or used as parent runs. + Filters out ``_RootReplaySafeRunTreeFactory``, which is internal + scaffolding that should never be serialized into headers or used as + parent runs. 
""" run = get_current_run_tree() - if isinstance(run, _ContextBridgeRunTree): + if isinstance(run, _RootReplaySafeRunTreeFactory): return None return run @@ -337,18 +338,26 @@ def patch(self, *, exclude_inputs: bool = False) -> None: self._run.patch(exclude_inputs=exclude_inputs) -class _ContextBridgeRunTree(_ReplaySafeRunTree): - """Invisible parent placeholder for ``add_temporal_runs=False`` without propagated context. +class _RootReplaySafeRunTreeFactory(_ReplaySafeRunTree): + """Factory that produces independent root ``_ReplaySafeRunTree`` instances with no parent link. When ``add_temporal_runs=False`` and no parent was propagated via headers, - ``@traceable`` functions still need a parent context to create - ``_ReplaySafeRunTree`` children. This placeholder provides that context - without appearing in LangSmith — its children become root runs. - - Never posted, patched, or ended — no trace of it exists in LangSmith. - ``create_child()`` creates root ``_ReplaySafeRunTree`` objects (no - ``parent_run_id``) so that ``@traceable`` calls appear as independent - root runs. + ``@traceable`` functions still need *something* in the LangSmith + ``tracing_context`` to call ``create_child()`` on — otherwise they + cannot create ``_ReplaySafeRunTree`` children at all and instead default to + creating generic ``RunTree``s, which are not replay safe. This class fills + that role: it sits in the context as the nominal parent so + ``@traceable`` has a ``create_child()`` target. + + However, ``create_child()`` deliberately creates fresh ``RunTree`` + instances with **no** ``parent_run_id``. This means every child appears + as an independent root run in LangSmith rather than being nested under + a phantom parent that was never meant to be visible. + + ``post()``, ``patch()``, and ``end()`` all raise ``RuntimeError`` + because this object is purely internal scaffolding — it must never + appear in LangSmith. If any of these methods are called, it indicates + a programming error. 
""" def __init__( # pyright: ignore[reportMissingSuperCall] @@ -359,38 +368,38 @@ def __init__( # pyright: ignore[reportMissingSuperCall] session_name: str | None = None, replicas: Sequence[WriteReplica] | None = None, ) -> None: - """Create a context bridge with the given LangSmith client.""" - # Create a minimal RunTree for the bridge — it will never be posted - bridge_run = RunTree( - name="__bridge__", + """Create a root factory with the given LangSmith client.""" + # Create a minimal RunTree for the factory — it will never be posted + factory_run = RunTree( + name="__root_factory__", run_type="chain", ls_client=ls_client, ) if session_name is not None: - bridge_run.session_name = session_name + factory_run.session_name = session_name if replicas is not None: - bridge_run.replicas = replicas - object.__setattr__(self, "_run", bridge_run) + factory_run.replicas = replicas + object.__setattr__(self, "_run", factory_run) object.__setattr__(self, "_executor", executor) def post(self, exclude_child_runs: bool = True) -> NoReturn: - """Bridge must never be posted.""" - raise RuntimeError("ContextBridgeRunTree must never be posted") + """Factory must never be posted.""" + raise RuntimeError("_RootReplaySafeRunTreeFactory must never be posted") def patch(self, *, exclude_inputs: bool = False) -> NoReturn: - """Bridge must never be patched.""" - raise RuntimeError("ContextBridgeRunTree must never be patched") + """Factory must never be patched.""" + raise RuntimeError("_RootReplaySafeRunTreeFactory must never be patched") def end(self, **kwargs: Any) -> NoReturn: - """Bridge must never be ended.""" - raise RuntimeError("ContextBridgeRunTree must never be ended") + """Factory must never be ended.""" + raise RuntimeError("_RootReplaySafeRunTreeFactory must never be ended") def create_child(self, *args: Any, **kwargs: Any) -> _ReplaySafeRunTree: """Create a root _ReplaySafeRunTree (no parent_run_id). 
Creates a fresh ``RunTree(...)`` directly (bypassing ``self._run.create_child``) so children are independent root runs - with no link back to the bridge. + with no link back to the factory. """ self._inject_deterministic_ids(kwargs) @@ -399,7 +408,7 @@ def create_child(self, *args: Any, **kwargs: Any) -> _ReplaySafeRunTree: if "run_id" in kwargs: kwargs["id"] = kwargs.pop("run_id") - # Inherit ls_client and session_name from bridge. + # Inherit ls_client and session_name from factory. # session_name must be passed at construction time. kwargs.setdefault("ls_client", self._run.ls_client) kwargs.setdefault("session_name", self._run.session_name) @@ -752,16 +761,17 @@ def _workflow_maybe_run( "enabled": True, } # When add_temporal_runs=False and no external parent, create a - # _ContextBridgeRunTree so @traceable calls get a _ReplaySafeRunTree - # parent via create_child. The bridge is invisible in LangSmith. - bridge: _ContextBridgeRunTree | None = None + # _RootReplaySafeRunTreeFactory so @traceable calls get a + # _ReplaySafeRunTree parent via create_child. The factory is + # invisible in LangSmith. + factory: _RootReplaySafeRunTreeFactory | None = None if not self._config._add_temporal_runs and parent is None: - bridge = _ContextBridgeRunTree( + factory = _RootReplaySafeRunTreeFactory( ls_client=self._config._client, executor=self._config._executor, session_name=self._config._project_name, ) - ctx_kwargs["parent"] = bridge + ctx_kwargs["parent"] = factory ctx_kwargs["project_name"] = self._config._project_name elif parent: ctx_kwargs["parent"] = parent diff --git a/tests/contrib/langsmith/test_background_io.py b/tests/contrib/langsmith/test_background_io.py index faca0e581..b43418f13 100644 --- a/tests/contrib/langsmith/test_background_io.py +++ b/tests/contrib/langsmith/test_background_io.py @@ -1,4 +1,4 @@ -"""Unit tests for _ReplaySafeRunTree and _ContextBridgeRunTree. +"""Unit tests for _ReplaySafeRunTree and _RootReplaySafeRunTreeFactory. 
Covers create_child propagation, executor-backed post/patch, replay suppression, and post-shutdown fallback. @@ -18,8 +18,8 @@ from langsmith.run_trees import RunTree from temporalio.contrib.langsmith._interceptor import ( - _ContextBridgeRunTree, _ReplaySafeRunTree, + _RootReplaySafeRunTreeFactory, _uuid_from_random, ) @@ -377,42 +377,42 @@ def test_end_delegates_during_normal_execution( # =================================================================== -# TestContextBridgeRunTree +# TestRootReplaySafeRunTreeFactory # =================================================================== -class TestContextBridgeRunTree: - """Tests for _ContextBridgeRunTree subclass.""" +class TestRootReplaySafeRunTreeFactory: + """Tests for _RootReplaySafeRunTreeFactory subclass.""" - def _make_bridge(self, **kwargs: Any) -> _ContextBridgeRunTree: - """Create a _ContextBridgeRunTree for testing.""" - from temporalio.contrib.langsmith._interceptor import _ContextBridgeRunTree + def _make_factory(self, **kwargs: Any) -> _RootReplaySafeRunTreeFactory: + """Create a _RootReplaySafeRunTreeFactory for testing.""" + from temporalio.contrib.langsmith._interceptor import ( + _RootReplaySafeRunTreeFactory, + ) executor = kwargs.pop("executor", _make_executor()) mock_client = kwargs.pop("ls_client", MagicMock()) - return _ContextBridgeRunTree( - ls_client=mock_client, - executor=executor, - **kwargs, + return _RootReplaySafeRunTreeFactory( + ls_client=mock_client, executor=executor, **kwargs ) def test_post_raises_runtime_error(self) -> None: - """Bridge's post() raises RuntimeError — bridge must never be posted.""" - bridge = self._make_bridge() + """Factory's post() raises RuntimeError — factory must never be posted.""" + factory = self._make_factory() with pytest.raises(RuntimeError, match="must never be posted"): - bridge.post() + factory.post() def test_patch_raises_runtime_error(self) -> None: - """Bridge's patch() raises RuntimeError — bridge must never be patched.""" - bridge = 
self._make_bridge() + """Factory's patch() raises RuntimeError — factory must never be patched.""" + factory = self._make_factory() with pytest.raises(RuntimeError, match="must never be patched"): - bridge.patch() + factory.patch() def test_end_raises_runtime_error(self) -> None: - """Bridge's end() raises RuntimeError — bridge must never be ended.""" - bridge = self._make_bridge() + """Factory's end() raises RuntimeError — factory must never be ended.""" + factory = self._make_factory() with pytest.raises(RuntimeError, match="must never be ended"): - bridge.end(outputs={"status": "ok"}) + factory.end(outputs={"status": "ok"}) @patch(_PATCH_GET_WF_RANDOM) @patch(_PATCH_WF_NOW) @@ -423,7 +423,7 @@ def test_create_child_returns_root_replay_safe_run_tree( mock_now: Any, mock_get_random: Any, ) -> None: - """Bridge's create_child creates a root _ReplaySafeRunTree (no parent_run_id).""" + """Factory's create_child creates a root _ReplaySafeRunTree (no parent_run_id).""" import random as stdlib_random mock_get_random.return_value = stdlib_random.Random(42) @@ -431,20 +431,20 @@ def test_create_child_returns_root_replay_safe_run_tree( executor = _make_executor() mock_client = MagicMock() - bridge = self._make_bridge(ls_client=mock_client, executor=executor) + factory = self._make_factory(ls_client=mock_client, executor=executor) - child = bridge.create_child(name="traceable-fn", run_type="chain") + child = factory.create_child(name="traceable-fn", run_type="chain") assert isinstance(child, _ReplaySafeRunTree) # Child should be a root run — no parent_run_id assert child._run.parent_run_id is None def test_create_child_inherits_client_session_and_replicas(self) -> None: - """Bridge's children inherit ls_client, session_name, replicas.""" + """Factory's children inherit ls_client, session_name, replicas.""" executor = _make_executor() mock_client = MagicMock() mock_replicas = [MagicMock(), MagicMock()] - bridge = self._make_bridge( + factory = self._make_factory( 
ls_client=mock_client, executor=executor, session_name="my-project", @@ -452,37 +452,37 @@ def test_create_child_inherits_client_session_and_replicas(self) -> None: ) with patch(_PATCH_IN_WORKFLOW, return_value=False): - child = bridge.create_child(name="traceable-fn", run_type="chain") + child = factory.create_child(name="traceable-fn", run_type="chain") assert isinstance(child, _ReplaySafeRunTree) - # Child should have the bridge's ls_client, session_name, and replicas + # Child should have the factory's ls_client, session_name, and replicas assert child.ls_client is mock_client assert child.session_name == "my-project" assert child.replicas is mock_replicas def test_create_child_propagates_executor(self) -> None: - """Bridge propagates executor to children.""" + """Factory propagates executor to children.""" executor = _make_executor() - bridge = self._make_bridge(executor=executor) + factory = self._make_factory(executor=executor) with patch(_PATCH_IN_WORKFLOW, return_value=False): - child = bridge.create_child(name="traceable-fn", run_type="chain") + child = factory.create_child(name="traceable-fn", run_type="chain") assert isinstance(child, _ReplaySafeRunTree) assert child._executor is executor def test_create_child_maps_run_id_to_id(self) -> None: - """Bridge's create_child maps run_id kwarg to id on the resulting RunTree. + """Factory's create_child maps run_id kwarg to id on the resulting RunTree. The run_id kwarg is mapped to id, matching LangSmith's RunTree.create_child convention (run_trees.py:545). 
""" executor = _make_executor() - bridge = self._make_bridge(executor=executor) + factory = self._make_factory(executor=executor) explicit_id = uuid.uuid4() with patch(_PATCH_IN_WORKFLOW, return_value=False): - child = bridge.create_child( + child = factory.create_child( name="traceable-fn", run_type="chain", run_id=explicit_id ) @@ -490,16 +490,16 @@ def test_create_child_maps_run_id_to_id(self) -> None: # The underlying RunTree should have id set to the passed run_id assert child._run.id == explicit_id - def test_bridge_not_in_collected_runs(self) -> None: - """Bridge's post/patch/end raise RuntimeError — bridge is never traced.""" - bridge = self._make_bridge() + def test_factory_not_in_collected_runs(self) -> None: + """Factory's post/patch/end raise RuntimeError — factory is never traced.""" + factory = self._make_factory() with pytest.raises(RuntimeError): - bridge.post() + factory.post() with pytest.raises(RuntimeError): - bridge.patch() + factory.patch() with pytest.raises(RuntimeError): - bridge.end() + factory.end() # =================================================================== diff --git a/tests/contrib/langsmith/test_integration.py b/tests/contrib/langsmith/test_integration.py index 202a1546e..3decaf4e6 100644 --- a/tests/contrib/langsmith/test_integration.py +++ b/tests/contrib/langsmith/test_integration.py @@ -884,7 +884,7 @@ async def user_pipeline() -> WorkflowHandle[Any, Any]: # --------------------------------------------------------------------------- -# TestBackgroundIOIntegration — _ContextBridgeRunTree + sync @traceable +# TestBackgroundIOIntegration — _RootReplaySafeRunTreeFactory + sync @traceable # --------------------------------------------------------------------------- @@ -907,12 +907,12 @@ async def _async_calls_sync(prompt: str) -> str: @workflow.defn -class BridgeTraceableWorkflow: - """Workflow exercising _ContextBridgeRunTree with async, sync, and mixed @traceable. 
+class FactoryTraceableWorkflow: + """Workflow exercising _RootReplaySafeRunTreeFactory with async, sync, and mixed @traceable. Covers three code paths through create_child: - async→async nesting - - sync→sync nesting (sync @traceable entry to bridge) + - sync→sync nesting (sync @traceable entry to factory) - async→sync nesting (cross-boundary case) """ @@ -932,23 +932,23 @@ async def run(self) -> str: class TestBackgroundIOIntegration: """Integration tests for workflows using add_temporal_runs=False without external context. - Exercises the _ContextBridgeRunTree path with sync, async, and mixed @traceable + Exercises the _RootReplaySafeRunTreeFactory path with sync, async, and mixed @traceable nesting. Verifies root-run creation, correct nesting hierarchy, and replay safety. """ - async def test_bridge_traceable_no_external_context( + async def test_factory_traceable_no_external_context( self, client: Client, env: WorkflowEnvironment, # type:ignore[reportUnusedParameter] ) -> None: - """Exercises _ContextBridgeRunTree: add_temporal_runs=False, no external context. + """Exercises _RootReplaySafeRunTreeFactory: add_temporal_runs=False, no external context. Uses a workflow with async→async, sync→sync, and async→sync @traceable nesting, plus an activity with nested @traceable. 
Verifies: - - Each top-level @traceable becomes a root run (bridge creates root children) + - Each top-level @traceable becomes a root run (factory creates root children) - Nested @traceable calls nest correctly under their parent - Activity @traceable also produces correct hierarchy - - No phantom bridge run appears in collected runs + - No phantom factory run appears in collected runs - No duplicate run IDs after replay (max_cached_workflows=0) """ temporal_client, collector, _ = _make_client_and_collector( @@ -957,13 +957,13 @@ async def test_bridge_traceable_no_external_context( async with new_worker( temporal_client, - BridgeTraceableWorkflow, + FactoryTraceableWorkflow, activities=[nested_traceable_activity], max_cached_workflows=0, ) as worker: handle = await temporal_client.start_workflow( - BridgeTraceableWorkflow.run, - id=f"bridge-{uuid.uuid4()}", + FactoryTraceableWorkflow.run, + id=f"factory-{uuid.uuid4()}", task_queue=worker.task_queue, ) result = await handle.result() @@ -995,25 +995,25 @@ async def test_bridge_traceable_no_external_context( set(run_ids) ), f"Duplicate run IDs found (replay issue): {run_ids}" - async def test_bridge_passes_project_name_to_children( + async def test_factory_passes_project_name_to_children( self, client: Client, env: WorkflowEnvironment, # type:ignore[reportUnusedParameter] ) -> None: - """Bridge children inherit project_name (session_name) from plugin config.""" + """Factory children inherit project_name (session_name) from plugin config.""" temporal_client, _collector, mock_ls_client = _make_client_and_collector( client, add_temporal_runs=False, project_name="my-ls-project" ) async with new_worker( temporal_client, - BridgeTraceableWorkflow, + FactoryTraceableWorkflow, activities=[nested_traceable_activity], max_cached_workflows=0, ) as worker: handle = await temporal_client.start_workflow( - BridgeTraceableWorkflow.run, - id=f"bridge-proj-{uuid.uuid4()}", + FactoryTraceableWorkflow.run, + 
id=f"factory-proj-{uuid.uuid4()}", task_queue=worker.task_queue, ) await handle.result() @@ -1044,12 +1044,12 @@ async def test_mixed_sync_async_traceable_with_temporal_runs( async with new_worker( temporal_client, - BridgeTraceableWorkflow, + FactoryTraceableWorkflow, activities=[nested_traceable_activity], max_cached_workflows=0, ) as worker: handle = await temporal_client.start_workflow( - BridgeTraceableWorkflow.run, + FactoryTraceableWorkflow.run, id=f"mixed-temporal-{uuid.uuid4()}", task_queue=worker.task_queue, ) @@ -1064,8 +1064,8 @@ async def test_mixed_sync_async_traceable_with_temporal_runs( # With add_temporal_runs=True, Temporal operations get their own runs. # @traceable calls nest under the RunWorkflow run. expected = [ - "StartWorkflow:BridgeTraceableWorkflow", - " RunWorkflow:BridgeTraceableWorkflow", + "StartWorkflow:FactoryTraceableWorkflow", + " RunWorkflow:FactoryTraceableWorkflow", " outer_chain", " inner_llm_call", " sync_outer_chain", From c6db2348647f309755c593f508cbc5f15b89f1e4 Mon Sep 17 00:00:00 2001 From: Maple Xu Date: Fri, 3 Apr 2026 14:18:18 -0400 Subject: [PATCH 27/30] Rename overloaded kwargs/ctx_kwargs variables in LangSmith interceptor MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Rename manually constructed dicts to more descriptive names: - kwargs → run_tree_args (used to build RunTree instances) - ctx_kwargs → tracing_args (used to build tracing_context calls) Co-Authored-By: Claude Opus 4.6 (1M context) --- temporalio/contrib/langsmith/_interceptor.py | 50 ++++++++++---------- 1 file changed, 25 insertions(+), 25 deletions(-) diff --git a/temporalio/contrib/langsmith/_interceptor.py b/temporalio/contrib/langsmith/_interceptor.py index eccc3ae70..80d9d49b9 100644 --- a/temporalio/contrib/langsmith/_interceptor.py +++ b/temporalio/contrib/langsmith/_interceptor.py @@ -477,7 +477,7 @@ def _maybe_run( if parent is None: parent = _get_current_run_for_propagation() - kwargs: dict[str, 
Any] = dict( + run_tree_args: dict[str, Any] = dict( name=name, run_type=run_type, inputs=inputs or {}, @@ -490,20 +490,20 @@ def _maybe_run( # returns None. Deterministic IDs aren't needed — these aren't replayed. # LangSmith will auto-generate a random UUID. if rng is not None: - kwargs["id"] = _uuid_from_random(rng) - kwargs["start_time"] = temporalio.workflow.now() + run_tree_args["id"] = _uuid_from_random(rng) + run_tree_args["start_time"] = temporalio.workflow.now() if project_name is not None: - kwargs["project_name"] = project_name + run_tree_args["project_name"] = project_name if parent is not None: # Unwrap _ReplaySafeRunTree so RunTree gets the real parent - kwargs["parent_run"] = ( + run_tree_args["parent_run"] = ( parent._run if isinstance(parent, _ReplaySafeRunTree) else parent ) if metadata: - kwargs["extra"] = {"metadata": metadata} + run_tree_args["extra"] = {"metadata": metadata} if tags: - kwargs["tags"] = tags - run_tree = _ReplaySafeRunTree(RunTree(**kwargs), executor=executor) + run_tree_args["tags"] = tags + run_tree = _ReplaySafeRunTree(RunTree(**run_tree_args), executor=executor) run_tree.post() try: with tracing_context(parent=run_tree, client=client): @@ -702,15 +702,15 @@ async def execute_activity( # rather than lazily creating one with different configuration. 
if parent is not None and hasattr(parent, "ls_client"): parent.ls_client = self._config._client - ctx_kwargs: dict[str, Any] = { + tracing_args: dict[str, Any] = { "client": self._config._client, "enabled": True, } if self._config._project_name: - ctx_kwargs["project_name"] = self._config._project_name + tracing_args["project_name"] = self._config._project_name if parent: - ctx_kwargs["parent"] = parent - with tracing_context(**ctx_kwargs): + tracing_args["parent"] = parent + with tracing_context(**tracing_args): with self._config.maybe_run( f"RunActivity:{info.activity_type}", run_type="tool", @@ -756,7 +756,7 @@ def _workflow_maybe_run( "temporalWorkflowID": info.workflow_id, "temporalRunID": info.run_id, } - ctx_kwargs: dict[str, Any] = { + tracing_args: dict[str, Any] = { "client": self._config._client, "enabled": True, } @@ -771,11 +771,11 @@ def _workflow_maybe_run( executor=self._config._executor, session_name=self._config._project_name, ) - ctx_kwargs["parent"] = factory - ctx_kwargs["project_name"] = self._config._project_name + tracing_args["parent"] = factory + tracing_args["project_name"] = self._config._project_name elif parent: - ctx_kwargs["parent"] = parent - with tracing_context(**ctx_kwargs): + tracing_args["parent"] = parent + with tracing_context(**tracing_args): with self._config.maybe_run( name, parent=parent, @@ -925,15 +925,15 @@ async def execute_nexus_operation_start( parent = _extract_nexus_context(input.ctx.headers, self._config._executor) if parent is not None and hasattr(parent, "ls_client"): parent.ls_client = self._config._client - ctx_kwargs: dict[str, Any] = { + tracing_args: dict[str, Any] = { "client": self._config._client, "enabled": True, } if self._config._project_name: - ctx_kwargs["project_name"] = self._config._project_name + tracing_args["project_name"] = self._config._project_name if parent: - ctx_kwargs["parent"] = parent - with tracing_context(**ctx_kwargs): + tracing_args["parent"] = parent + with 
tracing_context(**tracing_args): with self._config.maybe_run( f"RunStartNexusOperationHandler:{input.ctx.service}/{input.ctx.operation}", run_type="tool", @@ -947,15 +947,15 @@ async def execute_nexus_operation_cancel( parent = _extract_nexus_context(input.ctx.headers, self._config._executor) if parent is not None and hasattr(parent, "ls_client"): parent.ls_client = self._config._client - ctx_kwargs: dict[str, Any] = { + tracing_args: dict[str, Any] = { "client": self._config._client, "enabled": True, } if self._config._project_name: - ctx_kwargs["project_name"] = self._config._project_name + tracing_args["project_name"] = self._config._project_name if parent: - ctx_kwargs["parent"] = parent - with tracing_context(**ctx_kwargs): + tracing_args["parent"] = parent + with tracing_context(**tracing_args): with self._config.maybe_run( f"RunCancelNexusOperationHandler:{input.ctx.service}/{input.ctx.operation}", run_type="tool", From 0fc2ab3dad3b28edd3d86a93733d3f4155796c47 Mon Sep 17 00:00:00 2001 From: Maple Xu Date: Fri, 3 Apr 2026 15:27:11 -0400 Subject: [PATCH 28/30] Clean up parent post-processing in LangSmith interceptor MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit - _extract_context / _extract_nexus_context now accept ls_client and return fully-formed parents, eliminating 4 call-site fix-ups - Remove unnecessary _ReplaySafeRunTree unwrap in _make_run — RunTree only accesses .id/.dotted_order/.trace_id which delegate transparently - Simplify tracing_args construction by always including project_name and parent (tracing_context treats None same as absent) - Clean up _workflow_maybe_run: eliminate intermediate factory/ tracing_parent variables with single conditional expression Co-Authored-By: Claude Opus 4.6 (1M context) --- temporalio/contrib/langsmith/_interceptor.py | 98 ++++++++++---------- tests/contrib/langsmith/test_interceptor.py | 13 ++- 2 files changed, 58 insertions(+), 53 deletions(-) diff --git 
a/temporalio/contrib/langsmith/_interceptor.py b/temporalio/contrib/langsmith/_interceptor.py index 80d9d49b9..c15196e7e 100644 --- a/temporalio/contrib/langsmith/_interceptor.py +++ b/temporalio/contrib/langsmith/_interceptor.py @@ -89,6 +89,7 @@ def _inject_current_context( def _extract_context( headers: Mapping[str, Payload], executor: ThreadPoolExecutor, + ls_client: langsmith.Client, ) -> _ReplaySafeRunTree | None: """Extract LangSmith context from Temporal payload headers. @@ -102,7 +103,10 @@ def _extract_context( return None ls_headers = _payload_converter.from_payloads([header])[0] run = RunTree.from_headers(ls_headers) - return _ReplaySafeRunTree(run, executor=executor) if run else None + if run is None: + return None + run.ls_client = ls_client + return _ReplaySafeRunTree(run, executor=executor) def _inject_nexus_context( @@ -120,6 +124,7 @@ def _inject_nexus_context( def _extract_nexus_context( headers: Mapping[str, str], executor: ThreadPoolExecutor, + ls_client: langsmith.Client, ) -> _ReplaySafeRunTree | None: """Extract LangSmith context from Nexus string headers.""" raw = headers.get(HEADER_KEY) @@ -127,7 +132,10 @@ def _extract_nexus_context( return None ls_headers = json.loads(raw) run = RunTree.from_headers(ls_headers) - return _ReplaySafeRunTree(run, executor=executor) if run else None + if run is None: + return None + run.ls_client = ls_client + return _ReplaySafeRunTree(run, executor=executor) def _get_current_run_for_propagation() -> RunTree | None: @@ -495,10 +503,7 @@ def _maybe_run( if project_name is not None: run_tree_args["project_name"] = project_name if parent is not None: - # Unwrap _ReplaySafeRunTree so RunTree gets the real parent - run_tree_args["parent_run"] = ( - parent._run if isinstance(parent, _ReplaySafeRunTree) else parent - ) + run_tree_args["parent_run"] = parent if metadata: run_tree_args["extra"] = {"metadata": metadata} if tags: @@ -687,7 +692,9 @@ def __init__( async def execute_activity( self, input: 
temporalio.worker.ExecuteActivityInput ) -> Any: - parent = _extract_context(input.headers, self._config._executor) + parent = _extract_context( + input.headers, self._config._executor, self._config._client + ) info = temporalio.activity.info() extra_metadata = { "temporalWorkflowID": info.workflow_id or "", @@ -697,19 +704,12 @@ async def execute_activity( # Unconditionally set tracing context so @traceable functions inside # activities inherit the plugin's client and parent, regardless of # the add_temporal_runs toggle. - # - # Override ls_client so @traceable children use the plugin's client - # rather than lazily creating one with different configuration. - if parent is not None and hasattr(parent, "ls_client"): - parent.ls_client = self._config._client tracing_args: dict[str, Any] = { "client": self._config._client, "enabled": True, + "project_name": self._config._project_name, + "parent": parent, } - if self._config._project_name: - tracing_args["project_name"] = self._config._project_name - if parent: - tracing_args["parent"] = parent with tracing_context(**tracing_args): with self._config.maybe_run( f"RunActivity:{info.activity_type}", @@ -748,33 +748,37 @@ def _workflow_maybe_run( code can discover the parent and LangSmith client, independent of the ``add_temporal_runs`` toggle. """ - parent = _extract_context(headers, self._config._executor) if headers else None - if parent is not None: - parent.ls_client = self._config._client - info = temporalio.workflow.info() - extra_metadata = { - "temporalWorkflowID": info.workflow_id, - "temporalRunID": info.run_id, - } - tracing_args: dict[str, Any] = { - "client": self._config._client, - "enabled": True, - } + parent = ( + _extract_context(headers, self._config._executor, self._config._client) + if headers + else None + ) # When add_temporal_runs=False and no external parent, create a # _RootReplaySafeRunTreeFactory so @traceable calls get a # _ReplaySafeRunTree parent via create_child. 
The factory is # invisible in LangSmith. - factory: _RootReplaySafeRunTreeFactory | None = None - if not self._config._add_temporal_runs and parent is None: - factory = _RootReplaySafeRunTreeFactory( + # tracing_parent can be None when add_temporal_runs=True but no parent was + # propagated via headers — maybe_run will later create a root run in that case. + tracing_parent: _ReplaySafeRunTree | _RootReplaySafeRunTreeFactory | None = ( + parent + if parent is not None or self._config._add_temporal_runs + else _RootReplaySafeRunTreeFactory( ls_client=self._config._client, executor=self._config._executor, session_name=self._config._project_name, ) - tracing_args["parent"] = factory - tracing_args["project_name"] = self._config._project_name - elif parent: - tracing_args["parent"] = parent + ) + tracing_args: dict[str, Any] = { + "client": self._config._client, + "enabled": True, + "project_name": self._config._project_name, + "parent": tracing_parent, + } + info = temporalio.workflow.info() + extra_metadata = { + "temporalWorkflowID": info.workflow_id, + "temporalRunID": info.run_id, + } with tracing_context(**tracing_args): with self._config.maybe_run( name, @@ -922,17 +926,15 @@ async def execute_nexus_operation_start( nexusrpc.handler.StartOperationResultSync[Any] | nexusrpc.handler.StartOperationResultAsync ): - parent = _extract_nexus_context(input.ctx.headers, self._config._executor) - if parent is not None and hasattr(parent, "ls_client"): - parent.ls_client = self._config._client + parent = _extract_nexus_context( + input.ctx.headers, self._config._executor, self._config._client + ) tracing_args: dict[str, Any] = { "client": self._config._client, "enabled": True, + "project_name": self._config._project_name, + "parent": parent, } - if self._config._project_name: - tracing_args["project_name"] = self._config._project_name - if parent: - tracing_args["parent"] = parent with tracing_context(**tracing_args): with self._config.maybe_run( 
f"RunStartNexusOperationHandler:{input.ctx.service}/{input.ctx.operation}", @@ -944,17 +946,15 @@ async def execute_nexus_operation_start( async def execute_nexus_operation_cancel( self, input: temporalio.worker.ExecuteNexusOperationCancelInput ) -> None: - parent = _extract_nexus_context(input.ctx.headers, self._config._executor) - if parent is not None and hasattr(parent, "ls_client"): - parent.ls_client = self._config._client + parent = _extract_nexus_context( + input.ctx.headers, self._config._executor, self._config._client + ) tracing_args: dict[str, Any] = { "client": self._config._client, "enabled": True, + "project_name": self._config._project_name, + "parent": parent, } - if self._config._project_name: - tracing_args["project_name"] = self._config._project_name - if parent: - tracing_args["parent"] = parent with tracing_context(**tracing_args): with self._config.maybe_run( f"RunCancelNexusOperationHandler:{input.ctx.service}/{input.ctx.operation}", diff --git a/tests/contrib/langsmith/test_interceptor.py b/tests/contrib/langsmith/test_interceptor.py index 47ec8eabc..96fdc1170 100644 --- a/tests/contrib/langsmith/test_interceptor.py +++ b/tests/contrib/langsmith/test_interceptor.py @@ -114,7 +114,7 @@ def test_inject_extract_roundtrip(self, MockRunTree: Any) -> None: mock_extracted = MagicMock() MockRunTree.from_headers.return_value = mock_extracted - extracted = _extract_context(result, _make_executor()) + extracted = _extract_context(result, _make_executor(), MagicMock()) # extracted should be a _ReplaySafeRunTree wrapping the reconstructed run assert isinstance(extracted, _ReplaySafeRunTree) assert extracted._run is mock_extracted @@ -123,7 +123,7 @@ def test_inject_extract_roundtrip(self, MockRunTree: Any) -> None: def test_extract_missing_header(self) -> None: """When the _temporal-langsmith-context header is absent, returns None.""" headers: dict[str, Payload] = {} - result = _extract_context(headers, _make_executor()) + result = 
_extract_context(headers, _make_executor(), MagicMock()) assert result is None def test_inject_preserves_existing_headers(self) -> None: @@ -1110,7 +1110,10 @@ async def test_false_still_propagates_context( # tracing_context SHOULD be called with the client and extracted parent # (unconditionally, before _maybe_run) mock_tracing_ctx.assert_called_once_with( - client=config._client, enabled=True, parent=mock_extracted_parent + client=config._client, + enabled=True, + project_name=None, + parent=mock_extracted_parent, ) mock_act_next.execute_activity.assert_called_once() @@ -1141,5 +1144,7 @@ async def test_false_activity_no_parent_no_context( MockRunTree.assert_not_called() # tracing_context called with client and enabled (no parent) - mock_tracing_ctx.assert_called_once_with(client=config._client, enabled=True) + mock_tracing_ctx.assert_called_once_with( + client=config._client, enabled=True, project_name=None, parent=None + ) mock_act_next.execute_activity.assert_called_once() From 285329923f521094f2f11aabf007b12effb182e0 Mon Sep 17 00:00:00 2001 From: Maple Xu Date: Fri, 3 Apr 2026 16:13:27 -0400 Subject: [PATCH 29/30] Make StartFoo and RunFoo siblings instead of parent-child in LangSmith traces StartFoo completes instantly while RunFoo runs for the operation's lifetime, making the parent-child timing misleading in the UI. Now headers carry the ambient parent's context instead of StartFoo's, so RunFoo nests under the same parent as StartFoo. Adds _traced_start for client outbound start operations (separate from _traced_call used by query/signal/update which keep parent-child). Workflow outbound _traced_outbound captures ambient context before maybe_run for all operations. 
Co-Authored-By: Claude Opus 4.6 (1M context) --- temporalio/contrib/langsmith/_interceptor.py | 30 +++- tests/contrib/langsmith/test_integration.py | 166 +++++++++---------- tests/contrib/langsmith/test_plugin.py | 106 ++++++------ 3 files changed, 157 insertions(+), 145 deletions(-) diff --git a/temporalio/contrib/langsmith/_interceptor.py b/temporalio/contrib/langsmith/_interceptor.py index c15196e7e..5e020eb4d 100644 --- a/temporalio/contrib/langsmith/_interceptor.py +++ b/temporalio/contrib/langsmith/_interceptor.py @@ -633,11 +633,23 @@ def _traced_call(self, name: str, input: _InputWithHeaders) -> Iterator[None]: input.headers = _inject_current_context(input.headers) yield + @contextmanager + def _traced_start(self, name: str, input: _InputWithHeaders) -> Iterator[None]: + """Wrap a start operation, injecting ambient parent context before creating the run. + + Unlike ``_traced_call``, this injects headers *before* ``maybe_run`` + so the downstream ``RunFoo`` becomes a sibling of ``StartFoo`` rather + than a child. 
+ """ + input.headers = _inject_current_context(input.headers) + with self._config.maybe_run(name): + yield + async def start_workflow( self, input: temporalio.client.StartWorkflowInput ) -> temporalio.client.WorkflowHandle[Any, Any]: prefix = "SignalWithStartWorkflow" if input.start_signal else "StartWorkflow" - with self._traced_call(f"{prefix}:{input.workflow}", input): + with self._traced_start(f"{prefix}:{input.workflow}", input): return await super().start_workflow(input) async def query_workflow(self, input: temporalio.client.QueryWorkflowInput) -> Any: @@ -659,15 +671,15 @@ async def start_workflow_update( async def start_update_with_start_workflow( self, input: temporalio.client.StartWorkflowUpdateWithStartInput ) -> temporalio.client.WorkflowUpdateHandle[Any]: + input.start_workflow_input.headers = _inject_current_context( + input.start_workflow_input.headers + ) + input.update_workflow_input.headers = _inject_current_context( + input.update_workflow_input.headers + ) with self._config.maybe_run( f"StartUpdateWithStartWorkflow:{input.start_workflow_input.workflow}", ): - input.start_workflow_input.headers = _inject_current_context( - input.start_workflow_input.headers - ) - input.update_workflow_input.headers = _inject_current_context( - input.update_workflow_input.headers - ) return await super().start_update_with_start_workflow(input) @@ -845,8 +857,8 @@ def _traced_outbound(self, name: str, input: _InputWithHeaders) -> Iterator[None Uses ambient context so ``@traceable`` step functions that wrap outbound calls correctly parent the outbound run under themselves. 
""" + context_source = _get_current_run_for_propagation() with self._config.maybe_run(name): - context_source = _get_current_run_for_propagation() if context_source: input.headers = _inject_context(input.headers, context_source) yield None @@ -891,10 +903,10 @@ def continue_as_new(self, input: temporalio.worker.ContinueAsNewInput) -> NoRetu async def start_nexus_operation( self, input: temporalio.worker.StartNexusOperationInput[Any, Any] ) -> temporalio.workflow.NexusOperationHandle[Any]: + context_source = _get_current_run_for_propagation() with self._config.maybe_run( f"StartNexusOperation:{input.service}/{input.operation_name}", ): - context_source = _get_current_run_for_propagation() if context_source: input.headers = _inject_nexus_context( input.headers or {}, context_source diff --git a/tests/contrib/langsmith/test_integration.py b/tests/contrib/langsmith/test_integration.py index 3decaf4e6..18ec082c7 100644 --- a/tests/contrib/langsmith/test_integration.py +++ b/tests/contrib/langsmith/test_integration.py @@ -371,10 +371,10 @@ async def test_workflow_activity_trace_hierarchy( hierarchy = dump_runs(collector) expected = [ "StartWorkflow:SimpleWorkflow", - " RunWorkflow:SimpleWorkflow", - " StartActivity:simple_activity", - " RunActivity:simple_activity", - " simple_activity", + "RunWorkflow:SimpleWorkflow", + " StartActivity:simple_activity", + " RunActivity:simple_activity", + " simple_activity", ] assert ( hierarchy == expected @@ -433,11 +433,11 @@ async def test_no_duplicate_traces_on_replay( hierarchy = dump_runs(collector) expected = [ "StartWorkflow:TraceableActivityWorkflow", - " RunWorkflow:TraceableActivityWorkflow", - " StartActivity:traceable_activity", - " RunActivity:traceable_activity", - " traceable_activity", - " inner_llm_call", + "RunWorkflow:TraceableActivityWorkflow", + " StartActivity:traceable_activity", + " RunActivity:traceable_activity", + " traceable_activity", + " inner_llm_call", ] assert hierarchy == expected, ( f"Hierarchy 
mismatch (possible replay duplicates).\n" @@ -479,10 +479,10 @@ async def test_activity_failure_marked( hierarchy = dump_runs(collector) expected = [ "StartWorkflow:ActivityFailureWorkflow", - " RunWorkflow:ActivityFailureWorkflow", - " StartActivity:failing_activity", - " RunActivity:failing_activity", - " failing_activity", + "RunWorkflow:ActivityFailureWorkflow", + " StartActivity:failing_activity", + " RunActivity:failing_activity", + " failing_activity", ] assert ( hierarchy == expected @@ -521,7 +521,7 @@ async def test_workflow_failure_marked( hierarchy = dump_runs(collector) expected = [ "StartWorkflow:FailingWorkflow", - " RunWorkflow:FailingWorkflow", + "RunWorkflow:FailingWorkflow", ] assert ( hierarchy == expected @@ -559,10 +559,10 @@ async def test_benign_error_not_marked( hierarchy = dump_runs(collector) expected = [ "StartWorkflow:BenignErrorWorkflow", - " RunWorkflow:BenignErrorWorkflow", - " StartActivity:benign_failing_activity", - " RunActivity:benign_failing_activity", - " benign_failing_activity", + "RunWorkflow:BenignErrorWorkflow", + " StartActivity:benign_failing_activity", + " RunActivity:benign_failing_activity", + " benign_failing_activity", ] assert ( hierarchy == expected @@ -666,64 +666,64 @@ async def user_pipeline() -> WorkflowHandle[Any, Any]: assert workflow_traces[0] == [ "user_pipeline", " StartWorkflow:ComprehensiveWorkflow", - " RunWorkflow:ComprehensiveWorkflow", - " StartActivity:nested_traceable_activity", - " RunActivity:nested_traceable_activity", - " nested_traceable_activity", - " outer_chain", - " inner_llm_call", + " RunWorkflow:ComprehensiveWorkflow", + " StartActivity:nested_traceable_activity", + " RunActivity:nested_traceable_activity", + " nested_traceable_activity", + " outer_chain", + " inner_llm_call", # step-wrapped activity - " step_with_activity", - " StartActivity:nested_traceable_activity", - " RunActivity:nested_traceable_activity", - " nested_traceable_activity", - " outer_chain", - " inner_llm_call", + 
" step_with_activity", " StartActivity:nested_traceable_activity", - " RunActivity:nested_traceable_activity", - " nested_traceable_activity", - " outer_chain", - " inner_llm_call", - " outer_chain", - " inner_llm_call", - " StartChildWorkflow:TraceableActivityWorkflow", - " RunWorkflow:TraceableActivityWorkflow", - " StartActivity:traceable_activity", - " RunActivity:traceable_activity", - " traceable_activity", - " inner_llm_call", + " RunActivity:nested_traceable_activity", + " nested_traceable_activity", + " outer_chain", + " inner_llm_call", + " StartActivity:nested_traceable_activity", + " RunActivity:nested_traceable_activity", + " nested_traceable_activity", + " outer_chain", + " inner_llm_call", + " outer_chain", + " inner_llm_call", + " StartChildWorkflow:TraceableActivityWorkflow", + " RunWorkflow:TraceableActivityWorkflow", + " StartActivity:traceable_activity", + " RunActivity:traceable_activity", + " traceable_activity", + " inner_llm_call", # step-wrapped child workflow - " step_with_child_workflow", - " StartChildWorkflow:TraceableActivityWorkflow", - " RunWorkflow:TraceableActivityWorkflow", - " StartActivity:traceable_activity", - " RunActivity:traceable_activity", - " traceable_activity", - " inner_llm_call", - " StartNexusOperation:NexusService/run_operation", - " RunStartNexusOperationHandler:NexusService/run_operation", - " StartWorkflow:SimpleNexusWorkflow", - " RunWorkflow:SimpleNexusWorkflow", - " StartActivity:traceable_activity", - " RunActivity:traceable_activity", - " traceable_activity", - " inner_llm_call", + " step_with_child_workflow", + " StartChildWorkflow:TraceableActivityWorkflow", + " RunWorkflow:TraceableActivityWorkflow", + " StartActivity:traceable_activity", + " RunActivity:traceable_activity", + " traceable_activity", + " inner_llm_call", + " StartNexusOperation:NexusService/run_operation", + " RunStartNexusOperationHandler:NexusService/run_operation", + " StartWorkflow:SimpleNexusWorkflow", + " 
RunWorkflow:SimpleNexusWorkflow", + " StartActivity:traceable_activity", + " RunActivity:traceable_activity", + " traceable_activity", + " inner_llm_call", # step-wrapped nexus operation - " step_with_nexus", - " StartNexusOperation:NexusService/run_operation", - " RunStartNexusOperationHandler:NexusService/run_operation", - " StartWorkflow:SimpleNexusWorkflow", - " RunWorkflow:SimpleNexusWorkflow", - " StartActivity:traceable_activity", - " RunActivity:traceable_activity", - " traceable_activity", - " inner_llm_call", - # post-signal - " StartActivity:nested_traceable_activity", - " RunActivity:nested_traceable_activity", - " nested_traceable_activity", - " outer_chain", + " step_with_nexus", + " StartNexusOperation:NexusService/run_operation", + " RunStartNexusOperationHandler:NexusService/run_operation", + " StartWorkflow:SimpleNexusWorkflow", + " RunWorkflow:SimpleNexusWorkflow", + " StartActivity:traceable_activity", + " RunActivity:traceable_activity", + " traceable_activity", " inner_llm_call", + # post-signal + " StartActivity:nested_traceable_activity", + " RunActivity:nested_traceable_activity", + " nested_traceable_activity", + " outer_chain", + " inner_llm_call", ] # poll_query trace (separate root, variable number of iterations) @@ -1065,18 +1065,18 @@ async def test_mixed_sync_async_traceable_with_temporal_runs( # @traceable calls nest under the RunWorkflow run. 
expected = [ "StartWorkflow:FactoryTraceableWorkflow", - " RunWorkflow:FactoryTraceableWorkflow", - " outer_chain", - " inner_llm_call", - " sync_outer_chain", - " sync_inner_llm_call", - " async_calls_sync", - " sync_inner_llm_call", - " StartActivity:nested_traceable_activity", - " RunActivity:nested_traceable_activity", - " nested_traceable_activity", - " outer_chain", - " inner_llm_call", + "RunWorkflow:FactoryTraceableWorkflow", + " outer_chain", + " inner_llm_call", + " sync_outer_chain", + " sync_inner_llm_call", + " async_calls_sync", + " sync_inner_llm_call", + " StartActivity:nested_traceable_activity", + " RunActivity:nested_traceable_activity", + " nested_traceable_activity", + " outer_chain", + " inner_llm_call", ] assert ( hierarchy == expected diff --git a/tests/contrib/langsmith/test_plugin.py b/tests/contrib/langsmith/test_plugin.py index c54df7d1c..1438b583e 100644 --- a/tests/contrib/langsmith/test_plugin.py +++ b/tests/contrib/langsmith/test_plugin.py @@ -230,64 +230,64 @@ async def user_pipeline() -> WorkflowHandle[Any, Any]: assert workflow_traces[0] == [ "user_pipeline", " StartWorkflow:ComprehensiveWorkflow", - " RunWorkflow:ComprehensiveWorkflow", - " StartActivity:nested_traceable_activity", - " RunActivity:nested_traceable_activity", - " nested_traceable_activity", - " outer_chain", - " inner_llm_call", + " RunWorkflow:ComprehensiveWorkflow", + " StartActivity:nested_traceable_activity", + " RunActivity:nested_traceable_activity", + " nested_traceable_activity", + " outer_chain", + " inner_llm_call", # step-wrapped activity - " step_with_activity", - " StartActivity:nested_traceable_activity", - " RunActivity:nested_traceable_activity", - " nested_traceable_activity", - " outer_chain", - " inner_llm_call", + " step_with_activity", " StartActivity:nested_traceable_activity", - " RunActivity:nested_traceable_activity", - " nested_traceable_activity", - " outer_chain", - " inner_llm_call", - " outer_chain", - " inner_llm_call", - " 
StartChildWorkflow:TraceableActivityWorkflow", - " RunWorkflow:TraceableActivityWorkflow", - " StartActivity:traceable_activity", - " RunActivity:traceable_activity", - " traceable_activity", - " inner_llm_call", + " RunActivity:nested_traceable_activity", + " nested_traceable_activity", + " outer_chain", + " inner_llm_call", + " StartActivity:nested_traceable_activity", + " RunActivity:nested_traceable_activity", + " nested_traceable_activity", + " outer_chain", + " inner_llm_call", + " outer_chain", + " inner_llm_call", + " StartChildWorkflow:TraceableActivityWorkflow", + " RunWorkflow:TraceableActivityWorkflow", + " StartActivity:traceable_activity", + " RunActivity:traceable_activity", + " traceable_activity", + " inner_llm_call", # step-wrapped child workflow - " step_with_child_workflow", - " StartChildWorkflow:TraceableActivityWorkflow", - " RunWorkflow:TraceableActivityWorkflow", - " StartActivity:traceable_activity", - " RunActivity:traceable_activity", - " traceable_activity", - " inner_llm_call", - " StartNexusOperation:NexusService/run_operation", - " RunStartNexusOperationHandler:NexusService/run_operation", - " StartWorkflow:SimpleNexusWorkflow", - " RunWorkflow:SimpleNexusWorkflow", - " StartActivity:traceable_activity", - " RunActivity:traceable_activity", - " traceable_activity", - " inner_llm_call", + " step_with_child_workflow", + " StartChildWorkflow:TraceableActivityWorkflow", + " RunWorkflow:TraceableActivityWorkflow", + " StartActivity:traceable_activity", + " RunActivity:traceable_activity", + " traceable_activity", + " inner_llm_call", + " StartNexusOperation:NexusService/run_operation", + " RunStartNexusOperationHandler:NexusService/run_operation", + " StartWorkflow:SimpleNexusWorkflow", + " RunWorkflow:SimpleNexusWorkflow", + " StartActivity:traceable_activity", + " RunActivity:traceable_activity", + " traceable_activity", + " inner_llm_call", # step-wrapped nexus operation - " step_with_nexus", - " 
StartNexusOperation:NexusService/run_operation", - " RunStartNexusOperationHandler:NexusService/run_operation", - " StartWorkflow:SimpleNexusWorkflow", - " RunWorkflow:SimpleNexusWorkflow", - " StartActivity:traceable_activity", - " RunActivity:traceable_activity", - " traceable_activity", - " inner_llm_call", - # post-signal - " StartActivity:nested_traceable_activity", - " RunActivity:nested_traceable_activity", - " nested_traceable_activity", - " outer_chain", + " step_with_nexus", + " StartNexusOperation:NexusService/run_operation", + " RunStartNexusOperationHandler:NexusService/run_operation", + " StartWorkflow:SimpleNexusWorkflow", + " RunWorkflow:SimpleNexusWorkflow", + " StartActivity:traceable_activity", + " RunActivity:traceable_activity", + " traceable_activity", " inner_llm_call", + # post-signal + " StartActivity:nested_traceable_activity", + " RunActivity:nested_traceable_activity", + " nested_traceable_activity", + " outer_chain", + " inner_llm_call", ] # poll_query trace (separate root, variable number of iterations) From 1b38567701b42fbe0d0fd135654b7435a4e94390 Mon Sep 17 00:00:00 2001 From: Maple Xu Date: Fri, 3 Apr 2026 18:03:35 -0400 Subject: [PATCH 30/30] Add README for LangSmith plugin Covers quick start, example chatbot, add_temporal_runs toggle, where @traceable works, migration guide, replay safety, and context propagation. 
Co-Authored-By: Claude Opus 4.6 (1M context) --- temporalio/contrib/langsmith/README.md | 225 +++++++++++++++++++++++++ 1 file changed, 225 insertions(+) create mode 100644 temporalio/contrib/langsmith/README.md diff --git a/temporalio/contrib/langsmith/README.md b/temporalio/contrib/langsmith/README.md new file mode 100644 index 000000000..382511549 --- /dev/null +++ b/temporalio/contrib/langsmith/README.md @@ -0,0 +1,225 @@ +# LangSmith Plugin for Temporal Python SDK + +This Temporal [plugin](https://docs.temporal.io/develop/plugins-guide) allows your [LangSmith](https://smith.langchain.com/) traces to be fully replay safe when added to Temporal workflows and activities. It propagates trace context across worker boundaries so that `@traceable` calls, LLM invocations, and Temporal operations show up in a single connected trace, and ensures that replaying does not generate duplicate traces. + +## Quick Start + +Register the plugin on your Temporal client. You need it on both the client (starter) side and the workers: + +```python +from temporalio.client import Client +from temporalio.contrib.langsmith import LangSmithPlugin + +client = await Client.connect( + "localhost:7233", + plugins=[LangSmithPlugin(project_name="my-project")], +) +``` + +Once that's set up, any `@traceable` function inside your workflows and activities will show up in LangSmith with correct parent-child relationships, even across worker boundaries. + +## Example: AI Chatbot + +A conversational chatbot using OpenAI, orchestrated by a Temporal workflow. The workflow stays alive waiting for user messages via signals, and dispatches each message to an activity that calls the LLM. 
+ +### Activity (wraps the LLM call) + +```python +@langsmith.traceable(name="Call OpenAI", run_type="chain") +@activity.defn +async def call_openai(request: OpenAIRequest) -> Response: + client = wrap_openai(AsyncOpenAI()) # This is a traced langsmith function + return await client.responses.create( + model=request.model, + input=request.input, + instructions=request.instructions, + ) +``` + +### Workflow (orchestrates the conversation) + +```python +@workflow.defn +class ChatbotWorkflow: + @workflow.run + async def run(self) -> str: + # @traceable works inside workflows — fully replay-safe + now = workflow.now().strftime("%b %d %H:%M") + return await langsmith.traceable( + name=f"Session {now}", run_type="chain", + )(self._session)() + + async def _session(self) -> str: + while not self._done: + await workflow.wait_condition( + lambda: self._pending_message is not None or self._done + ) + if self._done: + break + + message = self._pending_message + self._pending_message = None + + @langsmith.traceable(name=f"Request: {message[:60]}", run_type="chain") + async def _query(msg: str) -> str: + response = await workflow.execute_activity( + call_openai, + OpenAIRequest(model="gpt-4o-mini", input=msg), + start_to_close_timeout=timedelta(seconds=60), + ) + return response.output_text + + self._last_response = await _query(message) + + return "Session ended." +``` + +### Worker + +```python +client = await Client.connect( + "localhost:7233", + plugins=[LangSmithPlugin(project_name="chatbot")], +) + +worker = Worker( + client, + task_queue="chatbot", + workflows=[ChatbotWorkflow], + activities=[call_openai], +) +await worker.run() +``` + +### What you see in LangSmith + +With the default configuration (`add_temporal_runs=False`), the trace only contains your application logic: + +``` +Session Apr 03 14:30 + Request: "What's the weather in NYC?" 
+ Call OpenAI + openai.responses.create (auto-traced by wrap_openai) +``` + + + +## `add_temporal_runs` — Temporal Operation Visibility + +By default, `add_temporal_runs` is `False` and only your `@traceable` application logic appears in traces. Setting it to `True` also adds Temporal operations (StartWorkflow, RunWorkflow, StartActivity, RunActivity, etc.): + +```python +plugins=[LangSmithPlugin(project_name="my-project", add_temporal_runs=True)] +``` + +This adds Temporal operation nodes to the trace tree so that the orchestration layer is visible alongside your application logic. If the caller wraps `start_workflow` in a `@traceable` function, the full trace looks like: + +``` +Ask Chatbot # @traceable wrapper around client.start_workflow + StartWorkflow:ChatbotWorkflow + RunWorkflow:ChatbotWorkflow + Session Apr 03 14:30 + Request: "What's the weather in NYC?" + StartActivity:call_openai + RunActivity:call_openai + Call OpenAI + openai.responses.create +``` + +Note: `StartFoo` and `RunFoo` appear as siblings. The start is the short-lived outbound RPC that completes immediately, and the run is the actual execution which may take much longer. + + + + + +## Migrating Existing LangSmith Code to Temporal + +If you already have code with LangSmith tracing, you should be able to move it into a Temporal workflow and keep the same trace hierarchy. The plugin handles sandbox restrictions and context propagation behind the scenes, so anything that was traceable before should remain traceable after the move. More details below: + +### Where `@traceable` works + +The plugin allows `@traceable` to work inside Temporal's deterministic workflow sandbox, where it normally can't run: + +| Location | Works? 
| Notes | +|----------|--------|-------| +| On `@activity.defn` functions | Yes | Stack `@traceable` on top of `@activity.defn` | +| On `@workflow.defn` class | No | Use `@traceable` inside `@workflow.run` instead | +| Inside workflow methods (sync or async) | Yes | Use `langsmith.traceable(name="...")(fn)()` | +| Inside activity methods (sync or async) | Yes | Regular `@traceable` decorator | +| Around `client.start_workflow` / `execute_workflow` | Yes | Wrap the caller to trace the entire workflow as one unit | +| Around `execute_activity` calls | Yes | Wrap the dispatch to group related operations | + +## Replay Safety + +Temporal workflows are deterministic and get replayed from event history on recovery. The plugin accounts for this by injecting replay-safe data into your traceable runs: + +- **No duplicate traces on replay.** Run IDs are derived deterministically from the workflow's random seed, so replayed operations produce the same IDs and LangSmith deduplicates them. +- **No non-deterministic calls.** The plugin injects metadata using `workflow.now()` for timestamps and `workflow.random()` for UUIDs instead of `datetime.now()` and `uuid4()`. +- **Background I/O stays outside the sandbox.** LangSmith HTTP calls to the server are submitted to a background thread pool that doesn't interfere with the deterministic workflow execution. + +You don't need to do anything special for this. Your `@traceable` functions behave the same whether it's a fresh execution or a replay. + +### Example: Worker crash mid-workflow + +``` +1. Workflow starts, executes Activity A -> trace appears in LangSmith +2. Worker crashes +3. New worker picks up the workflow +4. Workflow replays Activity A (skips execution) -> NO duplicate trace +5. 
Workflow executes Activity B (new work) -> new trace appears +``` + + + +### Example: Wrapping retriable steps in a trace + +Since Temporal retries failed activities, you can use `@traceable` to group the attempts together: + +```python +@langsmith.traceable(name="my_step", run_type="chain") +async def my_step(message: str) -> str: + return await workflow.execute_activity( + call_openai, + ... + ) +``` + +This groups everything under one run: +``` +my_step + Call OpenAI # first attempt + openai.responses.create + Call OpenAI # retry + openai.responses.create +``` + +## Context Propagation + +The plugin propagates trace context across process boundaries (client -> workflow -> activity -> child workflow -> nexus) via Temporal headers. You don't need to pass any context manually. + +``` +Client Process Worker Process (Workflow) Worker Process (Activity) +───────────── ────────────────────────── ───────────────────────── +@traceable("my workflow") + start_workflow ──headers──> RunWorkflow + @traceable("session") + execute_activity ──headers──> RunActivity + @traceable("Call OpenAI") + openai.create(...) +``` + +## API Reference + +### `LangSmithPlugin` + +```python +LangSmithPlugin( + client=None, # langsmith.Client instance (auto-created if None) + project_name=None, # LangSmith project name + add_temporal_runs=False, # Show Temporal operation nodes in traces + metadata=None, # Default metadata for all runs + tags=None, # Default tags for all runs +) +``` + +We recommend registering the plugin on both the client and all workers. Strictly speaking, you only need it on the sides that produce traces, but adding it everywhere avoids surprises with context propagation. The client and worker don't need to share the same configuration — for example, they can use different `add_temporal_runs` settings.