diff --git a/sdk/agentserver/azure-ai-agentserver-agentframework/azure/ai/agentserver/agentframework/_agent_framework.py b/sdk/agentserver/azure-ai-agentserver-agentframework/azure/ai/agentserver/agentframework/_agent_framework.py index 837d1675ba05..92d243d46764 100644 --- a/sdk/agentserver/azure-ai-agentserver-agentframework/azure/ai/agentserver/agentframework/_agent_framework.py +++ b/sdk/agentserver/azure-ai-agentserver-agentframework/azure/ai/agentserver/agentframework/_agent_framework.py @@ -18,7 +18,7 @@ Response as OpenAIResponse, ResponseStreamEvent, ) -from azure.ai.agentserver.core.models.projects import ResponseErrorEvent, ResponseFailedEvent +from azure.ai.agentserver.core.models._projects import ResponseErrorEvent, ResponseFailedEvent from azure.ai.agentserver.core.tools import OAuthConsentRequiredError # pylint: disable=import-error from .models.agent_framework_output_streaming_converter import AgentFrameworkOutputStreamingConverter diff --git a/sdk/agentserver/azure-ai-agentserver-agentframework/azure/ai/agentserver/agentframework/models/agent_framework_output_non_streaming_converter.py b/sdk/agentserver/azure-ai-agentserver-agentframework/azure/ai/agentserver/agentframework/models/agent_framework_output_non_streaming_converter.py index aac9b24c445c..86094a617991 100644 --- a/sdk/agentserver/azure-ai-agentserver-agentframework/azure/ai/agentserver/agentframework/models/agent_framework_output_non_streaming_converter.py +++ b/sdk/agentserver/azure-ai-agentserver-agentframework/azure/ai/agentserver/agentframework/models/agent_framework_output_non_streaming_converter.py @@ -12,7 +12,7 @@ from azure.ai.agentserver.core import AgentRunContext from azure.ai.agentserver.core.logger import get_logger from azure.ai.agentserver.core.models import Response as OpenAIResponse -from azure.ai.agentserver.core.models.projects import ( +from azure.ai.agentserver.core.models._projects import ( ItemContentOutputText, ResponsesAssistantMessageItemResource, ) diff --git 
a/sdk/agentserver/azure-ai-agentserver-agentframework/azure/ai/agentserver/agentframework/models/agent_framework_output_streaming_converter.py b/sdk/agentserver/azure-ai-agentserver-agentframework/azure/ai/agentserver/agentframework/models/agent_framework_output_streaming_converter.py index 22f144a4b7a4..02d11958cf24 100644 --- a/sdk/agentserver/azure-ai-agentserver-agentframework/azure/ai/agentserver/agentframework/models/agent_framework_output_streaming_converter.py +++ b/sdk/agentserver/azure-ai-agentserver-agentframework/azure/ai/agentserver/agentframework/models/agent_framework_output_streaming_converter.py @@ -16,7 +16,7 @@ Response as OpenAIResponse, ResponseStreamEvent, ) -from azure.ai.agentserver.core.models.projects import ( +from azure.ai.agentserver.core.models._projects import ( FunctionToolCallItemResource, FunctionToolCallOutputItemResource, ItemContentOutputText, diff --git a/sdk/agentserver/azure-ai-agentserver-agentframework/azure/ai/agentserver/agentframework/models/agent_id_generator.py b/sdk/agentserver/azure-ai-agentserver-agentframework/azure/ai/agentserver/agentframework/models/agent_id_generator.py index abd2dd2c02ef..ca429683a1be 100644 --- a/sdk/agentserver/azure-ai-agentserver-agentframework/azure/ai/agentserver/agentframework/models/agent_id_generator.py +++ b/sdk/agentserver/azure-ai-agentserver-agentframework/azure/ai/agentserver/agentframework/models/agent_id_generator.py @@ -8,7 +8,7 @@ from typing import Optional from azure.ai.agentserver.core import AgentRunContext -from azure.ai.agentserver.core.models import projects +from azure.ai.agentserver.core.models import _projects as projects def generate_agent_id(context: AgentRunContext) -> Optional[projects.AgentId]: diff --git a/sdk/agentserver/azure-ai-agentserver-agentframework/azure/ai/agentserver/agentframework/persistence/_foundry_checkpoint_repository.py 
b/sdk/agentserver/azure-ai-agentserver-agentframework/azure/ai/agentserver/agentframework/persistence/_foundry_checkpoint_repository.py index acd89f1baef0..3e84763f4e68 100644 --- a/sdk/agentserver/azure-ai-agentserver-agentframework/azure/ai/agentserver/agentframework/persistence/_foundry_checkpoint_repository.py +++ b/sdk/agentserver/azure-ai-agentserver-agentframework/azure/ai/agentserver/agentframework/persistence/_foundry_checkpoint_repository.py @@ -8,7 +8,7 @@ from agent_framework import CheckpointStorage -from azure.ai.agentserver.core.checkpoints.client import ( +from azure.ai.agentserver.core.checkpoints import ( CheckpointSession, FoundryCheckpointClient, ) diff --git a/sdk/agentserver/azure-ai-agentserver-agentframework/azure/ai/agentserver/agentframework/persistence/_foundry_checkpoint_storage.py b/sdk/agentserver/azure-ai-agentserver-agentframework/azure/ai/agentserver/agentframework/persistence/_foundry_checkpoint_storage.py index 833c3647149a..63ba16dcd1ed 100644 --- a/sdk/agentserver/azure-ai-agentserver-agentframework/azure/ai/agentserver/agentframework/persistence/_foundry_checkpoint_storage.py +++ b/sdk/agentserver/azure-ai-agentserver-agentframework/azure/ai/agentserver/agentframework/persistence/_foundry_checkpoint_storage.py @@ -9,7 +9,7 @@ from agent_framework import WorkflowCheckpoint -from azure.ai.agentserver.core.checkpoints.client import ( +from azure.ai.agentserver.core.checkpoints import ( CheckpointItem, CheckpointItemId, FoundryCheckpointClient, diff --git a/sdk/agentserver/azure-ai-agentserver-agentframework/tests/unit_tests/mocks/mock_checkpoint_client.py b/sdk/agentserver/azure-ai-agentserver-agentframework/tests/unit_tests/mocks/mock_checkpoint_client.py index ffc1e2fcc4c1..50a4458856ec 100644 --- a/sdk/agentserver/azure-ai-agentserver-agentframework/tests/unit_tests/mocks/mock_checkpoint_client.py +++ b/sdk/agentserver/azure-ai-agentserver-agentframework/tests/unit_tests/mocks/mock_checkpoint_client.py @@ -5,7 +5,7 @@ from 
typing import Any, Dict, List, Optional -from azure.ai.agentserver.core.checkpoints.client import ( +from azure.ai.agentserver.core.checkpoints import ( CheckpointItem, CheckpointItemId, CheckpointSession, diff --git a/sdk/agentserver/azure-ai-agentserver-core/README.md b/sdk/agentserver/azure-ai-agentserver-core/README.md index ff60cf460196..cc420579e5fe 100644 --- a/sdk/agentserver/azure-ai-agentserver-core/README.md +++ b/sdk/agentserver/azure-ai-agentserver-core/README.md @@ -26,7 +26,7 @@ from azure.ai.agentserver.core.models import ( CreateResponse, Response as OpenAIResponse, ) -from azure.ai.agentserver.core.models.projects import ( +from azure.ai.agentserver.core.models._projects import ( ItemContentOutputText, ResponsesAssistantMessageItemResource, ResponseTextDeltaEvent, diff --git a/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/checkpoints/__init__.py b/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/checkpoints/__init__.py index f9d6ed3d8aa8..0ca387146579 100644 --- a/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/checkpoints/__init__.py +++ b/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/checkpoints/__init__.py @@ -3,7 +3,7 @@ # --------------------------------------------------------- """Checkpoint storage module for Azure AI Agent Server.""" -from .client import FoundryCheckpointClient +from .client._client import FoundryCheckpointClient from .client._models import ( CheckpointItem, CheckpointItemId, diff --git a/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/checkpoints/client/__init__.py b/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/checkpoints/client/__init__.py index 34f30f16c5d9..901cbb3d70a8 100644 --- a/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/checkpoints/client/__init__.py +++ b/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/checkpoints/client/__init__.py @@ -3,16 
+3,4 @@ # --------------------------------------------------------- """Checkpoint client module for Azure AI Agent Server.""" -from ._client import FoundryCheckpointClient -from ._models import ( - CheckpointItem, - CheckpointItemId, - CheckpointSession, -) - -__all__ = [ - "CheckpointItem", - "CheckpointItemId", - "CheckpointSession", - "FoundryCheckpointClient", -] +__path__ = __import__("pkgutil").extend_path(__path__, __name__) diff --git a/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/checkpoints/client/_client.py b/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/checkpoints/client/_client.py index f7e178d758b4..fc2f45321968 100644 --- a/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/checkpoints/client/_client.py +++ b/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/checkpoints/client/_client.py @@ -2,6 +2,7 @@ # Copyright (c) Microsoft Corporation. All rights reserved. # --------------------------------------------------------- # pylint: disable=client-method-missing-kwargs,client-accepts-api-version-keyword,missing-client-constructor-parameter-kwargs +# ^^^ azure-sdk pylint rules: internal client not intended as a public Azure SDK client """Asynchronous client for Azure AI Foundry checkpoint storage API.""" from typing import Any, AsyncContextManager, List, Optional diff --git a/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/logger.py b/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/logger.py index f15e98986470..2b5f39e964b4 100644 --- a/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/logger.py +++ b/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/logger.py @@ -1,4 +1,3 @@ -# pylint: disable=broad-exception-caught,dangerous-default-value # --------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. 
# --------------------------------------------------------- @@ -12,13 +11,12 @@ from .constants import Constants def _get_default_log_config() -> dict[str, Any]: - """ - Build default log config with level from environment. - + """Build default log config with level from environment. + :return: A dictionary containing logging configuration. - :rtype: dict + :rtype: dict[str, Any] """ - log_level = get_log_level() + log_level = _get_log_level() return { "version": 1, "disable_existing_loggers": False, @@ -40,7 +38,14 @@ def _get_default_log_config() -> dict[str, Any]: } -def get_log_level(): +def _get_log_level() -> str: + """Read log level from the ``AGENT_LOG_LEVEL`` environment variable. + + Falls back to ``"INFO"`` if the variable is unset or contains an invalid value. + + :return: A valid Python logging level name. + :rtype: str + """ log_level = os.getenv(Constants.AGENT_LOG_LEVEL, "INFO").upper() valid_levels = ["DEBUG", "INFO", "WARNING", "ERROR", "CRITICAL"] if log_level not in valid_levels: @@ -54,7 +59,12 @@ def get_log_level(): APPINSIGHT_CONNSTR_ENV_NAME = "APPLICATIONINSIGHTS_CONNECTION_STRING" -def get_dimensions(): +def _get_dimensions() -> dict[str, str]: + """Collect environment-based dimensions for structured logging. + + :return: A mapping of dimension keys to their runtime values. + :rtype: dict[str, str] + """ env_values = {name: value for name, value in vars(Constants).items() if not name.startswith("_")} res = {"azure.ai.agentserver.version": VERSION} for name, env_name in env_values.items(): @@ -65,11 +75,25 @@ def get_dimensions(): return res -def get_project_endpoint(logger=None): +def get_project_endpoint(logger: Optional[logging.Logger] = None) -> Optional[str]: + """Resolve the project endpoint from environment variables. + + Checks ``AZURE_AI_PROJECT_ENDPOINT`` first, then falls back to deriving + an endpoint from ``AGENT_PROJECT_NAME``. + + :param logger: Optional logger for diagnostic messages. 
+ :type logger: Optional[logging.Logger] + :return: The resolved project endpoint URL, or ``None`` if unavailable. + :rtype: Optional[str] + """ project_endpoint = os.environ.get(Constants.AZURE_AI_PROJECT_ENDPOINT) if project_endpoint: if logger: - logger.info(f"Using project endpoint from {Constants.AZURE_AI_PROJECT_ENDPOINT}: {project_endpoint}") + logger.info( + "Using project endpoint from %s: %s", + Constants.AZURE_AI_PROJECT_ENDPOINT, + project_endpoint, + ) return project_endpoint project_resource_id = os.environ.get(Constants.AGENT_PROJECT_RESOURCE_ID) if project_resource_id: @@ -78,18 +102,32 @@ def get_project_endpoint(logger=None): parts = last_part.split("@") if len(parts) < 2: if logger: - logger.warning(f"Invalid project resource id format: {project_resource_id}") + logger.warning("Invalid project resource id format: %s", project_resource_id) return None account = parts[0] project = parts[1] endpoint = f"https://{account}.services.ai.azure.com/api/projects/{project}" if logger: - logger.info(f"Using project endpoint derived from {Constants.AGENT_PROJECT_RESOURCE_ID}: {endpoint}") + logger.info( + "Using project endpoint derived from %s: %s", + Constants.AGENT_PROJECT_RESOURCE_ID, + endpoint, + ) return endpoint return None -def get_application_insights_connstr(logger=None): +def _get_application_insights_connstr(logger: Optional[logging.Logger] = None) -> Optional[str]: + """Retrieve or derive the Application Insights connection string. + + Looks in the ``APPLICATIONINSIGHTS_CONNECTION_STRING`` environment variable first, + then attempts to fetch it from the project endpoint. + + :param logger: Optional logger for diagnostic messages. + :type logger: Optional[logging.Logger] + :return: The connection string, or ``None`` if unavailable. 
+ :rtype: Optional[str] + """ try: conn_str = os.environ.get(APPINSIGHT_CONNSTR_ENV_NAME) if not conn_str: @@ -101,22 +139,36 @@ def get_application_insights_connstr(logger=None): project_client = AIProjectClient(credential=DefaultAzureCredential(), endpoint=project_endpoint) conn_str = project_client.telemetry.get_application_insights_connection_string() if not conn_str and logger: - logger.info(f"No Application Insights connection found for project: {project_endpoint}") + logger.info( + "No Application Insights connection found for project: %s", + project_endpoint, + ) elif conn_str: os.environ[APPINSIGHT_CONNSTR_ENV_NAME] = conn_str elif logger: logger.info("Application Insights not configured, telemetry export disabled.") return conn_str - except Exception as e: + except Exception as e: # pylint: disable=broad-exception-caught # bootstrap: many failure modes possible if logger: - logger.warning(f"Failed to get Application Insights connection string, telemetry export disabled: {e}") + logger.warning( + "Failed to get Application Insights connection string, telemetry export disabled: %s", + e, + ) return None class CustomDimensionsFilter(logging.Filter): - def filter(self, record): - # Add custom dimensions to every log record - dimensions = get_dimensions() + """Logging filter that attaches environment dimensions and request context to log records.""" + + def filter(self, record: logging.LogRecord) -> bool: + """Inject custom dimensions into *record* and allow it through. + + :param record: The log record to enrich. + :type record: logging.LogRecord + :return: Always ``True`` so the record is never discarded. 
+ :rtype: bool + """ + dimensions = _get_dimensions() for key, value in dimensions.items(): setattr(record, key, value) cur_request_context = request_context.get() @@ -140,7 +192,7 @@ def configure(log_config: Optional[dict[str, Any]] = None): config.dictConfig(log_config) app_logger = logging.getLogger("azure.ai.agentserver") - application_insights_connection_string = get_application_insights_connstr(logger=app_logger) + application_insights_connection_string = _get_application_insights_connstr(logger=app_logger) enable_application_insights_logger = ( os.environ.get(Constants.ENABLE_APPLICATION_INSIGHTS_LOGGER, "true").lower() == "true" ) @@ -169,10 +221,10 @@ def configure(log_config: Optional[dict[str, Any]] = None): handler.addFilter(custom_filter) # Only add to azure.ai.agentserver namespace to avoid infrastructure logs - app_logger.setLevel(get_log_level()) + app_logger.setLevel(_get_log_level()) app_logger.addHandler(handler) - except Exception as e: + except Exception as e: # pylint: disable=broad-exception-caught print(f"Failed to configure logging: {e}") diff --git a/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/models/__init__.py b/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/models/__init__.py index d5622ebe7732..b6a1895a3868 100644 --- a/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/models/__init__.py +++ b/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/models/__init__.py @@ -1,7 +1,8 @@ # --------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # --------------------------------------------------------- +# TypedDict module; __all__ cannot be statically typed because the list is built at runtime. 
from ._create_response import CreateResponse # type: ignore -from .projects import Response, ResponseStreamEvent +from ._projects import Response, ResponseStreamEvent __all__ = ["CreateResponse", "Response", "ResponseStreamEvent"] # type: ignore[var-annotated] diff --git a/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/models/_create_response.py b/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/models/_create_response.py index 820d54c6cea0..5ec72115734a 100644 --- a/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/models/_create_response.py +++ b/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/models/_create_response.py @@ -1,11 +1,12 @@ # --------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # --------------------------------------------------------- -# pylint: disable=no-name-in-module +# pylint: disable=no-name-in-module # openai re-exports are dynamically generated from typing import Optional -from .openai import response_create_params # type: ignore -from . import projects as _azure_ai_projects_models +# ResponseCreateParamsBase is a TypedDict — mypy cannot verify total=False on mixed bases. +from ._openai import response_create_params # type: ignore +from . 
import _projects as _azure_ai_projects_models class CreateResponse(response_create_params.ResponseCreateParamsBase, total=False): # type: ignore agent: Optional[_azure_ai_projects_models.AgentReference] diff --git a/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/models/openai/__init__.py b/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/models/_openai/__init__.py similarity index 100% rename from sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/models/openai/__init__.py rename to sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/models/_openai/__init__.py diff --git a/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/models/projects/__init__.py b/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/models/_projects/__init__.py similarity index 100% rename from sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/models/projects/__init__.py rename to sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/models/_projects/__init__.py diff --git a/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/models/projects/_enums.py b/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/models/_projects/_enums.py similarity index 100% rename from sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/models/projects/_enums.py rename to sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/models/_projects/_enums.py diff --git a/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/models/projects/_models.py b/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/models/_projects/_models.py similarity index 100% rename from sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/models/projects/_models.py rename to sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/models/_projects/_models.py diff --git 
a/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/models/projects/_patch.py b/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/models/_projects/_patch.py similarity index 100% rename from sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/models/projects/_patch.py rename to sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/models/_projects/_patch.py diff --git a/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/models/projects/_patch_evaluations.py b/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/models/_projects/_patch_evaluations.py similarity index 100% rename from sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/models/projects/_patch_evaluations.py rename to sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/models/_projects/_patch_evaluations.py diff --git a/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/models/projects/_utils/__init__.py b/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/models/_projects/_utils/__init__.py similarity index 100% rename from sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/models/projects/_utils/__init__.py rename to sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/models/_projects/_utils/__init__.py diff --git a/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/models/projects/_utils/model_base.py b/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/models/_projects/_utils/model_base.py similarity index 100% rename from sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/models/projects/_utils/model_base.py rename to sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/models/_projects/_utils/model_base.py diff --git a/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/models/projects/_utils/serialization.py 
b/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/models/_projects/_utils/serialization.py similarity index 100% rename from sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/models/projects/_utils/serialization.py rename to sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/models/_projects/_utils/serialization.py diff --git a/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/server/_response_metadata.py b/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/server/_response_metadata.py index 352dfdc9d27b..9b13cfedd636 100644 --- a/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/server/_response_metadata.py +++ b/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/server/_response_metadata.py @@ -9,7 +9,7 @@ from ..application._metadata import get_current_app from ..models import Response as OpenAIResponse, ResponseStreamEvent -from ..models.projects import ( +from ..models._projects import ( ResponseCompletedEvent, ResponseCreatedEvent, ResponseInProgressEvent, diff --git a/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/server/base.py b/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/server/base.py index 7a9f488227a7..994d9045dd38 100644 --- a/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/server/base.py +++ b/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/server/base.py @@ -1,9 +1,7 @@ # --------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. 
# --------------------------------------------------------- -# pylint: disable=broad-exception-caught,unused-argument,logging-fstring-interpolation,too-many-statements,too-many-return-statements -# mypy: ignore-errors -import asyncio # pylint: disable=C4763 +import asyncio # pylint: disable=C4763 # azure-sdk: async-client-bad-name (false positive on module) import contextlib import inspect import json @@ -13,6 +11,7 @@ from typing import Any, AsyncGenerator, Generator, Optional, Union import uvicorn +from openai import AsyncOpenAI from opentelemetry import context as otel_context, trace from opentelemetry.trace.propagation.tracecontext import TraceContextTextMapPropagator from starlette.applications import Starlette @@ -40,7 +39,11 @@ from .common.agent_run_context import AgentRunContext from ..constants import Constants from ..logger import APPINSIGHT_CONNSTR_ENV_NAME, get_logger, get_project_endpoint, request_context -from ..models import Response as OpenAIResponse, ResponseStreamEvent, projects as project_models +from ..models import ( + Response as OpenAIResponse, + ResponseStreamEvent, + _projects as project_models +) from ..tools import UserInfoContextMiddleware, create_tool_runtime from ..utils._credential import AsyncTokenCredentialAdapter @@ -53,19 +56,19 @@ def __init__(self, app: ASGIApp, agent: Optional['FoundryCBAgent'] = None): super().__init__(app) self.agent = agent - async def dispatch(self, request: Request, call_next): + async def dispatch(self, request: Request, call_next): # type: ignore[override] if request.url.path in ("/runs", "/responses"): try: self.set_request_id_to_context_var(request) payload = await request.json() - except Exception as e: - logger.error(f"Invalid JSON payload: {e}") + except Exception as e: # pylint: disable=broad-exception-caught # middleware catch-all for bad payload + logger.error("Invalid JSON payload: %s", e) return JSONResponse({"error": f"Invalid JSON payload: {e}"}, status_code=400) try: 
request.state.agent_run_context = AgentRunContext(payload) self.set_run_context_to_context_var(request.state.agent_run_context) - except Exception as e: - logger.error(f"Context build failed: {e}.", exc_info=True) + except Exception as e: # pylint: disable=broad-exception-caught # middleware catch-all for context build + logger.error("Context build failed: %s.", e, exc_info=True) return JSONResponse({"error": f"Context build failed: {e}"}, status_code=500) return await call_next(request) @@ -99,7 +102,8 @@ def set_run_context_to_context_var(self, run_context): class FoundryCBAgent: - def __init__(self, + def __init__( # pylint: disable=too-many-statements # Starlette app setup requires sequential route/middleware wiring + self, credentials: Optional[Union[AsyncTokenCredential, TokenCredential]] = None, project_endpoint: Optional[str] = None) -> None: self.credentials = AsyncTokenCredentialAdapter(credentials) if credentials else AsyncDefaultTokenCredential() @@ -129,9 +133,9 @@ async def runs_endpoint(request): ex = None resp = await self.agent_run(context) - except Exception as e: + except Exception as e: # pylint: disable=broad-exception-caught # top-level agent_run catch-all # TODO: extract status code from exception - logger.error(f"Error processing CreateResponse request: {e}", exc_info=True) + logger.error("Error processing CreateResponse request: %s", e, exc_info=True) ex = e if not context.stream: @@ -172,7 +176,7 @@ async def gen_async(ex): if self._should_store(context): logger.debug("Storing output to conversation.") await self._save_output_events_to_conversation(context, output_events) - except Exception as e: # noqa: BLE001 + except Exception as e: # noqa: BLE001 # pylint: disable=broad-exception-caught logger.error("Error in async generator: %s", e, exc_info=True) ex = e finally: @@ -207,12 +211,12 @@ async def readiness_endpoint(request): ] @contextlib.asynccontextmanager - async def _lifespan(app): + async def _lifespan(app): # pylint: 
disable=unused-argument import logging # Log server started successfully port = getattr(self, '_port', 'unknown') - logger.info(f"FoundryCBAgent server started successfully on port {port}") + logger.info("FoundryCBAgent server started successfully on port %s", port) # Attach App Insights handler to uvicorn loggers for handler in logger.handlers: @@ -234,9 +238,9 @@ async def _lifespan(app): allow_methods=["*"], allow_headers=["*"], ) - self.app.add_middleware(AgentRunContextMiddleware, agent=self) + self.app.add_middleware(AgentRunContextMiddleware, agent=self) # type: ignore[arg-type] - self.tracer = None + self.tracer: trace.Tracer = trace.get_tracer(__name__) def _should_store(self, context: AgentRunContext) -> bool: """Determine whether conversation artifacts should be persisted. @@ -246,7 +250,7 @@ def _should_store(self, context: AgentRunContext) -> bool: :return: ``True`` when storage is requested and the conversation is scoped to a project. :rtype: bool """ - return context.request.get("store", False) and context.conversation_id and self._project_endpoint + return bool(context.request.get("store", False) and context.conversation_id and self._project_endpoint) def _items_are_equal(self, item1: dict, item2: dict) -> bool: """Compare two conversation items for equality based on type and content. @@ -274,7 +278,7 @@ def _items_are_equal(self, item1: dict, item2: dict) -> bool: return text1 == text2 return content1 == content2 - async def _create_openai_client(self) -> "AsyncOpenAI": + async def _create_openai_client(self) -> AsyncOpenAI: """Create an AsyncOpenAI client for conversation operations. :return: Configured AsyncOpenAI client scoped to the Foundry project endpoint. 
@@ -303,7 +307,7 @@ async def _save_input_to_conversation(self, context: AgentRunContext) -> None: try: conversation_id = context.conversation_id input_items = context.request.get("input", []) - if not input_items: + if not input_items or not conversation_id: return # Handle string input as a single item @@ -349,19 +353,22 @@ async def _save_input_to_conversation(self, context: AgentRunContext) -> None: all_match = False break if all_match: - logger.debug(f"All {n} input items already exist in " + - f"conversation {conversation_id}, skipping save") + logger.debug( + "All %d input items already exist in conversation %s, skipping save", + n, + conversation_id, + ) return - except Exception as e: - logger.debug(f"Could not check for duplicates: {e}") + except Exception as e: # pylint: disable=broad-exception-caught # best-effort duplicate check + logger.debug("Could not check for duplicates: %s", e) await openai_client.conversations.items.create( conversation_id=conversation_id, items=items_to_save, ) - logger.debug(f"Saved {len(items_to_save)} input items to conversation {conversation_id}") - except Exception as e: - logger.warning(f"Failed to save input items to conversation: {e}", exc_info=True) + logger.debug("Saved %d input items to conversation %s", len(items_to_save), conversation_id) + except Exception as e: # pylint: disable=broad-exception-caught # best-effort conversation persistence + logger.warning("Failed to save input items to conversation: %s", e, exc_info=True) async def _save_output_to_conversation( self, context: AgentRunContext, response: project_models.Response) -> None: @@ -396,9 +403,9 @@ async def _save_output_to_conversation( conversation_id=conversation_id, items=items_to_save, ) - logger.debug(f"Saved {len(items_to_save)} output items to conversation {conversation_id}") - except Exception as e: - logger.warning(f"Failed to save output items to conversation: {e}", exc_info=True) + logger.debug("Saved %d output items to conversation %s", 
len(items_to_save), conversation_id) + except Exception as e: # pylint: disable=broad-exception-caught # best-effort conversation persistence + logger.warning("Failed to save output items to conversation: %s", e, exc_info=True) async def _save_output_events_to_conversation(self, context: AgentRunContext, events: list) -> None: """Persist streaming output events for later retrieval. @@ -433,9 +440,9 @@ async def _save_output_events_to_conversation(self, context: AgentRunContext, ev conversation_id=conversation_id, items=items_to_save, ) - logger.debug(f"Saved {len(items_to_save)} output items to conversation {conversation_id}") - except Exception as e: - logger.warning(f"Failed to save output items to conversation: {e}", exc_info=True) + logger.debug("Saved %d output items to conversation %s", len(items_to_save), conversation_id) + except Exception as e: # pylint: disable=broad-exception-caught # best-effort conversation persistence + logger.warning("Failed to save output items to conversation: %s", e, exc_info=True) @abstractmethod async def agent_run( @@ -557,10 +564,10 @@ async def respond_with_oauth_consent_astream(self, context, error) -> AsyncGener }) yield project_models.ResponseCompletedEvent(sequence_number=sequence_number, response=response) - async def agent_liveness(self, request) -> Union[Response, dict]: + async def agent_liveness(self, request) -> Union[Response, dict]: # pylint: disable=unused-argument return Response(status_code=200) - async def agent_readiness(self, request) -> Union[Response, dict]: + async def agent_readiness(self, request) -> Union[Response, dict]: # pylint: disable=unused-argument return {"status": "ready"} async def run_async( @@ -577,7 +584,7 @@ async def run_async( config = uvicorn.Config(self.app, host="0.0.0.0", port=port, loop="asyncio") server = uvicorn.Server(config) self._port = port - logger.info(f"Starting FoundryCBAgent server async on port {port}") + logger.info("Starting FoundryCBAgent server async on port %s", 
port) await server.serve() def run(self, port: int = int(os.environ.get("DEFAULT_AD_PORT", 8088))) -> None: @@ -593,7 +600,7 @@ def run(self, port: int = int(os.environ.get("DEFAULT_AD_PORT", 8088))) -> None: """ self.init_tracing() self._port = port - logger.info(f"Starting FoundryCBAgent server on port {port}") + logger.info("Starting FoundryCBAgent server on port %s", port) uvicorn.run(self.app, host="0.0.0.0", port=port) def init_tracing(self): @@ -618,7 +625,9 @@ def get_trace_attributes(self): "service.name": "azure.ai.agentserver", } - def init_tracing_internal(self, exporter_endpoint=None, app_insights_conn_str=None): + def init_tracing_internal( # pylint: disable=unused-argument # base class hook, params used by subclasses + self, exporter_endpoint=None, app_insights_conn_str=None + ): pass def setup_application_insights_exporter(self, connection_string, provider): @@ -638,7 +647,7 @@ def setup_otlp_exporter(self, endpoint, provider): exporter_instance = OTLPSpanExporter(endpoint=endpoint) processor = BatchSpanProcessor(exporter_instance) provider.add_span_processor(processor) - logger.info(f"Tracing setup with OTLP exporter: {endpoint}") + logger.info("Tracing setup with OTLP exporter: %s", endpoint) def create_response_headers(self) -> dict[str, str]: headers = {} diff --git a/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/server/common/agent_run_context.py b/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/server/common/agent_run_context.py index 87c32926bde4..174685f652fe 100644 --- a/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/server/common/agent_run_context.py +++ b/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/server/common/agent_run_context.py @@ -7,7 +7,7 @@ from .id_generator.id_generator import IdGenerator from ...logger import get_logger from ...models import CreateResponse -from ...models.projects import AgentId, AgentReference, ResponseConversation1 +from 
...models._projects import AgentId, AgentReference, ResponseConversation1 logger = get_logger() @@ -48,10 +48,10 @@ def conversation_id(self) -> Optional[str]: def stream(self) -> bool: return self._stream - def get_agent_id_object(self) -> AgentId: + def get_agent_id_object(self) -> Optional[AgentId]: agent = self.request.get("agent") if not agent: - return None # type: ignore + return None return AgentId( { "type": agent.type, @@ -60,9 +60,9 @@ def get_agent_id_object(self) -> AgentId: } ) - def get_conversation_object(self) -> ResponseConversation1: + def get_conversation_object(self) -> Optional[ResponseConversation1]: if not self._conversation_id: - return None # type: ignore + return None return ResponseConversation1(id=self._conversation_id) @@ -75,11 +75,11 @@ def _deserialize_create_response(payload: dict) -> CreateResponse: tools = payload.get("tools") if tools: - _deserialized["tools"] = [tool for tool in tools] # pylint: disable=unnecessary-comprehension + _deserialized["tools"] = list(tools) return _deserialized -def _deserialize_agent_reference(payload: dict) -> AgentReference: +def _deserialize_agent_reference(payload: dict) -> Optional[AgentReference]: if not payload: - return None # type: ignore + return None return AgentReference(**payload) diff --git a/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/server/common/id_generator/foundry_id_generator.py b/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/server/common/id_generator/foundry_id_generator.py index 01ac72289e4e..4d9cc741ec81 100644 --- a/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/server/common/id_generator/foundry_id_generator.py +++ b/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/server/common/id_generator/foundry_id_generator.py @@ -1,4 +1,3 @@ -# pylint: disable=docstring-missing-return,docstring-missing-param,docstring-missing-rtype # --------------------------------------------------------- # 
Copyright (c) Microsoft Corporation. All rights reserved. # --------------------------------------------------------- @@ -26,6 +25,13 @@ class FoundryIdGenerator(IdGenerator): """ def __init__(self, response_id: Optional[str], conversation_id: Optional[str]): + """Initialize the ID generator. + + :param response_id: An existing response ID, or ``None`` to generate one. + :type response_id: Optional[str] + :param conversation_id: An existing conversation ID, or ``None``. + :type conversation_id: Optional[str] + """ self.response_id = response_id or self._new_id("resp") self.conversation_id = conversation_id partition_source = self.conversation_id or self.response_id @@ -36,6 +42,13 @@ def __init__(self, response_id: Optional[str], conversation_id: Optional[str]): @classmethod def from_request(cls, payload: dict) -> "FoundryIdGenerator": + """Create a generator from an incoming request payload. + + :param payload: The raw request payload dictionary. + :type payload: dict + :return: A configured :class:`FoundryIdGenerator` instance. + :rtype: FoundryIdGenerator + """ response_id = payload.get("metadata", {}).get("response_id", None) conv_id_raw = payload.get("conversation", None) if isinstance(conv_id_raw, str): @@ -47,6 +60,13 @@ def from_request(cls, payload: dict) -> "FoundryIdGenerator": return cls(response_id, conv_id) def generate(self, category: Optional[str] = None) -> str: + """Generate a new unique ID for the given category. + + :param category: Optional prefix category (e.g. ``"msg"``, ``"func"``). Defaults to ``"id"``. + :type category: Optional[str] + :return: The generated unique identifier string. + :rtype: str + """ prefix = "id" if not category else category return self._new_id(prefix, partition_key=self._partition_id) @@ -63,12 +83,29 @@ def _new_id( partition_key: Optional[str] = None, partition_key_hint: str = "", ) -> str: - """ - Generates a new ID. 
- - Format matches the C# logic: - f"{prefix}{delimiter}{infix}{partitionKey}{entropy}" - (i.e., exactly one delimiter after prefix; no delimiter between entropy and partition key) + """Generate a new ID matching the C# FoundryIdGenerator format. + + Format: ``"{prefix}{delimiter}{infix}{partitionKey}{entropy}"`` + + :param prefix: The ID prefix (e.g. ``"resp"``, ``"msg"``). + :type prefix: str + :param string_length: Length of the random entropy portion. + :type string_length: int + :param partition_key_length: Length of the partition key. + :type partition_key_length: int + :param infix: Optional infix inserted between delimiter and partition key. + :type infix: Optional[str] + :param watermark: Optional alphanumeric watermark inserted mid-entropy. + :type watermark: str + :param delimiter: Delimiter between prefix and the rest of the ID. + :type delimiter: str + :param partition_key: Explicit partition key; if ``None``, derived or generated. + :type partition_key: Optional[str] + :param partition_key_hint: ID string to extract a partition key from. + :type partition_key_hint: str + :return: The generated ID string. + :rtype: str + :raises ValueError: If the watermark contains non-alphanumeric characters. """ entropy = FoundryIdGenerator._secure_entropy(string_length) @@ -96,10 +133,16 @@ def _new_id( @staticmethod def _secure_entropy(string_length: int) -> str: - """ - Generates a secure random alphanumeric string of exactly `string_length`. - Re-tries whole generation until the filtered base64 string is exactly the desired length, - matching the C# behavior. + """Generate a cryptographically secure alphanumeric string. + + Uses :func:`os.urandom` and base64 encoding, filtering to alphanumeric + characters and retrying until the exact length is reached. + + :param string_length: Desired length of the output string. + :type string_length: int + :return: A random alphanumeric string of exactly *string_length* characters. 
+ :rtype: str + :raises ValueError: If *string_length* is less than 1. """ if string_length < 1: raise ValueError("Must be greater than or equal to 1") @@ -120,11 +163,22 @@ def _extract_partition_id( partition_key_length: int = 18, delimiter: str = "_", ) -> str: - """ - Extracts partition key from an existing ID. - - Expected shape (per C# logic): "_" - We take the last `partition_key_length` characters from the *second* segment. + """Extract the partition key from an existing ID. + + Expected shape: ``"_"``. + Returns the first *partition_key_length* characters of the second segment. + + :param id_str: The ID string to extract from. + :type id_str: str + :param string_length: Expected entropy length used for validation. + :type string_length: int + :param partition_key_length: Number of characters to extract as partition key. + :type partition_key_length: int + :param delimiter: The delimiter separating ID segments. + :type delimiter: str + :return: The extracted partition key. + :rtype: str + :raises ValueError: If the ID format is invalid. """ if not id_str: raise ValueError("Id cannot be null or empty") diff --git a/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/tools/client/_client.py b/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/tools/client/_client.py index 12b647d7adc7..0efcf1c6f20b 100644 --- a/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/tools/client/_client.py +++ b/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/tools/client/_client.py @@ -1,7 +1,7 @@ # --------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. 
# --------------------------------------------------------- -import asyncio # pylint: disable=C4763 +import asyncio # pylint: disable=C4763 # azure-sdk: async-client-bad-name import itertools from collections import defaultdict from typing import ( @@ -37,7 +37,7 @@ from .._exceptions import ToolInvocationError -class FoundryToolClient(AsyncContextManager["FoundryToolClient"]): # pylint: disable=C4748 +class FoundryToolClient(AsyncContextManager["FoundryToolClient"]): # pylint: disable=C4748 # azure-sdk: client-paging-methods-use-list """Asynchronous client for aggregating tools from Azure AI MCP and Tools APIs. This client provides access to tools from both MCP (Model Context Protocol) servers @@ -55,7 +55,7 @@ class FoundryToolClient(AsyncContextManager["FoundryToolClient"]): # pylint: di :type api_version: str or None """ - def __init__( # pylint: disable=C4718 + def __init__( # pylint: disable=C4718 # azure-sdk: client-method-name-no-double-underscore self, endpoint: str, credential: "AsyncTokenCredential", diff --git a/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/tools/runtime/_catalog.py b/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/tools/runtime/_catalog.py index 2d50089fef8f..c75532f0d3e4 100644 --- a/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/tools/runtime/_catalog.py +++ b/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/tools/runtime/_catalog.py @@ -1,7 +1,7 @@ # --------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. 
# --------------------------------------------------------- -import asyncio # pylint: disable=C4763 +import asyncio # pylint: disable=C4763 # azure-sdk: async-client-bad-name from abc import ABC, abstractmethod from typing import Any, Awaitable, Collection, List, Mapping, MutableMapping, Optional, Union diff --git a/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/tools/runtime/_starlette.py b/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/tools/runtime/_starlette.py index 80b25d78b20e..9604124cde9b 100644 --- a/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/tools/runtime/_starlette.py +++ b/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/tools/runtime/_starlette.py @@ -40,8 +40,9 @@ def install(cls, :type user_resolver: Optional[Callable[[Request], Awaitable[Optional[UserInfo]]]] """ + user_info_var : _UserContextType = user_context or ContextVarUserProvider.default_user_info_context app.add_middleware(UserInfoContextMiddleware, # type: ignore[arg-type] - user_info_var=user_context or ContextVarUserProvider.default_user_info_context, + user_info_var=user_info_var, user_resolver=user_resolver or cls._default_user_resolver) @staticmethod diff --git a/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/utils/_credential.py b/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/utils/_credential.py index 398a8c46fd5d..0b6600de7d6a 100644 --- a/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/utils/_credential.py +++ b/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/utils/_credential.py @@ -3,7 +3,7 @@ # --------------------------------------------------------- from __future__ import annotations -import asyncio # pylint: disable=C4763 +import asyncio # pylint: disable=C4763 # azure-sdk: async-client-bad-name import inspect from types import TracebackType from typing import Any, Type, cast @@ -12,7 +12,7 @@ from 
azure.core.credentials_async import AsyncTokenCredential -async def _to_thread(func, *args, **kwargs): # pylint: disable=C4743 +async def _to_thread(func, *args, **kwargs): # pylint: disable=C4743 # azure-sdk: client-method-should-not-use-static-method """Compatibility wrapper for asyncio.to_thread (Python 3.8+). :param func: The function to run in a thread. diff --git a/sdk/agentserver/azure-ai-agentserver-core/doc/azure.ai.agentserver.core.checkpoints.client.operations.rst b/sdk/agentserver/azure-ai-agentserver-core/doc/azure.ai.agentserver.core.checkpoints.client.operations.rst new file mode 100644 index 000000000000..3076ff010e1b --- /dev/null +++ b/sdk/agentserver/azure-ai-agentserver-core/doc/azure.ai.agentserver.core.checkpoints.client.operations.rst @@ -0,0 +1,7 @@ +azure.ai.agentserver.core.checkpoints.client.operations package +=============================================================== + +.. automodule:: azure.ai.agentserver.core.checkpoints.client.operations + :inherited-members: + :members: + :undoc-members: diff --git a/sdk/agentserver/azure-ai-agentserver-core/doc/azure.ai.agentserver.core.checkpoints.client.rst b/sdk/agentserver/azure-ai-agentserver-core/doc/azure.ai.agentserver.core.checkpoints.client.rst new file mode 100644 index 000000000000..cd6763335948 --- /dev/null +++ b/sdk/agentserver/azure-ai-agentserver-core/doc/azure.ai.agentserver.core.checkpoints.client.rst @@ -0,0 +1,15 @@ +azure.ai.agentserver.core.checkpoints.client package +==================================================== + +.. automodule:: azure.ai.agentserver.core.checkpoints.client + :inherited-members: + :members: + :undoc-members: + +Subpackages +----------- + +.. 
toctree:: + :maxdepth: 4 + + azure.ai.agentserver.core.checkpoints.client.operations diff --git a/sdk/agentserver/azure-ai-agentserver-core/doc/azure.ai.agentserver.core.checkpoints.rst b/sdk/agentserver/azure-ai-agentserver-core/doc/azure.ai.agentserver.core.checkpoints.rst new file mode 100644 index 000000000000..99b9dfa2ef50 --- /dev/null +++ b/sdk/agentserver/azure-ai-agentserver-core/doc/azure.ai.agentserver.core.checkpoints.rst @@ -0,0 +1,15 @@ +azure.ai.agentserver.core.checkpoints package +============================================= + +.. automodule:: azure.ai.agentserver.core.checkpoints + :inherited-members: + :members: + :undoc-members: + +Subpackages +----------- + +.. toctree:: + :maxdepth: 4 + + azure.ai.agentserver.core.checkpoints.client diff --git a/sdk/agentserver/azure-ai-agentserver-core/doc/azure.ai.agentserver.core.models.openai.rst b/sdk/agentserver/azure-ai-agentserver-core/doc/azure.ai.agentserver.core.models.openai.rst deleted file mode 100644 index dd1cce6eecca..000000000000 --- a/sdk/agentserver/azure-ai-agentserver-core/doc/azure.ai.agentserver.core.models.openai.rst +++ /dev/null @@ -1,8 +0,0 @@ -azure.ai.agentserver.core.models.openai package -=============================================== - -.. automodule:: azure.ai.agentserver.core.models.openai - :inherited-members: - :members: - :undoc-members: - :ignore-module-all: diff --git a/sdk/agentserver/azure-ai-agentserver-core/doc/azure.ai.agentserver.core.models.projects.rst b/sdk/agentserver/azure-ai-agentserver-core/doc/azure.ai.agentserver.core.models.projects.rst deleted file mode 100644 index 38e0be4f331b..000000000000 --- a/sdk/agentserver/azure-ai-agentserver-core/doc/azure.ai.agentserver.core.models.projects.rst +++ /dev/null @@ -1,8 +0,0 @@ -azure.ai.agentserver.core.models.projects package -================================================= - -.. 
automodule:: azure.ai.agentserver.core.models.projects - :inherited-members: - :members: - :undoc-members: - :ignore-module-all: diff --git a/sdk/agentserver/azure-ai-agentserver-core/doc/azure.ai.agentserver.core.models.rst b/sdk/agentserver/azure-ai-agentserver-core/doc/azure.ai.agentserver.core.models.rst index 008b280c64de..120b01cccc5a 100644 --- a/sdk/agentserver/azure-ai-agentserver-core/doc/azure.ai.agentserver.core.models.rst +++ b/sdk/agentserver/azure-ai-agentserver-core/doc/azure.ai.agentserver.core.models.rst @@ -6,12 +6,3 @@ azure.ai.agentserver.core.models package :members: :undoc-members: :ignore-module-all: - -Subpackages ------------ - -.. toctree:: - :maxdepth: 4 - - azure.ai.agentserver.core.models.openai - azure.ai.agentserver.core.models.projects diff --git a/sdk/agentserver/azure-ai-agentserver-core/doc/azure.ai.agentserver.core.rst b/sdk/agentserver/azure-ai-agentserver-core/doc/azure.ai.agentserver.core.rst index b8f1dadf3a73..60005f2b04cc 100644 --- a/sdk/agentserver/azure-ai-agentserver-core/doc/azure.ai.agentserver.core.rst +++ b/sdk/agentserver/azure-ai-agentserver-core/doc/azure.ai.agentserver.core.rst @@ -13,6 +13,7 @@ Subpackages :maxdepth: 4 azure.ai.agentserver.core.application + azure.ai.agentserver.core.checkpoints azure.ai.agentserver.core.models azure.ai.agentserver.core.server azure.ai.agentserver.core.tools diff --git a/sdk/agentserver/azure-ai-agentserver-core/doc/azure.ai.agentserver.core.tools.client.rst b/sdk/agentserver/azure-ai-agentserver-core/doc/azure.ai.agentserver.core.tools.client.rst index 8182914f69f9..14304731f5e7 100644 --- a/sdk/agentserver/azure-ai-agentserver-core/doc/azure.ai.agentserver.core.tools.client.rst +++ b/sdk/agentserver/azure-ai-agentserver-core/doc/azure.ai.agentserver.core.tools.client.rst @@ -2,6 +2,6 @@ azure.ai.agentserver.core.tools.client package ============================================== .. 
automodule:: azure.ai.agentserver.core.tools.client - :inherited-members: + :inherited-members: BaseModel :members: :undoc-members: diff --git a/sdk/agentserver/azure-ai-agentserver-core/doc/azure.ai.agentserver.core.tools.rst b/sdk/agentserver/azure-ai-agentserver-core/doc/azure.ai.agentserver.core.tools.rst index c112ec2beabd..6b798851fed2 100644 --- a/sdk/agentserver/azure-ai-agentserver-core/doc/azure.ai.agentserver.core.tools.rst +++ b/sdk/agentserver/azure-ai-agentserver-core/doc/azure.ai.agentserver.core.tools.rst @@ -2,10 +2,9 @@ azure.ai.agentserver.core.tools package ======================================= .. automodule:: azure.ai.agentserver.core.tools - :inherited-members: + :inherited-members: BaseModel :members: :undoc-members: - :exclude-members: BaseModel,model_json_schema Subpackages ----------- diff --git a/sdk/agentserver/azure-ai-agentserver-core/doc/index.md b/sdk/agentserver/azure-ai-agentserver-core/doc/index.md new file mode 100644 index 000000000000..bfce99e40065 --- /dev/null +++ b/sdk/agentserver/azure-ai-agentserver-core/doc/index.md @@ -0,0 +1,136 @@ +# Azure AI Agent Server Adapter for Python + + +## Getting started + +```bash +pip install azure-ai-agentserver-core +``` + +## Key concepts + +This is the core package for Azure AI Agent server. It hosts your agent as a container on the cloud. + +You can talk to your agent using azure-ai-project sdk. + + +## Examples + +If your agent is not built using a supported framework such as LangGraph and Agent-framework, you can still make it compatible with Microsoft AI Foundry by manually implementing the predefined interface. 
+ +```python +import datetime + +from azure.ai.agentserver.core import FoundryCBAgent +from azure.ai.agentserver.core.models import ( + CreateResponse, + Response as OpenAIResponse, +) +from azure.ai.agentserver.core.models._projects import ( + ItemContentOutputText, + ResponsesAssistantMessageItemResource, + ResponseTextDeltaEvent, + ResponseTextDoneEvent, +) + + +def stream_events(text: str): + assembled = "" + for i, token in enumerate(text.split(" ")): + piece = token if i == len(text.split(" ")) - 1 else token + " " + assembled += piece + yield ResponseTextDeltaEvent(delta=piece) + # Done with text + yield ResponseTextDoneEvent(text=assembled) + + +async def agent_run(request_body: CreateResponse): + agent = request_body.agent + print(f"agent:{agent}") + + if request_body.stream: + return stream_events("I am mock agent with no intelligence in stream mode.") + + # Build assistant output content + output_content = [ + ItemContentOutputText( + text="I am mock agent with no intelligence.", + annotations=[], + ) + ] + + response = OpenAIResponse( + metadata={}, + temperature=0.0, + top_p=0.0, + user="me", + id="id", + created_at=datetime.datetime.now(), + output=[ + ResponsesAssistantMessageItemResource( + status="completed", + content=output_content, + ) + ], + ) + return response + + +my_agent = FoundryCBAgent() +my_agent.agent_run = agent_run + +if __name__ == "__main__": + my_agent.run() + +``` + +## Troubleshooting + +First run your agent with azure-ai-agentserver-core locally. + +If it works on local by failed on cloud. Check your logs in the application insight connected to your Azure AI Foundry Project. + + +### Reporting issues + +To report an issue with the client library, or request additional features, please open a GitHub issue [here](https://github.com/Azure/azure-sdk-for-python/issues). Mention the package name "azure-ai-agents" in the title or content. 
+ + ## Next steps + Please visit the [Samples](https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/agentserver/azure-ai-agentserver-core/samples) folder. It contains several examples that show how to build your agent with azure-ai-agentserver. + + ## Contributing + This project welcomes contributions and suggestions. Most contributions require +you to agree to a Contributor License Agreement (CLA) declaring that you have +the right to, and actually do, grant us the rights to use your contribution. +For details, visit https://cla.microsoft.com. + +When you submit a pull request, a CLA-bot will automatically determine whether +you need to provide a CLA and decorate the PR appropriately (e.g., label, +comment). Simply follow the instructions provided by the bot. You will only +need to do this once across all repos using our CLA. + +This project has adopted the +[Microsoft Open Source Code of Conduct][code_of_conduct]. For more information, +see the Code of Conduct FAQ or contact opencode@microsoft.com with any +additional questions or comments. 
+ + +## Indices and tables + +- {ref}`genindex` +- {ref}`modindex` +- {ref}`search` + +```{toctree} +:caption: Developer Documentation +:glob: true +:maxdepth: 5 + +azure.ai.agentserver.core.rst + +``` + diff --git a/sdk/agentserver/azure-ai-agentserver-core/pyproject.toml b/sdk/agentserver/azure-ai-agentserver-core/pyproject.toml index 3829a7356919..dca59dffef43 100644 --- a/sdk/agentserver/azure-ai-agentserver-core/pyproject.toml +++ b/sdk/agentserver/azure-ai-agentserver-core/pyproject.toml @@ -75,5 +75,5 @@ combine-as-imports = true breaking = false # incompatible python version pyright = false verifytypes = false -latestdependency = false -dependencies = false \ No newline at end of file +# latestdependency = false +# dependencies = false \ No newline at end of file diff --git a/sdk/agentserver/azure-ai-agentserver-core/samples/bilingual_weekend_planner/main.py b/sdk/agentserver/azure-ai-agentserver-core/samples/bilingual_weekend_planner/main.py index 099d8dc45181..2cf533eb33fb 100644 --- a/sdk/agentserver/azure-ai-agentserver-core/samples/bilingual_weekend_planner/main.py +++ b/sdk/agentserver/azure-ai-agentserver-core/samples/bilingual_weekend_planner/main.py @@ -33,7 +33,7 @@ CreateResponse, Response as OpenAIResponse, ) -from azure.ai.agentserver.core.models.projects import ( +from azure.ai.agentserver.core.models._projects import ( ItemContentOutputText, ResponseCompletedEvent, ResponseCreatedEvent, diff --git a/sdk/agentserver/azure-ai-agentserver-core/samples/mcp_simple/mcp_simple.py b/sdk/agentserver/azure-ai-agentserver-core/samples/mcp_simple/mcp_simple.py index af9812826941..3831f702564d 100644 --- a/sdk/agentserver/azure-ai-agentserver-core/samples/mcp_simple/mcp_simple.py +++ b/sdk/agentserver/azure-ai-agentserver-core/samples/mcp_simple/mcp_simple.py @@ -29,7 +29,7 @@ from azure.ai.agentserver.core import AgentRunContext, FoundryCBAgent from azure.ai.agentserver.core.models import Response as OpenAIResponse -from 
azure.ai.agentserver.core.models.projects import ( +from azure.ai.agentserver.core.models._projects import ( ItemContentOutputText, MCPListToolsItemResource, MCPListToolsTool, diff --git a/sdk/agentserver/azure-ai-agentserver-core/samples/simple_mock_agent/custom_mock_agent_test.py b/sdk/agentserver/azure-ai-agentserver-core/samples/simple_mock_agent/custom_mock_agent_test.py index f6d2c08bb0b9..f4298d21d39c 100644 --- a/sdk/agentserver/azure-ai-agentserver-core/samples/simple_mock_agent/custom_mock_agent_test.py +++ b/sdk/agentserver/azure-ai-agentserver-core/samples/simple_mock_agent/custom_mock_agent_test.py @@ -3,7 +3,7 @@ from azure.ai.agentserver.core import AgentRunContext, FoundryCBAgent from azure.ai.agentserver.core.models import Response as OpenAIResponse -from azure.ai.agentserver.core.models.projects import ( +from azure.ai.agentserver.core.models._projects import ( ItemContentOutputText, ResponseCompletedEvent, ResponseCreatedEvent, diff --git a/sdk/agentserver/azure-ai-agentserver-core/tests/unit_tests/server/test_response_metadata.py b/sdk/agentserver/azure-ai-agentserver-core/tests/unit_tests/server/test_response_metadata.py index c2e3bea53287..f01c4977cfb0 100644 --- a/sdk/agentserver/azure-ai-agentserver-core/tests/unit_tests/server/test_response_metadata.py +++ b/sdk/agentserver/azure-ai-agentserver-core/tests/unit_tests/server/test_response_metadata.py @@ -10,7 +10,7 @@ set_current_app, ) from azure.ai.agentserver.core.models import Response as OpenAIResponse -from azure.ai.agentserver.core.models.projects import ResponseCreatedEvent, ResponseErrorEvent +from azure.ai.agentserver.core.models._projects import ResponseCreatedEvent, ResponseErrorEvent from azure.ai.agentserver.core.server._response_metadata import ( METADATA_KEY, attach_foundry_metadata_to_response, diff --git a/sdk/agentserver/azure-ai-agentserver-core/tests/unit_tests/test_logger.py b/sdk/agentserver/azure-ai-agentserver-core/tests/unit_tests/test_logger.py index 
771ca0a0eb0c..35639ea8ae2c 100644 --- a/sdk/agentserver/azure-ai-agentserver-core/tests/unit_tests/test_logger.py +++ b/sdk/agentserver/azure-ai-agentserver-core/tests/unit_tests/test_logger.py @@ -78,31 +78,31 @@ def test_logs_warning_for_invalid_resource_id(self): @pytest.mark.unit class TestGetApplicationInsightsConnstr: - """Tests for get_application_insights_connstr function.""" + """Tests for _get_application_insights_connstr function.""" def test_returns_connstr_from_env_var(self): """Test that connection string is returned from environment variable.""" - from azure.ai.agentserver.core.logger import get_application_insights_connstr + from azure.ai.agentserver.core.logger import _get_application_insights_connstr with patch.dict(os.environ, {"APPLICATIONINSIGHTS_CONNECTION_STRING": "InstrumentationKey=test123"}, clear=False): - result = get_application_insights_connstr() + result = _get_application_insights_connstr() assert result == "InstrumentationKey=test123" def test_returns_none_when_no_connstr_and_no_project(self): """Test that None is returned when no connection string and no project endpoint.""" - from azure.ai.agentserver.core.logger import get_application_insights_connstr + from azure.ai.agentserver.core.logger import _get_application_insights_connstr with patch.dict(os.environ, { "APPLICATIONINSIGHTS_CONNECTION_STRING": "", "AZURE_AI_PROJECT_ENDPOINT": "", "AGENT_PROJECT_RESOURCE_ID": "", }, clear=False): - result = get_application_insights_connstr() + result = _get_application_insights_connstr() assert result is None or result == "" def test_logs_debug_when_not_configured(self): """Test that debug message is logged when not configured.""" - from azure.ai.agentserver.core.logger import get_application_insights_connstr + from azure.ai.agentserver.core.logger import _get_application_insights_connstr mock_logger = MagicMock() @@ -111,7 +111,7 @@ def test_logs_debug_when_not_configured(self): "AZURE_AI_PROJECT_ENDPOINT": "", "AGENT_PROJECT_RESOURCE_ID": 
"", }, clear=False): - result = get_application_insights_connstr(logger=mock_logger) + result = _get_application_insights_connstr(logger=mock_logger) # Debug should be called when not configured, or result should be None assert mock_logger.debug.called or result is None or result == "" diff --git a/sdk/agentserver/azure-ai-agentserver-langgraph/azure/ai/agentserver/langgraph/checkpointer/_foundry_checkpoint_saver.py b/sdk/agentserver/azure-ai-agentserver-langgraph/azure/ai/agentserver/langgraph/checkpointer/_foundry_checkpoint_saver.py index 999b87dc8fe8..82215640d60c 100644 --- a/sdk/agentserver/azure-ai-agentserver-langgraph/azure/ai/agentserver/langgraph/checkpointer/_foundry_checkpoint_saver.py +++ b/sdk/agentserver/azure-ai-agentserver-langgraph/azure/ai/agentserver/langgraph/checkpointer/_foundry_checkpoint_saver.py @@ -21,7 +21,7 @@ from azure.core.credentials import TokenCredential from azure.core.credentials_async import AsyncTokenCredential -from azure.ai.agentserver.core.checkpoints.client import ( +from azure.ai.agentserver.core.checkpoints import ( CheckpointItem, CheckpointItemId, CheckpointSession, diff --git a/sdk/agentserver/azure-ai-agentserver-langgraph/azure/ai/agentserver/langgraph/models/human_in_the_loop_helper.py b/sdk/agentserver/azure-ai-agentserver-langgraph/azure/ai/agentserver/langgraph/models/human_in_the_loop_helper.py index 9f3c693800a1..bc8890339dca 100644 --- a/sdk/agentserver/azure-ai-agentserver-langgraph/azure/ai/agentserver/langgraph/models/human_in_the_loop_helper.py +++ b/sdk/agentserver/azure-ai-agentserver-langgraph/azure/ai/agentserver/langgraph/models/human_in_the_loop_helper.py @@ -11,8 +11,8 @@ ) from azure.ai.agentserver.core.logger import get_logger -from azure.ai.agentserver.core.models import projects as project_models -from azure.ai.agentserver.core.models.openai import (ResponseInputItemParam, ResponseInputParam) +from azure.ai.agentserver.core.models import _projects as project_models +from 
azure.ai.agentserver.core.models._openai import (ResponseInputItemParam, ResponseInputParam) from .._context import LanggraphRunContext INTERRUPT_NODE_NAME = "__interrupt__" diff --git a/sdk/agentserver/azure-ai-agentserver-langgraph/azure/ai/agentserver/langgraph/models/human_in_the_loop_json_helper.py b/sdk/agentserver/azure-ai-agentserver-langgraph/azure/ai/agentserver/langgraph/models/human_in_the_loop_json_helper.py index e1396ba90577..e3bf49a5ad4b 100644 --- a/sdk/agentserver/azure-ai-agentserver-langgraph/azure/ai/agentserver/langgraph/models/human_in_the_loop_json_helper.py +++ b/sdk/agentserver/azure-ai-agentserver-langgraph/azure/ai/agentserver/langgraph/models/human_in_the_loop_json_helper.py @@ -11,8 +11,8 @@ ) from azure.ai.agentserver.core.logger import get_logger -from azure.ai.agentserver.core.models import projects as project_models -from azure.ai.agentserver.core.models.openai import ( +from azure.ai.agentserver.core.models import _projects as project_models +from azure.ai.agentserver.core.models._openai import ( ResponseInputItemParam, ) from azure.ai.agentserver.core.server.common.constants import HUMAN_IN_THE_LOOP_FUNCTION_NAME diff --git a/sdk/agentserver/azure-ai-agentserver-langgraph/azure/ai/agentserver/langgraph/models/response_api_non_stream_response_converter.py b/sdk/agentserver/azure-ai-agentserver-langgraph/azure/ai/agentserver/langgraph/models/response_api_non_stream_response_converter.py index 7ec8bdf14f1a..cb3ecefb60f5 100644 --- a/sdk/agentserver/azure-ai-agentserver-langgraph/azure/ai/agentserver/langgraph/models/response_api_non_stream_response_converter.py +++ b/sdk/agentserver/azure-ai-agentserver-langgraph/azure/ai/agentserver/langgraph/models/response_api_non_stream_response_converter.py @@ -11,7 +11,7 @@ from langchain_core.messages import AnyMessage from azure.ai.agentserver.core.logger import get_logger -from azure.ai.agentserver.core.models import projects as project_models +from azure.ai.agentserver.core.models import 
_projects as project_models from .human_in_the_loop_helper import ( HumanInTheLoopHelper, INTERRUPT_NODE_NAME, diff --git a/sdk/agentserver/azure-ai-agentserver-langgraph/azure/ai/agentserver/langgraph/models/response_api_request_converter.py b/sdk/agentserver/azure-ai-agentserver-langgraph/azure/ai/agentserver/langgraph/models/response_api_request_converter.py index 486545ef078a..f718695dbc1e 100644 --- a/sdk/agentserver/azure-ai-agentserver-langgraph/azure/ai/agentserver/langgraph/models/response_api_request_converter.py +++ b/sdk/agentserver/azure-ai-agentserver-langgraph/azure/ai/agentserver/langgraph/models/response_api_request_converter.py @@ -17,7 +17,9 @@ from langchain_core.messages.tool import ToolCall from azure.ai.agentserver.core.logger import get_logger -from azure.ai.agentserver.core.models import CreateResponse, openai as openai_models, projects as project_models +from azure.ai.agentserver.core.models import ( + CreateResponse, _openai as openai_models, _projects as project_models +) logger = get_logger() diff --git a/sdk/agentserver/azure-ai-agentserver-langgraph/azure/ai/agentserver/langgraph/models/response_event_generators/item_content_helpers.py b/sdk/agentserver/azure-ai-agentserver-langgraph/azure/ai/agentserver/langgraph/models/response_event_generators/item_content_helpers.py index ae169d866ee5..807d759cb151 100644 --- a/sdk/agentserver/azure-ai-agentserver-langgraph/azure/ai/agentserver/langgraph/models/response_event_generators/item_content_helpers.py +++ b/sdk/agentserver/azure-ai-agentserver-langgraph/azure/ai/agentserver/langgraph/models/response_event_generators/item_content_helpers.py @@ -1,7 +1,7 @@ # --------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. 
# --------------------------------------------------------- -from azure.ai.agentserver.core.models import projects as project_models +from azure.ai.agentserver.core.models import _projects as project_models class ItemContentHelper: diff --git a/sdk/agentserver/azure-ai-agentserver-langgraph/azure/ai/agentserver/langgraph/models/response_event_generators/item_resource_helpers.py b/sdk/agentserver/azure-ai-agentserver-langgraph/azure/ai/agentserver/langgraph/models/response_event_generators/item_resource_helpers.py index 8502ec13069b..9f5f1ac6d55b 100644 --- a/sdk/agentserver/azure-ai-agentserver-langgraph/azure/ai/agentserver/langgraph/models/response_event_generators/item_resource_helpers.py +++ b/sdk/agentserver/azure-ai-agentserver-langgraph/azure/ai/agentserver/langgraph/models/response_event_generators/item_resource_helpers.py @@ -6,7 +6,7 @@ from langgraph.types import Interrupt -from azure.ai.agentserver.core.models import projects as project_models +from azure.ai.agentserver.core.models import _projects as project_models from ..human_in_the_loop_helper import HumanInTheLoopHelper from ..utils import extract_function_call diff --git a/sdk/agentserver/azure-ai-agentserver-langgraph/azure/ai/agentserver/langgraph/models/response_event_generators/response_content_part_event_generator.py b/sdk/agentserver/azure-ai-agentserver-langgraph/azure/ai/agentserver/langgraph/models/response_event_generators/response_content_part_event_generator.py index 4823de4411ae..8b989bc20da3 100644 --- a/sdk/agentserver/azure-ai-agentserver-langgraph/azure/ai/agentserver/langgraph/models/response_event_generators/response_content_part_event_generator.py +++ b/sdk/agentserver/azure-ai-agentserver-langgraph/azure/ai/agentserver/langgraph/models/response_event_generators/response_content_part_event_generator.py @@ -7,7 +7,7 @@ from langchain_core import messages as langgraph_messages -from azure.ai.agentserver.core.models import projects as project_models +from 
azure.ai.agentserver.core.models import _projects as project_models from . import item_content_helpers from .response_event_generator import ResponseEventGenerator, StreamEventState diff --git a/sdk/agentserver/azure-ai-agentserver-langgraph/azure/ai/agentserver/langgraph/models/response_event_generators/response_event_generator.py b/sdk/agentserver/azure-ai-agentserver-langgraph/azure/ai/agentserver/langgraph/models/response_event_generators/response_event_generator.py index cd161b99d152..843cf18fe2dc 100644 --- a/sdk/agentserver/azure-ai-agentserver-langgraph/azure/ai/agentserver/langgraph/models/response_event_generators/response_event_generator.py +++ b/sdk/agentserver/azure-ai-agentserver-langgraph/azure/ai/agentserver/langgraph/models/response_event_generators/response_event_generator.py @@ -7,7 +7,7 @@ from langchain_core.messages import AnyMessage -from azure.ai.agentserver.core.models import projects as project_models +from azure.ai.agentserver.core.models import _projects as project_models from ..._context import LanggraphRunContext diff --git a/sdk/agentserver/azure-ai-agentserver-langgraph/azure/ai/agentserver/langgraph/models/response_event_generators/response_function_call_argument_event_generator.py b/sdk/agentserver/azure-ai-agentserver-langgraph/azure/ai/agentserver/langgraph/models/response_event_generators/response_function_call_argument_event_generator.py index 56c3bde68632..3a556fb70e7b 100644 --- a/sdk/agentserver/azure-ai-agentserver-langgraph/azure/ai/agentserver/langgraph/models/response_event_generators/response_function_call_argument_event_generator.py +++ b/sdk/agentserver/azure-ai-agentserver-langgraph/azure/ai/agentserver/langgraph/models/response_event_generators/response_function_call_argument_event_generator.py @@ -9,7 +9,7 @@ from langchain_core.messages import AnyMessage from langgraph.types import Interrupt -from azure.ai.agentserver.core.models import projects as project_models +from azure.ai.agentserver.core.models import 
_projects as project_models from . import ResponseEventGenerator, StreamEventState from ..human_in_the_loop_helper import HumanInTheLoopHelper from ..utils import extract_function_call diff --git a/sdk/agentserver/azure-ai-agentserver-langgraph/azure/ai/agentserver/langgraph/models/response_event_generators/response_output_item_event_generator.py b/sdk/agentserver/azure-ai-agentserver-langgraph/azure/ai/agentserver/langgraph/models/response_event_generators/response_output_item_event_generator.py index 14eee3c571b2..181952077875 100644 --- a/sdk/agentserver/azure-ai-agentserver-langgraph/azure/ai/agentserver/langgraph/models/response_event_generators/response_output_item_event_generator.py +++ b/sdk/agentserver/azure-ai-agentserver-langgraph/azure/ai/agentserver/langgraph/models/response_event_generators/response_output_item_event_generator.py @@ -9,7 +9,7 @@ from langchain_core.messages import AnyMessage from langgraph.types import Interrupt -from azure.ai.agentserver.core.models import projects as project_models +from azure.ai.agentserver.core.models import _projects as project_models from azure.ai.agentserver.core.server.common.id_generator.id_generator import IdGenerator from . 
import ResponseEventGenerator, StreamEventState, item_resource_helpers from .response_content_part_event_generator import ResponseContentPartEventGenerator diff --git a/sdk/agentserver/azure-ai-agentserver-langgraph/azure/ai/agentserver/langgraph/models/response_event_generators/response_output_text_event_generator.py b/sdk/agentserver/azure-ai-agentserver-langgraph/azure/ai/agentserver/langgraph/models/response_event_generators/response_output_text_event_generator.py index 8d0e62650a2d..dc64f37733bc 100644 --- a/sdk/agentserver/azure-ai-agentserver-langgraph/azure/ai/agentserver/langgraph/models/response_event_generators/response_output_text_event_generator.py +++ b/sdk/agentserver/azure-ai-agentserver-langgraph/azure/ai/agentserver/langgraph/models/response_event_generators/response_output_text_event_generator.py @@ -5,7 +5,7 @@ # mypy: disable-error-code="return-value,assignment" from typing import List -from azure.ai.agentserver.core.models import projects as project_models +from azure.ai.agentserver.core.models import _projects as project_models from .response_event_generator import ( ResponseEventGenerator, StreamEventState, diff --git a/sdk/agentserver/azure-ai-agentserver-langgraph/azure/ai/agentserver/langgraph/models/response_event_generators/response_stream_event_generator.py b/sdk/agentserver/azure-ai-agentserver-langgraph/azure/ai/agentserver/langgraph/models/response_event_generators/response_stream_event_generator.py index f19629eba94b..896e35829d98 100644 --- a/sdk/agentserver/azure-ai-agentserver-langgraph/azure/ai/agentserver/langgraph/models/response_event_generators/response_stream_event_generator.py +++ b/sdk/agentserver/azure-ai-agentserver-langgraph/azure/ai/agentserver/langgraph/models/response_event_generators/response_stream_event_generator.py @@ -8,7 +8,7 @@ from langchain_core import messages as langgraph_messages -from azure.ai.agentserver.core.models import projects as project_models +from azure.ai.agentserver.core.models import 
_projects as project_models from .response_event_generator import ( ResponseEventGenerator, StreamEventState, diff --git a/sdk/agentserver/azure-ai-agentserver-langgraph/tests/unit_tests/mocks/mock_checkpoint_client.py b/sdk/agentserver/azure-ai-agentserver-langgraph/tests/unit_tests/mocks/mock_checkpoint_client.py index ffc1e2fcc4c1..50a4458856ec 100644 --- a/sdk/agentserver/azure-ai-agentserver-langgraph/tests/unit_tests/mocks/mock_checkpoint_client.py +++ b/sdk/agentserver/azure-ai-agentserver-langgraph/tests/unit_tests/mocks/mock_checkpoint_client.py @@ -5,7 +5,7 @@ from typing import Any, Dict, List, Optional -from azure.ai.agentserver.core.checkpoints.client import ( +from azure.ai.agentserver.core.checkpoints import ( CheckpointItem, CheckpointItemId, CheckpointSession, diff --git a/sdk/agentserver/azure-ai-agentserver-langgraph/tests/unit_tests/test_langgraph_request_converter.py b/sdk/agentserver/azure-ai-agentserver-langgraph/tests/unit_tests/test_langgraph_request_converter.py index b1894f7350d5..056780cc9903 100644 --- a/sdk/agentserver/azure-ai-agentserver-langgraph/tests/unit_tests/test_langgraph_request_converter.py +++ b/sdk/agentserver/azure-ai-agentserver-langgraph/tests/unit_tests/test_langgraph_request_converter.py @@ -2,7 +2,7 @@ from langchain_core import messages as langgraph_messages from azure.ai.agentserver.core import models -from azure.ai.agentserver.core.models import projects as project_models +from azure.ai.agentserver.core.models import _projects as project_models from azure.ai.agentserver.langgraph.models.response_api_request_converter import ResponseAPIMessageRequestConverter