diff --git a/dotnet/Directory.Packages.props b/dotnet/Directory.Packages.props
index 0270f0e38b..fde4c86e16 100644
--- a/dotnet/Directory.Packages.props
+++ b/dotnet/Directory.Packages.props
@@ -107,6 +107,7 @@
+
diff --git a/dotnet/agent-framework-dotnet.slnx b/dotnet/agent-framework-dotnet.slnx
index 24b596509e..d070bbdb76 100644
--- a/dotnet/agent-framework-dotnet.slnx
+++ b/dotnet/agent-framework-dotnet.slnx
@@ -481,6 +481,7 @@
+
diff --git a/dotnet/src/Microsoft.Agents.AI.FoundryLocal/FoundryLocalChatClient.cs b/dotnet/src/Microsoft.Agents.AI.FoundryLocal/FoundryLocalChatClient.cs
new file mode 100644
index 0000000000..6a6efac9c6
--- /dev/null
+++ b/dotnet/src/Microsoft.Agents.AI.FoundryLocal/FoundryLocalChatClient.cs
@@ -0,0 +1,183 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+using System;
+using System.ClientModel;
+using System.Threading;
+using System.Threading.Tasks;
+using Microsoft.AI.Foundry.Local;
+using Microsoft.Extensions.AI;
+using Microsoft.Extensions.Logging;
+using Microsoft.Extensions.Logging.Abstractions;
+using Microsoft.Shared.Diagnostics;
+using OpenAI;
+
+namespace Microsoft.Agents.AI.FoundryLocal;
+
+/// <summary>
+/// A <see cref="IChatClient"/> that uses AI Foundry Local for on-device model inference.
+/// </summary>
+///
+///
+/// This client manages the lifecycle of a local AI model through the Foundry Local SDK,
+/// including model discovery, download, loading, and serving via an OpenAI-compatible HTTP endpoint.
+///
+///
+/// Because initialization requires asynchronous operations (model download, loading, and web service startup),
+/// instances must be created using the static factory method rather than
+/// a constructor.
+///
+///
+/// Internally, this client creates an pointed at the local Foundry endpoint
+/// (typically http://localhost:5272) and wraps it as an .
+/// This avoids conflicts with the Foundry Local SDK's internal use of a different OpenAI client library.
+///
+///
+#pragma warning disable OPENAI001
+public sealed class FoundryLocalChatClient : DelegatingChatClient
+{
+ private readonly ChatClientMetadata _metadata;
+
+ ///
+ /// Gets the instance managing the local model service.
+ ///
+ public FoundryLocalManager Manager { get; }
+
+ ///
+ /// Gets the resolved model identifier being used for inference.
+ ///
+ public string ModelId { get; }
+
+ private FoundryLocalChatClient(IChatClient innerClient, FoundryLocalManager manager, string modelId)
+ : base(innerClient)
+ {
+ this.Manager = manager;
+ this.ModelId = modelId;
+ this._metadata = new ChatClientMetadata("microsoft.foundry.local", defaultModelId: modelId);
+ }
+
+ ///
+ /// Creates a new instance with the specified options.
+ ///
+ /// The configuration options for the Foundry Local client. Cannot be .
+ /// An optional logger for diagnostic output during initialization.
+ /// A cancellation token that can be used to cancel the initialization.
+ /// A task that represents the asynchronous creation operation, containing the initialized .
+ /// Thrown when is .
+ ///
+ /// Thrown when the model cannot be resolved from the options or environment, when the specified model is not found
+ /// in the Foundry Local catalog, or when the web service endpoint is not available after startup.
+ ///
+ ///
+ ///
+ /// This method performs the following steps based on the provided :
+ ///
+ ///
+ /// - Resolves the model name from options or the FOUNDRY_LOCAL_MODEL environment variable.
+ /// - Bootstraps the if not already initialized (when is ).
+ /// - Resolves the model from the catalog using the model alias.
+ /// - Downloads and loads the model if is .
+ /// - Starts the web service endpoint if is .
+ /// - Creates an pointed at the local endpoint and wraps it as an .
+ ///
+ ///
+    public static async Task<FoundryLocalChatClient> CreateAsync(
+ FoundryLocalClientOptions options,
+ ILogger? logger = null,
+ CancellationToken cancellationToken = default)
+ {
+ Throw.IfNull(options);
+
+ logger ??= NullLogger.Instance;
+
+ // 1. Resolve model name
+ var modelName = options.ResolveModel();
+
+ // 2. Bootstrap FoundryLocalManager if needed
+ if (options.Bootstrap && !FoundryLocalManager.IsInitialized)
+ {
+ var webServiceUrl = options.WebServiceUrl?.ToString() ?? "http://localhost:5272";
+
+ var config = new Configuration
+ {
+ AppName = options.AppName,
+ Web = new Configuration.WebService { Urls = webServiceUrl },
+ };
+
+ await FoundryLocalManager.CreateAsync(config, logger, cancellationToken).ConfigureAwait(false);
+ }
+
+ if (!FoundryLocalManager.IsInitialized)
+ {
+ throw new InvalidOperationException(
+ "FoundryLocalManager is not initialized. Enable Bootstrap to initialize it automatically, " +
+ "or initialize FoundryLocalManager manually before creating a FoundryLocalChatClient.");
+ }
+
+ var manager = FoundryLocalManager.Instance;
+
+ // 3. Get catalog and resolve model
+ var catalog = await manager.GetCatalogAsync(cancellationToken).ConfigureAwait(false);
+ var model = await catalog.GetModelAsync(modelName, cancellationToken).ConfigureAwait(false);
+
+ if (model is null)
+ {
+ throw new InvalidOperationException(
+ $"Model with alias '{modelName}' was not found in the Foundry Local catalog. " +
+ "Use FoundryLocalManager to list available models.");
+ }
+
+ var resolvedModelId = model.Id;
+
+ // 4. Download and load model if requested
+ if (options.PrepareModel)
+ {
+ if (!await model.IsCachedAsync(cancellationToken).ConfigureAwait(false))
+ {
+ await model.DownloadAsync().ConfigureAwait(false);
+ }
+
+ if (!await model.IsLoadedAsync(cancellationToken).ConfigureAwait(false))
+ {
+ await model.LoadAsync(cancellationToken).ConfigureAwait(false);
+ }
+ }
+
+ // 5. Start web service if needed
+ if (options.StartWebService && manager.Urls is null)
+ {
+ await manager.StartWebServiceAsync(cancellationToken).ConfigureAwait(false);
+ }
+
+ var urls = manager.Urls;
+ if (urls is null || urls.Length == 0)
+ {
+ throw new InvalidOperationException(
+ "The Foundry Local web service is not running and no endpoint URLs are available. " +
+ "Ensure StartWebService is enabled or start the service manually.");
+ }
+
+ // 6. Create OpenAI client pointed at the local endpoint
+ // Foundry Local serves OpenAI-compatible API at /v1/ (e.g., /v1/chat/completions)
+ var endpointUrl = urls[0].TrimEnd('/') + "/v1";
+ var openAIClient = new OpenAIClient(
+ new ApiKeyCredential("foundry-local"),
+ new OpenAIClientOptions { Endpoint = new Uri(endpointUrl) });
+
+ // 7. Get ChatClient and wrap as IChatClient
+ var chatClient = openAIClient.GetChatClient(resolvedModelId);
+ var innerChatClient = chatClient.AsIChatClient();
+
+ return new FoundryLocalChatClient(innerChatClient, manager, resolvedModelId);
+ }
+
+ ///
+ public override object? GetService(Type serviceType, object? serviceKey = null)
+ {
+ return (serviceKey is null && serviceType == typeof(ChatClientMetadata))
+ ? this._metadata
+ : (serviceKey is null && serviceType == typeof(FoundryLocalManager))
+ ? this.Manager
+ : base.GetService(serviceType, serviceKey);
+ }
+}
+#pragma warning restore OPENAI001
diff --git a/dotnet/src/Microsoft.Agents.AI.FoundryLocal/FoundryLocalChatClientExtensions.cs b/dotnet/src/Microsoft.Agents.AI.FoundryLocal/FoundryLocalChatClientExtensions.cs
new file mode 100644
index 0000000000..fb8d9014e3
--- /dev/null
+++ b/dotnet/src/Microsoft.Agents.AI.FoundryLocal/FoundryLocalChatClientExtensions.cs
@@ -0,0 +1,89 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+using System;
+using System.Collections.Generic;
+using Microsoft.Extensions.AI;
+using Microsoft.Extensions.Logging;
+using Microsoft.Shared.Diagnostics;
+
+namespace Microsoft.Agents.AI.FoundryLocal;
+
+///
+/// Provides extension methods for
+/// to simplify the creation of AI agents that work with Foundry Local on-device models.
+///
+///
+/// These extensions bridge the gap between the Foundry Local chat client and the Microsoft Agent Framework,
+/// allowing developers to easily create AI agents that leverage local model inference.
+/// The methods wrap the <see cref="FoundryLocalChatClient"/> in <see cref="ChatClientAgent"/> objects
+/// that implement the interface.
+///
+public static class FoundryLocalChatClientExtensions
+{
+ ///
+ /// Creates an AI agent from a for local model inference.
+ ///
+ /// The to use for the agent. Cannot be .
+ /// Optional system instructions that define the agent's behavior and personality.
+ /// Optional name for the agent for identification purposes.
+ /// Optional description of the agent's capabilities and purpose.
+ /// Optional collection of AI tools that the agent can use during conversations.
+ /// Provides a way to customize the creation of the underlying used by the agent.
+ /// Optional logger factory for enabling logging within the agent.
+ /// An optional to use for resolving services required by the instances being invoked.
+ /// A instance backed by Foundry Local on-device inference.
+ /// Thrown when is .
+ public static ChatClientAgent AsAIAgent(
+ this FoundryLocalChatClient client,
+ string? instructions = null,
+ string? name = null,
+ string? description = null,
+        IList<AITool>? tools = null,
+        Func<IChatClient, IChatClient>? clientFactory = null,
+ ILoggerFactory? loggerFactory = null,
+ IServiceProvider? services = null) =>
+ client.AsAIAgent(
+ new ChatClientAgentOptions()
+ {
+ Name = name,
+ Description = description,
+ ChatOptions = tools is null && string.IsNullOrWhiteSpace(instructions) ? null : new ChatOptions()
+ {
+ Instructions = instructions,
+ Tools = tools,
+ }
+ },
+ clientFactory,
+ loggerFactory,
+ services);
+
+ ///
+ /// Creates an AI agent from a for local model inference.
+ ///
+ /// The to use for the agent. Cannot be .
+ /// Full set of options to configure the agent. Cannot be .
+ /// Provides a way to customize the creation of the underlying used by the agent.
+ /// Optional logger factory for enabling logging within the agent.
+ /// An optional to use for resolving services required by the instances being invoked.
+ /// A instance backed by Foundry Local on-device inference.
+ /// Thrown when or is .
+ public static ChatClientAgent AsAIAgent(
+ this FoundryLocalChatClient client,
+ ChatClientAgentOptions options,
+        Func<IChatClient, IChatClient>? clientFactory = null,
+ ILoggerFactory? loggerFactory = null,
+ IServiceProvider? services = null)
+ {
+ Throw.IfNull(client);
+ Throw.IfNull(options);
+
+ IChatClient chatClient = client;
+
+ if (clientFactory is not null)
+ {
+ chatClient = clientFactory(chatClient);
+ }
+
+ return new ChatClientAgent(chatClient, options, loggerFactory, services);
+ }
+}
diff --git a/dotnet/src/Microsoft.Agents.AI.FoundryLocal/FoundryLocalClientOptions.cs b/dotnet/src/Microsoft.Agents.AI.FoundryLocal/FoundryLocalClientOptions.cs
new file mode 100644
index 0000000000..60b4fcb717
--- /dev/null
+++ b/dotnet/src/Microsoft.Agents.AI.FoundryLocal/FoundryLocalClientOptions.cs
@@ -0,0 +1,94 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+using System;
+
+namespace Microsoft.Agents.AI.FoundryLocal;
+
+///
+/// Configuration options for creating a .
+///
+///
+///
+/// These options control how the Foundry Local manager is initialized, whether models are
+/// automatically downloaded and loaded, and whether the OpenAI-compatible HTTP endpoint is started.
+///
+///
+/// The property is required and specifies the model alias to use (e.g., "phi-4-mini").
+/// If not set explicitly, it can be resolved from the FOUNDRY_LOCAL_MODEL environment variable.
+///
+///
+public sealed class FoundryLocalClientOptions
+{
+ ///
+ /// Gets or sets the model alias or identifier to use (e.g., "phi-4-mini").
+ ///
+ ///
+ /// If not set, the value will be resolved from the FOUNDRY_LOCAL_MODEL environment variable.
+ /// This property must be set (either directly or via the environment variable) before creating a
+ /// .
+ ///
+ public string? Model { get; set; }
+
+ ///
+ /// Gets or sets the application name used when initializing the .
+ ///
+ /// The default value is "AgentFramework".
+ public string AppName { get; set; } = "AgentFramework";
+
+ ///
+ /// Gets or sets a value indicating whether to automatically create and initialize the
+ /// if it has not already been initialized.
+ ///
+ /// The default value is .
+ public bool Bootstrap { get; set; } = true;
+
+ ///
+ /// Gets or sets a value indicating whether to automatically download and load the specified model
+ /// during initialization.
+ ///
+ ///
+ /// When set to , the model will be downloaded to the local cache (if not already cached)
+ /// and loaded into the inference service. When set to , the model will be loaded on
+ /// the first inference request, which may cause a significant delay.
+ ///
+ /// The default value is .
+ public bool PrepareModel { get; set; } = true;
+
+ ///
+ /// Gets or sets a value indicating whether to start the OpenAI-compatible HTTP web service endpoint
+ /// if it is not already running.
+ ///
+ /// The default value is .
+ public bool StartWebService { get; set; } = true;
+
+ ///
+ /// Gets or sets an optional custom binding URL for the web service endpoint.
+ ///
+ ///
+ /// When set, this URL will be used to configure the web service binding via
+ /// .
+ /// When , the default URL (typically http://localhost:5272) is used.
+ ///
+ public Uri? WebServiceUrl { get; set; }
+
+ ///
+ /// Resolves the model name from the property or the FOUNDRY_LOCAL_MODEL environment variable.
+ ///
+ /// The resolved model name.
+ ///
+ /// Thrown when neither the property nor the FOUNDRY_LOCAL_MODEL environment variable is set.
+ ///
+ internal string ResolveModel()
+ {
+ var model = this.Model ?? Environment.GetEnvironmentVariable("FOUNDRY_LOCAL_MODEL");
+
+ if (string.IsNullOrWhiteSpace(model))
+ {
+ throw new InvalidOperationException(
+ "A model must be specified. Set the 'Model' property on FoundryLocalClientOptions " +
+ "or set the 'FOUNDRY_LOCAL_MODEL' environment variable.");
+ }
+
+ return model;
+ }
+}
diff --git a/dotnet/src/Microsoft.Agents.AI.FoundryLocal/Microsoft.Agents.AI.FoundryLocal.csproj b/dotnet/src/Microsoft.Agents.AI.FoundryLocal/Microsoft.Agents.AI.FoundryLocal.csproj
new file mode 100644
index 0000000000..02217a79b3
--- /dev/null
+++ b/dotnet/src/Microsoft.Agents.AI.FoundryLocal/Microsoft.Agents.AI.FoundryLocal.csproj
@@ -0,0 +1,43 @@
+
+
+
+
+ $(TargetFrameworksCore)
+
+ win-x64;win-arm64;linux-x64;linux-arm64;osx-arm64
+ false
+ true
+ $(NoWarn);OPENAI001
+
+
+
+
+
+
+
+ false
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Microsoft Agent Framework for Foundry Local
+ Provides Microsoft Agent Framework support for AI Foundry Local (on-device model inference).
+
+
+
+
+
+
+
+
diff --git a/dotnet/tests/FoundryLocal.SmokeTest/FoundryLocal.SmokeTest.csproj b/dotnet/tests/FoundryLocal.SmokeTest/FoundryLocal.SmokeTest.csproj
new file mode 100644
index 0000000000..ca8eae1486
--- /dev/null
+++ b/dotnet/tests/FoundryLocal.SmokeTest/FoundryLocal.SmokeTest.csproj
@@ -0,0 +1,35 @@
+
+
+
+ Exe
+ net9.0
+ win-x64;win-arm64;linux-x64;linux-arm64;osx-arm64
+ false
+ false
+ $(NoWarn);OPENAI001
+
+ false
+ false
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/dotnet/tests/FoundryLocal.SmokeTest/Program.cs b/dotnet/tests/FoundryLocal.SmokeTest/Program.cs
new file mode 100644
index 0000000000..a451120ee2
--- /dev/null
+++ b/dotnet/tests/FoundryLocal.SmokeTest/Program.cs
@@ -0,0 +1,44 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+using System;
+using System.Threading.Tasks;
+using Microsoft.Agents.AI;
+using Microsoft.Agents.AI.FoundryLocal;
+using Microsoft.Extensions.AI;
+
+// Quick smoke test — does FoundryLocalChatClient actually call a local model?
+
+Console.WriteLine("=== Foundry Local Integration Test ===\n");
+
+// 1. Create the client (this bootstraps the manager, downloads/loads the model, starts web service)
+Console.WriteLine("Creating FoundryLocalChatClient with qwen2.5-0.5b...");
+var client = await FoundryLocalChatClient.CreateAsync(
+ new FoundryLocalClientOptions
+ {
+ Model = "qwen2.5-0.5b",
+ PrepareModel = true,
+ StartWebService = true,
+ });
+
+Console.WriteLine(" Model ID: " + client.ModelId);
+Console.WriteLine(" Manager URLs: " + string.Join(", ", client.Manager.Urls ?? Array.Empty<string>()));
+
+// 2. Create an agent
+Console.WriteLine("\nCreating agent...");
+var agent = client.AsAIAgent(
+ instructions: "You are a helpful assistant. Keep answers very brief (1-2 sentences).",
+ name: "LocalTestAgent");
+
+Console.WriteLine(" Agent created successfully.");
+
+// 3. Run a simple query via agent.RunAsync
+Console.WriteLine("\nSending message: 'What is 2 + 2?'");
+var response = await agent.RunAsync("What is 2 + 2?");
+
+Console.WriteLine("\nResponse:");
+foreach (var msg in response.Messages)
+{
+ Console.WriteLine(" [" + msg.Role + "]: " + msg.Text);
+}
+
+Console.WriteLine("\n=== Test Complete ===");
diff --git a/dotnet/tests/Microsoft.Agents.AI.FoundryLocal.UnitTests/FoundryLocalChatClientExtensionsTests.cs b/dotnet/tests/Microsoft.Agents.AI.FoundryLocal.UnitTests/FoundryLocalChatClientExtensionsTests.cs
new file mode 100644
index 0000000000..631b8c9b8d
--- /dev/null
+++ b/dotnet/tests/Microsoft.Agents.AI.FoundryLocal.UnitTests/FoundryLocalChatClientExtensionsTests.cs
@@ -0,0 +1,41 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+using System;
+using Microsoft.Extensions.AI;
+
+namespace Microsoft.Agents.AI.FoundryLocal.UnitTests;
+
+public class FoundryLocalChatClientExtensionsTests
+{
+ [Fact]
+ public void AsAIAgent_WithNullClient_Throws()
+ {
+ FoundryLocalChatClient client = null!;
+
+        Assert.Throws<ArgumentNullException>(() =>
+ client.AsAIAgent(instructions: "test"));
+ }
+
+ [Fact]
+ public void AsAIAgent_WithOptions_WithNullClient_Throws()
+ {
+ FoundryLocalChatClient client = null!;
+ var options = new ChatClientAgentOptions();
+
+        Assert.Throws<ArgumentNullException>(() =>
+ client.AsAIAgent(options));
+ }
+
+ [Fact]
+ public void AsAIAgent_WithOptions_WithNullOptions_Throws()
+ {
+ // When both arguments are null, AsAIAgent validates client first.
+ // This test therefore verifies the null-client guard, not the null-options path.
+ FoundryLocalChatClient client = null!;
+
+        var exception = Assert.Throws<ArgumentNullException>(() =>
+ client.AsAIAgent((ChatClientAgentOptions)null!));
+
+ Assert.Equal("client", exception.ParamName);
+ }
+}
diff --git a/dotnet/tests/Microsoft.Agents.AI.FoundryLocal.UnitTests/FoundryLocalChatClientTests.cs b/dotnet/tests/Microsoft.Agents.AI.FoundryLocal.UnitTests/FoundryLocalChatClientTests.cs
new file mode 100644
index 0000000000..ccbebc6378
--- /dev/null
+++ b/dotnet/tests/Microsoft.Agents.AI.FoundryLocal.UnitTests/FoundryLocalChatClientTests.cs
@@ -0,0 +1,35 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+using System;
+using System.Threading.Tasks;
+using Microsoft.Extensions.AI;
+
+namespace Microsoft.Agents.AI.FoundryLocal.UnitTests;
+
+public class FoundryLocalChatClientTests
+{
+ [Fact]
+ public async Task CreateAsync_WithNullOptions_Throws()
+ {
+        await Assert.ThrowsAsync<ArgumentNullException>(() =>
+ FoundryLocalChatClient.CreateAsync(null!));
+ }
+
+ [Fact]
+ public async Task CreateAsync_WithNoModel_ThrowsInvalidOperation()
+ {
+ var previousValue = Environment.GetEnvironmentVariable("FOUNDRY_LOCAL_MODEL");
+ try
+ {
+ Environment.SetEnvironmentVariable("FOUNDRY_LOCAL_MODEL", null);
+ var options = new FoundryLocalClientOptions { Bootstrap = false };
+
+            await Assert.ThrowsAsync<InvalidOperationException>(() =>
+ FoundryLocalChatClient.CreateAsync(options));
+ }
+ finally
+ {
+ Environment.SetEnvironmentVariable("FOUNDRY_LOCAL_MODEL", previousValue);
+ }
+ }
+}
diff --git a/dotnet/tests/Microsoft.Agents.AI.FoundryLocal.UnitTests/FoundryLocalClientOptionsTests.cs b/dotnet/tests/Microsoft.Agents.AI.FoundryLocal.UnitTests/FoundryLocalClientOptionsTests.cs
new file mode 100644
index 0000000000..efc3c487ab
--- /dev/null
+++ b/dotnet/tests/Microsoft.Agents.AI.FoundryLocal.UnitTests/FoundryLocalClientOptionsTests.cs
@@ -0,0 +1,128 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+using System;
+
+namespace Microsoft.Agents.AI.FoundryLocal.UnitTests;
+
+[Collection("EnvironmentVariables")]
+public class FoundryLocalClientOptionsTests
+{
+ [Fact]
+ public void ResolveModel_WithExplicitModel_ReturnsModel()
+ {
+ var options = new FoundryLocalClientOptions { Model = "phi-4-mini" };
+
+ var result = options.ResolveModel();
+
+ Assert.Equal("phi-4-mini", result);
+ }
+
+ [Fact]
+ public void ResolveModel_WithEnvironmentVariable_ReturnsEnvValue()
+ {
+ var previousValue = Environment.GetEnvironmentVariable("FOUNDRY_LOCAL_MODEL");
+ try
+ {
+ Environment.SetEnvironmentVariable("FOUNDRY_LOCAL_MODEL", "env-model");
+ var options = new FoundryLocalClientOptions();
+
+ var result = options.ResolveModel();
+
+ Assert.Equal("env-model", result);
+ }
+ finally
+ {
+ Environment.SetEnvironmentVariable("FOUNDRY_LOCAL_MODEL", previousValue);
+ }
+ }
+
+ [Fact]
+ public void ResolveModel_ExplicitModelOverridesEnvVar()
+ {
+ var previousValue = Environment.GetEnvironmentVariable("FOUNDRY_LOCAL_MODEL");
+ try
+ {
+ Environment.SetEnvironmentVariable("FOUNDRY_LOCAL_MODEL", "env-model");
+ var options = new FoundryLocalClientOptions { Model = "explicit-model" };
+
+ var result = options.ResolveModel();
+
+ Assert.Equal("explicit-model", result);
+ }
+ finally
+ {
+ Environment.SetEnvironmentVariable("FOUNDRY_LOCAL_MODEL", previousValue);
+ }
+ }
+
+ [Fact]
+ public void ResolveModel_WithNoModelAndNoEnvVar_Throws()
+ {
+ var previousValue = Environment.GetEnvironmentVariable("FOUNDRY_LOCAL_MODEL");
+ try
+ {
+ Environment.SetEnvironmentVariable("FOUNDRY_LOCAL_MODEL", null);
+ var options = new FoundryLocalClientOptions();
+
+            var ex = Assert.Throws<InvalidOperationException>(() => options.ResolveModel());
+
+ Assert.Contains("FOUNDRY_LOCAL_MODEL", ex.Message);
+ }
+ finally
+ {
+ Environment.SetEnvironmentVariable("FOUNDRY_LOCAL_MODEL", previousValue);
+ }
+ }
+
+ [Fact]
+ public void ResolveModel_WithWhitespaceModel_Throws()
+ {
+ var previousValue = Environment.GetEnvironmentVariable("FOUNDRY_LOCAL_MODEL");
+ try
+ {
+ Environment.SetEnvironmentVariable("FOUNDRY_LOCAL_MODEL", null);
+ var options = new FoundryLocalClientOptions { Model = " " };
+
+            Assert.Throws<InvalidOperationException>(() => options.ResolveModel());
+ }
+ finally
+ {
+ Environment.SetEnvironmentVariable("FOUNDRY_LOCAL_MODEL", previousValue);
+ }
+ }
+
+ [Fact]
+ public void DefaultValues_AreCorrect()
+ {
+ var options = new FoundryLocalClientOptions();
+
+ Assert.Null(options.Model);
+ Assert.Equal("AgentFramework", options.AppName);
+ Assert.True(options.Bootstrap);
+ Assert.True(options.PrepareModel);
+ Assert.True(options.StartWebService);
+ Assert.Null(options.WebServiceUrl);
+ }
+
+ [Fact]
+ public void Properties_CanBeSet()
+ {
+ var webUrl = new Uri("http://localhost:9999");
+ var options = new FoundryLocalClientOptions
+ {
+ Model = "test-model",
+ AppName = "TestApp",
+ Bootstrap = false,
+ PrepareModel = false,
+ StartWebService = false,
+ WebServiceUrl = webUrl,
+ };
+
+ Assert.Equal("test-model", options.Model);
+ Assert.Equal("TestApp", options.AppName);
+ Assert.False(options.Bootstrap);
+ Assert.False(options.PrepareModel);
+ Assert.False(options.StartWebService);
+ Assert.Equal(webUrl, options.WebServiceUrl);
+ }
+}
diff --git a/dotnet/tests/Microsoft.Agents.AI.FoundryLocal.UnitTests/Microsoft.Agents.AI.FoundryLocal.UnitTests.csproj b/dotnet/tests/Microsoft.Agents.AI.FoundryLocal.UnitTests/Microsoft.Agents.AI.FoundryLocal.UnitTests.csproj
new file mode 100644
index 0000000000..1203c6f930
--- /dev/null
+++ b/dotnet/tests/Microsoft.Agents.AI.FoundryLocal.UnitTests/Microsoft.Agents.AI.FoundryLocal.UnitTests.csproj
@@ -0,0 +1,14 @@
+
+
+
+
+ net10.0
+ win-x64;win-arm64;linux-x64;linux-arm64;osx-arm64
+ $(NoWarn);OPENAI001
+
+
+
+
+
+
+