From 44b964fc2d11a2b2c476b088d9af20fd8b01f038 Mon Sep 17 00:00:00 2001 From: westey <164392973+westey-m@users.noreply.github.com> Date: Tue, 25 Mar 2025 17:15:49 +0000 Subject: [PATCH 1/6] Add support for agent specific params for OpenAIAssistant Invoke --- dotnet/src/Agents/Abstractions/Agent.cs | 4 +- .../Agents/Abstractions/AgentInvokeOptions.cs | 22 +++++- .../src/Agents/OpenAI/OpenAIAssistantAgent.cs | 71 ++++++++++++++++--- .../OpenAIAssistantAgentInvokeOptions.cs | 50 +++++++++++++ 4 files changed, 136 insertions(+), 11 deletions(-) create mode 100644 dotnet/src/Agents/OpenAI/OpenAIAssistantAgentInvokeOptions.cs diff --git a/dotnet/src/Agents/Abstractions/Agent.cs b/dotnet/src/Agents/Abstractions/Agent.cs index bf6c26cc72ca..f80dbaf33f73 100644 --- a/dotnet/src/Agents/Abstractions/Agent.cs +++ b/dotnet/src/Agents/Abstractions/Agent.cs @@ -87,7 +87,7 @@ public abstract IAsyncEnumerable> InvokeAs /// The conversation thread to continue with this invocation. If not provided, creates a new thread. /// Optional parameters for agent invocation. /// The to monitor for cancellation requests. The default is . - /// An async list of response items that each contain a and an . + /// An async list of response items that each contain a and an . /// /// To continue this thread in the future, use an returned in one of the response items. /// @@ -107,7 +107,7 @@ public virtual IAsyncEnumerable> /// The conversation thread to continue with this invocation. If not provided, creates a new thread. /// Optional parameters for agent invocation. /// The to monitor for cancellation requests. The default is . - /// An async list of response items that each contain a and an . + /// An async list of response items that each contain a and an . /// /// To continue this thread in the future, use an returned in one of the response items. /// diff --git a/dotnet/src/Agents/Abstractions/AgentInvokeOptions.cs b/dotnet/src/Agents/Abstractions/AgentInvokeOptions.cs index 57d27652ab02..8b2044faf58f 100644 --- a/dotnet/src/Agents/Abstractions/AgentInvokeOptions.cs +++ b/dotnet/src/Agents/Abstractions/AgentInvokeOptions.cs @@ -5,8 +5,28 @@ namespace Microsoft.SemanticKernel.Agents; /// /// Optional parameters for agent invocation. /// -public sealed class AgentInvokeOptions +public class AgentInvokeOptions { + /// + /// Initializes a new instance of the class. + /// + public AgentInvokeOptions() + { + } + + /// + /// Initializes a new instance of the class by cloning the provided options. + /// + /// The options to clone. + public AgentInvokeOptions(AgentInvokeOptions options) + { + Verify.NotNull(options); + + this.KernelArguments = options.KernelArguments; + this.Kernel = options.Kernel; + this.AdditionalInstructions = options.AdditionalInstructions; + } + /// /// Gets or sets optional arguments to pass to the agent's invocation, including any /// diff --git a/dotnet/src/Agents/OpenAI/OpenAIAssistantAgent.cs b/dotnet/src/Agents/OpenAI/OpenAIAssistantAgent.cs index 186fb5e88bd8..ee92af8a3735 100644 --- a/dotnet/src/Agents/OpenAI/OpenAIAssistantAgent.cs +++ b/dotnet/src/Agents/OpenAI/OpenAIAssistantAgent.cs @@ -361,10 +361,34 @@ public async Task DeleteAsync(CancellationToken cancellationToken = defaul } /// - public override async IAsyncEnumerable> InvokeAsync( + public override IAsyncEnumerable> InvokeAsync( ICollection messages, AgentThread? thread = null, AgentInvokeOptions? options = null, + CancellationToken cancellationToken = default) + { + return this.InvokeAsync( + messages, + thread, + options is null ? 
null : new OpenAIAssistantAgentInvokeOptions(options), + cancellationToken); + } + + /// + /// Invoke the agent with the provided message and arguments. + /// + /// The messages to pass to the agent. + /// The conversation thread to continue with this invocation. If not provided, creates a new thread. + /// Optional parameters for agent invocation. + /// The to monitor for cancellation requests. The default is . + /// An async list of response items that each contain a and an . + /// + /// To continue this thread in the future, use an returned in one of the response items. + /// + public async IAsyncEnumerable> InvokeAsync( + ICollection messages, + AgentThread? thread = null, + OpenAIAssistantAgentInvokeOptions? options = null, [EnumeratorCancellation] CancellationToken cancellationToken = default) { Verify.NotNull(messages); @@ -375,12 +399,15 @@ public override async IAsyncEnumerable> In () => new OpenAIAssistantAgentThread(this.Client), cancellationToken).ConfigureAwait(false); - // Create options that include the additional instructions. - var internalOptions = string.IsNullOrWhiteSpace(options?.AdditionalInstructions) ? null : new RunCreationOptions() + // Create options that use the RunCreationOptions from the options param if provided or + // falls back to creating a new RunCreationOptions if additional instructions is provided + // separately. + var internalOptions = options?.RunCreationOptions ?? (string.IsNullOrWhiteSpace(options?.AdditionalInstructions) ? null : new RunCreationOptions() { AdditionalInstructions = options?.AdditionalInstructions, - }; + }); +#pragma warning disable CS0618 // Type or member is obsolete // Invoke the Agent with the thread that we already added our message to. var invokeResults = this.InvokeAsync( openAIAssistantAgentThread.Id!, @@ -388,6 +415,7 @@ public override async IAsyncEnumerable> In this.MergeArguments(options?.KernelArguments), options?.Kernel ?? this.Kernel, cancellationToken); +#pragma warning restore CS0618 // Type or member is obsolete // Notify the thread of new messages and return them to the caller. await foreach (var result in invokeResults.ConfigureAwait(false)) @@ -428,6 +456,7 @@ public IAsyncEnumerable InvokeAsync( /// /// The "arguments" parameter is not currently used by the agent, but is provided for future extensibility. /// + [Obsolete("Use InvokeAsync with AgentThread instead.")] public IAsyncEnumerable InvokeAsync( string threadId, RunCreationOptions? options, @@ -456,10 +485,34 @@ async IAsyncEnumerable InternalInvokeAsync() } /// - public async override IAsyncEnumerable> InvokeStreamingAsync( + public override IAsyncEnumerable> InvokeStreamingAsync( ICollection messages, AgentThread? thread = null, AgentInvokeOptions? options = null, + CancellationToken cancellationToken = default) + { + return this.InvokeStreamingAsync( + messages, + thread, + options is null ? null : new OpenAIAssistantAgentInvokeOptions(options), + cancellationToken); + } + + /// + /// Invoke the agent with the provided message and arguments. + /// + /// The messages to pass to the agent. + /// The conversation thread to continue with this invocation. If not provided, creates a new thread. + /// Optional parameters for agent invocation. + /// The to monitor for cancellation requests. The default is . + /// An async list of response items that each contain a and an . + /// + /// To continue this thread in the future, use an returned in one of the response items. 
+ /// + public async IAsyncEnumerable> InvokeStreamingAsync( + ICollection messages, + AgentThread? thread = null, + OpenAIAssistantAgentInvokeOptions? options = null, [EnumeratorCancellation] CancellationToken cancellationToken = default) { Verify.NotNull(messages); @@ -470,11 +523,13 @@ public async override IAsyncEnumerable new OpenAIAssistantAgentThread(this.Client), cancellationToken).ConfigureAwait(false); - // Create options that include the additional instructions. - var internalOptions = string.IsNullOrWhiteSpace(options?.AdditionalInstructions) ? null : new RunCreationOptions() + // Create options that use the RunCreationOptions from the options param if provided or + // falls back to creating a new RunCreationOptions if additional instructions is provided + // separately. + var internalOptions = options?.RunCreationOptions ?? (string.IsNullOrWhiteSpace(options?.AdditionalInstructions) ? null : new RunCreationOptions() { AdditionalInstructions = options?.AdditionalInstructions, - }; + }); // Invoke the Agent with the thread that we already added our message to. var newMessagesReceiver = new ChatHistory(); diff --git a/dotnet/src/Agents/OpenAI/OpenAIAssistantAgentInvokeOptions.cs b/dotnet/src/Agents/OpenAI/OpenAIAssistantAgentInvokeOptions.cs new file mode 100644 index 000000000000..843350c5c94d --- /dev/null +++ b/dotnet/src/Agents/OpenAI/OpenAIAssistantAgentInvokeOptions.cs @@ -0,0 +1,50 @@ +// Copyright (c) Microsoft. All rights reserved. + +using OpenAI.Assistants; + +namespace Microsoft.SemanticKernel.Agents.OpenAI; + +/// +/// Optional parameters for invocation. +/// +public sealed class OpenAIAssistantAgentInvokeOptions : AgentInvokeOptions +{ + /// + /// Initializes a new instance of the class. + /// + public OpenAIAssistantAgentInvokeOptions() + { + } + + /// + /// Initializes a new instance of the class by cloning the provided options. + /// + /// The options to clone. + public OpenAIAssistantAgentInvokeOptions(AgentInvokeOptions options) + : base(options) + { + Verify.NotNull(options); + } + + /// + /// Initializes a new instance of the class by cloning the provided options. + /// + /// The options to clone. + public OpenAIAssistantAgentInvokeOptions(OpenAIAssistantAgentInvokeOptions options) + : base(options) + { + Verify.NotNull(options); + + this.RunCreationOptions = options.RunCreationOptions; + } + + /// + /// Gets or sets the to use when creating the new run to execute the invocation. + /// + /// + /// If this property is set, then will not be used. + /// Instead, please set the property to provide the + /// additional instructions for the run. + /// + public RunCreationOptions? RunCreationOptions { get; init; } = null; +} From 8db2366e485fe7c3f21c215fef44c2a1fc616cb9 Mon Sep 17 00:00:00 2001 From: westey <164392973+westey-m@users.noreply.github.com> Date: Tue, 25 Mar 2025 17:21:30 +0000 Subject: [PATCH 2/6] Small fix. --- dotnet/src/Agents/OpenAI/OpenAIAssistantAgent.cs | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/dotnet/src/Agents/OpenAI/OpenAIAssistantAgent.cs b/dotnet/src/Agents/OpenAI/OpenAIAssistantAgent.cs index ee92af8a3735..4672d96f8840 100644 --- a/dotnet/src/Agents/OpenAI/OpenAIAssistantAgent.cs +++ b/dotnet/src/Agents/OpenAI/OpenAIAssistantAgent.cs @@ -370,7 +370,9 @@ public override IAsyncEnumerable> InvokeAs return this.InvokeAsync( messages, thread, - options is null ? null : new OpenAIAssistantAgentInvokeOptions(options), + options is null ? + null : + options is OpenAIAssistantAgentInvokeOptions ? 
options : new OpenAIAssistantAgentInvokeOptions(options), cancellationToken); } @@ -494,7 +496,9 @@ public override IAsyncEnumerable> return this.InvokeStreamingAsync( messages, thread, - options is null ? null : new OpenAIAssistantAgentInvokeOptions(options), + options is null ? + null : + options is OpenAIAssistantAgentInvokeOptions ? options : new OpenAIAssistantAgentInvokeOptions(options), cancellationToken); } From 4bd03b8cdac02bee67723dc414c8ccd2a8480569 Mon Sep 17 00:00:00 2001 From: westey <164392973+westey-m@users.noreply.github.com> Date: Wed, 26 Mar 2025 10:42:34 +0000 Subject: [PATCH 3/6] Add more tests and fix bugs --- .../Internal/AssistantRunOptionsFactory.cs | 4 +- .../src/Agents/OpenAI/OpenAIAssistantAgent.cs | 4 +- .../OpenAI/OpenAIAssistantAgentTests.cs | 64 ++++++++++++++ .../Agents/OpenAIAssistantAgentTests.cs | 85 +++++++++++++++++++ 4 files changed, 153 insertions(+), 4 deletions(-) diff --git a/dotnet/src/Agents/OpenAI/Internal/AssistantRunOptionsFactory.cs b/dotnet/src/Agents/OpenAI/Internal/AssistantRunOptionsFactory.cs index 772c30630fe5..d0245dbb9bdf 100644 --- a/dotnet/src/Agents/OpenAI/Internal/AssistantRunOptionsFactory.cs +++ b/dotnet/src/Agents/OpenAI/Internal/AssistantRunOptionsFactory.cs @@ -10,13 +10,13 @@ namespace Microsoft.SemanticKernel.Agents.OpenAI.Internal; /// internal static class AssistantRunOptionsFactory { - public static RunCreationOptions GenerateOptions(RunCreationOptions? defaultOptions, string? overrideInstructions, RunCreationOptions? invocationOptions) + public static RunCreationOptions GenerateOptions(RunCreationOptions? defaultOptions, string? agentInstructions, RunCreationOptions? invocationOptions) { RunCreationOptions runOptions = new() { AdditionalInstructions = invocationOptions?.AdditionalInstructions ?? defaultOptions?.AdditionalInstructions, - InstructionsOverride = overrideInstructions, + InstructionsOverride = invocationOptions?.InstructionsOverride ?? agentInstructions, MaxOutputTokenCount = invocationOptions?.MaxOutputTokenCount ?? defaultOptions?.MaxOutputTokenCount, MaxInputTokenCount = invocationOptions?.MaxInputTokenCount ?? defaultOptions?.MaxInputTokenCount, ModelOverride = invocationOptions?.ModelOverride ?? defaultOptions?.ModelOverride, diff --git a/dotnet/src/Agents/OpenAI/OpenAIAssistantAgent.cs b/dotnet/src/Agents/OpenAI/OpenAIAssistantAgent.cs index 4672d96f8840..9d642debf584 100644 --- a/dotnet/src/Agents/OpenAI/OpenAIAssistantAgent.cs +++ b/dotnet/src/Agents/OpenAI/OpenAIAssistantAgent.cs @@ -372,7 +372,7 @@ public override IAsyncEnumerable> InvokeAs thread, options is null ? null : - options is OpenAIAssistantAgentInvokeOptions ? options : new OpenAIAssistantAgentInvokeOptions(options), + options is OpenAIAssistantAgentInvokeOptions openAIAssistantAgentInvokeOptions ? openAIAssistantAgentInvokeOptions : new OpenAIAssistantAgentInvokeOptions(options), cancellationToken); } @@ -498,7 +498,7 @@ public override IAsyncEnumerable> thread, options is null ? null : - options is OpenAIAssistantAgentInvokeOptions ? options : new OpenAIAssistantAgentInvokeOptions(options), + options is OpenAIAssistantAgentInvokeOptions openAIAssistantAgentInvokeOptions ? 
openAIAssistantAgentInvokeOptions : new OpenAIAssistantAgentInvokeOptions(options), cancellationToken); } diff --git a/dotnet/src/Agents/UnitTests/OpenAI/OpenAIAssistantAgentTests.cs b/dotnet/src/Agents/UnitTests/OpenAI/OpenAIAssistantAgentTests.cs index 3860855b986d..1ad4897cdabb 100644 --- a/dotnet/src/Agents/UnitTests/OpenAI/OpenAIAssistantAgentTests.cs +++ b/dotnet/src/Agents/UnitTests/OpenAI/OpenAIAssistantAgentTests.cs @@ -435,6 +435,70 @@ public async Task VerifyOpenAIAssistantAgentGroupChatAsync() Assert.Empty(this._messageHandlerStub.ResponseQueue); } + /// + /// Verify direction invocation of . + /// + [Fact] + public async Task VerifyOpenAIAssistantAgentInvokeWithThreadAsync() + { + // Arrange + OpenAIAssistantAgent agent = await this.CreateAgentAsync(); + + this.SetupResponses( + HttpStatusCode.OK, + OpenAIAssistantResponseContent.CreateThread, + // Create message response + OpenAIAssistantResponseContent.GetTextMessage("Hi"), + OpenAIAssistantResponseContent.Run.CreateRun, + OpenAIAssistantResponseContent.Run.CompletedRun, + OpenAIAssistantResponseContent.Run.MessageSteps, + OpenAIAssistantResponseContent.GetTextMessage("Hello, how can I help you?")); + + // Act + AgentResponseItem[] messages = await agent.InvokeAsync(new ChatMessageContent(AuthorRole.User, "Hi")).ToArrayAsync(); + + // Assert + Assert.Single(messages); + Assert.Single(messages[0].Message.Items); + Assert.IsType(messages[0].Message.Items[0]); + Assert.Equal("Hello, how can I help you?", messages[0].Message.Content); + } + + /// + /// Verify direction invocation of . + /// + [Fact] + public async Task VerifyOpenAIAssistantAgentInvokeMultipleMessagesWithThreadAsync() + { + // Arrange + OpenAIAssistantAgent agent = await this.CreateAgentAsync(); + + this.SetupResponses( + HttpStatusCode.OK, + OpenAIAssistantResponseContent.CreateThread, + // Create message response + OpenAIAssistantResponseContent.GetTextMessage("Hello"), + // Create message response + OpenAIAssistantResponseContent.GetTextMessage("Hi"), + OpenAIAssistantResponseContent.Run.CreateRun, + OpenAIAssistantResponseContent.Run.CompletedRun, + OpenAIAssistantResponseContent.Run.MessageSteps, + OpenAIAssistantResponseContent.GetTextMessage("How can I help you?")); + + // Act + AgentResponseItem[] messages = await agent.InvokeAsync( + [ + new ChatMessageContent(AuthorRole.Assistant, "Hello"), + new ChatMessageContent(AuthorRole.User, "Hi") + ]).ToArrayAsync(); + + // Assert + Assert.Single(messages); + Assert.Single(messages[0].Message.Items); + Assert.IsType(messages[0].Message.Items[0]); + Assert.Equal("How can I help you?", messages[0].Message.Content); + } + /// /// Verify direction invocation of . /// diff --git a/dotnet/src/IntegrationTests/Agents/OpenAIAssistantAgentTests.cs b/dotnet/src/IntegrationTests/Agents/OpenAIAssistantAgentTests.cs index 3fcf25f219a5..74965c24ef0b 100644 --- a/dotnet/src/IntegrationTests/Agents/OpenAIAssistantAgentTests.cs +++ b/dotnet/src/IntegrationTests/Agents/OpenAIAssistantAgentTests.cs @@ -164,6 +164,55 @@ public async Task AzureOpenAIAssistantAgentTokensAsync() } } + /// + /// Integration test for adding additional messages to a thread on invocation via custom options. 
+ /// + [RetryFact(typeof(HttpOperationException))] + public async Task AzureOpenAIAssistantAgentWithThreadCustomOptionsAsync() + { + AzureOpenAIConfiguration azureOpenAIConfiguration = this.ReadAzureConfiguration(); + OpenAIClientProvider clientProvider = CreateClientProvider(azureOpenAIConfiguration); + Assistant definition = await clientProvider.AssistantClient.CreateAssistantAsync(azureOpenAIConfiguration.ChatDeploymentName!); + OpenAIAssistantAgent agent = new(definition, clientProvider.AssistantClient); + + ThreadCreationOptions threadOptions = new() + { + InitialMessages = + { + new ChatMessageContent(AuthorRole.User, "Hello").ToThreadInitializationMessage(), + new ChatMessageContent(AuthorRole.User, "How may I help you?").ToThreadInitializationMessage(), + } + }; + OpenAIAssistantAgentThread agentThread = new(clientProvider.AssistantClient, threadOptions); + + try + { + var originalMessages = await agentThread.GetMessagesAsync().ToArrayAsync(); + Assert.Equal(2, originalMessages.Length); + + RunCreationOptions invocationOptions = new() + { + AdditionalMessages = { + new ChatMessageContent(AuthorRole.User, "This is my real question...in three parts:").ToThreadInitializationMessage(), + new ChatMessageContent(AuthorRole.User, "Part 1").ToThreadInitializationMessage(), + new ChatMessageContent(AuthorRole.User, "Part 2").ToThreadInitializationMessage(), + new ChatMessageContent(AuthorRole.User, "Part 3").ToThreadInitializationMessage(), + } + }; + + var responseMessages = await agent.InvokeAsync([], agentThread, options: new() { RunCreationOptions = invocationOptions }).ToArrayAsync(); + Assert.Single(responseMessages); + + var finalMessages = await agentThread.GetMessagesAsync().ToArrayAsync(); + Assert.Equal(7, finalMessages.Length); + } + finally + { + await agentThread.DeleteAsync(); + await clientProvider.AssistantClient.DeleteAssistantAsync(agent.Id); + } + } + /// /// Integration test for adding additional message to a thread. /// function result contents. @@ -259,6 +308,42 @@ await vectorStoreClient.CreateVectorStoreAsync(waitUntilCompleted: false, } } + /// + /// Integration test for adding override instructions to a thread on invocation via custom options. 
+ /// + [RetryFact(typeof(HttpOperationException))] + public async Task AzureOpenAIAssistantAgentWithThreadCustomOptionsStreamingAsync() + { + AzureOpenAIConfiguration azureOpenAIConfiguration = this.ReadAzureConfiguration(); + OpenAIClientProvider clientProvider = CreateClientProvider(azureOpenAIConfiguration); + Assistant definition = await clientProvider.AssistantClient.CreateAssistantAsync(azureOpenAIConfiguration.ChatDeploymentName!); + OpenAIAssistantAgent agent = new(definition, clientProvider.AssistantClient); + + OpenAIAssistantAgentThread agentThread = new(clientProvider.AssistantClient); + + try + { + RunCreationOptions invocationOptions = new() + { + InstructionsOverride = "Respond to all user questions with 'Computer says no'.", + }; + + var message = new ChatMessageContent(AuthorRole.User, "What is the capital of France?"); + var responseMessages = await agent.InvokeStreamingAsync( + message, + agentThread, + new OpenAIAssistantAgentInvokeOptions() { RunCreationOptions = invocationOptions }).ToArrayAsync(); + var responseText = string.Join(string.Empty, responseMessages.Select(x => x.Message.Content)); + + Assert.Contains("Computer says no", responseText); + } + finally + { + await agentThread.DeleteAsync(); + await clientProvider.AssistantClient.DeleteAssistantAsync(agent.Id); + } + } + private async Task ExecuteAgentAsync( OpenAIClientProvider clientProvider, string modelName, From ec4fd7ca86ed520f402e24e586024cc3b0e33ded Mon Sep 17 00:00:00 2001 From: westey <164392973+westey-m@users.noreply.github.com> Date: Wed, 26 Mar 2025 11:19:34 +0000 Subject: [PATCH 4/6] Add agent specific options for AzureAIAgent with tests --- dotnet/src/Agents/AzureAI/AzureAIAgent.cs | 75 +++++++-- .../AzureAI/AzureAIAgentInvokeOptions.cs | 152 ++++++++++++++++++ .../AzureAI/AzureAIAgentInvokeOptionsTests.cs | 75 +++++++++ .../OpenAIAssistantAgentInvokeOptionsTests.cs | 55 +++++++ 4 files changed, 341 insertions(+), 16 deletions(-) create mode 100644 dotnet/src/Agents/AzureAI/AzureAIAgentInvokeOptions.cs create mode 100644 dotnet/src/Agents/UnitTests/AzureAI/AzureAIAgentInvokeOptionsTests.cs create mode 100644 dotnet/src/Agents/UnitTests/OpenAI/OpenAIAssistantAgentInvokeOptionsTests.cs diff --git a/dotnet/src/Agents/AzureAI/AzureAIAgent.cs b/dotnet/src/Agents/AzureAI/AzureAIAgent.cs index 07d7b53c9e8f..1e9523f24f47 100644 --- a/dotnet/src/Agents/AzureAI/AzureAIAgent.cs +++ b/dotnet/src/Agents/AzureAI/AzureAIAgent.cs @@ -144,10 +144,36 @@ public IAsyncEnumerable InvokeAsync( } /// - public override async IAsyncEnumerable> InvokeAsync( + public override IAsyncEnumerable> InvokeAsync( ICollection messages, AgentThread? thread = null, AgentInvokeOptions? options = null, + CancellationToken cancellationToken = default) + { + return this.InvokeAsync( + messages, + thread, + options is null ? + null : + options is AzureAIAgentInvokeOptions azureAIAgentInvokeOptions ? azureAIAgentInvokeOptions : new AzureAIAgentInvokeOptions(options), + cancellationToken); + } + + /// + /// Invoke the agent with the provided message and arguments. + /// + /// The messages to pass to the agent. + /// The conversation thread to continue with this invocation. If not provided, creates a new thread. + /// Optional parameters for agent invocation. + /// The to monitor for cancellation requests. The default is . + /// An async list of response items that each contain a and an . + /// + /// To continue this thread in the future, use an returned in one of the response items. 
+ /// + public async IAsyncEnumerable> InvokeAsync( + ICollection messages, + AgentThread? thread = null, + AzureAIAgentInvokeOptions? options = null, [EnumeratorCancellation] CancellationToken cancellationToken = default) { Verify.NotNull(messages); @@ -158,19 +184,15 @@ public override async IAsyncEnumerable> In () => new AzureAIAgentThread(this.Client), cancellationToken).ConfigureAwait(false); - // Create options that include the additional instructions. - var internalOptions = string.IsNullOrWhiteSpace(options?.AdditionalInstructions) ? null : new AzureAIInvocationOptions() - { - AdditionalInstructions = options?.AdditionalInstructions, - }; - +#pragma warning disable CS0618 // Type or member is obsolete // Invoke the Agent with the thread that we already added our message to. var invokeResults = this.InvokeAsync( azureAIAgentThread.Id!, - internalOptions, + options?.ToAzureAIInvocationOptions(), this.MergeArguments(options?.KernelArguments), options?.Kernel ?? this.Kernel, cancellationToken); +#pragma warning restore CS0618 // Type or member is obsolete // Notify the thread of new messages and return them to the caller. await foreach (var result in invokeResults.ConfigureAwait(false)) @@ -192,6 +214,7 @@ public override async IAsyncEnumerable> In /// /// The `arguments` parameter is not currently used by the agent, but is provided for future extensibility. /// + [Obsolete("Use InvokeAsync with AgentThread instead.")] public IAsyncEnumerable InvokeAsync( string threadId, AzureAIInvocationOptions? options, @@ -220,10 +243,36 @@ async IAsyncEnumerable InternalInvokeAsync() } /// - public async override IAsyncEnumerable> InvokeStreamingAsync( + public override IAsyncEnumerable> InvokeStreamingAsync( ICollection messages, AgentThread? thread = null, AgentInvokeOptions? options = null, + CancellationToken cancellationToken = default) + { + return this.InvokeStreamingAsync( + messages, + thread, + options is null ? + null : + options is AzureAIAgentInvokeOptions azureAIAgentInvokeOptions ? azureAIAgentInvokeOptions : new AzureAIAgentInvokeOptions(options), + cancellationToken); + } + + /// + /// Invoke the agent with the provided message and arguments. + /// + /// The messages to pass to the agent. + /// The conversation thread to continue with this invocation. If not provided, creates a new thread. + /// Optional parameters for agent invocation. + /// The to monitor for cancellation requests. The default is . + /// An async list of response items that each contain a and an . + /// + /// To continue this thread in the future, use an returned in one of the response items. + /// + public async IAsyncEnumerable> InvokeStreamingAsync( + ICollection messages, + AgentThread? thread = null, + AzureAIAgentInvokeOptions? options = null, [EnumeratorCancellation] CancellationToken cancellationToken = default) { Verify.NotNull(messages); @@ -234,17 +283,11 @@ public async override IAsyncEnumerable new AzureAIAgentThread(this.Client), cancellationToken).ConfigureAwait(false); - // Create options that include the additional instructions. - var internalOptions = string.IsNullOrWhiteSpace(options?.AdditionalInstructions) ? null : new AzureAIInvocationOptions() - { - AdditionalInstructions = options?.AdditionalInstructions, - }; - // Invoke the Agent with the thread that we already added our message to. 
var newMessagesReceiver = new ChatHistory(); var invokeResults = this.InvokeStreamingAsync( azureAIAgentThread.Id!, - internalOptions, + options?.ToAzureAIInvocationOptions(), this.MergeArguments(options?.KernelArguments), options?.Kernel ?? this.Kernel, newMessagesReceiver, diff --git a/dotnet/src/Agents/AzureAI/AzureAIAgentInvokeOptions.cs b/dotnet/src/Agents/AzureAI/AzureAIAgentInvokeOptions.cs new file mode 100644 index 000000000000..e0820691b95d --- /dev/null +++ b/dotnet/src/Agents/AzureAI/AzureAIAgentInvokeOptions.cs @@ -0,0 +1,152 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Collections.Generic; + +namespace Microsoft.SemanticKernel.Agents.AzureAI; + +/// +/// Optional parameters for invocation. +/// +public sealed class AzureAIAgentInvokeOptions : AgentInvokeOptions +{ + /// + /// Initializes a new instance of the class. + /// + public AzureAIAgentInvokeOptions() + { + } + + /// + /// Initializes a new instance of the class by cloning the provided options. + /// + /// The options to clone. + public AzureAIAgentInvokeOptions(AgentInvokeOptions options) + : base(options) + { + Verify.NotNull(options); + } + + /// + /// Initializes a new instance of the class by cloning the provided options. + /// + /// The options to clone. + public AzureAIAgentInvokeOptions(AzureAIAgentInvokeOptions options) + : base(options) + { + Verify.NotNull(options); + + this.ModelName = options.ModelName; + this.AdditionalMessages = options.AdditionalMessages; + this.EnableCodeInterpreter = options.EnableCodeInterpreter; + this.EnableFileSearch = options.EnableFileSearch; + this.EnableJsonResponse = options.EnableJsonResponse; + this.MaxCompletionTokens = options.MaxCompletionTokens; + this.MaxPromptTokens = options.MaxPromptTokens; + this.ParallelToolCallsEnabled = options.ParallelToolCallsEnabled; + this.TruncationMessageCount = options.TruncationMessageCount; + this.Temperature = options.Temperature; + this.TopP = options.TopP; + this.Metadata = options.Metadata; + } + + /// + /// Gets the AI model targeted by the agent. + /// + public string? ModelName { get; init; } + + /// + /// Gets the additional messages to add to the thread. + /// + /// + /// Only supports messages with role = User or Assistant. + /// + public IReadOnlyList? AdditionalMessages { get; init; } + + /// + /// Gets a value that indicates whether the code_interpreter tool is enabled. + /// + public bool EnableCodeInterpreter { get; init; } + + /// + /// Gets a value that indicates whether the file_search tool is enabled. + /// + public bool EnableFileSearch { get; init; } + + /// + /// Gets a value that indicates whether the JSON response format is enabled. + /// + public bool? EnableJsonResponse { get; init; } + + /// + /// Gets the maximum number of completion tokens that can be used over the course of the run. + /// + public int? MaxCompletionTokens { get; init; } + + /// + /// Gets the maximum number of prompt tokens that can be used over the course of the run. + /// + public int? MaxPromptTokens { get; init; } + + /// + /// Gets a value that indicates whether the parallel function calling is enabled during tool use. + /// + /// + /// if parallel function calling is enabled during tool use; otherwise, . The default is . + /// + public bool? ParallelToolCallsEnabled { get; init; } + + /// + /// Gets the number of recent messages that the thread will be truncated to. + /// + public int? TruncationMessageCount { get; init; } + + /// + /// Gets the sampling temperature to use, between 0 and 2. 
+ /// + public float? Temperature { get; init; } + + /// + /// Gets the probability mass of tokens whose results are considered in nucleus sampling. + /// + /// + /// It's recommended to set this property or , but not both. + /// + /// Nucleus sampling is an alternative to sampling with temperature where the model + /// considers the results of the tokens with probability mass. + /// For example, 0.1 means only the tokens comprising the top 10% probability mass are considered. + /// + public float? TopP { get; init; } + + /// + /// Gets a set of up to 16 key/value pairs that can be attached to an agent, used for + /// storing additional information about that object in a structured format. + /// + /// + /// Keys can be up to 64 characters in length, and values can be up to 512 characters in length. + /// + public IReadOnlyDictionary? Metadata { get; init; } + + /// + /// Converts the current options to an instance. + /// + /// The converted instance. + internal AzureAIInvocationOptions ToAzureAIInvocationOptions() + { + return new AzureAIInvocationOptions + { + ModelName = this.ModelName, + AdditionalInstructions = this.AdditionalInstructions, + AdditionalMessages = this.AdditionalMessages, + EnableCodeInterpreter = this.EnableCodeInterpreter, + EnableFileSearch = this.EnableFileSearch, + EnableJsonResponse = this.EnableJsonResponse, + MaxCompletionTokens = this.MaxCompletionTokens, + MaxPromptTokens = this.MaxPromptTokens, + ParallelToolCallsEnabled = this.ParallelToolCallsEnabled, + TruncationMessageCount = this.TruncationMessageCount, + Temperature = this.Temperature, + TopP = this.TopP, + Metadata = this.Metadata + }; + } +} diff --git a/dotnet/src/Agents/UnitTests/AzureAI/AzureAIAgentInvokeOptionsTests.cs b/dotnet/src/Agents/UnitTests/AzureAI/AzureAIAgentInvokeOptionsTests.cs new file mode 100644 index 000000000000..29a966815c3d --- /dev/null +++ b/dotnet/src/Agents/UnitTests/AzureAI/AzureAIAgentInvokeOptionsTests.cs @@ -0,0 +1,75 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Collections.Generic; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.Agents; +using Microsoft.SemanticKernel.Agents.AzureAI; +using Xunit; + +namespace SemanticKernel.Agents.UnitTests.AzureAI; + +/// +/// Tests for . +/// +public class AzureAIAgentInvokeOptionsTests +{ + /// + /// Tests the constructor of to ensure it correctly clones properties from the base class. 
+ /// + [Fact] + public void ConstructorShouldClonePropertiesCorrectly() + { + // Arrange + var originalOptions = new AzureAIAgentInvokeOptions + { + ModelName = "TestModel", + AdditionalMessages = new List(), + EnableCodeInterpreter = true, + EnableFileSearch = true, + EnableJsonResponse = true, + MaxCompletionTokens = 100, + MaxPromptTokens = 50, + ParallelToolCallsEnabled = true, + TruncationMessageCount = 10, + Temperature = 0.5f, + TopP = 0.9f, + Metadata = new Dictionary { { "key", "value" } } + }; + + // Act + var clonedOptions = new AzureAIAgentInvokeOptions(originalOptions); + + // Assert + Assert.Equal(originalOptions.ModelName, clonedOptions.ModelName); + Assert.Equal(originalOptions.AdditionalMessages, clonedOptions.AdditionalMessages); + Assert.Equal(originalOptions.EnableCodeInterpreter, clonedOptions.EnableCodeInterpreter); + Assert.Equal(originalOptions.EnableFileSearch, clonedOptions.EnableFileSearch); + Assert.Equal(originalOptions.EnableJsonResponse, clonedOptions.EnableJsonResponse); + Assert.Equal(originalOptions.MaxCompletionTokens, clonedOptions.MaxCompletionTokens); + Assert.Equal(originalOptions.MaxPromptTokens, clonedOptions.MaxPromptTokens); + Assert.Equal(originalOptions.ParallelToolCallsEnabled, clonedOptions.ParallelToolCallsEnabled); + Assert.Equal(originalOptions.TruncationMessageCount, clonedOptions.TruncationMessageCount); + Assert.Equal(originalOptions.Temperature, clonedOptions.Temperature); + Assert.Equal(originalOptions.TopP, clonedOptions.TopP); + Assert.Equal(originalOptions.Metadata, clonedOptions.Metadata); + } + + /// + /// Tests the constructor of to ensure it correctly clones properties from an instance of . + /// + [Fact] + public void ConstructorShouldCloneAgentInvokeOptionsPropertiesCorrectly() + { + // Arrange + var originalOptions = new AgentInvokeOptions + { + AdditionalInstructions = "Test instructions" + }; + + // Act + var clonedOptions = new AzureAIAgentInvokeOptions(originalOptions); + + // Assert + Assert.Equal(originalOptions.AdditionalInstructions, clonedOptions.AdditionalInstructions); + } +} diff --git a/dotnet/src/Agents/UnitTests/OpenAI/OpenAIAssistantAgentInvokeOptionsTests.cs b/dotnet/src/Agents/UnitTests/OpenAI/OpenAIAssistantAgentInvokeOptionsTests.cs new file mode 100644 index 000000000000..28a35c21503f --- /dev/null +++ b/dotnet/src/Agents/UnitTests/OpenAI/OpenAIAssistantAgentInvokeOptionsTests.cs @@ -0,0 +1,55 @@ +// Copyright (c) Microsoft. All rights reserved. + +using Microsoft.SemanticKernel.Agents; +using Microsoft.SemanticKernel.Agents.OpenAI; +using OpenAI.Assistants; +using Xunit; + +namespace SemanticKernel.Agents.UnitTests.OpenAI; + +/// +/// Tests for . +/// +public class OpenAIAssistantAgentInvokeOptionsTests +{ + /// + /// Tests the constructor of to ensure it correctly clones properties from the base class. + /// + [Fact] + public void ConstructorShouldClonePropertiesCorrectly() + { + // Arrange + var originalOptions = new OpenAIAssistantAgentInvokeOptions + { + RunCreationOptions = new RunCreationOptions(), + AdditionalInstructions = "Test instructions" + }; + + // Act + var clonedOptions = new OpenAIAssistantAgentInvokeOptions(originalOptions); + + // Assert + Assert.NotNull(clonedOptions.RunCreationOptions); + Assert.Equal(originalOptions.RunCreationOptions, clonedOptions.RunCreationOptions); + Assert.Equal(originalOptions.AdditionalInstructions, clonedOptions.AdditionalInstructions); + } + + /// + /// Tests the constructor of to ensure it correctly clones properties from an instance of . 
+ /// + [Fact] + public void ConstructorShouldCloneAgentInvokeOptionsPropertiesCorrectly() + { + // Arrange + var originalOptions = new AgentInvokeOptions + { + AdditionalInstructions = "Test instructions" + }; + + // Act + var clonedOptions = new OpenAIAssistantAgentInvokeOptions(originalOptions); + + // Assert + Assert.Equal(originalOptions.AdditionalInstructions, clonedOptions.AdditionalInstructions); + } +} From 4a398fba735fffe85044f138d00c16540b3ab7ea Mon Sep 17 00:00:00 2001 From: westey <164392973+westey-m@users.noreply.github.com> Date: Wed, 26 Mar 2025 11:45:51 +0000 Subject: [PATCH 5/6] Add integration tests for AzureAIAgent custom options invocation --- .../AzureAI/AzureAIAgentInvokeOptions.cs | 33 +++-- .../AzureAI/AzureAIInvocationOptions.cs | 6 + .../AzureAI/Extensions/AgentRunExtensions.cs | 4 +- .../Agents/AzureAIAgentTests.cs | 113 ++++++++++++++++++ 4 files changed, 142 insertions(+), 14 deletions(-) create mode 100644 dotnet/src/IntegrationTests/Agents/AzureAIAgentTests.cs diff --git a/dotnet/src/Agents/AzureAI/AzureAIAgentInvokeOptions.cs b/dotnet/src/Agents/AzureAI/AzureAIAgentInvokeOptions.cs index e0820691b95d..7ec290a72982 100644 --- a/dotnet/src/Agents/AzureAI/AzureAIAgentInvokeOptions.cs +++ b/dotnet/src/Agents/AzureAI/AzureAIAgentInvokeOptions.cs @@ -1,6 +1,7 @@ // Copyright (c) Microsoft. All rights reserved. using System.Collections.Generic; +using System.Text.Json.Serialization; namespace Microsoft.SemanticKernel.Agents.AzureAI; @@ -36,6 +37,7 @@ public AzureAIAgentInvokeOptions(AzureAIAgentInvokeOptions options) Verify.NotNull(options); this.ModelName = options.ModelName; + this.OverrideInstructions = options.OverrideInstructions; this.AdditionalMessages = options.AdditionalMessages; this.EnableCodeInterpreter = options.EnableCodeInterpreter; this.EnableFileSearch = options.EnableFileSearch; @@ -50,12 +52,18 @@ public AzureAIAgentInvokeOptions(AzureAIAgentInvokeOptions options) } /// - /// Gets the AI model targeted by the agent. + /// Gets or sets the AI model targeted by the agent. /// public string? ModelName { get; init; } /// - /// Gets the additional messages to add to the thread. + /// Gets or sets the override instructions. + /// + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public string? OverrideInstructions { get; init; } + + /// + /// Gets or sets the additional messages to add to the thread. /// /// /// Only supports messages with role = User or Assistant. @@ -63,32 +71,32 @@ public AzureAIAgentInvokeOptions(AzureAIAgentInvokeOptions options) public IReadOnlyList? AdditionalMessages { get; init; } /// - /// Gets a value that indicates whether the code_interpreter tool is enabled. + /// Gets or sets a value that indicates whether the code_interpreter tool is enabled. /// public bool EnableCodeInterpreter { get; init; } /// - /// Gets a value that indicates whether the file_search tool is enabled. + /// Gets or sets a value that indicates whether the file_search tool is enabled. /// public bool EnableFileSearch { get; init; } /// - /// Gets a value that indicates whether the JSON response format is enabled. + /// Gets or sets a value that indicates whether the JSON response format is enabled. /// public bool? EnableJsonResponse { get; init; } /// - /// Gets the maximum number of completion tokens that can be used over the course of the run. + /// Gets or sets the maximum number of completion tokens that can be used over the course of the run. /// public int? 
MaxCompletionTokens { get; init; } /// - /// Gets the maximum number of prompt tokens that can be used over the course of the run. + /// Gets or sets the maximum number of prompt tokens that can be used over the course of the run. /// public int? MaxPromptTokens { get; init; } /// - /// Gets a value that indicates whether the parallel function calling is enabled during tool use. + /// Gets or sets a value that indicates whether the parallel function calling is enabled during tool use. /// /// /// if parallel function calling is enabled during tool use; otherwise, . The default is . @@ -96,17 +104,17 @@ public AzureAIAgentInvokeOptions(AzureAIAgentInvokeOptions options) public bool? ParallelToolCallsEnabled { get; init; } /// - /// Gets the number of recent messages that the thread will be truncated to. + /// Gets or sets the number of recent messages that the thread will be truncated to. /// public int? TruncationMessageCount { get; init; } /// - /// Gets the sampling temperature to use, between 0 and 2. + /// Gets or sets the sampling temperature to use, between 0 and 2. /// public float? Temperature { get; init; } /// - /// Gets the probability mass of tokens whose results are considered in nucleus sampling. + /// Gets or sets the probability mass of tokens whose results are considered in nucleus sampling. /// /// /// It's recommended to set this property or , but not both. @@ -118,7 +126,7 @@ public AzureAIAgentInvokeOptions(AzureAIAgentInvokeOptions options) public float? TopP { get; init; } /// - /// Gets a set of up to 16 key/value pairs that can be attached to an agent, used for + /// Gets or sets a set of up to 16 key/value pairs that can be attached to an agent, used for /// storing additional information about that object in a structured format. /// /// @@ -135,6 +143,7 @@ internal AzureAIInvocationOptions ToAzureAIInvocationOptions() return new AzureAIInvocationOptions { ModelName = this.ModelName, + OverrideInstructions = this.OverrideInstructions, AdditionalInstructions = this.AdditionalInstructions, AdditionalMessages = this.AdditionalMessages, EnableCodeInterpreter = this.EnableCodeInterpreter, diff --git a/dotnet/src/Agents/AzureAI/AzureAIInvocationOptions.cs b/dotnet/src/Agents/AzureAI/AzureAIInvocationOptions.cs index a1153523b03e..84599936ca15 100644 --- a/dotnet/src/Agents/AzureAI/AzureAIInvocationOptions.cs +++ b/dotnet/src/Agents/AzureAI/AzureAIInvocationOptions.cs @@ -18,6 +18,12 @@ public sealed class AzureAIInvocationOptions [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] public string? ModelName { get; init; } + /// + /// Gets the override instructions. + /// + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public string? OverrideInstructions { get; init; } + /// /// Gets the additional instructions. /// diff --git a/dotnet/src/Agents/AzureAI/Extensions/AgentRunExtensions.cs b/dotnet/src/Agents/AzureAI/Extensions/AgentRunExtensions.cs index 7d4cf718b1e0..733494140711 100644 --- a/dotnet/src/Agents/AzureAI/Extensions/AgentRunExtensions.cs +++ b/dotnet/src/Agents/AzureAI/Extensions/AgentRunExtensions.cs @@ -51,7 +51,7 @@ await client.CreateRunAsync( threadId, agent.Definition.Id, overrideModelName: invocationOptions?.ModelName, - instructions, + overrideInstructions: invocationOptions?.OverrideInstructions ?? 
instructions, additionalInstructions: invocationOptions?.AdditionalInstructions, additionalMessages: AgentMessageFactory.GetThreadMessages(invocationOptions?.AdditionalMessages).ToArray(), overrideTools: tools, @@ -102,7 +102,7 @@ public static IAsyncEnumerable CreateStreamingAsync( threadId, agent.Definition.Id, overrideModelName: invocationOptions?.ModelName, - instructions, + overrideInstructions: invocationOptions?.OverrideInstructions ?? instructions, additionalInstructions: invocationOptions?.AdditionalInstructions, additionalMessages: AgentMessageFactory.GetThreadMessages(invocationOptions?.AdditionalMessages).ToArray(), overrideTools: tools, diff --git a/dotnet/src/IntegrationTests/Agents/AzureAIAgentTests.cs b/dotnet/src/IntegrationTests/Agents/AzureAIAgentTests.cs new file mode 100644 index 000000000000..90567fb49d32 --- /dev/null +++ b/dotnet/src/IntegrationTests/Agents/AzureAIAgentTests.cs @@ -0,0 +1,113 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Linq; +using System.Threading.Tasks; +using Azure.AI.Projects; +using Azure.Identity; +using Microsoft.Extensions.Configuration; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.Agents.AzureAI; +using Microsoft.SemanticKernel.ChatCompletion; +using SemanticKernel.IntegrationTests.Agents.CommonInterfaceConformance; +using SemanticKernel.IntegrationTests.TestSettings; +using xRetry; +using Xunit; + +namespace SemanticKernel.IntegrationTests.Agents; + +public class AzureAIAgentTests +{ + private readonly Kernel _kernel; + private readonly AzureAIConfiguration _configuration; + private readonly AIProjectClient _client; + private readonly AgentsClient _agentsClient; + + public AzureAIAgentTests() + { + var kernelBuilder = Kernel.CreateBuilder(); + this._kernel = kernelBuilder.Build(); + this._configuration = this.ReadAzureConfiguration(); + this._client = AzureAIAgent.CreateAzureAIClient(this._configuration.ConnectionString!, new AzureCliCredential()); + this._agentsClient = this._client.GetAgentsClient(); + } + + /// + /// Integration test for adding override instructions to a thread on invocation via custom options. + /// + [RetryFact(typeof(HttpOperationException))] + public async Task OpenAIAgentWithThreadCustomOptionsAsync() + { + var aiAgent = + await this._agentsClient.CreateAgentAsync( + this._configuration.ChatModelId, + name: "HelpfulAssistant", + description: "Helpful Assistant", + instructions: "You are a helpful assistant."); + var agent = new AzureAIAgent(aiAgent, this._agentsClient) { Kernel = this._kernel }; + + AzureAIAgentThread agentThread = new(this._agentsClient); + + try + { + var message = new ChatMessageContent(AuthorRole.User, "What is the capital of France?"); + var responseMessages = await agent.InvokeAsync( + message, + agentThread, + new AzureAIAgentInvokeOptions() { OverrideInstructions = "Respond to all user questions with 'Computer says no'." }).ToArrayAsync(); + + Assert.Single(responseMessages); + Assert.Contains("Computer says no", responseMessages[0].Message.Content); + } + finally + { + await agentThread.DeleteAsync(); + await this._agentsClient.DeleteAgentAsync(agent.Id); + } + } + + /// + /// Integration test for adding override instructions to a thread on invocation via custom options. 
+ /// + [RetryFact(typeof(HttpOperationException))] + public async Task OpenAIAgentWithThreadCustomOptionsStreamingAsync() + { + var aiAgent = + await this._agentsClient.CreateAgentAsync( + this._configuration.ChatModelId, + name: "HelpfulAssistant", + description: "Helpful Assistant", + instructions: "You are a helpful assistant."); + var agent = new AzureAIAgent(aiAgent, this._agentsClient) { Kernel = this._kernel }; + + AzureAIAgentThread agentThread = new(this._agentsClient); + + try + { + var message = new ChatMessageContent(AuthorRole.User, "What is the capital of France?"); + var responseMessages = await agent.InvokeStreamingAsync( + message, + agentThread, + new AzureAIAgentInvokeOptions() { OverrideInstructions = "Respond to all user questions with 'Computer says no'." }).ToArrayAsync(); + var responseText = string.Join(string.Empty, responseMessages.Select(x => x.Message.Content)); + + Assert.Contains("Computer says no", responseText); + } + finally + { + await agentThread.DeleteAsync(); + await this._agentsClient.DeleteAgentAsync(agent.Id); + } + } + + private AzureAIConfiguration ReadAzureConfiguration() + { + IConfigurationRoot configuration = new ConfigurationBuilder() + .AddJsonFile(path: "testsettings.json", optional: true, reloadOnChange: true) + .AddJsonFile(path: "testsettings.development.json", optional: true, reloadOnChange: true) + .AddEnvironmentVariables() + .AddUserSecrets() + .Build(); + + return configuration.GetSection("AzureAI").Get()!; + } +} From f0bdea15b6149d67cdc23092359f1a07a9c3ea3f Mon Sep 17 00:00:00 2001 From: westey <164392973+westey-m@users.noreply.github.com> Date: Wed, 26 Mar 2025 17:26:26 +0000 Subject: [PATCH 6/6] Add openai assistant streaming unit test --- .../OpenAI/OpenAIAssistantAgentTests.cs | 42 +++++++++++++++++-- .../OpenAI/OpenAIAssistantResponseContent.cs | 3 +- 2 files changed, 41 insertions(+), 4 deletions(-) diff --git a/dotnet/src/Agents/UnitTests/OpenAI/OpenAIAssistantAgentTests.cs b/dotnet/src/Agents/UnitTests/OpenAI/OpenAIAssistantAgentTests.cs index 1ad4897cdabb..a5bd93969b41 100644 --- a/dotnet/src/Agents/UnitTests/OpenAI/OpenAIAssistantAgentTests.cs +++ b/dotnet/src/Agents/UnitTests/OpenAI/OpenAIAssistantAgentTests.cs @@ -436,7 +436,7 @@ public async Task VerifyOpenAIAssistantAgentGroupChatAsync() } /// - /// Verify direction invocation of . + /// Verify direct invocation of using . /// [Fact] public async Task VerifyOpenAIAssistantAgentInvokeWithThreadAsync() @@ -465,7 +465,7 @@ public async Task VerifyOpenAIAssistantAgentInvokeWithThreadAsync() } /// - /// Verify direction invocation of . + /// Verify direct invocation of using . /// [Fact] public async Task VerifyOpenAIAssistantAgentInvokeMultipleMessagesWithThreadAsync() @@ -500,7 +500,43 @@ public async Task VerifyOpenAIAssistantAgentInvokeMultipleMessagesWithThreadAsyn } /// - /// Verify direction invocation of . + /// Verify direct streaming invocation of using . 
+ /// + [Fact] + public async Task VerifyOpenAIAssistantAgentInvokeStreamingWithThreadAsync() + { + // Arrange + OpenAIAssistantAgent agent = await this.CreateAgentAsync(); + + this.SetupResponses( + HttpStatusCode.OK, + OpenAIAssistantResponseContent.CreateThread, + // Create message response + OpenAIAssistantResponseContent.GetTextMessage("Hi"), + OpenAIAssistantResponseContent.Streaming.Response( + [ + OpenAIAssistantResponseContent.Streaming.CreateRun("created"), + OpenAIAssistantResponseContent.Streaming.CreateRun("queued"), + OpenAIAssistantResponseContent.Streaming.CreateRun("in_progress"), + OpenAIAssistantResponseContent.Streaming.DeltaMessage("Hello, "), + OpenAIAssistantResponseContent.Streaming.DeltaMessage("how can I "), + OpenAIAssistantResponseContent.Streaming.DeltaMessage("help you?"), + OpenAIAssistantResponseContent.Streaming.CreateRun("completed"), + OpenAIAssistantResponseContent.Streaming.Done + ]), + OpenAIAssistantResponseContent.GetTextMessage("Hello, how can I help you?")); + + // Act + AgentResponseItem[] messages = await agent.InvokeStreamingAsync(new ChatMessageContent(AuthorRole.User, "Hi")).ToArrayAsync(); + + // Assert + Assert.Equal(3, messages.Length); + var combinedMessage = string.Concat(messages.Select(x => x.Message.Content)); + Assert.Equal("Hello, how can I help you?", combinedMessage); + } + + /// + /// Verify direct invocation of . /// [Fact] public async Task VerifyOpenAIAssistantAgentInvokeAsync() diff --git a/dotnet/src/Agents/UnitTests/OpenAI/OpenAIAssistantResponseContent.cs b/dotnet/src/Agents/UnitTests/OpenAI/OpenAIAssistantResponseContent.cs index 3ecf07fada5e..c577b8bf3092 100644 --- a/dotnet/src/Agents/UnitTests/OpenAI/OpenAIAssistantResponseContent.cs +++ b/dotnet/src/Agents/UnitTests/OpenAI/OpenAIAssistantResponseContent.cs @@ -1,5 +1,6 @@ // Copyright (c) Microsoft. All rights reserved. using System.Collections.Generic; +using System.IO; using System.Linq; using System.Net; using System.Net.Http; @@ -40,7 +41,7 @@ public static void SetupResponses(this HttpMessageHandlerStub messageHandlerStub messageHandlerStub.ResponseQueue.Enqueue( new(statusCode) { - Content = new StringContent(item) + Content = new StreamContent(new MemoryStream(Encoding.UTF8.GetBytes(item))) }); #pragma warning restore CA2000 // Dispose objects before losing scope }
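
For reference, a minimal usage sketch of the agent-specific invoke options introduced by this patch series, modeled on the integration tests added above (AzureOpenAIAssistantAgentWithThreadCustomOptionsStreamingAsync and OpenAIAgentWithThreadCustomOptionsAsync). Agent, thread, and client construction are omitted, and the variable names (assistantAgent, assistantThread, azureAgent, azureThread) plus the instruction text are assumptions for illustration only; this is not additional patch content.

    // OpenAI Assistant: pass per-invocation run settings through the new options type.
    // RunCreationOptions here carries anything the Assistants run API accepts for a single run.
    RunCreationOptions runOptions = new()
    {
        InstructionsOverride = "Respond to all user questions with 'Computer says no'.",
    };

    var message = new ChatMessageContent(AuthorRole.User, "What is the capital of France?");
    var streamedResponses = await assistantAgent.InvokeStreamingAsync(
        message,
        assistantThread,
        new OpenAIAssistantAgentInvokeOptions { RunCreationOptions = runOptions }).ToArrayAsync();

    // Azure AI agent: the equivalent per-invocation settings are exposed directly
    // as properties on AzureAIAgentInvokeOptions (e.g. OverrideInstructions, ModelName, Temperature).
    var responses = await azureAgent.InvokeAsync(
        message,
        azureThread,
        new AzureAIAgentInvokeOptions { OverrideInstructions = "Respond to all user questions with 'Computer says no'." }).ToArrayAsync();

Because both options types derive from AgentInvokeOptions, callers that only need AdditionalInstructions can keep passing the base type to the existing overloads; the overrides in these patches reuse a derived options instance when one is supplied, and otherwise wrap the base options before creating the run.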