Skip to content

.Net: Add support for agent specific params #11201

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Merged
merged 13 commits into from
Mar 27, 2025
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 2 additions & 2 deletions dotnet/src/Agents/Abstractions/Agent.cs
Original file line number Diff line number Diff line change
Expand Up @@ -180,7 +180,7 @@ public virtual IAsyncEnumerable<AgentResponseItem<StreamingChatMessageContent>>
/// <param name="thread">The conversation thread to continue with this invocation. If not provided, creates a new thread.</param>
/// <param name="options">Optional parameters for agent invocation.</param>
/// <param name="cancellationToken">The <see cref="CancellationToken"/> to monitor for cancellation requests. The default is <see cref="CancellationToken.None"/>.</param>
/// <returns>An async list of response items that each contain a <see cref="ChatMessageContent"/> and an <see cref="AgentThread"/>.</returns>
/// <returns>An async list of response items that each contain a <see cref="StreamingChatMessageContent"/> and an <see cref="AgentThread"/>.</returns>
/// <remarks>
/// To continue this thread in the future, use an <see cref="AgentThread"/> returned in one of the response items.
/// </remarks>
Expand All @@ -202,7 +202,7 @@ public virtual IAsyncEnumerable<AgentResponseItem<StreamingChatMessageContent>>
/// <param name="thread">The conversation thread to continue with this invocation. If not provided, creates a new thread.</param>
/// <param name="options">Optional parameters for agent invocation.</param>
/// <param name="cancellationToken">The <see cref="CancellationToken"/> to monitor for cancellation requests. The default is <see cref="CancellationToken.None"/>.</param>
/// <returns>An async list of response items that each contain a <see cref="ChatMessageContent"/> and an <see cref="AgentThread"/>.</returns>
/// <returns>An async list of response items that each contain a <see cref="StreamingChatMessageContent"/> and an <see cref="AgentThread"/>.</returns>
/// <remarks>
/// To continue this thread in the future, use an <see cref="AgentThread"/> returned in one of the response items.
/// </remarks>
Expand Down
20 changes: 20 additions & 0 deletions dotnet/src/Agents/Abstractions/AgentInvokeOptions.cs
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,26 @@ namespace Microsoft.SemanticKernel.Agents;
/// </summary>
public class AgentInvokeOptions
{
/// <summary>
/// Initializes a new instance of the <see cref="AgentInvokeOptions"/> class.
/// </summary>
public AgentInvokeOptions()
{
    // Intentionally empty: all options are assigned via property initializers.
}

/// <summary>
/// Initializes a new instance of the <see cref="AgentInvokeOptions"/> class by cloning the provided options.
/// </summary>
/// <param name="options">The options to clone.</param>
/// <remarks>
/// This is a shallow copy: the <see cref="KernelArguments"/> and <see cref="Kernel"/> instances
/// are shared by reference with <paramref name="options"/>, not duplicated.
/// </remarks>
public AgentInvokeOptions(AgentInvokeOptions options)
{
    // Throws ArgumentNullException when options is null.
    Verify.NotNull(options);

    this.KernelArguments = options.KernelArguments;
    this.Kernel = options.Kernel;
    this.AdditionalInstructions = options.AdditionalInstructions;
}

/// <summary>
/// Gets or sets optional arguments to pass to the agent's invocation, including any <see cref="PromptExecutionSettings"/>
/// </summary>
Expand Down
75 changes: 59 additions & 16 deletions dotnet/src/Agents/AzureAI/AzureAIAgent.cs
Original file line number Diff line number Diff line change
Expand Up @@ -144,10 +144,36 @@ public IAsyncEnumerable<ChatMessageContent> InvokeAsync(
}

/// <inheritdoc/>
public override async IAsyncEnumerable<AgentResponseItem<ChatMessageContent>> InvokeAsync(
public override IAsyncEnumerable<AgentResponseItem<ChatMessageContent>> InvokeAsync(
    ICollection<ChatMessageContent> messages,
    AgentThread? thread = null,
    AgentInvokeOptions? options = null,
    CancellationToken cancellationToken = default)
{
    // Normalize the generic options into the Azure AI specific variant before
    // delegating to the strongly typed overload:
    //   - null stays null,
    //   - an AzureAIAgentInvokeOptions instance is used as-is,
    //   - anything else is wrapped via the cloning constructor.
    AzureAIAgentInvokeOptions? azureAIOptions = options switch
    {
        null => null,
        AzureAIAgentInvokeOptions concreteOptions => concreteOptions,
        _ => new AzureAIAgentInvokeOptions(options),
    };

    return this.InvokeAsync(messages, thread, azureAIOptions, cancellationToken);
}

/// <summary>
/// Invoke the agent with the provided message and arguments.
/// </summary>
/// <param name="messages">The messages to pass to the agent.</param>
/// <param name="thread">The conversation thread to continue with this invocation. If not provided, creates a new thread.</param>
/// <param name="options">Optional parameters for agent invocation.</param>
/// <param name="cancellationToken">The <see cref="CancellationToken"/> to monitor for cancellation requests. The default is <see cref="CancellationToken.None"/>.</param>
/// <returns>An async list of response items that each contain a <see cref="ChatMessageContent"/> and an <see cref="AgentThread"/>.</returns>
/// <remarks>
/// To continue this thread in the future, use an <see cref="AgentThread"/> returned in one of the response items.
/// </remarks>
public async IAsyncEnumerable<AgentResponseItem<ChatMessageContent>> InvokeAsync(
ICollection<ChatMessageContent> messages,
AgentThread? thread = null,
AzureAIAgentInvokeOptions? options = null,
[EnumeratorCancellation] CancellationToken cancellationToken = default)
{
Verify.NotNull(messages);
Expand All @@ -158,19 +184,15 @@ public override async IAsyncEnumerable<AgentResponseItem<ChatMessageContent>> In
() => new AzureAIAgentThread(this.Client),
cancellationToken).ConfigureAwait(false);

// Create options that include the additional instructions.
var internalOptions = string.IsNullOrWhiteSpace(options?.AdditionalInstructions) ? null : new AzureAIInvocationOptions()
{
AdditionalInstructions = options?.AdditionalInstructions,
};

#pragma warning disable CS0618 // Type or member is obsolete
// Invoke the Agent with the thread that we already added our message to.
var invokeResults = this.InvokeAsync(
azureAIAgentThread.Id!,
internalOptions,
options?.ToAzureAIInvocationOptions(),
this.MergeArguments(options?.KernelArguments),
options?.Kernel ?? this.Kernel,
cancellationToken);
#pragma warning restore CS0618 // Type or member is obsolete

// Notify the thread of new messages and return them to the caller.
await foreach (var result in invokeResults.ConfigureAwait(false))
Expand All @@ -192,6 +214,7 @@ public override async IAsyncEnumerable<AgentResponseItem<ChatMessageContent>> In
/// <remarks>
/// The `arguments` parameter is not currently used by the agent, but is provided for future extensibility.
/// </remarks>
[Obsolete("Use InvokeAsync with AgentThread instead.")]
public IAsyncEnumerable<ChatMessageContent> InvokeAsync(
string threadId,
AzureAIInvocationOptions? options,
Expand Down Expand Up @@ -220,10 +243,36 @@ async IAsyncEnumerable<ChatMessageContent> InternalInvokeAsync()
}

/// <inheritdoc/>
public async override IAsyncEnumerable<AgentResponseItem<StreamingChatMessageContent>> InvokeStreamingAsync(
public override IAsyncEnumerable<AgentResponseItem<StreamingChatMessageContent>> InvokeStreamingAsync(
    ICollection<ChatMessageContent> messages,
    AgentThread? thread = null,
    AgentInvokeOptions? options = null,
    CancellationToken cancellationToken = default)
{
    // Normalize the generic options into the Azure AI specific variant before
    // delegating to the strongly typed overload:
    //   - null stays null,
    //   - an AzureAIAgentInvokeOptions instance is used as-is,
    //   - anything else is wrapped via the cloning constructor.
    AzureAIAgentInvokeOptions? azureAIOptions = options switch
    {
        null => null,
        AzureAIAgentInvokeOptions concreteOptions => concreteOptions,
        _ => new AzureAIAgentInvokeOptions(options),
    };

    return this.InvokeStreamingAsync(messages, thread, azureAIOptions, cancellationToken);
}

/// <summary>
/// Invoke the agent with the provided message and arguments.
/// </summary>
/// <param name="messages">The messages to pass to the agent.</param>
/// <param name="thread">The conversation thread to continue with this invocation. If not provided, creates a new thread.</param>
/// <param name="options">Optional parameters for agent invocation.</param>
/// <param name="cancellationToken">The <see cref="CancellationToken"/> to monitor for cancellation requests. The default is <see cref="CancellationToken.None"/>.</param>
/// <returns>An async list of response items that each contain a <see cref="StreamingChatMessageContent"/> and an <see cref="AgentThread"/>.</returns>
/// <remarks>
/// To continue this thread in the future, use an <see cref="AgentThread"/> returned in one of the response items.
/// </remarks>
public async IAsyncEnumerable<AgentResponseItem<StreamingChatMessageContent>> InvokeStreamingAsync(
ICollection<ChatMessageContent> messages,
AgentThread? thread = null,
AzureAIAgentInvokeOptions? options = null,
[EnumeratorCancellation] CancellationToken cancellationToken = default)
{
Verify.NotNull(messages);
Expand All @@ -234,17 +283,11 @@ public async override IAsyncEnumerable<AgentResponseItem<StreamingChatMessageCon
() => new AzureAIAgentThread(this.Client),
cancellationToken).ConfigureAwait(false);

// Create options that include the additional instructions.
var internalOptions = string.IsNullOrWhiteSpace(options?.AdditionalInstructions) ? null : new AzureAIInvocationOptions()
{
AdditionalInstructions = options?.AdditionalInstructions,
};

// Invoke the Agent with the thread that we already added our message to.
var newMessagesReceiver = new ChatHistory();
var invokeResults = this.InvokeStreamingAsync(
azureAIAgentThread.Id!,
internalOptions,
options?.ToAzureAIInvocationOptions(),
this.MergeArguments(options?.KernelArguments),
options?.Kernel ?? this.Kernel,
newMessagesReceiver,
Expand Down
161 changes: 161 additions & 0 deletions dotnet/src/Agents/AzureAI/AzureAIAgentInvokeOptions.cs
Original file line number Diff line number Diff line change
@@ -0,0 +1,161 @@
// Copyright (c) Microsoft. All rights reserved.

using System.Collections.Generic;
using System.Text.Json.Serialization;

namespace Microsoft.SemanticKernel.Agents.AzureAI;

/// <summary>
/// Optional parameters for <see cref="AzureAIAgent"/> invocation.
/// </summary>
public sealed class AzureAIAgentInvokeOptions : AgentInvokeOptions
{
    /// <summary>
    /// Initializes a new instance of the <see cref="AzureAIAgentInvokeOptions"/> class.
    /// </summary>
    public AzureAIAgentInvokeOptions()
    {
    }

    /// <summary>
    /// Initializes a new instance of the <see cref="AzureAIAgentInvokeOptions"/> class by cloning the provided options.
    /// </summary>
    /// <param name="options">The options to clone.</param>
    /// <exception cref="System.ArgumentNullException"><paramref name="options"/> is <see langword="null"/>.</exception>
    public AzureAIAgentInvokeOptions(AgentInvokeOptions options)
        : base(options) // The base constructor validates that options is not null.
    {
        // If the supplied options are already Azure AI specific, preserve the
        // Azure AI settings as well instead of silently slicing them away.
        if (options is AzureAIAgentInvokeOptions azureAIOptions)
        {
            this.ModelName = azureAIOptions.ModelName;
            this.OverrideInstructions = azureAIOptions.OverrideInstructions;
            this.AdditionalMessages = azureAIOptions.AdditionalMessages;
            this.EnableCodeInterpreter = azureAIOptions.EnableCodeInterpreter;
            this.EnableFileSearch = azureAIOptions.EnableFileSearch;
            this.EnableJsonResponse = azureAIOptions.EnableJsonResponse;
            this.MaxCompletionTokens = azureAIOptions.MaxCompletionTokens;
            this.MaxPromptTokens = azureAIOptions.MaxPromptTokens;
            this.ParallelToolCallsEnabled = azureAIOptions.ParallelToolCallsEnabled;
            this.TruncationMessageCount = azureAIOptions.TruncationMessageCount;
            this.Temperature = azureAIOptions.Temperature;
            this.TopP = azureAIOptions.TopP;
            this.Metadata = azureAIOptions.Metadata;
        }
    }

    /// <summary>
    /// Initializes a new instance of the <see cref="AzureAIAgentInvokeOptions"/> class by cloning the provided options.
    /// </summary>
    /// <param name="options">The options to clone.</param>
    /// <exception cref="System.ArgumentNullException"><paramref name="options"/> is <see langword="null"/>.</exception>
    public AzureAIAgentInvokeOptions(AzureAIAgentInvokeOptions options)
        : this((AgentInvokeOptions)options) // Delegates to the base-typed overload, which copies all Azure AI settings.
    {
    }

    /// <summary>
    /// Gets or sets the AI model targeted by the agent.
    /// </summary>
    public string? ModelName { get; init; }

    /// <summary>
    /// Gets or sets the override instructions.
    /// </summary>
    [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
    public string? OverrideInstructions { get; init; }

    /// <summary>
    /// Gets or sets the additional messages to add to the thread.
    /// </summary>
    /// <remarks>
    /// Only supports messages with <see href="https://platform.openai.com/docs/api-reference/runs/createRun#runs-createrun-additional_messages">role = User or Assistant</see>.
    /// </remarks>
    public IReadOnlyList<ChatMessageContent>? AdditionalMessages { get; init; }

    /// <summary>
    /// Gets or sets a value that indicates whether the code_interpreter tool is enabled.
    /// </summary>
    public bool EnableCodeInterpreter { get; init; }

    /// <summary>
    /// Gets or sets a value that indicates whether the file_search tool is enabled.
    /// </summary>
    public bool EnableFileSearch { get; init; }

    /// <summary>
    /// Gets or sets a value that indicates whether the JSON response format is enabled.
    /// </summary>
    public bool? EnableJsonResponse { get; init; }

    /// <summary>
    /// Gets or sets the maximum number of completion tokens that can be used over the course of the run.
    /// </summary>
    public int? MaxCompletionTokens { get; init; }

    /// <summary>
    /// Gets or sets the maximum number of prompt tokens that can be used over the course of the run.
    /// </summary>
    public int? MaxPromptTokens { get; init; }

    /// <summary>
    /// Gets or sets a value that indicates whether the parallel function calling is enabled during tool use.
    /// </summary>
    /// <value>
    /// <see langword="true"/> if parallel function calling is enabled during tool use; otherwise, <see langword="false"/>. The default is <see langword="true"/>.
    /// </value>
    public bool? ParallelToolCallsEnabled { get; init; }

    /// <summary>
    /// Gets or sets the number of recent messages that the thread will be truncated to.
    /// </summary>
    public int? TruncationMessageCount { get; init; }

    /// <summary>
    /// Gets or sets the sampling temperature to use, between 0 and 2.
    /// </summary>
    public float? Temperature { get; init; }

    /// <summary>
    /// Gets or sets the probability mass of tokens whose results are considered in nucleus sampling.
    /// </summary>
    /// <remarks>
    /// It's recommended to set this property or <see cref="Temperature"/>, but not both.
    ///
    /// Nucleus sampling is an alternative to sampling with temperature where the model
    /// considers the results of the tokens with <see cref="TopP"/> probability mass.
    /// For example, 0.1 means only the tokens comprising the top 10% probability mass are considered.
    /// </remarks>
    public float? TopP { get; init; }

    /// <summary>
    /// Gets or sets a set of up to 16 key/value pairs that can be attached to an agent, used for
    /// storing additional information about that object in a structured format.
    /// </summary>
    /// <remarks>
    /// Keys can be up to 64 characters in length, and values can be up to 512 characters in length.
    /// </remarks>
    public IReadOnlyDictionary<string, string>? Metadata { get; init; }

    /// <summary>
    /// Converts the current options to an <see cref="AzureAIInvocationOptions"/> instance.
    /// </summary>
    /// <returns>The converted <see cref="AzureAIInvocationOptions"/> instance.</returns>
    internal AzureAIInvocationOptions ToAzureAIInvocationOptions()
    {
        return new AzureAIInvocationOptions
        {
            ModelName = this.ModelName,
            OverrideInstructions = this.OverrideInstructions,
            AdditionalInstructions = this.AdditionalInstructions,
            AdditionalMessages = this.AdditionalMessages,
            EnableCodeInterpreter = this.EnableCodeInterpreter,
            EnableFileSearch = this.EnableFileSearch,
            EnableJsonResponse = this.EnableJsonResponse,
            MaxCompletionTokens = this.MaxCompletionTokens,
            MaxPromptTokens = this.MaxPromptTokens,
            ParallelToolCallsEnabled = this.ParallelToolCallsEnabled,
            TruncationMessageCount = this.TruncationMessageCount,
            Temperature = this.Temperature,
            TopP = this.TopP,
            Metadata = this.Metadata
        };
    }
}
6 changes: 6 additions & 0 deletions dotnet/src/Agents/AzureAI/AzureAIInvocationOptions.cs
Original file line number Diff line number Diff line change
Expand Up @@ -18,6 +18,12 @@ public sealed class AzureAIInvocationOptions
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
public string? ModelName { get; init; }

/// <summary>
/// Gets the override instructions.
/// </summary>
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
public string? OverrideInstructions { get; init; }

/// <summary>
/// Gets the additional instructions.
/// </summary>
Expand Down
4 changes: 2 additions & 2 deletions dotnet/src/Agents/AzureAI/Extensions/AgentRunExtensions.cs
Original file line number Diff line number Diff line change
Expand Up @@ -51,7 +51,7 @@ await client.CreateRunAsync(
threadId,
agent.Definition.Id,
overrideModelName: invocationOptions?.ModelName,
instructions,
overrideInstructions: invocationOptions?.OverrideInstructions ?? instructions,
additionalInstructions: invocationOptions?.AdditionalInstructions,
additionalMessages: AgentMessageFactory.GetThreadMessages(invocationOptions?.AdditionalMessages).ToArray(),
overrideTools: tools,
Expand Down Expand Up @@ -102,7 +102,7 @@ public static IAsyncEnumerable<StreamingUpdate> CreateStreamingAsync(
threadId,
agent.Definition.Id,
overrideModelName: invocationOptions?.ModelName,
instructions,
overrideInstructions: invocationOptions?.OverrideInstructions ?? instructions,
additionalInstructions: invocationOptions?.AdditionalInstructions,
additionalMessages: AgentMessageFactory.GetThreadMessages(invocationOptions?.AdditionalMessages).ToArray(),
overrideTools: tools,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -10,13 +10,13 @@ namespace Microsoft.SemanticKernel.Agents.OpenAI.Internal;
/// </summary>
internal static class AssistantRunOptionsFactory
{
public static RunCreationOptions GenerateOptions(RunCreationOptions? defaultOptions, string? overrideInstructions, RunCreationOptions? invocationOptions)
public static RunCreationOptions GenerateOptions(RunCreationOptions? defaultOptions, string? agentInstructions, RunCreationOptions? invocationOptions)
{
RunCreationOptions runOptions =
new()
{
AdditionalInstructions = invocationOptions?.AdditionalInstructions ?? defaultOptions?.AdditionalInstructions,
InstructionsOverride = overrideInstructions,
InstructionsOverride = invocationOptions?.InstructionsOverride ?? agentInstructions,
MaxOutputTokenCount = invocationOptions?.MaxOutputTokenCount ?? defaultOptions?.MaxOutputTokenCount,
MaxInputTokenCount = invocationOptions?.MaxInputTokenCount ?? defaultOptions?.MaxInputTokenCount,
ModelOverride = invocationOptions?.ModelOverride ?? defaultOptions?.ModelOverride,
Expand Down
Loading
Loading