
Commit 8875f9f

.Net: Add support for agent specific params (#11201)
### Motivation and Context

We should support agent specific parameters on the concrete agent implementations for invoke.

### Description

Add support for agent specific params.

### Contribution Checklist

- [ ] The code builds clean without any errors or warnings
- [ ] The PR follows the [SK Contribution Guidelines](https://github.com/microsoft/semantic-kernel/blob/main/CONTRIBUTING.md) and the [pre-submission formatting script](https://github.com/microsoft/semantic-kernel/blob/main/CONTRIBUTING.md#development-scripts) raises no violations
- [ ] All unit tests pass, and I have added new tests where possible
- [ ] I didn't break anyone 😄
1 parent d35d0c9 commit 8875f9f

15 files changed: +800 −32 lines
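
In practice, the change lets a caller pass a concrete, agent-specific options type (here `AzureAIAgentInvokeOptions`) to an agent's `InvokeAsync`, while a plain `AgentInvokeOptions` keeps working. A minimal usage sketch, assuming an existing `AzureAIAgent` instance; the method name, variable names, and option values are illustrative and not part of this commit:

```csharp
using System;
using System.Threading.Tasks;
using Microsoft.SemanticKernel;
using Microsoft.SemanticKernel.Agents;
using Microsoft.SemanticKernel.Agents.AzureAI;
using Microsoft.SemanticKernel.ChatCompletion;

static async Task InvokeWithAgentSpecificOptionsAsync(AzureAIAgent agent)
{
    // Agent-specific options: AdditionalInstructions comes from the base AgentInvokeOptions,
    // the rest are Azure AI run-level parameters introduced by this commit.
    AzureAIAgentInvokeOptions options = new()
    {
        AdditionalInstructions = "Answer in one short paragraph.",
        Temperature = 0.2f,
        MaxCompletionTokens = 500,
    };

    var messages = new[] { new ChatMessageContent(AuthorRole.User, "Summarize the latest report.") };

    // No thread is passed, so a new one is created; it is returned on each response item.
    await foreach (AgentResponseItem<ChatMessageContent> item in agent.InvokeAsync(messages, options: options))
    {
        Console.WriteLine(item.Message.Content);
    }
}
```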

dotnet/src/Agents/Abstractions/Agent.cs

+2 −2

@@ -180,7 +180,7 @@ public virtual IAsyncEnumerable<AgentResponseItem<StreamingChatMessageContent>>
     /// <param name="thread">The conversation thread to continue with this invocation. If not provided, creates a new thread.</param>
     /// <param name="options">Optional parameters for agent invocation.</param>
     /// <param name="cancellationToken">The <see cref="CancellationToken"/> to monitor for cancellation requests. The default is <see cref="CancellationToken.None"/>.</param>
-    /// <returns>An async list of response items that each contain a <see cref="ChatMessageContent"/> and an <see cref="AgentThread"/>.</returns>
+    /// <returns>An async list of response items that each contain a <see cref="StreamingChatMessageContent"/> and an <see cref="AgentThread"/>.</returns>
     /// <remarks>
     /// To continue this thread in the future, use an <see cref="AgentThread"/> returned in one of the response items.
     /// </remarks>
@@ -202,7 +202,7 @@ public virtual IAsyncEnumerable<AgentResponseItem<StreamingChatMessageContent>>
     /// <param name="thread">The conversation thread to continue with this invocation. If not provided, creates a new thread.</param>
     /// <param name="options">Optional parameters for agent invocation.</param>
     /// <param name="cancellationToken">The <see cref="CancellationToken"/> to monitor for cancellation requests. The default is <see cref="CancellationToken.None"/>.</param>
-    /// <returns>An async list of response items that each contain a <see cref="ChatMessageContent"/> and an <see cref="AgentThread"/>.</returns>
+    /// <returns>An async list of response items that each contain a <see cref="StreamingChatMessageContent"/> and an <see cref="AgentThread"/>.</returns>
     /// <remarks>
     /// To continue this thread in the future, use an <see cref="AgentThread"/> returned in one of the response items.
     /// </remarks>

dotnet/src/Agents/Abstractions/AgentInvokeOptions.cs

+20

@@ -7,6 +7,26 @@ namespace Microsoft.SemanticKernel.Agents;
 /// </summary>
 public class AgentInvokeOptions
 {
+    /// <summary>
+    /// Initializes a new instance of the <see cref="AgentInvokeOptions"/> class.
+    /// </summary>
+    public AgentInvokeOptions()
+    {
+    }
+
+    /// <summary>
+    /// Initializes a new instance of the <see cref="AgentInvokeOptions"/> class by cloning the provided options.
+    /// </summary>
+    /// <param name="options">The options to clone.</param>
+    public AgentInvokeOptions(AgentInvokeOptions options)
+    {
+        Verify.NotNull(options);
+
+        this.KernelArguments = options.KernelArguments;
+        this.Kernel = options.Kernel;
+        this.AdditionalInstructions = options.AdditionalInstructions;
+    }
+
     /// <summary>
     /// Gets or sets optional arguments to pass to the agent's invocation, including any <see cref="PromptExecutionSettings"/>
     /// </summary>
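
The new copy constructor is what lets derived option types (such as the `AzureAIAgentInvokeOptions` added below) be built from base options without dropping the base settings. A small hedged sketch of direct use; the option values are made up for illustration:

```csharp
using Microsoft.SemanticKernel.Agents;

// Shared defaults, reused across several invocations.
AgentInvokeOptions sharedDefaults = new()
{
    AdditionalInstructions = "Keep answers brief.",
};

// Clone-and-tweak: the copy constructor carries over Kernel, KernelArguments and
// AdditionalInstructions, so the original instance is left untouched.
AgentInvokeOptions perCallOptions = new(sharedDefaults)
{
    AdditionalInstructions = sharedDefaults.AdditionalInstructions + " Cite your sources.",
};
```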

dotnet/src/Agents/AzureAI/AzureAIAgent.cs

+59 −16

@@ -144,10 +144,36 @@ public IAsyncEnumerable<ChatMessageContent> InvokeAsync(
     }

     /// <inheritdoc/>
-    public override async IAsyncEnumerable<AgentResponseItem<ChatMessageContent>> InvokeAsync(
+    public override IAsyncEnumerable<AgentResponseItem<ChatMessageContent>> InvokeAsync(
         ICollection<ChatMessageContent> messages,
         AgentThread? thread = null,
         AgentInvokeOptions? options = null,
+        CancellationToken cancellationToken = default)
+    {
+        return this.InvokeAsync(
+            messages,
+            thread,
+            options is null ?
+                null :
+                options is AzureAIAgentInvokeOptions azureAIAgentInvokeOptions ? azureAIAgentInvokeOptions : new AzureAIAgentInvokeOptions(options),
+            cancellationToken);
+    }
+
+    /// <summary>
+    /// Invoke the agent with the provided message and arguments.
+    /// </summary>
+    /// <param name="messages">The messages to pass to the agent.</param>
+    /// <param name="thread">The conversation thread to continue with this invocation. If not provided, creates a new thread.</param>
+    /// <param name="options">Optional parameters for agent invocation.</param>
+    /// <param name="cancellationToken">The <see cref="CancellationToken"/> to monitor for cancellation requests. The default is <see cref="CancellationToken.None"/>.</param>
+    /// <returns>An async list of response items that each contain a <see cref="ChatMessageContent"/> and an <see cref="AgentThread"/>.</returns>
+    /// <remarks>
+    /// To continue this thread in the future, use an <see cref="AgentThread"/> returned in one of the response items.
+    /// </remarks>
+    public async IAsyncEnumerable<AgentResponseItem<ChatMessageContent>> InvokeAsync(
+        ICollection<ChatMessageContent> messages,
+        AgentThread? thread = null,
+        AzureAIAgentInvokeOptions? options = null,
         [EnumeratorCancellation] CancellationToken cancellationToken = default)
     {
         Verify.NotNull(messages);
@@ -158,19 +184,15 @@ public override async IAsyncEnumerable<AgentResponseItem<ChatMessageContent>> In
             () => new AzureAIAgentThread(this.Client),
             cancellationToken).ConfigureAwait(false);

-        // Create options that include the additional instructions.
-        var internalOptions = string.IsNullOrWhiteSpace(options?.AdditionalInstructions) ? null : new AzureAIInvocationOptions()
-        {
-            AdditionalInstructions = options?.AdditionalInstructions,
-        };
-
+#pragma warning disable CS0618 // Type or member is obsolete
         // Invoke the Agent with the thread that we already added our message to.
         var invokeResults = this.InvokeAsync(
             azureAIAgentThread.Id!,
-            internalOptions,
+            options?.ToAzureAIInvocationOptions(),
             this.MergeArguments(options?.KernelArguments),
             options?.Kernel ?? this.Kernel,
             cancellationToken);
+#pragma warning restore CS0618 // Type or member is obsolete

         // Notify the thread of new messages and return them to the caller.
         await foreach (var result in invokeResults.ConfigureAwait(false))
@@ -192,6 +214,7 @@ public override async IAsyncEnumerable<AgentResponseItem<ChatMessageContent>> In
     /// <remarks>
     /// The `arguments` parameter is not currently used by the agent, but is provided for future extensibility.
     /// </remarks>
+    [Obsolete("Use InvokeAsync with AgentThread instead.")]
     public IAsyncEnumerable<ChatMessageContent> InvokeAsync(
         string threadId,
         AzureAIInvocationOptions? options,
@@ -220,10 +243,36 @@ async IAsyncEnumerable<ChatMessageContent> InternalInvokeAsync()
     }

     /// <inheritdoc/>
-    public async override IAsyncEnumerable<AgentResponseItem<StreamingChatMessageContent>> InvokeStreamingAsync(
+    public override IAsyncEnumerable<AgentResponseItem<StreamingChatMessageContent>> InvokeStreamingAsync(
         ICollection<ChatMessageContent> messages,
         AgentThread? thread = null,
         AgentInvokeOptions? options = null,
+        CancellationToken cancellationToken = default)
+    {
+        return this.InvokeStreamingAsync(
+            messages,
+            thread,
+            options is null ?
+                null :
+                options is AzureAIAgentInvokeOptions azureAIAgentInvokeOptions ? azureAIAgentInvokeOptions : new AzureAIAgentInvokeOptions(options),
+            cancellationToken);
+    }
+
+    /// <summary>
+    /// Invoke the agent with the provided message and arguments.
+    /// </summary>
+    /// <param name="messages">The messages to pass to the agent.</param>
+    /// <param name="thread">The conversation thread to continue with this invocation. If not provided, creates a new thread.</param>
+    /// <param name="options">Optional parameters for agent invocation.</param>
+    /// <param name="cancellationToken">The <see cref="CancellationToken"/> to monitor for cancellation requests. The default is <see cref="CancellationToken.None"/>.</param>
+    /// <returns>An async list of response items that each contain a <see cref="StreamingChatMessageContent"/> and an <see cref="AgentThread"/>.</returns>
+    /// <remarks>
+    /// To continue this thread in the future, use an <see cref="AgentThread"/> returned in one of the response items.
+    /// </remarks>
+    public async IAsyncEnumerable<AgentResponseItem<StreamingChatMessageContent>> InvokeStreamingAsync(
+        ICollection<ChatMessageContent> messages,
+        AgentThread? thread = null,
+        AzureAIAgentInvokeOptions? options = null,
         [EnumeratorCancellation] CancellationToken cancellationToken = default)
     {
         Verify.NotNull(messages);
@@ -234,17 +283,11 @@ public async override IAsyncEnumerable<AgentResponseItem<StreamingChatMessageCon
             () => new AzureAIAgentThread(this.Client),
             cancellationToken).ConfigureAwait(false);

-        // Create options that include the additional instructions.
-        var internalOptions = string.IsNullOrWhiteSpace(options?.AdditionalInstructions) ? null : new AzureAIInvocationOptions()
-        {
-            AdditionalInstructions = options?.AdditionalInstructions,
-        };
-
         // Invoke the Agent with the thread that we already added our message to.
         var newMessagesReceiver = new ChatHistory();
         var invokeResults = this.InvokeStreamingAsync(
             azureAIAgentThread.Id!,
-            internalOptions,
+            options?.ToAzureAIInvocationOptions(),
             this.MergeArguments(options?.KernelArguments),
             options?.Kernel ?? this.Kernel,
             newMessagesReceiver,
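
The non-async overrides above only normalize the options argument: a plain `AgentInvokeOptions` is wrapped via the new `AzureAIAgentInvokeOptions(options)` clone constructor, while an `AzureAIAgentInvokeOptions` passes through unchanged to the typed overload. A hedged sketch of both call shapes; the agent instance, method name, and message text are placeholders:

```csharp
using System;
using System.Threading.Tasks;
using Microsoft.SemanticKernel;
using Microsoft.SemanticKernel.Agents;
using Microsoft.SemanticKernel.Agents.AzureAI;
using Microsoft.SemanticKernel.ChatCompletion;

static async Task InvokeBothWaysAsync(AzureAIAgent agent)
{
    var messages = new[] { new ChatMessageContent(AuthorRole.User, "What changed in this release?") };

    // 1. Base options resolve to the override above and are wrapped into AzureAIAgentInvokeOptions.
    AgentInvokeOptions baseOptions = new() { AdditionalInstructions = "Be concise." };
    await foreach (var item in agent.InvokeAsync(messages, options: baseOptions))
    {
        Console.WriteLine(item.Message.Content);
    }

    // 2. Typed options go straight to the typed overload and expose Azure AI run settings.
    AzureAIAgentInvokeOptions typedOptions = new()
    {
        AdditionalInstructions = "Be concise.",
        ParallelToolCallsEnabled = false,
        TruncationMessageCount = 20,
    };
    await foreach (var item in agent.InvokeAsync(messages, options: typedOptions))
    {
        Console.WriteLine(item.Message.Content);
    }
}
```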
dotnet/src/Agents/AzureAI/AzureAIAgentInvokeOptions.cs

+161

@@ -0,0 +1,161 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+using System.Collections.Generic;
+using System.Text.Json.Serialization;
+
+namespace Microsoft.SemanticKernel.Agents.AzureAI;
+
+/// <summary>
+/// Optional parameters for <see cref="AzureAIAgent"/> invocation.
+/// </summary>
+public sealed class AzureAIAgentInvokeOptions : AgentInvokeOptions
+{
+    /// <summary>
+    /// Initializes a new instance of the <see cref="AzureAIAgentInvokeOptions"/> class.
+    /// </summary>
+    public AzureAIAgentInvokeOptions()
+    {
+    }
+
+    /// <summary>
+    /// Initializes a new instance of the <see cref="AzureAIAgentInvokeOptions"/> class by cloning the provided options.
+    /// </summary>
+    /// <param name="options">The options to clone.</param>
+    public AzureAIAgentInvokeOptions(AgentInvokeOptions options)
+        : base(options)
+    {
+        Verify.NotNull(options);
+    }
+
+    /// <summary>
+    /// Initializes a new instance of the <see cref="AzureAIAgentInvokeOptions"/> class by cloning the provided options.
+    /// </summary>
+    /// <param name="options">The options to clone.</param>
+    public AzureAIAgentInvokeOptions(AzureAIAgentInvokeOptions options)
+        : base(options)
+    {
+        Verify.NotNull(options);
+
+        this.ModelName = options.ModelName;
+        this.OverrideInstructions = options.OverrideInstructions;
+        this.AdditionalMessages = options.AdditionalMessages;
+        this.EnableCodeInterpreter = options.EnableCodeInterpreter;
+        this.EnableFileSearch = options.EnableFileSearch;
+        this.EnableJsonResponse = options.EnableJsonResponse;
+        this.MaxCompletionTokens = options.MaxCompletionTokens;
+        this.MaxPromptTokens = options.MaxPromptTokens;
+        this.ParallelToolCallsEnabled = options.ParallelToolCallsEnabled;
+        this.TruncationMessageCount = options.TruncationMessageCount;
+        this.Temperature = options.Temperature;
+        this.TopP = options.TopP;
+        this.Metadata = options.Metadata;
+    }
+
+    /// <summary>
+    /// Gets or sets the AI model targeted by the agent.
+    /// </summary>
+    public string? ModelName { get; init; }
+
+    /// <summary>
+    /// Gets or sets the override instructions.
+    /// </summary>
+    [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
+    public string? OverrideInstructions { get; init; }
+
+    /// <summary>
+    /// Gets or sets the additional messages to add to the thread.
+    /// </summary>
+    /// <remarks>
+    /// Only supports messages with <see href="https://platform.openai.com/docs/api-reference/runs/createRun#runs-createrun-additional_messages">role = User or Assistant</see>.
+    /// </remarks>
+    public IReadOnlyList<ChatMessageContent>? AdditionalMessages { get; init; }
+
+    /// <summary>
+    /// Gets or sets a value that indicates whether the code_interpreter tool is enabled.
+    /// </summary>
+    public bool EnableCodeInterpreter { get; init; }
+
+    /// <summary>
+    /// Gets or sets a value that indicates whether the file_search tool is enabled.
+    /// </summary>
+    public bool EnableFileSearch { get; init; }
+
+    /// <summary>
+    /// Gets or sets a value that indicates whether the JSON response format is enabled.
+    /// </summary>
+    public bool? EnableJsonResponse { get; init; }
+
+    /// <summary>
+    /// Gets or sets the maximum number of completion tokens that can be used over the course of the run.
+    /// </summary>
+    public int? MaxCompletionTokens { get; init; }
+
+    /// <summary>
+    /// Gets or sets the maximum number of prompt tokens that can be used over the course of the run.
+    /// </summary>
+    public int? MaxPromptTokens { get; init; }
+
+    /// <summary>
+    /// Gets or sets a value that indicates whether the parallel function calling is enabled during tool use.
+    /// </summary>
+    /// <value>
+    /// <see langword="true"/> if parallel function calling is enabled during tool use; otherwise, <see langword="false"/>. The default is <see langword="true"/>.
+    /// </value>
+    public bool? ParallelToolCallsEnabled { get; init; }
+
+    /// <summary>
+    /// Gets or sets the number of recent messages that the thread will be truncated to.
+    /// </summary>
+    public int? TruncationMessageCount { get; init; }
+
+    /// <summary>
+    /// Gets or sets the sampling temperature to use, between 0 and 2.
+    /// </summary>
+    public float? Temperature { get; init; }
+
+    /// <summary>
+    /// Gets or sets the probability mass of tokens whose results are considered in nucleus sampling.
+    /// </summary>
+    /// <remarks>
+    /// It's recommended to set this property or <see cref="Temperature"/>, but not both.
+    ///
+    /// Nucleus sampling is an alternative to sampling with temperature where the model
+    /// considers the results of the tokens with <see cref="TopP"/> probability mass.
+    /// For example, 0.1 means only the tokens comprising the top 10% probability mass are considered.
+    /// </remarks>
+    public float? TopP { get; init; }
+
+    /// <summary>
+    /// Gets or sets a set of up to 16 key/value pairs that can be attached to an agent, used for
+    /// storing additional information about that object in a structured format.
+    /// </summary>
+    /// <remarks>
+    /// Keys can be up to 64 characters in length, and values can be up to 512 characters in length.
+    /// </remarks>
+    public IReadOnlyDictionary<string, string>? Metadata { get; init; }
+
+    /// <summary>
+    /// Converts the current options to an <see cref="AzureAIInvocationOptions"/> instance.
+    /// </summary>
+    /// <returns>The converted <see cref="AzureAIInvocationOptions"/> instance.</returns>
+    internal AzureAIInvocationOptions ToAzureAIInvocationOptions()
+    {
+        return new AzureAIInvocationOptions
+        {
+            ModelName = this.ModelName,
+            OverrideInstructions = this.OverrideInstructions,
+            AdditionalInstructions = this.AdditionalInstructions,
+            AdditionalMessages = this.AdditionalMessages,
+            EnableCodeInterpreter = this.EnableCodeInterpreter,
+            EnableFileSearch = this.EnableFileSearch,
+            EnableJsonResponse = this.EnableJsonResponse,
+            MaxCompletionTokens = this.MaxCompletionTokens,
+            MaxPromptTokens = this.MaxPromptTokens,
+            ParallelToolCallsEnabled = this.ParallelToolCallsEnabled,
+            TruncationMessageCount = this.TruncationMessageCount,
+            Temperature = this.Temperature,
+            TopP = this.TopP,
+            Metadata = this.Metadata
+        };
+    }
+}
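
Since the file above defines the new public surface, here is a hedged sketch of how the clone constructor and the run-tuning properties are meant to compose; the conversion to `AzureAIInvocationOptions` happens internally via `ToAzureAIInvocationOptions()`, so callers only deal with the options type shown above. Variable names, the method name, and values are illustrative:

```csharp
using System;
using System.Collections.Generic;
using System.Threading.Tasks;
using Microsoft.SemanticKernel;
using Microsoft.SemanticKernel.Agents;
using Microsoft.SemanticKernel.Agents.AzureAI;
using Microsoft.SemanticKernel.ChatCompletion;

static async Task StreamWithTunedRunAsync(AzureAIAgent agent, AgentInvokeOptions sharedDefaults)
{
    // Start from shared base options, then layer on Azure AI specific run settings.
    AzureAIAgentInvokeOptions options = new(sharedDefaults)
    {
        Temperature = 0.1f,
        MaxCompletionTokens = 800,
        Metadata = new Dictionary<string, string> { ["source"] = "docs-sample" },
    };

    var messages = new[] { new ChatMessageContent(AuthorRole.User, "Draft a short status update.") };

    await foreach (var update in agent.InvokeStreamingAsync(messages, options: options))
    {
        Console.Write(update.Message.Content);
    }
}
```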

dotnet/src/Agents/AzureAI/AzureAIInvocationOptions.cs

+6

@@ -18,6 +18,12 @@ public sealed class AzureAIInvocationOptions
     [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
     public string? ModelName { get; init; }

+    /// <summary>
+    /// Gets the override instructions.
+    /// </summary>
+    [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
+    public string? OverrideInstructions { get; init; }
+
     /// <summary>
     /// Gets the additional instructions.
     /// </summary>

dotnet/src/Agents/AzureAI/Extensions/AgentRunExtensions.cs

+2 −2

@@ -51,7 +51,7 @@ await client.CreateRunAsync(
         threadId,
         agent.Definition.Id,
         overrideModelName: invocationOptions?.ModelName,
-        instructions,
+        overrideInstructions: invocationOptions?.OverrideInstructions ?? instructions,
         additionalInstructions: invocationOptions?.AdditionalInstructions,
         additionalMessages: AgentMessageFactory.GetThreadMessages(invocationOptions?.AdditionalMessages).ToArray(),
         overrideTools: tools,
@@ -102,7 +102,7 @@ public static IAsyncEnumerable<StreamingUpdate> CreateStreamingAsync(
         threadId,
         agent.Definition.Id,
         overrideModelName: invocationOptions?.ModelName,
-        instructions,
+        overrideInstructions: invocationOptions?.OverrideInstructions ?? instructions,
         additionalInstructions: invocationOptions?.AdditionalInstructions,
         additionalMessages: AgentMessageFactory.GetThreadMessages(invocationOptions?.AdditionalMessages).ToArray(),
         overrideTools: tools,
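
The substantive change in both call sites is the instruction precedence: a per-run `OverrideInstructions` from the invocation options now takes priority, and the agent's configured `instructions` are only the fallback. A simplified, standalone sketch of that resolution rule; the helper name is hypothetical, since the real logic lives inline in the extension methods above:

```csharp
using Microsoft.SemanticKernel.Agents.AzureAI;

static string? ResolveRunInstructions(string? agentInstructions, AzureAIInvocationOptions? invocationOptions)
    => invocationOptions?.OverrideInstructions ?? agentInstructions;

// ResolveRunInstructions("You are a helpful assistant.", null)
//   -> "You are a helpful assistant."
// ResolveRunInstructions("You are a helpful assistant.",
//     new AzureAIInvocationOptions { OverrideInstructions = "You are a strict release-notes reviewer." })
//   -> "You are a strict release-notes reviewer."
```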

dotnet/src/Agents/OpenAI/Internal/AssistantRunOptionsFactory.cs

+2 −2

@@ -10,13 +10,13 @@ namespace Microsoft.SemanticKernel.Agents.OpenAI.Internal;
 /// </summary>
 internal static class AssistantRunOptionsFactory
 {
-    public static RunCreationOptions GenerateOptions(RunCreationOptions? defaultOptions, string? overrideInstructions, RunCreationOptions? invocationOptions)
+    public static RunCreationOptions GenerateOptions(RunCreationOptions? defaultOptions, string? agentInstructions, RunCreationOptions? invocationOptions)
     {
         RunCreationOptions runOptions =
             new()
             {
                 AdditionalInstructions = invocationOptions?.AdditionalInstructions ?? defaultOptions?.AdditionalInstructions,
-                InstructionsOverride = overrideInstructions,
+                InstructionsOverride = invocationOptions?.InstructionsOverride ?? agentInstructions,
                 MaxOutputTokenCount = invocationOptions?.MaxOutputTokenCount ?? defaultOptions?.MaxOutputTokenCount,
                 MaxInputTokenCount = invocationOptions?.MaxInputTokenCount ?? defaultOptions?.MaxInputTokenCount,
                 ModelOverride = invocationOptions?.ModelOverride ?? defaultOptions?.ModelOverride,
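
The OpenAI assistant path gets the matching rule: `GenerateOptions` now prefers a per-invocation `InstructionsOverride` and falls back to the agent's instructions, while other settings keep their existing invocation-then-default fallback. A hedged illustration of the expected merge using the `OpenAI.Assistants` `RunCreationOptions` type the factory consumes; the factory itself is internal, so this only mirrors its resolution order, and the values are made up:

```csharp
using OpenAI.Assistants;

// Illustrative inputs; values are hypothetical.
RunCreationOptions agentDefaults = new() { AdditionalInstructions = "Prefer bullet points." };
RunCreationOptions perInvocation = new() { InstructionsOverride = "Respond only in JSON." };
string agentInstructions = "You are a release-notes assistant.";

// Mirrors the resolution inside AssistantRunOptionsFactory.GenerateOptions:
string? instructions = perInvocation.InstructionsOverride ?? agentInstructions;                      // "Respond only in JSON."
string? additional = perInvocation.AdditionalInstructions ?? agentDefaults.AdditionalInstructions;   // "Prefer bullet points."
```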
