3 changes: 3 additions & 0 deletions dotnet/samples/AgentWebChat/AgentWebChat.AgentHost/Program.cs
@@ -89,6 +89,9 @@ Once the user has deduced what type (knight or knave) both Alice and Bob are, te
app.MapOpenAIResponses("pirate");
app.MapOpenAIResponses("knights-and-knaves");

app.MapOpenAIChatCompletions("pirate");
app.MapOpenAIChatCompletions("knights-and-knaves");

// Map the agents HTTP endpoints
app.MapAgentDiscovery("/agents");

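With these endpoints mapped, the agents can also be reached with the stock OpenAI SDK client. A minimal sketch (not part of this PR), assuming the agent host listens on http://localhost:5390 (the address previously hard-coded in AgentWebChat.Web) and exposes each agent under /{agentName}/v1/ as the new frontend client below does:

// Illustrative console sketch: call the "pirate" agent over the OpenAI ChatCompletions protocol.
// The host URL and route are assumptions taken from elsewhere in this PR.
using System;
using System.ClientModel;
using OpenAI;
using OpenAI.Chat;

var options = new OpenAIClientOptions
{
    Endpoint = new Uri("http://localhost:5390/pirate/v1/")
};

// Placeholder model name and API key, mirroring the sample clients in this PR.
var client = new ChatClient(model: "myModel!", credential: new ApiKeyCredential("dummy-key"), options: options);

ChatCompletion completion = await client.CompleteChatAsync("Ahoy! Who are ye?");
Console.WriteLine(completion.Content[0].Text);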
@@ -10,7 +10,7 @@

namespace AgentWebChat.Web;

internal sealed class A2AAgentClient : IAgentClient
internal sealed class A2AAgentClient : AgentClientBase
{
private readonly ILogger _logger;
private readonly Uri _uri;
@@ -25,7 +25,7 @@ public A2AAgentClient(ILogger logger, Uri baseUri)
this._uri = baseUri;
}

public async IAsyncEnumerable<AgentRunResponseUpdate> RunStreamingAsync(
public async override IAsyncEnumerable<AgentRunResponseUpdate> RunStreamingAsync(
string agentName,
IList<ChatMessage> messages,
string? threadId = null,
@@ -126,7 +126,7 @@ public async IAsyncEnumerable<AgentRunResponseUpdate> RunStreamingAsync(
}
}

public async Task<AgentCard?> GetAgentCardAsync(string agentName, CancellationToken cancellationToken = default)
public async override Task<AgentCard?> GetAgentCardAsync(string agentName, CancellationToken cancellationToken = default)
{
this._logger.LogInformation("Retrieving agent card for {Agent}", agentName);

@@ -4,6 +4,7 @@
<TargetFramework>net9.0</TargetFramework>
<ImplicitUsings>enable</ImplicitUsings>
<Nullable>enable</Nullable>
<NoWarn>$(NoWarn);CA1812</NoWarn>
</PropertyGroup>

<ItemGroup>
@@ -5,6 +5,7 @@
@inject ILogger<Home> Logger
@inject A2AAgentClient A2AActorClient
@inject OpenAIResponsesAgentClient OpenAIResponsesAgentClient
@inject OpenAIChatCompletionsAgentClient OpenAIChatCompletionsAgentClient
@rendermode InteractiveServer
@using System.Text
@using System.Text.Json
@@ -52,14 +53,18 @@
<label for="protocol-select" class="protocol-select-label">Choose communication protocol:</label>
<div class="protocol-select-wrapper">
<select id="protocol-select" class="protocol-select" @bind="selectedProtocol" disabled="@(isStreaming)">
<option value="OpenAIResponses">OpenAI Responses</option>
<option value="A2A">A2A (Agent-to-Agent)</option>
<option value="OpenAIResponses">OpenAI Responses</option>
<option value="OpenAIChatCompletions">OpenAI ChatCompletions</option>
<option value="A2A">A2A (Agent-to-Agent)</option>
</select>
<div class="protocol-info">
@switch (selectedProtocol)
{
case Protocol.OpenAIResponses:
<span class="protocol-description">֎ OpenAI Responses</span>
break;
case Protocol.OpenAIChatCompletions:
<span class="protocol-description">֎ OpenAI ChatCompletions</span>
break;
case Protocol.A2A:
default:
@@ -903,7 +908,8 @@
private enum Protocol
{
A2A, // Agent-to-Agent protocol
OpenAIResponses
OpenAIResponses,
OpenAIChatCompletions
}

private sealed class Conversation
@@ -1080,11 +1086,11 @@

try
{

// Select the appropriate client based on protocol
IAgentClient agentClient = selectedProtocol switch
AgentClientBase agentClient = selectedProtocol switch
{
Protocol.OpenAIResponses => OpenAIResponsesAgentClient,
Protocol.OpenAIChatCompletions => OpenAIChatCompletionsAgentClient,
Protocol.A2A or _ => A2AActorClient
};

7 changes: 4 additions & 3 deletions dotnet/samples/AgentWebChat/AgentWebChat.Web/IAgentClient.cs
@@ -9,7 +9,7 @@ namespace AgentWebChat.Web;
/// <summary>
/// Base class for clients that can interact with agents and provide streaming responses.
/// </summary>
public interface IAgentClient
internal abstract class AgentClientBase
{
/// <summary>
/// Runs an agent with the specified messages and returns a streaming response.
@@ -19,7 +19,7 @@ public interface IAgentClient
/// <param name="threadId">Optional thread identifier for conversation continuity.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>An asynchronous enumerable of agent response updates.</returns>
IAsyncEnumerable<AgentRunResponseUpdate> RunStreamingAsync(
public abstract IAsyncEnumerable<AgentRunResponseUpdate> RunStreamingAsync(
string agentName,
IList<ChatMessage> messages,
string? threadId = null,
@@ -31,7 +31,8 @@ IAsyncEnumerable<AgentRunResponseUpdate> RunStreamingAsync(
/// <param name="agentName">The name of the agent.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>The agent card if supported, null otherwise.</returns>
Task<AgentCard?> GetAgentCardAsync(string agentName, CancellationToken cancellationToken = default);
public virtual Task<AgentCard?> GetAgentCardAsync(string agentName, CancellationToken cancellationToken = default)
=> Task.FromResult<AgentCard?>(null);
}

/// <summary>
@@ -0,0 +1,37 @@
// Copyright (c) Microsoft. All rights reserved.

using System.ClientModel;
using System.ClientModel.Primitives;
using System.Runtime.CompilerServices;
using Microsoft.Agents.AI;
using Microsoft.Extensions.AI;
using OpenAI;
using OpenAI.Chat;
using ChatMessage = Microsoft.Extensions.AI.ChatMessage;

namespace AgentWebChat.Web;

/// <summary>
/// A simple frontend client that exercises an exposed agent's ability to communicate via the OpenAI ChatCompletions protocol.
/// </summary>
internal sealed class OpenAIChatCompletionsAgentClient(HttpClient httpClient) : AgentClientBase
{
public async override IAsyncEnumerable<AgentRunResponseUpdate> RunStreamingAsync(
string agentName,
IList<ChatMessage> messages,
string? threadId = null,
[EnumeratorCancellation] CancellationToken cancellationToken = default)
{
OpenAIClientOptions options = new()
{
Endpoint = new Uri(httpClient.BaseAddress!, $"/{agentName}/v1/"),
Transport = new HttpClientPipelineTransport(httpClient)
};

var openAiClient = new ChatClient(model: "myModel!", credential: new ApiKeyCredential("dummy-key"), options: options).AsIChatClient();
await foreach (var update in openAiClient.GetStreamingResponseAsync(messages, cancellationToken: cancellationToken))
{
yield return new AgentRunResponseUpdate(update);
}
}
}
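For illustration, the new client could be consumed directly as below; a hypothetical sketch (the class is internal, so imagine this running inside the AgentWebChat.Web project), with the base address and agent name taken from the rest of this PR:

// Hypothetical usage sketch of OpenAIChatCompletionsAgentClient; not part of this PR.
using AgentWebChat.Web;
using Microsoft.Extensions.AI;

var httpClient = new HttpClient { BaseAddress = new Uri("http://localhost:5390") };
var client = new OpenAIChatCompletionsAgentClient(httpClient);

var messages = new List<ChatMessage> { new(ChatRole.User, "Tell me a pirate joke.") };
await foreach (var update in client.RunStreamingAsync("pirate", messages))
{
    Console.Write(update.Text); // stream the agent's reply as it arrives
}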
@@ -1,8 +1,8 @@
// Copyright (c) Microsoft. All rights reserved.

using System.ClientModel;
using System.ClientModel.Primitives;
using System.Runtime.CompilerServices;
using A2A;
using Microsoft.Agents.AI;
using Microsoft.Extensions.AI;
using OpenAI;
@@ -13,24 +13,18 @@ namespace AgentWebChat.Web;
/// <summary>
/// A simple frontend client that exercises an exposed agent's ability to communicate via the OpenAI Responses protocol.
/// </summary>
internal sealed class OpenAIResponsesAgentClient : IAgentClient
internal sealed class OpenAIResponsesAgentClient(HttpClient httpClient) : AgentClientBase
{
private readonly Uri _baseUri;

public OpenAIResponsesAgentClient(string baseUri)
{
this._baseUri = new Uri(baseUri.TrimEnd('/'));
}

public async IAsyncEnumerable<AgentRunResponseUpdate> RunStreamingAsync(
public async override IAsyncEnumerable<AgentRunResponseUpdate> RunStreamingAsync(
string agentName,
IList<ChatMessage> messages,
string? threadId = null,
[EnumeratorCancellation] CancellationToken cancellationToken = default)
{
OpenAIClientOptions options = new()
{
Endpoint = new Uri(this._baseUri, $"/{agentName}/v1/")
Endpoint = new Uri(httpClient.BaseAddress!, $"/{agentName}/v1/"),
Transport = new HttpClientPipelineTransport(httpClient)
};

var openAiClient = new OpenAIResponseClient(model: "myModel!", credential: new ApiKeyCredential("dummy-key"), options: options).AsIChatClient();
@@ -44,7 +38,4 @@ public async IAsyncEnumerable<AgentRunResponseUpdate> RunStreamingAsync(
yield return new AgentRunResponseUpdate(update);
}
}

public Task<AgentCard?> GetAgentCardAsync(string agentName, CancellationToken cancellationToken = default)
=> Task.FromResult<AgentCard?>(null!);
}
4 changes: 3 additions & 1 deletion dotnet/samples/AgentWebChat/AgentWebChat.Web/Program.cs
@@ -23,7 +23,9 @@

builder.Services.AddHttpClient<AgentDiscoveryClient>(client => client.BaseAddress = baseAddress);
builder.Services.AddSingleton(sp => new A2AAgentClient(sp.GetRequiredService<ILogger<A2AAgentClient>>(), a2aAddress));
builder.Services.AddSingleton(sp => new OpenAIResponsesAgentClient("http://localhost:5390"));

builder.Services.AddHttpClient<OpenAIResponsesAgentClient>(client => client.BaseAddress = baseAddress);
builder.Services.AddHttpClient<OpenAIChatCompletionsAgentClient>(client => client.BaseAddress = baseAddress);

var app = builder.Build();

@@ -0,0 +1,105 @@
// Copyright (c) Microsoft. All rights reserved.

using System.Buffers;
using System.ClientModel.Primitives;
using System.Collections.Generic;
using System.Diagnostics;
using System.Net.ServerSentEvents;
using System.Runtime.CompilerServices;
using System.Text.Json;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Agents.AI.Hosting.OpenAI.ChatCompletions.Utils;
using Microsoft.AspNetCore.Http;
using Microsoft.AspNetCore.Http.Features;
using OpenAI.Chat;
using ChatMessage = Microsoft.Extensions.AI.ChatMessage;

namespace Microsoft.Agents.AI.Hosting.OpenAI.ChatCompletions;

internal sealed class AIAgentChatCompletionsProcessor
{
private readonly AIAgent _agent;

public AIAgentChatCompletionsProcessor(AIAgent agent)
{
this._agent = agent;
}

public async Task<IResult> CreateChatCompletionAsync(ChatCompletionOptions chatCompletionOptions, CancellationToken cancellationToken)
{
AgentThread? agentThread = null; // resolving an existing thread from a conversation id is not supported

var inputItems = chatCompletionOptions.GetMessages();
var chatMessages = inputItems.AsChatMessages();

if (chatCompletionOptions.GetStream())
{
return new OpenAIStreamingChatCompletionResult(this._agent, chatMessages);
}

var agentResponse = await this._agent.RunAsync(chatMessages, agentThread, cancellationToken: cancellationToken).ConfigureAwait(false);
return new OpenAIChatCompletionResult(agentResponse);
}

private sealed class OpenAIChatCompletionResult(AgentRunResponse agentRunResponse) : IResult
{
public async Task ExecuteAsync(HttpContext httpContext)
{
// Note: OpenAI SDK types provide their own serialization implementation,
// so we can't simply wrap the typed object in an IResult.
// Instead, the serialized payload is written directly to the response body.

var cancellationToken = httpContext.RequestAborted;
var response = httpContext.Response;

var chatResponse = agentRunResponse.AsChatResponse();
var openAIChatCompletion = chatResponse.AsOpenAIChatCompletion();
var openAIChatCompletionJsonModel = openAIChatCompletion as IJsonModel<ChatCompletion>;
Debug.Assert(openAIChatCompletionJsonModel is not null);

var writer = new Utf8JsonWriter(response.BodyWriter, new JsonWriterOptions { SkipValidation = false });
openAIChatCompletionJsonModel.Write(writer, ModelReaderWriterOptions.Json);
await writer.FlushAsync(cancellationToken).ConfigureAwait(false);
}
}

private sealed class OpenAIStreamingChatCompletionResult(AIAgent agent, IEnumerable<ChatMessage> chatMessages) : IResult
{
public Task ExecuteAsync(HttpContext httpContext)
{
var cancellationToken = httpContext.RequestAborted;
var response = httpContext.Response;

// Set SSE headers
response.Headers.ContentType = "text/event-stream";
response.Headers.CacheControl = "no-cache,no-store";
response.Headers.Connection = "keep-alive";
response.Headers.ContentEncoding = "identity";
httpContext.Features.GetRequiredFeature<IHttpResponseBodyFeature>().DisableBuffering();

return SseFormatter.WriteAsync(
source: this.GetStreamingResponsesAsync(cancellationToken),
destination: response.Body,
itemFormatter: (sseItem, bufferWriter) =>
{
var sseDataJsonModel = (IJsonModel<StreamingChatCompletionUpdate>)sseItem.Data;
var json = sseDataJsonModel.Write(ModelReaderWriterOptions.Json);
bufferWriter.Write(json);
},
cancellationToken);
}

private async IAsyncEnumerable<SseItem<StreamingChatCompletionUpdate>> GetStreamingResponsesAsync([EnumeratorCancellation] CancellationToken cancellationToken = default)
{
AgentThread? agentThread = null;

var agentRunResponseUpdates = agent.RunStreamingAsync(chatMessages, thread: agentThread, cancellationToken: cancellationToken);
var chatResponseUpdates = agentRunResponseUpdates.AsChatResponseUpdatesAsync();
await foreach (var streamingChatCompletionUpdate in chatResponseUpdates.AsOpenAIStreamingChatCompletionUpdatesAsync(cancellationToken).ConfigureAwait(false))
{
yield return new SseItem<StreamingChatCompletionUpdate>(streamingChatCompletionUpdate);
}
}
}
}
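On the wire, the streaming branch above produces a standard SSE stream of chat-completion chunks. The same System.Net.ServerSentEvents package can read it back; a rough consumer sketch, where the URL and request body are assumptions for illustration (a real caller would normally go through the OpenAI SDK):

// Hypothetical consumer of the SSE stream written by OpenAIStreamingChatCompletionResult.
using System.Net.ServerSentEvents;
using System.Text;

using var http = new HttpClient();
using var request = new HttpRequestMessage(HttpMethod.Post, "http://localhost:5390/pirate/v1/chat/completions")
{
    Content = new StringContent(
        """{"model":"myModel!","stream":true,"messages":[{"role":"user","content":"Ahoy!"}]}""",
        Encoding.UTF8,
        "application/json")
};

using var response = await http.SendAsync(request, HttpCompletionOption.ResponseHeadersRead);
await using var body = await response.Content.ReadAsStreamAsync();

// SseFormatter.WriteAsync emits one "data:" frame per StreamingChatCompletionUpdate;
// SseParser yields each frame's payload as a string.
await foreach (SseItem<string> item in SseParser.Create(body).EnumerateAsync())
{
    Console.WriteLine(item.Data);
}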
@@ -0,0 +1,52 @@
// Copyright (c) Microsoft. All rights reserved.

using System;
using System.Collections.Generic;
using System.Reflection;
using Microsoft.Shared.Diagnostics;
using OpenAI.Chat;

namespace Microsoft.Agents.AI.Hosting.OpenAI.ChatCompletions.Utils;

[System.Diagnostics.CodeAnalysis.SuppressMessage("Performance", "CA1810:Initialize reference type static fields inline", Justification = "Specifically for accessing hidden members")]
[System.Diagnostics.CodeAnalysis.SuppressMessage("Design", "CA1065:Do not raise exceptions in unexpected locations", Justification = "Specifically for accessing hidden members")]
internal static class ChatCompletionsOptionsExtensions
{
private static readonly Func<ChatCompletionOptions, bool?> _getStreamNullable;
private static readonly Func<ChatCompletionOptions, IList<ChatMessage>> _getMessages;

static ChatCompletionsOptionsExtensions()
{
// The OpenAI SDK does not offer a simple way to get the request body as a C# object.
// However, it does parse most of the interesting fields into internal properties of the `ChatCompletionOptions` object.

// --- Stream (internal bool? Stream { get; set; }) ---
const string streamPropName = "Stream";
var streamProp = typeof(ChatCompletionOptions).GetProperty(streamPropName, BindingFlags.Instance | BindingFlags.NonPublic)
?? throw new MissingMemberException(typeof(ChatCompletionOptions).FullName!, streamPropName);
var streamGetter = streamProp.GetGetMethod(nonPublic: true) ?? throw new MissingMethodException($"{streamPropName} getter not found.");

_getStreamNullable = streamGetter.CreateDelegate<Func<ChatCompletionOptions, bool?>>();

// --- Messages (internal IList<OpenAI.Chat.ChatMessage> Messages { get; set; }) ---
const string inputPropName = "Messages";
var inputProp = typeof(ChatCompletionOptions).GetProperty(inputPropName, BindingFlags.Instance | BindingFlags.NonPublic)
?? throw new MissingMemberException(typeof(ChatCompletionOptions).FullName!, inputPropName);
var inputGetter = inputProp.GetGetMethod(nonPublic: true)
?? throw new MissingMethodException($"{inputPropName} getter not found.");

_getMessages = inputGetter.CreateDelegate<Func<ChatCompletionOptions, IList<ChatMessage>>>();
}

public static IList<ChatMessage> GetMessages(this ChatCompletionOptions options)
{
Throw.IfNull(options);
return _getMessages(options);
}

public static bool GetStream(this ChatCompletionOptions options)
{
Throw.IfNull(options);
return _getStreamNullable(options) ?? false;
}
}
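The pattern here — looking up the non-public getters once in the static constructor and caching them as typed delegates — keeps the reflection cost to a single startup hit. A self-contained sketch of the same technique against a made-up type (Widget and its Hidden property are illustrative, not SDK types):

// Illustration of the cached-delegate reflection pattern used above.
// In the real code the target property is internal to the OpenAI SDK assembly,
// which is why plain property access is not an option.
using System;
using System.Reflection;

public sealed class Widget
{
    internal bool? Hidden { get; set; } = true;
}

public static class WidgetExtensions
{
    private static readonly Func<Widget, bool?> _getHidden;

    static WidgetExtensions()
    {
        // Reflect once; every later call goes through the cached delegate, not MethodInfo.Invoke.
        var prop = typeof(Widget).GetProperty("Hidden", BindingFlags.Instance | BindingFlags.NonPublic)
            ?? throw new MissingMemberException(typeof(Widget).FullName!, "Hidden");
        var getter = prop.GetGetMethod(nonPublic: true)
            ?? throw new MissingMethodException("Hidden getter not found.");
        _getHidden = getter.CreateDelegate<Func<Widget, bool?>>();
    }

    public static bool GetHidden(this Widget widget) => _getHidden(widget) ?? false;
}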