Overhaul tool handling #89

Merged · 5 commits · Mar 25, 2025
61 changes: 46 additions & 15 deletions README.MD
@@ -44,7 +44,7 @@ await foreach (var tool in client.ListToolsAsync())
// Execute a tool (this would normally be driven by LLM tool invocations).
var result = await client.CallToolAsync(
"echo",
new() { ["message"] = "Hello MCP!" },
new Dictionary<string, object?>() { ["message"] = "Hello MCP!" },
CancellationToken.None);

// echo always returns one and only one text content object
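
For context (not shown in this hunk), a plausible way to consume the result, assuming the response's `Content` collection uses the same `Type`/`Text` shape seen elsewhere in this PR:

```csharp
// Hypothetical follow-up: pull the first text content item out of the tool result.
var text = result.Content.FirstOrDefault(c => c.Type == "text")?.Text;
Console.WriteLine(text ?? "<no text content returned>");
```
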
@@ -59,16 +59,13 @@ Tools can be exposed easily as `AIFunction` instances so that they are immediate

```csharp
// Get available functions.
IList<AIFunction> tools = await client.GetAIFunctionsAsync();
IList<McpClientTool> tools = await client.ListToolsAsync();

// Call the chat client using the tools.
IChatClient chatClient = ...;
var response = await chatClient.GetResponseAsync(
"your prompt here",
new()
{
Tools = [.. tools],
});
    new() { Tools = [.. tools] });
```
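
If the chat client should invoke these MCP tools automatically, they can be combined with Microsoft.Extensions.AI's function-invocation support. A minimal sketch (not part of this PR), assuming any `IChatClient` as the inner client:

```csharp
// Sketch: wrap an existing IChatClient so that tool calls requested by the model
// are dispatched automatically to the MCP client tools listed above.
IChatClient innerClient = ...; // e.g. an OpenAI- or Anthropic-backed IChatClient
IChatClient chatClient = new ChatClientBuilder(innerClient)
    .UseFunctionInvocation()
    .Build();

var response = await chatClient.GetResponseAsync(
    "your prompt here",
    new() { Tools = [.. tools] });
```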

## Getting Started (Server)
@@ -88,17 +85,47 @@ var builder = Host.CreateEmptyApplicationBuilder(settings: null);
builder.Services
.AddMcpServer()
.WithStdioServerTransport()
.WithTools();
.WithToolsFromAssembly();
await builder.Build().RunAsync();

[McpToolType]
[McpServerToolType]
public static class EchoTool
{
[McpTool, Description("Echoes the message back to the client.")]
[McpServerTool, Description("Echoes the message back to the client.")]
public static string Echo(string message) => $"hello {message}";
}
```
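
The same attribute pattern scales to tools with multiple parameters. A hypothetical example (not part of this PR), assuming parameter names and `[Description]` attributes are surfaced to clients in the generated input schema:

```csharp
[McpServerToolType]
public static class MathTool
{
    // The parameter descriptions below are assumed to flow into the tool's input schema.
    [McpServerTool, Description("Adds two integers and returns their sum.")]
    public static int Add(
        [Description("The first addend")] int a,
        [Description("The second addend")] int b) => a + b;
}
```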

Tools can have the `IMcpServer` representing the server injected via a parameter to the method, and can use that server to interact with
the connected client. Similarly, other arguments may be resolved from dependency injection. For example, this tool uses the supplied
`IMcpServer` to make sampling requests back to the client in order to summarize content it downloads from the specified URL via
an `HttpClient` resolved from the DI container.
```csharp
[McpServerTool("SummarizeContentFromUrl"), Description("Summarizes content downloaded from a specific URI")]
public static async Task<string> SummarizeDownloadedContent(
IMcpServer thisServer,
HttpClient httpClient,
[Description("The url from which to download the content to summarize")] string url,
CancellationToken cancellationToken)
{
string content = await httpClient.GetStringAsync(url);

ChatMessage[] messages =
[
new(ChatRole.User, "Briefly summarize the following downloaded content:"),
new(ChatRole.User, content),
];

ChatOptions options = new()
{
MaxOutputTokens = 256,
Temperature = 0.3f,
};

return $"Summary: {await thisServer.AsSamplingChatClient().GetResponseAsync(messages, options, cancellationToken)}";
}
```
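
For the `HttpClient` parameter above to resolve, the host needs to register one. A minimal sketch of what that registration might look like (assumed; the PR does not show it):

```csharp
var builder = Host.CreateEmptyApplicationBuilder(settings: null);

// Supply the HttpClient that SummarizeDownloadedContent asks for via DI,
// alongside the MCP server registration.
builder.Services.AddSingleton(_ => new HttpClient());
builder.Services
    .AddMcpServer()
    .WithStdioServerTransport()
    .WithToolsFromAssembly();

await builder.Build().RunAsync();
```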

More fine-grained control is also available over configuring the server and how it handles client requests. For example:

```csharp
@@ -124,14 +151,18 @@ McpServerOptions options = new()
{
Name = "echo",
Description = "Echoes the input back to the client.",
InputSchema = new JsonSchema()
{
Type = "object",
Properties = new Dictionary<string, JsonSchemaProperty>()
InputSchema = JsonSerializer.Deserialize<JsonElement>("""
{
["message"] = new JsonSchemaProperty() { Type = "string", Description = "The input to echo back." }
"type": "object",
"properties": {
"message": {
"type": "string",
"description": "The input to echo back"
}
},
"required": ["message"]
}
},
"""),
}
]
};
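
The options above configure how tools are listed; a companion handler for executing them might look roughly like the following sketch. The member names (`CallToolHandler`, `request.Params.Name`, `request.Params.Arguments`, `CallToolResponse`) are assumptions inferred from the rest of this PR, not a verbatim excerpt:

```csharp
// Hypothetical call handler for the "echo" tool declared in ListToolsHandler.
CallToolHandler = (request, cancellationToken) =>
{
    if (request.Params?.Name == "echo" &&
        request.Params.Arguments?.TryGetValue("message", out var message) == true)
    {
        return Task.FromResult(new CallToolResponse
        {
            Content = [new Content { Type = "text", Text = $"Echo: {message}" }]
        });
    }

    throw new ArgumentException($"Unknown tool: {request.Params?.Name}");
},
```
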
2 changes: 1 addition & 1 deletion samples/AspNetCoreSseServer/Program.cs
@@ -2,7 +2,7 @@
using AspNetCoreSseServer;

var builder = WebApplication.CreateBuilder(args);
builder.Services.AddMcpServer().WithTools();
builder.Services.AddMcpServer().WithToolsFromAssembly();
var app = builder.Build();

app.MapGet("/", () => "Hello World!");
4 changes: 2 additions & 2 deletions samples/AspNetCoreSseServer/Tools/EchoTool.cs
@@ -3,10 +3,10 @@

namespace TestServerWithHosting.Tools;

[McpToolType]
[McpServerToolType]
public static class EchoTool
{
[McpTool, Description("Echoes the input back to the client.")]
[McpServerTool, Description("Echoes the input back to the client.")]
public static string Echo(string message)
{
return "hello " + message;
51 changes: 18 additions & 33 deletions samples/AspNetCoreSseServer/Tools/SampleLlmTool.cs
@@ -1,51 +1,36 @@
using ModelContextProtocol.Protocol.Types;
using Microsoft.Extensions.AI;
using ModelContextProtocol.Server;
using System.ComponentModel;

namespace TestServerWithHosting.Tools;

/// <summary>
/// This tool uses depenency injection and async method
/// This tool uses dependency injection and async method
/// </summary>
[McpToolType]
public class SampleLlmTool
[McpServerToolType]
public static class SampleLlmTool
{
private readonly IMcpServer _server;

public SampleLlmTool(IMcpServer server)
{
_server = server ?? throw new ArgumentNullException(nameof(server));
}

[McpTool("sampleLLM"), Description("Samples from an LLM using MCP's sampling feature")]
public async Task<string> SampleLLM(
[McpServerTool("sampleLLM"), Description("Samples from an LLM using MCP's sampling feature")]
public static async Task<string> SampleLLM(
IMcpServer thisServer,
[Description("The prompt to send to the LLM")] string prompt,
[Description("Maximum number of tokens to generate")] int maxTokens,
CancellationToken cancellationToken)
{
var samplingParams = CreateRequestSamplingParams(prompt ?? string.Empty, "sampleLLM", maxTokens);
var sampleResult = await _server.RequestSamplingAsync(samplingParams, cancellationToken);
ChatMessage[] messages =
[
new(ChatRole.System, "You are a helpful test server."),
new(ChatRole.User, prompt),
];

return $"LLM sampling result: {sampleResult.Content.Text}";
}

private static CreateMessageRequestParams CreateRequestSamplingParams(string context, string uri, int maxTokens = 100)
{
return new CreateMessageRequestParams()
ChatOptions options = new()
{
Messages = [new SamplingMessage()
{
Role = Role.User,
Content = new Content()
{
Type = "text",
Text = $"Resource {uri} context: {context}"
}
}],
SystemPrompt = "You are a helpful test server.",
MaxTokens = maxTokens,
MaxOutputTokens = maxTokens,
Temperature = 0.7f,
IncludeContext = ContextInclusion.ThisServer
};

var samplingResponse = await thisServer.AsSamplingChatClient().GetResponseAsync(messages, options, cancellationToken);

return $"LLM sampling result: {samplingResponse}";
}
}
1 change: 0 additions & 1 deletion samples/ChatWithTools/ChatWithTools.csproj
@@ -11,7 +11,6 @@
<PackageReference Include="Microsoft.Extensions.AI" />
<PackageReference Include="Microsoft.Extensions.AI.OpenAI" />
<PackageReference Include="Anthropic.SDK" />
<PackageReference Include="System.Linq.AsyncEnumerable" />
</ItemGroup>

<ItemGroup>
2 changes: 1 addition & 1 deletion samples/ChatWithTools/Program.cs
@@ -19,7 +19,7 @@

// Get all available tools
Console.WriteLine("Tools available:");
var tools = await mcpClient.GetAIFunctionsAsync();
var tools = await mcpClient.ListToolsAsync();
foreach (var tool in tools)
{
Console.WriteLine($" {tool}");
2 changes: 1 addition & 1 deletion samples/TestServerWithHosting/Program.cs
@@ -19,7 +19,7 @@
builder.Services.AddSerilog();
builder.Services.AddMcpServer()
.WithStdioServerTransport()
.WithTools();
.WithToolsFromAssembly();

var app = builder.Build();

4 changes: 2 additions & 2 deletions samples/TestServerWithHosting/Tools/EchoTool.cs
@@ -3,10 +3,10 @@

namespace TestServerWithHosting.Tools;

[McpToolType]
[McpServerToolType]
public static class EchoTool
{
[McpTool, Description("Echoes the input back to the client.")]
[McpServerTool, Description("Echoes the input back to the client.")]
public static string Echo(string message)
{
return "hello " + message;
4 changes: 2 additions & 2 deletions samples/TestServerWithHosting/Tools/SampleLlmTool.cs
@@ -7,7 +7,7 @@ namespace TestServerWithHosting.Tools;
/// <summary>
/// This tool uses depenency injection and async method
/// </summary>
[McpToolType]
[McpServerToolType]
public class SampleLlmTool
{
private readonly IMcpServer _server;
@@ -17,7 +17,7 @@ public SampleLlmTool(IMcpServer server)
_server = server ?? throw new ArgumentNullException(nameof(server));
}

[McpTool("sampleLLM"), Description("Samples from an LLM using MCP's sampling feature")]
[McpServerTool("sampleLLM"), Description("Samples from an LLM using MCP's sampling feature")]
public async Task<string> SampleLLM(
[Description("The prompt to send to the LLM")] string prompt,
[Description("Maximum number of tokens to generate")] int maxTokens,
@@ -15,4 +15,7 @@ public static TValue GetValueOrDefault<TKey, TValue>(this IReadOnlyDictionary<TK

return dictionary.TryGetValue(key, out TValue? value) ? value : defaultValue;
}

// Mirrors the ToDictionary(IEnumerable<KeyValuePair<,>>) overload added in .NET 8,
// presumably so the same call pattern is available on older target frameworks.
public static Dictionary<TKey, TValue> ToDictionary<TKey, TValue>(this IEnumerable<KeyValuePair<TKey, TValue>> source) =>
source.ToDictionary(kv => kv.Key, kv => kv.Value);
}
104 changes: 104 additions & 0 deletions src/ModelContextProtocol/AIContentExtensions.cs
@@ -0,0 +1,104 @@
using Microsoft.Extensions.AI;
using ModelContextProtocol.Protocol.Types;
using ModelContextProtocol.Utils;
using System.Runtime.InteropServices;

namespace ModelContextProtocol;

/// <summary>Provides helpers for conversions related to <see cref="AIContent"/>.</summary>
public static class AIContentExtensions
{
/// <summary>Creates a <see cref="ChatMessage"/> from a <see cref="PromptMessage"/>.</summary>
/// <param name="promptMessage">The message to convert.</param>
/// <returns>The created <see cref="ChatMessage"/>.</returns>
public static ChatMessage ToChatMessage(this PromptMessage promptMessage)
{
Throw.IfNull(promptMessage);

return new()
{
RawRepresentation = promptMessage,
Role = promptMessage.Role == Role.User ? ChatRole.User : ChatRole.Assistant,
Contents = [ToAIContent(promptMessage.Content)]
};
}

/// <summary>Creates a new <see cref="AIContent"/> from the content of a <see cref="Content"/>.</summary>
/// <param name="content">The <see cref="Content"/> to convert.</param>
/// <returns>The created <see cref="AIContent"/>.</returns>
public static AIContent ToAIContent(this Content content)
{
Throw.IfNull(content);

AIContent ac;
if (content is { Type: "image", MimeType: not null, Data: not null })
{
ac = new DataContent(Convert.FromBase64String(content.Data), content.MimeType);
}
else if (content is { Type: "resource" } && content.Resource is { } resourceContents)
{
ac = resourceContents.Blob is not null && resourceContents.MimeType is not null ?
new DataContent(Convert.FromBase64String(resourceContents.Blob), resourceContents.MimeType) :
new TextContent(resourceContents.Text);

(ac.AdditionalProperties ??= [])["uri"] = resourceContents.Uri;
}
else
{
ac = new TextContent(content.Text);
}

ac.RawRepresentation = content;

return ac;
}

/// <summary>Creates a new <see cref="AIContent"/> from the content of a <see cref="ResourceContents"/>.</summary>
/// <param name="content">The <see cref="ResourceContents"/> to convert.</param>
/// <returns>The created <see cref="AIContent"/>.</returns>
public static AIContent ToAIContent(this ResourceContents content)
{
Throw.IfNull(content);

AIContent ac = content.Blob is not null && content.MimeType is not null ?
new DataContent(Convert.FromBase64String(content.Blob), content.MimeType) :
new TextContent(content.Text);

(ac.AdditionalProperties ??= [])["uri"] = content.Uri;
ac.RawRepresentation = content;

return ac;
}

/// <summary>Creates a list of <see cref="AIContent"/> from a sequence of <see cref="Content"/>.</summary>
/// <param name="contents">The <see cref="Content"/> instances to convert.</param>
/// <returns>The created <see cref="AIContent"/> instances.</returns>
public static IList<AIContent> ToAIContents(this IEnumerable<Content> contents)
{
Throw.IfNull(contents);

return contents.Select(ToAIContent).ToList();
}

/// <summary>Creates a list of <see cref="AIContent"/> from a sequence of <see cref="ResourceContents"/>.</summary>
/// <param name="contents">The <see cref="ResourceContents"/> instances to convert.</param>
/// <returns>The created <see cref="AIContent"/> instances.</returns>
public static IList<AIContent> ToAIContents(this IEnumerable<ResourceContents> contents)
{
Throw.IfNull(contents);

return contents.Select(ToAIContent).ToList();
}

/// <summary>Extracts the data from a <see cref="DataContent"/> as a Base64 string.</summary>
internal static string GetBase64Data(this DataContent dataContent)
{
#if NET
return Convert.ToBase64String(dataContent.Data.Span);
#else
return MemoryMarshal.TryGetArray(dataContent.Data, out ArraySegment<byte> segment) ?
Convert.ToBase64String(segment.Array!, segment.Offset, segment.Count) :
Convert.ToBase64String(dataContent.Data.ToArray());
#endif
}
}
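
A hypothetical caller of these helpers, bridging MCP protocol types into Microsoft.Extensions.AI abstractions (the local variables `promptMessage`, `resourceContents`, and `callToolResponse` are assumed for illustration):

```csharp
// Sketch: convert MCP types into Microsoft.Extensions.AI content/message types.
ChatMessage chatMessage = promptMessage.ToChatMessage();                // from a prompts/get response
AIContent resourceContent = resourceContents.ToAIContent();             // from a resources/read response
IList<AIContent> toolOutputs = callToolResponse.Content.ToAIContents(); // from a tools/call response
```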