forked from modelcontextprotocol/csharp-sdk
-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy path: SampleLlmTool.cs
36 lines (31 loc) · 1.12 KB
/
SampleLlmTool.cs
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
using Microsoft.Extensions.AI;
using ModelContextProtocol.Server;
using System.ComponentModel;
namespace TestServerWithHosting.Tools;
/// <summary>
/// Sample tool demonstrating dependency injection of <see cref="IMcpServer"/>
/// into an async tool method, and use of MCP's sampling feature to request an
/// LLM completion from the connected client.
/// </summary>
[McpServerToolType]
public static class SampleLlmTool
{
    /// <summary>
    /// Samples from an LLM via the connected client's sampling capability.
    /// </summary>
    /// <param name="thisServer">The MCP server instance, injected by the framework.</param>
    /// <param name="prompt">The prompt to send to the LLM as the user message.</param>
    /// <param name="maxTokens">Maximum number of tokens to generate; must be positive.</param>
    /// <param name="cancellationToken">Token used to cancel the sampling request.</param>
    /// <returns>A string prefixed with "LLM sampling result: " containing the sampled response.</returns>
    /// <exception cref="ArgumentNullException"><paramref name="thisServer"/> is <see langword="null"/>.</exception>
    /// <exception cref="ArgumentException"><paramref name="prompt"/> is null, empty, or whitespace.</exception>
    /// <exception cref="ArgumentOutOfRangeException"><paramref name="maxTokens"/> is zero or negative.</exception>
    [McpServerTool("sampleLLM"), Description("Samples from an LLM using MCP's sampling feature")]
    public static async Task<string> SampleLLM(
        IMcpServer thisServer,
        [Description("The prompt to send to the LLM")] string prompt,
        [Description("Maximum number of tokens to generate")] int maxTokens,
        CancellationToken cancellationToken)
    {
        // Fail fast on invalid input before issuing a network round-trip to the client.
        ArgumentNullException.ThrowIfNull(thisServer);
        ArgumentException.ThrowIfNullOrWhiteSpace(prompt);
        ArgumentOutOfRangeException.ThrowIfNegativeOrZero(maxTokens);

        ChatMessage[] messages =
        [
            new(ChatRole.System, "You are a helpful test server."),
            new(ChatRole.User, prompt),
        ];

        ChatOptions options = new()
        {
            MaxOutputTokens = maxTokens,
            Temperature = 0.7f,
        };

        // AsSamplingChatClient adapts the server's client-sampling capability to an
        // IChatClient so the standard Microsoft.Extensions.AI request path can be used.
        var samplingResponse = await thisServer.AsSamplingChatClient()
            .GetResponseAsync(messages, options, cancellationToken);

        return $"LLM sampling result: {samplingResponse}";
    }
}