Program.cs (forked from modelcontextprotocol/csharp-sdk)
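// Sample MCP client: connects to the MCP "everything" demo server over stdio, lists the
// tools it exposes, and runs an interactive console chat in which an OpenAI model
// (gpt-4o-mini) can invoke those tools. Telemetry is exported via OpenTelemetry.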
using ModelContextProtocol.Client;
using ModelContextProtocol.Protocol.Transport;
using Microsoft.Extensions.AI;
using OpenAI;
using OpenTelemetry;
using OpenTelemetry.Trace;
using Microsoft.Extensions.Logging;
using OpenTelemetry.Logs;
using OpenTelemetry.Metrics;

using var tracerProvider = Sdk.CreateTracerProviderBuilder()
    .AddHttpClientInstrumentation()
    .AddSource("*")
    .AddOtlpExporter()
    .Build();

using var metricsProvider = Sdk.CreateMeterProviderBuilder()
    .AddHttpClientInstrumentation()
    .AddMeter("*")
    .AddOtlpExporter()
    .Build();

using var loggerFactory = LoggerFactory.Create(builder => builder.AddOpenTelemetry(opt => opt.AddOtlpExporter()));
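// The OTLP exporters honor the standard OTEL_EXPORTER_OTLP_* environment variables;
// by default, telemetry is sent to a local OpenTelemetry collector at http://localhost:4317.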
// Connect to an MCP server.
Console.WriteLine("Connecting client to MCP 'everything' server");

// Create an OpenAI client (or any other client compatible with IChatClient).
// Provide your own OPENAI_API_KEY via an environment variable.
var openAIClient = new OpenAIClient(Environment.GetEnvironmentVariable("OPENAI_API_KEY")).GetChatClient("gpt-4o-mini");

// Create a sampling client: the server can use MCP sampling to ask this client to run
// LLM completions on its behalf.
using IChatClient samplingClient = openAIClient.AsIChatClient()
    .AsBuilder()
    .UseOpenTelemetry(loggerFactory: loggerFactory, configure: o => o.EnableSensitiveData = true)
    .Build();
var mcpClient = await McpClientFactory.CreateAsync(
    new StdioClientTransport(new()
    {
        Command = "npx",
        Arguments = ["-y", "--verbose", "@modelcontextprotocol/server-everything"],
        Name = "Everything",
    }),
    clientOptions: new()
    {
        Capabilities = new() { Sampling = new() { SamplingHandler = samplingClient.CreateSamplingHandler() } },
    },
    loggerFactory: loggerFactory);
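// Note: the "everything" server above is launched via npx, so Node.js must be installed and on PATH.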
// Get all available tools
Console.WriteLine("Tools available:");
var tools = await mcpClient.ListToolsAsync();
foreach (var tool in tools)
{
    Console.WriteLine($" {tool}");
}

Console.WriteLine();

// Create an IChatClient that can use the tools.
using IChatClient chatClient = openAIClient.AsIChatClient()
    .AsBuilder()
    .UseFunctionInvocation()
    .UseOpenTelemetry(loggerFactory: loggerFactory, configure: o => o.EnableSensitiveData = true)
    .Build();
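// UseFunctionInvocation() above enables automatic function calling, so the MCP tools passed
// via ChatOptions.Tools are invoked on the model's behalf whenever it requests them.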
// Have a conversation, making all tools available to the LLM.
List<ChatMessage> messages = [];
while (true)
{
    Console.Write("Q: ");
    messages.Add(new(ChatRole.User, Console.ReadLine()));

    List<ChatResponseUpdate> updates = [];
    await foreach (var update in chatClient.GetStreamingResponseAsync(messages, new() { Tools = [.. tools] }))
    {
        Console.Write(update);
        updates.Add(update);
    }
    Console.WriteLine();

    messages.AddMessages(updates);
}