
Commit 9330774

complete fix for SampleLlmTool discovery (#102)
#80

Co-authored-by: Stephen Toub <[email protected]>
1 parent 259f11a commit 9330774

File tree

1 file changed: +4 −10 lines changed


samples/TestServerWithHosting/Tools/SampleLlmTool.cs

@@ -8,23 +8,17 @@ namespace TestServerWithHosting.Tools;
 /// This tool uses depenency injection and async method
 /// </summary>
 [McpServerToolType]
-public class SampleLlmTool
+public static class SampleLlmTool
 {
-    private readonly IMcpServer _server;
-
-    public SampleLlmTool(IMcpServer server)
-    {
-        _server = server ?? throw new ArgumentNullException(nameof(server));
-    }
-
     [McpServerTool("sampleLLM"), Description("Samples from an LLM using MCP's sampling feature")]
-    public async Task<string> SampleLLM(
+    public static async Task<string> SampleLLM(
+        IMcpServer thisServer,
         [Description("The prompt to send to the LLM")] string prompt,
         [Description("Maximum number of tokens to generate")] int maxTokens,
         CancellationToken cancellationToken)
     {
         var samplingParams = CreateRequestSamplingParams(prompt ?? string.Empty, "sampleLLM", maxTokens);
-        var sampleResult = await _server.RequestSamplingAsync(samplingParams, cancellationToken);
+        var sampleResult = await thisServer.RequestSamplingAsync(samplingParams, cancellationToken);
 
         return $"LLM sampling result: {sampleResult.Content.Text}";
     }
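
The change makes the tool class static and moves the IMcpServer dependency from the constructor to a parameter of the tool method, so the SDK's attribute-based discovery can register the tool without constructing an instance and supply the server when the tool is invoked. Below is a minimal hosting sketch of how such a tool is typically wired up; it assumes the ModelContextProtocol C# SDK's hosting extensions (AddMcpServer, WithStdioServerTransport, WithToolsFromAssembly), whose names may differ across package versions.

// Program.cs — minimal sketch, not part of this commit. Assumes the
// ModelContextProtocol C# SDK hosting/DI extensions.
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Hosting;

var builder = Host.CreateApplicationBuilder(args);

builder.Services
    .AddMcpServer()
    .WithStdioServerTransport()
    // Scans the assembly for [McpServerToolType] classes. With SampleLlmTool
    // now static, its [McpServerTool] methods are discovered without an
    // instance, and IMcpServer is bound to the thisServer parameter per call.
    .WithToolsFromAssembly();

await builder.Build().RunAsync();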

0 commit comments
