Skip to content

Commit 4ed55e6

Browse files
committed
Rebase to latest main
1 parent 80ed2c3 commit 4ed55e6

File tree

3 files changed

+16
-16
lines changed

3 files changed

+16
-16
lines changed

samples/EverythingServer/Program.cs

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -75,7 +75,7 @@ await ctx.Server.SampleAsync([
7575
new ChatMessage(ChatRole.System, "You are a helpful test server"),
7676
new ChatMessage(ChatRole.User, $"Resource {uri}, context: A new subscription was started"),
7777
],
78-
options: new ChatOptions
78+
chatOptions: new ChatOptions
7979
{
8080
MaxOutputTokens = 100,
8181
Temperature = 0.7f,

src/ModelContextProtocol.Core/Server/McpServer.Methods.cs

Lines changed: 14 additions & 14 deletions
Original file line numberDiff line numberDiff line change
@@ -79,19 +79,19 @@ public ValueTask<CreateMessageResult> SampleAsync(
7979
/// Requests to sample an LLM via the client using the provided chat messages and options.
8080
/// </summary>
8181
/// <param name="messages">The messages to send as part of the request.</param>
82-
/// <param name="options">The options to use for the request, including model parameters and constraints.</param>
82+
/// <param name="chatOptions">The options to use for the request, including model parameters and constraints.</param>
8383
/// <param name="cancellationToken">The <see cref="CancellationToken"/> to monitor for cancellation requests. The default is <see cref="CancellationToken.None"/>.</param>
8484
/// <returns>A task containing the chat response from the model.</returns>
8585
/// <exception cref="ArgumentNullException"><paramref name="messages"/> is <see langword="null"/>.</exception>
8686
/// <exception cref="InvalidOperationException">The client does not support sampling.</exception>
8787
public async Task<ChatResponse> SampleAsync(
88-
IEnumerable<ChatMessage> messages, ChatOptions? options = default, CancellationToken cancellationToken = default)
88+
IEnumerable<ChatMessage> messages, ChatOptions? chatOptions = default, CancellationToken cancellationToken = default)
8989
{
9090
Throw.IfNull(messages);
9191

9292
StringBuilder? systemPrompt = null;
9393

94-
if (options?.Instructions is { } instructions)
94+
if (chatOptions?.Instructions is { } instructions)
9595
{
9696
(systemPrompt ??= new()).Append(instructions);
9797
}
@@ -137,15 +137,15 @@ public async Task<ChatResponse> SampleAsync(
137137
}
138138

139139
ModelPreferences? modelPreferences = null;
140-
if (options?.ModelId is { } modelId)
140+
if (chatOptions?.ModelId is { } modelId)
141141
{
142142
modelPreferences = new() { Hints = [new() { Name = modelId }] };
143143
}
144144

145145
IList<Tool>? tools = null;
146-
if (options?.Tools is { Count: > 0 })
146+
if (chatOptions?.Tools is { Count: > 0 })
147147
{
148-
foreach (var tool in options.Tools)
148+
foreach (var tool in chatOptions.Tools)
149149
{
150150
if (tool is AIFunctionDeclaration af)
151151
{
@@ -160,26 +160,26 @@ public async Task<ChatResponse> SampleAsync(
160160
}
161161
}
162162

163-
ToolChoice? toolChoice = options?.ToolMode switch
163+
ToolChoice? toolChoice = chatOptions?.ToolMode switch
164164
{
165165
NoneChatToolMode => new() { Mode = ToolChoice.ModeNone },
166166
AutoChatToolMode => new() { Mode = ToolChoice.ModeAuto },
167167
RequiredChatToolMode => new() { Mode = ToolChoice.ModeRequired },
168168
_ => null,
169169
};
170170

171-
var result = await SampleAsync(new()
171+
var result = await SampleAsync(new CreateMessageRequestParams
172172
{
173-
MaxTokens = options?.MaxOutputTokens ?? ServerOptions.MaxSamplingOutputTokens,
173+
MaxTokens = chatOptions?.MaxOutputTokens ?? ServerOptions.MaxSamplingOutputTokens,
174174
Messages = samplingMessages,
175175
ModelPreferences = modelPreferences,
176-
StopSequences = options?.StopSequences?.ToArray(),
176+
StopSequences = chatOptions?.StopSequences?.ToArray(),
177177
SystemPrompt = systemPrompt?.ToString(),
178-
Temperature = options?.Temperature,
178+
Temperature = chatOptions?.Temperature,
179179
ToolChoice = toolChoice,
180180
Tools = tools,
181-
Meta = options?.AdditionalProperties?.ToJsonObject(),
182-
}, cancellationToken).ConfigureAwait(false);
181+
Meta = chatOptions?.AdditionalProperties?.ToJsonObject(),
182+
}, null, cancellationToken).ConfigureAwait(false);
183183

184184
List<AIContent> responseContents = [];
185185
foreach (var block in result.Content)
@@ -492,7 +492,7 @@ private sealed class SamplingChatClient(McpServer server) : IChatClient
492492

493493
/// <inheritdoc/>
494494
public Task<ChatResponse> GetResponseAsync(IEnumerable<ChatMessage> messages, ChatOptions? options = null, CancellationToken cancellationToken = default) =>
495-
_server.SampleAsync(messages, options: options, cancellationToken: cancellationToken);
495+
_server.SampleAsync(messages, chatOptions: options, cancellationToken: cancellationToken);
496496

497497
/// <inheritdoc/>
498498
async IAsyncEnumerable<ChatResponseUpdate> IChatClient.GetStreamingResponseAsync(

src/ModelContextProtocol.Core/Server/McpServerOptions.cs

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -158,7 +158,7 @@ public McpServerHandlers Handlers
158158
/// </summary>
159159
/// <remarks>
160160
/// <para>
161-
/// This value is used in <see cref="McpServer.SampleAsync(IEnumerable{Microsoft.Extensions.AI.ChatMessage}, Microsoft.Extensions.AI.ChatOptions?, RequestOptions?, CancellationToken)"/>
161+
/// This value is used in <see cref="McpServer.SampleAsync(IEnumerable{Microsoft.Extensions.AI.ChatMessage}, Microsoft.Extensions.AI.ChatOptions?, CancellationToken)"/>
162162
/// when <see cref="Microsoft.Extensions.AI.ChatOptions.MaxOutputTokens"/> is not set in the request options.
163163
/// </para>
164164
/// <para>

0 commit comments

Comments (0)