From 4ae06da3ddafde73b6fad67e7a423b4c436cca00 Mon Sep 17 00:00:00 2001 From: Stephen Toub Date: Mon, 10 Feb 2025 15:51:16 -0500 Subject: [PATCH 1/4] Rename IChatClient members and corresponding types After great feedback on usage of the APIs, re-reviewing the state of the ecosystem, and length discussions, we're renaming the members of IChatClient to destress the notion of "completions" and instead focus just on getting back a {streaming} response. --- .../ChatCompletion/ChatClientExtensions.cs | 24 +-- .../ChatCompletion/ChatClientMetadata.cs | 12 +- .../{ChatCompletion.cs => ChatResponse.cs} | 64 ++++---- ...pletionUpdate.cs => ChatResponseUpdate.cs} | 48 +++--- ...ons.cs => ChatResponseUpdateExtensions.cs} | 92 ++++++------ .../ChatCompletion/ChatRole.cs | 8 +- .../ChatCompletion/DelegatingChatClient.cs | 8 +- .../ChatCompletion/IChatClient.cs | 14 +- .../Contents/UsageContent.cs | 2 +- .../README.md | 60 ++++---- .../Utilities/AIJsonUtilities.Defaults.cs | 6 +- .../AzureAIInferenceChatClient.cs | 36 ++--- .../README.md | 18 +-- .../OllamaChatClient.cs | 14 +- .../Microsoft.Extensions.AI.Ollama/README.md | 18 +-- .../OpenAIAssistantClient.cs | 12 +- .../OpenAIChatClient.cs | 10 +- .../OpenAIChatCompletionRequest.cs | 6 +- .../OpenAIModelMapper.ChatCompletion.cs | 50 +++--- .../OpenAIModelMapper.ChatMessage.cs | 2 +- ...nAIModelMappers.StreamingChatCompletion.cs | 100 ++++++------ .../OpenAIRealtimeExtensions.cs | 2 +- .../OpenAISerializationHelpers.cs | 24 +-- .../Microsoft.Extensions.AI.OpenAI/README.md | 18 +-- .../Microsoft.Extensions.AI/CachingHelpers.cs | 2 +- .../AnonymousDelegatingChatClient.cs | 98 ++++++------ .../ChatCompletion/CachingChatClient.cs | 52 +++---- .../ChatCompletion/ChatClientBuilder.cs | 28 ++-- .../ChatClientStructuredOutputExtensions.cs | 38 ++--- ...hatCompletion{T}.cs => ChatResponse{T}.cs} | 34 ++--- .../ConfigureOptionsChatClient.cs | 8 +- .../DistributedCachingChatClient.cs | 18 +-- .../FunctionInvokingChatClient.cs | 22 +-- 
.../ChatCompletion/LoggingChatClient.cs | 50 +++--- .../ChatCompletion/OpenTelemetryChatClient.cs | 72 ++++----- .../Embeddings/CachingEmbeddingGenerator.cs | 2 +- .../OpenTelemetryEmbeddingGenerator.cs | 4 +- .../ChatClientExtensionsTests.cs | 18 +-- ...ompletionTests.cs => ChatResponseTests.cs} | 142 +++++++++--------- ...s => ChatResponseUpdateExtensionsTests.cs} | 100 ++++++------ ...ateTests.cs => ChatResponseUpdateTests.cs} | 30 ++-- .../DelegatingChatClientTests.cs | 14 +- .../TestChatClient.cs | 8 +- .../TestJsonSerializerContext.cs | 4 +- .../AzureAIInferenceChatClientTests.cs | 36 ++--- .../IntegrationTestHelpers.cs | 2 +- .../CallCountingChatClient.cs | 8 +- .../ChatClientIntegrationTests.cs | 82 +++++----- .../PromptBasedFunctionCallingChatClient.cs | 4 +- .../ReducingChatClientTests.cs | 12 +- .../OllamaChatClientIntegrationTests.cs | 12 +- .../OllamaChatClientTests.cs | 14 +- .../OpenAIChatClientTests.cs | 52 +++---- .../OpenAISerializationTests.cs | 56 +++---- ...atClientStructuredOutputExtensionsTests.cs | 82 +++++----- .../ConfigureOptionsChatClientTests.cs | 14 +- .../DistributedCachingChatClientTest.cs | 138 ++++++++--------- .../FunctionInvokingChatClientTests.cs | 42 +++--- .../ChatCompletion/LoggingChatClientTests.cs | 40 ++--- .../OpenTelemetryChatClientTests.cs | 18 +-- .../UseDelegateChatClientTests.cs | 58 +++---- .../LoggingEmbeddingGeneratorTests.cs | 2 +- .../TestJsonSerializerContext.cs | 4 +- 63 files changed, 1034 insertions(+), 1034 deletions(-) rename src/Libraries/Microsoft.Extensions.AI.Abstractions/ChatCompletion/{ChatCompletion.cs => ChatResponse.cs} (65%) rename src/Libraries/Microsoft.Extensions.AI.Abstractions/ChatCompletion/{StreamingChatCompletionUpdate.cs => ChatResponseUpdate.cs} (65%) rename src/Libraries/Microsoft.Extensions.AI.Abstractions/ChatCompletion/{StreamingChatCompletionUpdateExtensions.cs => ChatResponseUpdateExtensions.cs} (66%) rename 
src/Libraries/Microsoft.Extensions.AI/ChatCompletion/{ChatCompletion{T}.cs => ChatResponse{T}.cs} (85%) rename test/Libraries/Microsoft.Extensions.AI.Abstractions.Tests/ChatCompletion/{ChatCompletionTests.cs => ChatResponseTests.cs} (65%) rename test/Libraries/Microsoft.Extensions.AI.Abstractions.Tests/ChatCompletion/{StreamingChatCompletionUpdateExtensionsTests.cs => ChatResponseUpdateExtensionsTests.cs} (60%) rename test/Libraries/Microsoft.Extensions.AI.Abstractions.Tests/ChatCompletion/{StreamingChatCompletionUpdateTests.cs => ChatResponseUpdateTests.cs} (89%) diff --git a/src/Libraries/Microsoft.Extensions.AI.Abstractions/ChatCompletion/ChatClientExtensions.cs b/src/Libraries/Microsoft.Extensions.AI.Abstractions/ChatCompletion/ChatClientExtensions.cs index 163cde97f58..e485a75d4b1 100644 --- a/src/Libraries/Microsoft.Extensions.AI.Abstractions/ChatCompletion/ChatClientExtensions.cs +++ b/src/Libraries/Microsoft.Extensions.AI.Abstractions/ChatCompletion/ChatClientExtensions.cs @@ -27,13 +27,13 @@ public static class ChatClientExtensions return (TService?)client.GetService(typeof(TService), serviceKey); } - /// Sends a user chat text message to the model and returns the response messages. + /// Sends a user chat text message and returns the response messages. /// The chat client. /// The text content for the chat message to send. /// The chat options to configure the request. /// The to monitor for cancellation requests. The default is . /// The response messages generated by the client. - public static Task CompleteAsync( + public static Task GetResponseAsync( this IChatClient client, string chatMessage, ChatOptions? 
options = null, @@ -42,16 +42,16 @@ public static Task CompleteAsync( _ = Throw.IfNull(client); _ = Throw.IfNull(chatMessage); - return client.CompleteAsync(new ChatMessage(ChatRole.User, chatMessage), options, cancellationToken); + return client.GetResponseAsync(new ChatMessage(ChatRole.User, chatMessage), options, cancellationToken); } - /// Sends a chat message to the model and returns the response messages. + /// Sends a chat message and returns the response messages. /// The chat client. /// The chat message to send. /// The chat options to configure the request. /// The to monitor for cancellation requests. The default is . /// The response messages generated by the client. - public static Task CompleteAsync( + public static Task GetResponseAsync( this IChatClient client, ChatMessage chatMessage, ChatOptions? options = null, @@ -60,16 +60,16 @@ public static Task CompleteAsync( _ = Throw.IfNull(client); _ = Throw.IfNull(chatMessage); - return client.CompleteAsync([chatMessage], options, cancellationToken); + return client.GetResponseAsync([chatMessage], options, cancellationToken); } - /// Sends a user chat text message to the model and streams the response messages. + /// Sends a user chat text message and streams the response messages. /// The chat client. /// The text content for the chat message to send. /// The chat options to configure the request. /// The to monitor for cancellation requests. The default is . /// The response messages generated by the client. - public static IAsyncEnumerable CompleteStreamingAsync( + public static IAsyncEnumerable GetStreamingResponseAsync( this IChatClient client, string chatMessage, ChatOptions? 
options = null, @@ -78,16 +78,16 @@ public static IAsyncEnumerable CompleteStreamingA _ = Throw.IfNull(client); _ = Throw.IfNull(chatMessage); - return client.CompleteStreamingAsync(new ChatMessage(ChatRole.User, chatMessage), options, cancellationToken); + return client.GetStreamingResponseAsync(new ChatMessage(ChatRole.User, chatMessage), options, cancellationToken); } - /// Sends a chat message to the model and streams the response messages. + /// Sends a chat message and streams the response messages. /// The chat client. /// The chat message to send. /// The chat options to configure the request. /// The to monitor for cancellation requests. The default is . /// The response messages generated by the client. - public static IAsyncEnumerable CompleteStreamingAsync( + public static IAsyncEnumerable GetStreamingResponseAsync( this IChatClient client, ChatMessage chatMessage, ChatOptions? options = null, @@ -96,6 +96,6 @@ public static IAsyncEnumerable CompleteStreamingA _ = Throw.IfNull(client); _ = Throw.IfNull(chatMessage); - return client.CompleteStreamingAsync([chatMessage], options, cancellationToken); + return client.GetStreamingResponseAsync([chatMessage], options, cancellationToken); } } diff --git a/src/Libraries/Microsoft.Extensions.AI.Abstractions/ChatCompletion/ChatClientMetadata.cs b/src/Libraries/Microsoft.Extensions.AI.Abstractions/ChatCompletion/ChatClientMetadata.cs index 406b9768dd7..ace4dead9e3 100644 --- a/src/Libraries/Microsoft.Extensions.AI.Abstractions/ChatCompletion/ChatClientMetadata.cs +++ b/src/Libraries/Microsoft.Extensions.AI.Abstractions/ChatCompletion/ChatClientMetadata.cs @@ -10,11 +10,11 @@ public class ChatClientMetadata { /// Initializes a new instance of the class. /// - /// The name of the chat completion provider, if applicable. Where possible, this should map to the + /// The name of the chat provider, if applicable. 
Where possible, this should map to the /// appropriate name defined in the OpenTelemetry Semantic Conventions for Generative AI systems. /// - /// The URL for accessing the chat completion provider, if applicable. - /// The ID of the chat completion model used, if applicable. + /// The URL for accessing the chat provider, if applicable. + /// The ID of the chat model used, if applicable. public ChatClientMetadata(string? providerName = null, Uri? providerUri = null, string? modelId = null) { ModelId = modelId; @@ -22,17 +22,17 @@ public ChatClientMetadata(string? providerName = null, Uri? providerUri = null, ProviderUri = providerUri; } - /// Gets the name of the chat completion provider. + /// Gets the name of the chat provider. /// /// Where possible, this maps to the appropriate name defined in the /// OpenTelemetry Semantic Conventions for Generative AI systems. /// public string? ProviderName { get; } - /// Gets the URL for accessing the chat completion provider. + /// Gets the URL for accessing the chat provider. public Uri? ProviderUri { get; } - /// Gets the ID of the model used by this chat completion provider. + /// Gets the ID of the model used by this chat provider. /// /// This value can be null if either the name is unknown or there are multiple possible models associated with this instance. /// An individual request may override this value via . 
diff --git a/src/Libraries/Microsoft.Extensions.AI.Abstractions/ChatCompletion/ChatCompletion.cs b/src/Libraries/Microsoft.Extensions.AI.Abstractions/ChatCompletion/ChatResponse.cs similarity index 65% rename from src/Libraries/Microsoft.Extensions.AI.Abstractions/ChatCompletion/ChatCompletion.cs rename to src/Libraries/Microsoft.Extensions.AI.Abstractions/ChatCompletion/ChatResponse.cs index 296c089ce1f..f789fc7f974 100644 --- a/src/Libraries/Microsoft.Extensions.AI.Abstractions/ChatCompletion/ChatCompletion.cs +++ b/src/Libraries/Microsoft.Extensions.AI.Abstractions/ChatCompletion/ChatResponse.cs @@ -9,36 +9,36 @@ namespace Microsoft.Extensions.AI; -/// Represents the result of a chat completion request. -public class ChatCompletion +/// Represents the response to a chat request. +public class ChatResponse { - /// The list of choices in the completion. + /// The list of choices in the response. private IList _choices; - /// Initializes a new instance of the class. - /// The list of choices in the completion, one message per choice. + /// Initializes a new instance of the class. + /// The list of choices in the response, one message per choice. [JsonConstructor] - public ChatCompletion(IList choices) + public ChatResponse(IList choices) { _choices = Throw.IfNull(choices); } - /// Initializes a new instance of the class. - /// The chat message representing the singular choice in the completion. - public ChatCompletion(ChatMessage message) + /// Initializes a new instance of the class. + /// The chat message representing the singular choice in the response. + public ChatResponse(ChatMessage message) { _ = Throw.IfNull(message); _choices = [message]; } - /// Gets or sets the list of chat completion choices. + /// Gets or sets the list of chat response choices. public IList Choices { get => _choices; set => _choices = Throw.IfNull(value); } - /// Gets the chat completion message. + /// Gets the chat response message. 
/// /// If there are multiple choices, this property returns the first choice. /// If is empty, this property will throw. Use to access all choices directly. @@ -51,48 +51,48 @@ public ChatMessage Message var choices = Choices; if (choices.Count == 0) { - throw new InvalidOperationException($"The {nameof(ChatCompletion)} instance does not contain any {nameof(ChatMessage)} choices."); + throw new InvalidOperationException($"The {nameof(ChatResponse)} instance does not contain any {nameof(ChatMessage)} choices."); } return choices[0]; } } - /// Gets or sets the ID of the chat completion. - public string? CompletionId { get; set; } + /// Gets or sets the ID of the chat response. + public string? ResponseId { get; set; } - /// Gets or sets the chat thread ID associated with this chat completion. + /// Gets or sets the chat thread ID associated with this chat response. /// /// Some implementations are capable of storing the state for a chat thread, such that - /// the input messages supplied to need only be the additional messages beyond + /// the input messages supplied to need only be the additional messages beyond /// what's already stored. If this property is non-, it represents an identifier for that state, /// and it should be used in a subsequent instead of supplying the same messages - /// (and this 's message) as part of the chatMessages parameter. + /// (and this 's message) as part of the chatMessages parameter. /// public string? ChatThreadId { get; set; } - /// Gets or sets the model ID used in the creation of the chat completion. + /// Gets or sets the model ID used in the creation of the chat response. public string? ModelId { get; set; } - /// Gets or sets a timestamp for the chat completion. + /// Gets or sets a timestamp for the chat response. public DateTimeOffset? CreatedAt { get; set; } - /// Gets or sets the reason for the chat completion. + /// Gets or sets the reason for the chat response. public ChatFinishReason? 
FinishReason { get; set; } - /// Gets or sets usage details for the chat completion. + /// Gets or sets usage details for the chat response. public UsageDetails? Usage { get; set; } - /// Gets or sets the raw representation of the chat completion from an underlying implementation. + /// Gets or sets the raw representation of the chat response from an underlying implementation. /// - /// If a is created to represent some underlying object from another object + /// If a is created to represent some underlying object from another object /// model, this property can be used to store that original object. This can be useful for debugging or /// for enabling a consumer to access the underlying object model if needed. /// [JsonIgnore] public object? RawRepresentation { get; set; } - /// Gets or sets any additional properties associated with the chat completion. + /// Gets or sets any additional properties associated with the chat response. public AdditionalPropertiesDictionary? AdditionalProperties { get; set; } /// @@ -117,14 +117,14 @@ public override string ToString() return sb.ToString(); } - /// Creates an array of instances that represent this . - /// An array of instances that may be used to represent this . - public StreamingChatCompletionUpdate[] ToStreamingChatCompletionUpdates() + /// Creates an array of instances that represent this . + /// An array of instances that may be used to represent this . + public ChatResponseUpdate[] ToChatResponseUpdates() { - StreamingChatCompletionUpdate? extra = null; + ChatResponseUpdate? extra = null; if (AdditionalProperties is not null || Usage is not null) { - extra = new StreamingChatCompletionUpdate + extra = new ChatResponseUpdate { AdditionalProperties = AdditionalProperties }; @@ -136,12 +136,12 @@ public StreamingChatCompletionUpdate[] ToStreamingChatCompletionUpdates() } int choicesCount = Choices.Count; - var updates = new StreamingChatCompletionUpdate[choicesCount + (extra is null ? 
0 : 1)]; + var updates = new ChatResponseUpdate[choicesCount + (extra is null ? 0 : 1)]; for (int choiceIndex = 0; choiceIndex < choicesCount; choiceIndex++) { ChatMessage choice = Choices[choiceIndex]; - updates[choiceIndex] = new StreamingChatCompletionUpdate + updates[choiceIndex] = new ChatResponseUpdate { ChatThreadId = ChatThreadId, ChoiceIndex = choiceIndex, @@ -152,7 +152,7 @@ public StreamingChatCompletionUpdate[] ToStreamingChatCompletionUpdates() RawRepresentation = choice.RawRepresentation, Role = choice.Role, - CompletionId = CompletionId, + ResponseId = ResponseId, CreatedAt = CreatedAt, FinishReason = FinishReason, ModelId = ModelId diff --git a/src/Libraries/Microsoft.Extensions.AI.Abstractions/ChatCompletion/StreamingChatCompletionUpdate.cs b/src/Libraries/Microsoft.Extensions.AI.Abstractions/ChatCompletion/ChatResponseUpdate.cs similarity index 65% rename from src/Libraries/Microsoft.Extensions.AI.Abstractions/ChatCompletion/StreamingChatCompletionUpdate.cs rename to src/Libraries/Microsoft.Extensions.AI.Abstractions/ChatCompletion/ChatResponseUpdate.cs index e50fd42169b..2b0cce87bef 100644 --- a/src/Libraries/Microsoft.Extensions.AI.Abstractions/ChatCompletion/StreamingChatCompletionUpdate.cs +++ b/src/Libraries/Microsoft.Extensions.AI.Abstractions/ChatCompletion/ChatResponseUpdate.cs @@ -13,46 +13,46 @@ namespace Microsoft.Extensions.AI; /// /// /// -/// Conceptually, this combines the roles of and +/// Conceptually, this combines the roles of and /// in streaming output. For ease of consumption, it also flattens the nested structure you see on /// streaming chunks in some AI service, so instead of a dictionary of choices, each update represents a /// single choice (and hence has its own role, choice ID, etc.). /// /// -/// is so named because it represents streaming updates -/// to a single chat completion. As such, it is considered erroneous for multiple updates that are part -/// of the same completion to contain competing values. 
For example, some updates that are part of -/// the same completion may have a +/// is so named because it represents updates +/// to a single chat response. As such, it is considered erroneous for multiple updates that are part +/// of the same response to contain competing values. For example, some updates that are part of +/// the same response may have a /// value, and others may have a non- value, but all of those with a non- /// value must have the same value (e.g. . It should never be the case, for example, -/// that one in a completion has a role of +/// that one in a response has a role of /// while another has a role of "AI". /// /// -/// The relationship between and is -/// codified in the and -/// , which enable bidirectional conversions +/// The relationship between and is +/// codified in the and +/// , which enable bidirectional conversions /// between the two. Note, however, that the conversion may be slightly lossy, for example if multiple updates -/// all have different objects whereas there's -/// only one slot for such an object available in . +/// all have different objects whereas there's +/// only one slot for such an object available in . /// /// -public class StreamingChatCompletionUpdate +public class ChatResponseUpdate { - /// The completion update content items. + /// The response update content items. private IList? _contents; /// The name of the author of the update. private string? _authorName; - /// Gets or sets the name of the author of the completion update. + /// Gets or sets the name of the author of the response update. public string? AuthorName { get => _authorName; set => _authorName = string.IsNullOrWhiteSpace(value) ? null : value; } - /// Gets or sets the role of the author of the completion update. + /// Gets or sets the role of the author of the response update. public ChatRole? Role { get; set; } /// @@ -79,7 +79,7 @@ public string? Text } } - /// Gets or sets the chat completion update content items. 
+ /// Gets or sets the chat response update content items. [AllowNull] public IList Contents { @@ -87,9 +87,9 @@ public IList Contents set => _contents = value; } - /// Gets or sets the raw representation of the completion update from an underlying implementation. + /// Gets or sets the raw representation of the response update from an underlying implementation. /// - /// If a is created to represent some underlying object from another object + /// If a is created to represent some underlying object from another object /// model, this property can be used to store that original object. This can be useful for debugging or /// for enabling a consumer to access the underlying object model if needed. /// @@ -99,20 +99,20 @@ public IList Contents /// Gets or sets additional properties for the update. public AdditionalPropertiesDictionary? AdditionalProperties { get; set; } - /// Gets or sets the ID of the completion of which this update is a part. - public string? CompletionId { get; set; } + /// Gets or sets the ID of the response of which this update is a part. + public string? ResponseId { get; set; } - /// Gets or sets the chat thread ID associated with the chat completion of which this update is a part. + /// Gets or sets the chat thread ID associated with the chat response of which this update is a part. /// /// Some implementations are capable of storing the state for a chat thread, such that - /// the input messages supplied to need only be the additional messages beyond + /// the input messages supplied to need only be the additional messages beyond /// what's already stored. If this property is non-, it represents an identifier for that state, /// and it should be used in a subsequent instead of supplying the same messages /// (and this streaming message) as part of the chatMessages parameter. /// public string? ChatThreadId { get; set; } - /// Gets or sets a timestamp for the completion update. + /// Gets or sets a timestamp for the response update. 
public DateTimeOffset? CreatedAt { get; set; } /// Gets or sets the zero-based index of the choice with which this update is associated in the streaming sequence. @@ -121,7 +121,7 @@ public IList Contents /// Gets or sets the finish reason for the operation. public ChatFinishReason? FinishReason { get; set; } - /// Gets or sets the model ID using in the creation of the chat completion of which this update is a part. + /// Gets or sets the model ID using in the creation of the chat response of which this update is a part. public string? ModelId { get; set; } /// diff --git a/src/Libraries/Microsoft.Extensions.AI.Abstractions/ChatCompletion/StreamingChatCompletionUpdateExtensions.cs b/src/Libraries/Microsoft.Extensions.AI.Abstractions/ChatCompletion/ChatResponseUpdateExtensions.cs similarity index 66% rename from src/Libraries/Microsoft.Extensions.AI.Abstractions/ChatCompletion/StreamingChatCompletionUpdateExtensions.cs rename to src/Libraries/Microsoft.Extensions.AI.Abstractions/ChatCompletion/ChatResponseUpdateExtensions.cs index 9694b0e4dc0..bfc2c3fd60f 100644 --- a/src/Libraries/Microsoft.Extensions.AI.Abstractions/ChatCompletion/StreamingChatCompletionUpdateExtensions.cs +++ b/src/Libraries/Microsoft.Extensions.AI.Abstractions/ChatCompletion/ChatResponseUpdateExtensions.cs @@ -18,11 +18,11 @@ namespace Microsoft.Extensions.AI; /// -/// Provides extension methods for working with instances. +/// Provides extension methods for working with instances. /// -public static class StreamingChatCompletionUpdateExtensions +public static class ChatResponseUpdateExtensions { - /// Combines instances into a single . + /// Combines instances into a single . /// The updates to be combined. /// /// to attempt to coalesce contiguous items, where applicable, @@ -30,26 +30,26 @@ public static class StreamingChatCompletionUpdateExtensions /// the manufactured instances. When , the original content items are used. /// The default is . /// - /// The combined . 
- public static ChatCompletion ToChatCompletion( - this IEnumerable updates, bool coalesceContent = true) + /// The combined . + public static ChatResponse ToChatResponse( + this IEnumerable updates, bool coalesceContent = true) { _ = Throw.IfNull(updates); - ChatCompletion completion = new([]); + ChatResponse response = new([]); Dictionary messages = []; foreach (var update in updates) { - ProcessUpdate(update, messages, completion); + ProcessUpdate(update, messages, response); } - AddMessagesToCompletion(messages, completion, coalesceContent); + AddMessagesToResponse(messages, response, coalesceContent); - return completion; + return response; } - /// Combines instances into a single . + /// Combines instances into a single . /// The updates to be combined. /// /// to attempt to coalesce contiguous items, where applicable, @@ -58,41 +58,41 @@ public static ChatCompletion ToChatCompletion( /// The default is . /// /// The to monitor for cancellation requests. The default is . - /// The combined . - public static Task ToChatCompletionAsync( - this IAsyncEnumerable updates, bool coalesceContent = true, CancellationToken cancellationToken = default) + /// The combined . 
+ public static Task ToChatResponseAsync( + this IAsyncEnumerable updates, bool coalesceContent = true, CancellationToken cancellationToken = default) { _ = Throw.IfNull(updates); - return ToChatCompletionAsync(updates, coalesceContent, cancellationToken); + return ToChatResponseAsync(updates, coalesceContent, cancellationToken); - static async Task ToChatCompletionAsync( - IAsyncEnumerable updates, bool coalesceContent, CancellationToken cancellationToken) + static async Task ToChatResponseAsync( + IAsyncEnumerable updates, bool coalesceContent, CancellationToken cancellationToken) { - ChatCompletion completion = new([]); + ChatResponse response = new([]); Dictionary messages = []; await foreach (var update in updates.WithCancellation(cancellationToken).ConfigureAwait(false)) { - ProcessUpdate(update, messages, completion); + ProcessUpdate(update, messages, response); } - AddMessagesToCompletion(messages, completion, coalesceContent); + AddMessagesToResponse(messages, response, coalesceContent); - return completion; + return response; } } - /// Processes the , incorporating its contents into and . + /// Processes the , incorporating its contents into and . /// The update to process. - /// The dictionary mapping to the being built for that choice. - /// The object whose properties should be updated based on . - private static void ProcessUpdate(StreamingChatCompletionUpdate update, Dictionary messages, ChatCompletion completion) + /// The dictionary mapping to the being built for that choice. + /// The object whose properties should be updated based on . 
+ private static void ProcessUpdate(ChatResponseUpdate update, Dictionary messages, ChatResponse response) { - completion.CompletionId ??= update.CompletionId; - completion.CreatedAt ??= update.CreatedAt; - completion.FinishReason ??= update.FinishReason; - completion.ModelId ??= update.ModelId; + response.ResponseId ??= update.ResponseId; + response.CreatedAt ??= update.CreatedAt; + response.FinishReason ??= update.FinishReason; + response.ModelId ??= update.ModelId; #if NET ChatMessage message = CollectionsMarshal.GetValueRefOrAddDefault(messages, update.ChoiceIndex, out _) ??= @@ -104,14 +104,14 @@ private static void ProcessUpdate(StreamingChatCompletionUpdate update, Dictiona } #endif - // Incorporate all content from the update into the completion. + // Incorporate all content from the update into the response. foreach (var content in update.Contents) { switch (content) { - // Usage content is treated specially and propagated to the completion's Usage. + // Usage content is treated specially and propagated to the response's Usage. case UsageContent usage: - (completion.Usage ??= new()).Add(usage.Details); + (response.Usage ??= new()).Add(usage.Details); break; default: @@ -143,28 +143,28 @@ private static void ProcessUpdate(StreamingChatCompletionUpdate update, Dictiona } } - /// Finalizes the object by transferring the into it. - /// The messages to process further and transfer into . - /// The result being built. - /// The corresponding option value provided to or . - private static void AddMessagesToCompletion(Dictionary messages, ChatCompletion completion, bool coalesceContent) + /// Finalizes the object by transferring the into it. + /// The messages to process further and transfer into . + /// The result being built. + /// The corresponding option value provided to or . + private static void AddMessagesToResponse(Dictionary messages, ChatResponse response, bool coalesceContent) { if (messages.Count <= 1) { // Add the single message if there is one. 
foreach (var entry in messages) { - AddMessage(completion, coalesceContent, entry); + AddMessage(response, coalesceContent, entry); } // In the vast majority case where there's only one choice, promote any additional properties - // from the single message to the chat completion, making them more discoverable and more similar + // from the single message to the chat response, making them more discoverable and more similar // to how they're typically surfaced from non-streaming services. - if (completion.Choices.Count == 1 && - completion.Choices[0].AdditionalProperties is { } messageProps) + if (response.Choices.Count == 1 && + response.Choices[0].AdditionalProperties is { } messageProps) { - completion.Choices[0].AdditionalProperties = null; - completion.AdditionalProperties = messageProps; + response.Choices[0].AdditionalProperties = null; + response.AdditionalProperties = messageProps; } } else @@ -172,7 +172,7 @@ private static void AddMessagesToCompletion(Dictionary message // Add all of the messages, sorted by choice index. foreach (var entry in messages.OrderBy(entry => entry.Key)) { - AddMessage(completion, coalesceContent, entry); + AddMessage(response, coalesceContent, entry); } // If there are multiple choices, we don't promote additional properties from the individual messages. @@ -180,7 +180,7 @@ private static void AddMessagesToCompletion(Dictionary message // conflicting values across the choices, it would be unclear which one should be used. 
} - static void AddMessage(ChatCompletion completion, bool coalesceContent, KeyValuePair entry) + static void AddMessage(ChatResponse response, bool coalesceContent, KeyValuePair entry) { if (entry.Value.Role == default) { @@ -192,7 +192,7 @@ static void AddMessage(ChatCompletion completion, bool coalesceContent, KeyValue CoalesceTextContent((List)entry.Value.Contents); } - completion.Choices.Add(entry.Value); + response.Choices.Add(entry.Value); } } diff --git a/src/Libraries/Microsoft.Extensions.AI.Abstractions/ChatCompletion/ChatRole.cs b/src/Libraries/Microsoft.Extensions.AI.Abstractions/ChatCompletion/ChatRole.cs index 0b5f72adfa5..77717d7c96c 100644 --- a/src/Libraries/Microsoft.Extensions.AI.Abstractions/ChatCompletion/ChatRole.cs +++ b/src/Libraries/Microsoft.Extensions.AI.Abstractions/ChatCompletion/ChatRole.cs @@ -11,21 +11,21 @@ namespace Microsoft.Extensions.AI; /// -/// Describes the intended purpose of a message within a chat completion interaction. +/// Describes the intended purpose of a message within a chat interaction. /// [JsonConverter(typeof(Converter))] public readonly struct ChatRole : IEquatable { - /// Gets the role that instructs or sets the behavior of the assistant. + /// Gets the role that instructs or sets the behavior of the system. public static ChatRole System { get; } = new("system"); /// Gets the role that provides responses to system-instructed, user-prompted input. public static ChatRole Assistant { get; } = new("assistant"); - /// Gets the role that provides input for chat completions. + /// Gets the role that provides user input for chat interactions. public static ChatRole User { get; } = new("user"); - /// Gets the role that provides additional information and references for chat completions. + /// Gets the role that provides additional information and references in response to tool use requests. 
public static ChatRole Tool { get; } = new("tool"); /// diff --git a/src/Libraries/Microsoft.Extensions.AI.Abstractions/ChatCompletion/DelegatingChatClient.cs b/src/Libraries/Microsoft.Extensions.AI.Abstractions/ChatCompletion/DelegatingChatClient.cs index 875aa31e87e..7882529ac85 100644 --- a/src/Libraries/Microsoft.Extensions.AI.Abstractions/ChatCompletion/DelegatingChatClient.cs +++ b/src/Libraries/Microsoft.Extensions.AI.Abstractions/ChatCompletion/DelegatingChatClient.cs @@ -38,15 +38,15 @@ public void Dispose() protected IChatClient InnerClient { get; } /// - public virtual Task CompleteAsync(IList chatMessages, ChatOptions? options = null, CancellationToken cancellationToken = default) + public virtual Task GetResponseAsync(IList chatMessages, ChatOptions? options = null, CancellationToken cancellationToken = default) { - return InnerClient.CompleteAsync(chatMessages, options, cancellationToken); + return InnerClient.GetResponseAsync(chatMessages, options, cancellationToken); } /// - public virtual IAsyncEnumerable CompleteStreamingAsync(IList chatMessages, ChatOptions? options = null, CancellationToken cancellationToken = default) + public virtual IAsyncEnumerable GetStreamingResponseAsync(IList chatMessages, ChatOptions? options = null, CancellationToken cancellationToken = default) { - return InnerClient.CompleteStreamingAsync(chatMessages, options, cancellationToken); + return InnerClient.GetStreamingResponseAsync(chatMessages, options, cancellationToken); } /// diff --git a/src/Libraries/Microsoft.Extensions.AI.Abstractions/ChatCompletion/IChatClient.cs b/src/Libraries/Microsoft.Extensions.AI.Abstractions/ChatCompletion/IChatClient.cs index 5d86b1fd985..79e2f658cb9 100644 --- a/src/Libraries/Microsoft.Extensions.AI.Abstractions/ChatCompletion/IChatClient.cs +++ b/src/Libraries/Microsoft.Extensions.AI.Abstractions/ChatCompletion/IChatClient.cs @@ -8,15 +8,15 @@ namespace Microsoft.Extensions.AI; -/// Represents a chat completion client. 
+/// Represents a chat client. /// /// /// Unless otherwise specified, all members of are thread-safe for concurrent use. /// It is expected that all implementations of support being used by multiple requests concurrently. /// /// -/// However, implementations of might mutate the arguments supplied to and -/// , such as by adding additional messages to the messages list or configuring the options +/// However, implementations of might mutate the arguments supplied to and +/// , such as by adding additional messages to the messages list or configuring the options /// instance. Thus, consumers of the interface either should avoid using shared instances of these arguments for concurrent /// invocations or should otherwise ensure by construction that no instances are used which might employ /// such mutation. For example, the WithChatOptions method be provided with a callback that could mutate the supplied options @@ -25,7 +25,7 @@ namespace Microsoft.Extensions.AI; /// public interface IChatClient : IDisposable { - /// Sends chat messages to the model and returns the response messages. + /// Sends chat messages and returns the response. /// The chat content to send. /// The chat options to configure the request. /// The to monitor for cancellation requests. The default is . @@ -34,12 +34,12 @@ public interface IChatClient : IDisposable /// The returned messages aren't added to . However, any intermediate messages generated implicitly /// by the client, including any messages for roundtrips to the model as part of the implementation of this request, are included. /// - Task CompleteAsync( + Task GetResponseAsync( IList chatMessages, ChatOptions? options = null, CancellationToken cancellationToken = default); - /// Sends chat messages to the model and streams the response messages. + /// Sends chat messages and streams the response. /// The chat content to send. /// The chat options to configure the request. /// The to monitor for cancellation requests. The default is . 
@@ -48,7 +48,7 @@ Task CompleteAsync( /// The returned messages aren't added to . However, any intermediate messages generated implicitly /// by the client, including any messages for roundtrips to the model as part of the implementation of this request, are included. /// - IAsyncEnumerable CompleteStreamingAsync( + IAsyncEnumerable GetStreamingResponseAsync( IList chatMessages, ChatOptions? options = null, CancellationToken cancellationToken = default); diff --git a/src/Libraries/Microsoft.Extensions.AI.Abstractions/Contents/UsageContent.cs b/src/Libraries/Microsoft.Extensions.AI.Abstractions/Contents/UsageContent.cs index 16e9d08b324..dcb2f87df7c 100644 --- a/src/Libraries/Microsoft.Extensions.AI.Abstractions/Contents/UsageContent.cs +++ b/src/Libraries/Microsoft.Extensions.AI.Abstractions/Contents/UsageContent.cs @@ -8,7 +8,7 @@ namespace Microsoft.Extensions.AI; /// -/// Represents usage information associated with a chat response. +/// Represents usage information associated with a chat request and response. /// [DebuggerDisplay("{DebuggerDisplay,nq}")] public class UsageContent : AIContent diff --git a/src/Libraries/Microsoft.Extensions.AI.Abstractions/README.md b/src/Libraries/Microsoft.Extensions.AI.Abstractions/README.md index ea0d6d9f74f..0d0ae279bef 100644 --- a/src/Libraries/Microsoft.Extensions.AI.Abstractions/README.md +++ b/src/Libraries/Microsoft.Extensions.AI.Abstractions/README.md @@ -22,7 +22,7 @@ Or directly in the C# project file: ### `IChatClient` -The `IChatClient` interface defines a client abstraction responsible for interacting with AI services that provide chat capabilities. It defines methods for sending and receiving messages comprised of multi-modal content (text, images, audio, etc.), either as a complete set or streamed incrementally. Additionally, it provides metadata information about the client and allows for retrieving strongly-typed services that may be provided by the client or its underlying services. 
+The `IChatClient` interface defines a client abstraction responsible for interacting with AI services that provide chat capabilities. It defines methods for sending and receiving messages comprised of multi-modal content (text, images, audio, etc.), either as a complete set or streamed incrementally. Additionally, it allows for retrieving strongly-typed services that may be provided by the client or its underlying services. #### Sample Implementation @@ -45,7 +45,7 @@ public class SampleChatClient : IChatClient public SampleChatClient(Uri endpoint, string modelId) => _metadata = new("SampleChatClient", endpoint, modelId); - public async Task CompleteAsync( + public async Task GetResponseAsync( IList chatMessages, ChatOptions? options = null, CancellationToken cancellationToken = default) @@ -53,7 +53,7 @@ public class SampleChatClient : IChatClient // Simulate some operation. await Task.Delay(300, cancellationToken); - // Return a sample chat completion response randomly. + // Return a sample chat response randomly. string[] responses = [ "This is the first sample response.", @@ -68,7 +68,7 @@ public class SampleChatClient : IChatClient }); } - public async IAsyncEnumerable CompleteStreamingAsync( + public async IAsyncEnumerable GetStreamingResponseAsync( IList chatMessages, ChatOptions? options = null, [EnumeratorCancellation] CancellationToken cancellationToken = default) @@ -81,7 +81,7 @@ public class SampleChatClient : IChatClient await Task.Delay(100, cancellationToken); // Yield the next message in the response. - yield return new StreamingChatCompletionUpdate + yield return new ChatResponseUpdate { Role = ChatRole.Assistant, Text = word, @@ -99,44 +99,44 @@ public class SampleChatClient : IChatClient } ``` -#### Requesting a Chat Completion: `CompleteAsync` +#### Requesting a Chat Response: `GetResponseAsync` -With an instance of `IChatClient`, the `CompleteAsync` method may be used to send a request. 
The request is composed of one or more messages, each of which is composed of one or more pieces of content. Accelerator methods exist to simplify common cases, such as constructing a request for a single piece of text content. +With an instance of `IChatClient`, the `GetResponseAsync` method may be used to send a request and get a response. The request is composed of one or more messages, each of which is composed of one or more pieces of content. Accelerator methods exist to simplify common cases, such as constructing a request for a single piece of text content. ```csharp using Microsoft.Extensions.AI; IChatClient client = new SampleChatClient(new Uri("http://coolsite.ai"), "my-custom-model"); -var response = await client.CompleteAsync("What is AI?"); +var response = await client.GetResponseAsync("What is AI?"); Console.WriteLine(response.Message); ``` -The core `CompleteAsync` method on the `IChatClient` interface accepts a list of messages. This list represents the history of all messages that are part of the conversation. +The core `GetResponseAsync` method on the `IChatClient` interface accepts a list of messages. This list represents the history of all messages that are part of the conversation. ```csharp using Microsoft.Extensions.AI; IChatClient client = new SampleChatClient(new Uri("http://coolsite.ai"), "my-custom-model"); -Console.WriteLine(await client.CompleteAsync( +Console.WriteLine(await client.GetResponseAsync( [ new(ChatRole.System, "You are a helpful AI assistant"), new(ChatRole.User, "What is AI?"), ])); ``` -#### Requesting a Streaming Chat Completion: `CompleteStreamingAsync` +#### Requesting a Streaming Chat Response: `GetStreamingResponseAsync` -The inputs to `CompleteStreamingAsync` are identical to those of `CompleteAsync`. However, rather than returning the complete response as part of a `ChatCompletion` object, the method returns an `IAsyncEnumerable`, providing a stream of updates that together form the single response. 
+The inputs to `GetStreamingResponseAsync` are identical to those of `GetResponseAsync`. However, rather than returning the complete response as part of a `ChatResponse` object, the method returns an `IAsyncEnumerable`, providing a stream of updates that together form the single response. ```csharp using Microsoft.Extensions.AI; IChatClient client = new SampleChatClient(new Uri("http://coolsite.ai"), "my-custom-model"); -await foreach (var update in client.CompleteStreamingAsync("What is AI?")) +await foreach (var update in client.GetStreamingResponseAsync("What is AI?")) { Console.Write(update); } @@ -157,7 +157,7 @@ IChatClient client = new ChatClientBuilder(new OllamaChatClient(new Uri("http:// .UseFunctionInvocation() .Build(); -var response = client.CompleteStreamingAsync( +var response = client.GetStreamingResponseAsync( "Should I wear a rain coat?", new() { Tools = [AIFunctionFactory.Create(GetCurrentWeather)] }); @@ -185,7 +185,7 @@ string[] prompts = ["What is AI?", "What is .NET?", "What is AI?"]; foreach (var prompt in prompts) { - await foreach (var update in client.CompleteStreamingAsync(prompt)) + await foreach (var update in client.GetStreamingResponseAsync(prompt)) { Console.Write(update); } @@ -212,12 +212,12 @@ IChatClient client = new ChatClientBuilder(new SampleChatClient(new Uri("http:// .UseOpenTelemetry(sourceName, c => c.EnableSensitiveData = true) .Build(); -Console.WriteLine((await client.CompleteAsync("What is AI?")).Message); +Console.WriteLine((await client.GetResponseAsync("What is AI?")).Message); ``` #### Options -Every call to `CompleteAsync` or `CompleteStreamingAsync` may optionally supply a `ChatOptions` instance containing additional parameters for the operation. The most common parameters that are common amongst AI models and services show up as strongly-typed properties on the type, such as `ChatOptions.Temperature`. 
Other parameters may be supplied by name in a weakly-typed manner via the `ChatOptions.AdditionalProperties` dictionary. +Every call to `GetResponseAsync` or `GetStreamingResponseAsync` may optionally supply a `ChatOptions` instance containing additional parameters for the operation. The most common parameters that are common amongst AI models and services show up as strongly-typed properties on the type, such as `ChatOptions.Temperature`. Other parameters may be supplied by name in a weakly-typed manner via the `ChatOptions.AdditionalProperties` dictionary. Options may also be baked into an `IChatClient` via the `ConfigureOptions` extension method on `ChatClientBuilder`. This delegating client wraps another client and invokes the supplied delegate to populate a `ChatOptions` instance for every call. For example, to ensure that the `ChatOptions.ModelId` property defaults to a particular model name, code like the following may be used: ```csharp @@ -227,8 +227,8 @@ IChatClient client = new ChatClientBuilder(new OllamaChatClient(new Uri("http:// .ConfigureOptions(options => options.ModelId ??= "phi3") .Build(); -Console.WriteLine(await client.CompleteAsync("What is AI?")); // will request "phi3" -Console.WriteLine(await client.CompleteAsync("What is AI?", new() { ModelId = "llama3.1" })); // will request "llama3.1" +Console.WriteLine(await client.GetResponseAsync("What is AI?")); // will request "phi3" +Console.WriteLine(await client.GetResponseAsync("What is AI?", new() { ModelId = "llama3.1" })); // will request "llama3.1" ``` #### Pipelines of Functionality @@ -273,13 +273,13 @@ for (int i = 0; i < 3; i++) new ChatMessage(ChatRole.User, "Do I need an umbrella?") ]; - Console.WriteLine(await client.CompleteAsync(history, options)); + Console.WriteLine(await client.GetResponseAsync(history, options)); } ``` #### Custom `IChatClient` Middleware -Anyone can layer in such additional functionality. 
While it's possible to implement `IChatClient` directly, the `DelegatingChatClient` class is an implementation of the `IChatClient` interface that serves as a base class for creating chat clients that delegate their operations to another `IChatClient` instance. It is designed to facilitate the chaining of multiple clients, allowing calls to be passed through to an underlying client. The class provides default implementations for methods such as `CompleteAsync`, `CompleteStreamingAsync`, and `Dispose`, simply forwarding the calls to the inner client instance. A derived type may then override just the methods it needs to in order to augment the behavior, delegating to the base implementation in order to forward the call along to the wrapped client. This setup is useful for creating flexible and modular chat clients that can be easily extended and composed. +Anyone can layer in such additional functionality. While it's possible to implement `IChatClient` directly, the `DelegatingChatClient` class is an implementation of the `IChatClient` interface that serves as a base class for creating chat clients that delegate their operations to another `IChatClient` instance. It is designed to facilitate the chaining of multiple clients, allowing calls to be passed through to an underlying client. The class provides default implementations for methods such as `GetResponseAsync`, `GetStreamingResponseAsync`, and `Dispose`, simply forwarding the calls to the inner client instance. A derived type may then override just the methods it needs to in order to augment the behavior, delegating to the base implementation in order to forward the call along to the wrapped client. This setup is useful for creating flexible and modular chat clients that can be easily extended and composed. 
Here is an example class derived from `DelegatingChatClient` to provide rate limiting functionality, utilizing the [System.Threading.RateLimiting](https://www.nuget.org/packages/System.Threading.RateLimiting) library: ```csharp @@ -288,24 +288,24 @@ using System.Threading.RateLimiting; public sealed class RateLimitingChatClient(IChatClient innerClient, RateLimiter rateLimiter) : DelegatingChatClient(innerClient) { - public override async Task CompleteAsync( + public override async Task GetResponseAsync( IList chatMessages, ChatOptions? options = null, CancellationToken cancellationToken = default) { using var lease = await rateLimiter.AcquireAsync(permitCount: 1, cancellationToken).ConfigureAwait(false); if (!lease.IsAcquired) throw new InvalidOperationException("Unable to acquire lease."); - return await base.CompleteAsync(chatMessages, options, cancellationToken).ConfigureAwait(false); + return await base.GetResponseAsync(chatMessages, options, cancellationToken).ConfigureAwait(false); } - public override async IAsyncEnumerable CompleteStreamingAsync( + public override async IAsyncEnumerable GetStreamingResponseAsync( IList chatMessages, ChatOptions? 
options = null, [EnumeratorCancellation] CancellationToken cancellationToken = default) { using var lease = await rateLimiter.AcquireAsync(permitCount: 1, cancellationToken).ConfigureAwait(false); if (!lease.IsAcquired) throw new InvalidOperationException("Unable to acquire lease."); - await foreach (var update in base.CompleteStreamingAsync(chatMessages, options, cancellationToken).ConfigureAwait(false)) + await foreach (var update in base.GetStreamingResponseAsync(chatMessages, options, cancellationToken).ConfigureAwait(false)) yield return update; } @@ -329,7 +329,7 @@ var client = new RateLimitingChatClient( new SampleChatClient(new Uri("http://localhost"), "test"), new ConcurrencyLimiter(new() { PermitLimit = 1, QueueLimit = int.MaxValue })); -await client.CompleteAsync("What color is the sky?"); +await client.GetResponseAsync("What color is the sky?"); ``` To make it easier to compose such components with others, the author of the component is recommended to create a "Use" extension method for registering this component into a pipeline, e.g. @@ -361,9 +361,9 @@ var client = new SampleChatClient(new Uri("http://localhost"), "test") ``` The above extension methods demonstrate using a `Use` method on `ChatClientBuilder`. `ChatClientBuilder` also provides `Use` overloads that make it easier to -write such delegating handlers. For example, in the earlier `RateLimitingChatClient` example, the overrides of `CompleteAsync` and `CompleteStreamingAsync` only +write such delegating handlers. For example, in the earlier `RateLimitingChatClient` example, the overrides of `GetResponseAsync` and `GetStreamingResponseAsync` only need to do work before and after delegating to the next client in the pipeline. 
To achieve the same thing without writing a custom class, an overload of `Use` may -be used that accepts a delegate which is used for both `CompleteAsync` and `CompleteStreamingAsync`, reducing the boilderplate required: +be used that accepts a delegate which is used for both `GetResponseAsync` and `GetStreamingResponseAsync`, reducing the boilerplate required: ```csharp RateLimiter rateLimiter = ...; var client = new SampleChatClient(new Uri("http://localhost"), "test") @@ -402,7 +402,7 @@ var client = new SampleChatClient(new Uri("http://localhost"), "test") .Build(); ``` -For scenarios where the developer would like to specify delegating implementations of `CompleteAsync` and `CompleteStreamingAsync` inline, +For scenarios where the developer would like to specify delegating implementations of `GetResponseAsync` and `GetStreamingResponseAsync` inline, and where it's important to be able to write a different implementation for each in order to handle their unique return types specially, another overload of `Use` exists that accepts a delegate for each. @@ -424,7 +424,7 @@ var host = builder.Build(); // Elsewhere in the app var chatClient = host.Services.GetRequiredService(); -Console.WriteLine(await chatClient.CompleteAsync("What is AI?")); +Console.WriteLine(await chatClient.GetResponseAsync("What is AI?")); ``` What instance and configuration is injected may differ based on the current needs of the application, and multiple pipelines may be injected with different keys.
diff --git a/src/Libraries/Microsoft.Extensions.AI.Abstractions/Utilities/AIJsonUtilities.Defaults.cs b/src/Libraries/Microsoft.Extensions.AI.Abstractions/Utilities/AIJsonUtilities.Defaults.cs index 9ae0fac76c4..dff7fbab373 100644 --- a/src/Libraries/Microsoft.Extensions.AI.Abstractions/Utilities/AIJsonUtilities.Defaults.cs +++ b/src/Libraries/Microsoft.Extensions.AI.Abstractions/Utilities/AIJsonUtilities.Defaults.cs @@ -82,9 +82,9 @@ private static JsonSerializerOptions CreateDefaultOptions() [JsonSerializable(typeof(EmbeddingGenerationOptions))] [JsonSerializable(typeof(ChatClientMetadata))] [JsonSerializable(typeof(EmbeddingGeneratorMetadata))] - [JsonSerializable(typeof(ChatCompletion))] - [JsonSerializable(typeof(StreamingChatCompletionUpdate))] - [JsonSerializable(typeof(IReadOnlyList))] + [JsonSerializable(typeof(ChatResponse))] + [JsonSerializable(typeof(ChatResponseUpdate))] + [JsonSerializable(typeof(IReadOnlyList))] [JsonSerializable(typeof(Dictionary))] [JsonSerializable(typeof(IDictionary))] [JsonSerializable(typeof(IDictionary))] diff --git a/src/Libraries/Microsoft.Extensions.AI.AzureAIInference/AzureAIInferenceChatClient.cs b/src/Libraries/Microsoft.Extensions.AI.AzureAIInference/AzureAIInferenceChatClient.cs index 137b2d80e70..3447fa2be28 100644 --- a/src/Libraries/Microsoft.Extensions.AI.AzureAIInference/AzureAIInferenceChatClient.cs +++ b/src/Libraries/Microsoft.Extensions.AI.AzureAIInference/AzureAIInferenceChatClient.cs @@ -77,7 +77,7 @@ public JsonSerializerOptions ToolCallJsonSerializerOptions } /// - public async Task CompleteAsync( + public async Task GetResponseAsync( IList chatMessages, ChatOptions? options = null, CancellationToken cancellationToken = default) { _ = Throw.IfNull(chatMessages); @@ -129,20 +129,20 @@ public async Task CompleteAsync( }; } - // Wrap the content in a ChatCompletion to return. - return new ChatCompletion(returnMessages) + // Wrap the content in a ChatResponse to return. 
+ return new ChatResponse(returnMessages) { - CompletionId = response.Id, CreatedAt = response.Created, ModelId = response.Model, FinishReason = ToFinishReason(response.FinishReason), RawRepresentation = response, + ResponseId = response.Id, Usage = usage, }; } /// - public async IAsyncEnumerable CompleteStreamingAsync( + public async IAsyncEnumerable GetStreamingResponseAsync( IList chatMessages, ChatOptions? options = null, [EnumeratorCancellation] CancellationToken cancellationToken = default) { _ = Throw.IfNull(chatMessages); @@ -150,7 +150,7 @@ public async IAsyncEnumerable CompleteStreamingAs Dictionary? functionCallInfos = null; ChatRole? streamedRole = default; ChatFinishReason? finishReason = default; - string? completionId = null; + string? responseId = null; DateTimeOffset? createdAt = null; string? modelId = null; string lastCallId = string.Empty; @@ -162,25 +162,25 @@ public async IAsyncEnumerable CompleteStreamingAs // The role and finish reason may arrive during any update, but once they've arrived, the same value should be the same for all subsequent updates. streamedRole ??= chatCompletionUpdate.Role is global::Azure.AI.Inference.ChatRole role ? ToChatRole(role) : null; finishReason ??= chatCompletionUpdate.FinishReason is CompletionsFinishReason reason ? ToFinishReason(reason) : null; - completionId ??= chatCompletionUpdate.Id; + responseId ??= chatCompletionUpdate.Id; createdAt ??= chatCompletionUpdate.Created; modelId ??= chatCompletionUpdate.Model; // Create the response content object. - StreamingChatCompletionUpdate completionUpdate = new() + ChatResponseUpdate responseUpdate = new() { - CompletionId = chatCompletionUpdate.Id, CreatedAt = chatCompletionUpdate.Created, FinishReason = finishReason, ModelId = modelId, RawRepresentation = chatCompletionUpdate, + ResponseId = chatCompletionUpdate.Id, Role = streamedRole, }; // Transfer over content update items. 
if (chatCompletionUpdate.ContentUpdate is string update) { - completionUpdate.Contents.Add(new TextContent(update)); + responseUpdate.Contents.Add(new TextContent(update)); } // Transfer over tool call updates. @@ -213,7 +213,7 @@ public async IAsyncEnumerable CompleteStreamingAs if (chatCompletionUpdate.Usage is { } usage) { - completionUpdate.Contents.Add(new UsageContent(new() + responseUpdate.Contents.Add(new UsageContent(new() { InputTokenCount = usage.PromptTokens, OutputTokenCount = usage.CompletionTokens, @@ -222,18 +222,18 @@ public async IAsyncEnumerable CompleteStreamingAs } // Now yield the item. - yield return completionUpdate; + yield return responseUpdate; } // Now that we've received all updates, combine any for function calls into a single item to yield. if (functionCallInfos is not null) { - var completionUpdate = new StreamingChatCompletionUpdate + var responseUpdate = new ChatResponseUpdate { - CompletionId = completionId, CreatedAt = createdAt, FinishReason = finishReason, ModelId = modelId, + ResponseId = responseId, Role = streamedRole, }; @@ -246,11 +246,11 @@ public async IAsyncEnumerable CompleteStreamingAs fci.Arguments?.ToString() ?? string.Empty, entry.Key, fci.Name!); - completionUpdate.Contents.Add(callContent); + responseUpdate.Contents.Add(callContent); } } - yield return completionUpdate; + yield return responseUpdate; } } @@ -309,7 +309,7 @@ private ChatCompletionsOptions ToAzureAIOptions(IList chatContents, } } - // These properties are strongly typed on ChatOptions but not on ChatCompletionsOptions. + // These properties are strongly typed on ChatOptions but not on ChatCompletionsOptions.
if (options.TopK is int topK) { result.AdditionalProperties["top_k"] = new BinaryData(JsonSerializer.SerializeToUtf8Bytes(topK, AIJsonUtilities.DefaultOptions.GetTypeInfo(typeof(int)))); @@ -321,7 +321,7 @@ private ChatCompletionsOptions ToAzureAIOptions(IList chatContents, { switch (prop.Key) { - // Propagate everything else to the ChatCompletionOptions' AdditionalProperties. + // Propagate everything else to the ChatCompletionsOptions' AdditionalProperties. default: if (prop.Value is not null) { diff --git a/src/Libraries/Microsoft.Extensions.AI.AzureAIInference/README.md b/src/Libraries/Microsoft.Extensions.AI.AzureAIInference/README.md index b781f1ae4c3..c22f6306204 100644 --- a/src/Libraries/Microsoft.Extensions.AI.AzureAIInference/README.md +++ b/src/Libraries/Microsoft.Extensions.AI.AzureAIInference/README.md @@ -32,7 +32,7 @@ IChatClient client = new AzureKeyCredential(Environment.GetEnvironmentVariable("GH_TOKEN")!)) .AsChatClient("gpt-4o-mini"); -Console.WriteLine(await client.CompleteAsync("What is AI?")); +Console.WriteLine(await client.GetResponseAsync("What is AI?")); ``` > **Note:** When connecting with Azure Open AI, the URL passed into the `ChatCompletionsClient` needs to include `openai/deployments/{yourDeployment}`.
For example: @@ -54,7 +54,7 @@ IChatClient client = new AzureKeyCredential(Environment.GetEnvironmentVariable("GH_TOKEN")!)) .AsChatClient("gpt-4o-mini"); -Console.WriteLine(await client.CompleteAsync( +Console.WriteLine(await client.GetResponseAsync( [ new ChatMessage(ChatRole.System, "You are a helpful AI assistant"), new ChatMessage(ChatRole.User, "What is AI?"), @@ -73,7 +73,7 @@ IChatClient client = new AzureKeyCredential(Environment.GetEnvironmentVariable("GH_TOKEN")!)) .AsChatClient("gpt-4o-mini"); -await foreach (var update in client.CompleteStreamingAsync("What is AI?")) +await foreach (var update in client.GetStreamingResponseAsync("What is AI?")) { Console.Write(update); } @@ -101,7 +101,7 @@ ChatOptions chatOptions = new() Tools = [AIFunctionFactory.Create(GetWeather)] }; -await foreach (var message in client.CompleteStreamingAsync("Do I need an umbrella?", chatOptions)) +await foreach (var message in client.GetStreamingResponseAsync("Do I need an umbrella?", chatOptions)) { Console.Write(message); } @@ -133,7 +133,7 @@ IChatClient client = new ChatClientBuilder(azureClient) for (int i = 0; i < 3; i++) { - await foreach (var message in client.CompleteStreamingAsync("In less than 100 words, what is AI?")) + await foreach (var message in client.GetStreamingResponseAsync("In less than 100 words, what is AI?")) { Console.Write(message); } @@ -167,7 +167,7 @@ IChatClient client = new ChatClientBuilder(azureClient) .UseOpenTelemetry(sourceName, c => c.EnableSensitiveData = true) .Build(); -Console.WriteLine(await client.CompleteAsync("What is AI?")); +Console.WriteLine(await client.GetResponseAsync("What is AI?")); ``` ### Telemetry, Caching, and Tool Calling @@ -211,7 +211,7 @@ IChatClient client = new ChatClientBuilder(azureClient) for (int i = 0; i < 3; i++) { - Console.WriteLine(await client.CompleteAsync("How much older is Alice than Bob?", chatOptions)); + Console.WriteLine(await client.GetResponseAsync("How much older is Alice than Bob?", 
chatOptions)); } [Description("Gets the age of a person specified by name.")] @@ -251,7 +251,7 @@ var app = builder.Build(); // Elsewhere in the app var chatClient = app.Services.GetRequiredService(); -Console.WriteLine(await chatClient.CompleteAsync("What is AI?")); +Console.WriteLine(await chatClient.GetResponseAsync("What is AI?")); ``` ### Minimal Web API @@ -274,7 +274,7 @@ var app = builder.Build(); app.MapPost("/chat", async (IChatClient client, string message) => { - var response = await client.CompleteAsync(message); + var response = await client.GetResponseAsync(message); return response.Message; }); diff --git a/src/Libraries/Microsoft.Extensions.AI.Ollama/OllamaChatClient.cs b/src/Libraries/Microsoft.Extensions.AI.Ollama/OllamaChatClient.cs index 450b83bd6a2..efeff58d592 100644 --- a/src/Libraries/Microsoft.Extensions.AI.Ollama/OllamaChatClient.cs +++ b/src/Libraries/Microsoft.Extensions.AI.Ollama/OllamaChatClient.cs @@ -78,7 +78,7 @@ public JsonSerializerOptions ToolCallJsonSerializerOptions } /// - public async Task CompleteAsync(IList chatMessages, ChatOptions? options = null, CancellationToken cancellationToken = default) + public async Task GetResponseAsync(IList chatMessages, ChatOptions? options = null, CancellationToken cancellationToken = default) { _ = Throw.IfNull(chatMessages); @@ -104,16 +104,16 @@ public async Task CompleteAsync(IList chatMessages, return new([FromOllamaMessage(response.Message!)]) { - CompletionId = response.CreatedAt, - ModelId = response.Model ?? options?.ModelId ?? _metadata.ModelId, CreatedAt = DateTimeOffset.TryParse(response.CreatedAt, CultureInfo.InvariantCulture, DateTimeStyles.None, out DateTimeOffset createdAt) ? createdAt : null, FinishReason = ToFinishReason(response), + ModelId = response.Model ?? options?.ModelId ?? 
_metadata.ModelId, + ResponseId = response.CreatedAt, Usage = ParseOllamaChatResponseUsage(response), }; } /// - public async IAsyncEnumerable CompleteStreamingAsync( + public async IAsyncEnumerable GetStreamingResponseAsync( IList chatMessages, ChatOptions? options = null, [EnumeratorCancellation] CancellationToken cancellationToken = default) { _ = Throw.IfNull(chatMessages); @@ -152,13 +152,13 @@ public async IAsyncEnumerable CompleteStreamingAs string? modelId = chunk.Model ?? _metadata.ModelId; - StreamingChatCompletionUpdate update = new() + ChatResponseUpdate update = new() { - CompletionId = chunk.CreatedAt, - Role = chunk.Message?.Role is not null ? new ChatRole(chunk.Message.Role) : null, CreatedAt = DateTimeOffset.TryParse(chunk.CreatedAt, CultureInfo.InvariantCulture, DateTimeStyles.None, out DateTimeOffset createdAt) ? createdAt : null, FinishReason = ToFinishReason(chunk), ModelId = modelId, + ResponseId = chunk.CreatedAt, + Role = chunk.Message?.Role is not null ? new ChatRole(chunk.Message.Role) : null, }; if (chunk.Message is { } message) diff --git a/src/Libraries/Microsoft.Extensions.AI.Ollama/README.md b/src/Libraries/Microsoft.Extensions.AI.Ollama/README.md index e468965b9a8..bb0c360d877 100644 --- a/src/Libraries/Microsoft.Extensions.AI.Ollama/README.md +++ b/src/Libraries/Microsoft.Extensions.AI.Ollama/README.md @@ -27,7 +27,7 @@ using Microsoft.Extensions.AI; IChatClient client = new OllamaChatClient(new Uri("http://localhost:11434/"), "llama3.1"); -Console.WriteLine(await client.CompleteAsync("What is AI?")); +Console.WriteLine(await client.GetResponseAsync("What is AI?")); ``` ### Chat + Conversation History @@ -37,7 +37,7 @@ using Microsoft.Extensions.AI; IChatClient client = new OllamaChatClient(new Uri("http://localhost:11434/"), "llama3.1"); -Console.WriteLine(await client.CompleteAsync( +Console.WriteLine(await client.GetResponseAsync( [ new ChatMessage(ChatRole.System, "You are a helpful AI assistant"), new ChatMessage(ChatRole.User, 
"What is AI?"), @@ -51,7 +51,7 @@ using Microsoft.Extensions.AI; IChatClient client = new OllamaChatClient(new Uri("http://localhost:11434/"), "llama3.1"); -await foreach (var update in client.CompleteStreamingAsync("What is AI?")) +await foreach (var update in client.GetStreamingResponseAsync("What is AI?")) { Console.Write(update); } @@ -79,7 +79,7 @@ ChatOptions chatOptions = new() Tools = [AIFunctionFactory.Create(GetWeather)] }; -Console.WriteLine(await client.CompleteAsync("Do I need an umbrella?", chatOptions)); +Console.WriteLine(await client.GetResponseAsync("Do I need an umbrella?", chatOptions)); [Description("Gets the weather")] static string GetWeather() => Random.Shared.NextDouble() > 0.5 ? "It's sunny" : "It's raining"; @@ -103,7 +103,7 @@ IChatClient client = new ChatClientBuilder(ollamaClient) for (int i = 0; i < 3; i++) { - await foreach (var message in client.CompleteStreamingAsync("In less than 100 words, what is AI?")) + await foreach (var message in client.GetStreamingResponseAsync("In less than 100 words, what is AI?")) { Console.Write(message); } @@ -132,7 +132,7 @@ IChatClient client = new ChatClientBuilder(ollamaClient) .UseOpenTelemetry(sourceName, c => c.EnableSensitiveData = true) .Build(); -Console.WriteLine(await client.CompleteAsync("What is AI?")); +Console.WriteLine(await client.GetResponseAsync("What is AI?")); ``` ### Telemetry, Caching, and Tool Calling @@ -171,7 +171,7 @@ IChatClient client = new ChatClientBuilder(ollamaClient) for (int i = 0; i < 3; i++) { - Console.WriteLine(await client.CompleteAsync("How much older is Alice than Bob?", chatOptions)); + Console.WriteLine(await client.GetResponseAsync("How much older is Alice than Bob?", chatOptions)); } [Description("Gets the age of a person specified by name.")] @@ -243,7 +243,7 @@ var app = builder.Build(); // Elsewhere in the app var chatClient = app.Services.GetRequiredService(); -Console.WriteLine(await chatClient.CompleteAsync("What is AI?")); +Console.WriteLine(await 
chatClient.GetResponseAsync("What is AI?")); ``` ### Minimal Web API @@ -262,7 +262,7 @@ var app = builder.Build(); app.MapPost("/chat", async (IChatClient client, string message) => { - var response = await client.CompleteAsync(message, cancellationToken: default); + var response = await client.GetResponseAsync(message, cancellationToken: default); return response.Message; }); diff --git a/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIAssistantClient.cs b/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIAssistantClient.cs index b820fde3134..110ea0bf7fe 100644 --- a/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIAssistantClient.cs +++ b/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIAssistantClient.cs @@ -24,7 +24,7 @@ namespace Microsoft.Extensions.AI; -/// Represents an for an OpenAI or . +/// Represents an for an OpenAI or . internal sealed class OpenAIAssistantClient : IChatClient { /// Metadata for the client. @@ -69,12 +69,12 @@ public OpenAIAssistantClient(AssistantClient assistantClient, string assistantId } /// - public Task CompleteAsync( + public Task GetResponseAsync( IList chatMessages, ChatOptions? options = null, CancellationToken cancellationToken = default) => - CompleteStreamingAsync(chatMessages, options, cancellationToken).ToChatCompletionAsync(coalesceContent: true, cancellationToken); + GetStreamingResponseAsync(chatMessages, options, cancellationToken).ToChatResponseAsync(coalesceContent: true, cancellationToken); /// - public async IAsyncEnumerable CompleteStreamingAsync( + public async IAsyncEnumerable GetStreamingResponseAsync( IList chatMessages, ChatOptions? options = null, [EnumeratorCancellation] CancellationToken cancellationToken = default) { // Extract necessary state from chatMessages and options. 
@@ -133,14 +133,14 @@ public async IAsyncEnumerable CompleteStreamingAs case RunUpdate ru: threadId ??= ru.Value.ThreadId; - StreamingChatCompletionUpdate ruUpdate = new() + ChatResponseUpdate ruUpdate = new() { AuthorName = ru.Value.AssistantId, ChatThreadId = threadId, - CompletionId = ru.Value.Id, CreatedAt = ru.Value.CreatedAt, ModelId = ru.Value.Model, RawRepresentation = ru, + ResponseId = ru.Value.Id, Role = ChatRole.Assistant, }; diff --git a/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIChatClient.cs b/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIChatClient.cs index acd73142dcf..f9e254aea0b 100644 --- a/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIChatClient.cs +++ b/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIChatClient.cs @@ -18,7 +18,7 @@ namespace Microsoft.Extensions.AI; -/// Represents an for an OpenAI or . +/// Represents an for an OpenAI or . public sealed class OpenAIChatClient : IChatClient { /// Gets the default OpenAI endpoint. @@ -99,7 +99,7 @@ public JsonSerializerOptions ToolCallJsonSerializerOptions } /// - public async Task CompleteAsync( + public async Task GetResponseAsync( IList chatMessages, ChatOptions? options = null, CancellationToken cancellationToken = default) { _ = Throw.IfNull(chatMessages); @@ -110,11 +110,11 @@ public async Task CompleteAsync( // Make the call to OpenAI. var response = await _chatClient.CompleteChatAsync(openAIChatMessages, openAIOptions, cancellationToken).ConfigureAwait(false); - return OpenAIModelMappers.FromOpenAIChatCompletion(response.Value, options, openAIOptions); + return OpenAIModelMappers.FromOpenAIChatResponse(response.Value, options, openAIOptions); } /// - public IAsyncEnumerable CompleteStreamingAsync( + public IAsyncEnumerable GetStreamingResponseAsync( IList chatMessages, ChatOptions? 
options = null, CancellationToken cancellationToken = default) { _ = Throw.IfNull(chatMessages); @@ -125,7 +125,7 @@ public IAsyncEnumerable CompleteStreamingAsync( // Make the call to OpenAI. var chatCompletionUpdates = _chatClient.CompleteChatStreamingAsync(openAIChatMessages, openAIOptions, cancellationToken); - return OpenAIModelMappers.FromOpenAIStreamingChatCompletionAsync(chatCompletionUpdates, cancellationToken); + return OpenAIModelMappers.FromOpenAIStreamingChatResponseAsync(chatCompletionUpdates, cancellationToken); } /// diff --git a/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIChatCompletionRequest.cs b/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIChatCompletionRequest.cs index 5b7d7fe5a34..6a28c9f5490 100644 --- a/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIChatCompletionRequest.cs +++ b/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIChatCompletionRequest.cs @@ -19,17 +19,17 @@ namespace Microsoft.Extensions.AI; public sealed class OpenAIChatCompletionRequest { /// - /// Gets the chat messages specified in the completion request. + /// Gets the chat messages specified in the request. /// public required IList Messages { get; init; } /// - /// Gets the chat options governing the completion request. + /// Gets the chat options governing the request. /// public required ChatOptions Options { get; init; } /// - /// Gets a value indicating whether the completion response should be streamed. + /// Gets a value indicating whether the response should be streamed. 
/// public bool Stream { get; init; } diff --git a/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIModelMapper.ChatCompletion.cs b/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIModelMapper.ChatCompletion.cs index e7eba21e4a8..53b18e0abb7 100644 --- a/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIModelMapper.ChatCompletion.cs +++ b/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIModelMapper.ChatCompletion.cs @@ -28,17 +28,17 @@ internal static partial class OpenAIModelMappers { internal static JsonElement DefaultParameterSchema { get; } = JsonDocument.Parse("{}").RootElement; - public static OpenAI.Chat.ChatCompletion ToOpenAIChatCompletion(ChatCompletion chatCompletion, JsonSerializerOptions options) + public static ChatCompletion ToOpenAIChatResponse(ChatResponse response, JsonSerializerOptions options) { - _ = Throw.IfNull(chatCompletion); + _ = Throw.IfNull(response); - if (chatCompletion.Choices.Count > 1) + if (response.Choices.Count > 1) { throw new NotSupportedException("Creating OpenAI ChatCompletion models with multiple choices is currently not supported."); } List? toolCalls = null; - foreach (AIContent content in chatCompletion.Message.Contents) + foreach (AIContent content in response.Message.Contents) { if (content is FunctionCallContent callRequest) { @@ -53,27 +53,27 @@ public static OpenAI.Chat.ChatCompletion ToOpenAIChatCompletion(ChatCompletion c } ChatTokenUsage? chatTokenUsage = null; - if (chatCompletion.Usage is UsageDetails usageDetails) + if (response.Usage is UsageDetails usageDetails) { chatTokenUsage = ToOpenAIUsage(usageDetails); } return OpenAIChatModelFactory.ChatCompletion( - id: chatCompletion.CompletionId ?? CreateCompletionId(), - model: chatCompletion.ModelId, - createdAt: chatCompletion.CreatedAt ?? 
DateTimeOffset.UtcNow, - role: ToOpenAIChatRole(chatCompletion.Message.Role).Value, - finishReason: ToOpenAIFinishReason(chatCompletion.FinishReason), - content: new(ToOpenAIChatContent(chatCompletion.Message.Contents)), + id: response.ResponseId ?? CreateCompletionId(), + model: response.ModelId, + createdAt: response.CreatedAt ?? DateTimeOffset.UtcNow, + role: ToOpenAIChatRole(response.Message.Role).Value, + finishReason: ToOpenAIFinishReason(response.FinishReason), + content: new(ToOpenAIChatContent(response.Message.Contents)), toolCalls: toolCalls, - refusal: chatCompletion.AdditionalProperties.GetValueOrDefault(nameof(OpenAI.Chat.ChatCompletion.Refusal)), - contentTokenLogProbabilities: chatCompletion.AdditionalProperties.GetValueOrDefault>(nameof(OpenAI.Chat.ChatCompletion.ContentTokenLogProbabilities)), - refusalTokenLogProbabilities: chatCompletion.AdditionalProperties.GetValueOrDefault>(nameof(OpenAI.Chat.ChatCompletion.RefusalTokenLogProbabilities)), - systemFingerprint: chatCompletion.AdditionalProperties.GetValueOrDefault(nameof(OpenAI.Chat.ChatCompletion.SystemFingerprint)), + refusal: response.AdditionalProperties.GetValueOrDefault(nameof(ChatCompletion.Refusal)), + contentTokenLogProbabilities: response.AdditionalProperties.GetValueOrDefault>(nameof(ChatCompletion.ContentTokenLogProbabilities)), + refusalTokenLogProbabilities: response.AdditionalProperties.GetValueOrDefault>(nameof(ChatCompletion.RefusalTokenLogProbabilities)), + systemFingerprint: response.AdditionalProperties.GetValueOrDefault(nameof(ChatCompletion.SystemFingerprint)), usage: chatTokenUsage); } - public static ChatCompletion FromOpenAIChatCompletion(OpenAI.Chat.ChatCompletion openAICompletion, ChatOptions? options, ChatCompletionOptions chatCompletionOptions) + public static ChatResponse FromOpenAIChatResponse(ChatCompletion openAICompletion, ChatOptions? 
options, ChatCompletionOptions chatCompletionOptions) { _ = Throw.IfNull(openAICompletion); @@ -139,42 +139,42 @@ public static ChatCompletion FromOpenAIChatCompletion(OpenAI.Chat.ChatCompletion } } - // Wrap the content in a ChatCompletion to return. - var completion = new ChatCompletion([returnMessage]) + // Wrap the content in a ChatResponse to return. + var response = new ChatResponse([returnMessage]) { - CompletionId = openAICompletion.Id, CreatedAt = openAICompletion.CreatedAt, FinishReason = FromOpenAIFinishReason(openAICompletion.FinishReason), ModelId = openAICompletion.Model, RawRepresentation = openAICompletion, + ResponseId = openAICompletion.Id, }; if (openAICompletion.Usage is ChatTokenUsage tokenUsage) { - completion.Usage = FromOpenAIUsage(tokenUsage); + response.Usage = FromOpenAIUsage(tokenUsage); } if (openAICompletion.ContentTokenLogProbabilities is { Count: > 0 } contentTokenLogProbs) { - (completion.AdditionalProperties ??= [])[nameof(openAICompletion.ContentTokenLogProbabilities)] = contentTokenLogProbs; + (response.AdditionalProperties ??= [])[nameof(openAICompletion.ContentTokenLogProbabilities)] = contentTokenLogProbs; } if (openAICompletion.Refusal is string refusal) { - (completion.AdditionalProperties ??= [])[nameof(openAICompletion.Refusal)] = refusal; + (response.AdditionalProperties ??= [])[nameof(openAICompletion.Refusal)] = refusal; } if (openAICompletion.RefusalTokenLogProbabilities is { Count: > 0 } refusalTokenLogProbs) { - (completion.AdditionalProperties ??= [])[nameof(openAICompletion.RefusalTokenLogProbabilities)] = refusalTokenLogProbs; + (response.AdditionalProperties ??= [])[nameof(openAICompletion.RefusalTokenLogProbabilities)] = refusalTokenLogProbs; } if (openAICompletion.SystemFingerprint is string systemFingerprint) { - (completion.AdditionalProperties ??= [])[nameof(openAICompletion.SystemFingerprint)] = systemFingerprint; + (response.AdditionalProperties ??= [])[nameof(openAICompletion.SystemFingerprint)] = 
systemFingerprint; } - return completion; + return response; } public static ChatOptions FromOpenAIOptions(ChatCompletionOptions? options) diff --git a/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIModelMapper.ChatMessage.cs b/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIModelMapper.ChatMessage.cs index 399ca5484f0..c051c208f1e 100644 --- a/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIModelMapper.ChatMessage.cs +++ b/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIModelMapper.ChatMessage.cs @@ -23,8 +23,8 @@ public static OpenAIChatCompletionRequest FromOpenAIChatCompletionRequest(OpenAI return new() { Messages = messages, - Options = chatOptions, ModelId = chatOptions.ModelId, + Options = chatOptions, Stream = _getStreamAccessor(chatCompletionOptions) ?? false, }; } diff --git a/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIModelMappers.StreamingChatCompletion.cs b/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIModelMappers.StreamingChatCompletion.cs index 5d743a1baea..f77424ee528 100644 --- a/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIModelMappers.StreamingChatCompletion.cs +++ b/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIModelMappers.StreamingChatCompletion.cs @@ -18,17 +18,17 @@ namespace Microsoft.Extensions.AI; internal static partial class OpenAIModelMappers { - public static async IAsyncEnumerable ToOpenAIStreamingChatCompletionAsync( - IAsyncEnumerable chatCompletions, + public static async IAsyncEnumerable ToOpenAIStreamingChatResponseAsync( + IAsyncEnumerable updates, JsonSerializerOptions options, [EnumeratorCancellation] CancellationToken cancellationToken = default) { - await foreach (var chatCompletionUpdate in chatCompletions.WithCancellation(cancellationToken).ConfigureAwait(false)) + await foreach (var update in updates.WithCancellation(cancellationToken).ConfigureAwait(false)) { List? toolCallUpdates = null; ChatTokenUsage? 
chatTokenUsage = null; - foreach (var content in chatCompletionUpdate.Contents) + foreach (var content in update.Contents) { if (content is FunctionCallContent functionCallContent) { @@ -46,92 +46,92 @@ internal static partial class OpenAIModelMappers } yield return OpenAIChatModelFactory.StreamingChatCompletionUpdate( - completionId: chatCompletionUpdate.CompletionId ?? CreateCompletionId(), - model: chatCompletionUpdate.ModelId, - createdAt: chatCompletionUpdate.CreatedAt ?? DateTimeOffset.UtcNow, - role: ToOpenAIChatRole(chatCompletionUpdate.Role), - finishReason: chatCompletionUpdate.FinishReason is null ? null : ToOpenAIFinishReason(chatCompletionUpdate.FinishReason), - contentUpdate: [.. ToOpenAIChatContent(chatCompletionUpdate.Contents)], + completionId: update.ResponseId ?? CreateCompletionId(), + model: update.ModelId, + createdAt: update.CreatedAt ?? DateTimeOffset.UtcNow, + role: ToOpenAIChatRole(update.Role), + finishReason: update.FinishReason is null ? null : ToOpenAIFinishReason(update.FinishReason), + contentUpdate: [.. 
ToOpenAIChatContent(update.Contents)], toolCallUpdates: toolCallUpdates, - refusalUpdate: chatCompletionUpdate.AdditionalProperties.GetValueOrDefault(nameof(OpenAI.Chat.StreamingChatCompletionUpdate.RefusalUpdate)), - contentTokenLogProbabilities: chatCompletionUpdate.AdditionalProperties.GetValueOrDefault>(nameof(OpenAI.Chat.StreamingChatCompletionUpdate.ContentTokenLogProbabilities)), - refusalTokenLogProbabilities: chatCompletionUpdate.AdditionalProperties.GetValueOrDefault>(nameof(OpenAI.Chat.StreamingChatCompletionUpdate.RefusalTokenLogProbabilities)), - systemFingerprint: chatCompletionUpdate.AdditionalProperties.GetValueOrDefault(nameof(OpenAI.Chat.StreamingChatCompletionUpdate.SystemFingerprint)), + refusalUpdate: update.AdditionalProperties.GetValueOrDefault(nameof(StreamingChatCompletionUpdate.RefusalUpdate)), + contentTokenLogProbabilities: update.AdditionalProperties.GetValueOrDefault>(nameof(StreamingChatCompletionUpdate.ContentTokenLogProbabilities)), + refusalTokenLogProbabilities: update.AdditionalProperties.GetValueOrDefault>(nameof(StreamingChatCompletionUpdate.RefusalTokenLogProbabilities)), + systemFingerprint: update.AdditionalProperties.GetValueOrDefault(nameof(StreamingChatCompletionUpdate.SystemFingerprint)), usage: chatTokenUsage); } } - public static async IAsyncEnumerable FromOpenAIStreamingChatCompletionAsync( - IAsyncEnumerable chatCompletionUpdates, + public static async IAsyncEnumerable FromOpenAIStreamingChatResponseAsync( + IAsyncEnumerable updates, [EnumeratorCancellation] CancellationToken cancellationToken = default) { Dictionary? functionCallInfos = null; ChatRole? streamedRole = null; ChatFinishReason? finishReason = null; StringBuilder? refusal = null; - string? completionId = null; + string? responseId = null; DateTimeOffset? createdAt = null; string? modelId = null; string? 
fingerprint = null; // Process each update as it arrives - await foreach (OpenAI.Chat.StreamingChatCompletionUpdate chatCompletionUpdate in chatCompletionUpdates.WithCancellation(cancellationToken).ConfigureAwait(false)) + await foreach (StreamingChatCompletionUpdate update in updates.WithCancellation(cancellationToken).ConfigureAwait(false)) { // The role and finish reason may arrive during any update, but once they've arrived, the same value should be the same for all subsequent updates. - streamedRole ??= chatCompletionUpdate.Role is ChatMessageRole role ? FromOpenAIChatRole(role) : null; - finishReason ??= chatCompletionUpdate.FinishReason is OpenAI.Chat.ChatFinishReason reason ? FromOpenAIFinishReason(reason) : null; - completionId ??= chatCompletionUpdate.CompletionId; - createdAt ??= chatCompletionUpdate.CreatedAt; - modelId ??= chatCompletionUpdate.Model; - fingerprint ??= chatCompletionUpdate.SystemFingerprint; + streamedRole ??= update.Role is ChatMessageRole role ? FromOpenAIChatRole(role) : null; + finishReason ??= update.FinishReason is OpenAI.Chat.ChatFinishReason reason ? FromOpenAIFinishReason(reason) : null; + responseId ??= update.CompletionId; + createdAt ??= update.CreatedAt; + modelId ??= update.Model; + fingerprint ??= update.SystemFingerprint; // Create the response content object. - StreamingChatCompletionUpdate completionUpdate = new() + ChatResponseUpdate responseUpdate = new() { - CompletionId = chatCompletionUpdate.CompletionId, - CreatedAt = chatCompletionUpdate.CreatedAt, + ResponseId = update.CompletionId, + CreatedAt = update.CreatedAt, FinishReason = finishReason, ModelId = modelId, - RawRepresentation = chatCompletionUpdate, + RawRepresentation = update, Role = streamedRole, }; // Populate it with any additional metadata from the OpenAI object. 
- if (chatCompletionUpdate.ContentTokenLogProbabilities is { Count: > 0 } contentTokenLogProbs) + if (update.ContentTokenLogProbabilities is { Count: > 0 } contentTokenLogProbs) { - (completionUpdate.AdditionalProperties ??= [])[nameof(chatCompletionUpdate.ContentTokenLogProbabilities)] = contentTokenLogProbs; + (responseUpdate.AdditionalProperties ??= [])[nameof(update.ContentTokenLogProbabilities)] = contentTokenLogProbs; } - if (chatCompletionUpdate.RefusalTokenLogProbabilities is { Count: > 0 } refusalTokenLogProbs) + if (update.RefusalTokenLogProbabilities is { Count: > 0 } refusalTokenLogProbs) { - (completionUpdate.AdditionalProperties ??= [])[nameof(chatCompletionUpdate.RefusalTokenLogProbabilities)] = refusalTokenLogProbs; + (responseUpdate.AdditionalProperties ??= [])[nameof(update.RefusalTokenLogProbabilities)] = refusalTokenLogProbs; } if (fingerprint is not null) { - (completionUpdate.AdditionalProperties ??= [])[nameof(chatCompletionUpdate.SystemFingerprint)] = fingerprint; + (responseUpdate.AdditionalProperties ??= [])[nameof(update.SystemFingerprint)] = fingerprint; } // Transfer over content update items. - if (chatCompletionUpdate.ContentUpdate is { Count: > 0 }) + if (update.ContentUpdate is { Count: > 0 }) { - foreach (ChatMessageContentPart contentPart in chatCompletionUpdate.ContentUpdate) + foreach (ChatMessageContentPart contentPart in update.ContentUpdate) { if (ToAIContent(contentPart) is AIContent aiContent) { - completionUpdate.Contents.Add(aiContent); + responseUpdate.Contents.Add(aiContent); } } } // Transfer over refusal updates. - if (chatCompletionUpdate.RefusalUpdate is not null) + if (update.RefusalUpdate is not null) { - _ = (refusal ??= new()).Append(chatCompletionUpdate.RefusalUpdate); + _ = (refusal ??= new()).Append(update.RefusalUpdate); } // Transfer over tool call updates. 
- if (chatCompletionUpdate.ToolCallUpdates is { Count: > 0 } toolCallUpdates) + if (update.ToolCallUpdates is { Count: > 0 } toolCallUpdates) { foreach (StreamingChatToolCallUpdate toolCallUpdate in toolCallUpdates) { @@ -143,30 +143,30 @@ public static async IAsyncEnumerable FromOpenAISt existing.CallId ??= toolCallUpdate.ToolCallId; existing.Name ??= toolCallUpdate.FunctionName; - if (toolCallUpdate.FunctionArgumentsUpdate is { } update && !update.ToMemory().IsEmpty) + if (toolCallUpdate.FunctionArgumentsUpdate is { } argUpdate && !argUpdate.ToMemory().IsEmpty) { - _ = (existing.Arguments ??= new()).Append(update.ToString()); + _ = (existing.Arguments ??= new()).Append(argUpdate.ToString()); } } } // Transfer over usage updates. - if (chatCompletionUpdate.Usage is ChatTokenUsage tokenUsage) + if (update.Usage is ChatTokenUsage tokenUsage) { var usageDetails = FromOpenAIUsage(tokenUsage); - completionUpdate.Contents.Add(new UsageContent(usageDetails)); + responseUpdate.Contents.Add(new UsageContent(usageDetails)); } // Now yield the item. - yield return completionUpdate; + yield return responseUpdate; } // Now that we've received all updates, combine any for function calls into a single item to yield. if (functionCallInfos is not null) { - StreamingChatCompletionUpdate completionUpdate = new() + ChatResponseUpdate responseUpdate = new() { - CompletionId = completionId, + ResponseId = responseId, CreatedAt = createdAt, FinishReason = finishReason, ModelId = modelId, @@ -182,7 +182,7 @@ public static async IAsyncEnumerable FromOpenAISt fci.Arguments?.ToString() ?? string.Empty, fci.CallId!, fci.Name!); - completionUpdate.Contents.Add(callContent); + responseUpdate.Contents.Add(callContent); } } @@ -190,16 +190,16 @@ public static async IAsyncEnumerable FromOpenAISt // add it to this function calling item. 
if (refusal is not null) { - (completionUpdate.AdditionalProperties ??= [])[nameof(ChatMessageContentPart.Refusal)] = refusal.ToString(); + (responseUpdate.AdditionalProperties ??= [])[nameof(ChatMessageContentPart.Refusal)] = refusal.ToString(); } // Propagate additional relevant metadata. if (fingerprint is not null) { - (completionUpdate.AdditionalProperties ??= [])[nameof(OpenAI.Chat.ChatCompletion.SystemFingerprint)] = fingerprint; + (responseUpdate.AdditionalProperties ??= [])[nameof(ChatCompletion.SystemFingerprint)] = fingerprint; } - yield return completionUpdate; + yield return responseUpdate; } } } diff --git a/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIRealtimeExtensions.cs b/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIRealtimeExtensions.cs index db12baf962d..695e4d569d6 100644 --- a/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIRealtimeExtensions.cs +++ b/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIRealtimeExtensions.cs @@ -52,7 +52,7 @@ public static ConversationFunctionTool ToConversationFunctionTool(this AIFunctio /// An optional flag specifying whether to disclose detailed exception information to the model. The default value is . /// An optional that controls JSON handling. /// An optional . - /// A that represents the completion of processing, including invoking any asynchronous tools. + /// A that represents the response of processing, including invoking any asynchronous tools. 
public static async Task HandleToolCallsAsync( this RealtimeConversationSession session, ConversationUpdate update, diff --git a/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAISerializationHelpers.cs b/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAISerializationHelpers.cs index 899a69630b8..9c6b91c5f28 100644 --- a/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAISerializationHelpers.cs +++ b/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAISerializationHelpers.cs @@ -36,50 +36,50 @@ public static async Task DeserializeChatCompletionR } /// - /// Serializes a Microsoft.Extensions.AI completion using the OpenAI wire format. + /// Serializes a Microsoft.Extensions.AI response using the OpenAI wire format. /// /// The stream to write the value. - /// The chat completion to serialize. + /// The chat response to serialize. /// The governing function call content serialization. /// A token used to cancel the serialization operation. /// A task tracking the serialization operation. public static async Task SerializeAsync( Stream stream, - ChatCompletion chatCompletion, + ChatResponse response, JsonSerializerOptions? options = null, CancellationToken cancellationToken = default) { _ = Throw.IfNull(stream); - _ = Throw.IfNull(chatCompletion); + _ = Throw.IfNull(response); options ??= AIJsonUtilities.DefaultOptions; - OpenAI.Chat.ChatCompletion openAiChatCompletion = OpenAIModelMappers.ToOpenAIChatCompletion(chatCompletion, options); - BinaryData binaryData = JsonModelHelpers.Serialize(openAiChatCompletion); + ChatCompletion openAiChatResponse = OpenAIModelMappers.ToOpenAIChatResponse(response, options); + BinaryData binaryData = JsonModelHelpers.Serialize(openAiChatResponse); await stream.WriteAsync(binaryData.ToMemory(), cancellationToken).ConfigureAwait(false); } /// - /// Serializes a Microsoft.Extensions.AI streaming completion using the OpenAI wire format. + /// Serializes a Microsoft.Extensions.AI streaming response using the OpenAI wire format. 
/// /// The stream to write the value. - /// The streaming chat completions to serialize. + /// The chat response updates to serialize. /// The governing function call content serialization. /// A token used to cancel the serialization operation. /// A task tracking the serialization operation. public static Task SerializeStreamingAsync( Stream stream, - IAsyncEnumerable streamingChatCompletionUpdates, + IAsyncEnumerable updates, JsonSerializerOptions? options = null, CancellationToken cancellationToken = default) { _ = Throw.IfNull(stream); - _ = Throw.IfNull(streamingChatCompletionUpdates); + _ = Throw.IfNull(updates); options ??= AIJsonUtilities.DefaultOptions; - var mappedUpdates = OpenAIModelMappers.ToOpenAIStreamingChatCompletionAsync(streamingChatCompletionUpdates, options, cancellationToken); + var mappedUpdates = OpenAIModelMappers.ToOpenAIStreamingChatResponseAsync(updates, options, cancellationToken); return SseFormatter.WriteAsync(ToSseEventsAsync(mappedUpdates), stream, FormatAsSseEvent, cancellationToken); - static async IAsyncEnumerable> ToSseEventsAsync(IAsyncEnumerable updates) + static async IAsyncEnumerable> ToSseEventsAsync(IAsyncEnumerable updates) { await foreach (var update in updates.ConfigureAwait(false)) { diff --git a/src/Libraries/Microsoft.Extensions.AI.OpenAI/README.md b/src/Libraries/Microsoft.Extensions.AI.OpenAI/README.md index dacafd33a7f..0c5b32472e0 100644 --- a/src/Libraries/Microsoft.Extensions.AI.OpenAI/README.md +++ b/src/Libraries/Microsoft.Extensions.AI.OpenAI/README.md @@ -30,7 +30,7 @@ IChatClient client = new OpenAIClient(Environment.GetEnvironmentVariable("OPENAI_API_KEY")) .AsChatClient("gpt-4o-mini"); -Console.WriteLine(await client.CompleteAsync("What is AI?")); +Console.WriteLine(await client.GetResponseAsync("What is AI?")); ``` ### Chat + Conversation History @@ -43,7 +43,7 @@ IChatClient client = new OpenAIClient(Environment.GetEnvironmentVariable("OPENAI_API_KEY")) .AsChatClient("gpt-4o-mini"); 
-Console.WriteLine(await client.CompleteAsync( +Console.WriteLine(await client.GetResponseAsync( [ new ChatMessage(ChatRole.System, "You are a helpful AI assistant"), new ChatMessage(ChatRole.User, "What is AI?"), @@ -60,7 +60,7 @@ IChatClient client = new OpenAIClient(Environment.GetEnvironmentVariable("OPENAI_API_KEY")) .AsChatClient("gpt-4o-mini"); -await foreach (var update in client.CompleteStreamingAsync("What is AI?")) +await foreach (var update in client.GetStreamingResponseAsync("What is AI?")) { Console.Write(update); } @@ -86,7 +86,7 @@ ChatOptions chatOptions = new() Tools = [AIFunctionFactory.Create(GetWeather)] }; -await foreach (var message in client.CompleteStreamingAsync("Do I need an umbrella?", chatOptions)) +await foreach (var message in client.GetStreamingResponseAsync("Do I need an umbrella?", chatOptions)) { Console.Write(message); } @@ -116,7 +116,7 @@ IChatClient client = new ChatClientBuilder(openaiClient) for (int i = 0; i < 3; i++) { - await foreach (var message in client.CompleteStreamingAsync("In less than 100 words, what is AI?")) + await foreach (var message in client.GetStreamingResponseAsync("In less than 100 words, what is AI?")) { Console.Write(message); } @@ -148,7 +148,7 @@ IChatClient client = new ChatClientBuilder(openaiClient) .UseOpenTelemetry(sourceName, c => c.EnableSensitiveData = true) .Build(); -Console.WriteLine(await client.CompleteAsync("What is AI?")); +Console.WriteLine(await client.GetResponseAsync("What is AI?")); ``` ### Telemetry, Caching, and Tool Calling @@ -190,7 +190,7 @@ IChatClient client = new ChatClientBuilder(openaiClient) for (int i = 0; i < 3; i++) { - Console.WriteLine(await client.CompleteAsync("How much older is Alice than Bob?", chatOptions)); + Console.WriteLine(await client.GetResponseAsync("How much older is Alice than Bob?", chatOptions)); } [Description("Gets the age of a person specified by name.")] @@ -268,7 +268,7 @@ var app = builder.Build(); // Elsewhere in the app var chatClient = 
app.Services.GetRequiredService(); -Console.WriteLine(await chatClient.CompleteAsync("What is AI?")); +Console.WriteLine(await chatClient.GetResponseAsync("What is AI?")); ``` ### Minimal Web API @@ -291,7 +291,7 @@ var app = builder.Build(); app.MapPost("/chat", async (IChatClient client, string message) => { - var response = await client.CompleteAsync(message); + var response = await client.GetResponseAsync(message); return response.Message; }); diff --git a/src/Libraries/Microsoft.Extensions.AI/CachingHelpers.cs b/src/Libraries/Microsoft.Extensions.AI/CachingHelpers.cs index 3b5f5531755..e0d6a5a3ac1 100644 --- a/src/Libraries/Microsoft.Extensions.AI/CachingHelpers.cs +++ b/src/Libraries/Microsoft.Extensions.AI/CachingHelpers.cs @@ -42,7 +42,7 @@ public static string GetCacheKey(ReadOnlySpan values, JsonSerializerOpt // We need to ensure that the value in ThreadStaticInstance is always ready to use. // If we start using an instance, write to it, and then fail, we will have left it // in an inconsistent state. So, when renting it, we null it out, and we only put - // it back upon successful completion after resetting it. + // it back upon successful response after resetting it. IncrementalHashStream.ThreadStaticInstance = null; } else diff --git a/src/Libraries/Microsoft.Extensions.AI/ChatCompletion/AnonymousDelegatingChatClient.cs b/src/Libraries/Microsoft.Extensions.AI/ChatCompletion/AnonymousDelegatingChatClient.cs index ebd5477f177..79682381f70 100644 --- a/src/Libraries/Microsoft.Extensions.AI/ChatCompletion/AnonymousDelegatingChatClient.cs +++ b/src/Libraries/Microsoft.Extensions.AI/ChatCompletion/AnonymousDelegatingChatClient.cs @@ -16,18 +16,18 @@ namespace Microsoft.Extensions.AI; /// Represents a delegating chat client that wraps an inner client with implementations provided by delegates. public sealed class AnonymousDelegatingChatClient : DelegatingChatClient { - /// The delegate to use as the implementation of . 
- private readonly Func, ChatOptions?, IChatClient, CancellationToken, Task>? _completeFunc; + /// The delegate to use as the implementation of . + private readonly Func, ChatOptions?, IChatClient, CancellationToken, Task>? _getResponseFunc; - /// The delegate to use as the implementation of . + /// The delegate to use as the implementation of . /// - /// When non-, this delegate is used as the implementation of and + /// When non-, this delegate is used as the implementation of and /// will be invoked with the same arguments as the method itself, along with a reference to the inner client. - /// When , will delegate directly to the inner client. + /// When , will delegate directly to the inner client. /// - private readonly Func, ChatOptions?, IChatClient, CancellationToken, IAsyncEnumerable>? _completeStreamingFunc; + private readonly Func, ChatOptions?, IChatClient, CancellationToken, IAsyncEnumerable>? _getStreamingResponseFunc; - /// The delegate to use as the implementation of both and . + /// The delegate to use as the implementation of both and . private readonly CompleteSharedFunc? _sharedFunc; /// @@ -35,7 +35,7 @@ public sealed class AnonymousDelegatingChatClient : DelegatingChatClient /// /// The inner client. /// - /// A delegate that provides the implementation for both and . + /// A delegate that provides the implementation for both and . /// In addition to the arguments for the operation, it's provided with a delegate to the inner client that should be /// used to perform the operation on the inner client. It will handle both the non-streaming and streaming cases. /// @@ -57,77 +57,77 @@ public AnonymousDelegatingChatClient(IChatClient innerClient, CompleteSharedFunc /// Initializes a new instance of the class. /// /// The inner client. - /// - /// A delegate that provides the implementation for . When , - /// must be non-null, and the implementation of - /// will use for the implementation. 
+ /// + /// A delegate that provides the implementation for . When , + /// must be non-null, and the implementation of + /// will use for the implementation. /// - /// - /// A delegate that provides the implementation for . When , - /// must be non-null, and the implementation of - /// will use for the implementation. + /// + /// A delegate that provides the implementation for . When , + /// must be non-null, and the implementation of + /// will use for the implementation. /// /// is . - /// Both and are . + /// Both and are . public AnonymousDelegatingChatClient( IChatClient innerClient, - Func, ChatOptions?, IChatClient, CancellationToken, Task>? completeFunc, - Func, ChatOptions?, IChatClient, CancellationToken, IAsyncEnumerable>? completeStreamingFunc) + Func, ChatOptions?, IChatClient, CancellationToken, Task>? getResponseFunc, + Func, ChatOptions?, IChatClient, CancellationToken, IAsyncEnumerable>? getStreamingResponseFunc) : base(innerClient) { - ThrowIfBothDelegatesNull(completeFunc, completeStreamingFunc); + ThrowIfBothDelegatesNull(getResponseFunc, getStreamingResponseFunc); - _completeFunc = completeFunc; - _completeStreamingFunc = completeStreamingFunc; + _getResponseFunc = getResponseFunc; + _getStreamingResponseFunc = getStreamingResponseFunc; } /// - public override Task CompleteAsync( + public override Task GetResponseAsync( IList chatMessages, ChatOptions? options = null, CancellationToken cancellationToken = default) { _ = Throw.IfNull(chatMessages); if (_sharedFunc is not null) { - return CompleteViaSharedAsync(chatMessages, options, cancellationToken); + return GetResponseViaSharedAsync(chatMessages, options, cancellationToken); - async Task CompleteViaSharedAsync(IList chatMessages, ChatOptions? options, CancellationToken cancellationToken) + async Task GetResponseViaSharedAsync(IList chatMessages, ChatOptions? options, CancellationToken cancellationToken) { - ChatCompletion? completion = null; + ChatResponse? 
response = null; await _sharedFunc(chatMessages, options, async (chatMessages, options, cancellationToken) => { - completion = await InnerClient.CompleteAsync(chatMessages, options, cancellationToken).ConfigureAwait(false); + response = await InnerClient.GetResponseAsync(chatMessages, options, cancellationToken).ConfigureAwait(false); }, cancellationToken).ConfigureAwait(false); - if (completion is null) + if (response is null) { - throw new InvalidOperationException("The wrapper completed successfully without producing a ChatCompletion."); + throw new InvalidOperationException("The wrapper completed successfully without producing a ChatResponse."); } - return completion; + return response; } } - else if (_completeFunc is not null) + else if (_getResponseFunc is not null) { - return _completeFunc(chatMessages, options, InnerClient, cancellationToken); + return _getResponseFunc(chatMessages, options, InnerClient, cancellationToken); } else { - Debug.Assert(_completeStreamingFunc is not null, "Expected non-null streaming delegate."); - return _completeStreamingFunc!(chatMessages, options, InnerClient, cancellationToken) - .ToChatCompletionAsync(coalesceContent: true, cancellationToken); + Debug.Assert(_getStreamingResponseFunc is not null, "Expected non-null streaming delegate."); + return _getStreamingResponseFunc!(chatMessages, options, InnerClient, cancellationToken) + .ToChatResponseAsync(coalesceContent: true, cancellationToken); } } /// - public override IAsyncEnumerable CompleteStreamingAsync( + public override IAsyncEnumerable GetStreamingResponseAsync( IList chatMessages, ChatOptions? 
options = null, CancellationToken cancellationToken = default) { _ = Throw.IfNull(chatMessages); if (_sharedFunc is not null) { - var updates = Channel.CreateBounded(1); + var updates = Channel.CreateBounded(1); #pragma warning disable CA2016 // explicitly not forwarding the cancellation token, as we need to ensure the channel is always completed _ = Task.Run(async () => @@ -138,7 +138,7 @@ public override IAsyncEnumerable CompleteStreamin { await _sharedFunc(chatMessages, options, async (chatMessages, options, cancellationToken) => { - await foreach (var update in InnerClient.CompleteStreamingAsync(chatMessages, options, cancellationToken).ConfigureAwait(false)) + await foreach (var update in InnerClient.GetStreamingResponseAsync(chatMessages, options, cancellationToken).ConfigureAwait(false)) { await updates.Writer.WriteAsync(update, cancellationToken).ConfigureAwait(false); } @@ -157,19 +157,19 @@ await _sharedFunc(chatMessages, options, async (chatMessages, options, cancellat return updates.Reader.ReadAllAsync(cancellationToken); } - else if (_completeStreamingFunc is not null) + else if (_getStreamingResponseFunc is not null) { - return _completeStreamingFunc(chatMessages, options, InnerClient, cancellationToken); + return _getStreamingResponseFunc(chatMessages, options, InnerClient, cancellationToken); } else { - Debug.Assert(_completeFunc is not null, "Expected non-null non-streaming delegate."); - return CompleteStreamingAsyncViaCompleteAsync(_completeFunc!(chatMessages, options, InnerClient, cancellationToken)); + Debug.Assert(_getResponseFunc is not null, "Expected non-null non-streaming delegate."); + return CompleteStreamingAsyncViaCompleteAsync(_getResponseFunc!(chatMessages, options, InnerClient, cancellationToken)); - static async IAsyncEnumerable CompleteStreamingAsyncViaCompleteAsync(Task task) + static async IAsyncEnumerable CompleteStreamingAsyncViaCompleteAsync(Task task) { - ChatCompletion completion = await task.ConfigureAwait(false); - 
foreach (var update in completion.ToStreamingChatCompletionUpdates()) + ChatResponse response = await task.ConfigureAwait(false); + foreach (var update in response.ToChatResponseUpdates()) { yield return update; } @@ -193,18 +193,18 @@ internal static void ThrowIfBothDelegatesNull(object? completeFunc, object? comp // signature with the nextAsync delegate parameter. /// - /// Represents a method used to call or . + /// Represents a method used to call or . /// /// The chat content to send. /// The chat options to configure the request. /// - /// A delegate that provides the implementation for the inner client's or - /// . It should be invoked to continue the pipeline. It accepts + /// A delegate that provides the implementation for the inner client's or + /// . It should be invoked to continue the pipeline. It accepts /// the chat messages, options, and cancellation token, which are typically the same instances as provided to this method /// but need not be. /// /// The to monitor for cancellation requests. The default is . - /// A that represents the completion of the operation. + /// A that represents the response of the operation. public delegate Task CompleteSharedFunc( IList chatMessages, ChatOptions? options, diff --git a/src/Libraries/Microsoft.Extensions.AI/ChatCompletion/CachingChatClient.cs b/src/Libraries/Microsoft.Extensions.AI/ChatCompletion/CachingChatClient.cs index 698025e8901..5229d7d6031 100644 --- a/src/Libraries/Microsoft.Extensions.AI/ChatCompletion/CachingChatClient.cs +++ b/src/Libraries/Microsoft.Extensions.AI/ChatCompletion/CachingChatClient.cs @@ -45,7 +45,7 @@ protected CachingChatClient(IChatClient innerClient) public bool CoalesceStreamingUpdates { get; set; } = true; /// - public override async Task CompleteAsync(IList chatMessages, ChatOptions? options = null, CancellationToken cancellationToken = default) + public override async Task GetResponseAsync(IList chatMessages, ChatOptions? 
options = null, CancellationToken cancellationToken = default) { _ = Throw.IfNull(chatMessages); @@ -56,7 +56,7 @@ public override async Task CompleteAsync(IList chat if (await ReadCacheAsync(cacheKey, cancellationToken).ConfigureAwait(false) is not { } result) { - result = await base.CompleteAsync(chatMessages, options, cancellationToken).ConfigureAwait(false); + result = await base.GetResponseAsync(chatMessages, options, cancellationToken).ConfigureAwait(false); await WriteCacheAsync(cacheKey, result, cancellationToken).ConfigureAwait(false); } @@ -64,7 +64,7 @@ public override async Task CompleteAsync(IList chat } /// - public override async IAsyncEnumerable CompleteStreamingAsync( + public override async IAsyncEnumerable GetStreamingResponseAsync( IList chatMessages, ChatOptions? options = null, [EnumeratorCancellation] CancellationToken cancellationToken = default) { _ = Throw.IfNull(chatMessages); @@ -76,10 +76,10 @@ public override async IAsyncEnumerable CompleteSt // result and cache it. When we get a cache hit, we yield the non-streaming result as a streaming one. var cacheKey = GetCacheKey(_boxedTrue, chatMessages, options); - if (await ReadCacheAsync(cacheKey, cancellationToken).ConfigureAwait(false) is { } chatCompletion) + if (await ReadCacheAsync(cacheKey, cancellationToken).ConfigureAwait(false) is { } chatResponse) { // Yield all of the cached items. - foreach (var chunk in chatCompletion.ToStreamingChatCompletionUpdates()) + foreach (var chunk in chatResponse.ToChatResponseUpdates()) { yield return chunk; } @@ -87,15 +87,15 @@ public override async IAsyncEnumerable CompleteSt else { // Yield and store all of the items. 
- List capturedItems = []; - await foreach (var chunk in base.CompleteStreamingAsync(chatMessages, options, cancellationToken).ConfigureAwait(false)) + List capturedItems = []; + await foreach (var chunk in base.GetStreamingResponseAsync(chatMessages, options, cancellationToken).ConfigureAwait(false)) { capturedItems.Add(chunk); yield return chunk; } // Write the captured items to the cache as a non-streaming result. - await WriteCacheAsync(cacheKey, capturedItems.ToChatCompletion(), cancellationToken).ConfigureAwait(false); + await WriteCacheAsync(cacheKey, capturedItems.ToChatResponse(), cancellationToken).ConfigureAwait(false); } } else @@ -112,8 +112,8 @@ public override async IAsyncEnumerable CompleteSt else { // Yield and store all of the items. - List capturedItems = []; - await foreach (var chunk in base.CompleteStreamingAsync(chatMessages, options, cancellationToken).ConfigureAwait(false)) + List capturedItems = []; + await foreach (var chunk in base.GetStreamingResponseAsync(chatMessages, options, cancellationToken).ConfigureAwait(false)) { capturedItems.Add(chunk); yield return chunk; @@ -131,40 +131,40 @@ public override async IAsyncEnumerable CompleteSt protected abstract string GetCacheKey(params ReadOnlySpan values); /// - /// Returns a previously cached , if available. - /// This is used when there is a call to . + /// Returns a previously cached , if available. + /// This is used when there is a call to . /// /// The cache key. /// The to monitor for cancellation requests. /// The previously cached data, if available, otherwise . - protected abstract Task ReadCacheAsync(string key, CancellationToken cancellationToken); + protected abstract Task ReadCacheAsync(string key, CancellationToken cancellationToken); /// - /// Returns a previously cached list of values, if available. - /// This is used when there is a call to . + /// Returns a previously cached list of values, if available. + /// This is used when there is a call to . /// /// The cache key. 
/// The to monitor for cancellation requests. /// The previously cached data, if available, otherwise . - protected abstract Task?> ReadCacheStreamingAsync(string key, CancellationToken cancellationToken); + protected abstract Task?> ReadCacheStreamingAsync(string key, CancellationToken cancellationToken); /// - /// Stores a in the underlying cache. - /// This is used when there is a call to . + /// Stores a in the underlying cache. + /// This is used when there is a call to . /// /// The cache key. - /// The to be stored. + /// The to be stored. /// The to monitor for cancellation requests. - /// A representing the completion of the operation. - protected abstract Task WriteCacheAsync(string key, ChatCompletion value, CancellationToken cancellationToken); + /// A representing the response of the operation. + protected abstract Task WriteCacheAsync(string key, ChatResponse value, CancellationToken cancellationToken); /// - /// Stores a list of values in the underlying cache. - /// This is used when there is a call to . + /// Stores a list of values in the underlying cache. + /// This is used when there is a call to . /// /// The cache key. - /// The to be stored. + /// The to be stored. /// The to monitor for cancellation requests. - /// A representing the completion of the operation. - protected abstract Task WriteCacheStreamingAsync(string key, IReadOnlyList value, CancellationToken cancellationToken); + /// A representing the response of the operation. 
+ protected abstract Task WriteCacheStreamingAsync(string key, IReadOnlyList value, CancellationToken cancellationToken); } diff --git a/src/Libraries/Microsoft.Extensions.AI/ChatCompletion/ChatClientBuilder.cs b/src/Libraries/Microsoft.Extensions.AI/ChatCompletion/ChatClientBuilder.cs index 83d7a749063..1833e255111 100644 --- a/src/Libraries/Microsoft.Extensions.AI/ChatCompletion/ChatClientBuilder.cs +++ b/src/Libraries/Microsoft.Extensions.AI/ChatCompletion/ChatClientBuilder.cs @@ -81,11 +81,11 @@ public ChatClientBuilder Use(Func cl /// /// Adds to the chat client pipeline an anonymous delegating chat client based on a delegate that provides - /// an implementation for both and . + /// an implementation for both and . /// /// - /// A delegate that provides the implementation for both and - /// . In addition to the arguments for the operation, it's + /// A delegate that provides the implementation for both and + /// . In addition to the arguments for the operation, it's /// provided with a delegate to the inner client that should be used to perform the operation on the inner client. /// It will handle both the non-streaming and streaming cases. /// @@ -104,33 +104,33 @@ public ChatClientBuilder Use(AnonymousDelegatingChatClient.CompleteSharedFunc sh /// /// Adds to the chat client pipeline an anonymous delegating chat client based on a delegate that provides - /// an implementation for both and . + /// an implementation for both and . /// /// - /// A delegate that provides the implementation for . When , - /// must be non-null, and the implementation of + /// A delegate that provides the implementation for . When , + /// must be non-null, and the implementation of /// will use for the implementation. /// /// - /// A delegate that provides the implementation for . When , - /// must be non-null, and the implementation of + /// A delegate that provides the implementation for . 
When , + /// must be non-null, and the implementation of /// will use for the implementation. /// /// The updated instance. /// /// One or both delegates may be provided. If both are provided, they will be used for their respective methods: - /// will provide the implementation of , and - /// will provide the implementation of . + /// will provide the implementation of , and + /// will provide the implementation of . /// If only one of the delegates is provided, it will be used for both methods. That means that if - /// is supplied without , the implementation of + /// is supplied without , the implementation of /// will employ limited streaming, as it will be operating on the batch output produced by . And if /// is supplied without , the implementation of - /// will be implemented by combining the updates from . + /// will be implemented by combining the updates from . /// /// Both and are . public ChatClientBuilder Use( - Func, ChatOptions?, IChatClient, CancellationToken, Task>? completeFunc, - Func, ChatOptions?, IChatClient, CancellationToken, IAsyncEnumerable>? completeStreamingFunc) + Func, ChatOptions?, IChatClient, CancellationToken, Task>? completeFunc, + Func, ChatOptions?, IChatClient, CancellationToken, IAsyncEnumerable>? 
completeStreamingFunc) { AnonymousDelegatingChatClient.ThrowIfBothDelegatesNull(completeFunc, completeStreamingFunc); diff --git a/src/Libraries/Microsoft.Extensions.AI/ChatCompletion/ChatClientStructuredOutputExtensions.cs b/src/Libraries/Microsoft.Extensions.AI/ChatCompletion/ChatClientStructuredOutputExtensions.cs index 008ad280dc5..778150f1ac1 100644 --- a/src/Libraries/Microsoft.Extensions.AI/ChatCompletion/ChatClientStructuredOutputExtensions.cs +++ b/src/Libraries/Microsoft.Extensions.AI/ChatCompletion/ChatClientStructuredOutputExtensions.cs @@ -25,7 +25,7 @@ public static class ChatClientStructuredOutputExtensions IncludeTypeInEnumSchemas = true }; - /// Sends chat messages to the model, requesting a response matching the type . + /// Sends chat messages, requesting a response matching the type . /// The . /// The chat content to send. /// The chat options to configure the request. @@ -41,15 +41,15 @@ public static class ChatClientStructuredOutputExtensions /// by the client, including any messages for roundtrips to the model as part of the implementation of this request, will be included. /// /// The type of structured output to request. - public static Task> CompleteAsync( + public static Task> GetResponseAsync( this IChatClient chatClient, IList chatMessages, ChatOptions? options = null, bool? useNativeJsonSchema = null, CancellationToken cancellationToken = default) => - CompleteAsync(chatClient, chatMessages, AIJsonUtilities.DefaultOptions, options, useNativeJsonSchema, cancellationToken); + GetResponseAsync(chatClient, chatMessages, AIJsonUtilities.DefaultOptions, options, useNativeJsonSchema, cancellationToken); - /// Sends a user chat text message to the model, requesting a response matching the type . + /// Sends a user chat text message, requesting a response matching the type . /// The . /// The text content for the chat message to send. /// The chat options to configure the request. 
@@ -61,15 +61,15 @@ public static Task> CompleteAsync( /// The to monitor for cancellation requests. The default is . /// The response messages generated by the client. /// The type of structured output to request. - public static Task> CompleteAsync( + public static Task> GetResponseAsync( this IChatClient chatClient, string chatMessage, ChatOptions? options = null, bool? useNativeJsonSchema = null, CancellationToken cancellationToken = default) => - CompleteAsync(chatClient, new ChatMessage(ChatRole.User, chatMessage), options, useNativeJsonSchema, cancellationToken); + GetResponseAsync(chatClient, new ChatMessage(ChatRole.User, chatMessage), options, useNativeJsonSchema, cancellationToken); - /// Sends a chat message to the model, requesting a response matching the type . + /// Sends a chat message, requesting a response matching the type . /// The . /// The chat message to send. /// The chat options to configure the request. @@ -81,15 +81,15 @@ public static Task> CompleteAsync( /// The to monitor for cancellation requests. The default is . /// The response messages generated by the client. /// The type of structured output to request. - public static Task> CompleteAsync( + public static Task> GetResponseAsync( this IChatClient chatClient, ChatMessage chatMessage, ChatOptions? options = null, bool? useNativeJsonSchema = null, CancellationToken cancellationToken = default) => - CompleteAsync(chatClient, [chatMessage], options, useNativeJsonSchema, cancellationToken); + GetResponseAsync(chatClient, [chatMessage], options, useNativeJsonSchema, cancellationToken); - /// Sends a user chat text message to the model, requesting a response matching the type . + /// Sends a user chat text message, requesting a response matching the type . /// The . /// The text content for the chat message to send. /// The JSON serialization options to use. @@ -102,16 +102,16 @@ public static Task> CompleteAsync( /// The to monitor for cancellation requests. The default is . 
/// The response messages generated by the client. /// The type of structured output to request. - public static Task> CompleteAsync( + public static Task> GetResponseAsync( this IChatClient chatClient, string chatMessage, JsonSerializerOptions serializerOptions, ChatOptions? options = null, bool? useNativeJsonSchema = null, CancellationToken cancellationToken = default) => - CompleteAsync(chatClient, new ChatMessage(ChatRole.User, chatMessage), serializerOptions, options, useNativeJsonSchema, cancellationToken); + GetResponseAsync(chatClient, new ChatMessage(ChatRole.User, chatMessage), serializerOptions, options, useNativeJsonSchema, cancellationToken); - /// Sends a chat message to the model, requesting a response matching the type . + /// Sends a chat message, requesting a response matching the type . /// The . /// The chat message to send. /// The JSON serialization options to use. @@ -124,16 +124,16 @@ public static Task> CompleteAsync( /// The to monitor for cancellation requests. The default is . /// The response messages generated by the client. /// The type of structured output to request. - public static Task> CompleteAsync( + public static Task> GetResponseAsync( this IChatClient chatClient, ChatMessage chatMessage, JsonSerializerOptions serializerOptions, ChatOptions? options = null, bool? useNativeJsonSchema = null, CancellationToken cancellationToken = default) => - CompleteAsync(chatClient, [chatMessage], serializerOptions, options, useNativeJsonSchema, cancellationToken); + GetResponseAsync(chatClient, [chatMessage], serializerOptions, options, useNativeJsonSchema, cancellationToken); - /// Sends chat messages to the model, requesting a response matching the type . + /// Sends chat messages, requesting a response matching the type . /// The . /// The chat content to send. /// The JSON serialization options to use. 
@@ -150,7 +150,7 @@ public static Task> CompleteAsync( /// by the client, including any messages for roundtrips to the model as part of the implementation of this request, will be included. /// /// The type of structured output to request. - public static async Task> CompleteAsync( + public static async Task> GetResponseAsync( this IChatClient chatClient, IList chatMessages, JsonSerializerOptions serializerOptions, @@ -226,8 +226,8 @@ public static async Task> CompleteAsync( try { - var result = await chatClient.CompleteAsync(chatMessages, options, cancellationToken).ConfigureAwait(false); - return new ChatCompletion(result, serializerOptions) { IsWrappedInObject = isWrappedInObject }; + var result = await chatClient.GetResponseAsync(chatMessages, options, cancellationToken).ConfigureAwait(false); + return new ChatResponse(result, serializerOptions) { IsWrappedInObject = isWrappedInObject }; } finally { diff --git a/src/Libraries/Microsoft.Extensions.AI/ChatCompletion/ChatCompletion{T}.cs b/src/Libraries/Microsoft.Extensions.AI/ChatCompletion/ChatResponse{T}.cs similarity index 85% rename from src/Libraries/Microsoft.Extensions.AI/ChatCompletion/ChatCompletion{T}.cs rename to src/Libraries/Microsoft.Extensions.AI/ChatCompletion/ChatResponse{T}.cs index 182ab378b12..36999aac6aa 100644 --- a/src/Libraries/Microsoft.Extensions.AI/ChatCompletion/ChatCompletion{T}.cs +++ b/src/Libraries/Microsoft.Extensions.AI/ChatCompletion/ChatResponse{T}.cs @@ -12,39 +12,39 @@ namespace Microsoft.Extensions.AI; -/// Represents the result of a chat completion request with structured output. -/// The type of value expected from the chat completion. +/// Represents the response to a chat request with structured output. +/// The type of value expected from the chat response. /// /// Language models are not guaranteed to honor the requested schema. If the model's output is not /// parseable as the expected type, then will return . 
-/// You can access the underlying JSON response on the property. +/// You can access the underlying JSON response on the property. /// -public class ChatCompletion : ChatCompletion +public class ChatResponse : ChatResponse { - private static readonly JsonReaderOptions _allowMultipleValuesJsonReaderOptions = new JsonReaderOptions { AllowMultipleValues = true }; + private static readonly JsonReaderOptions _allowMultipleValuesJsonReaderOptions = new() { AllowMultipleValues = true }; private readonly JsonSerializerOptions _serializerOptions; private T? _deserializedResult; private bool _hasDeserializedResult; - /// Initializes a new instance of the class. - /// The unstructured that is being wrapped. + /// Initializes a new instance of the class. + /// The unstructured that is being wrapped. /// The to use when deserializing the result. - public ChatCompletion(ChatCompletion completion, JsonSerializerOptions serializerOptions) - : base(Throw.IfNull(completion).Choices) + public ChatResponse(ChatResponse response, JsonSerializerOptions serializerOptions) + : base(Throw.IfNull(response).Choices) { _serializerOptions = Throw.IfNull(serializerOptions); - CompletionId = completion.CompletionId; - ModelId = completion.ModelId; - CreatedAt = completion.CreatedAt; - FinishReason = completion.FinishReason; - Usage = completion.Usage; - RawRepresentation = completion.RawRepresentation; - AdditionalProperties = completion.AdditionalProperties; + AdditionalProperties = response.AdditionalProperties; + CreatedAt = response.CreatedAt; + FinishReason = response.FinishReason; + ModelId = response.ModelId; + RawRepresentation = response.RawRepresentation; + ResponseId = response.ResponseId; + Usage = response.Usage; } /// - /// Gets the result of the chat completion as an instance of . + /// Gets the result value of the chat response as an instance of . /// /// /// If the response did not contain JSON, or if deserialization fails, this property will throw. 
diff --git a/src/Libraries/Microsoft.Extensions.AI/ChatCompletion/ConfigureOptionsChatClient.cs b/src/Libraries/Microsoft.Extensions.AI/ChatCompletion/ConfigureOptionsChatClient.cs index ce2fe3ca29d..551441139a7 100644 --- a/src/Libraries/Microsoft.Extensions.AI/ChatCompletion/ConfigureOptionsChatClient.cs +++ b/src/Libraries/Microsoft.Extensions.AI/ChatCompletion/ConfigureOptionsChatClient.cs @@ -34,16 +34,16 @@ public ConfigureOptionsChatClient(IChatClient innerClient, Action c } /// - public override async Task CompleteAsync(IList chatMessages, ChatOptions? options = null, CancellationToken cancellationToken = default) + public override async Task GetResponseAsync(IList chatMessages, ChatOptions? options = null, CancellationToken cancellationToken = default) { - return await base.CompleteAsync(chatMessages, Configure(options), cancellationToken).ConfigureAwait(false); + return await base.GetResponseAsync(chatMessages, Configure(options), cancellationToken).ConfigureAwait(false); } /// - public override async IAsyncEnumerable CompleteStreamingAsync( + public override async IAsyncEnumerable GetStreamingResponseAsync( IList chatMessages, ChatOptions? 
options = null, [EnumeratorCancellation] CancellationToken cancellationToken = default) { - await foreach (var update in base.CompleteStreamingAsync(chatMessages, Configure(options), cancellationToken).ConfigureAwait(false)) + await foreach (var update in base.GetStreamingResponseAsync(chatMessages, Configure(options), cancellationToken).ConfigureAwait(false)) { yield return update; } diff --git a/src/Libraries/Microsoft.Extensions.AI/ChatCompletion/DistributedCachingChatClient.cs b/src/Libraries/Microsoft.Extensions.AI/ChatCompletion/DistributedCachingChatClient.cs index a5bee20fa48..364ae19229a 100644 --- a/src/Libraries/Microsoft.Extensions.AI/ChatCompletion/DistributedCachingChatClient.cs +++ b/src/Libraries/Microsoft.Extensions.AI/ChatCompletion/DistributedCachingChatClient.cs @@ -12,7 +12,7 @@ namespace Microsoft.Extensions.AI; /// -/// A delegating chat client that caches the results of completion calls, storing them as JSON in an . +/// A delegating chat client that caches the results of response calls, storing them as JSON in an . 
/// /// /// The provided implementation of is thread-safe for concurrent use so long as the employed @@ -43,52 +43,52 @@ public JsonSerializerOptions JsonSerializerOptions } /// - protected override async Task ReadCacheAsync(string key, CancellationToken cancellationToken) + protected override async Task ReadCacheAsync(string key, CancellationToken cancellationToken) { _ = Throw.IfNull(key); _jsonSerializerOptions.MakeReadOnly(); if (await _storage.GetAsync(key, cancellationToken).ConfigureAwait(false) is byte[] existingJson) { - return (ChatCompletion?)JsonSerializer.Deserialize(existingJson, _jsonSerializerOptions.GetTypeInfo(typeof(ChatCompletion))); + return (ChatResponse?)JsonSerializer.Deserialize(existingJson, _jsonSerializerOptions.GetTypeInfo(typeof(ChatResponse))); } return null; } /// - protected override async Task?> ReadCacheStreamingAsync(string key, CancellationToken cancellationToken) + protected override async Task?> ReadCacheStreamingAsync(string key, CancellationToken cancellationToken) { _ = Throw.IfNull(key); _jsonSerializerOptions.MakeReadOnly(); if (await _storage.GetAsync(key, cancellationToken).ConfigureAwait(false) is byte[] existingJson) { - return (IReadOnlyList?)JsonSerializer.Deserialize(existingJson, _jsonSerializerOptions.GetTypeInfo(typeof(IReadOnlyList))); + return (IReadOnlyList?)JsonSerializer.Deserialize(existingJson, _jsonSerializerOptions.GetTypeInfo(typeof(IReadOnlyList))); } return null; } /// - protected override async Task WriteCacheAsync(string key, ChatCompletion value, CancellationToken cancellationToken) + protected override async Task WriteCacheAsync(string key, ChatResponse value, CancellationToken cancellationToken) { _ = Throw.IfNull(key); _ = Throw.IfNull(value); _jsonSerializerOptions.MakeReadOnly(); - var newJson = JsonSerializer.SerializeToUtf8Bytes(value, _jsonSerializerOptions.GetTypeInfo(typeof(ChatCompletion))); + var newJson = JsonSerializer.SerializeToUtf8Bytes(value, 
_jsonSerializerOptions.GetTypeInfo(typeof(ChatResponse))); await _storage.SetAsync(key, newJson, cancellationToken).ConfigureAwait(false); } /// - protected override async Task WriteCacheStreamingAsync(string key, IReadOnlyList value, CancellationToken cancellationToken) + protected override async Task WriteCacheStreamingAsync(string key, IReadOnlyList value, CancellationToken cancellationToken) { _ = Throw.IfNull(key); _ = Throw.IfNull(value); _jsonSerializerOptions.MakeReadOnly(); - var newJson = JsonSerializer.SerializeToUtf8Bytes(value, _jsonSerializerOptions.GetTypeInfo(typeof(IReadOnlyList))); + var newJson = JsonSerializer.SerializeToUtf8Bytes(value, _jsonSerializerOptions.GetTypeInfo(typeof(IReadOnlyList))); await _storage.SetAsync(key, newJson, cancellationToken).ConfigureAwait(false); } diff --git a/src/Libraries/Microsoft.Extensions.AI/ChatCompletion/FunctionInvokingChatClient.cs b/src/Libraries/Microsoft.Extensions.AI/ChatCompletion/FunctionInvokingChatClient.cs index 6e3923ed1ad..83522fca6c8 100644 --- a/src/Libraries/Microsoft.Extensions.AI/ChatCompletion/FunctionInvokingChatClient.cs +++ b/src/Libraries/Microsoft.Extensions.AI/ChatCompletion/FunctionInvokingChatClient.cs @@ -22,7 +22,7 @@ namespace Microsoft.Extensions.AI; /// /// /// -/// When this client receives a in a chat completion, it responds +/// When this client receives a in a chat response, it responds /// by calling the corresponding defined in , /// producing a . /// @@ -145,7 +145,7 @@ public FunctionInvokingChatClient(IChatClient innerClient, ILogger? logger = nul /// /// /// if intermediate messages persist in the list provided - /// to and by the caller. + /// to and by the caller. /// if intermediate messages are removed prior to completing the operation. /// The default value is . /// @@ -157,7 +157,7 @@ public FunctionInvokingChatClient(IChatClient innerClient, ILogger? logger = nul /// it creates with the results of invoking the requested functions. 
The resulting augmented /// list of messages is then passed to the inner client in order to send the results back. /// By default, those messages persist in the list provided to - /// and by the caller, such that those + /// and by the caller, such that those /// messages are available to the caller. Set to avoid including /// those messages in the caller-provided . /// @@ -166,7 +166,7 @@ public FunctionInvokingChatClient(IChatClient innerClient, ILogger? logger = nul /// as to whether function calling messages are kept during an in-flight request. /// /// - /// If the underlying responds with + /// If the underlying responds with /// set to a non- value, this property may be ignored and behave as if it is /// , with any such intermediate messages not stored in the messages list. /// @@ -209,15 +209,15 @@ public int? MaximumIterationsPerRequest } /// - public override async Task CompleteAsync(IList chatMessages, ChatOptions? options = null, CancellationToken cancellationToken = default) + public override async Task GetResponseAsync(IList chatMessages, ChatOptions? options = null, CancellationToken cancellationToken = default) { _ = Throw.IfNull(chatMessages); - // A single request into this CompleteAsync may result in multiple requests to the inner client. + // A single request into this GetResponseAsync may result in multiple requests to the inner client. // Create an activity to group them together for better observability. using Activity? activity = _activitySource?.StartActivity(nameof(FunctionInvokingChatClient)); - ChatCompletion? response = null; + ChatResponse? response = null; UsageDetails? totalUsage = null; IList originalChatMessages = chatMessages; try @@ -225,7 +225,7 @@ public override async Task CompleteAsync(IList chat for (int iteration = 0; ; iteration++) { // Make the call to the handler. 
- response = await base.CompleteAsync(chatMessages, options, cancellationToken).ConfigureAwait(false); + response = await base.GetResponseAsync(chatMessages, options, cancellationToken).ConfigureAwait(false); // Aggregate usage data over all calls if (response.Usage is not null) @@ -322,12 +322,12 @@ public override async Task CompleteAsync(IList chat } /// - public override async IAsyncEnumerable CompleteStreamingAsync( + public override async IAsyncEnumerable GetStreamingResponseAsync( IList chatMessages, ChatOptions? options = null, [EnumeratorCancellation] CancellationToken cancellationToken = default) { _ = Throw.IfNull(chatMessages); - // A single request into this CompleteStreamingAsync may result in multiple requests to the inner client. + // A single request into this GetStreamingResponseAsync may result in multiple requests to the inner client. // Create an activity to group them together for better observability. using Activity? activity = _activitySource?.StartActivity(nameof(FunctionInvokingChatClient)); @@ -339,7 +339,7 @@ public override async IAsyncEnumerable CompleteSt choice = null; string? chatThreadId = null; functionCallContents.Clear(); - await foreach (var update in base.CompleteStreamingAsync(chatMessages, options, cancellationToken).ConfigureAwait(false)) + await foreach (var update in base.GetStreamingResponseAsync(chatMessages, options, cancellationToken).ConfigureAwait(false)) { // We're going to emit all StreamingChatMessage items upstream, even ones that represent // function calls, because a given StreamingChatMessage can contain other content, too. 
diff --git a/src/Libraries/Microsoft.Extensions.AI/ChatCompletion/LoggingChatClient.cs b/src/Libraries/Microsoft.Extensions.AI/ChatCompletion/LoggingChatClient.cs index 9e8fb07c43f..b8e15718b78 100644 --- a/src/Libraries/Microsoft.Extensions.AI/ChatCompletion/LoggingChatClient.cs +++ b/src/Libraries/Microsoft.Extensions.AI/ChatCompletion/LoggingChatClient.cs @@ -43,86 +43,86 @@ public JsonSerializerOptions JsonSerializerOptions } /// - public override async Task CompleteAsync( + public override async Task GetResponseAsync( IList chatMessages, ChatOptions? options = null, CancellationToken cancellationToken = default) { if (_logger.IsEnabled(LogLevel.Debug)) { if (_logger.IsEnabled(LogLevel.Trace)) { - LogInvokedSensitive(nameof(CompleteAsync), AsJson(chatMessages), AsJson(options), AsJson(this.GetService())); + LogInvokedSensitive(nameof(GetResponseAsync), AsJson(chatMessages), AsJson(options), AsJson(this.GetService())); } else { - LogInvoked(nameof(CompleteAsync)); + LogInvoked(nameof(GetResponseAsync)); } } try { - var completion = await base.CompleteAsync(chatMessages, options, cancellationToken).ConfigureAwait(false); + var response = await base.GetResponseAsync(chatMessages, options, cancellationToken).ConfigureAwait(false); if (_logger.IsEnabled(LogLevel.Debug)) { if (_logger.IsEnabled(LogLevel.Trace)) { - LogCompletedSensitive(nameof(CompleteAsync), AsJson(completion)); + LogCompletedSensitive(nameof(GetResponseAsync), AsJson(response)); } else { - LogCompleted(nameof(CompleteAsync)); + LogCompleted(nameof(GetResponseAsync)); } } - return completion; + return response; } catch (OperationCanceledException) { - LogInvocationCanceled(nameof(CompleteAsync)); + LogInvocationCanceled(nameof(GetResponseAsync)); throw; } catch (Exception ex) { - LogInvocationFailed(nameof(CompleteAsync), ex); + LogInvocationFailed(nameof(GetResponseAsync), ex); throw; } } /// - public override async IAsyncEnumerable CompleteStreamingAsync( + public override async IAsyncEnumerable 
GetStreamingResponseAsync( IList chatMessages, ChatOptions? options = null, [EnumeratorCancellation] CancellationToken cancellationToken = default) { if (_logger.IsEnabled(LogLevel.Debug)) { if (_logger.IsEnabled(LogLevel.Trace)) { - LogInvokedSensitive(nameof(CompleteStreamingAsync), AsJson(chatMessages), AsJson(options), AsJson(this.GetService())); + LogInvokedSensitive(nameof(GetStreamingResponseAsync), AsJson(chatMessages), AsJson(options), AsJson(this.GetService())); } else { - LogInvoked(nameof(CompleteStreamingAsync)); + LogInvoked(nameof(GetStreamingResponseAsync)); } } - IAsyncEnumerator e; + IAsyncEnumerator e; try { - e = base.CompleteStreamingAsync(chatMessages, options, cancellationToken).GetAsyncEnumerator(cancellationToken); + e = base.GetStreamingResponseAsync(chatMessages, options, cancellationToken).GetAsyncEnumerator(cancellationToken); } catch (OperationCanceledException) { - LogInvocationCanceled(nameof(CompleteStreamingAsync)); + LogInvocationCanceled(nameof(GetStreamingResponseAsync)); throw; } catch (Exception ex) { - LogInvocationFailed(nameof(CompleteStreamingAsync), ex); + LogInvocationFailed(nameof(GetStreamingResponseAsync), ex); throw; } try { - StreamingChatCompletionUpdate? update = null; + ChatResponseUpdate? 
update = null; while (true) { try @@ -136,12 +136,12 @@ public override async IAsyncEnumerable CompleteSt } catch (OperationCanceledException) { - LogInvocationCanceled(nameof(CompleteStreamingAsync)); + LogInvocationCanceled(nameof(GetStreamingResponseAsync)); throw; } catch (Exception ex) { - LogInvocationFailed(nameof(CompleteStreamingAsync), ex); + LogInvocationFailed(nameof(GetStreamingResponseAsync), ex); throw; } @@ -160,7 +160,7 @@ public override async IAsyncEnumerable CompleteSt yield return update; } - LogCompleted(nameof(CompleteStreamingAsync)); + LogCompleted(nameof(GetStreamingResponseAsync)); } finally { @@ -179,14 +179,14 @@ public override async IAsyncEnumerable CompleteSt [LoggerMessage(LogLevel.Debug, "{MethodName} completed.")] private partial void LogCompleted(string methodName); - [LoggerMessage(LogLevel.Trace, "{MethodName} completed: {ChatCompletion}.")] - private partial void LogCompletedSensitive(string methodName, string chatCompletion); + [LoggerMessage(LogLevel.Trace, "{MethodName} completed: {ChatResponse}.")] + private partial void LogCompletedSensitive(string methodName, string chatResponse); - [LoggerMessage(LogLevel.Debug, "CompleteStreamingAsync received update.")] + [LoggerMessage(LogLevel.Debug, "GetStreamingResponseAsync received update.")] private partial void LogStreamingUpdate(); - [LoggerMessage(LogLevel.Trace, "CompleteStreamingAsync received update: {StreamingChatCompletionUpdate}")] - private partial void LogStreamingUpdateSensitive(string streamingChatCompletionUpdate); + [LoggerMessage(LogLevel.Trace, "GetStreamingResponseAsync received update: {ChatResponseUpdate}")] + private partial void LogStreamingUpdateSensitive(string chatResponseUpdate); [LoggerMessage(LogLevel.Debug, "{MethodName} canceled.")] private partial void LogInvocationCanceled(string methodName); diff --git a/src/Libraries/Microsoft.Extensions.AI/ChatCompletion/OpenTelemetryChatClient.cs 
b/src/Libraries/Microsoft.Extensions.AI/ChatCompletion/OpenTelemetryChatClient.cs index 65905f8688b..1ae5f83b4b2 100644 --- a/src/Libraries/Microsoft.Extensions.AI/ChatCompletion/OpenTelemetryChatClient.cs +++ b/src/Libraries/Microsoft.Extensions.AI/ChatCompletion/OpenTelemetryChatClient.cs @@ -121,7 +121,7 @@ protected override void Dispose(bool disposing) base.GetService(serviceType, serviceKey); /// - public override async Task CompleteAsync(IList chatMessages, ChatOptions? options = null, CancellationToken cancellationToken = default) + public override async Task GetResponseAsync(IList chatMessages, ChatOptions? options = null, CancellationToken cancellationToken = default) { _ = Throw.IfNull(chatMessages); _jsonSerializerOptions.MakeReadOnly(); @@ -132,12 +132,12 @@ public override async Task CompleteAsync(IList chat LogChatMessages(chatMessages); - ChatCompletion? completion = null; + ChatResponse? response = null; Exception? error = null; try { - completion = await base.CompleteAsync(chatMessages, options, cancellationToken).ConfigureAwait(false); - return completion; + response = await base.GetResponseAsync(chatMessages, options, cancellationToken).ConfigureAwait(false); + return response; } catch (Exception ex) { @@ -146,12 +146,12 @@ public override async Task CompleteAsync(IList chat } finally { - TraceCompletion(activity, requestModelId, completion, error, stopwatch); + TraceResponse(activity, requestModelId, response, error, stopwatch); } } /// - public override async IAsyncEnumerable CompleteStreamingAsync( + public override async IAsyncEnumerable GetStreamingResponseAsync( IList chatMessages, ChatOptions? 
options = null, [EnumeratorCancellation] CancellationToken cancellationToken = default) { _ = Throw.IfNull(chatMessages); @@ -163,25 +163,25 @@ public override async IAsyncEnumerable CompleteSt LogChatMessages(chatMessages); - IAsyncEnumerable updates; + IAsyncEnumerable updates; try { - updates = base.CompleteStreamingAsync(chatMessages, options, cancellationToken); + updates = base.GetStreamingResponseAsync(chatMessages, options, cancellationToken); } catch (Exception ex) { - TraceCompletion(activity, requestModelId, completion: null, ex, stopwatch); + TraceResponse(activity, requestModelId, response: null, ex, stopwatch); throw; } var responseEnumerator = updates.ConfigureAwait(false).GetAsyncEnumerator(); - List trackedUpdates = []; + List trackedUpdates = []; Exception? error = null; try { while (true) { - StreamingChatCompletionUpdate update; + ChatResponseUpdate update; try { if (!await responseEnumerator.MoveNextAsync()) @@ -204,13 +204,13 @@ public override async IAsyncEnumerable CompleteSt } finally { - TraceCompletion(activity, requestModelId, trackedUpdates.ToChatCompletion(), error, stopwatch); + TraceResponse(activity, requestModelId, trackedUpdates.ToChatResponse(), error, stopwatch); await responseEnumerator.DisposeAsync(); } } - /// Creates an activity for a chat completion request, or returns null if not enabled. + /// Creates an activity for a chat request, or returns if not enabled. private Activity? CreateAndConfigureActivity(ChatOptions? options) { Activity? activity = null; @@ -312,11 +312,11 @@ public override async IAsyncEnumerable CompleteSt return activity; } - /// Adds chat completion information to the activity. - private void TraceCompletion( + /// Adds chat response information to the activity. + private void TraceResponse( Activity? activity, string? requestModelId, - ChatCompletion? completion, + ChatResponse? response, Exception? error, Stopwatch? 
stopwatch) { @@ -324,7 +324,7 @@ private void TraceCompletion( { TagList tags = default; - AddMetricTags(ref tags, requestModelId, completion); + AddMetricTags(ref tags, requestModelId, response); if (error is not null) { tags.Add(OpenTelemetryConsts.Error.Type, error.GetType().FullName); @@ -333,13 +333,13 @@ private void TraceCompletion( _operationDurationHistogram.Record(stopwatch.Elapsed.TotalSeconds, tags); } - if (_tokenUsageHistogram.Enabled && completion?.Usage is { } usage) + if (_tokenUsageHistogram.Enabled && response?.Usage is { } usage) { if (usage.InputTokenCount is long inputTokens) { TagList tags = default; tags.Add(OpenTelemetryConsts.GenAI.Token.Type, "input"); - AddMetricTags(ref tags, requestModelId, completion); + AddMetricTags(ref tags, requestModelId, response); _tokenUsageHistogram.Record((int)inputTokens); } @@ -347,7 +347,7 @@ private void TraceCompletion( { TagList tags = default; tags.Add(OpenTelemetryConsts.GenAI.Token.Type, "output"); - AddMetricTags(ref tags, requestModelId, completion); + AddMetricTags(ref tags, requestModelId, response); _tokenUsageHistogram.Record((int)outputTokens); } } @@ -359,35 +359,35 @@ private void TraceCompletion( .SetStatus(ActivityStatusCode.Error, error.Message); } - if (completion is not null) + if (response is not null) { - LogChatCompletion(completion); + LogChatResponse(response); if (activity is not null) { - if (completion.FinishReason is ChatFinishReason finishReason) + if (response.FinishReason is ChatFinishReason finishReason) { #pragma warning disable CA1308 // Normalize strings to uppercase _ = activity.AddTag(OpenTelemetryConsts.GenAI.Response.FinishReasons, $"[\"{finishReason.Value.ToLowerInvariant()}\"]"); #pragma warning restore CA1308 } - if (!string.IsNullOrWhiteSpace(completion.CompletionId)) + if (!string.IsNullOrWhiteSpace(response.ResponseId)) { - _ = activity.AddTag(OpenTelemetryConsts.GenAI.Response.Id, completion.CompletionId); + _ = 
activity.AddTag(OpenTelemetryConsts.GenAI.Response.Id, response.ResponseId); } - if (completion.ModelId is not null) + if (response.ModelId is not null) { - _ = activity.AddTag(OpenTelemetryConsts.GenAI.Response.Model, completion.ModelId); + _ = activity.AddTag(OpenTelemetryConsts.GenAI.Response.Model, response.ModelId); } - if (completion.Usage?.InputTokenCount is long inputTokens) + if (response.Usage?.InputTokenCount is long inputTokens) { _ = activity.AddTag(OpenTelemetryConsts.GenAI.Response.InputTokens, (int)inputTokens); } - if (completion.Usage?.OutputTokenCount is long outputTokens) + if (response.Usage?.OutputTokenCount is long outputTokens) { _ = activity.AddTag(OpenTelemetryConsts.GenAI.Response.OutputTokens, (int)outputTokens); } @@ -397,7 +397,7 @@ private void TraceCompletion( // Log all additional response properties as per-provider tags. This is non-normative, but it covers cases where // there's a per-provider specification in a best-effort manner (e.g. gen_ai.openai.response.system_fingerprint), // and more generally cases where there's additional useful information to be logged. - if (completion.AdditionalProperties is { } props) + if (response.AdditionalProperties is { } props) { foreach (KeyValuePair prop in props) { @@ -410,7 +410,7 @@ private void TraceCompletion( } } - void AddMetricTags(ref TagList tags, string? requestModelId, ChatCompletion? completions) + void AddMetricTags(ref TagList tags, string? requestModelId, ChatResponse? response) { tags.Add(OpenTelemetryConsts.GenAI.Operation.Name, OpenTelemetryConsts.GenAI.Chat); @@ -427,7 +427,7 @@ void AddMetricTags(ref TagList tags, string? requestModelId, ChatCompletion? 
com tags.Add(OpenTelemetryConsts.Server.Port, _serverPort); } - if (completions?.ModelId is string responseModel) + if (response?.ModelId is string responseModel) { tags.Add(OpenTelemetryConsts.GenAI.Response.Model, responseModel); } @@ -474,7 +474,7 @@ private void LogChatMessages(IEnumerable messages) } } - private void LogChatCompletion(ChatCompletion completion) + private void LogChatResponse(ChatResponse response) { if (!_logger.IsEnabled(EventLogLevel)) { @@ -482,14 +482,14 @@ private void LogChatCompletion(ChatCompletion completion) } EventId id = new(1, OpenTelemetryConsts.GenAI.Choice); - int choiceCount = completion.Choices.Count; + int choiceCount = response.Choices.Count; for (int choiceIndex = 0; choiceIndex < choiceCount; choiceIndex++) { Log(id, JsonSerializer.Serialize(new() { - FinishReason = completion.FinishReason?.Value ?? "error", + FinishReason = response.FinishReason?.Value ?? "error", Index = choiceIndex, - Message = CreateAssistantEvent(completion.Choices[choiceIndex]), + Message = CreateAssistantEvent(response.Choices[choiceIndex]), }, OtelContext.Default.ChoiceEvent)); } } diff --git a/src/Libraries/Microsoft.Extensions.AI/Embeddings/CachingEmbeddingGenerator.cs b/src/Libraries/Microsoft.Extensions.AI/Embeddings/CachingEmbeddingGenerator.cs index 688e4b2353d..e5595164d1b 100644 --- a/src/Libraries/Microsoft.Extensions.AI/Embeddings/CachingEmbeddingGenerator.cs +++ b/src/Libraries/Microsoft.Extensions.AI/Embeddings/CachingEmbeddingGenerator.cs @@ -121,6 +121,6 @@ public override async Task> GenerateAsync( /// The cache key. /// The to be stored. /// The to monitor for cancellation requests. - /// A representing the completion of the operation. + /// A representing the response of the operation. 
protected abstract Task WriteCacheAsync(string key, TEmbedding value, CancellationToken cancellationToken); } diff --git a/src/Libraries/Microsoft.Extensions.AI/Embeddings/OpenTelemetryEmbeddingGenerator.cs b/src/Libraries/Microsoft.Extensions.AI/Embeddings/OpenTelemetryEmbeddingGenerator.cs index 8ef13ce368b..3fd92a103aa 100644 --- a/src/Libraries/Microsoft.Extensions.AI/Embeddings/OpenTelemetryEmbeddingGenerator.cs +++ b/src/Libraries/Microsoft.Extensions.AI/Embeddings/OpenTelemetryEmbeddingGenerator.cs @@ -104,7 +104,7 @@ public override async Task> GenerateAsync(IEnume } finally { - TraceCompletion(activity, requestModelId, response, error, stopwatch); + TraceResponse(activity, requestModelId, response, error, stopwatch); } return response; @@ -177,7 +177,7 @@ protected override void Dispose(bool disposing) } /// Adds embedding generation response information to the activity. - private void TraceCompletion( + private void TraceResponse( Activity? activity, string? requestModelId, GeneratedEmbeddings? 
embeddings, diff --git a/test/Libraries/Microsoft.Extensions.AI.Abstractions.Tests/ChatCompletion/ChatClientExtensionsTests.cs b/test/Libraries/Microsoft.Extensions.AI.Abstractions.Tests/ChatCompletion/ChatClientExtensionsTests.cs index 41e92f1ad66..8ca61022255 100644 --- a/test/Libraries/Microsoft.Extensions.AI.Abstractions.Tests/ChatCompletion/ChatClientExtensionsTests.cs +++ b/test/Libraries/Microsoft.Extensions.AI.Abstractions.Tests/ChatCompletion/ChatClientExtensionsTests.cs @@ -22,12 +22,12 @@ public void CompleteAsync_InvalidArgs_Throws() { Assert.Throws("client", () => { - _ = ChatClientExtensions.CompleteAsync(null!, "hello"); + _ = ChatClientExtensions.GetResponseAsync(null!, "hello"); }); Assert.Throws("chatMessage", () => { - _ = ChatClientExtensions.CompleteAsync(new TestChatClient(), (ChatMessage)null!); + _ = ChatClientExtensions.GetResponseAsync(new TestChatClient(), (ChatMessage)null!); }); } @@ -36,19 +36,19 @@ public void CompleteStreamingAsync_InvalidArgs_Throws() { Assert.Throws("client", () => { - _ = ChatClientExtensions.CompleteStreamingAsync(null!, "hello"); + _ = ChatClientExtensions.GetStreamingResponseAsync(null!, "hello"); }); Assert.Throws("chatMessage", () => { - _ = ChatClientExtensions.CompleteStreamingAsync(new TestChatClient(), (ChatMessage)null!); + _ = ChatClientExtensions.GetStreamingResponseAsync(new TestChatClient(), (ChatMessage)null!); }); } [Fact] public async Task CompleteAsync_CreatesTextMessageAsync() { - var expectedResponse = new ChatCompletion([new ChatMessage()]); + var expectedResponse = new ChatResponse([new ChatMessage()]); var expectedOptions = new ChatOptions(); using var cts = new CancellationTokenSource(); @@ -68,7 +68,7 @@ public async Task CompleteAsync_CreatesTextMessageAsync() }, }; - ChatCompletion response = await client.CompleteAsync("hello", expectedOptions, cts.Token); + ChatResponse response = await client.GetResponseAsync("hello", expectedOptions, cts.Token); Assert.Same(expectedResponse, 
response); } @@ -91,12 +91,12 @@ public async Task CompleteStreamingAsync_CreatesTextMessageAsync() Assert.Equal(cts.Token, cancellationToken); - return YieldAsync([new StreamingChatCompletionUpdate { Text = "world" }]); + return YieldAsync([new ChatResponseUpdate { Text = "world" }]); }, }; int count = 0; - await foreach (var update in client.CompleteStreamingAsync("hello", expectedOptions, cts.Token)) + await foreach (var update in client.GetStreamingResponseAsync("hello", expectedOptions, cts.Token)) { Assert.Equal(0, count); Assert.Equal("world", update.Text); @@ -106,7 +106,7 @@ public async Task CompleteStreamingAsync_CreatesTextMessageAsync() Assert.Equal(1, count); } - private static async IAsyncEnumerable YieldAsync(params StreamingChatCompletionUpdate[] updates) + private static async IAsyncEnumerable YieldAsync(params ChatResponseUpdate[] updates) { await Task.Yield(); foreach (var update in updates) diff --git a/test/Libraries/Microsoft.Extensions.AI.Abstractions.Tests/ChatCompletion/ChatCompletionTests.cs b/test/Libraries/Microsoft.Extensions.AI.Abstractions.Tests/ChatCompletion/ChatResponseTests.cs similarity index 65% rename from test/Libraries/Microsoft.Extensions.AI.Abstractions.Tests/ChatCompletion/ChatCompletionTests.cs rename to test/Libraries/Microsoft.Extensions.AI.Abstractions.Tests/ChatCompletion/ChatResponseTests.cs index 261682ab801..e222b6d5215 100644 --- a/test/Libraries/Microsoft.Extensions.AI.Abstractions.Tests/ChatCompletion/ChatCompletionTests.cs +++ b/test/Libraries/Microsoft.Extensions.AI.Abstractions.Tests/ChatCompletion/ChatResponseTests.cs @@ -8,13 +8,13 @@ namespace Microsoft.Extensions.AI; -public class ChatCompletionTests +public class ChatResponseTests { [Fact] public void Constructor_InvalidArgs_Throws() { - Assert.Throws("message", () => new ChatCompletion((ChatMessage)null!)); - Assert.Throws("choices", () => new ChatCompletion((IList)null!)); + Assert.Throws("message", () => new ChatResponse((ChatMessage)null!)); + 
Assert.Throws("choices", () => new ChatResponse((IList)null!)); } [Fact] @@ -22,9 +22,9 @@ public void Constructor_Message_Roundtrips() { ChatMessage message = new(); - ChatCompletion completion = new(message); - Assert.Same(message, completion.Message); - Assert.Same(message, Assert.Single(completion.Choices)); + ChatResponse response = new(message); + Assert.Same(message, response.Message); + Assert.Same(message, Assert.Single(response.Choices)); } [Fact] @@ -37,95 +37,95 @@ public void Constructor_Choices_Roundtrips() new ChatMessage(), ]; - ChatCompletion completion = new(messages); - Assert.Same(messages, completion.Choices); + ChatResponse response = new(messages); + Assert.Same(messages, response.Choices); Assert.Equal(3, messages.Count); } [Fact] public void Message_EmptyChoices_Throws() { - ChatCompletion completion = new([]); + ChatResponse response = new([]); - Assert.Empty(completion.Choices); - Assert.Throws(() => completion.Message); + Assert.Empty(response.Choices); + Assert.Throws(() => response.Message); } [Fact] public void Message_SingleChoice_Returned() { ChatMessage message = new(); - ChatCompletion completion = new([message]); + ChatResponse response = new([message]); - Assert.Same(message, completion.Message); - Assert.Same(message, completion.Choices[0]); + Assert.Same(message, response.Message); + Assert.Same(message, response.Choices[0]); } [Fact] public void Message_MultipleChoices_ReturnsFirst() { ChatMessage first = new(); - ChatCompletion completion = new([ + ChatResponse response = new([ first, new ChatMessage(), ]); - Assert.Same(first, completion.Message); - Assert.Same(first, completion.Choices[0]); + Assert.Same(first, response.Message); + Assert.Same(first, response.Choices[0]); } [Fact] public void Choices_SetNull_Throws() { - ChatCompletion completion = new([]); - Assert.Throws("value", () => completion.Choices = null!); + ChatResponse response = new([]); + Assert.Throws("value", () => response.Choices = null!); } [Fact] public 
void Properties_Roundtrip() { - ChatCompletion completion = new([]); + ChatResponse response = new([]); - Assert.Null(completion.CompletionId); - completion.CompletionId = "id"; - Assert.Equal("id", completion.CompletionId); + Assert.Null(response.ResponseId); + response.ResponseId = "id"; + Assert.Equal("id", response.ResponseId); - Assert.Null(completion.ModelId); - completion.ModelId = "modelId"; - Assert.Equal("modelId", completion.ModelId); + Assert.Null(response.ModelId); + response.ModelId = "modelId"; + Assert.Equal("modelId", response.ModelId); - Assert.Null(completion.CreatedAt); - completion.CreatedAt = new DateTimeOffset(2022, 1, 1, 0, 0, 0, TimeSpan.Zero); - Assert.Equal(new DateTimeOffset(2022, 1, 1, 0, 0, 0, TimeSpan.Zero), completion.CreatedAt); + Assert.Null(response.CreatedAt); + response.CreatedAt = new DateTimeOffset(2022, 1, 1, 0, 0, 0, TimeSpan.Zero); + Assert.Equal(new DateTimeOffset(2022, 1, 1, 0, 0, 0, TimeSpan.Zero), response.CreatedAt); - Assert.Null(completion.FinishReason); - completion.FinishReason = ChatFinishReason.ContentFilter; - Assert.Equal(ChatFinishReason.ContentFilter, completion.FinishReason); + Assert.Null(response.FinishReason); + response.FinishReason = ChatFinishReason.ContentFilter; + Assert.Equal(ChatFinishReason.ContentFilter, response.FinishReason); - Assert.Null(completion.Usage); + Assert.Null(response.Usage); UsageDetails usage = new(); - completion.Usage = usage; - Assert.Same(usage, completion.Usage); + response.Usage = usage; + Assert.Same(usage, response.Usage); - Assert.Null(completion.RawRepresentation); + Assert.Null(response.RawRepresentation); object raw = new(); - completion.RawRepresentation = raw; - Assert.Same(raw, completion.RawRepresentation); + response.RawRepresentation = raw; + Assert.Same(raw, response.RawRepresentation); - Assert.Null(completion.AdditionalProperties); + Assert.Null(response.AdditionalProperties); AdditionalPropertiesDictionary additionalProps = []; - 
completion.AdditionalProperties = additionalProps; - Assert.Same(additionalProps, completion.AdditionalProperties); + response.AdditionalProperties = additionalProps; + Assert.Same(additionalProps, response.AdditionalProperties); List newChoices = [new ChatMessage(), new ChatMessage()]; - completion.Choices = newChoices; - Assert.Same(newChoices, completion.Choices); + response.Choices = newChoices; + Assert.Same(newChoices, response.Choices); } [Fact] public void JsonSerialization_Roundtrips() { - ChatCompletion original = new( + ChatResponse original = new( [ new ChatMessage(ChatRole.Assistant, "Choice1"), new ChatMessage(ChatRole.Assistant, "Choice2"), @@ -133,7 +133,7 @@ public void JsonSerialization_Roundtrips() new ChatMessage(ChatRole.Assistant, "Choice4"), ]) { - CompletionId = "id", + ResponseId = "id", ModelId = "modelId", CreatedAt = new DateTimeOffset(2022, 1, 1, 0, 0, 0, TimeSpan.Zero), FinishReason = ChatFinishReason.ContentFilter, @@ -142,9 +142,9 @@ public void JsonSerialization_Roundtrips() AdditionalProperties = new() { ["key"] = "value" }, }; - string json = JsonSerializer.Serialize(original, TestJsonSerializerContext.Default.ChatCompletion); + string json = JsonSerializer.Serialize(original, TestJsonSerializerContext.Default.ChatResponse); - ChatCompletion? result = JsonSerializer.Deserialize(json, TestJsonSerializerContext.Default.ChatCompletion); + ChatResponse? 
result = JsonSerializer.Deserialize(json, TestJsonSerializerContext.Default.ChatResponse); Assert.NotNull(result); Assert.Equal(4, result.Choices.Count); @@ -155,7 +155,7 @@ public void JsonSerialization_Roundtrips() Assert.Equal($"Choice{i + 1}", result.Choices[i].Text); } - Assert.Equal("id", result.CompletionId); + Assert.Equal("id", result.ResponseId); Assert.Equal("modelId", result.ModelId); Assert.Equal(new DateTimeOffset(2022, 1, 1, 0, 0, 0, TimeSpan.Zero), result.CreatedAt); Assert.Equal(ChatFinishReason.ContentFilter, result.FinishReason); @@ -171,18 +171,18 @@ public void JsonSerialization_Roundtrips() [Fact] public void ToString_OneChoice_OutputsChatMessageToString() { - ChatCompletion completion = new( + ChatResponse response = new( [ new ChatMessage(ChatRole.Assistant, "This is a test." + Environment.NewLine + "It's multiple lines.") ]); - Assert.Equal(completion.Choices[0].Text, completion.ToString()); + Assert.Equal(response.Choices[0].Text, response.ToString()); } [Fact] public void ToString_MultipleChoices_OutputsAllChoicesWithPrefix() { - ChatCompletion completion = new( + ChatResponse response = new( [ new ChatMessage(ChatRole.Assistant, "This is a test." 
+ Environment.NewLine + "It's multiple lines."), new ChatMessage(ChatRole.Assistant, "So is" + Environment.NewLine + " this."), @@ -191,50 +191,50 @@ public void ToString_MultipleChoices_OutputsAllChoicesWithPrefix() Assert.Equal( "Choice 0:" + Environment.NewLine + - completion.Choices[0] + Environment.NewLine + Environment.NewLine + + response.Choices[0] + Environment.NewLine + Environment.NewLine + "Choice 1:" + Environment.NewLine + - completion.Choices[1] + Environment.NewLine + Environment.NewLine + + response.Choices[1] + Environment.NewLine + Environment.NewLine + "Choice 2:" + Environment.NewLine + - completion.Choices[2], + response.Choices[2], - completion.ToString()); + response.ToString()); } [Fact] - public void ToStreamingChatCompletionUpdates_SingleChoice() + public void ToChatResponseUpdates_SingleChoice() { - ChatCompletion completion = new(new ChatMessage(new ChatRole("customRole"), "Text")) + ChatResponse response = new(new ChatMessage(new ChatRole("customRole"), "Text")) { - CompletionId = "12345", + ResponseId = "12345", ModelId = "someModel", FinishReason = ChatFinishReason.ContentFilter, CreatedAt = new DateTimeOffset(2024, 11, 10, 9, 20, 0, TimeSpan.Zero), AdditionalProperties = new() { ["key1"] = "value1", ["key2"] = 42 }, }; - StreamingChatCompletionUpdate[] updates = completion.ToStreamingChatCompletionUpdates(); + ChatResponseUpdate[] updates = response.ToChatResponseUpdates(); Assert.NotNull(updates); Assert.Equal(2, updates.Length); - StreamingChatCompletionUpdate update0 = updates[0]; - Assert.Equal("12345", update0.CompletionId); + ChatResponseUpdate update0 = updates[0]; + Assert.Equal("12345", update0.ResponseId); Assert.Equal("someModel", update0.ModelId); Assert.Equal(ChatFinishReason.ContentFilter, update0.FinishReason); Assert.Equal(new DateTimeOffset(2024, 11, 10, 9, 20, 0, TimeSpan.Zero), update0.CreatedAt); Assert.Equal("customRole", update0.Role?.Value); Assert.Equal("Text", update0.Text); - StreamingChatCompletionUpdate 
update1 = updates[1]; + ChatResponseUpdate update1 = updates[1]; Assert.Equal("value1", update1.AdditionalProperties?["key1"]); Assert.Equal(42, update1.AdditionalProperties?["key2"]); } [Fact] - public void ToStreamingChatCompletionUpdates_MultiChoice() + public void ToChatResponseUpdates_MultiChoice() { - ChatCompletion completion = new( + ChatResponse response = new( [ new ChatMessage(ChatRole.Assistant, [ @@ -256,7 +256,7 @@ public void ToStreamingChatCompletionUpdates_MultiChoice() }, ]) { - CompletionId = "12345", + ResponseId = "12345", ModelId = "someModel", FinishReason = ChatFinishReason.ContentFilter, CreatedAt = new DateTimeOffset(2024, 11, 10, 9, 20, 0, TimeSpan.Zero), @@ -264,12 +264,12 @@ public void ToStreamingChatCompletionUpdates_MultiChoice() Usage = new UsageDetails { TotalTokenCount = 123 }, }; - StreamingChatCompletionUpdate[] updates = completion.ToStreamingChatCompletionUpdates(); + ChatResponseUpdate[] updates = response.ToChatResponseUpdates(); Assert.NotNull(updates); Assert.Equal(3, updates.Length); - StreamingChatCompletionUpdate update0 = updates[0]; - Assert.Equal("12345", update0.CompletionId); + ChatResponseUpdate update0 = updates[0]; + Assert.Equal("12345", update0.ResponseId); Assert.Equal("someModel", update0.ModelId); Assert.Equal(ChatFinishReason.ContentFilter, update0.FinishReason); Assert.Equal(new DateTimeOffset(2024, 11, 10, 9, 20, 0, TimeSpan.Zero), update0.CreatedAt); @@ -279,8 +279,8 @@ public void ToStreamingChatCompletionUpdates_MultiChoice() Assert.Equal("world!", Assert.IsType(update0.Contents[2]).Text); Assert.Equal("choice1Value", update0.AdditionalProperties?["choice1Key"]); - StreamingChatCompletionUpdate update1 = updates[1]; - Assert.Equal("12345", update1.CompletionId); + ChatResponseUpdate update1 = updates[1]; + Assert.Equal("12345", update1.ResponseId); Assert.Equal("someModel", update1.ModelId); Assert.Equal(ChatFinishReason.ContentFilter, update1.FinishReason); Assert.Equal(new DateTimeOffset(2024, 11, 
10, 9, 20, 0, TimeSpan.Zero), update1.CreatedAt); @@ -289,7 +289,7 @@ public void ToStreamingChatCompletionUpdates_MultiChoice() Assert.IsType(update1.Contents[1]); Assert.Equal("choice2Value", update1.AdditionalProperties?["choice2Key"]); - StreamingChatCompletionUpdate update2 = updates[2]; + ChatResponseUpdate update2 = updates[2]; Assert.Equal("value1", update2.AdditionalProperties?["key1"]); Assert.Equal(42, update2.AdditionalProperties?["key2"]); Assert.Equal(123, Assert.IsType(Assert.Single(update2.Contents)).Details.TotalTokenCount); diff --git a/test/Libraries/Microsoft.Extensions.AI.Abstractions.Tests/ChatCompletion/StreamingChatCompletionUpdateExtensionsTests.cs b/test/Libraries/Microsoft.Extensions.AI.Abstractions.Tests/ChatCompletion/ChatResponseUpdateExtensionsTests.cs similarity index 60% rename from test/Libraries/Microsoft.Extensions.AI.Abstractions.Tests/ChatCompletion/StreamingChatCompletionUpdateExtensionsTests.cs rename to test/Libraries/Microsoft.Extensions.AI.Abstractions.Tests/ChatCompletion/ChatResponseUpdateExtensionsTests.cs index 5b5294d24f4..c233ab85b29 100644 --- a/test/Libraries/Microsoft.Extensions.AI.Abstractions.Tests/ChatCompletion/StreamingChatCompletionUpdateExtensionsTests.cs +++ b/test/Libraries/Microsoft.Extensions.AI.Abstractions.Tests/ChatCompletion/ChatResponseUpdateExtensionsTests.cs @@ -13,15 +13,15 @@ namespace Microsoft.Extensions.AI; -public class StreamingChatCompletionUpdateExtensionsTests +public class ChatResponseUpdateExtensionsTests { [Fact] public void InvalidArgs_Throws() { - Assert.Throws("updates", () => ((List)null!).ToChatCompletion()); + Assert.Throws("updates", () => ((List)null!).ToChatResponse()); } - public static IEnumerable ToChatCompletion_SuccessfullyCreatesCompletion_MemberData() + public static IEnumerable ToChatResponse_SuccessfullyCreatesResponse_MemberData() { foreach (bool useAsync in new[] { false, true }) { @@ -33,13 +33,13 @@ public void InvalidArgs_Throws() } [Theory] - 
[MemberData(nameof(ToChatCompletion_SuccessfullyCreatesCompletion_MemberData))] - public async Task ToChatCompletion_SuccessfullyCreatesCompletion(bool useAsync, bool? coalesceContent) + [MemberData(nameof(ToChatResponse_SuccessfullyCreatesResponse_MemberData))] + public async Task ToChatResponse_SuccessfullyCreatesResponse(bool useAsync, bool? coalesceContent) { - StreamingChatCompletionUpdate[] updates = + ChatResponseUpdate[] updates = [ - new() { ChoiceIndex = 0, Text = "Hello", CompletionId = "12345", CreatedAt = new DateTimeOffset(1, 2, 3, 4, 5, 6, TimeSpan.Zero), ModelId = "model123" }, - new() { ChoiceIndex = 1, Text = "Hey", CompletionId = "12345", CreatedAt = new DateTimeOffset(1, 2, 3, 4, 5, 6, TimeSpan.Zero), ModelId = "model124" }, + new() { ChoiceIndex = 0, Text = "Hello", ResponseId = "12345", CreatedAt = new DateTimeOffset(1, 2, 3, 4, 5, 6, TimeSpan.Zero), ModelId = "model123" }, + new() { ChoiceIndex = 1, Text = "Hey", ResponseId = "12345", CreatedAt = new DateTimeOffset(1, 2, 3, 4, 5, 6, TimeSpan.Zero), ModelId = "model124" }, new() { ChoiceIndex = 0, Text = ", ", AuthorName = "Someone", Role = ChatRole.User, AdditionalProperties = new() { ["a"] = "b" } }, new() { ChoiceIndex = 1, Text = ", ", AuthorName = "Else", Role = ChatRole.System, AdditionalProperties = new() { ["g"] = "h" } }, @@ -51,27 +51,27 @@ public async Task ToChatCompletion_SuccessfullyCreatesCompletion(bool useAsync, new() { ChoiceIndex = 3, Contents = new[] { new UsageContent(new() { InputTokenCount = 4, OutputTokenCount = 5 }) } }, ]; - ChatCompletion completion = (coalesceContent is bool, useAsync) switch + ChatResponse response = (coalesceContent is bool, useAsync) switch { - (false, false) => updates.ToChatCompletion(), - (false, true) => await YieldAsync(updates).ToChatCompletionAsync(), + (false, false) => updates.ToChatResponse(), + (false, true) => await YieldAsync(updates).ToChatResponseAsync(), - (true, false) => 
updates.ToChatCompletion(coalesceContent.GetValueOrDefault()), - (true, true) => await YieldAsync(updates).ToChatCompletionAsync(coalesceContent.GetValueOrDefault()), + (true, false) => updates.ToChatResponse(coalesceContent.GetValueOrDefault()), + (true, true) => await YieldAsync(updates).ToChatResponseAsync(coalesceContent.GetValueOrDefault()), }; - Assert.NotNull(completion); + Assert.NotNull(response); - Assert.NotNull(completion.Usage); - Assert.Equal(5, completion.Usage.InputTokenCount); - Assert.Equal(7, completion.Usage.OutputTokenCount); + Assert.NotNull(response.Usage); + Assert.Equal(5, response.Usage.InputTokenCount); + Assert.Equal(7, response.Usage.OutputTokenCount); - Assert.Equal("12345", completion.CompletionId); - Assert.Equal(new DateTimeOffset(1, 2, 3, 4, 5, 6, TimeSpan.Zero), completion.CreatedAt); - Assert.Equal("model123", completion.ModelId); + Assert.Equal("12345", response.ResponseId); + Assert.Equal(new DateTimeOffset(1, 2, 3, 4, 5, 6, TimeSpan.Zero), response.CreatedAt); + Assert.Equal("model123", response.ModelId); - Assert.Equal(3, completion.Choices.Count); + Assert.Equal(3, response.Choices.Count); - ChatMessage message = completion.Choices[0]; + ChatMessage message = response.Choices[0]; Assert.Equal(ChatRole.User, message.Role); Assert.Equal("Someone", message.AuthorName); Assert.NotNull(message.AdditionalProperties); @@ -79,7 +79,7 @@ public async Task ToChatCompletion_SuccessfullyCreatesCompletion(bool useAsync, Assert.Equal("b", message.AdditionalProperties["a"]); Assert.Equal("d", message.AdditionalProperties["c"]); - message = completion.Choices[1]; + message = response.Choices[1]; Assert.Equal(ChatRole.System, message.Role); Assert.Equal("Else", message.AuthorName); Assert.NotNull(message.AdditionalProperties); @@ -88,7 +88,7 @@ public async Task ToChatCompletion_SuccessfullyCreatesCompletion(bool useAsync, Assert.Equal("f", message.AdditionalProperties["e"]); Assert.Equal(42, message.AdditionalProperties["i"]); - message = 
completion.Choices[2]; + message = response.Choices[2]; Assert.Equal(ChatRole.Assistant, message.Role); Assert.Null(message.AuthorName); Assert.Null(message.AdditionalProperties); @@ -96,25 +96,25 @@ public async Task ToChatCompletion_SuccessfullyCreatesCompletion(bool useAsync, if (coalesceContent is null or true) { - Assert.Equal("Hello, world!", completion.Choices[0].Text); - Assert.Equal("Hey, you!", completion.Choices[1].Text); - Assert.Null(completion.Choices[2].Text); + Assert.Equal("Hello, world!", response.Choices[0].Text); + Assert.Equal("Hey, you!", response.Choices[1].Text); + Assert.Null(response.Choices[2].Text); } else { - Assert.Equal("Hello", completion.Choices[0].Contents[0].ToString()); - Assert.Equal(", ", completion.Choices[0].Contents[1].ToString()); - Assert.Equal("world!", completion.Choices[0].Contents[2].ToString()); + Assert.Equal("Hello", response.Choices[0].Contents[0].ToString()); + Assert.Equal(", ", response.Choices[0].Contents[1].ToString()); + Assert.Equal("world!", response.Choices[0].Contents[2].ToString()); - Assert.Equal("Hey", completion.Choices[1].Contents[0].ToString()); - Assert.Equal(", ", completion.Choices[1].Contents[1].ToString()); - Assert.Equal("you!", completion.Choices[1].Contents[2].ToString()); + Assert.Equal("Hey", response.Choices[1].Contents[0].ToString()); + Assert.Equal(", ", response.Choices[1].Contents[1].ToString()); + Assert.Equal("you!", response.Choices[1].Contents[2].ToString()); - Assert.Null(completion.Choices[2].Text); + Assert.Null(response.Choices[2].Text); } } - public static IEnumerable ToChatCompletion_Coalescing_VariousSequenceAndGapLengths_MemberData() + public static IEnumerable ToChatResponse_Coalescing_VariousSequenceAndGapLengths_MemberData() { foreach (bool useAsync in new[] { false, true }) { @@ -135,10 +135,10 @@ public static IEnumerable ToChatCompletion_Coalescing_VariousSequenceA } [Theory] - [MemberData(nameof(ToChatCompletion_Coalescing_VariousSequenceAndGapLengths_MemberData))] 
- public async Task ToChatCompletion_Coalescing_VariousSequenceAndGapLengths(bool useAsync, int numSequences, int sequenceLength, int gapLength, bool gapBeginningEnd) + [MemberData(nameof(ToChatResponse_Coalescing_VariousSequenceAndGapLengths_MemberData))] + public async Task ToChatResponse_Coalescing_VariousSequenceAndGapLengths(bool useAsync, int numSequences, int sequenceLength, int gapLength, bool gapBeginningEnd) { - List updates = []; + List updates = []; List expected = []; @@ -178,10 +178,10 @@ void AddGap() } } - ChatCompletion completion = useAsync ? await YieldAsync(updates).ToChatCompletionAsync() : updates.ToChatCompletion(); - Assert.Single(completion.Choices); + ChatResponse response = useAsync ? await YieldAsync(updates).ToChatResponseAsync() : updates.ToChatResponse(); + Assert.Single(response.Choices); - ChatMessage message = completion.Message; + ChatMessage message = response.Message; Assert.Equal(expected.Count + (gapLength * ((numSequences - 1) + (gapBeginningEnd ? 2 : 0))), message.Contents.Count); TextContent[] contents = message.Contents.OfType().ToArray(); @@ -193,28 +193,28 @@ void AddGap() } [Fact] - public async Task ToChatCompletion_UsageContentExtractedFromContents() + public async Task ToChatResponse_UsageContentExtractedFromContents() { - StreamingChatCompletionUpdate[] updates = + ChatResponseUpdate[] updates = { new() { Text = "Hello, " }, new() { Text = "world!" 
}, new() { Contents = [new UsageContent(new() { TotalTokenCount = 42 })] }, }; - ChatCompletion completion = await YieldAsync(updates).ToChatCompletionAsync(); + ChatResponse response = await YieldAsync(updates).ToChatResponseAsync(); - Assert.NotNull(completion); + Assert.NotNull(response); - Assert.NotNull(completion.Usage); - Assert.Equal(42, completion.Usage.TotalTokenCount); + Assert.NotNull(response.Usage); + Assert.Equal(42, response.Usage.TotalTokenCount); - Assert.Equal("Hello, world!", Assert.IsType(Assert.Single(completion.Message.Contents)).Text); + Assert.Equal("Hello, world!", Assert.IsType(Assert.Single(response.Message.Contents)).Text); } - private static async IAsyncEnumerable YieldAsync(IEnumerable updates) + private static async IAsyncEnumerable YieldAsync(IEnumerable updates) { - foreach (StreamingChatCompletionUpdate update in updates) + foreach (ChatResponseUpdate update in updates) { await Task.Yield(); yield return update; diff --git a/test/Libraries/Microsoft.Extensions.AI.Abstractions.Tests/ChatCompletion/StreamingChatCompletionUpdateTests.cs b/test/Libraries/Microsoft.Extensions.AI.Abstractions.Tests/ChatCompletion/ChatResponseUpdateTests.cs similarity index 89% rename from test/Libraries/Microsoft.Extensions.AI.Abstractions.Tests/ChatCompletion/StreamingChatCompletionUpdateTests.cs rename to test/Libraries/Microsoft.Extensions.AI.Abstractions.Tests/ChatCompletion/ChatResponseUpdateTests.cs index 725d9cea57f..be4108f8148 100644 --- a/test/Libraries/Microsoft.Extensions.AI.Abstractions.Tests/ChatCompletion/StreamingChatCompletionUpdateTests.cs +++ b/test/Libraries/Microsoft.Extensions.AI.Abstractions.Tests/ChatCompletion/ChatResponseUpdateTests.cs @@ -8,19 +8,19 @@ namespace Microsoft.Extensions.AI; -public class StreamingChatCompletionUpdateTests +public class ChatResponseUpdateTests { [Fact] public void Constructor_PropsDefaulted() { - StreamingChatCompletionUpdate update = new(); + ChatResponseUpdate update = new(); 
Assert.Null(update.AuthorName); Assert.Null(update.Role); Assert.Null(update.Text); Assert.Empty(update.Contents); Assert.Null(update.RawRepresentation); Assert.Null(update.AdditionalProperties); - Assert.Null(update.CompletionId); + Assert.Null(update.ResponseId); Assert.Null(update.CreatedAt); Assert.Null(update.FinishReason); Assert.Equal(0, update.ChoiceIndex); @@ -30,7 +30,7 @@ public void Constructor_PropsDefaulted() [Fact] public void Properties_Roundtrip() { - StreamingChatCompletionUpdate update = new(); + ChatResponseUpdate update = new(); Assert.Null(update.AuthorName); update.AuthorName = "author"; @@ -66,9 +66,9 @@ public void Properties_Roundtrip() update.AdditionalProperties = props; Assert.Same(props, update.AdditionalProperties); - Assert.Null(update.CompletionId); - update.CompletionId = "id"; - Assert.Equal("id", update.CompletionId); + Assert.Null(update.ResponseId); + update.ResponseId = "id"; + Assert.Equal("id", update.ResponseId); Assert.Null(update.CreatedAt); update.CreatedAt = new DateTimeOffset(2022, 1, 1, 0, 0, 0, TimeSpan.Zero); @@ -86,7 +86,7 @@ public void Properties_Roundtrip() [Fact] public void Text_GetSet_UsesFirstTextContent() { - StreamingChatCompletionUpdate update = new() + ChatResponseUpdate update = new() { Role = ChatRole.User, Contents = @@ -115,7 +115,7 @@ public void Text_GetSet_UsesFirstTextContent() [Fact] public void Text_Set_AddsTextMessageToEmptyList() { - StreamingChatCompletionUpdate update = new() + ChatResponseUpdate update = new() { Role = ChatRole.User, }; @@ -132,7 +132,7 @@ public void Text_Set_AddsTextMessageToEmptyList() [Fact] public void Text_Set_AddsTextMessageToListWithNoText() { - StreamingChatCompletionUpdate update = new() + ChatResponseUpdate update = new() { Contents = [ @@ -162,7 +162,7 @@ public void Text_Set_AddsTextMessageToListWithNoText() [Fact] public void JsonSerialization_Roundtrips() { - StreamingChatCompletionUpdate original = new() + ChatResponseUpdate original = new() { AuthorName = 
"author", Role = ChatRole.Assistant, @@ -175,16 +175,16 @@ public void JsonSerialization_Roundtrips() new TextContent("text-2"), ], RawRepresentation = new object(), - CompletionId = "id", + ResponseId = "id", CreatedAt = new DateTimeOffset(2022, 1, 1, 0, 0, 0, TimeSpan.Zero), FinishReason = ChatFinishReason.ContentFilter, AdditionalProperties = new() { ["key"] = "value" }, ChoiceIndex = 42, }; - string json = JsonSerializer.Serialize(original, TestJsonSerializerContext.Default.StreamingChatCompletionUpdate); + string json = JsonSerializer.Serialize(original, TestJsonSerializerContext.Default.ChatResponseUpdate); - StreamingChatCompletionUpdate? result = JsonSerializer.Deserialize(json, TestJsonSerializerContext.Default.StreamingChatCompletionUpdate); + ChatResponseUpdate? result = JsonSerializer.Deserialize(json, TestJsonSerializerContext.Default.ChatResponseUpdate); Assert.NotNull(result); Assert.Equal(5, result.Contents.Count); @@ -206,7 +206,7 @@ public void JsonSerialization_Roundtrips() Assert.Equal("author", result.AuthorName); Assert.Equal(ChatRole.Assistant, result.Role); - Assert.Equal("id", result.CompletionId); + Assert.Equal("id", result.ResponseId); Assert.Equal(new DateTimeOffset(2022, 1, 1, 0, 0, 0, TimeSpan.Zero), result.CreatedAt); Assert.Equal(ChatFinishReason.ContentFilter, result.FinishReason); Assert.Equal(42, result.ChoiceIndex); diff --git a/test/Libraries/Microsoft.Extensions.AI.Abstractions.Tests/ChatCompletion/DelegatingChatClientTests.cs b/test/Libraries/Microsoft.Extensions.AI.Abstractions.Tests/ChatCompletion/DelegatingChatClientTests.cs index a6b6e024681..d408f8e5fd1 100644 --- a/test/Libraries/Microsoft.Extensions.AI.Abstractions.Tests/ChatCompletion/DelegatingChatClientTests.cs +++ b/test/Libraries/Microsoft.Extensions.AI.Abstractions.Tests/ChatCompletion/DelegatingChatClientTests.cs @@ -24,8 +24,8 @@ public async Task ChatAsyncDefaultsToInnerClientAsync() var expectedChatContents = new List(); var expectedChatOptions = new 
ChatOptions(); var expectedCancellationToken = CancellationToken.None; - var expectedResult = new TaskCompletionSource(); - var expectedCompletion = new ChatCompletion([]); + var expectedResult = new TaskCompletionSource(); + var expectedResponse = new ChatResponse([]); using var inner = new TestChatClient { CompleteAsyncCallback = (chatContents, options, cancellationToken) => @@ -40,13 +40,13 @@ public async Task ChatAsyncDefaultsToInnerClientAsync() using var delegating = new NoOpDelegatingChatClient(inner); // Act - var resultTask = delegating.CompleteAsync(expectedChatContents, expectedChatOptions, expectedCancellationToken); + var resultTask = delegating.GetResponseAsync(expectedChatContents, expectedChatOptions, expectedCancellationToken); // Assert Assert.False(resultTask.IsCompleted); - expectedResult.SetResult(expectedCompletion); + expectedResult.SetResult(expectedResponse); Assert.True(resultTask.IsCompleted); - Assert.Same(expectedCompletion, await resultTask); + Assert.Same(expectedResponse, await resultTask); } [Fact] @@ -56,7 +56,7 @@ public async Task ChatStreamingAsyncDefaultsToInnerClientAsync() var expectedChatContents = new List(); var expectedChatOptions = new ChatOptions(); var expectedCancellationToken = CancellationToken.None; - StreamingChatCompletionUpdate[] expectedResults = + ChatResponseUpdate[] expectedResults = [ new() { Role = ChatRole.User, Text = "Message 1" }, new() { Role = ChatRole.User, Text = "Message 2" } @@ -76,7 +76,7 @@ public async Task ChatStreamingAsyncDefaultsToInnerClientAsync() using var delegating = new NoOpDelegatingChatClient(inner); // Act - var resultAsyncEnumerable = delegating.CompleteStreamingAsync(expectedChatContents, expectedChatOptions, expectedCancellationToken); + var resultAsyncEnumerable = delegating.GetStreamingResponseAsync(expectedChatContents, expectedChatOptions, expectedCancellationToken); // Assert var enumerator = resultAsyncEnumerable.GetAsyncEnumerator(); diff --git 
a/test/Libraries/Microsoft.Extensions.AI.Abstractions.Tests/TestChatClient.cs b/test/Libraries/Microsoft.Extensions.AI.Abstractions.Tests/TestChatClient.cs index 09723371191..1bb147bb24e 100644 --- a/test/Libraries/Microsoft.Extensions.AI.Abstractions.Tests/TestChatClient.cs +++ b/test/Libraries/Microsoft.Extensions.AI.Abstractions.Tests/TestChatClient.cs @@ -17,19 +17,19 @@ public TestChatClient() public IServiceProvider? Services { get; set; } - public Func, ChatOptions?, CancellationToken, Task>? CompleteAsyncCallback { get; set; } + public Func, ChatOptions?, CancellationToken, Task>? CompleteAsyncCallback { get; set; } - public Func, ChatOptions?, CancellationToken, IAsyncEnumerable>? CompleteStreamingAsyncCallback { get; set; } + public Func, ChatOptions?, CancellationToken, IAsyncEnumerable>? CompleteStreamingAsyncCallback { get; set; } public Func GetServiceCallback { get; set; } private object? DefaultGetServiceCallback(Type serviceType, object? serviceKey) => serviceType is not null && serviceKey is null && serviceType.IsInstanceOfType(this) ? this : null; - public Task CompleteAsync(IList chatMessages, ChatOptions? options = null, CancellationToken cancellationToken = default) + public Task GetResponseAsync(IList chatMessages, ChatOptions? options = null, CancellationToken cancellationToken = default) => CompleteAsyncCallback!.Invoke(chatMessages, options, cancellationToken); - public IAsyncEnumerable CompleteStreamingAsync(IList chatMessages, ChatOptions? options = null, CancellationToken cancellationToken = default) + public IAsyncEnumerable GetStreamingResponseAsync(IList chatMessages, ChatOptions? options = null, CancellationToken cancellationToken = default) => CompleteStreamingAsyncCallback!.Invoke(chatMessages, options, cancellationToken); public object? GetService(Type serviceType, object? 
serviceKey = null) diff --git a/test/Libraries/Microsoft.Extensions.AI.Abstractions.Tests/TestJsonSerializerContext.cs b/test/Libraries/Microsoft.Extensions.AI.Abstractions.Tests/TestJsonSerializerContext.cs index 5a3e966c17b..4af54d6cfd9 100644 --- a/test/Libraries/Microsoft.Extensions.AI.Abstractions.Tests/TestJsonSerializerContext.cs +++ b/test/Libraries/Microsoft.Extensions.AI.Abstractions.Tests/TestJsonSerializerContext.cs @@ -14,8 +14,8 @@ namespace Microsoft.Extensions.AI; PropertyNamingPolicy = JsonKnownNamingPolicy.CamelCase, DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull, UseStringEnumConverter = true)] -[JsonSerializable(typeof(ChatCompletion))] -[JsonSerializable(typeof(StreamingChatCompletionUpdate))] +[JsonSerializable(typeof(ChatResponse))] +[JsonSerializable(typeof(ChatResponseUpdate))] [JsonSerializable(typeof(ChatOptions))] [JsonSerializable(typeof(EmbeddingGenerationOptions))] [JsonSerializable(typeof(Dictionary))] diff --git a/test/Libraries/Microsoft.Extensions.AI.AzureAIInference.Tests/AzureAIInferenceChatClientTests.cs b/test/Libraries/Microsoft.Extensions.AI.AzureAIInference.Tests/AzureAIInferenceChatClientTests.cs index 1f306063085..9169205b394 100644 --- a/test/Libraries/Microsoft.Extensions.AI.AzureAIInference.Tests/AzureAIInferenceChatClientTests.cs +++ b/test/Libraries/Microsoft.Extensions.AI.AzureAIInference.Tests/AzureAIInferenceChatClientTests.cs @@ -151,14 +151,14 @@ public async Task BasicRequestResponse_NonStreaming(bool multiContent) [new ChatMessage(ChatRole.User, "hello".Select(c => (AIContent)new TextContent(c.ToString())).ToList())] : [new ChatMessage(ChatRole.User, "hello")]; - var response = await client.CompleteAsync(chatMessages, new() + var response = await client.GetResponseAsync(chatMessages, new() { MaxOutputTokens = 10, Temperature = 0.5f, }); Assert.NotNull(response); - Assert.Equal("chatcmpl-ADx3PvAnCwJg0woha4pYsBTi3ZpOI", response.CompletionId); + 
Assert.Equal("chatcmpl-ADx3PvAnCwJg0woha4pYsBTi3ZpOI", response.ResponseId); Assert.Equal("Hello! How can I assist you today?", response.Message.Text); Assert.Single(response.Message.Contents); Assert.Equal(ChatRole.Assistant, response.Message.Role); @@ -218,8 +218,8 @@ public async Task BasicRequestResponse_Streaming(bool multiContent) [new ChatMessage(ChatRole.User, "hello".Select(c => (AIContent)new TextContent(c.ToString())).ToList())] : [new ChatMessage(ChatRole.User, "hello")]; - List updates = []; - await foreach (var update in client.CompleteStreamingAsync(chatMessages, new() + List updates = []; + await foreach (var update in client.GetStreamingResponseAsync(chatMessages, new() { MaxOutputTokens = 20, Temperature = 0.5f, @@ -234,7 +234,7 @@ [new ChatMessage(ChatRole.User, "hello".Select(c => (AIContent)new TextContent(c Assert.Equal(12, updates.Count); for (int i = 0; i < updates.Count; i++) { - Assert.Equal("chatcmpl-ADxFKtX6xIwdWRN42QvBj2u1RZpCK", updates[i].CompletionId); + Assert.Equal("chatcmpl-ADxFKtX6xIwdWRN42QvBj2u1RZpCK", updates[i].ResponseId); Assert.Equal(createdAt, updates[i].CreatedAt); Assert.Equal("gpt-4o-mini-2024-07-18", updates[i].ModelId); Assert.Equal(ChatRole.Assistant, updates[i].Role); @@ -282,7 +282,7 @@ public async Task AdditionalOptions_NonStreaming() using HttpClient httpClient = new(handler); using IChatClient client = CreateChatClient(httpClient, "gpt-4o-mini"); - Assert.NotNull(await client.CompleteAsync("hello", new() + Assert.NotNull(await client.GetResponseAsync("hello", new() { MaxOutputTokens = 10, Temperature = 0.5f, @@ -330,7 +330,7 @@ public async Task ResponseFormat_Text_NonStreaming() using HttpClient httpClient = new(handler); using IChatClient client = CreateChatClient(httpClient, "gpt-4o-mini"); - Assert.NotNull(await client.CompleteAsync("hello", new() + Assert.NotNull(await client.GetResponseAsync("hello", new() { ResponseFormat = ChatResponseFormat.Text, })); @@ -366,7 +366,7 @@ public async Task 
ResponseFormat_Json_NonStreaming() using HttpClient httpClient = new(handler); using IChatClient client = CreateChatClient(httpClient, "gpt-4o-mini"); - Assert.NotNull(await client.CompleteAsync("hello", new() + Assert.NotNull(await client.GetResponseAsync("hello", new() { ResponseFormat = ChatResponseFormat.Json, })); @@ -405,7 +405,7 @@ public async Task ResponseFormat_JsonSchema_NonStreaming() using HttpClient httpClient = new(handler); using IChatClient client = CreateChatClient(httpClient, "gpt-4o-mini"); - Assert.NotNull(await client.CompleteAsync("hello", new() + Assert.NotNull(await client.GetResponseAsync("hello", new() { ResponseFormat = ChatResponseFormat.ForJsonSchema(JsonSerializer.Deserialize(""" { @@ -512,7 +512,7 @@ public async Task MultipleMessages_NonStreaming() new(ChatRole.Tool, [new FunctionResultContent("abcd123", "happy")]), ]; - var response = await client.CompleteAsync(messages, new() + var response = await client.GetResponseAsync(messages, new() { Temperature = 0.25f, FrequencyPenalty = 0.75f, @@ -522,7 +522,7 @@ public async Task MultipleMessages_NonStreaming() }); Assert.NotNull(response); - Assert.Equal("chatcmpl-ADyV17bXeSm5rzUx3n46O7m3M0o3P", response.CompletionId); + Assert.Equal("chatcmpl-ADyV17bXeSm5rzUx3n46O7m3M0o3P", response.ResponseId); Assert.Equal("I’m doing well, thank you! 
What’s on your mind today?", response.Message.Text); Assert.Single(response.Message.Contents); Assert.Equal(ChatRole.Assistant, response.Message.Role); @@ -584,7 +584,7 @@ public async Task MultipleContent_NonStreaming() using HttpClient httpClient = new(handler); using IChatClient client = CreateChatClient(httpClient, "gpt-4o-mini"); - Assert.NotNull(await client.CompleteAsync([new(ChatRole.User, + Assert.NotNull(await client.GetResponseAsync([new(ChatRole.User, [ new TextContent("Describe this picture."), new DataContent("http://dot.net/someimage.png", mediaType: "image/png"), @@ -653,10 +653,10 @@ public async Task NullAssistantText_ContentEmpty_NonStreaming() new(ChatRole.User, "hello!"), ]; - var response = await client.CompleteAsync(messages); + var response = await client.GetResponseAsync(messages); Assert.NotNull(response); - Assert.Equal("chatcmpl-ADyV17bXeSm5rzUx3n46O7m3M0o3P", response.CompletionId); + Assert.Equal("chatcmpl-ADyV17bXeSm5rzUx3n46O7m3M0o3P", response.ResponseId); Assert.Equal("Hello.", response.Message.Text); Assert.Single(response.Message.Contents); Assert.Equal(ChatRole.Assistant, response.Message.Role); @@ -766,7 +766,7 @@ public async Task FunctionCallContent_NonStreaming(ChatToolMode mode) using HttpClient httpClient = new(handler); using IChatClient client = CreateChatClient(httpClient, "gpt-4o-mini"); - var response = await client.CompleteAsync("How old is Alice?", new() + var response = await client.GetResponseAsync("How old is Alice?", new() { Tools = [AIFunctionFactory.Create(([Description("The person whose age is being requested")] string personName) => 42, "GetPersonAge", "Gets the age of the specified person.")], ToolMode = mode, @@ -853,8 +853,8 @@ public async Task FunctionCallContent_Streaming() using HttpClient httpClient = new(handler); using IChatClient client = CreateChatClient(httpClient, "gpt-4o-mini"); - List updates = []; - await foreach (var update in client.CompleteStreamingAsync("How old is Alice?", new() + List 
updates = []; + await foreach (var update in client.GetStreamingResponseAsync("How old is Alice?", new() { Tools = [AIFunctionFactory.Create(([Description("The person whose age is being requested")] string personName) => 42, "GetPersonAge", "Gets the age of the specified person.")], })) @@ -868,7 +868,7 @@ public async Task FunctionCallContent_Streaming() Assert.Equal(10, updates.Count); for (int i = 0; i < updates.Count; i++) { - Assert.Equal("chatcmpl-ADymNiWWeqCJqHNFXiI1QtRcLuXcl", updates[i].CompletionId); + Assert.Equal("chatcmpl-ADymNiWWeqCJqHNFXiI1QtRcLuXcl", updates[i].ResponseId); Assert.Equal(createdAt, updates[i].CreatedAt); Assert.Equal("gpt-4o-mini-2024-07-18", updates[i].ModelId); Assert.Equal(ChatRole.Assistant, updates[i].Role); diff --git a/test/Libraries/Microsoft.Extensions.AI.AzureAIInference.Tests/IntegrationTestHelpers.cs b/test/Libraries/Microsoft.Extensions.AI.AzureAIInference.Tests/IntegrationTestHelpers.cs index 7518d987cc4..7fa87ad336b 100644 --- a/test/Libraries/Microsoft.Extensions.AI.AzureAIInference.Tests/IntegrationTestHelpers.cs +++ b/test/Libraries/Microsoft.Extensions.AI.AzureAIInference.Tests/IntegrationTestHelpers.cs @@ -18,7 +18,7 @@ internal static class IntegrationTestHelpers TestRunnerConfiguration.Instance["AzureAIInference:Endpoint"] ?? "https://api.openai.com/v1"; - /// Gets an to use for testing, or null if the associated tests should be disabled. + /// Gets an to use for testing, or null if the associated tests should be disabled. public static ChatCompletionsClient? GetChatCompletionsClient() => _apiKey is string apiKey ? 
new ChatCompletionsClient(new Uri(_endpoint), new AzureKeyCredential(apiKey)) : diff --git a/test/Libraries/Microsoft.Extensions.AI.Integration.Tests/CallCountingChatClient.cs b/test/Libraries/Microsoft.Extensions.AI.Integration.Tests/CallCountingChatClient.cs index c2aaa0d086d..853815ff033 100644 --- a/test/Libraries/Microsoft.Extensions.AI.Integration.Tests/CallCountingChatClient.cs +++ b/test/Libraries/Microsoft.Extensions.AI.Integration.Tests/CallCountingChatClient.cs @@ -16,18 +16,18 @@ internal sealed class CallCountingChatClient(IChatClient innerClient) : Delegati public int CallCount => _callCount; - public override Task CompleteAsync( + public override Task GetResponseAsync( IList chatMessages, ChatOptions? options = null, CancellationToken cancellationToken = default) { Interlocked.Increment(ref _callCount); - return base.CompleteAsync(chatMessages, options, cancellationToken); + return base.GetResponseAsync(chatMessages, options, cancellationToken); } - public override IAsyncEnumerable CompleteStreamingAsync( + public override IAsyncEnumerable GetStreamingResponseAsync( IList chatMessages, ChatOptions? 
options = null, CancellationToken cancellationToken = default) { Interlocked.Increment(ref _callCount); - return base.CompleteStreamingAsync(chatMessages, options, cancellationToken); + return base.GetStreamingResponseAsync(chatMessages, options, cancellationToken); } } diff --git a/test/Libraries/Microsoft.Extensions.AI.Integration.Tests/ChatClientIntegrationTests.cs b/test/Libraries/Microsoft.Extensions.AI.Integration.Tests/ChatClientIntegrationTests.cs index 6f8d0ddee29..166cb51e930 100644 --- a/test/Libraries/Microsoft.Extensions.AI.Integration.Tests/ChatClientIntegrationTests.cs +++ b/test/Libraries/Microsoft.Extensions.AI.Integration.Tests/ChatClientIntegrationTests.cs @@ -46,7 +46,7 @@ public virtual async Task CompleteAsync_SingleRequestMessage() { SkipIfNotEnabled(); - var response = await _chatClient.CompleteAsync("What's the biggest animal?"); + var response = await _chatClient.GetResponseAsync("What's the biggest animal?"); Assert.Contains("whale", response.Message.Text, StringComparison.OrdinalIgnoreCase); } @@ -56,7 +56,7 @@ public virtual async Task CompleteAsync_MultipleRequestMessages() { SkipIfNotEnabled(); - var response = await _chatClient.CompleteAsync( + var response = await _chatClient.GetResponseAsync( [ new(ChatRole.User, "Pick a city, any city"), new(ChatRole.Assistant, "Seattle"), @@ -81,7 +81,7 @@ public virtual async Task CompleteStreamingAsync_SingleStreamingResponseChoice() ]; StringBuilder sb = new(); - await foreach (var chunk in _chatClient.CompleteStreamingAsync(chatHistory)) + await foreach (var chunk in _chatClient.GetStreamingResponseAsync(chatHistory)) { sb.Append(chunk.Text); } @@ -99,7 +99,7 @@ public virtual async Task CompleteAsync_UsageDataAvailable() { SkipIfNotEnabled(); - var response = await _chatClient.CompleteAsync("Explain in 10 words how AI works"); + var response = await _chatClient.GetResponseAsync("Explain in 10 words how AI works"); Assert.Single(response.Choices); Assert.True(response.Usage?.InputTokenCount > 
1); @@ -112,7 +112,7 @@ public virtual async Task CompleteStreamingAsync_UsageDataAvailable() { SkipIfNotEnabled(); - var response = _chatClient.CompleteStreamingAsync("Explain in 10 words how AI works", new() + var response = _chatClient.GetStreamingResponseAsync("Explain in 10 words how AI works", new() { AdditionalProperties = new() { @@ -120,7 +120,7 @@ public virtual async Task CompleteStreamingAsync_UsageDataAvailable() }, }); - List chunks = []; + List chunks = []; await foreach (var chunk in response) { chunks.Add(chunk); @@ -141,7 +141,7 @@ public virtual async Task MultiModal_DescribeImage() { SkipIfNotEnabled(); - var response = await _chatClient.CompleteAsync( + var response = await _chatClient.GetResponseAsync( [ new(ChatRole.User, [ @@ -177,7 +177,7 @@ public virtual async Task FunctionInvocation_AutomaticallyInvokeFunction_Paramet new(ChatRole.User, "What is the current secret number?") ]; - var response = await chatClient.CompleteAsync(messages, new() + var response = await chatClient.GetResponseAsync(messages, new() { Tools = [AIFunctionFactory.Create(() => secretNumber, "GetSecretNumber")] }); @@ -203,7 +203,7 @@ public virtual async Task FunctionInvocation_AutomaticallyInvokeFunction_WithPar using var chatClient = new FunctionInvokingChatClient(_chatClient); - var response = await chatClient.CompleteAsync("What is the result of SecretComputation on 42 and 84?", new() + var response = await chatClient.GetResponseAsync("What is the result of SecretComputation on 42 and 84?", new() { Tools = [AIFunctionFactory.Create((int a, int b) => a * b, "SecretComputation")] }); @@ -219,7 +219,7 @@ public virtual async Task FunctionInvocation_AutomaticallyInvokeFunction_WithPar using var chatClient = new FunctionInvokingChatClient(_chatClient); - var response = chatClient.CompleteStreamingAsync("What is the result of SecretComputation on 42 and 84?", new() + var response = chatClient.GetStreamingResponseAsync("What is the result of SecretComputation on 42 and 
84?", new() { Tools = [AIFunctionFactory.Create((int a, int b) => a * b, "SecretComputation")] }); @@ -247,7 +247,7 @@ public virtual async Task FunctionInvocation_SupportsMultipleParallelRequests() using var chatClient = new FunctionInvokingChatClient(_chatClient); // The service/model isn't guaranteed to request two calls to GetPersonAge in the same turn, but it's common that it will. - var response = await chatClient.CompleteAsync("How much older is Elsa than Anna? Return the age difference as a single number.", new() + var response = await chatClient.GetResponseAsync("How much older is Elsa than Anna? Return the age difference as a single number.", new() { Tools = [AIFunctionFactory.Create((string personName) => { @@ -279,7 +279,7 @@ public virtual async Task FunctionInvocation_RequireAny() using var chatClient = new FunctionInvokingChatClient(_chatClient); - var response = await chatClient.CompleteAsync("Are birds real?", new() + var response = await chatClient.GetResponseAsync("Are birds real?", new() { Tools = [tool], ToolMode = ChatToolMode.RequireAny, @@ -301,7 +301,7 @@ public virtual async Task FunctionInvocation_RequireSpecific() using var chatClient = new FunctionInvokingChatClient(_chatClient); // Even though the user doesn't ask for the shields to be activated, verify that the tool is invoked - var response = await chatClient.CompleteAsync("What's the current secret number?", new() + var response = await chatClient.GetResponseAsync("What's the current secret number?", new() { Tools = [getSecretNumberTool, shieldsUpTool], ToolMode = ChatToolMode.RequireSpecific(shieldsUpTool.Metadata.Name), @@ -316,10 +316,10 @@ public virtual async Task Caching_OutputVariesWithoutCaching() SkipIfNotEnabled(); var message = new ChatMessage(ChatRole.User, "Pick a random number, uniformly distributed between 1 and 1000000"); - var firstResponse = await _chatClient.CompleteAsync([message]); + var firstResponse = await _chatClient.GetResponseAsync([message]); 
Assert.Single(firstResponse.Choices); - var secondResponse = await _chatClient.CompleteAsync([message]); + var secondResponse = await _chatClient.GetResponseAsync([message]); Assert.NotEqual(firstResponse.Message.Text, secondResponse.Message.Text); } @@ -333,19 +333,19 @@ public virtual async Task Caching_SamePromptResultsInCacheHit_NonStreaming() new MemoryDistributedCache(Options.Options.Create(new MemoryDistributedCacheOptions()))); var message = new ChatMessage(ChatRole.User, "Pick a random number, uniformly distributed between 1 and 1000000"); - var firstResponse = await chatClient.CompleteAsync([message]); + var firstResponse = await chatClient.GetResponseAsync([message]); Assert.Single(firstResponse.Choices); // No matter what it said before, we should see identical output due to caching for (int i = 0; i < 3; i++) { - var secondResponse = await chatClient.CompleteAsync([message]); + var secondResponse = await chatClient.GetResponseAsync([message]); Assert.Equal(firstResponse.Message.Text, secondResponse.Message.Text); } // ... 
but if the conversation differs, we should see different output message.Text += "!"; - var thirdResponse = await chatClient.CompleteAsync([message]); + var thirdResponse = await chatClient.GetResponseAsync([message]); Assert.NotEqual(firstResponse.Message.Text, thirdResponse.Message.Text); } @@ -360,7 +360,7 @@ public virtual async Task Caching_SamePromptResultsInCacheHit_Streaming() var message = new ChatMessage(ChatRole.User, "Pick a random number, uniformly distributed between 1 and 1000000"); StringBuilder orig = new(); - await foreach (var update in chatClient.CompleteStreamingAsync([message])) + await foreach (var update in chatClient.GetStreamingResponseAsync([message])) { orig.Append(update.Text); } @@ -369,7 +369,7 @@ public virtual async Task Caching_SamePromptResultsInCacheHit_Streaming() for (int i = 0; i < 3; i++) { StringBuilder second = new(); - await foreach (var update in chatClient.CompleteStreamingAsync([message])) + await foreach (var update in chatClient.GetStreamingResponseAsync([message])) { second.Append(update.Text); } @@ -380,7 +380,7 @@ public virtual async Task Caching_SamePromptResultsInCacheHit_Streaming() // ... 
but if the conversation differs, we should see different output message.Text += "!"; StringBuilder third = new(); - await foreach (var update in chatClient.CompleteStreamingAsync([message])) + await foreach (var update in chatClient.GetStreamingResponseAsync([message])) { third.Append(update.Text); } @@ -411,14 +411,14 @@ public virtual async Task Caching_BeforeFunctionInvocation_AvoidsExtraCalls() var llmCallCount = chatClient.GetService(); var message = new ChatMessage(ChatRole.User, "What is the temperature?"); - var response = await chatClient.CompleteAsync([message]); + var response = await chatClient.GetResponseAsync([message]); Assert.Contains("101", response.Message.Text); // First LLM call tells us to call the function, second deals with the result Assert.Equal(2, llmCallCount!.CallCount); // Second call doesn't execute the function or call the LLM, but rather just returns the cached result - var secondResponse = await chatClient.CompleteAsync([message]); + var secondResponse = await chatClient.GetResponseAsync([message]); Assert.Equal(response.Message.Text, secondResponse.Message.Text); Assert.Equal(1, functionCallCount); Assert.Equal(2, llmCallCount!.CallCount); @@ -450,7 +450,7 @@ public virtual async Task Caching_AfterFunctionInvocation_FunctionOutputUnchange var llmCallCount = chatClient.GetService(); var message = new ChatMessage(ChatRole.User, "What is the temperature?"); - var response = await chatClient.CompleteAsync([message]); + var response = await chatClient.GetResponseAsync([message]); Assert.Contains("58", response.Message.Text); // First LLM call tells us to call the function, second deals with the result @@ -458,7 +458,7 @@ public virtual async Task Caching_AfterFunctionInvocation_FunctionOutputUnchange Assert.Equal(2, llmCallCount!.CallCount); // Second time, the calls to the LLM don't happen, but the function is called again - var secondResponse = await chatClient.CompleteAsync([message]); + var secondResponse = await 
chatClient.GetResponseAsync([message]); Assert.Equal(response.Message.Text, secondResponse.Message.Text); Assert.Equal(2, functionCallCount); Assert.Equal(2, llmCallCount!.CallCount); @@ -490,7 +490,7 @@ public virtual async Task Caching_AfterFunctionInvocation_FunctionOutputChangedA var llmCallCount = chatClient.GetService(); var message = new ChatMessage(ChatRole.User, "What is the temperature?"); - var response = await chatClient.CompleteAsync([message]); + var response = await chatClient.GetResponseAsync([message]); Assert.Contains("81", response.Message.Text); // First LLM call tells us to call the function, second deals with the result @@ -499,7 +499,7 @@ public virtual async Task Caching_AfterFunctionInvocation_FunctionOutputChangedA // Second time, the first call to the LLM don't happen, but the function is called again, // and since its output now differs, we no longer hit the cache so the second LLM call does happen - var secondResponse = await chatClient.CompleteAsync([message]); + var secondResponse = await chatClient.GetResponseAsync([message]); Assert.Contains("82", secondResponse.Message.Text); Assert.Equal(2, functionCallCount); Assert.Equal(3, llmCallCount!.CallCount); @@ -517,7 +517,7 @@ public virtual async Task Logging_LogsCalls_NonStreaming() .UseLogging(loggerFactory) .Build(); - await chatClient.CompleteAsync([new(ChatRole.User, "What's the biggest animal?")]); + await chatClient.GetResponseAsync([new(ChatRole.User, "What's the biggest animal?")]); Assert.Collection(collector.GetSnapshot(), entry => Assert.Contains("What's the biggest animal?", entry.Message), @@ -536,7 +536,7 @@ public virtual async Task Logging_LogsCalls_Streaming() .UseLogging(loggerFactory) .Build(); - await foreach (var update in chatClient.CompleteStreamingAsync("What's the biggest animal?")) + await foreach (var update in chatClient.GetStreamingResponseAsync("What's the biggest animal?")) { // Do nothing with the updates } @@ -561,7 +561,7 @@ public virtual async Task 
Logging_LogsFunctionCalls_NonStreaming() .Build(); int secretNumber = 42; - await chatClient.CompleteAsync( + await chatClient.GetResponseAsync( "What is the current secret number?", new ChatOptions { Tools = [AIFunctionFactory.Create(() => secretNumber, "GetSecretNumber")] }); @@ -587,7 +587,7 @@ public virtual async Task Logging_LogsFunctionCalls_Streaming() .Build(); int secretNumber = 42; - await foreach (var update in chatClient.CompleteStreamingAsync( + await foreach (var update in chatClient.GetStreamingResponseAsync( "What is the current secret number?", new ChatOptions { Tools = [AIFunctionFactory.Create(() => secretNumber, "GetSecretNumber")] })) { @@ -616,7 +616,7 @@ public virtual async Task OpenTelemetry_CanEmitTracesAndMetrics() .UseOpenTelemetry(sourceName: sourceName) .Build(); - var response = await chatClient.CompleteAsync([new(ChatRole.User, "What's the biggest animal?")]); + var response = await chatClient.GetResponseAsync([new(ChatRole.User, "What's the biggest animal?")]); var activity = Assert.Single(activities); Assert.StartsWith("chat", activity.DisplayName); @@ -635,7 +635,7 @@ public virtual async Task CompleteAsync_StructuredOutput() { SkipIfNotEnabled(); - var response = await _chatClient.CompleteAsync(""" + var response = await _chatClient.GetResponseAsync(""" Who is described in the following sentence? Jimbo Smith is a 35-year-old programmer from Cardiff, Wales. """); @@ -651,7 +651,7 @@ public virtual async Task CompleteAsync_StructuredOutputArray() { SkipIfNotEnabled(); - var response = await _chatClient.CompleteAsync(""" + var response = await _chatClient.GetResponseAsync(""" Who are described in the following sentence? Jimbo Smith is a 35-year-old software developer from Cardiff, Wales. Josh Simpson is a 25-year-old software developer from Newport, Wales. 
@@ -667,7 +667,7 @@ public virtual async Task CompleteAsync_StructuredOutputInteger() { SkipIfNotEnabled(); - var response = await _chatClient.CompleteAsync(""" + var response = await _chatClient.GetResponseAsync(""" There were 14 abstractions for AI programming, which was too many. To fix this we added another one. How many are there now? """); @@ -680,7 +680,7 @@ public virtual async Task CompleteAsync_StructuredOutputString() { SkipIfNotEnabled(); - var response = await _chatClient.CompleteAsync(""" + var response = await _chatClient.GetResponseAsync(""" The software developer, Jimbo Smith, is a 35-year-old from Cardiff, Wales. What's his full name? """); @@ -693,7 +693,7 @@ public virtual async Task CompleteAsync_StructuredOutputBool_True() { SkipIfNotEnabled(); - var response = await _chatClient.CompleteAsync(""" + var response = await _chatClient.GetResponseAsync(""" Jimbo Smith is a 35-year-old software developer from Cardiff, Wales. Is there at least one software developer from Cardiff? """); @@ -706,7 +706,7 @@ public virtual async Task CompleteAsync_StructuredOutputBool_False() { SkipIfNotEnabled(); - var response = await _chatClient.CompleteAsync(""" + var response = await _chatClient.GetResponseAsync(""" Jimbo Smith is a 35-year-old software developer from Cardiff, Wales. Can we be sure that he is a medical doctor? """); @@ -719,7 +719,7 @@ public virtual async Task CompleteAsync_StructuredOutputEnum() { SkipIfNotEnabled(); - var response = await _chatClient.CompleteAsync(""" + var response = await _chatClient.GetResponseAsync(""" Taylor Swift is a famous singer and songwriter. What is her job? 
"""); @@ -740,7 +740,7 @@ public virtual async Task CompleteAsync_StructuredOutput_WithFunctions() }; using var chatClient = new FunctionInvokingChatClient(_chatClient); - var response = await chatClient.CompleteAsync( + var response = await chatClient.GetResponseAsync( "Who is person with ID 123?", new ChatOptions { Tools = [AIFunctionFactory.Create((int personId) => @@ -771,7 +771,7 @@ public virtual async Task CompleteAsync_StructuredOutput_Native() }) .Build(); - var response = await captureOutputChatClient.CompleteAsync(""" + var response = await captureOutputChatClient.GetResponseAsync(""" Supply a JSON object to represent Jimbo Smith from Cardiff. """, useNativeJsonSchema: true); diff --git a/test/Libraries/Microsoft.Extensions.AI.Integration.Tests/PromptBasedFunctionCallingChatClient.cs b/test/Libraries/Microsoft.Extensions.AI.Integration.Tests/PromptBasedFunctionCallingChatClient.cs index 150c984ff86..b442fa74b40 100644 --- a/test/Libraries/Microsoft.Extensions.AI.Integration.Tests/PromptBasedFunctionCallingChatClient.cs +++ b/test/Libraries/Microsoft.Extensions.AI.Integration.Tests/PromptBasedFunctionCallingChatClient.cs @@ -37,7 +37,7 @@ internal sealed class PromptBasedFunctionCallingChatClient(IChatClient innerClie DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull, }; - public override async Task CompleteAsync(IList chatMessages, ChatOptions? options = null, CancellationToken cancellationToken = default) + public override async Task GetResponseAsync(IList chatMessages, ChatOptions? options = null, CancellationToken cancellationToken = default) { // Our goal is to convert tools into a prompt describing them, then to detect tool calls in the // response and convert those into FunctionCallContent. 
@@ -78,7 +78,7 @@ public override async Task CompleteAsync(IList chat } } - var result = await base.CompleteAsync(chatMessages, options, cancellationToken); + var result = await base.GetResponseAsync(chatMessages, options, cancellationToken); if (result.Choices.FirstOrDefault()?.Text is { } content && content.IndexOf("", StringComparison.Ordinal) is int startPos && startPos >= 0) diff --git a/test/Libraries/Microsoft.Extensions.AI.Integration.Tests/ReducingChatClientTests.cs b/test/Libraries/Microsoft.Extensions.AI.Integration.Tests/ReducingChatClientTests.cs index e9ed67a81f0..b736d3524b6 100644 --- a/test/Libraries/Microsoft.Extensions.AI.Integration.Tests/ReducingChatClientTests.cs +++ b/test/Libraries/Microsoft.Extensions.AI.Integration.Tests/ReducingChatClientTests.cs @@ -33,7 +33,7 @@ public async Task Reduction_LimitsMessagesBasedOnTokenLimit() Assert.Collection(messages, m => Assert.StartsWith("Golden retrievers are quite active", m.Text, StringComparison.Ordinal), m => Assert.StartsWith("Are they good with kids?", m.Text, StringComparison.Ordinal)); - return Task.FromResult(new ChatCompletion([])); + return Task.FromResult(new ChatResponse([])); } }; @@ -51,7 +51,7 @@ public async Task Reduction_LimitsMessagesBasedOnTokenLimit() new ChatMessage(ChatRole.User, "Are they good with kids?"), ]; - await client.CompleteAsync(messages); + await client.GetResponseAsync(messages); Assert.Equal(5, messages.Count); } @@ -78,21 +78,21 @@ public ReducingChatClient(IChatClient innerClient, IChatReducer reducer, bool in } /// - public override async Task CompleteAsync( + public override async Task GetResponseAsync( IList chatMessages, ChatOptions? 
options = null, CancellationToken cancellationToken = default) { chatMessages = await GetChatMessagesToPropagate(chatMessages, cancellationToken).ConfigureAwait(false); - return await base.CompleteAsync(chatMessages, options, cancellationToken).ConfigureAwait(false); + return await base.GetResponseAsync(chatMessages, options, cancellationToken).ConfigureAwait(false); } /// - public override async IAsyncEnumerable CompleteStreamingAsync( + public override async IAsyncEnumerable GetStreamingResponseAsync( IList chatMessages, ChatOptions? options = null, [EnumeratorCancellation] CancellationToken cancellationToken = default) { chatMessages = await GetChatMessagesToPropagate(chatMessages, cancellationToken).ConfigureAwait(false); - await foreach (var update in base.CompleteStreamingAsync(chatMessages, options, cancellationToken).ConfigureAwait(false)) + await foreach (var update in base.GetStreamingResponseAsync(chatMessages, options, cancellationToken).ConfigureAwait(false)) { yield return update; } diff --git a/test/Libraries/Microsoft.Extensions.AI.Ollama.Tests/OllamaChatClientIntegrationTests.cs b/test/Libraries/Microsoft.Extensions.AI.Ollama.Tests/OllamaChatClientIntegrationTests.cs index 178411a7d13..09328dd8ce6 100644 --- a/test/Libraries/Microsoft.Extensions.AI.Ollama.Tests/OllamaChatClientIntegrationTests.cs +++ b/test/Libraries/Microsoft.Extensions.AI.Ollama.Tests/OllamaChatClientIntegrationTests.cs @@ -39,7 +39,7 @@ public async Task PromptBasedFunctionCalling_NoArgs() .Build(); var secretNumber = 42; - var response = await chatClient.CompleteAsync("What is the current secret number? Answer with digits only.", new ChatOptions + var response = await chatClient.GetResponseAsync("What is the current secret number? 
Answer with digits only.", new ChatOptions { ModelId = "llama3:8b", Tools = [AIFunctionFactory.Create(() => secretNumber, "GetSecretNumber")], @@ -75,7 +75,7 @@ public async Task PromptBasedFunctionCalling_WithArgs() var didCallIrrelevantTool = false; var irrelevantTool = AIFunctionFactory.Create(() => { didCallIrrelevantTool = true; return 123; }, "GetSecretNumber"); - var response = await chatClient.CompleteAsync("What's the stock price for Microsoft in British pounds?", new ChatOptions + var response = await chatClient.GetResponseAsync("What's the stock price for Microsoft in British pounds?", new ChatOptions { Tools = [stockPriceTool, irrelevantTool], Temperature = 0, @@ -98,20 +98,20 @@ public async Task InvalidModelParameter_ThrowsInvalidOperationException() using var chatClient = new OllamaChatClient(endpoint, modelId: "inexistent-model"); InvalidOperationException ex; - ex = await Assert.ThrowsAsync(() => chatClient.CompleteAsync("Hello, world!")); + ex = await Assert.ThrowsAsync(() => chatClient.GetResponseAsync("Hello, world!")); Assert.Contains("inexistent-model", ex.Message); - ex = await Assert.ThrowsAsync(() => chatClient.CompleteStreamingAsync("Hello, world!").ToChatCompletionAsync()); + ex = await Assert.ThrowsAsync(() => chatClient.GetStreamingResponseAsync("Hello, world!").ToChatResponseAsync()); Assert.Contains("inexistent-model", ex.Message); } private sealed class AssertNoToolsDefinedChatClient(IChatClient innerClient) : DelegatingChatClient(innerClient) { - public override Task CompleteAsync( + public override Task GetResponseAsync( IList chatMessages, ChatOptions? 
options = null, CancellationToken cancellationToken = default) { Assert.Null(options?.Tools); - return base.CompleteAsync(chatMessages, options, cancellationToken); + return base.GetResponseAsync(chatMessages, options, cancellationToken); } } } diff --git a/test/Libraries/Microsoft.Extensions.AI.Ollama.Tests/OllamaChatClientTests.cs b/test/Libraries/Microsoft.Extensions.AI.Ollama.Tests/OllamaChatClientTests.cs index c8816d5cf85..439ca29a3ec 100644 --- a/test/Libraries/Microsoft.Extensions.AI.Ollama.Tests/OllamaChatClientTests.cs +++ b/test/Libraries/Microsoft.Extensions.AI.Ollama.Tests/OllamaChatClientTests.cs @@ -111,7 +111,7 @@ public async Task BasicRequestResponse_NonStreaming() using VerbatimHttpHandler handler = new(Input, Output); using HttpClient httpClient = new(handler); using OllamaChatClient client = new("http://localhost:11434", "llama3.1", httpClient); - var response = await client.CompleteAsync("hello", new() + var response = await client.GetResponseAsync("hello", new() { MaxOutputTokens = 10, Temperature = 0.5f, @@ -170,8 +170,8 @@ public async Task BasicRequestResponse_Streaming() using HttpClient httpClient = new(handler); using IChatClient client = new OllamaChatClient("http://localhost:11434", "llama3.1", httpClient); - List updates = []; - await foreach (var update in client.CompleteStreamingAsync("hello", new() + List updates = []; + await foreach (var update in client.GetStreamingResponseAsync("hello", new() { MaxOutputTokens = 20, Temperature = 0.5f, @@ -186,7 +186,7 @@ public async Task BasicRequestResponse_Streaming() for (int i = 0; i < updates.Count; i++) { - Assert.NotNull(updates[i].CompletionId); + Assert.NotNull(updates[i].ResponseId); Assert.Equal(i < updates.Count - 1 ? 1 : 2, updates[i].Contents.Count); Assert.Equal(ChatRole.Assistant, updates[i].Role); Assert.Equal("llama3.1", updates[i].ModelId); @@ -264,7 +264,7 @@ public async Task MultipleMessages_NonStreaming() new(ChatRole.User, "i'm good. 
how are you?"), ]; - var response = await client.CompleteAsync(messages, new() + var response = await client.GetResponseAsync(messages, new() { ModelId = "llama3.1", Temperature = 0.25f, @@ -364,7 +364,7 @@ public async Task FunctionCallContent_NonStreaming() ToolCallJsonSerializerOptions = TestJsonSerializerContext.Default.Options, }; - var response = await client.CompleteAsync("How old is Alice?", new() + var response = await client.GetResponseAsync("How old is Alice?", new() { Tools = [AIFunctionFactory.Create(([Description("The person whose age is being requested")] string personName) => 42, "GetPersonAge", "Gets the age of the specified person.")], }); @@ -456,7 +456,7 @@ public async Task FunctionResultContent_NonStreaming() ToolCallJsonSerializerOptions = TestJsonSerializerContext.Default.Options, }; - var response = await client.CompleteAsync( + var response = await client.GetResponseAsync( [ new(ChatRole.User, "How old is Alice?"), new(ChatRole.Assistant, [new FunctionCallContent("abcd1234", "GetPersonAge", new Dictionary { ["personName"] = "Alice" })]), diff --git a/test/Libraries/Microsoft.Extensions.AI.OpenAI.Tests/OpenAIChatClientTests.cs b/test/Libraries/Microsoft.Extensions.AI.OpenAI.Tests/OpenAIChatClientTests.cs index 2aaaeec9e0d..789f0abeb63 100644 --- a/test/Libraries/Microsoft.Extensions.AI.OpenAI.Tests/OpenAIChatClientTests.cs +++ b/test/Libraries/Microsoft.Extensions.AI.OpenAI.Tests/OpenAIChatClientTests.cs @@ -187,14 +187,14 @@ public async Task BasicRequestResponse_NonStreaming() using HttpClient httpClient = new(handler); using IChatClient client = CreateChatClient(httpClient, "gpt-4o-mini"); - var response = await client.CompleteAsync("hello", new() + var response = await client.GetResponseAsync("hello", new() { MaxOutputTokens = 10, Temperature = 0.5f, }); Assert.NotNull(response); - Assert.Equal("chatcmpl-ADx3PvAnCwJg0woha4pYsBTi3ZpOI", response.CompletionId); + Assert.Equal("chatcmpl-ADx3PvAnCwJg0woha4pYsBTi3ZpOI", response.ResponseId); 
Assert.Equal("Hello! How can I assist you today?", response.Message.Text); Assert.Single(response.Message.Contents); Assert.Equal(ChatRole.Assistant, response.Message.Role); @@ -217,7 +217,7 @@ public async Task BasicRequestResponse_NonStreaming() }, response.Usage.AdditionalCounts); Assert.NotNull(response.AdditionalProperties); - Assert.Equal("fp_f85bea6784", response.AdditionalProperties[nameof(OpenAI.Chat.ChatCompletion.SystemFingerprint)]); + Assert.Equal("fp_f85bea6784", response.AdditionalProperties[nameof(ChatCompletion.SystemFingerprint)]); } [Fact] @@ -267,8 +267,8 @@ public async Task BasicRequestResponse_Streaming() using HttpClient httpClient = new(handler); using IChatClient client = CreateChatClient(httpClient, "gpt-4o-mini"); - List updates = []; - await foreach (var update in client.CompleteStreamingAsync("hello", new() + List updates = []; + await foreach (var update in client.GetStreamingResponseAsync("hello", new() { MaxOutputTokens = 20, Temperature = 0.5f, @@ -283,12 +283,12 @@ public async Task BasicRequestResponse_Streaming() Assert.Equal(12, updates.Count); for (int i = 0; i < updates.Count; i++) { - Assert.Equal("chatcmpl-ADxFKtX6xIwdWRN42QvBj2u1RZpCK", updates[i].CompletionId); + Assert.Equal("chatcmpl-ADxFKtX6xIwdWRN42QvBj2u1RZpCK", updates[i].ResponseId); Assert.Equal(createdAt, updates[i].CreatedAt); Assert.Equal("gpt-4o-mini-2024-07-18", updates[i].ModelId); Assert.Equal(ChatRole.Assistant, updates[i].Role); Assert.NotNull(updates[i].AdditionalProperties); - Assert.Equal("fp_f85bea6784", updates[i].AdditionalProperties![nameof(OpenAI.Chat.ChatCompletion.SystemFingerprint)]); + Assert.Equal("fp_f85bea6784", updates[i].AdditionalProperties![nameof(ChatCompletion.SystemFingerprint)]); Assert.Equal(i == 10 ? 0 : 1, updates[i].Contents.Count); Assert.Equal(i < 10 ? 
null : ChatFinishReason.Stop, updates[i].FinishReason); } @@ -347,7 +347,7 @@ public async Task NonStronglyTypedOptions_AllSent() using HttpClient httpClient = new(handler); using IChatClient client = CreateChatClient(httpClient, "gpt-4o-mini"); - Assert.NotNull(await client.CompleteAsync("hello", new() + Assert.NotNull(await client.GetResponseAsync("hello", new() { AdditionalProperties = new() { @@ -444,7 +444,7 @@ public async Task MultipleMessages_NonStreaming() new(ChatRole.User, "i'm good. how are you?"), ]; - var response = await client.CompleteAsync(messages, new() + var response = await client.GetResponseAsync(messages, new() { Temperature = 0.25f, FrequencyPenalty = 0.75f, @@ -454,7 +454,7 @@ public async Task MultipleMessages_NonStreaming() }); Assert.NotNull(response); - Assert.Equal("chatcmpl-ADyV17bXeSm5rzUx3n46O7m3M0o3P", response.CompletionId); + Assert.Equal("chatcmpl-ADyV17bXeSm5rzUx3n46O7m3M0o3P", response.ResponseId); Assert.Equal("I’m doing well, thank you! What’s on your mind today?", response.Message.Text); Assert.Single(response.Message.Contents); Assert.Equal(ChatRole.Assistant, response.Message.Role); @@ -477,7 +477,7 @@ public async Task MultipleMessages_NonStreaming() }, response.Usage.AdditionalCounts); Assert.NotNull(response.AdditionalProperties); - Assert.Equal("fp_f85bea6784", response.AdditionalProperties[nameof(OpenAI.Chat.ChatCompletion.SystemFingerprint)]); + Assert.Equal("fp_f85bea6784", response.AdditionalProperties[nameof(ChatCompletion.SystemFingerprint)]); } [Fact] @@ -551,10 +551,10 @@ public async Task MultiPartSystemMessage_NonStreaming() new(ChatRole.User, "hello!"), ]; - var response = await client.CompleteAsync(messages); + var response = await client.GetResponseAsync(messages); Assert.NotNull(response); - Assert.Equal("chatcmpl-ADyV17bXeSm5rzUx3n46O7m3M0o3P", response.CompletionId); + Assert.Equal("chatcmpl-ADyV17bXeSm5rzUx3n46O7m3M0o3P", response.ResponseId); Assert.Equal("Hi! 
It's so good to hear from you!", response.Message.Text); Assert.Single(response.Message.Contents); Assert.Equal(ChatRole.Assistant, response.Message.Role); @@ -577,7 +577,7 @@ public async Task MultiPartSystemMessage_NonStreaming() }, response.Usage.AdditionalCounts); Assert.NotNull(response.AdditionalProperties); - Assert.Equal("fp_f85bea6784", response.AdditionalProperties[nameof(OpenAI.Chat.ChatCompletion.SystemFingerprint)]); + Assert.Equal("fp_f85bea6784", response.AdditionalProperties[nameof(ChatCompletion.SystemFingerprint)]); } [Fact] @@ -652,10 +652,10 @@ public async Task EmptyAssistantMessage_NonStreaming() new(ChatRole.User, "i'm good. how are you?"), ]; - var response = await client.CompleteAsync(messages); + var response = await client.GetResponseAsync(messages); Assert.NotNull(response); - Assert.Equal("chatcmpl-ADyV17bXeSm5rzUx3n46O7m3M0o3P", response.CompletionId); + Assert.Equal("chatcmpl-ADyV17bXeSm5rzUx3n46O7m3M0o3P", response.ResponseId); Assert.Equal("I’m doing well, thank you! 
What’s on your mind today?", response.Message.Text); Assert.Single(response.Message.Contents); Assert.Equal(ChatRole.Assistant, response.Message.Role); @@ -678,7 +678,7 @@ public async Task EmptyAssistantMessage_NonStreaming() }, response.Usage.AdditionalCounts); Assert.NotNull(response.AdditionalProperties); - Assert.Equal("fp_f85bea6784", response.AdditionalProperties[nameof(OpenAI.Chat.ChatCompletion.SystemFingerprint)]); + Assert.Equal("fp_f85bea6784", response.AdditionalProperties[nameof(ChatCompletion.SystemFingerprint)]); } [Fact] @@ -765,7 +765,7 @@ public async Task FunctionCallContent_NonStreaming() using HttpClient httpClient = new(handler); using IChatClient client = CreateChatClient(httpClient, "gpt-4o-mini"); - var response = await client.CompleteAsync("How old is Alice?", new() + var response = await client.GetResponseAsync("How old is Alice?", new() { Tools = [AIFunctionFactory.Create(([Description("The person whose age is being requested")] string personName) => 42, "GetPersonAge", "Gets the age of the specified person.")], }); @@ -798,7 +798,7 @@ public async Task FunctionCallContent_NonStreaming() AssertExtensions.EqualFunctionCallParameters(new Dictionary { ["personName"] = "Alice" }, fcc.Arguments); Assert.NotNull(response.AdditionalProperties); - Assert.Equal("fp_f85bea6784", response.AdditionalProperties[nameof(OpenAI.Chat.ChatCompletion.SystemFingerprint)]); + Assert.Equal("fp_f85bea6784", response.AdditionalProperties[nameof(ChatCompletion.SystemFingerprint)]); } [Fact] @@ -869,8 +869,8 @@ public async Task FunctionCallContent_Streaming() using HttpClient httpClient = new(handler); using IChatClient client = CreateChatClient(httpClient, "gpt-4o-mini"); - List updates = []; - await foreach (var update in client.CompleteStreamingAsync("How old is Alice?", new() + List updates = []; + await foreach (var update in client.GetStreamingResponseAsync("How old is Alice?", new() { Tools = [AIFunctionFactory.Create(([Description("The person whose age 
is being requested")] string personName) => 42, "GetPersonAge", "Gets the age of the specified person.")], })) @@ -884,12 +884,12 @@ public async Task FunctionCallContent_Streaming() Assert.Equal(10, updates.Count); for (int i = 0; i < updates.Count; i++) { - Assert.Equal("chatcmpl-ADymNiWWeqCJqHNFXiI1QtRcLuXcl", updates[i].CompletionId); + Assert.Equal("chatcmpl-ADymNiWWeqCJqHNFXiI1QtRcLuXcl", updates[i].ResponseId); Assert.Equal(createdAt, updates[i].CreatedAt); Assert.Equal("gpt-4o-mini-2024-07-18", updates[i].ModelId); Assert.Equal(ChatRole.Assistant, updates[i].Role); Assert.NotNull(updates[i].AdditionalProperties); - Assert.Equal("fp_f85bea6784", updates[i].AdditionalProperties![nameof(OpenAI.Chat.ChatCompletion.SystemFingerprint)]); + Assert.Equal("fp_f85bea6784", updates[i].AdditionalProperties![nameof(ChatCompletion.SystemFingerprint)]); Assert.Equal(i < 7 ? null : ChatFinishReason.ToolCalls, updates[i].FinishReason); } @@ -1029,10 +1029,10 @@ public async Task AssistantMessageWithBothToolsAndContent_NonStreaming() new(ChatRole.User, "Thanks!"), ]; - var response = await client.CompleteAsync(messages); + var response = await client.GetResponseAsync(messages); Assert.NotNull(response); - Assert.Equal("chatcmpl-ADyV17bXeSm5rzUx3n46O7m3M0o3P", response.CompletionId); + Assert.Equal("chatcmpl-ADyV17bXeSm5rzUx3n46O7m3M0o3P", response.ResponseId); Assert.Equal("I’m doing well, thank you! 
What’s on your mind today?", response.Message.Text); Assert.Single(response.Message.Contents); Assert.Equal(ChatRole.Assistant, response.Message.Role); @@ -1055,7 +1055,7 @@ public async Task AssistantMessageWithBothToolsAndContent_NonStreaming() }, response.Usage.AdditionalCounts); Assert.NotNull(response.AdditionalProperties); - Assert.Equal("fp_f85bea6784", response.AdditionalProperties[nameof(OpenAI.Chat.ChatCompletion.SystemFingerprint)]); + Assert.Equal("fp_f85bea6784", response.AdditionalProperties[nameof(ChatCompletion.SystemFingerprint)]); } private static IChatClient CreateChatClient(HttpClient httpClient, string modelId) => diff --git a/test/Libraries/Microsoft.Extensions.AI.OpenAI.Tests/OpenAISerializationTests.cs b/test/Libraries/Microsoft.Extensions.AI.OpenAI.Tests/OpenAISerializationTests.cs index 22627731b16..205229f0cfd 100644 --- a/test/Libraries/Microsoft.Extensions.AI.OpenAI.Tests/OpenAISerializationTests.cs +++ b/test/Libraries/Microsoft.Extensions.AI.OpenAI.Tests/OpenAISerializationTests.cs @@ -464,7 +464,7 @@ public static async Task RequestDeserialization_ToolChatMessage() } [Fact] - public static async Task SerializeCompletion_SingleChoice() + public static async Task SerializeResponse_SingleChoice() { ChatMessage message = new() { @@ -482,9 +482,9 @@ public static async Task SerializeCompletion_SingleChoice() ] }; - ChatCompletion completion = new(message) + ChatResponse response = new(message) { - CompletionId = "chatcmpl-ADx3PvAnCwJg0woha4pYsBTi3ZpOI", + ResponseId = "chatcmpl-ADx3PvAnCwJg0woha4pYsBTi3ZpOI", ModelId = "gpt-4o-mini-2024-07-18", CreatedAt = DateTimeOffset.FromUnixTimeSeconds(1_727_888_631), FinishReason = ChatFinishReason.Stop, @@ -503,12 +503,12 @@ public static async Task SerializeCompletion_SingleChoice() }, AdditionalProperties = new() { - [nameof(OpenAI.Chat.ChatCompletion.SystemFingerprint)] = "fp_f85bea6784", + [nameof(ChatCompletion.SystemFingerprint)] = "fp_f85bea6784", } }; using MemoryStream stream = new(); - 
await OpenAISerializationHelpers.SerializeAsync(stream, completion); + await OpenAISerializationHelpers.SerializeAsync(stream, response); string result = Encoding.UTF8.GetString(stream.ToArray()); AssertJsonEqual(""" @@ -563,7 +563,7 @@ public static async Task SerializeCompletion_SingleChoice() } [Fact] - public static async Task SerializeCompletion_ManyChoices_ThrowsNotSupportedException() + public static async Task SerializeResponse_ManyChoices_ThrowsNotSupportedException() { ChatMessage message1 = new() { @@ -577,17 +577,17 @@ public static async Task SerializeCompletion_ManyChoices_ThrowsNotSupportedExcep Text = "Hey there! How can I help?", }; - ChatCompletion completion = new([message1, message2]); + ChatResponse response = new([message1, message2]); using MemoryStream stream = new(); - var ex = await Assert.ThrowsAsync(() => OpenAISerializationHelpers.SerializeAsync(stream, completion)); + var ex = await Assert.ThrowsAsync(() => OpenAISerializationHelpers.SerializeAsync(stream, response)); Assert.Contains("multiple choices", ex.Message); } [Fact] - public static async Task SerializeStreamingCompletion() + public static async Task SerializeStreamingResponse() { - static async IAsyncEnumerable CreateStreamingCompletion() + static async IAsyncEnumerable CreateStreamingResponse() { for (int i = 0; i < 5; i++) { @@ -626,9 +626,9 @@ static async IAsyncEnumerable CreateStreamingComp contents.Add(new UsageContent(usageDetails)); } - yield return new StreamingChatCompletionUpdate + yield return new ChatResponseUpdate { - CompletionId = "chatcmpl-ADymNiWWeqCJqHNFXiI1QtRcLuXcl", + ResponseId = "chatcmpl-ADymNiWWeqCJqHNFXiI1QtRcLuXcl", ModelId = "gpt-4o-mini-2024-07-18", CreatedAt = DateTimeOffset.FromUnixTimeSeconds(1_727_888_631), Role = ChatRole.Assistant, @@ -636,7 +636,7 @@ static async IAsyncEnumerable CreateStreamingComp FinishReason = i == 4 ? 
ChatFinishReason.Stop : null, AdditionalProperties = new() { - [nameof(OpenAI.Chat.ChatCompletion.SystemFingerprint)] = "fp_f85bea6784", + [nameof(ChatCompletion.SystemFingerprint)] = "fp_f85bea6784", }, }; @@ -645,7 +645,7 @@ static async IAsyncEnumerable CreateStreamingComp } using MemoryStream stream = new(); - await OpenAISerializationHelpers.SerializeStreamingAsync(stream, CreateStreamingCompletion()); + await OpenAISerializationHelpers.SerializeStreamingAsync(stream, CreateStreamingResponse()); string result = Encoding.UTF8.GetString(stream.ToArray()); AssertSseEqual(""" @@ -673,12 +673,12 @@ public static async Task SerializationHelpers_NullArguments_ThrowsArgumentNullEx await Assert.ThrowsAsync(() => OpenAISerializationHelpers.SerializeAsync(null!, new(new ChatMessage()))); await Assert.ThrowsAsync(() => OpenAISerializationHelpers.SerializeAsync(new MemoryStream(), null!)); - await Assert.ThrowsAsync(() => OpenAISerializationHelpers.SerializeStreamingAsync(null!, GetStreamingChatCompletion())); + await Assert.ThrowsAsync(() => OpenAISerializationHelpers.SerializeStreamingAsync(null!, GetStreamingChatResponse())); await Assert.ThrowsAsync(() => OpenAISerializationHelpers.SerializeStreamingAsync(new MemoryStream(), null!)); - static async IAsyncEnumerable GetStreamingChatCompletion() + static async IAsyncEnumerable GetStreamingChatResponse() { - yield return new StreamingChatCompletionUpdate(); + yield return new ChatResponseUpdate(); await Task.CompletedTask; } } @@ -691,11 +691,11 @@ public static async Task SerializationHelpers_HonorCancellationToken() await Assert.ThrowsAsync(() => OpenAISerializationHelpers.DeserializeChatCompletionRequestAsync(stream, cancellationToken: canceledToken)); await Assert.ThrowsAsync(() => OpenAISerializationHelpers.SerializeAsync(stream, new(new ChatMessage()), cancellationToken: canceledToken)); - await Assert.ThrowsAsync(() => OpenAISerializationHelpers.SerializeStreamingAsync(stream, GetStreamingChatCompletion(), 
cancellationToken: canceledToken)); + await Assert.ThrowsAsync(() => OpenAISerializationHelpers.SerializeStreamingAsync(stream, GetStreamingChatResponse(), cancellationToken: canceledToken)); - static async IAsyncEnumerable GetStreamingChatCompletion() + static async IAsyncEnumerable GetStreamingChatResponse() { - yield return new StreamingChatCompletionUpdate(); + yield return new ChatResponseUpdate(); await Task.CompletedTask; } } @@ -711,7 +711,7 @@ public static async Task SerializationHelpers_HonorJsonSerializerOptions() ["arg1"] = new SomeFunctionArgument(), }); - ChatCompletion completion = new(new ChatMessage + ChatResponse response = new(new ChatMessage { Role = ChatRole.Assistant, Contents = [fcc], @@ -720,23 +720,23 @@ public static async Task SerializationHelpers_HonorJsonSerializerOptions() using MemoryStream stream = new(); // Passing a JSO that contains a contract for the function argument results in successful serialization. - await OpenAISerializationHelpers.SerializeAsync(stream, completion, options: JsonContextWithFunctionArgument.Default.Options); + await OpenAISerializationHelpers.SerializeAsync(stream, response, options: JsonContextWithFunctionArgument.Default.Options); stream.Position = 0; - await OpenAISerializationHelpers.SerializeStreamingAsync(stream, GetStreamingCompletion(), options: JsonContextWithFunctionArgument.Default.Options); + await OpenAISerializationHelpers.SerializeStreamingAsync(stream, GetStreamingResponse(), options: JsonContextWithFunctionArgument.Default.Options); stream.Position = 0; // Passing a JSO without a contract for the function argument result in failed serialization. 
- await Assert.ThrowsAsync(() => OpenAISerializationHelpers.SerializeAsync(stream, completion, options: JsonContextWithoutFunctionArgument.Default.Options)); - await Assert.ThrowsAsync(() => OpenAISerializationHelpers.SerializeStreamingAsync(stream, GetStreamingCompletion(), options: JsonContextWithoutFunctionArgument.Default.Options)); + await Assert.ThrowsAsync(() => OpenAISerializationHelpers.SerializeAsync(stream, response, options: JsonContextWithoutFunctionArgument.Default.Options)); + await Assert.ThrowsAsync(() => OpenAISerializationHelpers.SerializeStreamingAsync(stream, GetStreamingResponse(), options: JsonContextWithoutFunctionArgument.Default.Options)); - async IAsyncEnumerable GetStreamingCompletion() + async IAsyncEnumerable GetStreamingResponse() { - yield return new StreamingChatCompletionUpdate + await Task.Yield(); + yield return new ChatResponseUpdate { Contents = [fcc], }; - await Task.CompletedTask; } } diff --git a/test/Libraries/Microsoft.Extensions.AI.Tests/ChatCompletion/ChatClientStructuredOutputExtensionsTests.cs b/test/Libraries/Microsoft.Extensions.AI.Tests/ChatCompletion/ChatClientStructuredOutputExtensionsTests.cs index aab999d12d3..a2c7d623f00 100644 --- a/test/Libraries/Microsoft.Extensions.AI.Tests/ChatCompletion/ChatClientStructuredOutputExtensionsTests.cs +++ b/test/Libraries/Microsoft.Extensions.AI.Tests/ChatCompletion/ChatClientStructuredOutputExtensionsTests.cs @@ -17,9 +17,9 @@ public class ChatClientStructuredOutputExtensionsTests public async Task SuccessUsage() { var expectedResult = new Animal { Id = 1, FullName = "Tigger", Species = Species.Tiger }; - var expectedCompletion = new ChatCompletion(new ChatMessage(ChatRole.Assistant, JsonSerializer.Serialize(expectedResult))) + var expectedResponse = new ChatResponse(new ChatMessage(ChatRole.Assistant, JsonSerializer.Serialize(expectedResult))) { - CompletionId = "test", + ResponseId = "test", CreatedAt = DateTimeOffset.UtcNow, ModelId = "someModel", RawRepresentation = new 
object(), @@ -49,22 +49,22 @@ public async Task SuccessUsage() } }); - return Task.FromResult(expectedCompletion); + return Task.FromResult(expectedResponse); }, }; var chatHistory = new List { new(ChatRole.User, "Hello") }; - var response = await client.CompleteAsync(chatHistory); + var response = await client.GetResponseAsync(chatHistory); - // The completion contains the deserialized result and other completion properties + // The response contains the deserialized result and other response properties Assert.Equal(1, response.Result.Id); Assert.Equal("Tigger", response.Result.FullName); Assert.Equal(Species.Tiger, response.Result.Species); - Assert.Equal(expectedCompletion.CompletionId, response.CompletionId); - Assert.Equal(expectedCompletion.CreatedAt, response.CreatedAt); - Assert.Equal(expectedCompletion.ModelId, response.ModelId); - Assert.Same(expectedCompletion.RawRepresentation, response.RawRepresentation); - Assert.Same(expectedCompletion.Usage, response.Usage); + Assert.Equal(expectedResponse.ResponseId, response.ResponseId); + Assert.Equal(expectedResponse.CreatedAt, response.CreatedAt); + Assert.Equal(expectedResponse.ModelId, response.ModelId); + Assert.Same(expectedResponse.RawRepresentation, response.RawRepresentation); + Assert.Same(expectedResponse.Usage, response.Usage); // TryGetResult returns the same value Assert.True(response.TryGetResult(out var tryGetResultOutput)); @@ -78,7 +78,7 @@ public async Task SuccessUsage() public async Task WrapsNonObjectValuesInDataProperty() { var expectedResult = new { data = 123 }; - var expectedCompletion = new ChatCompletion(new ChatMessage(ChatRole.Assistant, JsonSerializer.Serialize(expectedResult))); + var expectedResponse = new ChatResponse(new ChatMessage(ChatRole.Assistant, JsonSerializer.Serialize(expectedResult))); using var client = new TestChatClient { @@ -99,25 +99,25 @@ public async Task WrapsNonObjectValuesInDataProperty() "additionalProperties": false } """, 
suppliedSchemaMatch.Groups[1].Value.Trim()); - return Task.FromResult(expectedCompletion); + return Task.FromResult(expectedResponse); }, }; - var response = await client.CompleteAsync("Hello"); + var response = await client.GetResponseAsync("Hello"); Assert.Equal(123, response.Result); } [Fact] public async Task FailureUsage_InvalidJson() { - var expectedCompletion = new ChatCompletion(new ChatMessage(ChatRole.Assistant, "This is not valid JSON")); + var expectedResponse = new ChatResponse(new ChatMessage(ChatRole.Assistant, "This is not valid JSON")); using var client = new TestChatClient { - CompleteAsyncCallback = (messages, options, cancellationToken) => Task.FromResult(expectedCompletion), + CompleteAsyncCallback = (messages, options, cancellationToken) => Task.FromResult(expectedResponse), }; var chatHistory = new List { new(ChatRole.User, "Hello") }; - var response = await client.CompleteAsync(chatHistory); + var response = await client.GetResponseAsync(chatHistory); var ex = Assert.Throws(() => response.Result); Assert.Contains("invalid", ex.Message); @@ -129,14 +129,14 @@ public async Task FailureUsage_InvalidJson() [Fact] public async Task FailureUsage_NullJson() { - var expectedCompletion = new ChatCompletion(new ChatMessage(ChatRole.Assistant, "null")); + var expectedResponse = new ChatResponse(new ChatMessage(ChatRole.Assistant, "null")); using var client = new TestChatClient { - CompleteAsyncCallback = (messages, options, cancellationToken) => Task.FromResult(expectedCompletion), + CompleteAsyncCallback = (messages, options, cancellationToken) => Task.FromResult(expectedResponse), }; var chatHistory = new List { new(ChatRole.User, "Hello") }; - var response = await client.CompleteAsync(chatHistory); + var response = await client.GetResponseAsync(chatHistory); var ex = Assert.Throws(() => response.Result); Assert.Equal("The deserialized response is null", ex.Message); @@ -148,14 +148,14 @@ public async Task FailureUsage_NullJson() [Fact] public async 
Task FailureUsage_NoJsonInResponse() { - var expectedCompletion = new ChatCompletion(new ChatMessage(ChatRole.Assistant, [new DataContent("https://example.com")])); + var expectedResponse = new ChatResponse(new ChatMessage(ChatRole.Assistant, [new DataContent("https://example.com")])); using var client = new TestChatClient { - CompleteAsyncCallback = (messages, options, cancellationToken) => Task.FromResult(expectedCompletion), + CompleteAsyncCallback = (messages, options, cancellationToken) => Task.FromResult(expectedResponse), }; var chatHistory = new List { new(ChatRole.User, "Hello") }; - var response = await client.CompleteAsync(chatHistory); + var response = await client.GetResponseAsync(chatHistory); var ex = Assert.Throws(() => response.Result); Assert.Equal("The response did not contain text to be deserialized", ex.Message); @@ -168,7 +168,7 @@ public async Task FailureUsage_NoJsonInResponse() public async Task CanUseNativeStructuredOutput() { var expectedResult = new Animal { Id = 1, FullName = "Tigger", Species = Species.Tiger }; - var expectedCompletion = new ChatCompletion(new ChatMessage(ChatRole.Assistant, JsonSerializer.Serialize(expectedResult))); + var expectedResponse = new ChatResponse(new ChatMessage(ChatRole.Assistant, JsonSerializer.Serialize(expectedResult))); using var client = new TestChatClient { @@ -188,14 +188,14 @@ public async Task CanUseNativeStructuredOutput() // The chat history isn't mutated any further, since native structured output is used instead of a prompt Assert.Equal("Hello", Assert.Single(messages).Text); - return Task.FromResult(expectedCompletion); + return Task.FromResult(expectedResponse); }, }; var chatHistory = new List { new(ChatRole.User, "Hello") }; - var response = await client.CompleteAsync(chatHistory, useNativeJsonSchema: true); + var response = await client.GetResponseAsync(chatHistory, useNativeJsonSchema: true); - // The completion contains the deserialized result and other completion properties + // The 
response contains the deserialized result and other response properties Assert.Equal(1, response.Result.Id); Assert.Equal("Tigger", response.Result.FullName); Assert.Equal(Species.Tiger, response.Result.Species); @@ -212,7 +212,7 @@ public async Task CanUseNativeStructuredOutput() public async Task CanUseNativeStructuredOutputWithSanitizedTypeName() { var expectedResult = new Data { Value = new Animal { Id = 1, FullName = "Tigger", Species = Species.Tiger } }; - var expectedCompletion = new ChatCompletion(new ChatMessage(ChatRole.Assistant, JsonSerializer.Serialize(expectedResult))); + var expectedResponse = new ChatResponse(new ChatMessage(ChatRole.Assistant, JsonSerializer.Serialize(expectedResult))); using var client = new TestChatClient { @@ -222,14 +222,14 @@ public async Task CanUseNativeStructuredOutputWithSanitizedTypeName() Assert.Matches("Data_1", responseFormat.SchemaName); - return Task.FromResult(expectedCompletion); + return Task.FromResult(expectedResponse); }, }; var chatHistory = new List { new(ChatRole.User, "Hello") }; - var response = await client.CompleteAsync>(chatHistory, useNativeJsonSchema: true); + var response = await client.GetResponseAsync>(chatHistory, useNativeJsonSchema: true); - // The completion contains the deserialized result and other completion properties + // The response contains the deserialized result and other response properties Assert.Equal(1, response.Result!.Value!.Id); Assert.Equal("Tigger", response.Result.Value.FullName); Assert.Equal(Species.Tiger, response.Result.Value.Species); @@ -247,17 +247,17 @@ public async Task CanUseNativeStructuredOutputWithArray() { var expectedResult = new[] { new Animal { Id = 1, FullName = "Tigger", Species = Species.Tiger } }; var payload = new { data = expectedResult }; - var expectedCompletion = new ChatCompletion(new ChatMessage(ChatRole.Assistant, JsonSerializer.Serialize(payload))); + var expectedResponse = new ChatResponse(new ChatMessage(ChatRole.Assistant, 
JsonSerializer.Serialize(payload))); using var client = new TestChatClient { - CompleteAsyncCallback = (messages, options, cancellationToken) => Task.FromResult(expectedCompletion) + CompleteAsyncCallback = (messages, options, cancellationToken) => Task.FromResult(expectedResponse) }; var chatHistory = new List { new(ChatRole.User, "Hello") }; - var response = await client.CompleteAsync(chatHistory, useNativeJsonSchema: true); + var response = await client.GetResponseAsync(chatHistory, useNativeJsonSchema: true); - // The completion contains the deserialized result and other completion properties + // The response contains the deserialized result and other response properties Assert.Single(response.Result!); Assert.Equal("Tigger", response.Result[0].FullName); Assert.Equal(Species.Tiger, response.Result[0].Species); @@ -278,7 +278,7 @@ public async Task CanSpecifyCustomJsonSerializationOptions() PropertyNamingPolicy = JsonNamingPolicy.SnakeCaseLower, }; var expectedResult = new Animal { Id = 1, FullName = "Tigger", Species = Species.Tiger }; - var expectedCompletion = new ChatCompletion(new ChatMessage(ChatRole.Assistant, JsonSerializer.Serialize(expectedResult, jso))); + var expectedResponse = new ChatResponse(new ChatMessage(ChatRole.Assistant, JsonSerializer.Serialize(expectedResult, jso))); using var client = new TestChatClient { @@ -296,14 +296,14 @@ public async Task CanSpecifyCustomJsonSerializationOptions() Assert.DoesNotContain(nameof(Species.Tiger), message.Text); // The JSO doesn't use enum-to-string conversion }); - return Task.FromResult(expectedCompletion); + return Task.FromResult(expectedResponse); }, }; var chatHistory = new List { new(ChatRole.User, "Hello") }; - var response = await client.CompleteAsync(chatHistory, jso); + var response = await client.GetResponseAsync(chatHistory, jso); - // The completion contains the deserialized result and other completion properties + // The response contains the deserialized result and other response 
properties Assert.Equal(1, response.Result.Id); Assert.Equal("Tigger", response.Result.FullName); Assert.Equal(Species.Tiger, response.Result.Species); @@ -324,14 +324,14 @@ public async Task HandlesBackendReturningMultipleObjects() { CompleteAsyncCallback = (messages, options, cancellationToken) => { - return Task.FromResult(new ChatCompletion(new ChatMessage(ChatRole.Assistant, resultDuplicatedJson))); + return Task.FromResult(new ChatResponse(new ChatMessage(ChatRole.Assistant, resultDuplicatedJson))); }, }; var chatHistory = new List { new(ChatRole.User, "Hello") }; - var response = await client.CompleteAsync(chatHistory); + var response = await client.GetResponseAsync(chatHistory); - // The completion contains the deserialized result and other completion properties + // The response contains the deserialized result and other response properties Assert.Equal(1, response.Result.Id); Assert.Equal("Tigger", response.Result.FullName); Assert.Equal(Species.Tiger, response.Result.Species); diff --git a/test/Libraries/Microsoft.Extensions.AI.Tests/ChatCompletion/ConfigureOptionsChatClientTests.cs b/test/Libraries/Microsoft.Extensions.AI.Tests/ChatCompletion/ConfigureOptionsChatClientTests.cs index 8ceb16da329..77e6b6bf653 100644 --- a/test/Libraries/Microsoft.Extensions.AI.Tests/ChatCompletion/ConfigureOptionsChatClientTests.cs +++ b/test/Libraries/Microsoft.Extensions.AI.Tests/ChatCompletion/ConfigureOptionsChatClientTests.cs @@ -34,8 +34,8 @@ public async Task ConfigureOptions_ReturnedInstancePassedToNextClient(bool nullP { ChatOptions? providedOptions = nullProvidedOptions ? null : new() { ModelId = "test" }; ChatOptions? 
returnedOptions = null; - ChatCompletion expectedCompletion = new(Array.Empty()); - var expectedUpdates = Enumerable.Range(0, 3).Select(i => new StreamingChatCompletionUpdate()).ToArray(); + ChatResponse expectedResponse = new(Array.Empty()); + var expectedUpdates = Enumerable.Range(0, 3).Select(i => new ChatResponseUpdate()).ToArray(); using CancellationTokenSource cts = new(); using IChatClient innerClient = new TestChatClient @@ -44,7 +44,7 @@ public async Task ConfigureOptions_ReturnedInstancePassedToNextClient(bool nullP { Assert.Same(returnedOptions, options); Assert.Equal(cts.Token, cancellationToken); - return Task.FromResult(expectedCompletion); + return Task.FromResult(expectedResponse); }, CompleteStreamingAsyncCallback = (messages, options, cancellationToken) => @@ -73,11 +73,11 @@ public async Task ConfigureOptions_ReturnedInstancePassedToNextClient(bool nullP }) .Build(); - var completion = await client.CompleteAsync(Array.Empty(), providedOptions, cts.Token); - Assert.Same(expectedCompletion, completion); + var response = await client.GetResponseAsync(Array.Empty(), providedOptions, cts.Token); + Assert.Same(expectedResponse, response); int i = 0; - await using var e = client.CompleteStreamingAsync(Array.Empty(), providedOptions, cts.Token).GetAsyncEnumerator(); + await using var e = client.GetStreamingResponseAsync(Array.Empty(), providedOptions, cts.Token).GetAsyncEnumerator(); while (i < expectedUpdates.Length) { Assert.True(await e.MoveNextAsync()); @@ -86,7 +86,7 @@ public async Task ConfigureOptions_ReturnedInstancePassedToNextClient(bool nullP Assert.False(await e.MoveNextAsync()); - static async IAsyncEnumerable YieldUpdates(StreamingChatCompletionUpdate[] updates) + static async IAsyncEnumerable YieldUpdates(ChatResponseUpdate[] updates) { foreach (var update in updates) { diff --git a/test/Libraries/Microsoft.Extensions.AI.Tests/ChatCompletion/DistributedCachingChatClientTest.cs 
b/test/Libraries/Microsoft.Extensions.AI.Tests/ChatCompletion/DistributedCachingChatClientTest.cs index 730d0739f9d..8ec8546d16b 100644 --- a/test/Libraries/Microsoft.Extensions.AI.Tests/ChatCompletion/DistributedCachingChatClientTest.cs +++ b/test/Libraries/Microsoft.Extensions.AI.Tests/ChatCompletion/DistributedCachingChatClientTest.cs @@ -39,7 +39,7 @@ public async Task CachesSuccessResultsAsync() // Verify that all the expected properties will round-trip through the cache, // even if this involves serialization - var expectedCompletion = new ChatCompletion([ + var expectedResponse = new ChatResponse([ new(new ChatRole("fakeRole"), "This is some content") { AdditionalProperties = new() { ["a"] = "b" }, @@ -55,7 +55,7 @@ public async Task CachesSuccessResultsAsync() } ]) { - CompletionId = "someId", + ResponseId = "someId", Usage = new() { InputTokenCount = 123, @@ -74,7 +74,7 @@ public async Task CachesSuccessResultsAsync() CompleteAsyncCallback = delegate { innerCallCount++; - return Task.FromResult(expectedCompletion); + return Task.FromResult(expectedResponse); } }; using var outer = new DistributedCachingChatClient(testClient, _storage) @@ -83,19 +83,19 @@ public async Task CachesSuccessResultsAsync() }; // Make the initial request and do a quick sanity check - var result1 = await outer.CompleteAsync("some input"); - Assert.Same(expectedCompletion, result1); + var result1 = await outer.GetResponseAsync("some input"); + Assert.Same(expectedResponse, result1); Assert.Equal(1, innerCallCount); // Act - var result2 = await outer.CompleteAsync("some input"); + var result2 = await outer.GetResponseAsync("some input"); // Assert Assert.Equal(1, innerCallCount); - AssertCompletionsEqual(expectedCompletion, result2); + AssertResponsesEqual(expectedResponse, result2); // Act/Assert 2: Cache misses do not return cached results - await outer.CompleteAsync("some modified input"); + await outer.GetResponseAsync("some modified input"); Assert.Equal(2, innerCallCount); } @@ 
-111,7 +111,7 @@ public async Task AllowsConcurrentCallsAsync() { innerCallCount++; await completionTcs.Task; - return new ChatCompletion([new(ChatRole.Assistant, "Hello")]); + return new ChatResponse([new(ChatRole.Assistant, "Hello")]); } }; using var outer = new DistributedCachingChatClient(testClient, _storage) @@ -120,8 +120,8 @@ public async Task AllowsConcurrentCallsAsync() }; // Act 1: Concurrent calls before resolution are passed into the inner client - var result1 = outer.CompleteAsync("some input"); - var result2 = outer.CompleteAsync("some input"); + var result1 = outer.GetResponseAsync("some input"); + var result2 = outer.GetResponseAsync("some input"); // Assert 1 Assert.Equal(2, innerCallCount); @@ -132,7 +132,7 @@ public async Task AllowsConcurrentCallsAsync() Assert.Equal("Hello", (await result2).Message.Text); // Act 2: Subsequent calls after completion are resolved from the cache - var result3 = outer.CompleteAsync("some input"); + var result3 = outer.GetResponseAsync("some input"); Assert.Equal(2, innerCallCount); Assert.Equal("Hello", (await result3).Message.Text); } @@ -156,12 +156,12 @@ public async Task DoesNotCacheExceptionResultsAsync() }; var input = new ChatMessage(ChatRole.User, "abc"); - var ex1 = await Assert.ThrowsAsync(() => outer.CompleteAsync([input])); + var ex1 = await Assert.ThrowsAsync(() => outer.GetResponseAsync([input])); Assert.Equal("some failure", ex1.Message); Assert.Equal(1, innerCallCount); // Act - var ex2 = await Assert.ThrowsAsync(() => outer.CompleteAsync([input])); + var ex2 = await Assert.ThrowsAsync(() => outer.GetResponseAsync([input])); // Assert Assert.NotSame(ex1, ex2); @@ -185,7 +185,7 @@ public async Task DoesNotCacheCanceledResultsAsync() await resolutionTcs.Task; } - return new ChatCompletion([new(ChatRole.Assistant, "A good result")]); + return new ChatResponse([new(ChatRole.Assistant, "A good result")]); } }; using var outer = new DistributedCachingChatClient(testClient, _storage) @@ -195,7 +195,7 @@ 
public async Task DoesNotCacheCanceledResultsAsync() // First call gets cancelled var input = new ChatMessage(ChatRole.User, "abc"); - var result1 = outer.CompleteAsync([input]); + var result1 = outer.GetResponseAsync([input]); Assert.False(result1.IsCompleted); Assert.Equal(1, innerCallCount); resolutionTcs.SetCanceled(); @@ -203,7 +203,7 @@ public async Task DoesNotCacheCanceledResultsAsync() Assert.True(result1.IsCanceled); // Act/Assert: Second call can succeed - var result2 = await outer.CompleteAsync([input]); + var result2 = await outer.GetResponseAsync([input]); Assert.Equal(2, innerCallCount); Assert.Equal("A good result", result2.Message.Text); } @@ -215,7 +215,7 @@ public async Task StreamingCachesSuccessResultsAsync() // Verify that all the expected properties will round-trip through the cache, // even if this involves serialization - List actualCompletion = + List actualUpdate = [ new() { @@ -235,7 +235,7 @@ public async Task StreamingCachesSuccessResultsAsync() } ]; - List expectedCachedCompletion = + List expectedCachedResponse = [ new() { @@ -261,7 +261,7 @@ public async Task StreamingCachesSuccessResultsAsync() CompleteStreamingAsyncCallback = delegate { innerCallCount++; - return ToAsyncEnumerableAsync(actualCompletion); + return ToAsyncEnumerableAsync(actualUpdate); } }; using var outer = new DistributedCachingChatClient(testClient, _storage) @@ -270,19 +270,19 @@ public async Task StreamingCachesSuccessResultsAsync() }; // Make the initial request and do a quick sanity check - var result1 = outer.CompleteStreamingAsync("some input"); - await AssertCompletionsEqualAsync(actualCompletion, result1); + var result1 = outer.GetStreamingResponseAsync("some input"); + await AssertResponsesEqualAsync(actualUpdate, result1); Assert.Equal(1, innerCallCount); // Act - var result2 = outer.CompleteStreamingAsync("some input"); + var result2 = outer.GetStreamingResponseAsync("some input"); // Assert Assert.Equal(1, innerCallCount); - await 
AssertCompletionsEqualAsync(expectedCachedCompletion, result2); + await AssertResponsesEqualAsync(expectedCachedResponse, result2); // Act/Assert 2: Cache misses do not return cached results - await ToListAsync(outer.CompleteStreamingAsync("some modified input")); + await ToListAsync(outer.GetStreamingResponseAsync("some modified input")); Assert.Equal(2, innerCallCount); } @@ -293,7 +293,7 @@ public async Task StreamingCachesSuccessResultsAsync() public async Task StreamingCoalescesConsecutiveTextChunksAsync(bool? coalesce) { // Arrange - List expectedCompletion = + List expectedResponse = [ new() { Role = ChatRole.Assistant, Text = "This" }, new() { Role = ChatRole.Assistant, Text = " becomes one chunk" }, @@ -305,7 +305,7 @@ public async Task StreamingCoalescesConsecutiveTextChunksAsync(bool? coalesce) using var testClient = new TestChatClient { - CompleteStreamingAsyncCallback = delegate { return ToAsyncEnumerableAsync(expectedCompletion); } + CompleteStreamingAsyncCallback = delegate { return ToAsyncEnumerableAsync(expectedResponse); } }; using var outer = new DistributedCachingChatClient(testClient, _storage) { @@ -317,16 +317,16 @@ public async Task StreamingCoalescesConsecutiveTextChunksAsync(bool? coalesce) outer.CoalesceStreamingUpdates = coalesce.Value; } - var result1 = outer.CompleteStreamingAsync("some input"); + var result1 = outer.GetStreamingResponseAsync("some input"); await ToListAsync(result1); // Act - var result2 = outer.CompleteStreamingAsync("some input"); + var result2 = outer.GetStreamingResponseAsync("some input"); // Assert if (coalesce is null or true) { - StreamingChatCompletionUpdate update = Assert.Single(await ToListAsync(result2)); + ChatResponseUpdate update = Assert.Single(await ToListAsync(result2)); Assert.Collection(update.Contents, c => Assert.Equal("This becomes one chunk", Assert.IsType(c).Text), c => Assert.IsType(c), @@ -348,7 +348,7 @@ public async Task StreamingCoalescesConsecutiveTextChunksAsync(bool? 
coalesce) public async Task StreamingCoalescingPropagatesMetadataAsync() { // Arrange - List expectedCompletion = + List expectedResponse = [ new() { Role = ChatRole.Assistant, Contents = [new TextContent("Hello")] }, new() { Role = ChatRole.Assistant, Contents = [new TextContent(" world, ")] }, @@ -374,7 +374,7 @@ public async Task StreamingCoalescingPropagatesMetadataAsync() } ], CreatedAt = DateTime.Parse("2024-10-11T19:23:36.0152137Z"), - CompletionId = "12345", + ResponseId = "12345", AuthorName = "Someone", FinishReason = ChatFinishReason.Length, }, @@ -382,24 +382,24 @@ public async Task StreamingCoalescingPropagatesMetadataAsync() using var testClient = new TestChatClient { - CompleteStreamingAsyncCallback = delegate { return ToAsyncEnumerableAsync(expectedCompletion); } + CompleteStreamingAsyncCallback = delegate { return ToAsyncEnumerableAsync(expectedResponse); } }; using var outer = new DistributedCachingChatClient(testClient, _storage) { JsonSerializerOptions = TestJsonSerializerContext.Default.Options }; - var result1 = outer.CompleteStreamingAsync("some input"); + var result1 = outer.GetStreamingResponseAsync("some input"); await ToListAsync(result1); // Act - var result2 = outer.CompleteStreamingAsync("some input"); + var result2 = outer.GetStreamingResponseAsync("some input"); // Assert var items = await ToListAsync(result2); var item = Assert.Single(items); Assert.Equal("Hello world, how are you?", item.Text); - Assert.Equal("12345", item.CompletionId); + Assert.Equal("12345", item.ResponseId); Assert.Equal("Someone", item.AuthorName); Assert.Equal(ChatFinishReason.Length, item.FinishReason); Assert.Equal(DateTime.Parse("2024-10-11T19:23:36.0152137Z"), item.CreatedAt); @@ -414,7 +414,7 @@ public async Task StreamingAllowsConcurrentCallsAsync() // Arrange var innerCallCount = 0; var completionTcs = new TaskCompletionSource(); - List expectedCompletion = + List expectedResponse = [ new() { Role = ChatRole.Assistant, Text = "Chunk 1" }, ]; @@ -423,7 
+423,7 @@ public async Task StreamingAllowsConcurrentCallsAsync() CompleteStreamingAsyncCallback = delegate { innerCallCount++; - return ToAsyncEnumerableAsync(completionTcs.Task, expectedCompletion); + return ToAsyncEnumerableAsync(completionTcs.Task, expectedResponse); } }; using var outer = new DistributedCachingChatClient(testClient, _storage) @@ -432,13 +432,13 @@ public async Task StreamingAllowsConcurrentCallsAsync() }; // Act 1: Concurrent calls before resolution are passed into the inner client - var result1 = outer.CompleteStreamingAsync("some input"); - var result2 = outer.CompleteStreamingAsync("some input"); + var result1 = outer.GetStreamingResponseAsync("some input"); + var result2 = outer.GetStreamingResponseAsync("some input"); // Assert 1 Assert.NotSame(result1, result2); - var result1Assertion = AssertCompletionsEqualAsync(expectedCompletion, result1); - var result2Assertion = AssertCompletionsEqualAsync(expectedCompletion, result2); + var result1Assertion = AssertResponsesEqualAsync(expectedResponse, result1); + var result2Assertion = AssertResponsesEqualAsync(expectedResponse, result2); Assert.False(result1Assertion.IsCompleted); Assert.False(result2Assertion.IsCompleted); completionTcs.SetResult(true); @@ -447,8 +447,8 @@ public async Task StreamingAllowsConcurrentCallsAsync() Assert.Equal(2, innerCallCount); // Act 2: Subsequent calls after completion are resolved from the cache - var result3 = outer.CompleteStreamingAsync("some input"); - await AssertCompletionsEqualAsync(expectedCompletion, result3); + var result3 = outer.GetStreamingResponseAsync("some input"); + await AssertResponsesEqualAsync(expectedResponse, result3); Assert.Equal(2, innerCallCount); } @@ -462,7 +462,7 @@ public async Task StreamingDoesNotCacheExceptionResultsAsync() CompleteStreamingAsyncCallback = delegate { innerCallCount++; - return ToAsyncEnumerableAsync(Task.CompletedTask, + return ToAsyncEnumerableAsync(Task.CompletedTask, [ () => new() { Role = 
ChatRole.Assistant, Text = "Chunk 1" }, () => throw new InvalidTimeZoneException("some failure"), @@ -475,13 +475,13 @@ public async Task StreamingDoesNotCacheExceptionResultsAsync() }; var input = new ChatMessage(ChatRole.User, "abc"); - var result1 = outer.CompleteStreamingAsync([input]); + var result1 = outer.GetStreamingResponseAsync([input]); var ex1 = await Assert.ThrowsAsync(() => ToListAsync(result1)); Assert.Equal("some failure", ex1.Message); Assert.Equal(1, innerCallCount); // Act - var result2 = outer.CompleteStreamingAsync([input]); + var result2 = outer.GetStreamingResponseAsync([input]); var ex2 = await Assert.ThrowsAsync(() => ToListAsync(result2)); // Assert @@ -501,7 +501,7 @@ public async Task StreamingDoesNotCacheCanceledResultsAsync() CompleteStreamingAsyncCallback = delegate { innerCallCount++; - return ToAsyncEnumerableAsync( + return ToAsyncEnumerableAsync( innerCallCount == 1 ? completionTcs.Task : Task.CompletedTask, [() => new() { Role = ChatRole.Assistant, Text = "A good result" }]); } @@ -513,7 +513,7 @@ public async Task StreamingDoesNotCacheCanceledResultsAsync() // First call gets cancelled var input = new ChatMessage(ChatRole.User, "abc"); - var result1 = outer.CompleteStreamingAsync([input]); + var result1 = outer.GetStreamingResponseAsync([input]); var result1Assertion = ToListAsync(result1); Assert.False(result1Assertion.IsCompleted); completionTcs.SetCanceled(); @@ -522,7 +522,7 @@ public async Task StreamingDoesNotCacheCanceledResultsAsync() Assert.Equal(1, innerCallCount); // Act/Assert: Second call can succeed - var result2 = await ToListAsync(outer.CompleteStreamingAsync([input])); + var result2 = await ToListAsync(outer.GetStreamingResponseAsync([input])); Assert.Equal("A good result", result2[0].Text); Assert.Equal(2, innerCallCount); } @@ -548,11 +548,11 @@ public async Task CacheKeyVariesByChatOptionsAsync() }; // Act: Call with two different ChatOptions that have the same values - var result1 = await 
outer.CompleteAsync([], new ChatOptions + var result1 = await outer.GetResponseAsync([], new ChatOptions { AdditionalProperties = new() { { "someKey", "value 1" } } }); - var result2 = await outer.CompleteAsync([], new ChatOptions + var result2 = await outer.GetResponseAsync([], new ChatOptions { AdditionalProperties = new() { { "someKey", "value 1" } } }); @@ -563,11 +563,11 @@ public async Task CacheKeyVariesByChatOptionsAsync() Assert.Equal("value 1", result2.Message.Text); // Act: Call with two different ChatOptions that have different values - var result3 = await outer.CompleteAsync([], new ChatOptions + var result3 = await outer.GetResponseAsync([], new ChatOptions { AdditionalProperties = new() { { "someKey", "value 1" } } }); - var result4 = await outer.CompleteAsync([], new ChatOptions + var result4 = await outer.GetResponseAsync([], new ChatOptions { AdditionalProperties = new() { { "someKey", "value 2" } } }); @@ -599,11 +599,11 @@ public async Task SubclassCanOverrideCacheKeyToVaryByChatOptionsAsync() }; // Act: Call with two different ChatOptions - var result1 = await outer.CompleteAsync([], new ChatOptions + var result1 = await outer.GetResponseAsync([], new ChatOptions { AdditionalProperties = new() { { "someKey", "value 1" } } }); - var result2 = await outer.CompleteAsync([], new ChatOptions + var result2 = await outer.GetResponseAsync([], new ChatOptions { AdditionalProperties = new() { { "someKey", "value 2" } } }); @@ -618,7 +618,7 @@ public async Task SubclassCanOverrideCacheKeyToVaryByChatOptionsAsync() public async Task CanCacheCustomContentTypesAsync() { // Arrange - var expectedCompletion = new ChatCompletion([ + var expectedResponse = new ChatResponse([ new(new ChatRole("fakeRole"), [ new CustomAIContent1("Hello", DateTime.Now), @@ -645,7 +645,7 @@ public async Task CanCacheCustomContentTypesAsync() CompleteAsyncCallback = delegate { innerCallCount++; - return Task.FromResult(expectedCompletion); + return Task.FromResult(expectedResponse); 
} }; using var outer = new DistributedCachingChatClient(testClient, _storage) @@ -654,17 +654,17 @@ public async Task CanCacheCustomContentTypesAsync() }; // Make the initial request and do a quick sanity check - var result1 = await outer.CompleteAsync("some input"); - AssertCompletionsEqual(expectedCompletion, result1); + var result1 = await outer.GetResponseAsync("some input"); + AssertResponsesEqual(expectedResponse, result1); // Act - var result2 = await outer.CompleteAsync("some input"); + var result2 = await outer.GetResponseAsync("some input"); // Assert Assert.Equal(1, innerCallCount); - AssertCompletionsEqual(expectedCompletion, result2); - Assert.NotSame(result2.Message.Contents[0], expectedCompletion.Message.Contents[0]); - Assert.NotSame(result2.Message.Contents[1], expectedCompletion.Message.Contents[1]); + AssertResponsesEqual(expectedResponse, result2); + Assert.NotSame(result2.Message.Contents[0], expectedResponse.Message.Contents[0]); + Assert.NotSame(result2.Message.Contents[1], expectedResponse.Message.Contents[1]); } [Fact] @@ -678,7 +678,7 @@ public async Task CanResolveIDistributedCacheFromDI() { CompleteAsyncCallback = delegate { - return Task.FromResult(new ChatCompletion([ + return Task.FromResult(new ChatResponse([ new(ChatRole.Assistant, [new TextContent("Hey")])])); } }; @@ -692,7 +692,7 @@ public async Task CanResolveIDistributedCacheFromDI() // Act: Make a request that should populate the cache Assert.Empty(_storage.Keys); - var result = await outer.CompleteAsync("some input"); + var result = await outer.GetResponseAsync("some input"); // Assert Assert.NotNull(result); @@ -727,9 +727,9 @@ private static async IAsyncEnumerable ToAsyncEnumerableAsync(Task preTask, } } - private static void AssertCompletionsEqual(ChatCompletion expected, ChatCompletion actual) + private static void AssertResponsesEqual(ChatResponse expected, ChatResponse actual) { - Assert.Equal(expected.CompletionId, actual.CompletionId); + 
Assert.Equal(expected.ResponseId, actual.ResponseId); Assert.Equal(expected.Usage?.InputTokenCount, actual.Usage?.InputTokenCount); Assert.Equal(expected.Usage?.OutputTokenCount, actual.Usage?.OutputTokenCount); Assert.Equal(expected.Usage?.TotalTokenCount, actual.Usage?.TotalTokenCount); @@ -770,7 +770,7 @@ private static void AssertCompletionsEqual(ChatCompletion expected, ChatCompleti } } - private static async Task AssertCompletionsEqualAsync(IReadOnlyList expected, IAsyncEnumerable actual) + private static async Task AssertResponsesEqualAsync(IReadOnlyList expected, IAsyncEnumerable actual) { var actualEnumerator = actual.GetAsyncEnumerator(); diff --git a/test/Libraries/Microsoft.Extensions.AI.Tests/ChatCompletion/FunctionInvokingChatClientTests.cs b/test/Libraries/Microsoft.Extensions.AI.Tests/ChatCompletion/FunctionInvokingChatClientTests.cs index 2690eb7181c..4cb624ea0a0 100644 --- a/test/Libraries/Microsoft.Extensions.AI.Tests/ChatCompletion/FunctionInvokingChatClientTests.cs +++ b/test/Libraries/Microsoft.Extensions.AI.Tests/ChatCompletion/FunctionInvokingChatClientTests.cs @@ -361,7 +361,7 @@ public async Task RejectsMultipleChoicesAsync() var func1 = AIFunctionFactory.Create(() => "Some result 1", "Func1"); var func2 = AIFunctionFactory.Create(() => "Some result 2", "Func2"); - var expected = new ChatCompletion( + var expected = new ChatResponse( [ new(ChatRole.Assistant, [new FunctionCallContent("callId1", func1.Metadata.Name)]), new(ChatRole.Assistant, [new FunctionCallContent("callId2", func2.Metadata.Name)]), @@ -375,7 +375,7 @@ public async Task RejectsMultipleChoicesAsync() return expected; }, CompleteStreamingAsyncCallback = (chatContents, options, cancellationToken) => - YieldAsync(expected.ToStreamingChatCompletionUpdates()), + YieldAsync(expected.ToChatResponseUpdates()), }; IChatClient service = innerClient.AsBuilder().UseFunctionInvocation().Build(); @@ -383,8 +383,8 @@ public async Task RejectsMultipleChoicesAsync() List chat = [new 
ChatMessage(ChatRole.User, "hello")]; ChatOptions options = new() { Tools = [func1, func2] }; - Validate(await Assert.ThrowsAsync(() => service.CompleteAsync(chat, options))); - Validate(await Assert.ThrowsAsync(() => service.CompleteStreamingAsync(chat, options).ToChatCompletionAsync())); + Validate(await Assert.ThrowsAsync(() => service.GetResponseAsync(chat, options))); + Validate(await Assert.ThrowsAsync(() => service.GetStreamingResponseAsync(chat, options).ToChatResponseAsync())); void Validate(Exception ex) { @@ -503,7 +503,7 @@ async Task InvokeAsync(Func work) for (int i = 0; i < activities.Count - 1; i++) { - // Activities are exported in the order of completion, so all except the last are children of the last (i.e., outer) + // Activities are exported in the order of response, so all except the last are children of the last (i.e., outer) Assert.Same(activities[activities.Count - 1], activities[i].Parent); } } @@ -535,17 +535,17 @@ public async Task SupportsConsecutiveStreamingUpdatesWithFunctionCalls() // Otherwise just end the conversation return chatContents.Last().Text == "Hello" ? 
YieldAsync( - new StreamingChatCompletionUpdate { Contents = [new FunctionCallContent("callId1", "Func1", new Dictionary { ["text"] = "Input 1" })] }, - new StreamingChatCompletionUpdate { Contents = [new FunctionCallContent("callId2", "Func1", new Dictionary { ["text"] = "Input 2" })] }) + new ChatResponseUpdate { Contents = [new FunctionCallContent("callId1", "Func1", new Dictionary { ["text"] = "Input 1" })] }, + new ChatResponseUpdate { Contents = [new FunctionCallContent("callId2", "Func1", new Dictionary { ["text"] = "Input 2" })] }) : YieldAsync( - new StreamingChatCompletionUpdate { Contents = [new TextContent("OK bye")] }); + new ChatResponseUpdate { Contents = [new TextContent("OK bye")] }); } }; using var client = new FunctionInvokingChatClient(innerClient) { KeepFunctionCallingMessages = true }; - var updates = new List(); - await foreach (var update in client.CompleteStreamingAsync(messages, options, CancellationToken.None)) + var updates = new List(); + await foreach (var update in client.GetStreamingResponseAsync(messages, options, CancellationToken.None)) { updates.Add(update); } @@ -654,7 +654,7 @@ void AssertInvocationContext(FunctionInvokingChatClient.FunctionInvocationContex } [Fact] - public async Task PropagatesCompletionChatThreadIdToOptions() + public async Task PropagatesResponseChatThreadIdToOptions() { var options = new ChatOptions { @@ -663,7 +663,7 @@ public async Task PropagatesCompletionChatThreadIdToOptions() int iteration = 0; - Func, ChatOptions?, CancellationToken, ChatCompletion> callback = + Func, ChatOptions?, CancellationToken, ChatResponse> callback = (chatContents, chatOptions, cancellationToken) => { iteration++; @@ -671,7 +671,7 @@ public async Task PropagatesCompletionChatThreadIdToOptions() if (iteration == 1) { Assert.Null(chatOptions?.ChatThreadId); - return new ChatCompletion(new ChatMessage(ChatRole.Assistant, [new FunctionCallContent("callId-abc", "Func1")])) + return new ChatResponse(new 
ChatMessage(ChatRole.Assistant, [new FunctionCallContent("callId-abc", "Func1")])) { ChatThreadId = "12345", }; @@ -679,7 +679,7 @@ public async Task PropagatesCompletionChatThreadIdToOptions() else if (iteration == 2) { Assert.Equal("12345", chatOptions?.ChatThreadId); - return new ChatCompletion(new ChatMessage(ChatRole.Assistant, "done!")); + return new ChatResponse(new ChatMessage(ChatRole.Assistant, "done!")); } else { @@ -692,15 +692,15 @@ public async Task PropagatesCompletionChatThreadIdToOptions() CompleteAsyncCallback = (chatContents, chatOptions, cancellationToken) => Task.FromResult(callback(chatContents, chatOptions, cancellationToken)), CompleteStreamingAsyncCallback = (chatContents, chatOptions, cancellationToken) => - YieldAsync(callback(chatContents, chatOptions, cancellationToken).ToStreamingChatCompletionUpdates()), + YieldAsync(callback(chatContents, chatOptions, cancellationToken).ToChatResponseUpdates()), }; using IChatClient service = innerClient.AsBuilder().UseFunctionInvocation().Build(); iteration = 0; - Assert.Equal("done!", (await service.CompleteAsync("hey", options)).ToString()); + Assert.Equal("done!", (await service.GetResponseAsync("hey", options)).ToString()); iteration = 0; - Assert.Equal("done!", (await service.CompleteStreamingAsync("hey", options).ToChatCompletionAsync()).ToString()); + Assert.Equal("done!", (await service.GetStreamingResponseAsync("hey", options).ToChatResponseAsync()).ToString()); } private static async Task> InvokeAndAssertAsync( @@ -728,13 +728,13 @@ private static async Task> InvokeAndAssertAsync( var usage = CreateRandomUsage(); expectedTotalTokenCounts += usage.InputTokenCount!.Value; - return new ChatCompletion(new ChatMessage(ChatRole.Assistant, [.. plan[contents.Count].Contents])) { Usage = usage }; + return new ChatResponse(new ChatMessage(ChatRole.Assistant, [.. 
plan[contents.Count].Contents])) { Usage = usage }; } }; IChatClient service = configurePipeline(innerClient.AsBuilder()).Build(services); - var result = await service.CompleteAsync(chat, options, cts.Token); + var result = await service.GetResponseAsync(chat, options, cts.Token); chat.Add(result.Message); expected ??= plan; @@ -817,13 +817,13 @@ private static async Task> InvokeAndAssertStreamingAsync( { Assert.Equal(cts.Token, actualCancellationToken); - return YieldAsync(new ChatCompletion(new ChatMessage(ChatRole.Assistant, [.. plan[contents.Count].Contents])).ToStreamingChatCompletionUpdates()); + return YieldAsync(new ChatResponse(new ChatMessage(ChatRole.Assistant, [.. plan[contents.Count].Contents])).ToChatResponseUpdates()); } }; IChatClient service = configurePipeline(innerClient.AsBuilder()).Build(services); - var result = await service.CompleteStreamingAsync(chat, options, cts.Token).ToChatCompletionAsync(); + var result = await service.GetStreamingResponseAsync(chat, options, cts.Token).ToChatResponseAsync(); chat.Add(result.Message); expected ??= plan; diff --git a/test/Libraries/Microsoft.Extensions.AI.Tests/ChatCompletion/LoggingChatClientTests.cs b/test/Libraries/Microsoft.Extensions.AI.Tests/ChatCompletion/LoggingChatClientTests.cs index 5f3ab83439f..9d50246826c 100644 --- a/test/Libraries/Microsoft.Extensions.AI.Tests/ChatCompletion/LoggingChatClientTests.cs +++ b/test/Libraries/Microsoft.Extensions.AI.Tests/ChatCompletion/LoggingChatClientTests.cs @@ -44,7 +44,7 @@ public void UseLogging_AvoidsInjectingNopClient() [InlineData(LogLevel.Trace)] [InlineData(LogLevel.Debug)] [InlineData(LogLevel.Information)] - public async Task CompleteAsync_LogsStartAndCompletion(LogLevel level) + public async Task GetResponseAsync_LogsResponseInvocationAndCompletion(LogLevel level) { var collector = new FakeLogCollector(); @@ -56,7 +56,7 @@ public async Task CompleteAsync_LogsStartAndCompletion(LogLevel level) { CompleteAsyncCallback = (messages, options, 
cancellationToken) => { - return Task.FromResult(new ChatCompletion([new(ChatRole.Assistant, "blue whale")])); + return Task.FromResult(new ChatResponse([new(ChatRole.Assistant, "blue whale")])); }, }; @@ -65,7 +65,7 @@ public async Task CompleteAsync_LogsStartAndCompletion(LogLevel level) .UseLogging() .Build(services); - await client.CompleteAsync( + await client.GetResponseAsync( [new(ChatRole.User, "What's the biggest animal?")], new ChatOptions { FrequencyPenalty = 3.0f }); @@ -73,14 +73,14 @@ await client.CompleteAsync( if (level is LogLevel.Trace) { Assert.Collection(logs, - entry => Assert.True(entry.Message.Contains("CompleteAsync invoked:") && entry.Message.Contains("biggest animal")), - entry => Assert.True(entry.Message.Contains("CompleteAsync completed:") && entry.Message.Contains("blue whale"))); + entry => Assert.True(entry.Message.Contains("GetResponseAsync invoked:") && entry.Message.Contains("biggest animal")), + entry => Assert.True(entry.Message.Contains("GetResponseAsync completed:") && entry.Message.Contains("blue whale"))); } else if (level is LogLevel.Debug) { Assert.Collection(logs, - entry => Assert.True(entry.Message.Contains("CompleteAsync invoked.") && !entry.Message.Contains("biggest animal")), - entry => Assert.True(entry.Message.Contains("CompleteAsync completed.") && !entry.Message.Contains("blue whale"))); + entry => Assert.True(entry.Message.Contains("GetResponseAsync invoked.") && !entry.Message.Contains("biggest animal")), + entry => Assert.True(entry.Message.Contains("GetResponseAsync completed.") && !entry.Message.Contains("blue whale"))); } else { @@ -92,7 +92,7 @@ await client.CompleteAsync( [InlineData(LogLevel.Trace)] [InlineData(LogLevel.Debug)] [InlineData(LogLevel.Information)] - public async Task CompleteStreamAsync_LogsStartUpdateCompletion(LogLevel level) + public async Task GetResponseStreamingStreamAsync_LogsUpdateReceived(LogLevel level) { var collector = new FakeLogCollector(); using ILoggerFactory loggerFactory 
= LoggerFactory.Create(b => b.AddProvider(new FakeLoggerProvider(collector)).SetMinimumLevel(level)); @@ -102,11 +102,11 @@ public async Task CompleteStreamAsync_LogsStartUpdateCompletion(LogLevel level) CompleteStreamingAsyncCallback = (messages, options, cancellationToken) => GetUpdatesAsync() }; - static async IAsyncEnumerable GetUpdatesAsync() + static async IAsyncEnumerable GetUpdatesAsync() { await Task.Yield(); - yield return new StreamingChatCompletionUpdate { Role = ChatRole.Assistant, Text = "blue " }; - yield return new StreamingChatCompletionUpdate { Role = ChatRole.Assistant, Text = "whale" }; + yield return new ChatResponseUpdate { Role = ChatRole.Assistant, Text = "blue " }; + yield return new ChatResponseUpdate { Role = ChatRole.Assistant, Text = "whale" }; } using IChatClient client = innerClient @@ -114,7 +114,7 @@ static async IAsyncEnumerable GetUpdatesAsync() .UseLogging(loggerFactory) .Build(); - await foreach (var update in client.CompleteStreamingAsync( + await foreach (var update in client.GetStreamingResponseAsync( [new(ChatRole.User, "What's the biggest animal?")], new ChatOptions { FrequencyPenalty = 3.0f })) { @@ -125,18 +125,18 @@ static async IAsyncEnumerable GetUpdatesAsync() if (level is LogLevel.Trace) { Assert.Collection(logs, - entry => Assert.True(entry.Message.Contains("CompleteStreamingAsync invoked:") && entry.Message.Contains("biggest animal")), - entry => Assert.True(entry.Message.Contains("CompleteStreamingAsync received update:") && entry.Message.Contains("blue")), - entry => Assert.True(entry.Message.Contains("CompleteStreamingAsync received update:") && entry.Message.Contains("whale")), - entry => Assert.Contains("CompleteStreamingAsync completed.", entry.Message)); + entry => Assert.True(entry.Message.Contains("GetStreamingResponseAsync invoked:") && entry.Message.Contains("biggest animal")), + entry => Assert.True(entry.Message.Contains("GetStreamingResponseAsync received update:") && entry.Message.Contains("blue")), 
+ entry => Assert.True(entry.Message.Contains("GetStreamingResponseAsync received update:") && entry.Message.Contains("whale")), + entry => Assert.Contains("GetStreamingResponseAsync completed.", entry.Message)); } else if (level is LogLevel.Debug) { Assert.Collection(logs, - entry => Assert.True(entry.Message.Contains("CompleteStreamingAsync invoked.") && !entry.Message.Contains("biggest animal")), - entry => Assert.True(entry.Message.Contains("CompleteStreamingAsync received update.") && !entry.Message.Contains("blue")), - entry => Assert.True(entry.Message.Contains("CompleteStreamingAsync received update.") && !entry.Message.Contains("whale")), - entry => Assert.Contains("CompleteStreamingAsync completed.", entry.Message)); + entry => Assert.True(entry.Message.Contains("GetStreamingResponseAsync invoked.") && !entry.Message.Contains("biggest animal")), + entry => Assert.True(entry.Message.Contains("GetStreamingResponseAsync received update.") && !entry.Message.Contains("blue")), + entry => Assert.True(entry.Message.Contains("GetStreamingResponseAsync received update.") && !entry.Message.Contains("whale")), + entry => Assert.Contains("GetStreamingResponseAsync completed.", entry.Message)); } else { diff --git a/test/Libraries/Microsoft.Extensions.AI.Tests/ChatCompletion/OpenTelemetryChatClientTests.cs b/test/Libraries/Microsoft.Extensions.AI.Tests/ChatCompletion/OpenTelemetryChatClientTests.cs index f4673023d4c..3849b04778a 100644 --- a/test/Libraries/Microsoft.Extensions.AI.Tests/ChatCompletion/OpenTelemetryChatClientTests.cs +++ b/test/Libraries/Microsoft.Extensions.AI.Tests/ChatCompletion/OpenTelemetryChatClientTests.cs @@ -38,9 +38,9 @@ public async Task ExpectedInformationLogged_Async(bool enableSensitiveData, bool CompleteAsyncCallback = async (messages, options, cancellationToken) => { await Task.Yield(); - return new ChatCompletion(new ChatMessage(ChatRole.Assistant, "The blue whale, I think.")) + return new ChatResponse(new 
ChatMessage(ChatRole.Assistant, "The blue whale, I think.")) { - CompletionId = "id123", + ResponseId = "id123", FinishReason = ChatFinishReason.Stop, Usage = new UsageDetails { @@ -61,7 +61,7 @@ public async Task ExpectedInformationLogged_Async(bool enableSensitiveData, bool null, }; - async static IAsyncEnumerable CallbackAsync( + async static IAsyncEnumerable CallbackAsync( IList messages, ChatOptions? options, [EnumeratorCancellation] CancellationToken cancellationToken) { await Task.Yield(); @@ -69,20 +69,20 @@ async static IAsyncEnumerable CallbackAsync( foreach (string text in new[] { "The ", "blue ", "whale,", " ", "", "I", " think." }) { await Task.Yield(); - yield return new StreamingChatCompletionUpdate + yield return new ChatResponseUpdate { Role = ChatRole.Assistant, Text = text, - CompletionId = "id123", + ResponseId = "id123", }; } - yield return new StreamingChatCompletionUpdate + yield return new ChatResponseUpdate { FinishReason = ChatFinishReason.Stop, }; - yield return new StreamingChatCompletionUpdate + yield return new ChatResponseUpdate { Contents = [new UsageContent(new() { @@ -138,14 +138,14 @@ async static IAsyncEnumerable CallbackAsync( if (streaming) { - await foreach (var update in chatClient.CompleteStreamingAsync(chatMessages, options)) + await foreach (var update in chatClient.GetStreamingResponseAsync(chatMessages, options)) { await Task.Yield(); } } else { - await chatClient.CompleteAsync(chatMessages, options); + await chatClient.GetResponseAsync(chatMessages, options); } var activity = Assert.Single(activities); diff --git a/test/Libraries/Microsoft.Extensions.AI.Tests/ChatCompletion/UseDelegateChatClientTests.cs b/test/Libraries/Microsoft.Extensions.AI.Tests/ChatCompletion/UseDelegateChatClientTests.cs index 1b331160316..e1221a76210 100644 --- a/test/Libraries/Microsoft.Extensions.AI.Tests/ChatCompletion/UseDelegateChatClientTests.cs +++ b/test/Libraries/Microsoft.Extensions.AI.Tests/ChatCompletion/UseDelegateChatClientTests.cs 
@@ -47,7 +47,7 @@ public async Task Shared_ContextPropagated() Assert.Same(expectedOptions, options); Assert.Equal(expectedCts.Token, cancellationToken); Assert.Equal(42, asyncLocal.Value); - return Task.FromResult(new ChatCompletion(new ChatMessage(ChatRole.Assistant, "hello"))); + return Task.FromResult(new ChatResponse(new ChatMessage(ChatRole.Assistant, "hello"))); }, CompleteStreamingAsyncCallback = (chatMessages, options, cancellationToken) => @@ -56,7 +56,7 @@ public async Task Shared_ContextPropagated() Assert.Same(expectedOptions, options); Assert.Equal(expectedCts.Token, cancellationToken); Assert.Equal(42, asyncLocal.Value); - return YieldUpdates(new StreamingChatCompletionUpdate { Text = "world" }); + return YieldUpdates(new ChatResponseUpdate { Text = "world" }); }, }; @@ -72,12 +72,12 @@ public async Task Shared_ContextPropagated() .Build(); Assert.Equal(0, asyncLocal.Value); - ChatCompletion completion = await client.CompleteAsync(expectedMessages, expectedOptions, expectedCts.Token); - Assert.Equal("hello", completion.Message.Text); + ChatResponse response = await client.GetResponseAsync(expectedMessages, expectedOptions, expectedCts.Token); + Assert.Equal("hello", response.Message.Text); Assert.Equal(0, asyncLocal.Value); - completion = await client.CompleteStreamingAsync(expectedMessages, expectedOptions, expectedCts.Token).ToChatCompletionAsync(); - Assert.Equal("world", completion.Message.Text); + response = await client.GetStreamingResponseAsync(expectedMessages, expectedOptions, expectedCts.Token).ToChatResponseAsync(); + Assert.Equal("world", response.Message.Text); } [Fact] @@ -96,7 +96,7 @@ public async Task CompleteFunc_ContextPropagated() Assert.Same(expectedOptions, options); Assert.Equal(expectedCts.Token, cancellationToken); Assert.Equal(42, asyncLocal.Value); - return Task.FromResult(new ChatCompletion(new ChatMessage(ChatRole.Assistant, "hello"))); + return Task.FromResult(new ChatResponse(new ChatMessage(ChatRole.Assistant, 
"hello"))); }, }; @@ -107,7 +107,7 @@ public async Task CompleteFunc_ContextPropagated() Assert.Same(expectedOptions, options); Assert.Equal(expectedCts.Token, cancellationToken); asyncLocal.Value = 42; - var cc = await innerClient.CompleteAsync(chatMessages, options, cancellationToken); + var cc = await innerClient.GetResponseAsync(chatMessages, options, cancellationToken); cc.Choices[0].Text += " world"; return cc; }, null) @@ -115,11 +115,11 @@ public async Task CompleteFunc_ContextPropagated() Assert.Equal(0, asyncLocal.Value); - ChatCompletion completion = await client.CompleteAsync(expectedMessages, expectedOptions, expectedCts.Token); - Assert.Equal("hello world", completion.Message.Text); + ChatResponse response = await client.GetResponseAsync(expectedMessages, expectedOptions, expectedCts.Token); + Assert.Equal("hello world", response.Message.Text); - completion = await client.CompleteStreamingAsync(expectedMessages, expectedOptions, expectedCts.Token).ToChatCompletionAsync(); - Assert.Equal("hello world", completion.Message.Text); + response = await client.GetStreamingResponseAsync(expectedMessages, expectedOptions, expectedCts.Token).ToChatResponseAsync(); + Assert.Equal("hello world", response.Message.Text); } [Fact] @@ -138,7 +138,7 @@ public async Task CompleteStreamingFunc_ContextPropagated() Assert.Same(expectedOptions, options); Assert.Equal(expectedCts.Token, cancellationToken); Assert.Equal(42, asyncLocal.Value); - return YieldUpdates(new StreamingChatCompletionUpdate { Text = "hello" }); + return YieldUpdates(new ChatResponseUpdate { Text = "hello" }); }, }; @@ -151,10 +151,10 @@ public async Task CompleteStreamingFunc_ContextPropagated() asyncLocal.Value = 42; return Impl(chatMessages, options, innerClient, cancellationToken); - static async IAsyncEnumerable Impl( + static async IAsyncEnumerable Impl( IList chatMessages, ChatOptions? 
options, IChatClient innerClient, [EnumeratorCancellation] CancellationToken cancellationToken) { - await foreach (var update in innerClient.CompleteStreamingAsync(chatMessages, options, cancellationToken)) + await foreach (var update in innerClient.GetStreamingResponseAsync(chatMessages, options, cancellationToken)) { yield return update; } @@ -166,11 +166,11 @@ static async IAsyncEnumerable Impl( Assert.Equal(0, asyncLocal.Value); - ChatCompletion completion = await client.CompleteAsync(expectedMessages, expectedOptions, expectedCts.Token); - Assert.Equal("hello world", completion.Message.Text); + ChatResponse response = await client.GetResponseAsync(expectedMessages, expectedOptions, expectedCts.Token); + Assert.Equal("hello world", response.Message.Text); - completion = await client.CompleteStreamingAsync(expectedMessages, expectedOptions, expectedCts.Token).ToChatCompletionAsync(); - Assert.Equal("hello world", completion.Message.Text); + response = await client.GetStreamingResponseAsync(expectedMessages, expectedOptions, expectedCts.Token).ToChatResponseAsync(); + Assert.Equal("hello world", response.Message.Text); } [Fact] @@ -189,7 +189,7 @@ public async Task BothCompleteAndCompleteStreamingFuncs_ContextPropagated() Assert.Same(expectedOptions, options); Assert.Equal(expectedCts.Token, cancellationToken); Assert.Equal(42, asyncLocal.Value); - return Task.FromResult(new ChatCompletion(new ChatMessage(ChatRole.Assistant, "non-streaming hello"))); + return Task.FromResult(new ChatResponse(new ChatMessage(ChatRole.Assistant, "non-streaming hello"))); }, CompleteStreamingAsyncCallback = (chatMessages, options, cancellationToken) => @@ -198,7 +198,7 @@ public async Task BothCompleteAndCompleteStreamingFuncs_ContextPropagated() Assert.Same(expectedOptions, options); Assert.Equal(expectedCts.Token, cancellationToken); Assert.Equal(42, asyncLocal.Value); - return YieldUpdates(new StreamingChatCompletionUpdate { Text = "streaming hello" }); + return YieldUpdates(new 
ChatResponseUpdate { Text = "streaming hello" }); }, }; @@ -210,7 +210,7 @@ public async Task BothCompleteAndCompleteStreamingFuncs_ContextPropagated() Assert.Same(expectedOptions, options); Assert.Equal(expectedCts.Token, cancellationToken); asyncLocal.Value = 42; - var cc = await innerClient.CompleteAsync(chatMessages, options, cancellationToken); + var cc = await innerClient.GetResponseAsync(chatMessages, options, cancellationToken); cc.Choices[0].Text += " world (non-streaming)"; return cc; }, @@ -222,10 +222,10 @@ public async Task BothCompleteAndCompleteStreamingFuncs_ContextPropagated() asyncLocal.Value = 42; return Impl(chatMessages, options, innerClient, cancellationToken); - static async IAsyncEnumerable Impl( + static async IAsyncEnumerable Impl( IList chatMessages, ChatOptions? options, IChatClient innerClient, [EnumeratorCancellation] CancellationToken cancellationToken) { - await foreach (var update in innerClient.CompleteStreamingAsync(chatMessages, options, cancellationToken)) + await foreach (var update in innerClient.GetStreamingResponseAsync(chatMessages, options, cancellationToken)) { yield return update; } @@ -237,14 +237,14 @@ static async IAsyncEnumerable Impl( Assert.Equal(0, asyncLocal.Value); - ChatCompletion completion = await client.CompleteAsync(expectedMessages, expectedOptions, expectedCts.Token); - Assert.Equal("non-streaming hello world (non-streaming)", completion.Message.Text); + ChatResponse response = await client.GetResponseAsync(expectedMessages, expectedOptions, expectedCts.Token); + Assert.Equal("non-streaming hello world (non-streaming)", response.Message.Text); - completion = await client.CompleteStreamingAsync(expectedMessages, expectedOptions, expectedCts.Token).ToChatCompletionAsync(); - Assert.Equal("streaming hello world (streaming)", completion.Message.Text); + response = await client.GetStreamingResponseAsync(expectedMessages, expectedOptions, expectedCts.Token).ToChatResponseAsync(); + Assert.Equal("streaming hello 
world (streaming)", response.Message.Text); } - private static async IAsyncEnumerable YieldUpdates(params StreamingChatCompletionUpdate[] updates) + private static async IAsyncEnumerable YieldUpdates(params ChatResponseUpdate[] updates) { foreach (var update in updates) { diff --git a/test/Libraries/Microsoft.Extensions.AI.Tests/Embeddings/LoggingEmbeddingGeneratorTests.cs b/test/Libraries/Microsoft.Extensions.AI.Tests/Embeddings/LoggingEmbeddingGeneratorTests.cs index bc4a73fdeb7..23600eac43b 100644 --- a/test/Libraries/Microsoft.Extensions.AI.Tests/Embeddings/LoggingEmbeddingGeneratorTests.cs +++ b/test/Libraries/Microsoft.Extensions.AI.Tests/Embeddings/LoggingEmbeddingGeneratorTests.cs @@ -43,7 +43,7 @@ public void UseLogging_AvoidsInjectingNopClient() [InlineData(LogLevel.Trace)] [InlineData(LogLevel.Debug)] [InlineData(LogLevel.Information)] - public async Task CompleteAsync_LogsStartAndCompletion(LogLevel level) + public async Task GetResponseAsync_LogsResponseInvocationAndCompletion(LogLevel level) { var collector = new FakeLogCollector(); diff --git a/test/Libraries/Microsoft.Extensions.AI.Tests/TestJsonSerializerContext.cs b/test/Libraries/Microsoft.Extensions.AI.Tests/TestJsonSerializerContext.cs index b077542c17c..f8416c005fc 100644 --- a/test/Libraries/Microsoft.Extensions.AI.Tests/TestJsonSerializerContext.cs +++ b/test/Libraries/Microsoft.Extensions.AI.Tests/TestJsonSerializerContext.cs @@ -10,9 +10,9 @@ namespace Microsoft.Extensions.AI; // These types are directly serialized by DistributedCachingChatClient -[JsonSerializable(typeof(ChatCompletion))] +[JsonSerializable(typeof(ChatResponse))] [JsonSerializable(typeof(IList))] -[JsonSerializable(typeof(IReadOnlyList))] +[JsonSerializable(typeof(IReadOnlyList))] // These types are specific to the tests in this project [JsonSerializable(typeof(bool))] From 052fb6159adf809ec124773a1bfdbbf85eb9062f Mon Sep 17 00:00:00 2001 From: Stephen Toub Date: Tue, 11 Feb 2025 07:36:36 -0500 Subject: [PATCH 2/4] Apply 
suggestions from code review Co-authored-by: Steve Sanderson --- .../ChatCompletion/ChatResponseUpdate.cs | 4 ++-- .../AzureAIInferenceChatClient.cs | 2 +- .../OpenAIRealtimeExtensions.cs | 2 +- src/Libraries/Microsoft.Extensions.AI/CachingHelpers.cs | 2 +- .../ChatCompletion/AnonymousDelegatingChatClient.cs | 2 +- .../ChatCompletion/CachingChatClient.cs | 4 ++-- .../Embeddings/CachingEmbeddingGenerator.cs | 2 +- .../IntegrationTestHelpers.cs | 2 +- .../ChatCompletion/FunctionInvokingChatClientTests.cs | 2 +- 9 files changed, 11 insertions(+), 11 deletions(-) diff --git a/src/Libraries/Microsoft.Extensions.AI.Abstractions/ChatCompletion/ChatResponseUpdate.cs b/src/Libraries/Microsoft.Extensions.AI.Abstractions/ChatCompletion/ChatResponseUpdate.cs index 2b0cce87bef..7ac2093c97a 100644 --- a/src/Libraries/Microsoft.Extensions.AI.Abstractions/ChatCompletion/ChatResponseUpdate.cs +++ b/src/Libraries/Microsoft.Extensions.AI.Abstractions/ChatCompletion/ChatResponseUpdate.cs @@ -15,7 +15,7 @@ namespace Microsoft.Extensions.AI; /// /// Conceptually, this combines the roles of and /// in streaming output. For ease of consumption, it also flattens the nested structure you see on -/// streaming chunks in some AI service, so instead of a dictionary of choices, each update represents a +/// streaming chunks in some AI services, so instead of a dictionary of choices, each update represents a /// single choice (and hence has its own role, choice ID, etc.). /// /// @@ -121,7 +121,7 @@ public IList Contents /// Gets or sets the finish reason for the operation. public ChatFinishReason? FinishReason { get; set; } - /// Gets or sets the model ID using in the creation of the chat response of which this update is a part. + /// Gets or sets the model ID associated with this response update. public string? 
ModelId { get; set; } /// diff --git a/src/Libraries/Microsoft.Extensions.AI.AzureAIInference/AzureAIInferenceChatClient.cs b/src/Libraries/Microsoft.Extensions.AI.AzureAIInference/AzureAIInferenceChatClient.cs index 3447fa2be28..2042a67fe78 100644 --- a/src/Libraries/Microsoft.Extensions.AI.AzureAIInference/AzureAIInferenceChatClient.cs +++ b/src/Libraries/Microsoft.Extensions.AI.AzureAIInference/AzureAIInferenceChatClient.cs @@ -309,7 +309,7 @@ private ChatCompletionsOptions ToAzureAIOptions(IList chatContents, } } - // These properties are strongly typed on ChatOptions but not on ChatResponsesOptions. + // These properties are strongly typed on ChatOptions but not on ChatCompletionsOptions. if (options.TopK is int topK) { result.AdditionalProperties["top_k"] = new BinaryData(JsonSerializer.SerializeToUtf8Bytes(topK, AIJsonUtilities.DefaultOptions.GetTypeInfo(typeof(int)))); diff --git a/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIRealtimeExtensions.cs b/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIRealtimeExtensions.cs index 695e4d569d6..db12baf962d 100644 --- a/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIRealtimeExtensions.cs +++ b/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIRealtimeExtensions.cs @@ -52,7 +52,7 @@ public static ConversationFunctionTool ToConversationFunctionTool(this AIFunctio /// An optional flag specifying whether to disclose detailed exception information to the model. The default value is . /// An optional that controls JSON handling. /// An optional . - /// A that represents the response of processing, including invoking any asynchronous tools. + /// A that represents the completion of processing, including invoking any asynchronous tools. 
public static async Task HandleToolCallsAsync( this RealtimeConversationSession session, ConversationUpdate update, diff --git a/src/Libraries/Microsoft.Extensions.AI/CachingHelpers.cs b/src/Libraries/Microsoft.Extensions.AI/CachingHelpers.cs index e0d6a5a3ac1..3b5f5531755 100644 --- a/src/Libraries/Microsoft.Extensions.AI/CachingHelpers.cs +++ b/src/Libraries/Microsoft.Extensions.AI/CachingHelpers.cs @@ -42,7 +42,7 @@ public static string GetCacheKey(ReadOnlySpan values, JsonSerializerOpt // We need to ensure that the value in ThreadStaticInstance is always ready to use. // If we start using an instance, write to it, and then fail, we will have left it // in an inconsistent state. So, when renting it, we null it out, and we only put - // it back upon successful response after resetting it. + // it back upon successful completion after resetting it. IncrementalHashStream.ThreadStaticInstance = null; } else diff --git a/src/Libraries/Microsoft.Extensions.AI/ChatCompletion/AnonymousDelegatingChatClient.cs b/src/Libraries/Microsoft.Extensions.AI/ChatCompletion/AnonymousDelegatingChatClient.cs index 79682381f70..9a533363270 100644 --- a/src/Libraries/Microsoft.Extensions.AI/ChatCompletion/AnonymousDelegatingChatClient.cs +++ b/src/Libraries/Microsoft.Extensions.AI/ChatCompletion/AnonymousDelegatingChatClient.cs @@ -204,7 +204,7 @@ internal static void ThrowIfBothDelegatesNull(object? completeFunc, object? comp /// but need not be. /// /// The to monitor for cancellation requests. The default is . - /// A that represents the response of the operation. + /// A that represents the completion of the operation. public delegate Task CompleteSharedFunc( IList chatMessages, ChatOptions? 
options, diff --git a/src/Libraries/Microsoft.Extensions.AI/ChatCompletion/CachingChatClient.cs b/src/Libraries/Microsoft.Extensions.AI/ChatCompletion/CachingChatClient.cs index 5229d7d6031..79f41d1790e 100644 --- a/src/Libraries/Microsoft.Extensions.AI/ChatCompletion/CachingChatClient.cs +++ b/src/Libraries/Microsoft.Extensions.AI/ChatCompletion/CachingChatClient.cs @@ -155,7 +155,7 @@ public override async IAsyncEnumerable GetStreamingResponseA /// The cache key. /// The to be stored. /// The to monitor for cancellation requests. - /// A representing the response of the operation. + /// A representing the completion of the operation. protected abstract Task WriteCacheAsync(string key, ChatResponse value, CancellationToken cancellationToken); /// @@ -165,6 +165,6 @@ public override async IAsyncEnumerable GetStreamingResponseA /// The cache key. /// The to be stored. /// The to monitor for cancellation requests. - /// A representing the response of the operation. + /// A representing the completion of the operation. protected abstract Task WriteCacheStreamingAsync(string key, IReadOnlyList value, CancellationToken cancellationToken); } diff --git a/src/Libraries/Microsoft.Extensions.AI/Embeddings/CachingEmbeddingGenerator.cs b/src/Libraries/Microsoft.Extensions.AI/Embeddings/CachingEmbeddingGenerator.cs index e5595164d1b..688e4b2353d 100644 --- a/src/Libraries/Microsoft.Extensions.AI/Embeddings/CachingEmbeddingGenerator.cs +++ b/src/Libraries/Microsoft.Extensions.AI/Embeddings/CachingEmbeddingGenerator.cs @@ -121,6 +121,6 @@ public override async Task> GenerateAsync( /// The cache key. /// The to be stored. /// The to monitor for cancellation requests. - /// A representing the response of the operation. + /// A representing the completion of the operation. 
protected abstract Task WriteCacheAsync(string key, TEmbedding value, CancellationToken cancellationToken); } diff --git a/test/Libraries/Microsoft.Extensions.AI.AzureAIInference.Tests/IntegrationTestHelpers.cs b/test/Libraries/Microsoft.Extensions.AI.AzureAIInference.Tests/IntegrationTestHelpers.cs index 7fa87ad336b..236a8428733 100644 --- a/test/Libraries/Microsoft.Extensions.AI.AzureAIInference.Tests/IntegrationTestHelpers.cs +++ b/test/Libraries/Microsoft.Extensions.AI.AzureAIInference.Tests/IntegrationTestHelpers.cs @@ -18,7 +18,7 @@ internal static class IntegrationTestHelpers TestRunnerConfiguration.Instance["AzureAIInference:Endpoint"] ?? "https://api.openai.com/v1"; - /// Gets an to use for testing, or null if the associated tests should be disabled. + /// Gets a to use for testing, or null if the associated tests should be disabled. public static ChatCompletionsClient? GetChatCompletionsClient() => _apiKey is string apiKey ? new ChatCompletionsClient(new Uri(_endpoint), new AzureKeyCredential(apiKey)) : diff --git a/test/Libraries/Microsoft.Extensions.AI.Tests/ChatCompletion/FunctionInvokingChatClientTests.cs b/test/Libraries/Microsoft.Extensions.AI.Tests/ChatCompletion/FunctionInvokingChatClientTests.cs index 4cb624ea0a0..daaf13040ba 100644 --- a/test/Libraries/Microsoft.Extensions.AI.Tests/ChatCompletion/FunctionInvokingChatClientTests.cs +++ b/test/Libraries/Microsoft.Extensions.AI.Tests/ChatCompletion/FunctionInvokingChatClientTests.cs @@ -503,7 +503,7 @@ async Task InvokeAsync(Func work) for (int i = 0; i < activities.Count - 1; i++) { - // Activities are exported in the order of response, so all except the last are children of the last (i.e., outer) + // Activities are exported in the order of completion, so all except the last are children of the last (i.e., outer) Assert.Same(activities[activities.Count - 1], activities[i].Parent); } } From ff2969cd7f9590fabcf21d79e679da5fd2f92473 Mon Sep 17 00:00:00 2001 From: Stephen Toub Date: Tue, 11 Feb 2025 
07:37:12 -0500 Subject: [PATCH 3/4] Update src/Libraries/Microsoft.Extensions.AI.AzureAIInference/AzureAIInferenceChatClient.cs Co-authored-by: Steve Sanderson --- .../AzureAIInferenceChatClient.cs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/Libraries/Microsoft.Extensions.AI.AzureAIInference/AzureAIInferenceChatClient.cs b/src/Libraries/Microsoft.Extensions.AI.AzureAIInference/AzureAIInferenceChatClient.cs index 2042a67fe78..27e6298e057 100644 --- a/src/Libraries/Microsoft.Extensions.AI.AzureAIInference/AzureAIInferenceChatClient.cs +++ b/src/Libraries/Microsoft.Extensions.AI.AzureAIInference/AzureAIInferenceChatClient.cs @@ -321,7 +321,7 @@ private ChatCompletionsOptions ToAzureAIOptions(IList chatContents, { switch (prop.Key) { - // Propagate everything else to the ChatResponseOptions' AdditionalProperties. + // Propagate everything else to the ChatCompletionsOptions' AdditionalProperties. default: if (prop.Value is not null) { From 0da868e91af1d69740b6e0ef44ac1f487bb0a14d Mon Sep 17 00:00:00 2001 From: Stephen Toub Date: Tue, 11 Feb 2025 09:35:40 -0500 Subject: [PATCH 4/4] Address remaining comments --- .../ChatCompletion/ChatResponseUpdate.cs | 25 +++++++------------ .../OpenAIChatClient.cs | 4 +-- .../OpenAIModelMapper.ChatCompletion.cs | 4 +-- ...nAIModelMappers.StreamingChatCompletion.cs | 4 +-- .../OpenAISerializationHelpers.cs | 4 +-- 5 files changed, 17 insertions(+), 24 deletions(-) diff --git a/src/Libraries/Microsoft.Extensions.AI.Abstractions/ChatCompletion/ChatResponseUpdate.cs b/src/Libraries/Microsoft.Extensions.AI.Abstractions/ChatCompletion/ChatResponseUpdate.cs index 7ac2093c97a..8bf9e57ece2 100644 --- a/src/Libraries/Microsoft.Extensions.AI.Abstractions/ChatCompletion/ChatResponseUpdate.cs +++ b/src/Libraries/Microsoft.Extensions.AI.Abstractions/ChatCompletion/ChatResponseUpdate.cs @@ -13,28 +13,21 @@ namespace Microsoft.Extensions.AI; /// /// /// -/// Conceptually, this combines the roles of and -/// in streaming 
output. For ease of consumption, it also flattens the nested structure you see on -/// streaming chunks in some AI services, so instead of a dictionary of choices, each update represents a -/// single choice (and hence has its own role, choice ID, etc.). -/// -/// /// is so named because it represents updates -/// to a single chat response. As such, it is considered erroneous for multiple updates that are part -/// of the same response to contain competing values. For example, some updates that are part of -/// the same response may have a -/// value, and others may have a non- value, but all of those with a non- -/// value must have the same value (e.g. . It should never be the case, for example, -/// that one in a response has a role of -/// while another has a role of "AI". +/// that layer on each other to form a single chat response. Conceptually, this combines the roles of +/// and in streaming output. For ease of consumption, +/// it also flattens the nested structure you see on streaming chunks in some AI services, so instead of a +/// dictionary of choices, each update is part of a single choice (and hence has its own role, choice ID, etc.). /// /// /// The relationship between and is /// codified in the and /// , which enable bidirectional conversions -/// between the two. Note, however, that the conversion may be slightly lossy, for example if multiple updates -/// all have different objects whereas there's -/// only one slot for such an object available in . +/// between the two. Note, however, that the provided conversions may be lossy, for example if multiple +/// updates all have different objects whereas there's only one slot for +/// such an object available in . Similarly, if different +/// updates that are part of the same choice provide different values for properties like , +/// only one of the values will be used to populate . 
/// /// public class ChatResponseUpdate diff --git a/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIChatClient.cs b/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIChatClient.cs index f9e254aea0b..ba584cc1734 100644 --- a/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIChatClient.cs +++ b/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIChatClient.cs @@ -110,7 +110,7 @@ public async Task GetResponseAsync( // Make the call to OpenAI. var response = await _chatClient.CompleteChatAsync(openAIChatMessages, openAIOptions, cancellationToken).ConfigureAwait(false); - return OpenAIModelMappers.FromOpenAIChatResponse(response.Value, options, openAIOptions); + return OpenAIModelMappers.FromOpenAIChatCompletion(response.Value, options, openAIOptions); } /// @@ -125,7 +125,7 @@ public IAsyncEnumerable GetStreamingResponseAsync( // Make the call to OpenAI. var chatCompletionUpdates = _chatClient.CompleteChatStreamingAsync(openAIChatMessages, openAIOptions, cancellationToken); - return OpenAIModelMappers.FromOpenAIStreamingChatResponseAsync(chatCompletionUpdates, cancellationToken); + return OpenAIModelMappers.FromOpenAIStreamingChatCompletionAsync(chatCompletionUpdates, cancellationToken); } /// diff --git a/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIModelMapper.ChatCompletion.cs b/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIModelMapper.ChatCompletion.cs index 53b18e0abb7..170f8cbe06e 100644 --- a/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIModelMapper.ChatCompletion.cs +++ b/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIModelMapper.ChatCompletion.cs @@ -28,7 +28,7 @@ internal static partial class OpenAIModelMappers { internal static JsonElement DefaultParameterSchema { get; } = JsonDocument.Parse("{}").RootElement; - public static ChatCompletion ToOpenAIChatResponse(ChatResponse response, JsonSerializerOptions options) + public static ChatCompletion ToOpenAIChatCompletion(ChatResponse response, JsonSerializerOptions options) { _ = 
Throw.IfNull(response); @@ -73,7 +73,7 @@ public static ChatCompletion ToOpenAIChatResponse(ChatResponse response, JsonSer usage: chatTokenUsage); } - public static ChatResponse FromOpenAIChatResponse(ChatCompletion openAICompletion, ChatOptions? options, ChatCompletionOptions chatCompletionOptions) + public static ChatResponse FromOpenAIChatCompletion(ChatCompletion openAICompletion, ChatOptions? options, ChatCompletionOptions chatCompletionOptions) { _ = Throw.IfNull(openAICompletion); diff --git a/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIModelMappers.StreamingChatCompletion.cs b/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIModelMappers.StreamingChatCompletion.cs index f77424ee528..bfafbdf82b2 100644 --- a/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIModelMappers.StreamingChatCompletion.cs +++ b/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIModelMappers.StreamingChatCompletion.cs @@ -18,7 +18,7 @@ namespace Microsoft.Extensions.AI; internal static partial class OpenAIModelMappers { - public static async IAsyncEnumerable ToOpenAIStreamingChatResponseAsync( + public static async IAsyncEnumerable ToOpenAIStreamingChatCompletionAsync( IAsyncEnumerable updates, JsonSerializerOptions options, [EnumeratorCancellation] CancellationToken cancellationToken = default) @@ -61,7 +61,7 @@ public static async IAsyncEnumerable ToOpenAIStre } } - public static async IAsyncEnumerable FromOpenAIStreamingChatResponseAsync( + public static async IAsyncEnumerable FromOpenAIStreamingChatCompletionAsync( IAsyncEnumerable updates, [EnumeratorCancellation] CancellationToken cancellationToken = default) { diff --git a/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAISerializationHelpers.cs b/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAISerializationHelpers.cs index 9c6b91c5f28..659db4ed3bd 100644 --- a/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAISerializationHelpers.cs +++ b/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAISerializationHelpers.cs 
@@ -53,7 +53,7 @@ public static async Task SerializeAsync( _ = Throw.IfNull(response); options ??= AIJsonUtilities.DefaultOptions; - ChatCompletion openAiChatResponse = OpenAIModelMappers.ToOpenAIChatResponse(response, options); + ChatCompletion openAiChatResponse = OpenAIModelMappers.ToOpenAIChatCompletion(response, options); BinaryData binaryData = JsonModelHelpers.Serialize(openAiChatResponse); await stream.WriteAsync(binaryData.ToMemory(), cancellationToken).ConfigureAwait(false); } @@ -76,7 +76,7 @@ public static Task SerializeStreamingAsync( _ = Throw.IfNull(updates); options ??= AIJsonUtilities.DefaultOptions; - var mappedUpdates = OpenAIModelMappers.ToOpenAIStreamingChatResponseAsync(updates, options, cancellationToken); + var mappedUpdates = OpenAIModelMappers.ToOpenAIStreamingChatCompletionAsync(updates, options, cancellationToken); return SseFormatter.WriteAsync(ToSseEventsAsync(mappedUpdates), stream, FormatAsSseEvent, cancellationToken); static async IAsyncEnumerable> ToSseEventsAsync(IAsyncEnumerable updates)