diff --git a/src/Libraries/Microsoft.Extensions.AI.Abstractions/ChatCompletion/ChatClientExtensions.cs b/src/Libraries/Microsoft.Extensions.AI.Abstractions/ChatCompletion/ChatClientExtensions.cs
index 163cde97f58..e485a75d4b1 100644
--- a/src/Libraries/Microsoft.Extensions.AI.Abstractions/ChatCompletion/ChatClientExtensions.cs
+++ b/src/Libraries/Microsoft.Extensions.AI.Abstractions/ChatCompletion/ChatClientExtensions.cs
@@ -27,13 +27,13 @@ public static class ChatClientExtensions
return (TService?)client.GetService(typeof(TService), serviceKey);
}
- /// Sends a user chat text message to the model and returns the response messages.
+ /// Sends a user chat text message and returns the response messages.
/// The chat client.
/// The text content for the chat message to send.
/// The chat options to configure the request.
/// The to monitor for cancellation requests. The default is .
/// The response messages generated by the client.
- public static Task CompleteAsync(
+ public static Task GetResponseAsync(
this IChatClient client,
string chatMessage,
ChatOptions? options = null,
@@ -42,16 +42,16 @@ public static Task CompleteAsync(
_ = Throw.IfNull(client);
_ = Throw.IfNull(chatMessage);
- return client.CompleteAsync(new ChatMessage(ChatRole.User, chatMessage), options, cancellationToken);
+ return client.GetResponseAsync(new ChatMessage(ChatRole.User, chatMessage), options, cancellationToken);
}
- /// Sends a chat message to the model and returns the response messages.
+ /// Sends a chat message and returns the response messages.
/// The chat client.
/// The chat message to send.
/// The chat options to configure the request.
/// The to monitor for cancellation requests. The default is .
/// The response messages generated by the client.
- public static Task CompleteAsync(
+ public static Task GetResponseAsync(
this IChatClient client,
ChatMessage chatMessage,
ChatOptions? options = null,
@@ -60,16 +60,16 @@ public static Task CompleteAsync(
_ = Throw.IfNull(client);
_ = Throw.IfNull(chatMessage);
- return client.CompleteAsync([chatMessage], options, cancellationToken);
+ return client.GetResponseAsync([chatMessage], options, cancellationToken);
}
- /// Sends a user chat text message to the model and streams the response messages.
+ /// Sends a user chat text message and streams the response messages.
/// The chat client.
/// The text content for the chat message to send.
/// The chat options to configure the request.
/// The to monitor for cancellation requests. The default is .
/// The response messages generated by the client.
- public static IAsyncEnumerable CompleteStreamingAsync(
+ public static IAsyncEnumerable GetStreamingResponseAsync(
this IChatClient client,
string chatMessage,
ChatOptions? options = null,
@@ -78,16 +78,16 @@ public static IAsyncEnumerable CompleteStreamingA
_ = Throw.IfNull(client);
_ = Throw.IfNull(chatMessage);
- return client.CompleteStreamingAsync(new ChatMessage(ChatRole.User, chatMessage), options, cancellationToken);
+ return client.GetStreamingResponseAsync(new ChatMessage(ChatRole.User, chatMessage), options, cancellationToken);
}
- /// Sends a chat message to the model and streams the response messages.
+ /// Sends a chat message and streams the response messages.
/// The chat client.
/// The chat message to send.
/// The chat options to configure the request.
/// The to monitor for cancellation requests. The default is .
/// The response messages generated by the client.
- public static IAsyncEnumerable CompleteStreamingAsync(
+ public static IAsyncEnumerable GetStreamingResponseAsync(
this IChatClient client,
ChatMessage chatMessage,
ChatOptions? options = null,
@@ -96,6 +96,6 @@ public static IAsyncEnumerable CompleteStreamingA
_ = Throw.IfNull(client);
_ = Throw.IfNull(chatMessage);
- return client.CompleteStreamingAsync([chatMessage], options, cancellationToken);
+ return client.GetStreamingResponseAsync([chatMessage], options, cancellationToken);
}
}
diff --git a/src/Libraries/Microsoft.Extensions.AI.Abstractions/ChatCompletion/ChatClientMetadata.cs b/src/Libraries/Microsoft.Extensions.AI.Abstractions/ChatCompletion/ChatClientMetadata.cs
index 406b9768dd7..ace4dead9e3 100644
--- a/src/Libraries/Microsoft.Extensions.AI.Abstractions/ChatCompletion/ChatClientMetadata.cs
+++ b/src/Libraries/Microsoft.Extensions.AI.Abstractions/ChatCompletion/ChatClientMetadata.cs
@@ -10,11 +10,11 @@ public class ChatClientMetadata
{
/// Initializes a new instance of the class.
///
- /// The name of the chat completion provider, if applicable. Where possible, this should map to the
+ /// The name of the chat provider, if applicable. Where possible, this should map to the
/// appropriate name defined in the OpenTelemetry Semantic Conventions for Generative AI systems.
///
- /// The URL for accessing the chat completion provider, if applicable.
- /// The ID of the chat completion model used, if applicable.
+ /// The URL for accessing the chat provider, if applicable.
+ /// The ID of the chat model used, if applicable.
public ChatClientMetadata(string? providerName = null, Uri? providerUri = null, string? modelId = null)
{
ModelId = modelId;
@@ -22,17 +22,17 @@ public ChatClientMetadata(string? providerName = null, Uri? providerUri = null,
ProviderUri = providerUri;
}
- /// Gets the name of the chat completion provider.
+ /// Gets the name of the chat provider.
///
/// Where possible, this maps to the appropriate name defined in the
/// OpenTelemetry Semantic Conventions for Generative AI systems.
///
public string? ProviderName { get; }
- /// Gets the URL for accessing the chat completion provider.
+ /// Gets the URL for accessing the chat provider.
public Uri? ProviderUri { get; }
- /// Gets the ID of the model used by this chat completion provider.
+ /// Gets the ID of the model used by this chat provider.
///
/// This value can be null if either the name is unknown or there are multiple possible models associated with this instance.
/// An individual request may override this value via .
diff --git a/src/Libraries/Microsoft.Extensions.AI.Abstractions/ChatCompletion/ChatCompletion.cs b/src/Libraries/Microsoft.Extensions.AI.Abstractions/ChatCompletion/ChatResponse.cs
similarity index 65%
rename from src/Libraries/Microsoft.Extensions.AI.Abstractions/ChatCompletion/ChatCompletion.cs
rename to src/Libraries/Microsoft.Extensions.AI.Abstractions/ChatCompletion/ChatResponse.cs
index 296c089ce1f..f789fc7f974 100644
--- a/src/Libraries/Microsoft.Extensions.AI.Abstractions/ChatCompletion/ChatCompletion.cs
+++ b/src/Libraries/Microsoft.Extensions.AI.Abstractions/ChatCompletion/ChatResponse.cs
@@ -9,36 +9,36 @@
namespace Microsoft.Extensions.AI;
-/// Represents the result of a chat completion request.
-public class ChatCompletion
+/// Represents the response to a chat request.
+public class ChatResponse
{
- /// The list of choices in the completion.
+ /// The list of choices in the response.
private IList _choices;
- /// Initializes a new instance of the class.
- /// The list of choices in the completion, one message per choice.
+ /// Initializes a new instance of the class.
+ /// The list of choices in the response, one message per choice.
[JsonConstructor]
- public ChatCompletion(IList choices)
+ public ChatResponse(IList choices)
{
_choices = Throw.IfNull(choices);
}
- /// Initializes a new instance of the class.
- /// The chat message representing the singular choice in the completion.
- public ChatCompletion(ChatMessage message)
+ /// Initializes a new instance of the class.
+ /// The chat message representing the singular choice in the response.
+ public ChatResponse(ChatMessage message)
{
_ = Throw.IfNull(message);
_choices = [message];
}
- /// Gets or sets the list of chat completion choices.
+ /// Gets or sets the list of chat response choices.
public IList Choices
{
get => _choices;
set => _choices = Throw.IfNull(value);
}
- /// Gets the chat completion message.
+ /// Gets the chat response message.
///
/// If there are multiple choices, this property returns the first choice.
/// If is empty, this property will throw. Use to access all choices directly.
@@ -51,48 +51,48 @@ public ChatMessage Message
var choices = Choices;
if (choices.Count == 0)
{
- throw new InvalidOperationException($"The {nameof(ChatCompletion)} instance does not contain any {nameof(ChatMessage)} choices.");
+ throw new InvalidOperationException($"The {nameof(ChatResponse)} instance does not contain any {nameof(ChatMessage)} choices.");
}
return choices[0];
}
}
- /// Gets or sets the ID of the chat completion.
- public string? CompletionId { get; set; }
+ /// Gets or sets the ID of the chat response.
+ public string? ResponseId { get; set; }
- /// Gets or sets the chat thread ID associated with this chat completion.
+ /// Gets or sets the chat thread ID associated with this chat response.
///
/// Some implementations are capable of storing the state for a chat thread, such that
- /// the input messages supplied to need only be the additional messages beyond
+ /// the input messages supplied to need only be the additional messages beyond
/// what's already stored. If this property is non-, it represents an identifier for that state,
/// and it should be used in a subsequent instead of supplying the same messages
- /// (and this 's message) as part of the chatMessages parameter.
+ /// (and this 's message) as part of the chatMessages parameter.
///
public string? ChatThreadId { get; set; }
- /// Gets or sets the model ID used in the creation of the chat completion.
+ /// Gets or sets the model ID used in the creation of the chat response.
public string? ModelId { get; set; }
- /// Gets or sets a timestamp for the chat completion.
+ /// Gets or sets a timestamp for the chat response.
public DateTimeOffset? CreatedAt { get; set; }
- /// Gets or sets the reason for the chat completion.
+ /// Gets or sets the reason for the chat response.
public ChatFinishReason? FinishReason { get; set; }
- /// Gets or sets usage details for the chat completion.
+ /// Gets or sets usage details for the chat response.
public UsageDetails? Usage { get; set; }
- /// Gets or sets the raw representation of the chat completion from an underlying implementation.
+ /// Gets or sets the raw representation of the chat response from an underlying implementation.
///
- /// If a is created to represent some underlying object from another object
+ /// If a is created to represent some underlying object from another object
/// model, this property can be used to store that original object. This can be useful for debugging or
/// for enabling a consumer to access the underlying object model if needed.
///
[JsonIgnore]
public object? RawRepresentation { get; set; }
- /// Gets or sets any additional properties associated with the chat completion.
+ /// Gets or sets any additional properties associated with the chat response.
public AdditionalPropertiesDictionary? AdditionalProperties { get; set; }
///
@@ -117,14 +117,14 @@ public override string ToString()
return sb.ToString();
}
- /// Creates an array of instances that represent this .
- /// An array of instances that may be used to represent this .
- public StreamingChatCompletionUpdate[] ToStreamingChatCompletionUpdates()
+ /// Creates an array of instances that represent this .
+ /// An array of instances that may be used to represent this .
+ public ChatResponseUpdate[] ToChatResponseUpdates()
{
- StreamingChatCompletionUpdate? extra = null;
+ ChatResponseUpdate? extra = null;
if (AdditionalProperties is not null || Usage is not null)
{
- extra = new StreamingChatCompletionUpdate
+ extra = new ChatResponseUpdate
{
AdditionalProperties = AdditionalProperties
};
@@ -136,12 +136,12 @@ public StreamingChatCompletionUpdate[] ToStreamingChatCompletionUpdates()
}
int choicesCount = Choices.Count;
- var updates = new StreamingChatCompletionUpdate[choicesCount + (extra is null ? 0 : 1)];
+ var updates = new ChatResponseUpdate[choicesCount + (extra is null ? 0 : 1)];
for (int choiceIndex = 0; choiceIndex < choicesCount; choiceIndex++)
{
ChatMessage choice = Choices[choiceIndex];
- updates[choiceIndex] = new StreamingChatCompletionUpdate
+ updates[choiceIndex] = new ChatResponseUpdate
{
ChatThreadId = ChatThreadId,
ChoiceIndex = choiceIndex,
@@ -152,7 +152,7 @@ public StreamingChatCompletionUpdate[] ToStreamingChatCompletionUpdates()
RawRepresentation = choice.RawRepresentation,
Role = choice.Role,
- CompletionId = CompletionId,
+ ResponseId = ResponseId,
CreatedAt = CreatedAt,
FinishReason = FinishReason,
ModelId = ModelId
diff --git a/src/Libraries/Microsoft.Extensions.AI.Abstractions/ChatCompletion/StreamingChatCompletionUpdate.cs b/src/Libraries/Microsoft.Extensions.AI.Abstractions/ChatCompletion/ChatResponseUpdate.cs
similarity index 52%
rename from src/Libraries/Microsoft.Extensions.AI.Abstractions/ChatCompletion/StreamingChatCompletionUpdate.cs
rename to src/Libraries/Microsoft.Extensions.AI.Abstractions/ChatCompletion/ChatResponseUpdate.cs
index e50fd42169b..8bf9e57ece2 100644
--- a/src/Libraries/Microsoft.Extensions.AI.Abstractions/ChatCompletion/StreamingChatCompletionUpdate.cs
+++ b/src/Libraries/Microsoft.Extensions.AI.Abstractions/ChatCompletion/ChatResponseUpdate.cs
@@ -13,46 +13,39 @@ namespace Microsoft.Extensions.AI;
///
///
///
-/// Conceptually, this combines the roles of and
-/// in streaming output. For ease of consumption, it also flattens the nested structure you see on
-/// streaming chunks in some AI service, so instead of a dictionary of choices, each update represents a
-/// single choice (and hence has its own role, choice ID, etc.).
+/// is so named because it represents updates
+/// that layer on each other to form a single chat response. Conceptually, this combines the roles of
+/// and in streaming output. For ease of consumption,
+/// it also flattens the nested structure you see on streaming chunks in some AI services, so instead of a
+/// dictionary of choices, each update is part of a single choice (and hence has its own role, choice ID, etc.).
///
///
-/// is so named because it represents streaming updates
-/// to a single chat completion. As such, it is considered erroneous for multiple updates that are part
-/// of the same completion to contain competing values. For example, some updates that are part of
-/// the same completion may have a
-/// value, and others may have a non- value, but all of those with a non-
-/// value must have the same value (e.g. . It should never be the case, for example,
-/// that one in a completion has a role of
-/// while another has a role of "AI".
-///
-///
-/// The relationship between and is
-/// codified in the and
-/// , which enable bidirectional conversions
-/// between the two. Note, however, that the conversion may be slightly lossy, for example if multiple updates
-/// all have different objects whereas there's
-/// only one slot for such an object available in .
+/// The relationship between and is
+/// codified in the and
+/// , which enable bidirectional conversions
+/// between the two. Note, however, that the provided conversions may be lossy, for example if multiple
+/// updates all have different objects whereas there's only one slot for
+/// such an object available in . Similarly, if different
+/// updates that are part of the same choice provide different values for properties like ,
+/// only one of the values will be used to populate .
///
///
-public class StreamingChatCompletionUpdate
+public class ChatResponseUpdate
{
- /// The completion update content items.
+ /// The response update content items.
private IList? _contents;
/// The name of the author of the update.
private string? _authorName;
- /// Gets or sets the name of the author of the completion update.
+ /// Gets or sets the name of the author of the response update.
public string? AuthorName
{
get => _authorName;
set => _authorName = string.IsNullOrWhiteSpace(value) ? null : value;
}
- /// Gets or sets the role of the author of the completion update.
+ /// Gets or sets the role of the author of the response update.
public ChatRole? Role { get; set; }
///
@@ -79,7 +72,7 @@ public string? Text
}
}
- /// Gets or sets the chat completion update content items.
+ /// Gets or sets the chat response update content items.
[AllowNull]
public IList Contents
{
@@ -87,9 +80,9 @@ public IList Contents
set => _contents = value;
}
- /// Gets or sets the raw representation of the completion update from an underlying implementation.
+ /// Gets or sets the raw representation of the response update from an underlying implementation.
///
- /// If a is created to represent some underlying object from another object
+ /// If a is created to represent some underlying object from another object
/// model, this property can be used to store that original object. This can be useful for debugging or
/// for enabling a consumer to access the underlying object model if needed.
///
@@ -99,20 +92,20 @@ public IList Contents
/// Gets or sets additional properties for the update.
public AdditionalPropertiesDictionary? AdditionalProperties { get; set; }
- /// Gets or sets the ID of the completion of which this update is a part.
- public string? CompletionId { get; set; }
+ /// Gets or sets the ID of the response of which this update is a part.
+ public string? ResponseId { get; set; }
- /// Gets or sets the chat thread ID associated with the chat completion of which this update is a part.
+ /// Gets or sets the chat thread ID associated with the chat response of which this update is a part.
///
/// Some implementations are capable of storing the state for a chat thread, such that
- /// the input messages supplied to need only be the additional messages beyond
+ /// the input messages supplied to need only be the additional messages beyond
/// what's already stored. If this property is non-, it represents an identifier for that state,
/// and it should be used in a subsequent instead of supplying the same messages
/// (and this streaming message) as part of the chatMessages parameter.
///
public string? ChatThreadId { get; set; }
- /// Gets or sets a timestamp for the completion update.
+ /// Gets or sets a timestamp for the response update.
public DateTimeOffset? CreatedAt { get; set; }
/// Gets or sets the zero-based index of the choice with which this update is associated in the streaming sequence.
@@ -121,7 +114,7 @@ public IList Contents
/// Gets or sets the finish reason for the operation.
public ChatFinishReason? FinishReason { get; set; }
- /// Gets or sets the model ID using in the creation of the chat completion of which this update is a part.
+ /// Gets or sets the model ID associated with this response update.
public string? ModelId { get; set; }
///
diff --git a/src/Libraries/Microsoft.Extensions.AI.Abstractions/ChatCompletion/StreamingChatCompletionUpdateExtensions.cs b/src/Libraries/Microsoft.Extensions.AI.Abstractions/ChatCompletion/ChatResponseUpdateExtensions.cs
similarity index 66%
rename from src/Libraries/Microsoft.Extensions.AI.Abstractions/ChatCompletion/StreamingChatCompletionUpdateExtensions.cs
rename to src/Libraries/Microsoft.Extensions.AI.Abstractions/ChatCompletion/ChatResponseUpdateExtensions.cs
index 9694b0e4dc0..bfc2c3fd60f 100644
--- a/src/Libraries/Microsoft.Extensions.AI.Abstractions/ChatCompletion/StreamingChatCompletionUpdateExtensions.cs
+++ b/src/Libraries/Microsoft.Extensions.AI.Abstractions/ChatCompletion/ChatResponseUpdateExtensions.cs
@@ -18,11 +18,11 @@
namespace Microsoft.Extensions.AI;
///
-/// Provides extension methods for working with instances.
+/// Provides extension methods for working with instances.
///
-public static class StreamingChatCompletionUpdateExtensions
+public static class ChatResponseUpdateExtensions
{
- /// Combines instances into a single .
+ /// Combines instances into a single .
/// The updates to be combined.
///
/// to attempt to coalesce contiguous items, where applicable,
@@ -30,26 +30,26 @@ public static class StreamingChatCompletionUpdateExtensions
/// the manufactured instances. When , the original content items are used.
/// The default is .
///
- /// The combined .
- public static ChatCompletion ToChatCompletion(
- this IEnumerable updates, bool coalesceContent = true)
+ /// The combined .
+ public static ChatResponse ToChatResponse(
+ this IEnumerable updates, bool coalesceContent = true)
{
_ = Throw.IfNull(updates);
- ChatCompletion completion = new([]);
+ ChatResponse response = new([]);
Dictionary messages = [];
foreach (var update in updates)
{
- ProcessUpdate(update, messages, completion);
+ ProcessUpdate(update, messages, response);
}
- AddMessagesToCompletion(messages, completion, coalesceContent);
+ AddMessagesToResponse(messages, response, coalesceContent);
- return completion;
+ return response;
}
- /// Combines instances into a single .
+ /// Combines instances into a single .
/// The updates to be combined.
///
/// to attempt to coalesce contiguous items, where applicable,
@@ -58,41 +58,41 @@ public static ChatCompletion ToChatCompletion(
/// The default is .
///
/// The to monitor for cancellation requests. The default is .
- /// The combined .
- public static Task ToChatCompletionAsync(
- this IAsyncEnumerable updates, bool coalesceContent = true, CancellationToken cancellationToken = default)
+ /// The combined .
+ public static Task ToChatResponseAsync(
+ this IAsyncEnumerable updates, bool coalesceContent = true, CancellationToken cancellationToken = default)
{
_ = Throw.IfNull(updates);
- return ToChatCompletionAsync(updates, coalesceContent, cancellationToken);
+ return ToChatResponseAsync(updates, coalesceContent, cancellationToken);
- static async Task ToChatCompletionAsync(
- IAsyncEnumerable updates, bool coalesceContent, CancellationToken cancellationToken)
+ static async Task ToChatResponseAsync(
+ IAsyncEnumerable updates, bool coalesceContent, CancellationToken cancellationToken)
{
- ChatCompletion completion = new([]);
+ ChatResponse response = new([]);
Dictionary messages = [];
await foreach (var update in updates.WithCancellation(cancellationToken).ConfigureAwait(false))
{
- ProcessUpdate(update, messages, completion);
+ ProcessUpdate(update, messages, response);
}
- AddMessagesToCompletion(messages, completion, coalesceContent);
+ AddMessagesToResponse(messages, response, coalesceContent);
- return completion;
+ return response;
}
}
- /// Processes the , incorporating its contents into and .
+ /// Processes the , incorporating its contents into and .
/// The update to process.
- /// The dictionary mapping to the being built for that choice.
- /// The object whose properties should be updated based on .
- private static void ProcessUpdate(StreamingChatCompletionUpdate update, Dictionary messages, ChatCompletion completion)
+ /// The dictionary mapping to the being built for that choice.
+ /// The object whose properties should be updated based on .
+ private static void ProcessUpdate(ChatResponseUpdate update, Dictionary messages, ChatResponse response)
{
- completion.CompletionId ??= update.CompletionId;
- completion.CreatedAt ??= update.CreatedAt;
- completion.FinishReason ??= update.FinishReason;
- completion.ModelId ??= update.ModelId;
+ response.ResponseId ??= update.ResponseId;
+ response.CreatedAt ??= update.CreatedAt;
+ response.FinishReason ??= update.FinishReason;
+ response.ModelId ??= update.ModelId;
#if NET
ChatMessage message = CollectionsMarshal.GetValueRefOrAddDefault(messages, update.ChoiceIndex, out _) ??=
@@ -104,14 +104,14 @@ private static void ProcessUpdate(StreamingChatCompletionUpdate update, Dictiona
}
#endif
- // Incorporate all content from the update into the completion.
+ // Incorporate all content from the update into the response.
foreach (var content in update.Contents)
{
switch (content)
{
- // Usage content is treated specially and propagated to the completion's Usage.
+ // Usage content is treated specially and propagated to the response's Usage.
case UsageContent usage:
- (completion.Usage ??= new()).Add(usage.Details);
+ (response.Usage ??= new()).Add(usage.Details);
break;
default:
@@ -143,28 +143,28 @@ private static void ProcessUpdate(StreamingChatCompletionUpdate update, Dictiona
}
}
- /// Finalizes the object by transferring the into it.
- /// The messages to process further and transfer into .
- /// The result being built.
- /// The corresponding option value provided to or .
- private static void AddMessagesToCompletion(Dictionary messages, ChatCompletion completion, bool coalesceContent)
+ /// Finalizes the object by transferring the into it.
+ /// The messages to process further and transfer into .
+ /// The result being built.
+ /// The corresponding option value provided to or .
+ private static void AddMessagesToResponse(Dictionary messages, ChatResponse response, bool coalesceContent)
{
if (messages.Count <= 1)
{
// Add the single message if there is one.
foreach (var entry in messages)
{
- AddMessage(completion, coalesceContent, entry);
+ AddMessage(response, coalesceContent, entry);
}
// In the vast majority case where there's only one choice, promote any additional properties
- // from the single message to the chat completion, making them more discoverable and more similar
+ // from the single message to the chat response, making them more discoverable and more similar
// to how they're typically surfaced from non-streaming services.
- if (completion.Choices.Count == 1 &&
- completion.Choices[0].AdditionalProperties is { } messageProps)
+ if (response.Choices.Count == 1 &&
+ response.Choices[0].AdditionalProperties is { } messageProps)
{
- completion.Choices[0].AdditionalProperties = null;
- completion.AdditionalProperties = messageProps;
+ response.Choices[0].AdditionalProperties = null;
+ response.AdditionalProperties = messageProps;
}
}
else
@@ -172,7 +172,7 @@ private static void AddMessagesToCompletion(Dictionary message
// Add all of the messages, sorted by choice index.
foreach (var entry in messages.OrderBy(entry => entry.Key))
{
- AddMessage(completion, coalesceContent, entry);
+ AddMessage(response, coalesceContent, entry);
}
// If there are multiple choices, we don't promote additional properties from the individual messages.
@@ -180,7 +180,7 @@ private static void AddMessagesToCompletion(Dictionary message
// conflicting values across the choices, it would be unclear which one should be used.
}
- static void AddMessage(ChatCompletion completion, bool coalesceContent, KeyValuePair entry)
+ static void AddMessage(ChatResponse response, bool coalesceContent, KeyValuePair entry)
{
if (entry.Value.Role == default)
{
@@ -192,7 +192,7 @@ static void AddMessage(ChatCompletion completion, bool coalesceContent, KeyValue
CoalesceTextContent((List)entry.Value.Contents);
}
- completion.Choices.Add(entry.Value);
+ response.Choices.Add(entry.Value);
}
}
diff --git a/src/Libraries/Microsoft.Extensions.AI.Abstractions/ChatCompletion/ChatRole.cs b/src/Libraries/Microsoft.Extensions.AI.Abstractions/ChatCompletion/ChatRole.cs
index 0b5f72adfa5..77717d7c96c 100644
--- a/src/Libraries/Microsoft.Extensions.AI.Abstractions/ChatCompletion/ChatRole.cs
+++ b/src/Libraries/Microsoft.Extensions.AI.Abstractions/ChatCompletion/ChatRole.cs
@@ -11,21 +11,21 @@
namespace Microsoft.Extensions.AI;
///
-/// Describes the intended purpose of a message within a chat completion interaction.
+/// Describes the intended purpose of a message within a chat interaction.
///
[JsonConverter(typeof(Converter))]
public readonly struct ChatRole : IEquatable
{
- /// Gets the role that instructs or sets the behavior of the assistant.
+ /// Gets the role that instructs or sets the behavior of the assistant.
public static ChatRole System { get; } = new("system");
/// Gets the role that provides responses to system-instructed, user-prompted input.
public static ChatRole Assistant { get; } = new("assistant");
- /// Gets the role that provides input for chat completions.
+ /// Gets the role that provides user input for chat interactions.
public static ChatRole User { get; } = new("user");
- /// Gets the role that provides additional information and references for chat completions.
+ /// Gets the role that provides additional information and references in response to tool use requests.
public static ChatRole Tool { get; } = new("tool");
///
diff --git a/src/Libraries/Microsoft.Extensions.AI.Abstractions/ChatCompletion/DelegatingChatClient.cs b/src/Libraries/Microsoft.Extensions.AI.Abstractions/ChatCompletion/DelegatingChatClient.cs
index 875aa31e87e..7882529ac85 100644
--- a/src/Libraries/Microsoft.Extensions.AI.Abstractions/ChatCompletion/DelegatingChatClient.cs
+++ b/src/Libraries/Microsoft.Extensions.AI.Abstractions/ChatCompletion/DelegatingChatClient.cs
@@ -38,15 +38,15 @@ public void Dispose()
protected IChatClient InnerClient { get; }
///
- public virtual Task CompleteAsync(IList chatMessages, ChatOptions? options = null, CancellationToken cancellationToken = default)
+ public virtual Task GetResponseAsync(IList chatMessages, ChatOptions? options = null, CancellationToken cancellationToken = default)
{
- return InnerClient.CompleteAsync(chatMessages, options, cancellationToken);
+ return InnerClient.GetResponseAsync(chatMessages, options, cancellationToken);
}
///
- public virtual IAsyncEnumerable CompleteStreamingAsync(IList chatMessages, ChatOptions? options = null, CancellationToken cancellationToken = default)
+ public virtual IAsyncEnumerable GetStreamingResponseAsync(IList chatMessages, ChatOptions? options = null, CancellationToken cancellationToken = default)
{
- return InnerClient.CompleteStreamingAsync(chatMessages, options, cancellationToken);
+ return InnerClient.GetStreamingResponseAsync(chatMessages, options, cancellationToken);
}
///
diff --git a/src/Libraries/Microsoft.Extensions.AI.Abstractions/ChatCompletion/IChatClient.cs b/src/Libraries/Microsoft.Extensions.AI.Abstractions/ChatCompletion/IChatClient.cs
index 5d86b1fd985..79e2f658cb9 100644
--- a/src/Libraries/Microsoft.Extensions.AI.Abstractions/ChatCompletion/IChatClient.cs
+++ b/src/Libraries/Microsoft.Extensions.AI.Abstractions/ChatCompletion/IChatClient.cs
@@ -8,15 +8,15 @@
namespace Microsoft.Extensions.AI;
-/// Represents a chat completion client.
+/// Represents a chat client.
///
///
/// Unless otherwise specified, all members of are thread-safe for concurrent use.
/// It is expected that all implementations of support being used by multiple requests concurrently.
///
///
-/// However, implementations of might mutate the arguments supplied to and
-/// , such as by adding additional messages to the messages list or configuring the options
+/// However, implementations of might mutate the arguments supplied to and
+/// , such as by adding additional messages to the messages list or configuring the options
/// instance. Thus, consumers of the interface either should avoid using shared instances of these arguments for concurrent
/// invocations or should otherwise ensure by construction that no instances are used which might employ
/// such mutation. For example, the WithChatOptions method be provided with a callback that could mutate the supplied options
@@ -25,7 +25,7 @@ namespace Microsoft.Extensions.AI;
///
public interface IChatClient : IDisposable
{
- /// Sends chat messages to the model and returns the response messages.
+ /// Sends chat messages and returns the response.
/// The chat content to send.
/// The chat options to configure the request.
/// The to monitor for cancellation requests. The default is .
@@ -34,12 +34,12 @@ public interface IChatClient : IDisposable
/// The returned messages aren't added to . However, any intermediate messages generated implicitly
/// by the client, including any messages for roundtrips to the model as part of the implementation of this request, are included.
///
- Task CompleteAsync(
+ Task GetResponseAsync(
IList chatMessages,
ChatOptions? options = null,
CancellationToken cancellationToken = default);
- /// Sends chat messages to the model and streams the response messages.
+ /// Sends chat messages and streams the response.
/// The chat content to send.
/// The chat options to configure the request.
/// The to monitor for cancellation requests. The default is .
@@ -48,7 +48,7 @@ Task CompleteAsync(
/// The returned messages aren't added to . However, any intermediate messages generated implicitly
/// by the client, including any messages for roundtrips to the model as part of the implementation of this request, are included.
///
- IAsyncEnumerable CompleteStreamingAsync(
+ IAsyncEnumerable GetStreamingResponseAsync(
IList chatMessages,
ChatOptions? options = null,
CancellationToken cancellationToken = default);
diff --git a/src/Libraries/Microsoft.Extensions.AI.Abstractions/Contents/UsageContent.cs b/src/Libraries/Microsoft.Extensions.AI.Abstractions/Contents/UsageContent.cs
index 16e9d08b324..dcb2f87df7c 100644
--- a/src/Libraries/Microsoft.Extensions.AI.Abstractions/Contents/UsageContent.cs
+++ b/src/Libraries/Microsoft.Extensions.AI.Abstractions/Contents/UsageContent.cs
@@ -8,7 +8,7 @@
namespace Microsoft.Extensions.AI;
///
-/// Represents usage information associated with a chat response.
+/// Represents usage information associated with a chat request and response.
///
[DebuggerDisplay("{DebuggerDisplay,nq}")]
public class UsageContent : AIContent
diff --git a/src/Libraries/Microsoft.Extensions.AI.Abstractions/README.md b/src/Libraries/Microsoft.Extensions.AI.Abstractions/README.md
index ea0d6d9f74f..0d0ae279bef 100644
--- a/src/Libraries/Microsoft.Extensions.AI.Abstractions/README.md
+++ b/src/Libraries/Microsoft.Extensions.AI.Abstractions/README.md
@@ -22,7 +22,7 @@ Or directly in the C# project file:
### `IChatClient`
-The `IChatClient` interface defines a client abstraction responsible for interacting with AI services that provide chat capabilities. It defines methods for sending and receiving messages comprised of multi-modal content (text, images, audio, etc.), either as a complete set or streamed incrementally. Additionally, it provides metadata information about the client and allows for retrieving strongly-typed services that may be provided by the client or its underlying services.
+The `IChatClient` interface defines a client abstraction responsible for interacting with AI services that provide chat capabilities. It defines methods for sending and receiving messages comprised of multi-modal content (text, images, audio, etc.), either as a complete set or streamed incrementally. Additionally, it allows for retrieving strongly-typed services that may be provided by the client or its underlying services.
#### Sample Implementation
@@ -45,7 +45,7 @@ public class SampleChatClient : IChatClient
public SampleChatClient(Uri endpoint, string modelId) =>
_metadata = new("SampleChatClient", endpoint, modelId);
- public async Task CompleteAsync(
+ public async Task GetResponseAsync(
IList chatMessages,
ChatOptions? options = null,
CancellationToken cancellationToken = default)
@@ -53,7 +53,7 @@ public class SampleChatClient : IChatClient
// Simulate some operation.
await Task.Delay(300, cancellationToken);
- // Return a sample chat completion response randomly.
+ // Return a sample chat response randomly.
string[] responses =
[
"This is the first sample response.",
@@ -68,7 +68,7 @@ public class SampleChatClient : IChatClient
});
}
- public async IAsyncEnumerable CompleteStreamingAsync(
+ public async IAsyncEnumerable GetStreamingResponseAsync(
IList chatMessages,
ChatOptions? options = null,
[EnumeratorCancellation] CancellationToken cancellationToken = default)
@@ -81,7 +81,7 @@ public class SampleChatClient : IChatClient
await Task.Delay(100, cancellationToken);
// Yield the next message in the response.
- yield return new StreamingChatCompletionUpdate
+ yield return new ChatResponseUpdate
{
Role = ChatRole.Assistant,
Text = word,
@@ -99,44 +99,44 @@ public class SampleChatClient : IChatClient
}
```
-#### Requesting a Chat Completion: `CompleteAsync`
+#### Requesting a Chat Response: `GetResponseAsync`
-With an instance of `IChatClient`, the `CompleteAsync` method may be used to send a request. The request is composed of one or more messages, each of which is composed of one or more pieces of content. Accelerator methods exist to simplify common cases, such as constructing a request for a single piece of text content.
+With an instance of `IChatClient`, the `GetResponseAsync` method may be used to send a request and get a response. The request is composed of one or more messages, each of which is composed of one or more pieces of content. Accelerator methods exist to simplify common cases, such as constructing a request for a single piece of text content.
```csharp
using Microsoft.Extensions.AI;
IChatClient client = new SampleChatClient(new Uri("http://coolsite.ai"), "my-custom-model");
-var response = await client.CompleteAsync("What is AI?");
+var response = await client.GetResponseAsync("What is AI?");
Console.WriteLine(response.Message);
```
-The core `CompleteAsync` method on the `IChatClient` interface accepts a list of messages. This list represents the history of all messages that are part of the conversation.
+The core `GetResponseAsync` method on the `IChatClient` interface accepts a list of messages. This list represents the history of all messages that are part of the conversation.
```csharp
using Microsoft.Extensions.AI;
IChatClient client = new SampleChatClient(new Uri("http://coolsite.ai"), "my-custom-model");
-Console.WriteLine(await client.CompleteAsync(
+Console.WriteLine(await client.GetResponseAsync(
[
new(ChatRole.System, "You are a helpful AI assistant"),
new(ChatRole.User, "What is AI?"),
]));
```
-#### Requesting a Streaming Chat Completion: `CompleteStreamingAsync`
+#### Requesting a Streaming Chat Response: `GetStreamingResponseAsync`
-The inputs to `CompleteStreamingAsync` are identical to those of `CompleteAsync`. However, rather than returning the complete response as part of a `ChatCompletion` object, the method returns an `IAsyncEnumerable`, providing a stream of updates that together form the single response.
+The inputs to `GetStreamingResponseAsync` are identical to those of `GetResponseAsync`. However, rather than returning the complete response as part of a `ChatResponse` object, the method returns an `IAsyncEnumerable`, providing a stream of updates that together form the single response.
```csharp
using Microsoft.Extensions.AI;
IChatClient client = new SampleChatClient(new Uri("http://coolsite.ai"), "my-custom-model");
-await foreach (var update in client.CompleteStreamingAsync("What is AI?"))
+await foreach (var update in client.GetStreamingResponseAsync("What is AI?"))
{
Console.Write(update);
}
@@ -157,7 +157,7 @@ IChatClient client = new ChatClientBuilder(new OllamaChatClient(new Uri("http://
.UseFunctionInvocation()
.Build();
-var response = client.CompleteStreamingAsync(
+var response = client.GetStreamingResponseAsync(
"Should I wear a rain coat?",
new() { Tools = [AIFunctionFactory.Create(GetCurrentWeather)] });
@@ -185,7 +185,7 @@ string[] prompts = ["What is AI?", "What is .NET?", "What is AI?"];
foreach (var prompt in prompts)
{
- await foreach (var update in client.CompleteStreamingAsync(prompt))
+ await foreach (var update in client.GetStreamingResponseAsync(prompt))
{
Console.Write(update);
}
@@ -212,12 +212,12 @@ IChatClient client = new ChatClientBuilder(new SampleChatClient(new Uri("http://
.UseOpenTelemetry(sourceName, c => c.EnableSensitiveData = true)
.Build();
-Console.WriteLine((await client.CompleteAsync("What is AI?")).Message);
+Console.WriteLine((await client.GetResponseAsync("What is AI?")).Message);
```
#### Options
-Every call to `CompleteAsync` or `CompleteStreamingAsync` may optionally supply a `ChatOptions` instance containing additional parameters for the operation. The most common parameters that are common amongst AI models and services show up as strongly-typed properties on the type, such as `ChatOptions.Temperature`. Other parameters may be supplied by name in a weakly-typed manner via the `ChatOptions.AdditionalProperties` dictionary.
+Every call to `GetResponseAsync` or `GetStreamingResponseAsync` may optionally supply a `ChatOptions` instance containing additional parameters for the operation. The most common parameters that are common amongst AI models and services show up as strongly-typed properties on the type, such as `ChatOptions.Temperature`. Other parameters may be supplied by name in a weakly-typed manner via the `ChatOptions.AdditionalProperties` dictionary.
Options may also be baked into an `IChatClient` via the `ConfigureOptions` extension method on `ChatClientBuilder`. This delegating client wraps another client and invokes the supplied delegate to populate a `ChatOptions` instance for every call. For example, to ensure that the `ChatOptions.ModelId` property defaults to a particular model name, code like the following may be used:
```csharp
@@ -227,8 +227,8 @@ IChatClient client = new ChatClientBuilder(new OllamaChatClient(new Uri("http://
.ConfigureOptions(options => options.ModelId ??= "phi3")
.Build();
-Console.WriteLine(await client.CompleteAsync("What is AI?")); // will request "phi3"
-Console.WriteLine(await client.CompleteAsync("What is AI?", new() { ModelId = "llama3.1" })); // will request "llama3.1"
+Console.WriteLine(await client.GetResponseAsync("What is AI?")); // will request "phi3"
+Console.WriteLine(await client.GetResponseAsync("What is AI?", new() { ModelId = "llama3.1" })); // will request "llama3.1"
```
#### Pipelines of Functionality
@@ -273,13 +273,13 @@ for (int i = 0; i < 3; i++)
new ChatMessage(ChatRole.User, "Do I need an umbrella?")
];
- Console.WriteLine(await client.CompleteAsync(history, options));
+ Console.WriteLine(await client.GetResponseAsync(history, options));
}
```
#### Custom `IChatClient` Middleware
-Anyone can layer in such additional functionality. While it's possible to implement `IChatClient` directly, the `DelegatingChatClient` class is an implementation of the `IChatClient` interface that serves as a base class for creating chat clients that delegate their operations to another `IChatClient` instance. It is designed to facilitate the chaining of multiple clients, allowing calls to be passed through to an underlying client. The class provides default implementations for methods such as `CompleteAsync`, `CompleteStreamingAsync`, and `Dispose`, simply forwarding the calls to the inner client instance. A derived type may then override just the methods it needs to in order to augment the behavior, delegating to the base implementation in order to forward the call along to the wrapped client. This setup is useful for creating flexible and modular chat clients that can be easily extended and composed.
+Anyone can layer in such additional functionality. While it's possible to implement `IChatClient` directly, the `DelegatingChatClient` class is an implementation of the `IChatClient` interface that serves as a base class for creating chat clients that delegate their operations to another `IChatClient` instance. It is designed to facilitate the chaining of multiple clients, allowing calls to be passed through to an underlying client. The class provides default implementations for methods such as `GetResponseAsync`, `GetStreamingResponseAsync`, and `Dispose`, simply forwarding the calls to the inner client instance. A derived type may then override just the methods it needs to in order to augment the behavior, delegating to the base implementation in order to forward the call along to the wrapped client. This setup is useful for creating flexible and modular chat clients that can be easily extended and composed.
Here is an example class derived from `DelegatingChatClient` to provide rate limiting functionality, utilizing the [System.Threading.RateLimiting](https://www.nuget.org/packages/System.Threading.RateLimiting) library:
```csharp
@@ -288,24 +288,24 @@ using System.Threading.RateLimiting;
public sealed class RateLimitingChatClient(IChatClient innerClient, RateLimiter rateLimiter) : DelegatingChatClient(innerClient)
{
- public override async Task CompleteAsync(
+ public override async Task GetResponseAsync(
IList chatMessages, ChatOptions? options = null, CancellationToken cancellationToken = default)
{
using var lease = await rateLimiter.AcquireAsync(permitCount: 1, cancellationToken).ConfigureAwait(false);
if (!lease.IsAcquired)
throw new InvalidOperationException("Unable to acquire lease.");
- return await base.CompleteAsync(chatMessages, options, cancellationToken).ConfigureAwait(false);
+ return await base.GetResponseAsync(chatMessages, options, cancellationToken).ConfigureAwait(false);
}
- public override async IAsyncEnumerable CompleteStreamingAsync(
+ public override async IAsyncEnumerable GetStreamingResponseAsync(
IList chatMessages, ChatOptions? options = null, [EnumeratorCancellation] CancellationToken cancellationToken = default)
{
using var lease = await rateLimiter.AcquireAsync(permitCount: 1, cancellationToken).ConfigureAwait(false);
if (!lease.IsAcquired)
throw new InvalidOperationException("Unable to acquire lease.");
- await foreach (var update in base.CompleteStreamingAsync(chatMessages, options, cancellationToken).ConfigureAwait(false))
+ await foreach (var update in base.GetStreamingResponseAsync(chatMessages, options, cancellationToken).ConfigureAwait(false))
yield return update;
}
@@ -329,7 +329,7 @@ var client = new RateLimitingChatClient(
new SampleChatClient(new Uri("http://localhost"), "test"),
new ConcurrencyLimiter(new() { PermitLimit = 1, QueueLimit = int.MaxValue }));
-await client.CompleteAsync("What color is the sky?");
+await client.GetResponseAsync("What color is the sky?");
```
To make it easier to compose such components with others, the author of the component is recommended to create a "Use" extension method for registering this component into a pipeline, e.g.
@@ -361,9 +361,9 @@ var client = new SampleChatClient(new Uri("http://localhost"), "test")
```
The above extension methods demonstrate using a `Use` method on `ChatClientBuilder`. `ChatClientBuilder` also provides `Use` overloads that make it easier to
-write such delegating handlers. For example, in the earlier `RateLimitingChatClient` example, the overrides of `CompleteAsync` and `CompleteStreamingAsync` only
+write such delegating handlers. For example, in the earlier `RateLimitingChatClient` example, the overrides of `GetResponseAsync` and `GetStreamingResponseAsync` only
need to do work before and after delegating to the next client in the pipeline. To achieve the same thing without writing a custom class, an overload of `Use` may
-be used that accepts a delegate which is used for both `CompleteAsync` and `CompleteStreamingAsync`, reducing the boilderplate required:
+be used that accepts a delegate which is used for both `GetResponseAsync` and `GetStreamingResponseAsync`, reducing the boilerplate required:
```csharp
RateLimiter rateLimiter = ...;
var client = new SampleChatClient(new Uri("http://localhost"), "test")
@@ -402,7 +402,7 @@ var client = new SampleChatClient(new Uri("http://localhost"), "test")
.Build();
```
-For scenarios where the developer would like to specify delegating implementations of `CompleteAsync` and `CompleteStreamingAsync` inline,
+For scenarios where the developer would like to specify delegating implementations of `GetResponseAsync` and `GetStreamingResponseAsync` inline,
and where it's important to be able to write a different implementation for each in order to handle their unique return types specially,
another overload of `Use` exists that accepts a delegate for each.
@@ -424,7 +424,7 @@ var host = builder.Build();
// Elsewhere in the app
var chatClient = host.Services.GetRequiredService();
-Console.WriteLine(await chatClient.CompleteAsync("What is AI?"));
+Console.WriteLine(await chatClient.GetResponseAsync("What is AI?"));
```
What instance and configuration is injected may differ based on the current needs of the application, and multiple pipelines may be injected with different keys.
diff --git a/src/Libraries/Microsoft.Extensions.AI.Abstractions/Utilities/AIJsonUtilities.Defaults.cs b/src/Libraries/Microsoft.Extensions.AI.Abstractions/Utilities/AIJsonUtilities.Defaults.cs
index 9ae0fac76c4..dff7fbab373 100644
--- a/src/Libraries/Microsoft.Extensions.AI.Abstractions/Utilities/AIJsonUtilities.Defaults.cs
+++ b/src/Libraries/Microsoft.Extensions.AI.Abstractions/Utilities/AIJsonUtilities.Defaults.cs
@@ -82,9 +82,9 @@ private static JsonSerializerOptions CreateDefaultOptions()
[JsonSerializable(typeof(EmbeddingGenerationOptions))]
[JsonSerializable(typeof(ChatClientMetadata))]
[JsonSerializable(typeof(EmbeddingGeneratorMetadata))]
- [JsonSerializable(typeof(ChatCompletion))]
- [JsonSerializable(typeof(StreamingChatCompletionUpdate))]
- [JsonSerializable(typeof(IReadOnlyList))]
+ [JsonSerializable(typeof(ChatResponse))]
+ [JsonSerializable(typeof(ChatResponseUpdate))]
+ [JsonSerializable(typeof(IReadOnlyList))]
[JsonSerializable(typeof(Dictionary))]
[JsonSerializable(typeof(IDictionary))]
[JsonSerializable(typeof(IDictionary))]
diff --git a/src/Libraries/Microsoft.Extensions.AI.AzureAIInference/AzureAIInferenceChatClient.cs b/src/Libraries/Microsoft.Extensions.AI.AzureAIInference/AzureAIInferenceChatClient.cs
index 137b2d80e70..27e6298e057 100644
--- a/src/Libraries/Microsoft.Extensions.AI.AzureAIInference/AzureAIInferenceChatClient.cs
+++ b/src/Libraries/Microsoft.Extensions.AI.AzureAIInference/AzureAIInferenceChatClient.cs
@@ -77,7 +77,7 @@ public JsonSerializerOptions ToolCallJsonSerializerOptions
}
///
- public async Task CompleteAsync(
+ public async Task GetResponseAsync(
IList chatMessages, ChatOptions? options = null, CancellationToken cancellationToken = default)
{
_ = Throw.IfNull(chatMessages);
@@ -129,20 +129,20 @@ public async Task CompleteAsync(
};
}
- // Wrap the content in a ChatCompletion to return.
- return new ChatCompletion(returnMessages)
+ // Wrap the content in a ChatResponse to return.
+ return new ChatResponse(returnMessages)
{
- CompletionId = response.Id,
CreatedAt = response.Created,
ModelId = response.Model,
FinishReason = ToFinishReason(response.FinishReason),
RawRepresentation = response,
+ ResponseId = response.Id,
Usage = usage,
};
}
///
- public async IAsyncEnumerable CompleteStreamingAsync(
+ public async IAsyncEnumerable GetStreamingResponseAsync(
IList chatMessages, ChatOptions? options = null, [EnumeratorCancellation] CancellationToken cancellationToken = default)
{
_ = Throw.IfNull(chatMessages);
@@ -150,7 +150,7 @@ public async IAsyncEnumerable CompleteStreamingAs
Dictionary? functionCallInfos = null;
ChatRole? streamedRole = default;
ChatFinishReason? finishReason = default;
- string? completionId = null;
+ string? responseId = null;
DateTimeOffset? createdAt = null;
string? modelId = null;
string lastCallId = string.Empty;
@@ -162,25 +162,25 @@ public async IAsyncEnumerable CompleteStreamingAs
// The role and finish reason may arrive during any update, but once they've arrived, the same value should be the same for all subsequent updates.
streamedRole ??= chatCompletionUpdate.Role is global::Azure.AI.Inference.ChatRole role ? ToChatRole(role) : null;
finishReason ??= chatCompletionUpdate.FinishReason is CompletionsFinishReason reason ? ToFinishReason(reason) : null;
- completionId ??= chatCompletionUpdate.Id;
+ responseId ??= chatCompletionUpdate.Id;
createdAt ??= chatCompletionUpdate.Created;
modelId ??= chatCompletionUpdate.Model;
// Create the response content object.
- StreamingChatCompletionUpdate completionUpdate = new()
+ ChatResponseUpdate responseUpdate = new()
{
- CompletionId = chatCompletionUpdate.Id,
CreatedAt = chatCompletionUpdate.Created,
FinishReason = finishReason,
ModelId = modelId,
RawRepresentation = chatCompletionUpdate,
+ ResponseId = chatCompletionUpdate.Id,
Role = streamedRole,
};
// Transfer over content update items.
if (chatCompletionUpdate.ContentUpdate is string update)
{
- completionUpdate.Contents.Add(new TextContent(update));
+ responseUpdate.Contents.Add(new TextContent(update));
}
// Transfer over tool call updates.
@@ -213,7 +213,7 @@ public async IAsyncEnumerable CompleteStreamingAs
if (chatCompletionUpdate.Usage is { } usage)
{
- completionUpdate.Contents.Add(new UsageContent(new()
+ responseUpdate.Contents.Add(new UsageContent(new()
{
InputTokenCount = usage.PromptTokens,
OutputTokenCount = usage.CompletionTokens,
@@ -222,18 +222,18 @@ public async IAsyncEnumerable CompleteStreamingAs
}
// Now yield the item.
- yield return completionUpdate;
+ yield return responseUpdate;
}
// Now that we've received all updates, combine any for function calls into a single item to yield.
if (functionCallInfos is not null)
{
- var completionUpdate = new StreamingChatCompletionUpdate
+ var responseUpdate = new ChatResponseUpdate
{
- CompletionId = completionId,
CreatedAt = createdAt,
FinishReason = finishReason,
ModelId = modelId,
+ ResponseId = responseId,
Role = streamedRole,
};
@@ -246,11 +246,11 @@ public async IAsyncEnumerable CompleteStreamingAs
fci.Arguments?.ToString() ?? string.Empty,
entry.Key,
fci.Name!);
- completionUpdate.Contents.Add(callContent);
+ responseUpdate.Contents.Add(callContent);
}
}
- yield return completionUpdate;
+ yield return responseUpdate;
}
}
@@ -321,7 +321,7 @@ private ChatCompletionsOptions ToAzureAIOptions(IList chatContents,
{
switch (prop.Key)
{
- // Propagate everything else to the ChatCompletionOptions' AdditionalProperties.
+ // Propagate everything else to the ChatCompletionsOptions' AdditionalProperties.
default:
if (prop.Value is not null)
{
diff --git a/src/Libraries/Microsoft.Extensions.AI.AzureAIInference/README.md b/src/Libraries/Microsoft.Extensions.AI.AzureAIInference/README.md
index b781f1ae4c3..c22f6306204 100644
--- a/src/Libraries/Microsoft.Extensions.AI.AzureAIInference/README.md
+++ b/src/Libraries/Microsoft.Extensions.AI.AzureAIInference/README.md
@@ -32,7 +32,7 @@ IChatClient client =
new AzureKeyCredential(Environment.GetEnvironmentVariable("GH_TOKEN")!))
.AsChatClient("gpt-4o-mini");
-Console.WriteLine(await client.CompleteAsync("What is AI?"));
+Console.WriteLine(await client.GetResponseAsync("What is AI?"));
```
> **Note:** When connecting with Azure Open AI, the URL passed into the `ChatCompletionsClient` needs to include `openai/deployments/{yourDeployment}`. For example:
@@ -54,7 +54,7 @@ IChatClient client =
new AzureKeyCredential(Environment.GetEnvironmentVariable("GH_TOKEN")!))
.AsChatClient("gpt-4o-mini");
-Console.WriteLine(await client.CompleteAsync(
+Console.WriteLine(await client.GetResponseAsync(
[
new ChatMessage(ChatRole.System, "You are a helpful AI assistant"),
new ChatMessage(ChatRole.User, "What is AI?"),
@@ -73,7 +73,7 @@ IChatClient client =
new AzureKeyCredential(Environment.GetEnvironmentVariable("GH_TOKEN")!))
.AsChatClient("gpt-4o-mini");
-await foreach (var update in client.CompleteStreamingAsync("What is AI?"))
+await foreach (var update in client.GetStreamingResponseAsync("What is AI?"))
{
Console.Write(update);
}
@@ -101,7 +101,7 @@ ChatOptions chatOptions = new()
Tools = [AIFunctionFactory.Create(GetWeather)]
};
-await foreach (var message in client.CompleteStreamingAsync("Do I need an umbrella?", chatOptions))
+await foreach (var message in client.GetStreamingResponseAsync("Do I need an umbrella?", chatOptions))
{
Console.Write(message);
}
@@ -133,7 +133,7 @@ IChatClient client = new ChatClientBuilder(azureClient)
for (int i = 0; i < 3; i++)
{
- await foreach (var message in client.CompleteStreamingAsync("In less than 100 words, what is AI?"))
+ await foreach (var message in client.GetStreamingResponseAsync("In less than 100 words, what is AI?"))
{
Console.Write(message);
}
@@ -167,7 +167,7 @@ IChatClient client = new ChatClientBuilder(azureClient)
.UseOpenTelemetry(sourceName, c => c.EnableSensitiveData = true)
.Build();
-Console.WriteLine(await client.CompleteAsync("What is AI?"));
+Console.WriteLine(await client.GetResponseAsync("What is AI?"));
```
### Telemetry, Caching, and Tool Calling
@@ -211,7 +211,7 @@ IChatClient client = new ChatClientBuilder(azureClient)
for (int i = 0; i < 3; i++)
{
- Console.WriteLine(await client.CompleteAsync("How much older is Alice than Bob?", chatOptions));
+ Console.WriteLine(await client.GetResponseAsync("How much older is Alice than Bob?", chatOptions));
}
[Description("Gets the age of a person specified by name.")]
@@ -251,7 +251,7 @@ var app = builder.Build();
// Elsewhere in the app
var chatClient = app.Services.GetRequiredService();
-Console.WriteLine(await chatClient.CompleteAsync("What is AI?"));
+Console.WriteLine(await chatClient.GetResponseAsync("What is AI?"));
```
### Minimal Web API
@@ -274,7 +274,7 @@ var app = builder.Build();
app.MapPost("/chat", async (IChatClient client, string message) =>
{
- var response = await client.CompleteAsync(message);
+ var response = await client.GetResponseAsync(message);
return response.Message;
});
diff --git a/src/Libraries/Microsoft.Extensions.AI.Ollama/OllamaChatClient.cs b/src/Libraries/Microsoft.Extensions.AI.Ollama/OllamaChatClient.cs
index 450b83bd6a2..efeff58d592 100644
--- a/src/Libraries/Microsoft.Extensions.AI.Ollama/OllamaChatClient.cs
+++ b/src/Libraries/Microsoft.Extensions.AI.Ollama/OllamaChatClient.cs
@@ -78,7 +78,7 @@ public JsonSerializerOptions ToolCallJsonSerializerOptions
}
///
- public async Task CompleteAsync(IList chatMessages, ChatOptions? options = null, CancellationToken cancellationToken = default)
+ public async Task GetResponseAsync(IList chatMessages, ChatOptions? options = null, CancellationToken cancellationToken = default)
{
_ = Throw.IfNull(chatMessages);
@@ -104,16 +104,16 @@ public async Task CompleteAsync(IList chatMessages,
return new([FromOllamaMessage(response.Message!)])
{
- CompletionId = response.CreatedAt,
- ModelId = response.Model ?? options?.ModelId ?? _metadata.ModelId,
CreatedAt = DateTimeOffset.TryParse(response.CreatedAt, CultureInfo.InvariantCulture, DateTimeStyles.None, out DateTimeOffset createdAt) ? createdAt : null,
FinishReason = ToFinishReason(response),
+ ModelId = response.Model ?? options?.ModelId ?? _metadata.ModelId,
+ ResponseId = response.CreatedAt,
Usage = ParseOllamaChatResponseUsage(response),
};
}
///
- public async IAsyncEnumerable CompleteStreamingAsync(
+ public async IAsyncEnumerable GetStreamingResponseAsync(
IList chatMessages, ChatOptions? options = null, [EnumeratorCancellation] CancellationToken cancellationToken = default)
{
_ = Throw.IfNull(chatMessages);
@@ -152,13 +152,13 @@ public async IAsyncEnumerable CompleteStreamingAs
string? modelId = chunk.Model ?? _metadata.ModelId;
- StreamingChatCompletionUpdate update = new()
+ ChatResponseUpdate update = new()
{
- CompletionId = chunk.CreatedAt,
- Role = chunk.Message?.Role is not null ? new ChatRole(chunk.Message.Role) : null,
CreatedAt = DateTimeOffset.TryParse(chunk.CreatedAt, CultureInfo.InvariantCulture, DateTimeStyles.None, out DateTimeOffset createdAt) ? createdAt : null,
FinishReason = ToFinishReason(chunk),
ModelId = modelId,
+ ResponseId = chunk.CreatedAt,
+ Role = chunk.Message?.Role is not null ? new ChatRole(chunk.Message.Role) : null,
};
if (chunk.Message is { } message)
diff --git a/src/Libraries/Microsoft.Extensions.AI.Ollama/README.md b/src/Libraries/Microsoft.Extensions.AI.Ollama/README.md
index e468965b9a8..bb0c360d877 100644
--- a/src/Libraries/Microsoft.Extensions.AI.Ollama/README.md
+++ b/src/Libraries/Microsoft.Extensions.AI.Ollama/README.md
@@ -27,7 +27,7 @@ using Microsoft.Extensions.AI;
IChatClient client = new OllamaChatClient(new Uri("http://localhost:11434/"), "llama3.1");
-Console.WriteLine(await client.CompleteAsync("What is AI?"));
+Console.WriteLine(await client.GetResponseAsync("What is AI?"));
```
### Chat + Conversation History
@@ -37,7 +37,7 @@ using Microsoft.Extensions.AI;
IChatClient client = new OllamaChatClient(new Uri("http://localhost:11434/"), "llama3.1");
-Console.WriteLine(await client.CompleteAsync(
+Console.WriteLine(await client.GetResponseAsync(
[
new ChatMessage(ChatRole.System, "You are a helpful AI assistant"),
new ChatMessage(ChatRole.User, "What is AI?"),
@@ -51,7 +51,7 @@ using Microsoft.Extensions.AI;
IChatClient client = new OllamaChatClient(new Uri("http://localhost:11434/"), "llama3.1");
-await foreach (var update in client.CompleteStreamingAsync("What is AI?"))
+await foreach (var update in client.GetStreamingResponseAsync("What is AI?"))
{
Console.Write(update);
}
@@ -79,7 +79,7 @@ ChatOptions chatOptions = new()
Tools = [AIFunctionFactory.Create(GetWeather)]
};
-Console.WriteLine(await client.CompleteAsync("Do I need an umbrella?", chatOptions));
+Console.WriteLine(await client.GetResponseAsync("Do I need an umbrella?", chatOptions));
[Description("Gets the weather")]
static string GetWeather() => Random.Shared.NextDouble() > 0.5 ? "It's sunny" : "It's raining";
@@ -103,7 +103,7 @@ IChatClient client = new ChatClientBuilder(ollamaClient)
for (int i = 0; i < 3; i++)
{
- await foreach (var message in client.CompleteStreamingAsync("In less than 100 words, what is AI?"))
+ await foreach (var message in client.GetStreamingResponseAsync("In less than 100 words, what is AI?"))
{
Console.Write(message);
}
@@ -132,7 +132,7 @@ IChatClient client = new ChatClientBuilder(ollamaClient)
.UseOpenTelemetry(sourceName, c => c.EnableSensitiveData = true)
.Build();
-Console.WriteLine(await client.CompleteAsync("What is AI?"));
+Console.WriteLine(await client.GetResponseAsync("What is AI?"));
```
### Telemetry, Caching, and Tool Calling
@@ -171,7 +171,7 @@ IChatClient client = new ChatClientBuilder(ollamaClient)
for (int i = 0; i < 3; i++)
{
- Console.WriteLine(await client.CompleteAsync("How much older is Alice than Bob?", chatOptions));
+ Console.WriteLine(await client.GetResponseAsync("How much older is Alice than Bob?", chatOptions));
}
[Description("Gets the age of a person specified by name.")]
@@ -243,7 +243,7 @@ var app = builder.Build();
// Elsewhere in the app
var chatClient = app.Services.GetRequiredService();
-Console.WriteLine(await chatClient.CompleteAsync("What is AI?"));
+Console.WriteLine(await chatClient.GetResponseAsync("What is AI?"));
```
### Minimal Web API
@@ -262,7 +262,7 @@ var app = builder.Build();
app.MapPost("/chat", async (IChatClient client, string message) =>
{
- var response = await client.CompleteAsync(message, cancellationToken: default);
+ var response = await client.GetResponseAsync(message, cancellationToken: default);
return response.Message;
});
diff --git a/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIAssistantClient.cs b/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIAssistantClient.cs
index b820fde3134..110ea0bf7fe 100644
--- a/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIAssistantClient.cs
+++ b/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIAssistantClient.cs
@@ -24,7 +24,7 @@
namespace Microsoft.Extensions.AI;
-/// Represents an for an OpenAI or .
+/// Represents an for an OpenAI or .
internal sealed class OpenAIAssistantClient : IChatClient
{
/// Metadata for the client.
@@ -69,12 +69,12 @@ public OpenAIAssistantClient(AssistantClient assistantClient, string assistantId
}
///
- public Task CompleteAsync(
+ public Task GetResponseAsync(
IList chatMessages, ChatOptions? options = null, CancellationToken cancellationToken = default) =>
- CompleteStreamingAsync(chatMessages, options, cancellationToken).ToChatCompletionAsync(coalesceContent: true, cancellationToken);
+ GetStreamingResponseAsync(chatMessages, options, cancellationToken).ToChatResponseAsync(coalesceContent: true, cancellationToken);
///
- public async IAsyncEnumerable CompleteStreamingAsync(
+ public async IAsyncEnumerable GetStreamingResponseAsync(
IList chatMessages, ChatOptions? options = null, [EnumeratorCancellation] CancellationToken cancellationToken = default)
{
// Extract necessary state from chatMessages and options.
@@ -133,14 +133,14 @@ public async IAsyncEnumerable CompleteStreamingAs
case RunUpdate ru:
threadId ??= ru.Value.ThreadId;
- StreamingChatCompletionUpdate ruUpdate = new()
+ ChatResponseUpdate ruUpdate = new()
{
AuthorName = ru.Value.AssistantId,
ChatThreadId = threadId,
- CompletionId = ru.Value.Id,
CreatedAt = ru.Value.CreatedAt,
ModelId = ru.Value.Model,
RawRepresentation = ru,
+ ResponseId = ru.Value.Id,
Role = ChatRole.Assistant,
};
diff --git a/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIChatClient.cs b/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIChatClient.cs
index acd73142dcf..ba584cc1734 100644
--- a/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIChatClient.cs
+++ b/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIChatClient.cs
@@ -18,7 +18,7 @@
namespace Microsoft.Extensions.AI;
-/// Represents an for an OpenAI or .
+/// Represents an for an OpenAI or .
public sealed class OpenAIChatClient : IChatClient
{
/// Gets the default OpenAI endpoint.
@@ -99,7 +99,7 @@ public JsonSerializerOptions ToolCallJsonSerializerOptions
}
///
- public async Task CompleteAsync(
+ public async Task GetResponseAsync(
IList chatMessages, ChatOptions? options = null, CancellationToken cancellationToken = default)
{
_ = Throw.IfNull(chatMessages);
@@ -114,7 +114,7 @@ public async Task CompleteAsync(
}
///
- public IAsyncEnumerable CompleteStreamingAsync(
+ public IAsyncEnumerable GetStreamingResponseAsync(
IList chatMessages, ChatOptions? options = null, CancellationToken cancellationToken = default)
{
_ = Throw.IfNull(chatMessages);
diff --git a/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIChatCompletionRequest.cs b/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIChatCompletionRequest.cs
index 5b7d7fe5a34..6a28c9f5490 100644
--- a/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIChatCompletionRequest.cs
+++ b/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIChatCompletionRequest.cs
@@ -19,17 +19,17 @@ namespace Microsoft.Extensions.AI;
public sealed class OpenAIChatCompletionRequest
{
///
- /// Gets the chat messages specified in the completion request.
+ /// Gets the chat messages specified in the request.
///
public required IList Messages { get; init; }
///
- /// Gets the chat options governing the completion request.
+ /// Gets the chat options governing the request.
///
public required ChatOptions Options { get; init; }
///
- /// Gets a value indicating whether the completion response should be streamed.
+ /// Gets a value indicating whether the response should be streamed.
///
public bool Stream { get; init; }
diff --git a/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIModelMapper.ChatCompletion.cs b/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIModelMapper.ChatCompletion.cs
index e7eba21e4a8..170f8cbe06e 100644
--- a/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIModelMapper.ChatCompletion.cs
+++ b/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIModelMapper.ChatCompletion.cs
@@ -28,17 +28,17 @@ internal static partial class OpenAIModelMappers
{
internal static JsonElement DefaultParameterSchema { get; } = JsonDocument.Parse("{}").RootElement;
- public static OpenAI.Chat.ChatCompletion ToOpenAIChatCompletion(ChatCompletion chatCompletion, JsonSerializerOptions options)
+ public static ChatCompletion ToOpenAIChatCompletion(ChatResponse response, JsonSerializerOptions options)
{
- _ = Throw.IfNull(chatCompletion);
+ _ = Throw.IfNull(response);
- if (chatCompletion.Choices.Count > 1)
+ if (response.Choices.Count > 1)
{
throw new NotSupportedException("Creating OpenAI ChatCompletion models with multiple choices is currently not supported.");
}
List? toolCalls = null;
- foreach (AIContent content in chatCompletion.Message.Contents)
+ foreach (AIContent content in response.Message.Contents)
{
if (content is FunctionCallContent callRequest)
{
@@ -53,27 +53,27 @@ public static OpenAI.Chat.ChatCompletion ToOpenAIChatCompletion(ChatCompletion c
}
ChatTokenUsage? chatTokenUsage = null;
- if (chatCompletion.Usage is UsageDetails usageDetails)
+ if (response.Usage is UsageDetails usageDetails)
{
chatTokenUsage = ToOpenAIUsage(usageDetails);
}
return OpenAIChatModelFactory.ChatCompletion(
- id: chatCompletion.CompletionId ?? CreateCompletionId(),
- model: chatCompletion.ModelId,
- createdAt: chatCompletion.CreatedAt ?? DateTimeOffset.UtcNow,
- role: ToOpenAIChatRole(chatCompletion.Message.Role).Value,
- finishReason: ToOpenAIFinishReason(chatCompletion.FinishReason),
- content: new(ToOpenAIChatContent(chatCompletion.Message.Contents)),
+ id: response.ResponseId ?? CreateCompletionId(),
+ model: response.ModelId,
+ createdAt: response.CreatedAt ?? DateTimeOffset.UtcNow,
+ role: ToOpenAIChatRole(response.Message.Role).Value,
+ finishReason: ToOpenAIFinishReason(response.FinishReason),
+ content: new(ToOpenAIChatContent(response.Message.Contents)),
toolCalls: toolCalls,
- refusal: chatCompletion.AdditionalProperties.GetValueOrDefault(nameof(OpenAI.Chat.ChatCompletion.Refusal)),
- contentTokenLogProbabilities: chatCompletion.AdditionalProperties.GetValueOrDefault>(nameof(OpenAI.Chat.ChatCompletion.ContentTokenLogProbabilities)),
- refusalTokenLogProbabilities: chatCompletion.AdditionalProperties.GetValueOrDefault>(nameof(OpenAI.Chat.ChatCompletion.RefusalTokenLogProbabilities)),
- systemFingerprint: chatCompletion.AdditionalProperties.GetValueOrDefault(nameof(OpenAI.Chat.ChatCompletion.SystemFingerprint)),
+ refusal: response.AdditionalProperties.GetValueOrDefault(nameof(ChatCompletion.Refusal)),
+ contentTokenLogProbabilities: response.AdditionalProperties.GetValueOrDefault>(nameof(ChatCompletion.ContentTokenLogProbabilities)),
+ refusalTokenLogProbabilities: response.AdditionalProperties.GetValueOrDefault>(nameof(ChatCompletion.RefusalTokenLogProbabilities)),
+ systemFingerprint: response.AdditionalProperties.GetValueOrDefault(nameof(ChatCompletion.SystemFingerprint)),
usage: chatTokenUsage);
}
- public static ChatCompletion FromOpenAIChatCompletion(OpenAI.Chat.ChatCompletion openAICompletion, ChatOptions? options, ChatCompletionOptions chatCompletionOptions)
+ public static ChatResponse FromOpenAIChatCompletion(ChatCompletion openAICompletion, ChatOptions? options, ChatCompletionOptions chatCompletionOptions)
{
_ = Throw.IfNull(openAICompletion);
@@ -139,42 +139,42 @@ public static ChatCompletion FromOpenAIChatCompletion(OpenAI.Chat.ChatCompletion
}
}
- // Wrap the content in a ChatCompletion to return.
- var completion = new ChatCompletion([returnMessage])
+ // Wrap the content in a ChatResponse to return.
+ var response = new ChatResponse([returnMessage])
{
- CompletionId = openAICompletion.Id,
CreatedAt = openAICompletion.CreatedAt,
FinishReason = FromOpenAIFinishReason(openAICompletion.FinishReason),
ModelId = openAICompletion.Model,
RawRepresentation = openAICompletion,
+ ResponseId = openAICompletion.Id,
};
if (openAICompletion.Usage is ChatTokenUsage tokenUsage)
{
- completion.Usage = FromOpenAIUsage(tokenUsage);
+ response.Usage = FromOpenAIUsage(tokenUsage);
}
if (openAICompletion.ContentTokenLogProbabilities is { Count: > 0 } contentTokenLogProbs)
{
- (completion.AdditionalProperties ??= [])[nameof(openAICompletion.ContentTokenLogProbabilities)] = contentTokenLogProbs;
+ (response.AdditionalProperties ??= [])[nameof(openAICompletion.ContentTokenLogProbabilities)] = contentTokenLogProbs;
}
if (openAICompletion.Refusal is string refusal)
{
- (completion.AdditionalProperties ??= [])[nameof(openAICompletion.Refusal)] = refusal;
+ (response.AdditionalProperties ??= [])[nameof(openAICompletion.Refusal)] = refusal;
}
if (openAICompletion.RefusalTokenLogProbabilities is { Count: > 0 } refusalTokenLogProbs)
{
- (completion.AdditionalProperties ??= [])[nameof(openAICompletion.RefusalTokenLogProbabilities)] = refusalTokenLogProbs;
+ (response.AdditionalProperties ??= [])[nameof(openAICompletion.RefusalTokenLogProbabilities)] = refusalTokenLogProbs;
}
if (openAICompletion.SystemFingerprint is string systemFingerprint)
{
- (completion.AdditionalProperties ??= [])[nameof(openAICompletion.SystemFingerprint)] = systemFingerprint;
+ (response.AdditionalProperties ??= [])[nameof(openAICompletion.SystemFingerprint)] = systemFingerprint;
}
- return completion;
+ return response;
}
public static ChatOptions FromOpenAIOptions(ChatCompletionOptions? options)
diff --git a/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIModelMapper.ChatMessage.cs b/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIModelMapper.ChatMessage.cs
index 399ca5484f0..c051c208f1e 100644
--- a/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIModelMapper.ChatMessage.cs
+++ b/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIModelMapper.ChatMessage.cs
@@ -23,8 +23,8 @@ public static OpenAIChatCompletionRequest FromOpenAIChatCompletionRequest(OpenAI
return new()
{
Messages = messages,
- Options = chatOptions,
ModelId = chatOptions.ModelId,
+ Options = chatOptions,
Stream = _getStreamAccessor(chatCompletionOptions) ?? false,
};
}
diff --git a/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIModelMappers.StreamingChatCompletion.cs b/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIModelMappers.StreamingChatCompletion.cs
index 5d743a1baea..bfafbdf82b2 100644
--- a/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIModelMappers.StreamingChatCompletion.cs
+++ b/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIModelMappers.StreamingChatCompletion.cs
@@ -18,17 +18,17 @@ namespace Microsoft.Extensions.AI;
internal static partial class OpenAIModelMappers
{
- public static async IAsyncEnumerable ToOpenAIStreamingChatCompletionAsync(
- IAsyncEnumerable chatCompletions,
+ public static async IAsyncEnumerable ToOpenAIStreamingChatCompletionAsync(
+ IAsyncEnumerable updates,
JsonSerializerOptions options,
[EnumeratorCancellation] CancellationToken cancellationToken = default)
{
- await foreach (var chatCompletionUpdate in chatCompletions.WithCancellation(cancellationToken).ConfigureAwait(false))
+ await foreach (var update in updates.WithCancellation(cancellationToken).ConfigureAwait(false))
{
List? toolCallUpdates = null;
ChatTokenUsage? chatTokenUsage = null;
- foreach (var content in chatCompletionUpdate.Contents)
+ foreach (var content in update.Contents)
{
if (content is FunctionCallContent functionCallContent)
{
@@ -46,92 +46,92 @@ internal static partial class OpenAIModelMappers
}
yield return OpenAIChatModelFactory.StreamingChatCompletionUpdate(
- completionId: chatCompletionUpdate.CompletionId ?? CreateCompletionId(),
- model: chatCompletionUpdate.ModelId,
- createdAt: chatCompletionUpdate.CreatedAt ?? DateTimeOffset.UtcNow,
- role: ToOpenAIChatRole(chatCompletionUpdate.Role),
- finishReason: chatCompletionUpdate.FinishReason is null ? null : ToOpenAIFinishReason(chatCompletionUpdate.FinishReason),
- contentUpdate: [.. ToOpenAIChatContent(chatCompletionUpdate.Contents)],
+ completionId: update.ResponseId ?? CreateCompletionId(),
+ model: update.ModelId,
+ createdAt: update.CreatedAt ?? DateTimeOffset.UtcNow,
+ role: ToOpenAIChatRole(update.Role),
+ finishReason: update.FinishReason is null ? null : ToOpenAIFinishReason(update.FinishReason),
+ contentUpdate: [.. ToOpenAIChatContent(update.Contents)],
toolCallUpdates: toolCallUpdates,
- refusalUpdate: chatCompletionUpdate.AdditionalProperties.GetValueOrDefault(nameof(OpenAI.Chat.StreamingChatCompletionUpdate.RefusalUpdate)),
- contentTokenLogProbabilities: chatCompletionUpdate.AdditionalProperties.GetValueOrDefault>(nameof(OpenAI.Chat.StreamingChatCompletionUpdate.ContentTokenLogProbabilities)),
- refusalTokenLogProbabilities: chatCompletionUpdate.AdditionalProperties.GetValueOrDefault>(nameof(OpenAI.Chat.StreamingChatCompletionUpdate.RefusalTokenLogProbabilities)),
- systemFingerprint: chatCompletionUpdate.AdditionalProperties.GetValueOrDefault(nameof(OpenAI.Chat.StreamingChatCompletionUpdate.SystemFingerprint)),
+ refusalUpdate: update.AdditionalProperties.GetValueOrDefault(nameof(StreamingChatCompletionUpdate.RefusalUpdate)),
+ contentTokenLogProbabilities: update.AdditionalProperties.GetValueOrDefault>(nameof(StreamingChatCompletionUpdate.ContentTokenLogProbabilities)),
+ refusalTokenLogProbabilities: update.AdditionalProperties.GetValueOrDefault>(nameof(StreamingChatCompletionUpdate.RefusalTokenLogProbabilities)),
+ systemFingerprint: update.AdditionalProperties.GetValueOrDefault(nameof(StreamingChatCompletionUpdate.SystemFingerprint)),
usage: chatTokenUsage);
}
}
- public static async IAsyncEnumerable FromOpenAIStreamingChatCompletionAsync(
- IAsyncEnumerable chatCompletionUpdates,
+ public static async IAsyncEnumerable FromOpenAIStreamingChatCompletionAsync(
+ IAsyncEnumerable updates,
[EnumeratorCancellation] CancellationToken cancellationToken = default)
{
Dictionary? functionCallInfos = null;
ChatRole? streamedRole = null;
ChatFinishReason? finishReason = null;
StringBuilder? refusal = null;
- string? completionId = null;
+ string? responseId = null;
DateTimeOffset? createdAt = null;
string? modelId = null;
string? fingerprint = null;
// Process each update as it arrives
- await foreach (OpenAI.Chat.StreamingChatCompletionUpdate chatCompletionUpdate in chatCompletionUpdates.WithCancellation(cancellationToken).ConfigureAwait(false))
+ await foreach (StreamingChatCompletionUpdate update in updates.WithCancellation(cancellationToken).ConfigureAwait(false))
{
// The role and finish reason may arrive during any update, but once they've arrived, the same value should be the same for all subsequent updates.
- streamedRole ??= chatCompletionUpdate.Role is ChatMessageRole role ? FromOpenAIChatRole(role) : null;
- finishReason ??= chatCompletionUpdate.FinishReason is OpenAI.Chat.ChatFinishReason reason ? FromOpenAIFinishReason(reason) : null;
- completionId ??= chatCompletionUpdate.CompletionId;
- createdAt ??= chatCompletionUpdate.CreatedAt;
- modelId ??= chatCompletionUpdate.Model;
- fingerprint ??= chatCompletionUpdate.SystemFingerprint;
+ streamedRole ??= update.Role is ChatMessageRole role ? FromOpenAIChatRole(role) : null;
+ finishReason ??= update.FinishReason is OpenAI.Chat.ChatFinishReason reason ? FromOpenAIFinishReason(reason) : null;
+ responseId ??= update.CompletionId;
+ createdAt ??= update.CreatedAt;
+ modelId ??= update.Model;
+ fingerprint ??= update.SystemFingerprint;
// Create the response content object.
- StreamingChatCompletionUpdate completionUpdate = new()
+ ChatResponseUpdate responseUpdate = new()
{
- CompletionId = chatCompletionUpdate.CompletionId,
- CreatedAt = chatCompletionUpdate.CreatedAt,
+ ResponseId = update.CompletionId,
+ CreatedAt = update.CreatedAt,
FinishReason = finishReason,
ModelId = modelId,
- RawRepresentation = chatCompletionUpdate,
+ RawRepresentation = update,
Role = streamedRole,
};
// Populate it with any additional metadata from the OpenAI object.
- if (chatCompletionUpdate.ContentTokenLogProbabilities is { Count: > 0 } contentTokenLogProbs)
+ if (update.ContentTokenLogProbabilities is { Count: > 0 } contentTokenLogProbs)
{
- (completionUpdate.AdditionalProperties ??= [])[nameof(chatCompletionUpdate.ContentTokenLogProbabilities)] = contentTokenLogProbs;
+ (responseUpdate.AdditionalProperties ??= [])[nameof(update.ContentTokenLogProbabilities)] = contentTokenLogProbs;
}
- if (chatCompletionUpdate.RefusalTokenLogProbabilities is { Count: > 0 } refusalTokenLogProbs)
+ if (update.RefusalTokenLogProbabilities is { Count: > 0 } refusalTokenLogProbs)
{
- (completionUpdate.AdditionalProperties ??= [])[nameof(chatCompletionUpdate.RefusalTokenLogProbabilities)] = refusalTokenLogProbs;
+ (responseUpdate.AdditionalProperties ??= [])[nameof(update.RefusalTokenLogProbabilities)] = refusalTokenLogProbs;
}
if (fingerprint is not null)
{
- (completionUpdate.AdditionalProperties ??= [])[nameof(chatCompletionUpdate.SystemFingerprint)] = fingerprint;
+ (responseUpdate.AdditionalProperties ??= [])[nameof(update.SystemFingerprint)] = fingerprint;
}
// Transfer over content update items.
- if (chatCompletionUpdate.ContentUpdate is { Count: > 0 })
+ if (update.ContentUpdate is { Count: > 0 })
{
- foreach (ChatMessageContentPart contentPart in chatCompletionUpdate.ContentUpdate)
+ foreach (ChatMessageContentPart contentPart in update.ContentUpdate)
{
if (ToAIContent(contentPart) is AIContent aiContent)
{
- completionUpdate.Contents.Add(aiContent);
+ responseUpdate.Contents.Add(aiContent);
}
}
}
// Transfer over refusal updates.
- if (chatCompletionUpdate.RefusalUpdate is not null)
+ if (update.RefusalUpdate is not null)
{
- _ = (refusal ??= new()).Append(chatCompletionUpdate.RefusalUpdate);
+ _ = (refusal ??= new()).Append(update.RefusalUpdate);
}
// Transfer over tool call updates.
- if (chatCompletionUpdate.ToolCallUpdates is { Count: > 0 } toolCallUpdates)
+ if (update.ToolCallUpdates is { Count: > 0 } toolCallUpdates)
{
foreach (StreamingChatToolCallUpdate toolCallUpdate in toolCallUpdates)
{
@@ -143,30 +143,30 @@ public static async IAsyncEnumerable FromOpenAISt
existing.CallId ??= toolCallUpdate.ToolCallId;
existing.Name ??= toolCallUpdate.FunctionName;
- if (toolCallUpdate.FunctionArgumentsUpdate is { } update && !update.ToMemory().IsEmpty)
+ if (toolCallUpdate.FunctionArgumentsUpdate is { } argUpdate && !argUpdate.ToMemory().IsEmpty)
{
- _ = (existing.Arguments ??= new()).Append(update.ToString());
+ _ = (existing.Arguments ??= new()).Append(argUpdate.ToString());
}
}
}
// Transfer over usage updates.
- if (chatCompletionUpdate.Usage is ChatTokenUsage tokenUsage)
+ if (update.Usage is ChatTokenUsage tokenUsage)
{
var usageDetails = FromOpenAIUsage(tokenUsage);
- completionUpdate.Contents.Add(new UsageContent(usageDetails));
+ responseUpdate.Contents.Add(new UsageContent(usageDetails));
}
// Now yield the item.
- yield return completionUpdate;
+ yield return responseUpdate;
}
// Now that we've received all updates, combine any for function calls into a single item to yield.
if (functionCallInfos is not null)
{
- StreamingChatCompletionUpdate completionUpdate = new()
+ ChatResponseUpdate responseUpdate = new()
{
- CompletionId = completionId,
+ ResponseId = responseId,
CreatedAt = createdAt,
FinishReason = finishReason,
ModelId = modelId,
@@ -182,7 +182,7 @@ public static async IAsyncEnumerable FromOpenAISt
fci.Arguments?.ToString() ?? string.Empty,
fci.CallId!,
fci.Name!);
- completionUpdate.Contents.Add(callContent);
+ responseUpdate.Contents.Add(callContent);
}
}
@@ -190,16 +190,16 @@ public static async IAsyncEnumerable FromOpenAISt
// add it to this function calling item.
if (refusal is not null)
{
- (completionUpdate.AdditionalProperties ??= [])[nameof(ChatMessageContentPart.Refusal)] = refusal.ToString();
+ (responseUpdate.AdditionalProperties ??= [])[nameof(ChatMessageContentPart.Refusal)] = refusal.ToString();
}
// Propagate additional relevant metadata.
if (fingerprint is not null)
{
- (completionUpdate.AdditionalProperties ??= [])[nameof(OpenAI.Chat.ChatCompletion.SystemFingerprint)] = fingerprint;
+ (responseUpdate.AdditionalProperties ??= [])[nameof(ChatCompletion.SystemFingerprint)] = fingerprint;
}
- yield return completionUpdate;
+ yield return responseUpdate;
}
}
}
diff --git a/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAISerializationHelpers.cs b/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAISerializationHelpers.cs
index 899a69630b8..659db4ed3bd 100644
--- a/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAISerializationHelpers.cs
+++ b/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAISerializationHelpers.cs
@@ -36,50 +36,50 @@ public static async Task DeserializeChatCompletionR
}
///
- /// Serializes a Microsoft.Extensions.AI completion using the OpenAI wire format.
+ /// Serializes a Microsoft.Extensions.AI response using the OpenAI wire format.
///
/// The stream to write the value.
- /// The chat completion to serialize.
+ /// The chat response to serialize.
/// The governing function call content serialization.
/// A token used to cancel the serialization operation.
/// A task tracking the serialization operation.
public static async Task SerializeAsync(
Stream stream,
- ChatCompletion chatCompletion,
+ ChatResponse response,
JsonSerializerOptions? options = null,
CancellationToken cancellationToken = default)
{
_ = Throw.IfNull(stream);
- _ = Throw.IfNull(chatCompletion);
+ _ = Throw.IfNull(response);
options ??= AIJsonUtilities.DefaultOptions;
- OpenAI.Chat.ChatCompletion openAiChatCompletion = OpenAIModelMappers.ToOpenAIChatCompletion(chatCompletion, options);
- BinaryData binaryData = JsonModelHelpers.Serialize(openAiChatCompletion);
+ ChatCompletion openAiChatResponse = OpenAIModelMappers.ToOpenAIChatCompletion(response, options);
+ BinaryData binaryData = JsonModelHelpers.Serialize(openAiChatResponse);
await stream.WriteAsync(binaryData.ToMemory(), cancellationToken).ConfigureAwait(false);
}
///
- /// Serializes a Microsoft.Extensions.AI streaming completion using the OpenAI wire format.
+ /// Serializes a Microsoft.Extensions.AI streaming response using the OpenAI wire format.
///
/// The stream to write the value.
- /// The streaming chat completions to serialize.
+ /// The chat response updates to serialize.
/// The governing function call content serialization.
/// A token used to cancel the serialization operation.
/// A task tracking the serialization operation.
public static Task SerializeStreamingAsync(
Stream stream,
- IAsyncEnumerable streamingChatCompletionUpdates,
+ IAsyncEnumerable updates,
JsonSerializerOptions? options = null,
CancellationToken cancellationToken = default)
{
_ = Throw.IfNull(stream);
- _ = Throw.IfNull(streamingChatCompletionUpdates);
+ _ = Throw.IfNull(updates);
options ??= AIJsonUtilities.DefaultOptions;
- var mappedUpdates = OpenAIModelMappers.ToOpenAIStreamingChatCompletionAsync(streamingChatCompletionUpdates, options, cancellationToken);
+ var mappedUpdates = OpenAIModelMappers.ToOpenAIStreamingChatCompletionAsync(updates, options, cancellationToken);
return SseFormatter.WriteAsync(ToSseEventsAsync(mappedUpdates), stream, FormatAsSseEvent, cancellationToken);
- static async IAsyncEnumerable> ToSseEventsAsync(IAsyncEnumerable updates)
+ static async IAsyncEnumerable> ToSseEventsAsync(IAsyncEnumerable updates)
{
await foreach (var update in updates.ConfigureAwait(false))
{
diff --git a/src/Libraries/Microsoft.Extensions.AI.OpenAI/README.md b/src/Libraries/Microsoft.Extensions.AI.OpenAI/README.md
index dacafd33a7f..0c5b32472e0 100644
--- a/src/Libraries/Microsoft.Extensions.AI.OpenAI/README.md
+++ b/src/Libraries/Microsoft.Extensions.AI.OpenAI/README.md
@@ -30,7 +30,7 @@ IChatClient client =
new OpenAIClient(Environment.GetEnvironmentVariable("OPENAI_API_KEY"))
.AsChatClient("gpt-4o-mini");
-Console.WriteLine(await client.CompleteAsync("What is AI?"));
+Console.WriteLine(await client.GetResponseAsync("What is AI?"));
```
### Chat + Conversation History
@@ -43,7 +43,7 @@ IChatClient client =
new OpenAIClient(Environment.GetEnvironmentVariable("OPENAI_API_KEY"))
.AsChatClient("gpt-4o-mini");
-Console.WriteLine(await client.CompleteAsync(
+Console.WriteLine(await client.GetResponseAsync(
[
new ChatMessage(ChatRole.System, "You are a helpful AI assistant"),
new ChatMessage(ChatRole.User, "What is AI?"),
@@ -60,7 +60,7 @@ IChatClient client =
new OpenAIClient(Environment.GetEnvironmentVariable("OPENAI_API_KEY"))
.AsChatClient("gpt-4o-mini");
-await foreach (var update in client.CompleteStreamingAsync("What is AI?"))
+await foreach (var update in client.GetStreamingResponseAsync("What is AI?"))
{
Console.Write(update);
}
@@ -86,7 +86,7 @@ ChatOptions chatOptions = new()
Tools = [AIFunctionFactory.Create(GetWeather)]
};
-await foreach (var message in client.CompleteStreamingAsync("Do I need an umbrella?", chatOptions))
+await foreach (var message in client.GetStreamingResponseAsync("Do I need an umbrella?", chatOptions))
{
Console.Write(message);
}
@@ -116,7 +116,7 @@ IChatClient client = new ChatClientBuilder(openaiClient)
for (int i = 0; i < 3; i++)
{
- await foreach (var message in client.CompleteStreamingAsync("In less than 100 words, what is AI?"))
+ await foreach (var message in client.GetStreamingResponseAsync("In less than 100 words, what is AI?"))
{
Console.Write(message);
}
@@ -148,7 +148,7 @@ IChatClient client = new ChatClientBuilder(openaiClient)
.UseOpenTelemetry(sourceName, c => c.EnableSensitiveData = true)
.Build();
-Console.WriteLine(await client.CompleteAsync("What is AI?"));
+Console.WriteLine(await client.GetResponseAsync("What is AI?"));
```
### Telemetry, Caching, and Tool Calling
@@ -190,7 +190,7 @@ IChatClient client = new ChatClientBuilder(openaiClient)
for (int i = 0; i < 3; i++)
{
- Console.WriteLine(await client.CompleteAsync("How much older is Alice than Bob?", chatOptions));
+ Console.WriteLine(await client.GetResponseAsync("How much older is Alice than Bob?", chatOptions));
}
[Description("Gets the age of a person specified by name.")]
@@ -268,7 +268,7 @@ var app = builder.Build();
// Elsewhere in the app
var chatClient = app.Services.GetRequiredService();
-Console.WriteLine(await chatClient.CompleteAsync("What is AI?"));
+Console.WriteLine(await chatClient.GetResponseAsync("What is AI?"));
```
### Minimal Web API
@@ -291,7 +291,7 @@ var app = builder.Build();
app.MapPost("/chat", async (IChatClient client, string message) =>
{
- var response = await client.CompleteAsync(message);
+ var response = await client.GetResponseAsync(message);
return response.Message;
});
diff --git a/src/Libraries/Microsoft.Extensions.AI/ChatCompletion/AnonymousDelegatingChatClient.cs b/src/Libraries/Microsoft.Extensions.AI/ChatCompletion/AnonymousDelegatingChatClient.cs
index ebd5477f177..9a533363270 100644
--- a/src/Libraries/Microsoft.Extensions.AI/ChatCompletion/AnonymousDelegatingChatClient.cs
+++ b/src/Libraries/Microsoft.Extensions.AI/ChatCompletion/AnonymousDelegatingChatClient.cs
@@ -16,18 +16,18 @@ namespace Microsoft.Extensions.AI;
/// Represents a delegating chat client that wraps an inner client with implementations provided by delegates.
public sealed class AnonymousDelegatingChatClient : DelegatingChatClient
{
- /// The delegate to use as the implementation of .
- private readonly Func, ChatOptions?, IChatClient, CancellationToken, Task>? _completeFunc;
+ /// The delegate to use as the implementation of .
+ private readonly Func, ChatOptions?, IChatClient, CancellationToken, Task>? _getResponseFunc;
- /// The delegate to use as the implementation of .
+ /// The delegate to use as the implementation of .
///
- /// When non-, this delegate is used as the implementation of and
+ /// When non-, this delegate is used as the implementation of and
/// will be invoked with the same arguments as the method itself, along with a reference to the inner client.
- /// When , will delegate directly to the inner client.
+ /// When , will delegate directly to the inner client.
///
- private readonly Func, ChatOptions?, IChatClient, CancellationToken, IAsyncEnumerable>? _completeStreamingFunc;
+ private readonly Func, ChatOptions?, IChatClient, CancellationToken, IAsyncEnumerable>? _getStreamingResponseFunc;
- /// The delegate to use as the implementation of both and .
+ /// The delegate to use as the implementation of both and .
private readonly CompleteSharedFunc? _sharedFunc;
///
@@ -35,7 +35,7 @@ public sealed class AnonymousDelegatingChatClient : DelegatingChatClient
///
/// The inner client.
///
- /// A delegate that provides the implementation for both and .
+ /// A delegate that provides the implementation for both and .
/// In addition to the arguments for the operation, it's provided with a delegate to the inner client that should be
/// used to perform the operation on the inner client. It will handle both the non-streaming and streaming cases.
///
@@ -57,77 +57,77 @@ public AnonymousDelegatingChatClient(IChatClient innerClient, CompleteSharedFunc
/// Initializes a new instance of the class.
///
/// The inner client.
- ///
- /// A delegate that provides the implementation for . When ,
- /// must be non-null, and the implementation of
- /// will use for the implementation.
+ ///
+ /// A delegate that provides the implementation for . When ,
+ /// must be non-null, and the implementation of
+ /// will use for the implementation.
///
- ///
- /// A delegate that provides the implementation for . When ,
- /// must be non-null, and the implementation of
- /// will use for the implementation.
+ ///
+ /// A delegate that provides the implementation for . When ,
+ /// must be non-null, and the implementation of
+ /// will use for the implementation.
///
/// is .
- /// Both and are .
+ /// Both and are .
public AnonymousDelegatingChatClient(
IChatClient innerClient,
- Func, ChatOptions?, IChatClient, CancellationToken, Task>? completeFunc,
- Func, ChatOptions?, IChatClient, CancellationToken, IAsyncEnumerable>? completeStreamingFunc)
+ Func, ChatOptions?, IChatClient, CancellationToken, Task>? getResponseFunc,
+ Func, ChatOptions?, IChatClient, CancellationToken, IAsyncEnumerable>? getStreamingResponseFunc)
: base(innerClient)
{
- ThrowIfBothDelegatesNull(completeFunc, completeStreamingFunc);
+ ThrowIfBothDelegatesNull(getResponseFunc, getStreamingResponseFunc);
- _completeFunc = completeFunc;
- _completeStreamingFunc = completeStreamingFunc;
+ _getResponseFunc = getResponseFunc;
+ _getStreamingResponseFunc = getStreamingResponseFunc;
}
///
- public override Task CompleteAsync(
+ public override Task GetResponseAsync(
IList chatMessages, ChatOptions? options = null, CancellationToken cancellationToken = default)
{
_ = Throw.IfNull(chatMessages);
if (_sharedFunc is not null)
{
- return CompleteViaSharedAsync(chatMessages, options, cancellationToken);
+ return GetResponseViaSharedAsync(chatMessages, options, cancellationToken);
- async Task CompleteViaSharedAsync(IList chatMessages, ChatOptions? options, CancellationToken cancellationToken)
+ async Task GetResponseViaSharedAsync(IList chatMessages, ChatOptions? options, CancellationToken cancellationToken)
{
- ChatCompletion? completion = null;
+ ChatResponse? response = null;
await _sharedFunc(chatMessages, options, async (chatMessages, options, cancellationToken) =>
{
- completion = await InnerClient.CompleteAsync(chatMessages, options, cancellationToken).ConfigureAwait(false);
+ response = await InnerClient.GetResponseAsync(chatMessages, options, cancellationToken).ConfigureAwait(false);
}, cancellationToken).ConfigureAwait(false);
- if (completion is null)
+ if (response is null)
{
- throw new InvalidOperationException("The wrapper completed successfully without producing a ChatCompletion.");
+ throw new InvalidOperationException("The wrapper completed successfully without producing a ChatResponse.");
}
- return completion;
+ return response;
}
}
- else if (_completeFunc is not null)
+ else if (_getResponseFunc is not null)
{
- return _completeFunc(chatMessages, options, InnerClient, cancellationToken);
+ return _getResponseFunc(chatMessages, options, InnerClient, cancellationToken);
}
else
{
- Debug.Assert(_completeStreamingFunc is not null, "Expected non-null streaming delegate.");
- return _completeStreamingFunc!(chatMessages, options, InnerClient, cancellationToken)
- .ToChatCompletionAsync(coalesceContent: true, cancellationToken);
+ Debug.Assert(_getStreamingResponseFunc is not null, "Expected non-null streaming delegate.");
+ return _getStreamingResponseFunc!(chatMessages, options, InnerClient, cancellationToken)
+ .ToChatResponseAsync(coalesceContent: true, cancellationToken);
}
}
///
- public override IAsyncEnumerable CompleteStreamingAsync(
+ public override IAsyncEnumerable GetStreamingResponseAsync(
IList chatMessages, ChatOptions? options = null, CancellationToken cancellationToken = default)
{
_ = Throw.IfNull(chatMessages);
if (_sharedFunc is not null)
{
- var updates = Channel.CreateBounded(1);
+ var updates = Channel.CreateBounded(1);
#pragma warning disable CA2016 // explicitly not forwarding the cancellation token, as we need to ensure the channel is always completed
_ = Task.Run(async () =>
@@ -138,7 +138,7 @@ public override IAsyncEnumerable CompleteStreamin
{
await _sharedFunc(chatMessages, options, async (chatMessages, options, cancellationToken) =>
{
- await foreach (var update in InnerClient.CompleteStreamingAsync(chatMessages, options, cancellationToken).ConfigureAwait(false))
+ await foreach (var update in InnerClient.GetStreamingResponseAsync(chatMessages, options, cancellationToken).ConfigureAwait(false))
{
await updates.Writer.WriteAsync(update, cancellationToken).ConfigureAwait(false);
}
@@ -157,19 +157,19 @@ await _sharedFunc(chatMessages, options, async (chatMessages, options, cancellat
return updates.Reader.ReadAllAsync(cancellationToken);
}
- else if (_completeStreamingFunc is not null)
+ else if (_getStreamingResponseFunc is not null)
{
- return _completeStreamingFunc(chatMessages, options, InnerClient, cancellationToken);
+ return _getStreamingResponseFunc(chatMessages, options, InnerClient, cancellationToken);
}
else
{
- Debug.Assert(_completeFunc is not null, "Expected non-null non-streaming delegate.");
- return CompleteStreamingAsyncViaCompleteAsync(_completeFunc!(chatMessages, options, InnerClient, cancellationToken));
+ Debug.Assert(_getResponseFunc is not null, "Expected non-null non-streaming delegate.");
+ return CompleteStreamingAsyncViaCompleteAsync(_getResponseFunc!(chatMessages, options, InnerClient, cancellationToken));
- static async IAsyncEnumerable<StreamingChatCompletionUpdate> CompleteStreamingAsyncViaCompleteAsync(Task<ChatCompletion> task)
+ static async IAsyncEnumerable<ChatResponseUpdate> CompleteStreamingAsyncViaCompleteAsync(Task<ChatResponse> task)
{
- ChatCompletion completion = await task.ConfigureAwait(false);
- foreach (var update in completion.ToStreamingChatCompletionUpdates())
+ ChatResponse response = await task.ConfigureAwait(false);
+ foreach (var update in response.ToChatResponseUpdates())
{
yield return update;
}
@@ -193,13 +193,13 @@ internal static void ThrowIfBothDelegatesNull(object? completeFunc, object? comp
// signature with the nextAsync delegate parameter.
/// <summary>
- /// Represents a method used to call <see cref="IChatClient.CompleteAsync"/> or <see cref="IChatClient.CompleteStreamingAsync"/>.
+ /// Represents a method used to call <see cref="IChatClient.GetResponseAsync"/> or <see cref="IChatClient.GetStreamingResponseAsync"/>.
/// </summary>
/// <param name="chatMessages">The chat content to send.</param>
/// <param name="options">The chat options to configure the request.</param>
/// <param name="nextAsync">
- /// A delegate that provides the implementation for the inner client's <see cref="IChatClient.CompleteAsync"/> or
- /// <see cref="IChatClient.CompleteStreamingAsync"/>. It should be invoked to continue the pipeline. It accepts
+ /// A delegate that provides the implementation for the inner client's <see cref="IChatClient.GetResponseAsync"/> or
+ /// <see cref="IChatClient.GetStreamingResponseAsync"/>. It should be invoked to continue the pipeline. It accepts
/// the chat messages, options, and cancellation token, which are typically the same instances as provided to this method
/// but need not be.
/// </param>
diff --git a/src/Libraries/Microsoft.Extensions.AI/ChatCompletion/CachingChatClient.cs b/src/Libraries/Microsoft.Extensions.AI/ChatCompletion/CachingChatClient.cs
index 698025e8901..79f41d1790e 100644
--- a/src/Libraries/Microsoft.Extensions.AI/ChatCompletion/CachingChatClient.cs
+++ b/src/Libraries/Microsoft.Extensions.AI/ChatCompletion/CachingChatClient.cs
@@ -45,7 +45,7 @@ protected CachingChatClient(IChatClient innerClient)
public bool CoalesceStreamingUpdates { get; set; } = true;
/// <inheritdoc/>
- public override async Task<ChatCompletion> CompleteAsync(IList<ChatMessage> chatMessages, ChatOptions? options = null, CancellationToken cancellationToken = default)
+ public override async Task<ChatResponse> GetResponseAsync(IList<ChatMessage> chatMessages, ChatOptions? options = null, CancellationToken cancellationToken = default)
{
_ = Throw.IfNull(chatMessages);
@@ -56,7 +56,7 @@ public override async Task CompleteAsync(IList chat
if (await ReadCacheAsync(cacheKey, cancellationToken).ConfigureAwait(false) is not { } result)
{
- result = await base.CompleteAsync(chatMessages, options, cancellationToken).ConfigureAwait(false);
+ result = await base.GetResponseAsync(chatMessages, options, cancellationToken).ConfigureAwait(false);
await WriteCacheAsync(cacheKey, result, cancellationToken).ConfigureAwait(false);
}
@@ -64,7 +64,7 @@ public override async Task CompleteAsync(IList chat
}
/// <inheritdoc/>
- public override async IAsyncEnumerable<StreamingChatCompletionUpdate> CompleteStreamingAsync(
+ public override async IAsyncEnumerable<ChatResponseUpdate> GetStreamingResponseAsync(
IList<ChatMessage> chatMessages, ChatOptions? options = null, [EnumeratorCancellation] CancellationToken cancellationToken = default)
{
_ = Throw.IfNull(chatMessages);
@@ -76,10 +76,10 @@ public override async IAsyncEnumerable CompleteSt
// result and cache it. When we get a cache hit, we yield the non-streaming result as a streaming one.
var cacheKey = GetCacheKey(_boxedTrue, chatMessages, options);
- if (await ReadCacheAsync(cacheKey, cancellationToken).ConfigureAwait(false) is { } chatCompletion)
+ if (await ReadCacheAsync(cacheKey, cancellationToken).ConfigureAwait(false) is { } chatResponse)
{
// Yield all of the cached items.
- foreach (var chunk in chatCompletion.ToStreamingChatCompletionUpdates())
+ foreach (var chunk in chatResponse.ToChatResponseUpdates())
{
yield return chunk;
}
@@ -87,15 +87,15 @@ public override async IAsyncEnumerable CompleteSt
else
{
// Yield and store all of the items.
- List<StreamingChatCompletionUpdate> capturedItems = [];
- await foreach (var chunk in base.CompleteStreamingAsync(chatMessages, options, cancellationToken).ConfigureAwait(false))
+ List<ChatResponseUpdate> capturedItems = [];
+ await foreach (var chunk in base.GetStreamingResponseAsync(chatMessages, options, cancellationToken).ConfigureAwait(false))
{
capturedItems.Add(chunk);
yield return chunk;
}
// Write the captured items to the cache as a non-streaming result.
- await WriteCacheAsync(cacheKey, capturedItems.ToChatCompletion(), cancellationToken).ConfigureAwait(false);
+ await WriteCacheAsync(cacheKey, capturedItems.ToChatResponse(), cancellationToken).ConfigureAwait(false);
}
}
else
@@ -112,8 +112,8 @@ public override async IAsyncEnumerable CompleteSt
else
{
// Yield and store all of the items.
- List<StreamingChatCompletionUpdate> capturedItems = [];
- await foreach (var chunk in base.CompleteStreamingAsync(chatMessages, options, cancellationToken).ConfigureAwait(false))
+ List<ChatResponseUpdate> capturedItems = [];
+ await foreach (var chunk in base.GetStreamingResponseAsync(chatMessages, options, cancellationToken).ConfigureAwait(false))
{
capturedItems.Add(chunk);
yield return chunk;
@@ -131,40 +131,40 @@ public override async IAsyncEnumerable CompleteSt
protected abstract string GetCacheKey(params ReadOnlySpan