@@ -15,6 +15,9 @@ public class ChatOptions
/// <related type="Article" href="https://learn.microsoft.com/dotnet/ai/microsoft-extensions-ai#stateless-vs-stateful-clients">Stateless vs. stateful clients.</related>
public string? ConversationId { get; set; }

/// <summary>Gets or sets additional per-request instructions to be provided to the <see cref="IChatClient"/>.</summary>
public string? Instructions { get; set; }

/// <summary>Gets or sets the temperature for generating chat responses.</summary>
/// <remarks>
/// This value controls the randomness of predictions made by the model. Use a lower value to decrease randomness in the response.
@@ -146,20 +149,21 @@ public virtual ChatOptions Clone()
{
ChatOptions options = new()
{
+ AdditionalProperties = AdditionalProperties?.Clone(),
+ AllowMultipleToolCalls = AllowMultipleToolCalls,
ConversationId = ConversationId,
- Temperature = Temperature,
- MaxOutputTokens = MaxOutputTokens,
- TopP = TopP,
- TopK = TopK,
FrequencyPenalty = FrequencyPenalty,
+ Instructions = Instructions,
+ MaxOutputTokens = MaxOutputTokens,
+ ModelId = ModelId,
PresencePenalty = PresencePenalty,
- Seed = Seed,
+ RawRepresentationFactory = RawRepresentationFactory,
ResponseFormat = ResponseFormat,
- ModelId = ModelId,
- AllowMultipleToolCalls = AllowMultipleToolCalls,
+ Seed = Seed,
+ Temperature = Temperature,
ToolMode = ToolMode,
- RawRepresentationFactory = RawRepresentationFactory,
- AdditionalProperties = AdditionalProperties?.Clone(),
+ TopK = TopK,
+ TopP = TopP,
};

if (StopSequences is not null)
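A minimal usage sketch of the new option (the wrapper method, its names, and the prompt text are illustrative assumptions, not part of this change): Instructions travels with the ChatOptions for a single request, and each provider client below maps it onto its native system/instructions concept, so no ChatRole.System message needs to be added to the history.

using System.Threading.Tasks;
using Microsoft.Extensions.AI;

internal static class InstructionsExample
{
    public static async Task<string> AskConciselyAsync(IChatClient client)
    {
        // Per-request instructions, supplied alongside the messages rather than inside them.
        ChatOptions options = new()
        {
            Instructions = "You are a concise assistant. Answer in one short sentence.",
        };

        ChatResponse response = await client.GetResponseAsync(
            [new ChatMessage(ChatRole.User, "What does ChatOptions.Instructions do?")],
            options);

        return response.Text;
    }
}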
@@ -923,6 +923,10 @@
"Member": "string? Microsoft.Extensions.AI.ChatOptions.ConversationId { get; set; }",
"Stage": "Stable"
},
{
"Member": "string? Microsoft.Extensions.AI.ChatOptions.Instructions { get; set; }",
"Stage": "Stable"
},
{
"Member": "float? Microsoft.Extensions.AI.ChatOptions.FrequencyPenalty { get; set; }",
"Stage": "Stable"
@@ -2286,4 +2290,4 @@
]
}
]
}
}
@@ -283,7 +283,7 @@ private static ChatRole ToChatRole(global::Azure.AI.Inference.ChatRole role) =>
new(s);

private ChatCompletionsOptions CreateAzureAIOptions(IEnumerable<ChatMessage> chatContents, ChatOptions? options) =>
new(ToAzureAIInferenceChatMessages(chatContents))
new(ToAzureAIInferenceChatMessages(chatContents, options))
{
Model = options?.ModelId ?? _metadata.DefaultModelId ??
throw new InvalidOperationException("No model id was provided when either constructing the client or in the chat options.")
@@ -299,7 +299,7 @@ private ChatCompletionsOptions ToAzureAIOptions(IEnumerable<ChatMessage> chatCon

if (options.RawRepresentationFactory?.Invoke(this) is ChatCompletionsOptions result)
{
result.Messages = ToAzureAIInferenceChatMessages(chatContents).ToList();
result.Messages = ToAzureAIInferenceChatMessages(chatContents, options).ToList();
result.Model ??= options.ModelId ?? _metadata.DefaultModelId ??
throw new InvalidOperationException("No model id was provided when either constructing the client or in the chat options.");
}
@@ -422,11 +422,16 @@ private static ChatCompletionsToolDefinition ToAzureAIChatTool(AIFunction aiFunc
}

/// <summary>Converts an Extensions chat message enumerable to an AzureAI chat message enumerable.</summary>
private static IEnumerable<ChatRequestMessage> ToAzureAIInferenceChatMessages(IEnumerable<ChatMessage> inputs)
private static IEnumerable<ChatRequestMessage> ToAzureAIInferenceChatMessages(IEnumerable<ChatMessage> inputs, ChatOptions? options)
{
// Maps all of the M.E.AI types to the corresponding AzureAI types.
// Unrecognized or non-processable content is ignored.

if (options?.Instructions is { } instructions && !string.IsNullOrWhiteSpace(instructions))
{
yield return new ChatRequestSystemMessage(instructions);
}

foreach (ChatMessage input in inputs)
{
if (input.Role == ChatRole.System)
@@ -307,10 +307,20 @@ private static FunctionCallContent ToFunctionCallContent(OllamaFunctionToolCall

private OllamaChatRequest ToOllamaChatRequest(IEnumerable<ChatMessage> messages, ChatOptions? options, bool stream)
{
var requestMessages = messages.SelectMany(ToOllamaChatRequestMessages).ToList();
if (options?.Instructions is string instructions)
{
requestMessages.Insert(0, new OllamaChatRequestMessage
{
Role = ChatRole.System.Value,
Content = instructions,
});
}

OllamaChatRequest request = new()
{
Format = ToOllamaChatResponseFormat(options?.ResponseFormat),
Messages = messages.SelectMany(ToOllamaChatRequestMessages).ToArray(),
Messages = requestMessages,
Model = options?.ModelId ?? _metadata.DefaultModelId ?? string.Empty,
Stream = stream,
Tools = options?.ToolMode is not NoneChatToolMode && options?.Tools is { Count: > 0 } tools ? tools.OfType<AIFunction>().Select(ToOllamaTool) : null,
@@ -9,7 +9,7 @@ namespace Microsoft.Extensions.AI;
internal sealed class OllamaChatRequest
{
public required string Model { get; set; }
public required OllamaChatRequestMessage[] Messages { get; set; }
public required IList<OllamaChatRequestMessage> Messages { get; set; }
public JsonElement? Format { get; set; }
public bool Stream { get; set; }
public IEnumerable<OllamaTool>? Tools { get; set; }
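(Design note on the two Ollama changes above: widening Messages from OllamaChatRequestMessage[] to IList<OllamaChatRequestMessage> lets ToOllamaChatRequest hand over the List<> it builds, into which the synthesized system message is inserted at index 0, without converting back to an array. Because OllamaChatRequest is internal, the looser property type is not observable outside the library.)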
@@ -328,8 +328,27 @@ void IDisposable.Dispose()
}
}

// Process ChatMessages.
// Configure system instructions.
StringBuilder? instructions = null;
void AppendSystemInstructions(string? toAppend)
{
if (!string.IsNullOrEmpty(toAppend))
{
if (instructions is null)
{
instructions = new(toAppend);
}
else
{
_ = instructions.AppendLine().AppendLine(toAppend);
}
}
}

AppendSystemInstructions(runOptions.AdditionalInstructions);
AppendSystemInstructions(options?.Instructions);

// Process ChatMessages.
List<FunctionResultContent>? functionResults = null;
foreach (var chatMessage in messages)
{
@@ -345,10 +364,9 @@ void IDisposable.Dispose()
if (chatMessage.Role == ChatRole.System ||
chatMessage.Role == OpenAIResponseChatClient.ChatRoleDeveloper)
{
instructions ??= new();
foreach (var textContent in chatMessage.Contents.OfType<TextContent>())
{
_ = instructions.Append(textContent);
AppendSystemInstructions(textContent.Text);
}

continue;
@@ -389,10 +407,7 @@ void IDisposable.Dispose()
}
}

if (instructions is not null)
{
runOptions.AdditionalInstructions = instructions.ToString();
}
runOptions.AdditionalInstructions = instructions?.ToString();

return (runOptions, functionResults);
}
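A standalone sketch of how the AppendSystemInstructions helper above accumulates text (the sample strings are made up): the first non-empty source seeds the builder, each later non-empty source is appended on its own line, null/empty sources are skipped, and runOptions.AdditionalInstructions ends up null when no instructions were supplied at all.

using System;
using System.Text;

internal static class AppendInstructionsSketch
{
    public static void Main()
    {
        StringBuilder? instructions = null;

        void Append(string? toAppend)
        {
            if (!string.IsNullOrEmpty(toAppend))
            {
                if (instructions is null)
                {
                    instructions = new(toAppend);
                }
                else
                {
                    _ = instructions.AppendLine().AppendLine(toAppend);
                }
            }
        }

        Append("Stay within the uploaded documents.");  // e.g. runOptions.AdditionalInstructions
        Append(null);                                    // skipped
        Append("Answer as briefly as possible.");        // e.g. options?.Instructions

        // Prints both sources, each on its own line; stays null if nothing was appended.
        Console.WriteLine(instructions?.ToString() ?? "<null>");
    }
}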
@@ -80,7 +80,7 @@ public async Task<ChatResponse> GetResponseAsync(
{
_ = Throw.IfNull(messages);

var openAIChatMessages = ToOpenAIChatMessages(messages, AIJsonUtilities.DefaultOptions);
var openAIChatMessages = ToOpenAIChatMessages(messages, options, AIJsonUtilities.DefaultOptions);
var openAIOptions = ToOpenAIOptions(options);

// Make the call to OpenAI.
Expand All @@ -95,7 +95,7 @@ public IAsyncEnumerable<ChatResponseUpdate> GetStreamingResponseAsync(
{
_ = Throw.IfNull(messages);

var openAIChatMessages = ToOpenAIChatMessages(messages, AIJsonUtilities.DefaultOptions);
var openAIChatMessages = ToOpenAIChatMessages(messages, options, AIJsonUtilities.DefaultOptions);
var openAIOptions = ToOpenAIOptions(options);

// Make the call to OpenAI.
@@ -111,11 +111,16 @@ void IDisposable.Dispose()
}

/// <summary>Converts an Extensions chat message enumerable to an OpenAI chat message enumerable.</summary>
private static IEnumerable<OpenAI.Chat.ChatMessage> ToOpenAIChatMessages(IEnumerable<ChatMessage> inputs, JsonSerializerOptions options)
private static IEnumerable<OpenAI.Chat.ChatMessage> ToOpenAIChatMessages(IEnumerable<ChatMessage> inputs, ChatOptions? chatOptions, JsonSerializerOptions jsonOptions)
{
// Maps all of the M.E.AI types to the corresponding OpenAI types.
// Unrecognized or non-processable content is ignored.

if (chatOptions?.Instructions is { } instructions && !string.IsNullOrWhiteSpace(instructions))
{
yield return new SystemChatMessage(instructions);
}

foreach (ChatMessage input in inputs)
{
if (input.Role == ChatRole.System ||
@@ -139,7 +144,7 @@ void IDisposable.Dispose()
{
try
{
result = JsonSerializer.Serialize(resultContent.Result, options.GetTypeInfo(typeof(object)));
result = JsonSerializer.Serialize(resultContent.Result, jsonOptions.GetTypeInfo(typeof(object)));
}
catch (NotSupportedException)
{
@@ -167,7 +172,7 @@ void IDisposable.Dispose()
case FunctionCallContent fc:
(toolCalls ??= []).Add(
ChatToolCall.CreateFunctionToolCall(fc.CallId, fc.Name, new(JsonSerializer.SerializeToUtf8Bytes(
fc.Arguments, options.GetTypeInfo(typeof(IDictionary<string, object?>))))));
fc.Arguments, jsonOptions.GetTypeInfo(typeof(IDictionary<string, object?>))))));
break;

default:
@@ -366,6 +366,12 @@ private ResponseCreationOptions ToOpenAIResponseCreationOptions(ChatOptions? opt
result.TopP ??= options.TopP;
result.Temperature ??= options.Temperature;
result.ParallelToolCallsEnabled ??= options.AllowMultipleToolCalls;
if (options.Instructions is { } instructions)
{
result.Instructions = string.IsNullOrEmpty(result.Instructions) ?
instructions :
$"{result.Instructions}{Environment.NewLine}{instructions}";
}

// Populate tools if there are any.
if (options.Tools is { Count: > 0 } tools)
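A hypothetical illustration of the Responses mapping above (the factory usage, the OpenAI.Responses namespace reference, and the strings are assumptions, not from this change): when a RawRepresentationFactory already supplies ResponseCreationOptions.Instructions, the per-request ChatOptions.Instructions is appended after it on a new line rather than overwriting it.

using Microsoft.Extensions.AI;
using OpenAI.Responses;

internal static class ResponsesInstructionsSketch
{
    public static ChatOptions Create() => new()
    {
        Instructions = "Answer in French.",
        RawRepresentationFactory = _ => new ResponseCreationOptions
        {
            Instructions = "You are a helpful assistant.",
        },
    };

    // When these options are passed to the Responses-based IChatClient, the outgoing
    // request's Instructions becomes:
    //   "You are a helpful assistant." + Environment.NewLine + "Answer in French."
}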
@@ -15,6 +15,7 @@ public void Constructor_Parameterless_PropsDefaulted()
{
ChatOptions options = new();
Assert.Null(options.ConversationId);
Assert.Null(options.Instructions);
Assert.Null(options.Temperature);
Assert.Null(options.MaxOutputTokens);
Assert.Null(options.TopP);
Expand All @@ -33,6 +34,7 @@ public void Constructor_Parameterless_PropsDefaulted()

ChatOptions clone = options.Clone();
Assert.Null(clone.ConversationId);
Assert.Null(clone.Instructions);
Assert.Null(clone.Temperature);
Assert.Null(clone.MaxOutputTokens);
Assert.Null(clone.TopP);
@@ -75,6 +77,7 @@ public void Properties_Roundtrip()
Func<IChatClient, object?> rawRepresentationFactory = (c) => null;

options.ConversationId = "12345";
options.Instructions = "Some instructions";
options.Temperature = 0.1f;
options.MaxOutputTokens = 2;
options.TopP = 0.3f;
@@ -92,6 +95,7 @@ public void Properties_Roundtrip()
options.AdditionalProperties = additionalProps;

Assert.Equal("12345", options.ConversationId);
Assert.Equal("Some instructions", options.Instructions);
Assert.Equal(0.1f, options.Temperature);
Assert.Equal(2, options.MaxOutputTokens);
Assert.Equal(0.3f, options.TopP);
@@ -144,6 +148,7 @@ public void JsonSerialization_Roundtrips()
};

options.ConversationId = "12345";
options.Instructions = "Some instructions";
options.Temperature = 0.1f;
options.MaxOutputTokens = 2;
options.TopP = 0.3f;
@@ -170,6 +175,7 @@ public void JsonSerialization_Roundtrips()
Assert.NotNull(deserialized);

Assert.Equal("12345", deserialized.ConversationId);
Assert.Equal("Some instructions", deserialized.Instructions);
Assert.Equal(0.1f, deserialized.Temperature);
Assert.Equal(2, deserialized.MaxOutputTokens);
Assert.Equal(0.3f, deserialized.TopP);