diff --git a/src/Libraries/Microsoft.Extensions.AI.Abstractions/ChatCompletion/ChatOptions.cs b/src/Libraries/Microsoft.Extensions.AI.Abstractions/ChatCompletion/ChatOptions.cs
index 96a6c6cd36b..0be912430fa 100644
--- a/src/Libraries/Microsoft.Extensions.AI.Abstractions/ChatCompletion/ChatOptions.cs
+++ b/src/Libraries/Microsoft.Extensions.AI.Abstractions/ChatCompletion/ChatOptions.cs
@@ -15,6 +15,9 @@ public class ChatOptions
/// Stateless vs. stateful clients.
public string? ConversationId { get; set; }
+ /// Gets or sets additional per-request instructions to be provided to the chat client.
+ public string? Instructions { get; set; }
+
/// Gets or sets the temperature for generating chat responses.
///
/// This value controls the randomness of predictions made by the model. Use a lower value to decrease randomness in the response.
@@ -146,20 +149,21 @@ public virtual ChatOptions Clone()
{
ChatOptions options = new()
{
+ AdditionalProperties = AdditionalProperties?.Clone(),
+ AllowMultipleToolCalls = AllowMultipleToolCalls,
ConversationId = ConversationId,
- Temperature = Temperature,
- MaxOutputTokens = MaxOutputTokens,
- TopP = TopP,
- TopK = TopK,
FrequencyPenalty = FrequencyPenalty,
+ Instructions = Instructions,
+ MaxOutputTokens = MaxOutputTokens,
+ ModelId = ModelId,
PresencePenalty = PresencePenalty,
- Seed = Seed,
+ RawRepresentationFactory = RawRepresentationFactory,
ResponseFormat = ResponseFormat,
- ModelId = ModelId,
- AllowMultipleToolCalls = AllowMultipleToolCalls,
+ Seed = Seed,
+ Temperature = Temperature,
ToolMode = ToolMode,
- RawRepresentationFactory = RawRepresentationFactory,
- AdditionalProperties = AdditionalProperties?.Clone(),
+ TopK = TopK,
+ TopP = TopP,
};
if (StopSequences is not null)
diff --git a/src/Libraries/Microsoft.Extensions.AI.Abstractions/Microsoft.Extensions.AI.Abstractions.json b/src/Libraries/Microsoft.Extensions.AI.Abstractions/Microsoft.Extensions.AI.Abstractions.json
index 499ff4b4a71..79776b0ecb4 100644
--- a/src/Libraries/Microsoft.Extensions.AI.Abstractions/Microsoft.Extensions.AI.Abstractions.json
+++ b/src/Libraries/Microsoft.Extensions.AI.Abstractions/Microsoft.Extensions.AI.Abstractions.json
@@ -923,6 +923,10 @@
"Member": "string? Microsoft.Extensions.AI.ChatOptions.ConversationId { get; set; }",
"Stage": "Stable"
},
+ {
+ "Member": "string? Microsoft.Extensions.AI.ChatOptions.Instructions { get; set; }",
+ "Stage": "Stable"
+ },
{
"Member": "float? Microsoft.Extensions.AI.ChatOptions.FrequencyPenalty { get; set; }",
"Stage": "Stable"
@@ -2286,4 +2290,4 @@
]
}
]
-}
\ No newline at end of file
+}
diff --git a/src/Libraries/Microsoft.Extensions.AI.AzureAIInference/AzureAIInferenceChatClient.cs b/src/Libraries/Microsoft.Extensions.AI.AzureAIInference/AzureAIInferenceChatClient.cs
index 5c6b64b138f..0f8a8a90008 100644
--- a/src/Libraries/Microsoft.Extensions.AI.AzureAIInference/AzureAIInferenceChatClient.cs
+++ b/src/Libraries/Microsoft.Extensions.AI.AzureAIInference/AzureAIInferenceChatClient.cs
@@ -283,7 +283,7 @@ private static ChatRole ToChatRole(global::Azure.AI.Inference.ChatRole role) =>
new(s);
private ChatCompletionsOptions CreateAzureAIOptions(IEnumerable chatContents, ChatOptions? options) =>
- new(ToAzureAIInferenceChatMessages(chatContents))
+ new(ToAzureAIInferenceChatMessages(chatContents, options))
{
Model = options?.ModelId ?? _metadata.DefaultModelId ??
throw new InvalidOperationException("No model id was provided when either constructing the client or in the chat options.")
@@ -299,7 +299,7 @@ private ChatCompletionsOptions ToAzureAIOptions(IEnumerable chatCon
if (options.RawRepresentationFactory?.Invoke(this) is ChatCompletionsOptions result)
{
- result.Messages = ToAzureAIInferenceChatMessages(chatContents).ToList();
+ result.Messages = ToAzureAIInferenceChatMessages(chatContents, options).ToList();
result.Model ??= options.ModelId ?? _metadata.DefaultModelId ??
throw new InvalidOperationException("No model id was provided when either constructing the client or in the chat options.");
}
@@ -422,11 +422,16 @@ private static ChatCompletionsToolDefinition ToAzureAIChatTool(AIFunction aiFunc
}
/// Converts an Extensions chat message enumerable to an AzureAI chat message enumerable.
- private static IEnumerable ToAzureAIInferenceChatMessages(IEnumerable inputs)
+ private static IEnumerable ToAzureAIInferenceChatMessages(IEnumerable inputs, ChatOptions? options)
{
// Maps all of the M.E.AI types to the corresponding AzureAI types.
// Unrecognized or non-processable content is ignored.
+ if (options?.Instructions is { } instructions && !string.IsNullOrWhiteSpace(instructions))
+ {
+ yield return new ChatRequestSystemMessage(instructions);
+ }
+
foreach (ChatMessage input in inputs)
{
if (input.Role == ChatRole.System)
diff --git a/src/Libraries/Microsoft.Extensions.AI.Ollama/OllamaChatClient.cs b/src/Libraries/Microsoft.Extensions.AI.Ollama/OllamaChatClient.cs
index 28f8eb8c3ad..42f75af495e 100644
--- a/src/Libraries/Microsoft.Extensions.AI.Ollama/OllamaChatClient.cs
+++ b/src/Libraries/Microsoft.Extensions.AI.Ollama/OllamaChatClient.cs
@@ -307,10 +307,20 @@ private static FunctionCallContent ToFunctionCallContent(OllamaFunctionToolCall
private OllamaChatRequest ToOllamaChatRequest(IEnumerable messages, ChatOptions? options, bool stream)
{
+ var requestMessages = messages.SelectMany(ToOllamaChatRequestMessages).ToList();
+ if (options?.Instructions is string instructions)
+ {
+ requestMessages.Insert(0, new OllamaChatRequestMessage
+ {
+ Role = ChatRole.System.Value,
+ Content = instructions,
+ });
+ }
+
OllamaChatRequest request = new()
{
Format = ToOllamaChatResponseFormat(options?.ResponseFormat),
- Messages = messages.SelectMany(ToOllamaChatRequestMessages).ToArray(),
+ Messages = requestMessages,
Model = options?.ModelId ?? _metadata.DefaultModelId ?? string.Empty,
Stream = stream,
Tools = options?.ToolMode is not NoneChatToolMode && options?.Tools is { Count: > 0 } tools ? tools.OfType().Select(ToOllamaTool) : null,
diff --git a/src/Libraries/Microsoft.Extensions.AI.Ollama/OllamaChatRequest.cs b/src/Libraries/Microsoft.Extensions.AI.Ollama/OllamaChatRequest.cs
index a5b23d567a4..7cdadb91666 100644
--- a/src/Libraries/Microsoft.Extensions.AI.Ollama/OllamaChatRequest.cs
+++ b/src/Libraries/Microsoft.Extensions.AI.Ollama/OllamaChatRequest.cs
@@ -9,7 +9,7 @@ namespace Microsoft.Extensions.AI;
internal sealed class OllamaChatRequest
{
public required string Model { get; set; }
- public required OllamaChatRequestMessage[] Messages { get; set; }
+ public required IList Messages { get; set; }
public JsonElement? Format { get; set; }
public bool Stream { get; set; }
public IEnumerable? Tools { get; set; }
diff --git a/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIAssistantChatClient.cs b/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIAssistantChatClient.cs
index c3aab83da61..534ccd841d2 100644
--- a/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIAssistantChatClient.cs
+++ b/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIAssistantChatClient.cs
@@ -328,8 +328,27 @@ void IDisposable.Dispose()
}
}
- // Process ChatMessages.
+ // Configure system instructions.
StringBuilder? instructions = null;
+ void AppendSystemInstructions(string? toAppend)
+ {
+ if (!string.IsNullOrEmpty(toAppend))
+ {
+ if (instructions is null)
+ {
+ instructions = new(toAppend);
+ }
+ else
+ {
+ _ = instructions.AppendLine().AppendLine(toAppend);
+ }
+ }
+ }
+
+ AppendSystemInstructions(runOptions.AdditionalInstructions);
+ AppendSystemInstructions(options?.Instructions);
+
+ // Process ChatMessages.
List? functionResults = null;
foreach (var chatMessage in messages)
{
@@ -345,10 +364,9 @@ void IDisposable.Dispose()
if (chatMessage.Role == ChatRole.System ||
chatMessage.Role == OpenAIResponseChatClient.ChatRoleDeveloper)
{
- instructions ??= new();
foreach (var textContent in chatMessage.Contents.OfType())
{
- _ = instructions.Append(textContent);
+ AppendSystemInstructions(textContent.Text);
}
continue;
@@ -389,10 +407,7 @@ void IDisposable.Dispose()
}
}
- if (instructions is not null)
- {
- runOptions.AdditionalInstructions = instructions.ToString();
- }
+ runOptions.AdditionalInstructions = instructions?.ToString();
return (runOptions, functionResults);
}
diff --git a/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIChatClient.cs b/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIChatClient.cs
index f97ebd492a7..a6cd70149d7 100644
--- a/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIChatClient.cs
+++ b/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIChatClient.cs
@@ -80,7 +80,7 @@ public async Task GetResponseAsync(
{
_ = Throw.IfNull(messages);
- var openAIChatMessages = ToOpenAIChatMessages(messages, AIJsonUtilities.DefaultOptions);
+ var openAIChatMessages = ToOpenAIChatMessages(messages, options, AIJsonUtilities.DefaultOptions);
var openAIOptions = ToOpenAIOptions(options);
// Make the call to OpenAI.
@@ -95,7 +95,7 @@ public IAsyncEnumerable GetStreamingResponseAsync(
{
_ = Throw.IfNull(messages);
- var openAIChatMessages = ToOpenAIChatMessages(messages, AIJsonUtilities.DefaultOptions);
+ var openAIChatMessages = ToOpenAIChatMessages(messages, options, AIJsonUtilities.DefaultOptions);
var openAIOptions = ToOpenAIOptions(options);
// Make the call to OpenAI.
@@ -111,11 +111,16 @@ void IDisposable.Dispose()
}
/// Converts an Extensions chat message enumerable to an OpenAI chat message enumerable.
- private static IEnumerable ToOpenAIChatMessages(IEnumerable inputs, JsonSerializerOptions options)
+ private static IEnumerable ToOpenAIChatMessages(IEnumerable inputs, ChatOptions? chatOptions, JsonSerializerOptions jsonOptions)
{
// Maps all of the M.E.AI types to the corresponding OpenAI types.
// Unrecognized or non-processable content is ignored.
+ if (chatOptions?.Instructions is { } instructions && !string.IsNullOrWhiteSpace(instructions))
+ {
+ yield return new SystemChatMessage(instructions);
+ }
+
foreach (ChatMessage input in inputs)
{
if (input.Role == ChatRole.System ||
@@ -139,7 +144,7 @@ void IDisposable.Dispose()
{
try
{
- result = JsonSerializer.Serialize(resultContent.Result, options.GetTypeInfo(typeof(object)));
+ result = JsonSerializer.Serialize(resultContent.Result, jsonOptions.GetTypeInfo(typeof(object)));
}
catch (NotSupportedException)
{
@@ -167,7 +172,7 @@ void IDisposable.Dispose()
case FunctionCallContent fc:
(toolCalls ??= []).Add(
ChatToolCall.CreateFunctionToolCall(fc.CallId, fc.Name, new(JsonSerializer.SerializeToUtf8Bytes(
- fc.Arguments, options.GetTypeInfo(typeof(IDictionary))))));
+ fc.Arguments, jsonOptions.GetTypeInfo(typeof(IDictionary))))));
break;
default:
diff --git a/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIResponseChatClient.cs b/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIResponseChatClient.cs
index 34e6977e1f7..ffa5bf19b63 100644
--- a/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIResponseChatClient.cs
+++ b/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIResponseChatClient.cs
@@ -366,6 +366,12 @@ private ResponseCreationOptions ToOpenAIResponseCreationOptions(ChatOptions? opt
result.TopP ??= options.TopP;
result.Temperature ??= options.Temperature;
result.ParallelToolCallsEnabled ??= options.AllowMultipleToolCalls;
+ if (options.Instructions is { } instructions)
+ {
+ result.Instructions = string.IsNullOrEmpty(result.Instructions) ?
+ instructions :
+ $"{result.Instructions}{Environment.NewLine}{instructions}";
+ }
// Populate tools if there are any.
if (options.Tools is { Count: > 0 } tools)
diff --git a/test/Libraries/Microsoft.Extensions.AI.Abstractions.Tests/ChatCompletion/ChatOptionsTests.cs b/test/Libraries/Microsoft.Extensions.AI.Abstractions.Tests/ChatCompletion/ChatOptionsTests.cs
index cdf1aab09c9..b7645c26245 100644
--- a/test/Libraries/Microsoft.Extensions.AI.Abstractions.Tests/ChatCompletion/ChatOptionsTests.cs
+++ b/test/Libraries/Microsoft.Extensions.AI.Abstractions.Tests/ChatCompletion/ChatOptionsTests.cs
@@ -15,6 +15,7 @@ public void Constructor_Parameterless_PropsDefaulted()
{
ChatOptions options = new();
Assert.Null(options.ConversationId);
+ Assert.Null(options.Instructions);
Assert.Null(options.Temperature);
Assert.Null(options.MaxOutputTokens);
Assert.Null(options.TopP);
@@ -33,6 +34,7 @@ public void Constructor_Parameterless_PropsDefaulted()
ChatOptions clone = options.Clone();
Assert.Null(clone.ConversationId);
+ Assert.Null(clone.Instructions);
Assert.Null(clone.Temperature);
Assert.Null(clone.MaxOutputTokens);
Assert.Null(clone.TopP);
@@ -75,6 +77,7 @@ public void Properties_Roundtrip()
Func rawRepresentationFactory = (c) => null;
options.ConversationId = "12345";
+ options.Instructions = "Some instructions";
options.Temperature = 0.1f;
options.MaxOutputTokens = 2;
options.TopP = 0.3f;
@@ -92,6 +95,7 @@ public void Properties_Roundtrip()
options.AdditionalProperties = additionalProps;
Assert.Equal("12345", options.ConversationId);
+ Assert.Equal("Some instructions", options.Instructions);
Assert.Equal(0.1f, options.Temperature);
Assert.Equal(2, options.MaxOutputTokens);
Assert.Equal(0.3f, options.TopP);
@@ -144,6 +148,7 @@ public void JsonSerialization_Roundtrips()
};
options.ConversationId = "12345";
+ options.Instructions = "Some instructions";
options.Temperature = 0.1f;
options.MaxOutputTokens = 2;
options.TopP = 0.3f;
@@ -170,6 +175,7 @@ public void JsonSerialization_Roundtrips()
Assert.NotNull(deserialized);
Assert.Equal("12345", deserialized.ConversationId);
+ Assert.Equal("Some instructions", deserialized.Instructions);
Assert.Equal(0.1f, deserialized.Temperature);
Assert.Equal(2, deserialized.MaxOutputTokens);
Assert.Equal(0.3f, deserialized.TopP);