diff --git a/readme.md b/readme.md index 79056f9..6d62293 100644 --- a/readme.md +++ b/readme.md @@ -133,7 +133,7 @@ var requests = new List(); var responses = new List(); var openai = new OpenAIClient( Env.Get("OPENAI_API_KEY")!, - ClientOptions.Observe(requests.Add, responses.Add)); + OpenAIClientOptions.Observable(requests.Add, responses.Add)); ``` diff --git a/src/AI.Tests/Extensions/PipelineTestOutput.cs b/src/AI.Tests/Extensions/PipelineTestOutput.cs index e75a303..4542619 100644 --- a/src/AI.Tests/Extensions/PipelineTestOutput.cs +++ b/src/AI.Tests/Extensions/PipelineTestOutput.cs @@ -3,19 +3,28 @@ namespace Devlooped.Extensions.AI; -public static class PipelineTestOutput +public static class PipelineOutput { - static readonly JsonSerializerOptions options = new(JsonSerializerDefaults.General) + extension(TOptions) where TOptions : ClientPipelineOptions, new() + { + public static TOptions WriteTo(ITestOutputHelper output) + => new TOptions().WriteTo(output); + } +} + +public static class PipelineOutputExtensions +{ + static readonly JsonSerializerOptions jsonOptions = new(JsonSerializerDefaults.General) { WriteIndented = true, }; - public static TOptions WriteTo(this TOptions pipelineOptions, ITestOutputHelper output = default) - where TOptions : ClientPipelineOptions + extension(TOptions options) where TOptions : ClientPipelineOptions { - return pipelineOptions.Observe( - request => output.WriteLine(request.ToJsonString(options)), - response => output.WriteLine(response.ToJsonString(options)) - ); + public TOptions WriteTo(ITestOutputHelper output) + => options.Observe( + request => output.WriteLine(request.ToJsonString(jsonOptions)), + response => output.WriteLine(response.ToJsonString(jsonOptions)) + ); } } \ No newline at end of file diff --git a/src/AI.Tests/GrokTests.cs b/src/AI.Tests/GrokTests.cs index b7f34c8..84162ca 100644 --- a/src/AI.Tests/GrokTests.cs +++ b/src/AI.Tests/GrokTests.cs @@ -51,9 +51,9 @@ public async Task GrokInvokesToolAndSearch() 
var requests = new List(); var responses = new List(); - var grok = new GrokChatClient(Configuration["XAI_API_KEY"]!, "grok-3", - ClientOptions.Observable(requests.Add, responses.Add) - .WriteTo(output)) + var grok = new GrokChatClient(Configuration["XAI_API_KEY"]!, "grok-3", OpenAI.OpenAIClientOptions + .Observable(requests.Add, responses.Add) + .WriteTo(output)) .AsBuilder() .UseFunctionInvocation() .Build(); @@ -105,9 +105,9 @@ public async Task GrokInvokesHostedSearchTool() var requests = new List(); var responses = new List(); - var grok = new GrokChatClient(Configuration["XAI_API_KEY"]!, "grok-3", - ClientOptions.Observable(requests.Add, responses.Add) - .WriteTo(output)); + var grok = new GrokChatClient(Configuration["XAI_API_KEY"]!, "grok-3", OpenAI.OpenAIClientOptions + .Observable(requests.Add, responses.Add) + .WriteTo(output)); var options = new ChatOptions { diff --git a/src/AI.Tests/OpenAITests.cs b/src/AI.Tests/OpenAITests.cs index a626a4b..5b77edf 100644 --- a/src/AI.Tests/OpenAITests.cs +++ b/src/AI.Tests/OpenAITests.cs @@ -1,5 +1,6 @@ using System.Text.Json.Nodes; using Microsoft.Extensions.AI; +using OpenAI; using static ConfigurationExtensions; namespace Devlooped.Extensions.AI; @@ -15,7 +16,7 @@ public async Task OpenAISwitchesModel() }; var chat = new OpenAIChatClient(Configuration["OPENAI_API_KEY"]!, "gpt-4.1-nano", - new OpenAI.OpenAIClientOptions().WriteTo(output)); + OpenAIClientOptions.WriteTo(output)); var options = new ChatOptions { @@ -41,7 +42,7 @@ public async Task OpenAIThinks() var requests = new List(); var chat = new OpenAIChatClient(Configuration["OPENAI_API_KEY"]!, "o3-mini", - ClientOptions.Observable(requests.Add).WriteTo(output)); + OpenAIClientOptions.Observable(requests.Add).WriteTo(output)); var options = new ChatOptions { diff --git a/src/AI/ClientOptions.cs b/src/AI/ClientOptions.cs deleted file mode 100644 index c8a13ea..0000000 --- a/src/AI/ClientOptions.cs +++ /dev/null @@ -1,31 +0,0 @@ -using 
System.ClientModel.Primitives; -using System.Text.Json.Nodes; -using OpenAI; - -namespace Devlooped.Extensions.AI; - -/// -/// Shortcut factory methods for creating like -/// that provide convenient initialization options. -/// -public static class ClientOptions -{ - /// - /// Creates an obserbable instance that can - /// be used to log requests and responses. - /// - /// A callback to process the that was sent. - /// A callback to process the that was received. - public static OpenAIClientOptions Observable(Action? onRequest = default, Action? onResponse = default) - => Observable(onRequest, onResponse); - - /// - /// Creates an obserbable -derived instance - /// that can be used to log requests and responses. - /// - /// A callback to process the that was sent. - /// A callback to process the that was received. - public static TOptions Observable(Action? onRequest = default, Action? onResponse = default) - where TOptions : ClientPipelineOptions, new() - => new TOptions().Observe(onRequest, onResponse); -} diff --git a/src/AI/ClientPipelineExtensions.cs b/src/AI/ClientPipelineExtensions.cs index e1fa3d7..09a7ee1 100644 --- a/src/AI/ClientPipelineExtensions.cs +++ b/src/AI/ClientPipelineExtensions.cs @@ -4,28 +4,44 @@ namespace Devlooped.Extensions.AI; +/// +/// Provides extension methods for . +/// public static class ClientPipelineExtensions { - /// - /// Adds a that observes requests and response - /// messages from the and notifies the provided - /// callbacks with the JSON representation of the HTTP messages. - /// - /// The options type to configure for HTTP logging. - /// The options instance to configure. - /// A callback to process the that was sent. - /// A callback to process the that was received. - /// - /// This is the lowst-level logging after all chat pipeline processing has been done. - /// If no can be parsed from the request or response, - /// the callbacks will not be invoked. 
- /// - public static TOptions Observe(this TOptions pipelineOptions, - Action? onRequest = default, Action? onResponse = default) - where TOptions : ClientPipelineOptions + extension(TOptions) where TOptions : ClientPipelineOptions, new() { - pipelineOptions.AddPolicy(new ObservePipelinePolicy(onRequest, onResponse), PipelinePosition.BeforeTransport); - return pipelineOptions; + /// + /// Creates an instance of the that can be observed for requests and responses. + /// + /// A callback to process the that was sent. + /// A callback to process the that was received. + /// A new instance of . + public static TOptions Observable(Action? onRequest = default, Action? onResponse = default) + => new TOptions().Observe(onRequest, onResponse); + } + + extension(TOptions options) where TOptions : ClientPipelineOptions + { + /// + /// Adds a that observes requests and response + /// messages from the and notifies the provided + /// callbacks with the JSON representation of the HTTP messages. + /// + /// The options type to configure for HTTP logging. + /// The options instance to configure. + /// A callback to process the that was sent. + /// A callback to process the that was received. + /// + /// This is the lowest-level logging after all chat pipeline processing has been done. + /// If no can be parsed from the request or response, + /// the callbacks will not be invoked. + /// + public TOptions Observe(Action? onRequest = default, Action? onResponse = default) + { + options.AddPolicy(new ObservePipelinePolicy(onRequest, onResponse), PipelinePosition.BeforeTransport); + return options; + } + } class ObservePipelinePolicy(Action? onRequest = default, Action?
onResponse = default) : PipelinePolicy @@ -78,4 +94,4 @@ void NotifyObservers(PipelineMessage message) } } } -} +} \ No newline at end of file diff --git a/src/AI/Console/JsonConsoleLoggingExtensions.cs b/src/AI/Console/JsonConsoleLoggingExtensions.cs index 6da2786..63797dc 100644 --- a/src/AI/Console/JsonConsoleLoggingExtensions.cs +++ b/src/AI/Console/JsonConsoleLoggingExtensions.cs @@ -12,97 +12,52 @@ namespace Microsoft.Extensions.AI; [EditorBrowsable(EditorBrowsableState.Never)] public static class JsonConsoleLoggingExtensions { - /// - /// Sets a that renders HTTP messages to the - /// console using Spectre.Console rich JSON formatting, but only if the console is interactive. - /// - /// The options type to configure for HTTP logging. - /// The options instance to configure. - /// - /// NOTE: this is the lowest-level logging after all chat pipeline processing has been done. - /// - /// If the options already provide a transport, it will be wrapped with the console - /// logging transport to minimize the impact on existing configurations. - /// - /// - public static TOptions UseJsonConsoleLogging(this TOptions pipelineOptions, JsonConsoleOptions? consoleOptions = null) - where TOptions : ClientPipelineOptions + extension(TOptions pipelineOptions) where TOptions : ClientPipelineOptions { - consoleOptions ??= JsonConsoleOptions.Default; - - if (consoleOptions.InteractiveConfirm && ConsoleExtensions.IsConsoleInteractive && !AnsiConsole.Confirm("Do you want to enable rich JSON console logging for HTTP pipeline messages?")) - return pipelineOptions; - - if (consoleOptions.InteractiveOnly && !ConsoleExtensions.IsConsoleInteractive) - return pipelineOptions; - - pipelineOptions.AddPolicy(new JsonConsoleLoggingPipelinePolicy(consoleOptions), PipelinePosition.BeforeTransport); - return pipelineOptions; - } - - /// - /// Renders chat messages and responses to the console using Spectre.Console rich JSON formatting. - /// - /// The builder in use. 
- /// - /// Confirmation will be asked if the console is interactive, otherwise, it will be - /// enabled unconditionally. - /// - public static ChatClientBuilder UseJsonConsoleLogging(this ChatClientBuilder builder, JsonConsoleOptions? consoleOptions = null) - { - consoleOptions ??= JsonConsoleOptions.Default; + /// + /// Observes the HTTP request and response messages from the underlying pipeline and renders them + /// to the console using Spectre.Console rich JSON formatting, but only if the console is interactive. + /// + /// The options type to configure for HTTP logging. + /// The options instance to configure. + /// + public TOptions UseJsonConsoleLogging(JsonConsoleOptions? consoleOptions = null) + { + consoleOptions ??= JsonConsoleOptions.Default; - if (consoleOptions.InteractiveConfirm && ConsoleExtensions.IsConsoleInteractive && !AnsiConsole.Confirm("Do you want to enable rich JSON console logging for HTTP pipeline messages?")) - return builder; + if (consoleOptions.InteractiveConfirm && ConsoleExtensions.IsConsoleInteractive && !AnsiConsole.Confirm("Do you want to enable rich JSON console logging for HTTP pipeline messages?")) + return pipelineOptions; - if (consoleOptions.InteractiveOnly && !ConsoleExtensions.IsConsoleInteractive) - return builder; + if (consoleOptions.InteractiveOnly && !ConsoleExtensions.IsConsoleInteractive) + return pipelineOptions; - return builder.Use(inner => new JsonConsoleLoggingChatClient(inner, consoleOptions)); + return pipelineOptions.Observe( + request => AnsiConsole.Write(consoleOptions.CreatePanel(request)), + response => AnsiConsole.Write(consoleOptions.CreatePanel(response))); + } } - class JsonConsoleLoggingPipelinePolicy(JsonConsoleOptions consoleOptions) : PipelinePolicy + extension(ChatClientBuilder builder) { - public override void Process(PipelineMessage message, IReadOnlyList pipeline, int currentIndex) + /// + /// Renders chat messages and responses to the console using Spectre.Console rich JSON formatting. 
+ /// + /// The builder in use. + /// + /// Confirmation will be asked if the console is interactive, otherwise, it will be + /// enabled unconditionally. + /// + public ChatClientBuilder UseJsonConsoleLogging(JsonConsoleOptions? consoleOptions = null) { - message.BufferResponse = true; - ProcessNext(message, pipeline, currentIndex); - - if (message.Request.Content is not null) - { - using var memory = new MemoryStream(); - message.Request.Content.WriteTo(memory); - memory.Position = 0; - using var reader = new StreamReader(memory); - var content = reader.ReadToEnd(); - AnsiConsole.Write(consoleOptions.CreatePanel(content)); - } - - if (message.Response != null) - { - AnsiConsole.Write(consoleOptions.CreatePanel(message.Response.Content.ToString())); - } - } + consoleOptions ??= JsonConsoleOptions.Default; - public override async ValueTask ProcessAsync(PipelineMessage message, IReadOnlyList pipeline, int currentIndex) - { - message.BufferResponse = true; - await ProcessNextAsync(message, pipeline, currentIndex); + if (consoleOptions.InteractiveConfirm && ConsoleExtensions.IsConsoleInteractive && !AnsiConsole.Confirm("Do you want to enable rich JSON console logging for HTTP pipeline messages?")) + return builder; - if (message.Request.Content is not null) - { - using var memory = new MemoryStream(); - message.Request.Content.WriteTo(memory); - memory.Position = 0; - using var reader = new StreamReader(memory); - var content = await reader.ReadToEndAsync(); - AnsiConsole.Write(consoleOptions.CreatePanel(content)); - } + if (consoleOptions.InteractiveOnly && !ConsoleExtensions.IsConsoleInteractive) + return builder; - if (message.Response != null) - { - AnsiConsole.Write(consoleOptions.CreatePanel(message.Response.Content.ToString())); - } + return builder.Use(inner => new JsonConsoleLoggingChatClient(inner, consoleOptions)); } } diff --git a/src/AI/Console/JsonConsoleOptions.cs b/src/AI/Console/JsonConsoleOptions.cs index 080764d..540debd 100644 --- 
a/src/AI/Console/JsonConsoleOptions.cs +++ b/src/AI/Console/JsonConsoleOptions.cs @@ -91,6 +91,29 @@ internal Panel CreatePanel(string json) return panel; } + internal Panel CreatePanel(JsonNode node) + { + string json; + + // Determine if we need to pre-process the JSON node based on the settings. + if (TruncateLength.HasValue || !IncludeAdditionalProperties) + { + json = node.ToShortJsonString(TruncateLength, IncludeAdditionalProperties); + } + else + { + // i.e. we had no pre-processing to do + json = node.ToJsonString(); + } + + var panel = new Panel(WrapLength.HasValue ? new WrappedJsonText(json, WrapLength.Value) : new JsonText(json)) + { + Border = Border, + BorderStyle = BorderStyle, + }; + return panel; + } + internal Panel CreatePanel(object value) { string? json = null;