Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
44 changes: 44 additions & 0 deletions readme.md
Original file line number Diff line number Diff line change
Expand Up @@ -62,6 +62,25 @@ var options = new ChatOptions
var response = await grok.GetResponseAsync(messages, options);
```

We also provide an OpenAI-compatible `WebSearchTool` that can be used to restrict
the search to a specific country in a way that works with both Grok and OpenAI:

```csharp
var options = new ChatOptions
{
Tools = [new WebSearchTool("AR")] // 👈 search in Argentina
};
```

This is equivalent to the following when used with a Grok client:
```csharp
var options = new ChatOptions
{
// 👇 search in Argentina
Tools = [new GrokSearchTool(GrokSearch.On) { Country = "AR" }]
};
```

### Advanced Live Search

To configure advanced live search options, beyond the `On|Auto|Off` settings
Expand Down Expand Up @@ -127,9 +146,34 @@ var options = new ChatOptions
};

var response = await chat.GetResponseAsync(messages, options);
```

### Web Search

Similar to the Grok client, we provide the `WebSearchTool` to enable search customization
in OpenAI too:

```csharp
var options = new ChatOptions
{
// 👇 search in Argentina, Bariloche region
Tools = [new WebSearchTool("AR")
{
Region = "Bariloche", // 👈 Bariloche region
TimeZone = "America/Argentina/Buenos_Aires", // 👈 IANA timezone
ContextSize = WebSearchContextSize.High // 👈 high search context size
}]
};
```

> [!NOTE]
> This enables all features supported by the [Web search](https://platform.openai.com/docs/guides/tools-web-search)
> feature in OpenAI.

If advanced search settings are not needed, you can use the built-in M.E.AI `HostedWebSearchTool`
instead, which is a more generic tool and provides the basics out of the box.


## Observing Request/Response

The underlying HTTP pipeline provided by the Azure SDK allows setting up
Expand Down
8 changes: 5 additions & 3 deletions src/AI.Tests/GrokTests.cs
Original file line number Diff line number Diff line change
@@ -1,6 +1,8 @@
using System.Text.Json.Nodes;
using Devlooped.Extensions.AI.Grok;
using Microsoft.Extensions.AI;
using static ConfigurationExtensions;
using OpenAIClientOptions = OpenAI.OpenAIClientOptions;

namespace Devlooped.Extensions.AI;

Expand Down Expand Up @@ -51,7 +53,7 @@ public async Task GrokInvokesToolAndSearch()
var requests = new List<JsonNode>();
var responses = new List<JsonNode>();

var grok = new GrokChatClient(Configuration["XAI_API_KEY"]!, "grok-3", OpenAI.OpenAIClientOptions
var grok = new GrokChatClient(Configuration["XAI_API_KEY"]!, "grok-3", OpenAIClientOptions
.Observable(requests.Add, responses.Add)
.WriteTo(output))
.AsBuilder()
Expand Down Expand Up @@ -105,7 +107,7 @@ public async Task GrokInvokesHostedSearchTool()
var requests = new List<JsonNode>();
var responses = new List<JsonNode>();

var grok = new GrokChatClient(Configuration["XAI_API_KEY"]!, "grok-3", OpenAI.OpenAIClientOptions
var grok = new GrokChatClient(Configuration["XAI_API_KEY"]!, "grok-3", OpenAIClientOptions
.Observable(requests.Add, responses.Add)
.WriteTo(output));

Expand Down Expand Up @@ -185,7 +187,7 @@ public async Task GrokInvokesSpecificSearchUrl()
var requests = new List<JsonNode>();
var responses = new List<JsonNode>();

var grok = new GrokChatClient(Configuration["XAI_API_KEY"]!, "grok-3", OpenAI.OpenAIClientOptions
var grok = new GrokChatClient(Configuration["XAI_API_KEY"]!, "grok-3", OpenAIClientOptions
.Observable(requests.Add, responses.Add)
.WriteTo(output));

Expand Down
54 changes: 54 additions & 0 deletions src/AI.Tests/OpenAITests.cs
Original file line number Diff line number Diff line change
@@ -1,6 +1,8 @@
using System.Text.Json.Nodes;
using Devlooped.Extensions.AI.OpenAI;
using Microsoft.Extensions.AI;
using OpenAI;
using OpenAI.Responses;
using static ConfigurationExtensions;

namespace Devlooped.Extensions.AI;
Expand Down Expand Up @@ -66,4 +68,56 @@ public async Task OpenAIThinks()
Assert.Equal("medium", search["effort"]?.GetValue<string>());
});
}

[SecretsFact("OPENAI_API_KEY")]
public async Task WebSearchCountryHighContext()
{
var messages = new Chat()
{
{ "system", "Sos un asistente del Cerro Catedral, usas la funcionalidad de Live Search en el sitio oficial." },
{ "system", $"Hoy es {DateTime.Now.ToString("o")}." },
{ "system",
"""
Web search sources:
https://catedralaltapatagonia.com/parte-de-nieve/
https://catedralaltapatagonia.com/tarifas/
https://catedralaltapatagonia.com/

DO NOT USE https://partediario.catedralaltapatagonia.com/partediario for web search, it's **OBSOLETE**.
"""},
{ "user", "Cuanto cuesta el pase diario en el Catedral hoy?" },
};

var requests = new List<JsonNode>();
var responses = new List<JsonNode>();

var chat = new OpenAIChatClient(Configuration["OPENAI_API_KEY"]!, "gpt-4.1",
OpenAIClientOptions.Observable(requests.Add, responses.Add).WriteTo(output));

var options = new ChatOptions
{
Tools = [new WebSearchTool("AR")
{
Region = "Bariloche",
TimeZone = "America/Argentina/Buenos_Aires",
ContextSize = WebSearchContextSize.High
}]
};

var response = await chat.GetResponseAsync(messages, options);
var text = response.Text;

var raw = Assert.IsType<OpenAIResponse>(response.RawRepresentation);
Assert.NotEmpty(raw.OutputItems.OfType<WebSearchCallResponseItem>());

var assistant = raw.OutputItems.OfType<MessageResponseItem>().Where(x => x.Role == MessageRole.Assistant).FirstOrDefault();
Assert.NotNull(assistant);

var content = Assert.Single(assistant.Content);
Assert.NotEmpty(content.OutputTextAnnotations);
Assert.Contains(content.OutputTextAnnotations,
x => x.Kind == ResponseMessageAnnotationKind.UriCitation &&
x.UriCitationUri.StartsWith("https://catedralaltapatagonia.com/tarifas/"));

}
}
4 changes: 2 additions & 2 deletions src/AI.Tests/RetrievalTests.cs
Original file line number Diff line number Diff line change
Expand Up @@ -11,11 +11,11 @@ public class RetrievalTests(ITestOutputHelper output)
[InlineData("gpt-4.1-nano", "What's the battery life in an iPhone 15?", true)]
public async Task CanRetrieveContent(string model, string question, bool empty = false)
{
var client = new OpenAI.OpenAIClient(Configuration["OPENAI_API_KEY"]);
var client = new global::OpenAI.OpenAIClient(Configuration["OPENAI_API_KEY"]);
var store = client.GetVectorStoreClient().CreateVectorStore(true);
try
{
var file = client.GetOpenAIFileClient().UploadFile("Content/LNS0004592.md", OpenAI.Files.FileUploadPurpose.Assistants);
var file = client.GetOpenAIFileClient().UploadFile("Content/LNS0004592.md", global::OpenAI.Files.FileUploadPurpose.Assistants);
try
{
client.GetVectorStoreClient().AddFileToVectorStore(store.VectorStoreId, file.Value.Id, true);
Expand Down
7 changes: 4 additions & 3 deletions src/AI.Tests/ToolsTests.cs
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
using System.ComponentModel;
using Devlooped.Extensions.AI.OpenAI;
using Microsoft.Extensions.AI;
using static ConfigurationExtensions;

Expand All @@ -18,7 +19,7 @@ public async Task RunToolResult()
};

var client = new OpenAIChatClient(Configuration["OPENAI_API_KEY"]!, "gpt-4.1",
OpenAI.OpenAIClientOptions.WriteTo(output))
global::OpenAI.OpenAIClientOptions.WriteTo(output))
.AsBuilder()
.UseFunctionInvocation()
.Build();
Expand Down Expand Up @@ -50,7 +51,7 @@ public async Task RunToolTerminateResult()
};

var client = new OpenAIChatClient(Configuration["OPENAI_API_KEY"]!, "gpt-4.1",
OpenAI.OpenAIClientOptions.WriteTo(output))
global::OpenAI.OpenAIClientOptions.WriteTo(output))
.AsBuilder()
.UseFunctionInvocation()
.Build();
Expand Down Expand Up @@ -82,7 +83,7 @@ public async Task RunToolExceptionOutcome()
};

var client = new OpenAIChatClient(Configuration["OPENAI_API_KEY"]!, "gpt-4.1",
OpenAI.OpenAIClientOptions.WriteTo(output))
global::OpenAI.OpenAIClientOptions.WriteTo(output))
.AsBuilder()
.UseFunctionInvocation()
.Build();
Expand Down
18 changes: 13 additions & 5 deletions src/AI/Grok/GrokChatClient.cs
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,7 @@
using Microsoft.Extensions.AI;
using OpenAI;

namespace Devlooped.Extensions.AI;
namespace Devlooped.Extensions.AI.Grok;

/// <summary>
/// An <see cref="IChatClient"/> implementation for Grok.
Expand Down Expand Up @@ -64,6 +64,14 @@ IChatClient GetChatClient(string modelId) => clients.GetOrAdd(modelId, model
Mode = search.Value
};
}
else if (tool is null && options.Tools?.OfType<WebSearchTool>().FirstOrDefault() is { } web)
{
searchOptions = new GrokChatWebSearchOptions
{
Mode = GrokSearch.Auto,
Sources = [new GrokWebSource { Country = web.Country }]
};
}
else if (tool is null && options.Tools?.OfType<HostedWebSearchTool>().FirstOrDefault() is not null)
{
searchOptions = new GrokChatWebSearchOptions
Expand Down Expand Up @@ -92,9 +100,9 @@ IChatClient GetChatClient(string modelId) => clients.GetOrAdd(modelId, model
{
result.ReasoningEffortLevel = grok.ReasoningEffort switch
{
ReasoningEffort.High => OpenAI.Chat.ChatReasoningEffortLevel.High,
ReasoningEffort.High => global::OpenAI.Chat.ChatReasoningEffortLevel.High,
// Grok does not support Medium, so we map it to Low too
_ => OpenAI.Chat.ChatReasoningEffortLevel.Low,
_ => global::OpenAI.Chat.ChatReasoningEffortLevel.Low,
};
}

Expand All @@ -111,7 +119,7 @@ void IDisposable.Dispose() { }
// Allows creating the base OpenAIClient with a pre-created pipeline.
class PipelineClient(ClientPipeline pipeline, OpenAIClientOptions options) : OpenAIClient(pipeline, options) { }

class GrokChatWebSearchOptions : OpenAI.Chat.ChatWebSearchOptions
class GrokChatWebSearchOptions : global::OpenAI.Chat.ChatWebSearchOptions
{
public GrokSearch Mode { get; set; } = GrokSearch.Auto;
public DateOnly? FromDate { get; set; }
Expand Down Expand Up @@ -166,7 +174,7 @@ class LowercaseNamingPolicy : JsonNamingPolicy
}
}

class GrokCompletionOptions : OpenAI.Chat.ChatCompletionOptions
class GrokCompletionOptions : global::OpenAI.Chat.ChatCompletionOptions
{
protected override void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions? options)
{
Expand Down
2 changes: 1 addition & 1 deletion src/AI/Grok/GrokChatOptions.cs
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
using Microsoft.Extensions.AI;

namespace Devlooped.Extensions.AI;
namespace Devlooped.Extensions.AI.Grok;

/// <summary>
/// Grok-specific chat options that extend the base <see cref="ChatOptions"/>
Expand Down
22 changes: 11 additions & 11 deletions src/AI/Grok/GrokClient.cs
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@
using Microsoft.Extensions.AI;
using OpenAI;

namespace Devlooped.Extensions.AI;
namespace Devlooped.Extensions.AI.Grok;

/// <summary>
/// Provides an OpenAI compatibility client for Grok. It's recommended you
Expand All @@ -26,7 +26,7 @@ public class GrokClient(string apiKey, OpenAIClientOptions? options = null)
/// Returns an adapter that surfaces an <see cref="IChatClient"/> interface that
/// can be used directly in the <see cref="ChatClientBuilder"/> pipeline builder.
/// </summary>
public override OpenAI.Chat.ChatClient GetChatClient(string model) => new GrokChatClientAdapter(this, model);
public override global::OpenAI.Chat.ChatClient GetChatClient(string model) => new GrokChatClientAdapter(this, model);

static OpenAIClientOptions EnsureEndpoint(OpenAIClientOptions? options)
{
Expand All @@ -39,7 +39,7 @@ static OpenAIClientOptions EnsureEndpoint(OpenAIClientOptions? options)
// OpenAI in MEAI docs. The most typical case would be to simply create a <see cref="GrokChatClient"/> directly.
// This throws on any non-IChatClient invoked methods in the AsIChatClient adapter, and
// forwards the IChatClient methods to the GrokChatClient implementation which is cached per client.
class GrokChatClientAdapter(GrokClient client, string model) : OpenAI.Chat.ChatClient, IChatClient
class GrokChatClientAdapter(GrokClient client, string model) : global::OpenAI.Chat.ChatClient, IChatClient
{
void IDisposable.Dispose() { }

Expand All @@ -60,36 +60,36 @@ IAsyncEnumerable<ChatResponseUpdate> IChatClient.GetStreamingResponseAsync(IEnum
=> client.GetChatClientImpl(options?.ModelId ?? model).GetStreamingResponseAsync(messages, options, cancellation);

// These are the only two methods actually invoked by the AsIChatClient adapter from M.E.AI.OpenAI
public override Task<ClientResult<OpenAI.Chat.ChatCompletion>> CompleteChatAsync(IEnumerable<OpenAI.Chat.ChatMessage>? messages, OpenAI.Chat.ChatCompletionOptions? options = null, CancellationToken cancellationToken = default)
public override Task<ClientResult<global::OpenAI.Chat.ChatCompletion>> CompleteChatAsync(IEnumerable<global::OpenAI.Chat.ChatMessage>? messages, global::OpenAI.Chat.ChatCompletionOptions? options = null, CancellationToken cancellationToken = default)
=> throw new NotSupportedException($"Consume directly as an {nameof(IChatClient)} instead of invoking {nameof(OpenAIClientExtensions.AsIChatClient)} on this instance.");

public override AsyncCollectionResult<OpenAI.Chat.StreamingChatCompletionUpdate> CompleteChatStreamingAsync(IEnumerable<OpenAI.Chat.ChatMessage>? messages, OpenAI.Chat.ChatCompletionOptions? options = null, CancellationToken cancellationToken = default)
public override AsyncCollectionResult<global::OpenAI.Chat.StreamingChatCompletionUpdate> CompleteChatStreamingAsync(IEnumerable<global::OpenAI.Chat.ChatMessage>? messages, global::OpenAI.Chat.ChatCompletionOptions? options = null, CancellationToken cancellationToken = default)
=> throw new NotSupportedException($"Consume directly as an {nameof(IChatClient)} instead of invoking {nameof(OpenAIClientExtensions.AsIChatClient)} on this instance.");

#region Unsupported

public override ClientResult CompleteChat(BinaryContent? content, RequestOptions? options = null)
=> throw new NotSupportedException($"Consume directly as an {nameof(IChatClient)}.");

public override ClientResult<OpenAI.Chat.ChatCompletion> CompleteChat(IEnumerable<OpenAI.Chat.ChatMessage>? messages, OpenAI.Chat.ChatCompletionOptions? options = null, CancellationToken cancellationToken = default)
public override ClientResult<global::OpenAI.Chat.ChatCompletion> CompleteChat(IEnumerable<global::OpenAI.Chat.ChatMessage>? messages, global::OpenAI.Chat.ChatCompletionOptions? options = null, CancellationToken cancellationToken = default)
=> throw new NotSupportedException($"Consume directly as an {nameof(IChatClient)}.");

public override ClientResult<OpenAI.Chat.ChatCompletion> CompleteChat(params OpenAI.Chat.ChatMessage[] messages)
public override ClientResult<global::OpenAI.Chat.ChatCompletion> CompleteChat(params global::OpenAI.Chat.ChatMessage[] messages)
=> throw new NotSupportedException($"Consume directly as an {nameof(IChatClient)}.");

public override Task<ClientResult> CompleteChatAsync(BinaryContent? content, RequestOptions? options = null)
=> throw new NotSupportedException($"Consume directly as an {nameof(IChatClient)}.");

public override Task<ClientResult<OpenAI.Chat.ChatCompletion>> CompleteChatAsync(params OpenAI.Chat.ChatMessage[] messages)
public override Task<ClientResult<global::OpenAI.Chat.ChatCompletion>> CompleteChatAsync(params global::OpenAI.Chat.ChatMessage[] messages)
=> throw new NotSupportedException($"Consume directly as an {nameof(IChatClient)}.");

public override CollectionResult<OpenAI.Chat.StreamingChatCompletionUpdate> CompleteChatStreaming(IEnumerable<OpenAI.Chat.ChatMessage>? messages, OpenAI.Chat.ChatCompletionOptions? options = null, CancellationToken cancellationToken = default)
public override CollectionResult<global::OpenAI.Chat.StreamingChatCompletionUpdate> CompleteChatStreaming(IEnumerable<global::OpenAI.Chat.ChatMessage>? messages, global::OpenAI.Chat.ChatCompletionOptions? options = null, CancellationToken cancellationToken = default)
=> throw new NotSupportedException($"Consume directly as an {nameof(IChatClient)}.");

public override CollectionResult<OpenAI.Chat.StreamingChatCompletionUpdate> CompleteChatStreaming(params OpenAI.Chat.ChatMessage[] messages)
public override CollectionResult<global::OpenAI.Chat.StreamingChatCompletionUpdate> CompleteChatStreaming(params global::OpenAI.Chat.ChatMessage[] messages)
=> throw new NotSupportedException($"Consume directly as an {nameof(IChatClient)}.");

public override AsyncCollectionResult<OpenAI.Chat.StreamingChatCompletionUpdate> CompleteChatStreamingAsync(params OpenAI.Chat.ChatMessage[] messages)
public override AsyncCollectionResult<global::OpenAI.Chat.StreamingChatCompletionUpdate> CompleteChatStreamingAsync(params global::OpenAI.Chat.ChatMessage[] messages)
=> throw new NotSupportedException($"Consume directly as an {nameof(IChatClient)}.");

#endregion
Expand Down
2 changes: 1 addition & 1 deletion src/AI/Grok/GrokSearchTool.cs
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
using System.Text.Json.Serialization;
using Microsoft.Extensions.AI;

namespace Devlooped.Extensions.AI;
namespace Devlooped.Extensions.AI.Grok;

/// <summary>
/// Enables or disables Grok's live search capabilities.
Expand Down
2 changes: 1 addition & 1 deletion src/AI/OpenAI/OpenAIChatClient.cs
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@
using OpenAI;
using OpenAI.Responses;

namespace Devlooped.Extensions.AI;
namespace Devlooped.Extensions.AI.OpenAI;

/// <summary>
/// An <see cref="IChatClient"/> implementation for OpenAI.
Expand Down
Loading
Loading