Skip to content

Commit 9381c8d

Browse files
authored
.Net: Update KernelFunctionInvokingChatClient as a specialization of MEAI.FunctionInvokingChatClient (#11678)
### Motivation and Context - Resolves #11628 - dotnet/extensions#6325 This PR introduces changes recently merged to MEAI which allow flexibility and reuse from the higher-level implementation perspective of Semantic Kernel. - KernelFunctionInvokingChatClient : MEAI.FunctionInvokingChatClient - AutoInvocationFunctionContext : MEAI.FunctionInvocationContext - KernelArguments : MEAI.AIFunctionArguments FYI @stephentoub @SteveSandersonMS
1 parent baf8250 commit 9381c8d

File tree

11 files changed

+648
-1198
lines changed

11 files changed

+648
-1198
lines changed

dotnet/src/Connectors/Connectors.Ollama/Extensions/OllamaServiceCollectionExtensions.cs

Lines changed: 14 additions & 35 deletions
Original file line numberDiff line numberDiff line change
@@ -155,16 +155,16 @@ public static IServiceCollection AddOllamaChatCompletion(
155155
{
156156
var loggerFactory = serviceProvider.GetService<ILoggerFactory>();
157157

158-
var builder = ((IChatClient)new OllamaApiClient(endpoint, modelId))
159-
.AsBuilder()
160-
.UseFunctionInvocation(loggerFactory, config => config.MaximumIterationsPerRequest = MaxInflightAutoInvokes);
158+
var ollamaClient = (IChatClient)new OllamaApiClient(endpoint, modelId);
161159

162160
if (loggerFactory is not null)
163161
{
164-
builder.UseLogging(loggerFactory);
162+
ollamaClient.AsBuilder().UseLogging(loggerFactory).Build();
165163
}
166164

167-
return builder.Build(serviceProvider).AsChatCompletionService(serviceProvider);
165+
return ollamaClient
166+
.AsKernelFunctionInvokingChatClient(loggerFactory)
167+
.AsChatCompletionService();
168168
});
169169
}
170170

@@ -190,16 +190,16 @@ public static IServiceCollection AddOllamaChatCompletion(
190190

191191
var loggerFactory = serviceProvider.GetService<ILoggerFactory>();
192192

193-
var builder = ((IChatClient)new OllamaApiClient(httpClient, modelId))
194-
.AsBuilder()
195-
.UseFunctionInvocation(loggerFactory, config => config.MaximumIterationsPerRequest = MaxInflightAutoInvokes);
193+
var ollamaClient = (IChatClient)new OllamaApiClient(httpClient, modelId);
196194

197195
if (loggerFactory is not null)
198196
{
199-
builder.UseLogging(loggerFactory);
197+
ollamaClient.AsBuilder().UseLogging(loggerFactory).Build();
200198
}
201199

202-
return builder.Build(serviceProvider).AsChatCompletionService(serviceProvider);
200+
return ollamaClient
201+
.AsKernelFunctionInvokingChatClient(loggerFactory)
202+
.AsChatCompletionService();
203203
});
204204
}
205205

@@ -231,15 +231,16 @@ public static IServiceCollection AddOllamaChatCompletion(
231231
}
232232

233233
var builder = ((IChatClient)ollamaClient)
234-
.AsBuilder()
235-
.UseFunctionInvocation(loggerFactory, config => config.MaximumIterationsPerRequest = MaxInflightAutoInvokes);
234+
.AsKernelFunctionInvokingChatClient(loggerFactory)
235+
.AsBuilder();
236236

237237
if (loggerFactory is not null)
238238
{
239239
builder.UseLogging(loggerFactory);
240240
}
241241

242-
return builder.Build(serviceProvider).AsChatCompletionService(serviceProvider);
242+
return builder.Build(serviceProvider)
243+
.AsChatCompletionService(serviceProvider);
243244
});
244245
}
245246

@@ -355,26 +356,4 @@ public static IServiceCollection AddOllamaTextEmbeddingGeneration(
355356
}
356357

357358
#endregion
358-
359-
#region Private
360-
361-
/// <summary>
362-
/// The maximum number of auto-invokes that can be in-flight at any given time as part of the current
363-
/// asynchronous chain of execution.
364-
/// </summary>
365-
/// <remarks>
366-
/// This is a fail-safe mechanism. If someone accidentally manages to set up execution settings in such a way that
367-
/// auto-invocation is invoked recursively, and in particular where a prompt function is able to auto-invoke itself,
368-
/// we could end up in an infinite loop. This const is a backstop against that happening. We should never come close
369-
/// to this limit, but if we do, auto-invoke will be disabled for the current flow in order to prevent runaway execution.
370-
/// With the current setup, the way this could possibly happen is if a prompt function is configured with built-in
371-
/// execution settings that opt-in to auto-invocation of everything in the kernel, in which case the invocation of that
372-
/// prompt function could advertize itself as a candidate for auto-invocation. We don't want to outright block that,
373-
/// if that's something a developer has asked to do (e.g. it might be invoked with different arguments than its parent
374-
/// was invoked with), but we do want to limit it. This limit is arbitrary and can be tweaked in the future and/or made
375-
/// configurable should need arise.
376-
/// </remarks>
377-
private const int MaxInflightAutoInvokes = 128;
378-
379-
#endregion
380359
}

dotnet/src/Connectors/Connectors.OpenAI.UnitTests/Core/AutoFunctionInvocationFilterChatClientTests.cs

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -736,7 +736,7 @@ public void Dispose()
736736
private static object? GetLastFunctionResultFromChatResponse(ChatResponse chatResponse)
737737
{
738738
Assert.NotEmpty(chatResponse.Messages);
739-
var chatMessage = chatResponse.Messages[^1];
739+
var chatMessage = chatResponse.Messages.Where(m => m.Role == ChatRole.Tool).Last();
740740

741741
Assert.NotEmpty(chatMessage.Contents);
742742
Assert.Contains(chatMessage.Contents, c => c is Microsoft.Extensions.AI.FunctionResultContent);

dotnet/src/Functions/Functions.OpenApi/RestApiOperationRunner.cs

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -13,6 +13,8 @@
1313
using System.Threading.Tasks;
1414
using Microsoft.SemanticKernel.Http;
1515

16+
#pragma warning disable CA1859 // Use concrete types when possible for improved performance
17+
1618
namespace Microsoft.SemanticKernel.Plugins.OpenApi;
1719

1820
/// <summary>

dotnet/src/IntegrationTests/Plugins/Core/SessionsPythonPluginTests.cs

Lines changed: 8 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -1,19 +1,19 @@
11
// Copyright (c) Microsoft. All rights reserved.
22

33
using System;
4-
using System.Threading.Tasks;
5-
using Xunit;
6-
using Microsoft.SemanticKernel.Plugins.Core.CodeInterpreter;
7-
using Microsoft.Extensions.Configuration;
8-
using SemanticKernel.IntegrationTests.TestSettings;
4+
using System.Collections.Generic;
95
using System.Net.Http;
10-
using Azure.Identity;
6+
using System.Threading.Tasks;
117
using Azure.Core;
12-
using System.Collections.Generic;
13-
using Microsoft.SemanticKernel;
8+
using Azure.Identity;
9+
using Microsoft.Extensions.Configuration;
1410
using Microsoft.Extensions.DependencyInjection;
11+
using Microsoft.SemanticKernel;
1512
using Microsoft.SemanticKernel.ChatCompletion;
1613
using Microsoft.SemanticKernel.Connectors.AzureOpenAI;
14+
using Microsoft.SemanticKernel.Plugins.Core.CodeInterpreter;
15+
using SemanticKernel.IntegrationTests.TestSettings;
16+
using Xunit;
1717

1818
namespace SemanticKernel.IntegrationTests.Plugins.Core;
1919

dotnet/src/InternalUtilities/connectors/AI/FunctionCalling/FunctionCallsProcessor.cs

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -51,7 +51,7 @@ internal sealed class FunctionCallsProcessor
5151
/// will be disabled. This is a safeguard against possible runaway execution if the model routinely re-requests
5252
/// the same function over and over.
5353
/// </remarks>
54-
private const int MaximumAutoInvokeAttempts = 128;
54+
internal const int MaximumAutoInvokeAttempts = 128;
5555

5656
/// <summary>Tracking <see cref="AsyncLocal{Int32}"/> for <see cref="MaxInflightAutoInvokes"/>.</summary>
5757
/// <remarks>

dotnet/src/SemanticKernel.Abstractions/AI/ChatClient/ChatOptionsExtensions.cs

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -14,7 +14,6 @@ namespace Microsoft.SemanticKernel.ChatCompletion;
1414
internal static class ChatOptionsExtensions
1515
{
1616
internal const string KernelKey = "AutoInvokingKernel";
17-
internal const string IsStreamingKey = "AutoInvokingIsStreaming";
1817
internal const string ChatMessageContentKey = "AutoInvokingChatCompletionContent";
1918
internal const string PromptExecutionSettingsKey = "AutoInvokingPromptExecutionSettings";
2019

0 commit comments

Comments (0)