diff --git a/src/Open.Blazor.Core/Features/Chat/Chat.razor b/src/Open.Blazor.Core/Features/Chat/Chat.razor
index 81c46fe..3c8e4f9 100644
--- a/src/Open.Blazor.Core/Features/Chat/Chat.razor
+++ b/src/Open.Blazor.Core/Features/Chat/Chat.razor
@@ -23,16 +23,15 @@
What it is: Think of it as a measure of creativity.
How it works: Lower values make the model's responses more predictable and focused, while higher values make the responses more diverse and creative.
- @if (UseSemanticKernel)
- {
-
-
- What it is: Limits the length of the response.
- How it works: It sets a maximum number of words or parts of words the model can generate in its response.
-
- }
+
+
+
+ What it is: Limits the length of the response.
+ How it works: It sets a maximum number of words or parts of words the model can generate in its response.
+
+
What it is: Similar to the presence penalty but focuses on how often words are used.
How it works: It discourages the model from using words too frequently, making the output more diverse.
- @if (UseSemanticKernel)
- {
-
-
- What it is: Information about the person using the model.
- How it works: It might personalize the responses based on the user's identity or preferences.
-
- }
+
+
+
+ What it is: Information about the person using the model.
+ How it works: It might personalize the responses based on the user's identity or preferences.
+
diff --git a/src/Open.Blazor.Core/Features/Chat/Chat.razor.cs b/src/Open.Blazor.Core/Features/Chat/Chat.razor.cs
index 23d9686..17892d7 100644
--- a/src/Open.Blazor.Core/Features/Chat/Chat.razor.cs
+++ b/src/Open.Blazor.Core/Features/Chat/Chat.razor.cs
@@ -37,10 +37,6 @@ public partial class Chat : ComponentBase, IDisposable
private double _topP = 1;
private string _userMessage = string.Empty;
-
- [Parameter] public bool UseSemanticKernel { get; set; }
-
-
[Inject] private ChatService ChatService { get; set; } = default!;
[Inject] private OllamaService OllamaService { get; set; } = default!;
@@ -82,7 +78,7 @@ protected override async Task OnInitializedAsync()
}
var defaultModel = _activeOllamaModels.Models.First();
- if (UseSemanticKernel) _kernel = ChatService.CreateKernel(defaultModel.Name);
+ _kernel = ChatService.CreateKernel(defaultModel.Name);
_selectedModel = defaultModel;
_cancellationTokenSource = new CancellationTokenSource();
@@ -117,12 +113,8 @@ private async Task SendMessage()
var settings = ChatSettings.New(_temperature, _topP, _presencePenalty, _frequencyPenalty, _maxTokens,
default, _chatSystemPrompt);
- if (UseSemanticKernel)
- await ChatService.StreamChatMessageContentAsync(_kernel, _discourse, OnStreamCompletion, settings,
- _cancellationTokenSource.Token);
- else
- await ChatService.StreamChatMessageContentAsync(_discourse, OnStreamCompletion, settings,
- _cancellationTokenSource.Token);
+ await ChatService.StreamChatMessageContentAsync(_kernel, _discourse, OnStreamCompletion, settings,
+ _cancellationTokenSource.Token);
_discourse.ChatMessages.Last().IsDoneStreaming = true;
}
@@ -164,7 +156,7 @@ private void ShowError(string errorMessage)
private void HandleSelectedOptionChanged(OllamaModel selectedModelChanged)
{
_selectedModel = selectedModelChanged;
- if (UseSemanticKernel) _kernel = ChatService.CreateKernel(_selectedModel.Name);
+ _kernel = ChatService.CreateKernel(_selectedModel.Name);
}
private async Task StopChat()
@@ -175,7 +167,7 @@ private async Task StopChat()
private async Task ScrollToBottom()
{
await JsRuntime.InvokeVoidAsync("ScrollToBottom", "chat-window");
- StateHasChanged();
+ await InvokeAsync(StateHasChanged);
}
private void OnSpeechRecognized(object? sender, SpeechRecognitionEventArgs args)
diff --git a/src/Open.Blazor.Core/Features/Chat/ChatService.cs b/src/Open.Blazor.Core/Features/Chat/ChatService.cs
index 3606358..8dd03a9 100644
--- a/src/Open.Blazor.Core/Features/Chat/ChatService.cs
+++ b/src/Open.Blazor.Core/Features/Chat/ChatService.cs
@@ -1,4 +1,4 @@
-using Microsoft.Extensions.AI;
+
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.DependencyInjection.Extensions;
using Microsoft.SemanticKernel;
@@ -9,26 +9,19 @@ namespace Open.Blazor.Core.Features.Chat;
internal sealed class ChatService
{
- private readonly IChatClient _client;
- private readonly Config _config;
- public ChatService(Config config)
- {
- _config = config;
- //test
- _client = new OllamaChatClient(new Uri(_config.OllamaUrl), "mistral:latest");
- }
+ private readonly Config _config;
+ public ChatService(Config config) => _config = config;
+
public Kernel CreateKernel(string model)
{
ArgumentNullException.ThrowIfNull(model);
-#pragma warning disable SKEXP0010
var kernelBuilder = Kernel.CreateBuilder()
- .AddOpenAIChatCompletion(
+ .AddOllamaChatCompletion(
model,
- new Uri(_config.OllamaUrl),
- null);
+ new Uri(_config.OllamaUrl));
return kernelBuilder.Build();
}
@@ -58,26 +51,6 @@ public async Task StreamChatMessageContentAsync(Kernel kernel,
await onStreamCompletion.Invoke(completionResult.Content ?? string.Empty);
}
}
-
- public async Task StreamChatMessageContentAsync(Discourse discourse,
-    Func<string, Task> onStreamCompletion,
- ChatSettings chatSettings,
- CancellationToken cancellationToken = default)
- {
- ArgumentNullException.ThrowIfNull(discourse);
- ArgumentNullException.ThrowIfNull(onStreamCompletion);
- ArgumentNullException.ThrowIfNull(chatSettings);
-
-
- var chatOptions = chatSettings.ToChatOptions();
- var history = discourse.ToChatMessages();
- await foreach (var completionResult in _client.CompleteStreamingAsync(history, chatOptions, cancellationToken))
- {
- if (cancellationToken.IsCancellationRequested) return;
-
- await onStreamCompletion.Invoke(completionResult.Text ?? string.Empty);
- }
- }
}
public static class ChatServiceExensions
diff --git a/src/Open.Blazor.Core/Open.Blazor.Core.csproj b/src/Open.Blazor.Core/Open.Blazor.Core.csproj
index 992460d..caea2ba 100644
--- a/src/Open.Blazor.Core/Open.Blazor.Core.csproj
+++ b/src/Open.Blazor.Core/Open.Blazor.Core.csproj
@@ -4,6 +4,8 @@
net8.0
enable
enable
+ SKEXP0010
+ SKEXP0070
@@ -15,12 +17,13 @@
-
-
-
+
+
+
+
-
+
diff --git a/tests/Open.Blazor.Tests/ChatServiceTest.cs b/tests/Open.Blazor.Tests/ChatServiceTest.cs
index 4cbc554..77d8b28 100644
--- a/tests/Open.Blazor.Tests/ChatServiceTest.cs
+++ b/tests/Open.Blazor.Tests/ChatServiceTest.cs
@@ -93,67 +93,5 @@ public async Task StreamChatMessageContentAsync_WithChatSettings_ShouldReturnDis
await Assert.ThrowsAsync(() => task);
}
}
-
-
- [Fact]
- public async Task StreamChatMessageContentAsync_ShouldThrowArgumentNullException_WhenDiscourseIsNull()
- {
- var service = new ChatService(_config);
- var chatSettings = new ChatSettings();
-        Func<string, Task> onStreamCompletion = async _ => await Task.CompletedTask;
-
-        await Assert.ThrowsAsync<ArgumentNullException>(() =>
- service.StreamChatMessageContentAsync(null, onStreamCompletion, chatSettings));
- }
-
- [Fact]
- public async Task StreamChatMessageContentAsync_ShouldThrowArgumentNullException_WhenOnStreamCompletionIsNull()
- {
- var service = new ChatService(_config);
- var discourse = new Discourse();
- var chatSettings = new ChatSettings();
-
-        await Assert.ThrowsAsync<ArgumentNullException>(() =>
- service.StreamChatMessageContentAsync(discourse, null, chatSettings));
- }
-
-
- [Fact]
- public async Task StreamChatMessageContentAsync_ShouldReturnWhenCancellationIsRequested()
- {
- var service = new ChatService(_config);
- var discourse = new Discourse();
- var chatSettings = new ChatSettings();
-        Func<string, Task> onStreamCompletion = async _ => await Task.CompletedTask;
-
- using (var cts = new CancellationTokenSource())
- {
- var task = service.StreamChatMessageContentAsync(discourse, onStreamCompletion, chatSettings, cts.Token);
- Assert.False(task.IsCompleted);
-
- cts.Cancel();
-
- await Assert.ThrowsAsync(() => task);
- }
- }
-
- [Fact]
- public async Task StreamChatMessageContentAsync_ShouldProcessMessagesSuccessfully()
- {
- var service = new ChatService(_config);
- var discourse = new Discourse();
- var chatSettings = new ChatSettings();
- discourse.AddChatMessage(MessageRole.User, "Hello", Model);
- var onStreamCompletionCalled = false;
-
-        Func<string, Task> onStreamCompletion = async message =>
- {
- onStreamCompletionCalled = true;
- await Task.CompletedTask;
- };
-
- await service.StreamChatMessageContentAsync(discourse, onStreamCompletion, chatSettings);
-
- Assert.True(onStreamCompletionCalled);
- }
+
}
\ No newline at end of file