Commit

Utilize the new AddOllamaChatCompletion.
Signed-off-by: joshuajpiluden@gmail.com <joshuajpiluden@Gmail.com>
jjosh102 committed Nov 8, 2024
1 parent 74527cd commit 67a2e65
Showing 5 changed files with 36 additions and 133 deletions.
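This commit swaps the hand-rolled Ollama plumbing (an IChatClient built directly from Microsoft.Extensions.AI.Ollama, plus an AddOpenAIChatCompletion workaround pointed at the Ollama URL) for Semantic Kernel's dedicated AddOllamaChatCompletion connector. A minimal sketch of the resulting setup, assuming the Microsoft.SemanticKernel.Connectors.Ollama package; the endpoint URL is an assumption (Ollama's default port), and the model name is just the one visible elsewhere in this diff:

```csharp
// Hedged sketch of the new wiring, not the repo's exact code.
using Microsoft.SemanticKernel;

#pragma warning disable SKEXP0070 // the Ollama connector is marked experimental

var kernel = Kernel.CreateBuilder()
    .AddOllamaChatCompletion(
        "mistral:latest",                   // any model already pulled into Ollama
        new Uri("http://localhost:11434"))  // assumed default Ollama endpoint
    .Build();
```

With the connector in place, the separate IChatClient code path (and its tests) becomes redundant, which accounts for most of the 133 deleted lines.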
37 changes: 17 additions & 20 deletions src/Open.Blazor.Core/Features/Chat/Chat.razor
@@ -23,16 +23,15 @@
<p>What it is: Think of it as a measure of creativity. </p>
<p>How it works: Lower values make the model's responses more predictable and focused, while higher values make the responses more diverse and creative. </p>
</FluentTooltip>
-@if (UseSemanticKernel)
-{
-<FluentSlider Id="max-token-control" Label="@($"Max Tokens: {_maxTokens}")" @bind-Value="_maxTokens" Min="0" Max="4000" Step="10" Style="margin:5px"/>
-<FluentTooltip Anchor="max-token-control"
-Delay="300"
-Position="TooltipPosition.Start">
-<p>What it is: Limits the length of the response. </p>
-<p>How it works: It sets a maximum number of words or parts of words the model can generate in its response. </p>
-</FluentTooltip>
-}

<FluentSlider Id="max-token-control" Label="@($"Max Tokens: {_maxTokens}")" @bind-Value="_maxTokens" Min="0" Max="4000" Step="10" Style="margin:5px"/>
<FluentTooltip Anchor="max-token-control"
Delay="300"
Position="TooltipPosition.Start">
<p>What it is: Limits the length of the response. </p>
<p>How it works: It sets a maximum number of words or parts of words the model can generate in its response. </p>
</FluentTooltip>

<FluentSlider Id="topp-control" Label="@($"Top P: {_topP}")" @bind-Value="_topP" Min="0" Max="2" Step=".1" Style="margin:5px"/>
<FluentTooltip Anchor="topp-control"
Delay="300"
@@ -54,16 +53,14 @@
<p>What it is: Similar to the presence penalty but focuses on how often words are used. </p>
<p>How it works: It discourages the model from using words too frequently, making the output more diverse. </p>
</FluentTooltip>
-@if (UseSemanticKernel)
-{
-<FluentTextArea Id="system-prompt-control" Label="System Prompt" @bind-Value="_chatSystemPrompt" Cols="150"></FluentTextArea>
-<FluentTooltip Anchor="system-prompt-control"
-Delay="300"
-Position="TooltipPosition.Start">
-<p>What it is: Information about the person using the model. </p>
-<p>How it works: It might personalize the responses based on the user's identity or preferences. </p>
-</FluentTooltip>
-}

<FluentTextArea Id="system-prompt-control" Label="System Prompt" @bind-Value="_chatSystemPrompt" Cols="150"></FluentTextArea>
<FluentTooltip Anchor="system-prompt-control"
Delay="300"
Position="TooltipPosition.Start">
<p>What it is: Information about the person using the model. </p>
<p>How it works: It might personalize the responses based on the user's identity or preferences. </p>
</FluentTooltip>


</FluentCard>
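The sliders and text area above bind to fields that get bundled into a ChatSettings value when a message is sent (see the Chat.razor.cs diff next). A commented sketch of that call, annotating each argument from the tooltips; the argument order comes from the diff, everything else is illustrative:

```csharp
// Hedged sketch mapping the UI controls to ChatSettings.New
// (Chat.razor.cs); comments paraphrase the tooltips above.
var settings = ChatSettings.New(
    _temperature,       // creativity: lower = focused, higher = diverse
    _topP,              // nucleus-sampling cutoff
    _presencePenalty,   // steers away from words already present
    _frequencyPenalty,  // discourages overly frequent words
    _maxTokens,         // caps response length (0-4000 in the slider)
    default,            // sixth argument is simply passed as default in the repo
    _chatSystemPrompt); // contents of the System Prompt text area
```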
18 changes: 5 additions & 13 deletions src/Open.Blazor.Core/Features/Chat/Chat.razor.cs
@@ -37,10 +37,6 @@ public partial class Chat : ComponentBase, IDisposable
private double _topP = 1;
private string _userMessage = string.Empty;


-[Parameter] public bool UseSemanticKernel { get; set; }


[Inject] private ChatService ChatService { get; set; } = default!;

[Inject] private OllamaService OllamaService { get; set; } = default!;
@@ -82,7 +78,7 @@ protected override async Task OnInitializedAsync()
}

var defaultModel = _activeOllamaModels.Models.First();
-if (UseSemanticKernel) _kernel = ChatService.CreateKernel(defaultModel.Name);
+_kernel = ChatService.CreateKernel(defaultModel.Name);

_selectedModel = defaultModel;
_cancellationTokenSource = new CancellationTokenSource();
@@ -117,12 +113,8 @@ private async Task SendMessage()
var settings = ChatSettings.New(_temperature, _topP, _presencePenalty, _frequencyPenalty, _maxTokens,
default, _chatSystemPrompt);

-if (UseSemanticKernel)
-await ChatService.StreamChatMessageContentAsync(_kernel, _discourse, OnStreamCompletion, settings,
-_cancellationTokenSource.Token);
-else
-await ChatService.StreamChatMessageContentAsync(_discourse, OnStreamCompletion, settings,
-_cancellationTokenSource.Token);
+await ChatService.StreamChatMessageContentAsync(_kernel, _discourse, OnStreamCompletion, settings,
+_cancellationTokenSource.Token);

_discourse.ChatMessages.Last().IsDoneStreaming = true;
}
@@ -164,7 +156,7 @@ private void ShowError(string errorMessage)
private void HandleSelectedOptionChanged(OllamaModel selectedModelChanged)
{
_selectedModel = selectedModelChanged;
-if (UseSemanticKernel) _kernel = ChatService.CreateKernel(_selectedModel.Name);
+_kernel = ChatService.CreateKernel(_selectedModel.Name);
}

private async Task StopChat()
@@ -175,7 +167,7 @@ private async Task StopChat()
private async Task ScrollToBottom()
{
await JsRuntime.InvokeVoidAsync("ScrollToBottom", "chat-window");
-StateHasChanged();
+await InvokeAsync(StateHasChanged);
}

private void OnSpeechRecognized(object? sender, SpeechRecognitionEventArgs args)
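A small fix rides along in this file: ScrollToBottom now re-renders through InvokeAsync(StateHasChanged) instead of calling StateHasChanged() directly. Streaming callbacks can arrive off Blazor's synchronization context, where a direct StateHasChanged() can throw. A hedged sketch of the pattern; the handler body below is hypothetical, not the repo's actual OnStreamCompletion:

```csharp
// Marshalling a re-render back onto Blazor's sync context.
// ComponentBase.InvokeAsync is the supported way to touch component
// state from a background thread (e.g., a streaming-token callback).
private async Task OnStreamCompletion(string chunk)
{
    _responseBuffer += chunk;            // hypothetical buffer field
    await InvokeAsync(StateHasChanged);  // safe from any thread
}
```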
39 changes: 6 additions & 33 deletions src/Open.Blazor.Core/Features/Chat/ChatService.cs
@@ -1,4 +1,4 @@
-using Microsoft.Extensions.AI;

using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.DependencyInjection.Extensions;
using Microsoft.SemanticKernel;
@@ -9,26 +9,19 @@ namespace Open.Blazor.Core.Features.Chat;

internal sealed class ChatService
{
-private readonly IChatClient _client;
-private readonly Config _config;
-
-public ChatService(Config config)
-{
-_config = config;
-//test
-_client = new OllamaChatClient(new Uri(_config.OllamaUrl), "mistral:latest");
-}
+private readonly Config _config;
+
+public ChatService(Config config) => _config = config;

public Kernel CreateKernel(string model)
{
ArgumentNullException.ThrowIfNull(model);
-#pragma warning disable SKEXP0010

var kernelBuilder = Kernel.CreateBuilder()
-.AddOpenAIChatCompletion(
+.AddOllamaChatCompletion(
model,
-new Uri(_config.OllamaUrl),
-null);
+new Uri(_config.OllamaUrl));

return kernelBuilder.Build();
}
@@ -58,26 +51,6 @@ public async Task StreamChatMessageContentAsync(Kernel kernel,
await onStreamCompletion.Invoke(completionResult.Content ?? string.Empty);
}
}

-public async Task StreamChatMessageContentAsync(Discourse discourse,
-Func<string, Task> onStreamCompletion,
-ChatSettings chatSettings,
-CancellationToken cancellationToken = default)
-{
-ArgumentNullException.ThrowIfNull(discourse);
-ArgumentNullException.ThrowIfNull(onStreamCompletion);
-ArgumentNullException.ThrowIfNull(chatSettings);
-
-
-var chatOptions = chatSettings.ToChatOptions();
-var history = discourse.ToChatMessages();
-await foreach (var completionResult in _client.CompleteStreamingAsync(history, chatOptions, cancellationToken))
-{
-if (cancellationToken.IsCancellationRequested) return;
-
-await onStreamCompletion.Invoke(completionResult.Text ?? string.Empty);
-}
-}
}

public static class ChatServiceExensions
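With the IChatClient overload deleted above, the kernel-based method is the single streaming entry point. A hypothetical end-to-end call under the new wiring; Config, Discourse, and ChatSettings are the repo's own types, while the model name and console sink are assumptions:

```csharp
// Hedged sketch of the surviving streaming path, mirroring SendMessage
// in Chat.razor.cs; values are illustrative only.
var service = new ChatService(config);                // config.OllamaUrl points at Ollama
var kernel = service.CreateKernel("mistral:latest");  // now built via AddOllamaChatCompletion

using var cts = new CancellationTokenSource();
await service.StreamChatMessageContentAsync(
    kernel,
    discourse,                                        // the running conversation
    chunk => { Console.Write(chunk); return Task.CompletedTask; },
    settings,                                         // a ChatSettings, e.g. via ChatSettings.New(...)
    cts.Token);
```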
11 changes: 7 additions & 4 deletions src/Open.Blazor.Core/Open.Blazor.Core.csproj
@@ -4,6 +4,8 @@
<TargetFramework>net8.0</TargetFramework>
<Nullable>enable</Nullable>
<ImplicitUsings>enable</ImplicitUsings>
+<NoWarn>$(NoWarn);SKEXP0010;SKEXP0070</NoWarn>
</PropertyGroup>


@@ -15,12 +17,13 @@
<PackageReference Include="Microsoft.AspNetCore.Components.Web" Version="8.0.10" />
<PackageReference Include="Microsoft.FluentUI.AspNetCore.Components" Version="4.10.2" />
<PackageReference Include="Microsoft.FluentUI.AspNetCore.Components.Icons" Version="4.10.2" />
<PackageReference Include="Microsoft.SemanticKernel" Version="1.25.0" />
<PackageReference Include="Microsoft.SemanticKernel.Planners.Handlebars" Version="1.14.1-preview"/>
<PackageReference Include="Microsoft.SemanticKernel.Plugins.Core" Version="1.14.1-alpha"/>
<PackageReference Include="Microsoft.SemanticKernel" Version="1.28.0" />
<PackageReference Include="Microsoft.SemanticKernel.Planners.Handlebars" Version="1.28.0-preview"/>
<PackageReference Include="Microsoft.SemanticKernel.Plugins.Core" Version="1.28.0-alpha"/>
<PackageReference Include="Microsoft.SemanticKernel.Connectors.Ollama" Version="1.28.0-alpha" />
<PackageReference Include="Markdig" Version="0.37.0"/>
<PackageReference Include="Toolbelt.Blazor.SpeechRecognition" Version="1.0.0"/>
<PackageReference Include="Microsoft.Extensions.AI.Ollama" Version="9.0.0-preview.9.24507.7"/>

</ItemGroup>


64 changes: 1 addition & 63 deletions tests/Open.Blazor.Tests/ChatServiceTest.cs
@@ -93,67 +93,5 @@ public async Task StreamChatMessageContentAsync_WithChatSettings_ShouldReturnDis
await Assert.ThrowsAsync<TaskCanceledException>(() => task);
}
}


-[Fact]
-public async Task StreamChatMessageContentAsync_ShouldThrowArgumentNullException_WhenDiscourseIsNull()
-{
-var service = new ChatService(_config);
-var chatSettings = new ChatSettings();
-Func<string, Task> onStreamCompletion = async _ => await Task.CompletedTask;
-
-await Assert.ThrowsAsync<ArgumentNullException>(() =>
-service.StreamChatMessageContentAsync(null, onStreamCompletion, chatSettings));
-}
-
-[Fact]
-public async Task StreamChatMessageContentAsync_ShouldThrowArgumentNullException_WhenOnStreamCompletionIsNull()
-{
-var service = new ChatService(_config);
-var discourse = new Discourse();
-var chatSettings = new ChatSettings();
-
-await Assert.ThrowsAsync<ArgumentNullException>(() =>
-service.StreamChatMessageContentAsync(discourse, null, chatSettings));
-}
-
-
-[Fact]
-public async Task StreamChatMessageContentAsync_ShouldReturnWhenCancellationIsRequested()
-{
-var service = new ChatService(_config);
-var discourse = new Discourse();
-var chatSettings = new ChatSettings();
-Func<string, Task> onStreamCompletion = async _ => await Task.CompletedTask;
-
-using (var cts = new CancellationTokenSource())
-{
-var task = service.StreamChatMessageContentAsync(discourse, onStreamCompletion, chatSettings, cts.Token);
-Assert.False(task.IsCompleted);
-
-cts.Cancel();
-
-await Assert.ThrowsAsync<TaskCanceledException>(() => task);
-}
-}
-
-[Fact]
-public async Task StreamChatMessageContentAsync_ShouldProcessMessagesSuccessfully()
-{
-var service = new ChatService(_config);
-var discourse = new Discourse();
-var chatSettings = new ChatSettings();
-discourse.AddChatMessage(MessageRole.User, "Hello", Model);
-var onStreamCompletionCalled = false;
-
-Func<string, Task> onStreamCompletion = async message =>
-{
-onStreamCompletionCalled = true;
-await Task.CompletedTask;
-};
-
-await service.StreamChatMessageContentAsync(discourse, onStreamCompletion, chatSettings);
-
-Assert.True(onStreamCompletionCalled);
-}

}

