Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -171,7 +171,7 @@ private static void ProcessUpdate(ChatResponseUpdate update, ChatResponse respon
// response ID than the newest update, create a new message.
ChatMessage message;
if (response.Messages.Count == 0 ||
(update.ResponseId is string updateId && response.ResponseId is string responseId && updateId != responseId))
(update.ResponseId is { Length: > 0 } updateId && response.ResponseId is string responseId && updateId != responseId))
{
message = new ChatMessage(ChatRole.Assistant, []);
response.Messages.Add(message);
Expand Down Expand Up @@ -213,7 +213,7 @@ private static void ProcessUpdate(ChatResponseUpdate update, ChatResponse respon
// Other members on a ChatResponseUpdate map to members of the ChatResponse.
// Update the response object with those, preferring the values from later updates.

if (update.ResponseId is not null)
if (update.ResponseId is { Length: > 0 })
{
// Note that this must come after the message checks earlier, as they depend
// on this value for change detection.
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -99,6 +99,12 @@ public IList<AIContent> Contents
public AdditionalPropertiesDictionary? AdditionalProperties { get; set; }

/// <summary>Gets or sets the ID of the response of which this update is a part.</summary>
/// <remarks>
/// This value is used when <see cref="ChatResponseExtensions.ToChatResponseAsync(IAsyncEnumerable{ChatResponseUpdate}, System.Threading.CancellationToken)"/>
/// groups <see cref="ChatResponseUpdate"/> instances into <see cref="ChatMessage"/> instances.
/// The value must be unique to each call to the underlying provider, and must be shared by
/// all updates that are part of the same response.
/// </remarks>
public string? ResponseId { get; set; }

/// <summary>Gets or sets the chat thread ID associated with the chat response of which this update is a part.</summary>
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -132,6 +132,9 @@ public async IAsyncEnumerable<ChatResponseUpdate> GetStreamingResponseAsync(
await OllamaUtilities.ThrowUnsuccessfulOllamaResponseAsync(httpResponse, cancellationToken).ConfigureAwait(false);
}

// Ollama doesn't set a response ID on streamed chunks, so we need to generate one.
var responseId = Guid.NewGuid().ToString("N");

using var httpResponseStream = await httpResponse.Content
#if NET
.ReadAsStreamAsync(cancellationToken)
Expand Down Expand Up @@ -160,7 +163,7 @@ public async IAsyncEnumerable<ChatResponseUpdate> GetStreamingResponseAsync(
CreatedAt = DateTimeOffset.TryParse(chunk.CreatedAt, CultureInfo.InvariantCulture, DateTimeStyles.None, out DateTimeOffset createdAt) ? createdAt : null,
FinishReason = ToFinishReason(chunk),
ModelId = modelId,
ResponseId = chunk.CreatedAt,
ResponseId = responseId,
Role = chunk.Message?.Role is not null ? new ChatRole(chunk.Message.Role) : null,
};

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -51,7 +51,7 @@ public async Task ToChatResponse_SuccessfullyCreatesResponse(bool useAsync)

Assert.Equal("123", response.ChatThreadId);

ChatMessage message = response.Messages.Last();
ChatMessage message = response.Messages.Single();
Assert.Equal(new ChatRole("human"), message.Role);
Assert.Equal("Someone", message.AuthorName);
Assert.Null(message.AdditionalProperties);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -130,6 +130,25 @@ public virtual async Task GetStreamingResponseAsync_UsageDataAvailable()
Assert.Equal(usage.Details.InputTokenCount + usage.Details.OutputTokenCount, usage.Details.TotalTokenCount);
}

[ConditionalFact]
public virtual async Task GetStreamingResponseAsync_AppendToHistory()
{
    SkipIfNotEnabled();

    // Seed the conversation with exactly one user message.
    List<ChatMessage> history = [new(ChatRole.User, "Explain in 100 words how AI works")];

    var updates = _chatClient.GetStreamingResponseAsync(history);

    // The history must remain untouched until the stream is actually consumed.
    Assert.Single(history);

    // Drain the stream, folding all updates into the history as assistant output.
    await history.AddMessagesAsync(updates);

    Assert.Equal(2, history.Count);
    Assert.Equal(ChatRole.Assistant, history[1].Role);

    // All streamed text updates should have been coalesced into a single TextContent.
    var coalescedText = (TextContent)history[1].Contents.Single();
    Assert.NotEmpty(coalescedText.Text);
    Assert.Equal(history[1].Text, coalescedText.Text);
}

protected virtual string? GetModel_MultiModal_DescribeImage() => null;

[ConditionalFact]
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -171,11 +171,12 @@ public async Task BasicRequestResponse_Streaming()
using IChatClient client = new OllamaChatClient("http://localhost:11434", "llama3.1", httpClient);

List<ChatResponseUpdate> updates = [];
await foreach (var update in client.GetStreamingResponseAsync("hello", new()
var streamingResponse = client.GetStreamingResponseAsync("hello", new()
{
MaxOutputTokens = 20,
Temperature = 0.5f,
}))
});
await foreach (var update in streamingResponse)
{
updates.Add(update);
}
Expand All @@ -201,6 +202,10 @@ public async Task BasicRequestResponse_Streaming()
Assert.Equal(11, usage.Details.InputTokenCount);
Assert.Equal(20, usage.Details.OutputTokenCount);
Assert.Equal(31, usage.Details.TotalTokenCount);

var chatResponse = await streamingResponse.ToChatResponseAsync();
Assert.Single(Assert.Single(chatResponse.Messages).Contents);
Assert.Equal("Hello! How are you today? Is there something I can help you with or would you like to", chatResponse.Text);
}

[Fact]
Expand Down
Loading