
Tweaks for Evaluate AI Responses tests #45072


Merged

docs/ai/quickstarts/snippets/evaluate-ai-responses/MyTests.cs (10 changes: 5 additions & 5 deletions)
@@ -10,7 +10,7 @@ public sealed class MyTests
// <SnippetPrivateMembers>
private static ChatConfiguration? s_chatConfiguration;
private static IList<ChatMessage> s_messages = [
- new ChatMessage(
+ new ChatMessage(
ChatRole.System,
"""
You're an AI assistant that can answer questions related to astronomy.
@@ -28,7 +28,7 @@ Keep your responses concise and try to stay under 100 words.
public static async Task InitializeAsync(TestContext _)
{
/// Set up the <see cref="ChatConfiguration"/>,
- // which includes the <see cref="IChatClient"/> that the
+ /// which includes the <see cref="IChatClient"/> that the
/// evaluator uses to communicate with the model.
s_chatConfiguration = GetOllamaChatConfiguration();

@@ -39,7 +39,7 @@ public static async Task InitializeAsync(TestContext _)
ResponseFormat = ChatResponseFormat.Text
};

- /// Fetch the response to be evaluated
+ // Fetch the response to be evaluated
// and store it in a static variable.
ChatResponse response = await s_chatConfiguration.ChatClient.GetResponseAsync(s_messages, chatOptions);
s_response = response.Message;
@@ -49,7 +49,7 @@ public static async Task InitializeAsync(TestContext _)
// <SnippetGetChatConfig>
private static ChatConfiguration GetOllamaChatConfiguration()
{
- /// Get a chat client for the Ollama endpoint.
+ // Get a chat client for the Ollama endpoint.
IChatClient client =
new OllamaChatClient(
new Uri("http://localhost:11434"),
@@ -72,7 +72,7 @@ public async Task TestCoherence()
/// Retrieve the score for coherence from the <see cref="EvaluationResult"/>.
NumericMetric coherence = result.Get<NumericMetric>(CoherenceEvaluator.CoherenceMetricName);

- /// Validate the default interpretation
+ // Validate the default interpretation
// for the returned coherence metric.
Assert.IsFalse(coherence.Interpretation!.Failed);
Assert.IsTrue(coherence.Interpretation.Rating is EvaluationRating.Good or EvaluationRating.Exceptional);
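
For context, the call that produces result in TestCoherence sits just above the lines shown in the last hunk and is not part of this diff. A minimal sketch of that elided setup, assuming the quickstart's preview Microsoft.Extensions.AI.Evaluation API; the coherenceEvaluator variable name and the EvaluateAsync(messages, response, chatConfiguration) overload are assumptions, and the exact shape may differ across preview versions:

IEvaluator coherenceEvaluator = new CoherenceEvaluator();

// Evaluate the stored response against the stored conversation,
// reusing the ChatConfiguration prepared in InitializeAsync.
// (Sketch only; not taken from this PR.)
EvaluationResult result = await coherenceEvaluator.EvaluateAsync(
    s_messages,
    s_response,
    s_chatConfiguration);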