diff --git a/src/ProjectTemplates/Microsoft.Extensions.AI.Templates/src/ChatWithCustomData/.template.config/template.json b/src/ProjectTemplates/Microsoft.Extensions.AI.Templates/src/ChatWithCustomData/.template.config/template.json
index 8ef67218de8..cd88e30dfc1 100644
--- a/src/ProjectTemplates/Microsoft.Extensions.AI.Templates/src/ChatWithCustomData/.template.config/template.json
+++ b/src/ProjectTemplates/Microsoft.Extensions.AI.Templates/src/ChatWithCustomData/.template.config/template.json
@@ -180,7 +180,7 @@
       "type": "generated",
       "generator": "constant",
       "parameters": {
-        "value": "llama3.1"
+        "value": "llama3.2"
       }
     },
     "OllamaEmbeddingModelDefault": {
diff --git a/src/ProjectTemplates/Microsoft.Extensions.AI.Templates/src/ChatWithCustomData/ChatWithCustomData.Web/Components/Pages/Chat/Chat.razor b/src/ProjectTemplates/Microsoft.Extensions.AI.Templates/src/ChatWithCustomData/ChatWithCustomData.Web/Components/Pages/Chat/Chat.razor
index 83ed600af4d..8592a767ad0 100644
--- a/src/ProjectTemplates/Microsoft.Extensions.AI.Templates/src/ChatWithCustomData/ChatWithCustomData.Web/Components/Pages/Chat/Chat.razor
+++ b/src/ProjectTemplates/Microsoft.Extensions.AI.Templates/src/ChatWithCustomData/ChatWithCustomData.Web/Components/Pages/Chat/Chat.razor
@@ -24,8 +24,12 @@
         Use only simple markdown to format your responses.
 
         Use the search tool to find relevant information. When you do this, end your
-        reply with citations in the special format, always formatted as XML:
-        <citation filename='string' page_number='number'>exact quote here</citation>.
+        reply with citations in the special XML format:
+
+        <citation filename='string' page_number='number'>exact quote here</citation>
+
+        Always include the citation in your response if there are results.
+        The quote must be max 5 words, taken word-for-word from the search result, and is the basis for why the citation is relevant.
         Don't refer to the presence of citations; just emit these tags right at the end, with no surrounding text.
         ";
 
@@ -52,14 +56,14 @@
         chatSuggestions?.Clear();
         await chatInput!.FocusAsync();
 
-#if (IsOllama)
+@*#if (IsOllama)
         // Display a new response from the IChatClient, streaming responses
         // aren't supported because Ollama will not support both streaming and using Tools
         currentResponseCancellation = new();
         ChatCompletion response = await ChatClient.CompleteAsync(messages, chatOptions, currentResponseCancellation.Token);
         currentResponseMessage = response.Message;
         ChatMessageItem.NotifyChanged(currentResponseMessage);
-#else
+#else*@
         // Stream and display a new response from the IChatClient
         var responseText = new TextContent("");
         currentResponseMessage = new ChatMessage(ChatRole.Assistant, [responseText]);
@@ -69,7 +73,7 @@
             responseText.Text += chunk.Text;
             ChatMessageItem.NotifyChanged(currentResponseMessage);
         }
-#endif
+@*#endif*@
 
         // Store the final response in the conversation, and begin getting suggestions
         messages.Add(currentResponseMessage!);
diff --git a/src/ProjectTemplates/Microsoft.Extensions.AI.Templates/src/ChatWithCustomData/ChatWithCustomData.Web/Components/Pages/Chat/ChatInput.razor b/src/ProjectTemplates/Microsoft.Extensions.AI.Templates/src/ChatWithCustomData/ChatWithCustomData.Web/Components/Pages/Chat/ChatInput.razor
index 1bc1cd08cec..e87ac6ccf47 100644
--- a/src/ProjectTemplates/Microsoft.Extensions.AI.Templates/src/ChatWithCustomData/ChatWithCustomData.Web/Components/Pages/Chat/ChatInput.razor
+++ b/src/ProjectTemplates/Microsoft.Extensions.AI.Templates/src/ChatWithCustomData/ChatWithCustomData.Web/Components/Pages/Chat/ChatInput.razor
@@ -5,13 +5,6 @@