diff --git a/blazor/smart-paste/claude-service.md b/blazor/smart-paste/claude-service.md new file mode 100644 index 0000000000..fabb849df1 --- /dev/null +++ b/blazor/smart-paste/claude-service.md @@ -0,0 +1,217 @@ +--- +layout: post +title: Custom AI Service with Syncfusion Smart Components in Blazor Web App +description: Learn how to integrate and use the Syncfusion component in a Blazor Web App with Claude AI services. +platform: Blazor +control: Smart Paste Button +documentation: ug +--- + +# Getting Started with Smart Components using Claude AI + +This guide walks you through integrating Anthropic's Claude AI with Syncfusion Smart Components in your Blazor App. + +## Prerequisites + +Before you begin, ensure you have: + +* [System requirements for Blazor components](https://blazor.syncfusion.com/documentation/system-requirements) +* Claude account and API key (see setup instructions below) + +### Setting Up Claude + +1. **Create an Anthropic Account** + * Visit [Anthropic Console](https://console.anthropic.com) + * Sign up for a new account + +2. **Get Your API Key** + * Navigate to [API Keys](https://console.anthropic.com/settings/keys) + * Click "Create Key" + +### Models + +For detailed specifications and capabilities, visit the [Claude Models Documentation](https://docs.anthropic.com/claude/docs/models-overview). + +--- + +## Next Steps + +After completing this setup, you can: + +1. [Add Smart Components to your Blazor pages](https://blazor.syncfusion.com/documentation/smart-paste/getting-started) +2. [Configure form annotations for better AI understanding](https://blazor.syncfusion.com/documentation/smart-paste/annotation) +3. [Customize the appearance and behavior of Smart Components](https://blazor.syncfusion.com/documentation/smart-paste/customization) + +--- + +## Step 1: Create a Claude AI Service + +In this step, we'll create a service that handles all communication with Claude's AI. This service is to: + +* Manage secure API authentication +* Send prompts to Claude's models +* Process AI responses + +### Implementation Steps + +1. Create a new file named `ClaudeAIService.cs` +2. Add the required namespaces for HTTP and JSON handling +3. Implement the service class following the code below + +```csharp +using Microsoft.Extensions.AI; +using System.Net; +using System.Text; +using System.Text.Json; +public class ClaudeAIService +{ + private const string ApiKey = "Your API Key"; + private const string ModelName = "Your Model Name"; + private const string Endpoint = "https://api.anthropic.com/v1/messages"; + + private static readonly HttpClient HttpClient = new(new SocketsHttpHandler + { + PooledConnectionLifetime = TimeSpan.FromMinutes(30), + EnableMultipleHttp2Connections = true, + }) + { + DefaultRequestVersion = HttpVersion.Version30 + }; + + private static readonly JsonSerializerOptions JsonOptions = new() + { + PropertyNamingPolicy = JsonNamingPolicy.CamelCase + }; + + public ClaudeAIService() + { + if (!HttpClient.DefaultRequestHeaders.Contains("x-api-key")) + { + HttpClient.DefaultRequestHeaders.Clear(); + HttpClient.DefaultRequestHeaders.Add("x-api-key", ApiKey); + HttpClient.DefaultRequestHeaders.Add("anthropic-version", "2023-06-01"); + } + } + + public async Task CompleteAsync(IList chatMessages) + { + + var requestBody = new ClaudeChatRequest + { + Model = ModelName, + Max_tokens = 1000, + Messages = chatMessages.Select(m => new ClaudeMessage + { + Role = m.Role == ChatRole.User ? 
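+                    // Claude's Messages API accepts only "user" and "assistant" roles, so any non-user role is sent as "assistant"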
"user" : "assistant", + Content = m.Text + }).ToList(), + Stop_sequences = new() { "END_INSERTION", "NEED_INFO", "END_RESPONSE" } + }; + + var json = JsonSerializer.Serialize(requestBody, JsonOptions); + var content = new StringContent(json, Encoding.UTF8, "application/json"); + + try + { + var response = await HttpClient.PostAsync(Endpoint, content); + response.EnsureSuccessStatusCode(); + + var responseString = await response.Content.ReadAsStringAsync(); + var responseObject = JsonSerializer.Deserialize(responseString, JsonOptions); + + return responseObject?.Content?.FirstOrDefault()?.Text ?? "No response from Claude model."; + } + catch (Exception ex) when (ex is HttpRequestException || ex is JsonException) + { + throw new InvalidOperationException("Failed to communicate with Claude API.", ex); + } + } +} +``` + +## Step 2: Define Request and Response Models + +To effectively communicate with Claude's API, we need to define structured models for our requests and responses. These models ensure type safety and make the code more maintainable. + +### Request Models +The request models define the structure of data we send to Claude. + +### Response Models +The response models handle Claude's API responses. + +```CSharp +public class ClaudeChatRequest +{ + public string Model { get; set; } + public int Max_tokens { get; set; } + public List Messages { get; set; } + public List Stop_sequences { get; set; } +} + +public class ClaudeMessage +{ + public string Role { get; set; } // "user" or "assistant" + public string Content { get; set; } +} + +// Claude response format +public class ClaudeChatResponse +{ + public List Content { get; set; } +} + +public class ClaudeContentBlock +{ + public string Text { get; set; } +} +``` + +## Step 3: Create a Custom AI Service + +To integrate Claude with Syncfusion Smart Components, you need to implement the `IInferenceBackend` interface. This interface acts as a bridge between Syncfusion's components and Claude's AI capabilities. + +The `IInferenceBackend` interface is designed to allow custom AI service implementations. It defines the contract for how Syncfusion components communicate with AI services: + +1. Create a new file named `MyCustomService.cs` +2. Add the Syncfusion namespace +3. Implement the interface as shown below + +```CSharp +using Syncfusion.Blazor.SmartComponents; +public class MyCustomService : IInferenceBackend +{ + private readonly ClaudeService _ClaudeService; + + public MyCustomService(ClaudeAIService ClaudeService) + { + _ClaudeService = ClaudeService; + } + + public Task GetChatResponseAsync(ChatParameters options) + { + return _ClaudeService.CompleteAsync(options.Messages); + } +} +``` + + +## Step 4: Configure the Blazor App + +Configure your Blazor application to use the Claude AI service with Syncfusion Smart Components. This involves registering necessary services and setting up the dependency injection container. + +```CSharp + +using Syncfusion.Blazor.SmartComponents; +var builder = WebApplication.CreateBuilder(args); + +.... + +builder.Services.AddSyncfusionBlazor(); +builder.Services.AddSyncfusionSmartComponents(); +builder.Services.AddSingleton(); +builder.Services.AddSingleton(); + +var app = builder.Build(); +.... 
+ +``` \ No newline at end of file diff --git a/blazor/smart-paste/custom-inference-backend.md b/blazor/smart-paste/custom-inference-backend.md new file mode 100644 index 0000000000..624aa5c598 --- /dev/null +++ b/blazor/smart-paste/custom-inference-backend.md @@ -0,0 +1,52 @@ +--- +layout: post +title: Custom AI Service Integration with Smart Components +description: Learn how to use IInferenceBackend to integrate custom AI services with Syncfusion Smart Components +platform: Blazor +control: Smart Paste Button +documentation: ug +--- + +# Custom AI Service Integration + +## Overview + +Syncfusion Smart Components provide built-in support for OpenAI and Azure OpenAI services. However, you can also integrate other AI services using the `IInferenceBackend` interface, which acts as a bridge between Smart Components and your custom AI service. + + +## IInferenceBackend Interface + +The `IInferenceBackend` interface defines a simple contract for AI service integration: + +```csharp +public interface IInferenceBackend +{ + Task GetChatResponseAsync(ChatParameters options); +} +``` + +This interface enables: +- Consistent communication between components and AI services +- Easy switching between different AI providers + + +## Implemented AI Services + +Here are examples of AI services integrated using the `IInferenceBackend` interface: + +| Service | Description | Documentation | +|---------|-------------|---------------| +| Claude | Anthropic's Claude AI | [Claude Integration](claude-service.md) | +| DeepSeek | DeepSeek's AI platform | [DeepSeek Integration](deepseek-service.md) | +| Groq | Groq inference engine | [Groq Integration](groq-service.md) | +| Gemini | Google's Gemini AI | [Gemini Integration](gemini-service.md) | + + +## Service Registration + +Register your custom implementation in `Program.cs`: + +```csharp +builder.Services.AddSingleton(); +``` + diff --git a/blazor/smart-paste/deepseek-service.md b/blazor/smart-paste/deepseek-service.md new file mode 100644 index 0000000000..b23ea5dd35 --- /dev/null +++ b/blazor/smart-paste/deepseek-service.md @@ -0,0 +1,201 @@ +--- +layout: post +title: Custom AI Service with Syncfusion Smart Components in Blazor Web App +description: Learn how to integrate and use the Syncfusion component in a Blazor Web App with DeepSeek AI services. +platform: Blazor +control: Smart Paste Button +documentation: ug +--- + +# Getting Started with Smart Components using DeepSeek AI + +This guide demonstrates how to integrate DeepSeek's powerful AI capabilities with Syncfusion Smart Components in your Blazor App. + +## Prerequisites + +Before you begin, ensure you have: + +* [System requirements for Blazor components](https://blazor.syncfusion.com/documentation/system-requirements) +* DeepSeek account and API key (see setup instructions below) + +### Setting Up DeepSeek + +1. **DeepSeek API Access** + * Create a DeepSeek account at [platform.deepseek.com](https://platform.deepseek.com) + * Navigate to [API Keys](https://platform.deepseek.com/api_keys) + +2. **DeepSeek Models** + + For detailed specifications and pricing, visit the [DeepSeek Models Documentation](https://api-docs.deepseek.com/quick_start/pricing). + + +--- + +## Next Steps + +After completing this setup, you can: + +1. [Add Smart Components to your Blazor pages](https://blazor.syncfusion.com/documentation/smart-paste/getting-started) +2. [Configure form annotations for better AI understanding](https://blazor.syncfusion.com/documentation/smart-paste/annotation) +3. 
[Customize the appearance and behavior of Smart Components](https://blazor.syncfusion.com/documentation/smart-paste/customization) + +--- + +## Step 1: Create a DeepSeek AI Service + +The `DeepSeekAIService` class is responsible for managing all interactions with the DeepSeek API. This service: + +### Implementation Steps + +1. Create a new file named `DeepSeekAIService.cs` +2. Add the following using statements for required dependencies +3. Implement the service class as shown below + +```csharp +using System.Text; +using System.Text.Json; +using System.Net; +using Microsoft.Extensions.AI; +public class DeepSeekAIService +{ + private const string ApiKey = "Your API Key"; + private const string ModelName = "Your Model Name"; + private const string Endpoint = "https://api.deepseek.com/v1/chat/completions"; + + private static readonly HttpClient HttpClient = new(new SocketsHttpHandler + { + PooledConnectionLifetime = TimeSpan.FromMinutes(30), + EnableMultipleHttp2Connections = true, + }) + { + DefaultRequestVersion = HttpVersion.Version30 + }; + + private static readonly JsonSerializerOptions JsonOptions = new() + { + PropertyNamingPolicy = JsonNamingPolicy.CamelCase + }; + + public DeepSeekAIService() + { + if (!HttpClient.DefaultRequestHeaders.Contains("Authorization")) + { + HttpClient.DefaultRequestHeaders.Clear(); + HttpClient.DefaultRequestHeaders.Add("Authorization", $"Bearer {ApiKey}"); + } + } + + public async Task CompleteAsync(IList chatMessages) + { + var requestBody = new DeepSeekChatRequest + { + Model = ModelName, + Temperature = 0.7f, + Messages = chatMessages.Select(m => new DeepSeekMessage + { + Role = m.Role == ChatRole.User ? "user" : "system", + Content = m.Text + }).ToList() + }; + + + var json = JsonSerializer.Serialize(requestBody, JsonOptions); + var content = new StringContent(json, Encoding.UTF8, "application/json"); + + try + { + var response = await HttpClient.PostAsync(Endpoint, content); + response.EnsureSuccessStatusCode(); + + var responseString = await response.Content.ReadAsStringAsync(); + var responseObject = JsonSerializer.Deserialize(responseString, JsonOptions); + + return responseObject?.Choices?.FirstOrDefault()?.Message?.Content ?? "No response from DeepSeek."; + } + catch (Exception ex) when (ex is HttpRequestException || ex is JsonException) + { + + throw new InvalidOperationException("Failed to communicate with DeepSeek API.", ex); + } + } +} +``` + +## Step 2: Define Request and Response Models + +To effectively communicate with DeepSeek's API, we need to create strongly-typed models that represent the request and response data structures. + +Create a new file named `DeepSeekModels.cs` with the following models: + +```CSharp +public class DeepSeekMessage +{ + public string Role { get; set; } + public string Content { get; set; } +} + +public class DeepSeekChatRequest +{ + public string Model { get; set; } + public float Temperature { get; set; } + public List Messages { get; set; } +} + +public class DeepSeekChatResponse +{ + public List Choices { get; set; } +} + +public class DeepSeekChoice +{ + public DeepSeekMessage Message { get; set; } +} +``` + +## Step 3: Create a Custom AI Service + +To integrate DeepSeek with Syncfusion Smart Components, we'll create a custom implementation of the `IInferenceBackend` interface. This interface acts as a bridge between Syncfusion components and your AI service. + +The `IInferenceBackend` interface is the bridge between Syncfusion Smart Components and AI services: + +1. 
Create a new file named `MyCustomService.cs` +2. Add the following implementation: + +```csharp +using Syncfusion.Blazor.SmartComponents; +public class MyCustomService : IInferenceBackend +{ + private readonly DeepSeekAIService _DeepSeekService; + + public MyCustomService(DeepSeekAIService DeepSeekService) + { + _DeepSeekService = DeepSeekService; + } + + public Task GetChatResponseAsync(ChatParameters options) + { + return _DeepSeekService.CompleteAsync(options.Messages); + } +} +``` + +## Step 4: Configure the Blazor App + +Configure your Blazor application to use the DeepSeek AI service with Syncfusion Smart Components. This involves registering necessary services and setting up the dependency injection container. + +```CSharp + +using Syncfusion.Blazor.SmartComponents; +var builder = WebApplication.CreateBuilder(args); + +.... + +builder.Services.AddSyncfusionBlazor(); +builder.Services.AddSyncfusionSmartComponents(); +builder.Services.AddSingleton(); +builder.Services.AddSingleton(); + +var app = builder.Build(); +.... + +``` diff --git a/blazor/smart-paste/gemini-service.md b/blazor/smart-paste/gemini-service.md new file mode 100644 index 0000000000..72849c968e --- /dev/null +++ b/blazor/smart-paste/gemini-service.md @@ -0,0 +1,281 @@ +--- +layout: post +title: Gemini AI with Syncfusion Smart Components in Blazor App | Syncfusion +description: Learn how to implement a custom AI service using Google's Gemini API with Syncfusion Smart Components in a Blazor App. +control: Smart Paste Button +documentation: ug +--- + +# Getting Started with Smart Components using Gemini AI Service + +This guide provides step-by-step instructions for integrating and using Syncfusion's Smart Components with Gemini AI services in your Blazor App. + +## Prerequisites + +Before you begin, ensure you have: + +* [System requirements for Blazor components](https://blazor.syncfusion.com/documentation/system-requirements) +* [Gemini API Key](https://ai.google.dev/gemini-api/docs/api-key) - Obtain an API key from Google AI Studio + + +## Models + +For a complete list of models and their capabilities, visit the [Gemini Models Documentation](https://ai.google.dev/gemini-api/docs/models). + +## Next Steps + +After completing this setup, you can: + +1. [Add Smart Components to your Blazor pages](https://blazor.syncfusion.com/documentation/smart-paste/getting-started) +2. [Configure form annotations for better AI understanding](https://blazor.syncfusion.com/documentation/smart-paste/annotation) +3. [Customize the appearance and behavior of Smart Components](https://blazor.syncfusion.com/documentation/smart-paste/customization) + +--- + +## Step 1: Create a Gemini AI Service + +The `GeminiService` class serves as the foundation for integrating Gemini AI into your Blazor application. This service manages: + +* API communication with Gemini endpoints +* Request/response handling +* Message formatting +* Safety settings configuration + +### Implementation Steps + +1. Create a new class file named `GeminiService.cs` in your project +2. 
Add the following implementation: + +```csharp +public class GeminiService +{ + // HTTP client configuration for optimal performance + private static readonly Version _httpVersion = HttpVersion.Version30; + private static readonly HttpClient HttpClient = new HttpClient(new SocketsHttpHandler + { + PooledConnectionLifetime = TimeSpan.FromMinutes(30), + EnableMultipleHttp2Connections = true, + }) + { + DefaultRequestVersion = _httpVersion + }; + + // Configuration settings + private const string ApiKey = "YOUR_API_KEY_HERE"; + private const string ModelName = "YOUR_MODEL_NAME"; + + // JSON serialization settings + private static readonly JsonSerializerOptions JsonOptions = new() + { + PropertyNamingPolicy = JsonNamingPolicy.CamelCase + }; + + public GeminiService() + { + // Set up authentication headers + HttpClient.DefaultRequestHeaders.Clear(); + HttpClient.DefaultRequestHeaders.Add("x-goog-api-key", ApiKey); + } + + // Main method for interacting with Gemini API + public async Task CompleteAsync(IList chatMessages) + { + // Construct the API endpoint URL + var requestUri = $"https://generativelanguage.googleapis.com/v1beta/models/{ModelName}:generateContent"; + + // Prepare the request parameters + var parameters = BuildGeminiChatParameters(chatMessages); + var payload = new StringContent( + JsonSerializer.Serialize(parameters, JsonOptions), + Encoding.UTF8, + "application/json" + ); + + try + { + // Send request and process response + using var response = await HttpClient.PostAsync(requestUri, payload); + response.EnsureSuccessStatusCode(); + + var json = await response.Content.ReadAsStringAsync(); + var result = JsonSerializer.Deserialize(json, JsonOptions); + + // Extract and return the generated text + return result?.Candidates?.FirstOrDefault()?.Content?.Parts?.FirstOrDefault()?.Text + ?? "No response from model."; + } + catch (Exception ex) when (ex is HttpRequestException or JsonException) + { + throw new InvalidOperationException("Gemini API error.", ex); + } + } + + private GeminiChatParameters BuildGeminiChatParameters(IList messages) + { + // Convert chat messages to Gemini's format + var contents = messages.Select(m => new ResponseContent( + m.Text, + m.Role == ChatRole.User ? "user" : "model" + )).ToList(); + + // Configure request parameters including safety settings + return new GeminiChatParameters + { + Contents = contents, + GenerationConfig = new GenerationConfig + { + MaxOutputTokens = 2000, + StopSequences = new() { "END_INSERTION", "NEED_INFO", "END_RESPONSE" } + }, + SafetySettings = new() + { + new() { Category = "HARM_CATEGORY_HARASSMENT", Threshold = "BLOCK_ONLY_HIGH" }, + new() { Category = "HARM_CATEGORY_HATE_SPEECH", Threshold = "BLOCK_ONLY_HIGH" }, + new() { Category = "HARM_CATEGORY_SEXUALLY_EXPLICIT", Threshold = "BLOCK_ONLY_HIGH" }, + new() { Category = "HARM_CATEGORY_DANGEROUS_CONTENT", Threshold = "BLOCK_ONLY_HIGH" } + } + }; + } +} +``` + +## Step 2: Define Request and Response Models + +To efficiently communicate with the Gemini AI API, we need to define a set of C# classes that map to Gemini's JSON request and response format. These models ensure type safety and provide a clean interface for working with the API. + +1. Create a new file named `GeminiModels.cs` in your project +2. 
Add the following model classes: + +```csharp +// Represents a text segment in the API communication +public class Part +{ + public string Text { get; set; } +} + +// Contains an array of text parts +public class Content +{ + public Part[] Parts { get; init; } = Array.Empty(); +} + +// Represents a generated response candidate +public class Candidate +{ + public Content Content { get; init; } = new(); +} + +// The main response object from Gemini API +public class GeminiResponseObject +{ + public Candidate[] Candidates { get; init; } = Array.Empty(); +} + +// Represents a message in the chat conversation +public class ResponseContent +{ + public List Parts { get; init; } + public string Role { get; init; } // "user" or "model" + + public ResponseContent(string text, string role) + { + Parts = new List { new Part { Text = text } }; + Role = role; + } +} + +// Configuration for text generation +public class GenerationConfig +{ + // Controls randomness (0.0 to 1.0) + public int Temperature { get; init; } = 0; + + // Limits token consideration (1 to 40) + public int TopK { get; init; } = 0; + + // Nucleus sampling threshold (0.0 to 1.0) + public int TopP { get; init; } = 0; + + // Maximum tokens in response + public int MaxOutputTokens { get; init; } = 2048; + + // Sequences that stop generation + public List StopSequences { get; init; } = new(); +} + +// Controls content filtering +public class SafetySetting +{ + // Harm category to filter + public string Category { get; init; } = string.Empty; + + // Filtering threshold level + public string Threshold { get; init; } = string.Empty; +} + +// Main request parameters for Gemini API +public class GeminiChatParameters +{ + // Chat message history + public List Contents { get; init; } = new(); + + // Generation settings + public GenerationConfig GenerationConfig { get; init; } = new(); + + // Content safety filters + public List SafetySettings { get; init; } = new(); +} +``` + + +## Step 3: Create a Custom AI Service + +The Syncfusion Smart Components are designed to work with different AI backends through the `IInferenceBackend` interface. This section shows you how to create a custom implementation that connects the Smart Components to the Gemini AI service. + +### Understanding the Interface + +The `IInferenceBackend` interface is the bridge between Syncfusion Smart Components and AI services: + +1. Create a new file named `MyCustomService.cs` +2. Add the following implementation: + +```csharp +using Syncfusion.Blazor.SmartComponents; + +public class MyCustomService : IInferenceBackend +{ + private readonly GeminiService _geminiService; + + public MyCustomService(GeminiService geminiService) + { + _geminiService = geminiService; + } + + public Task GetChatResponseAsync(ChatParameters options) + { + // Forward the chat parameters to our Gemini service + return _geminiService.CompleteAsync(options.Messages); + } +} +``` + +## Step 4: Configure the Blazor App + +Configure your Blazor application to use the Gemini AI service with Syncfusion Smart Components. This involves registering necessary services and setting up the dependency injection container. + +```CSharp + +using Syncfusion.Blazor.SmartComponents; +var builder = WebApplication.CreateBuilder(args); + +.... + +builder.Services.AddSyncfusionBlazor(); +builder.Services.AddSyncfusionSmartComponents(); +builder.Services.AddSingleton(); +builder.Services.AddSingleton(); + +var app = builder.Build(); +.... 
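+// Note: GeminiService reads its API key from a hard-coded constant for brevity.
+// In a real application, consider keeping the key out of source code (user secrets,
+// environment variables, or appsettings.json) and reading it via builder.Configuration.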
+ +``` diff --git a/blazor/smart-paste/groq-service.md b/blazor/smart-paste/groq-service.md new file mode 100644 index 0000000000..ce79005d84 --- /dev/null +++ b/blazor/smart-paste/groq-service.md @@ -0,0 +1,217 @@ +--- +layout: post +title: Gemini AI with Syncfusion Smart Components in Blazor App | Syncfusion +description: Learn how to implement a custom AI service using Groq API with Syncfusion Smart Components in a Blazor App. +control: Smart Paste Button +documentation: ug +--- + +# Getting Started with Smart Components using Groq AI Service + +This guide provides step-by-step instructions for integrating and using Syncfusion's Smart Components with Groq AI services in your Blazor App. + +## Prerequisites + +Before you begin, ensure you have: + +* [System requirements for Blazor components](https://blazor.syncfusion.com/documentation/system-requirements) +* Groq account and API key (see setup instructions below) + +### Setting Up Groq + +1. **Create a Groq Account** + * Visit [Groq Cloud Console](https://console.groq.com) + * Sign up for a new account or sign in + * Complete the verification process + +2. **Get Your API Key** + * Navigate to [API Keys](https://console.groq.com/keys) in the Groq Console + * Click "Create API Key" + +### Models + +For detailed model specifications and capabilities, visit the [Groq Models Documentation](https://console.groq.com/docs/models). + +## Next Steps + +After completing this setup, you can: + +1. [Add Smart Components to your Blazor pages](https://blazor.syncfusion.com/documentation/smart-paste/getting-started) +2. [Configure form annotations for better AI understanding](https://blazor.syncfusion.com/documentation/smart-paste/annotation) +3. [Customize the appearance and behavior of Smart Components](https://blazor.syncfusion.com/documentation/smart-paste/customization) + +--- + +## Step 1: Create a Groq AI Service + +In this step, we'll create a service class that handles all interactions with the Groq API. This service will: + +* Manage API authentication +* Send chat messages to Groq's LLM models +* Process responses for use in your application + +### Implementation Steps + +1. Create a new file named `GroqService.cs` in your project's `Services` folder +2. Add the required namespaces for HTTP and JSON handling +3. Implement the service class following the code below + + +```csharp + +using System.Net; +using System.Text; +using System.Text.Json; +using Microsoft.Extensions.AI; +public class GroqService +{ + private const string ApiKey = "Your API key"; + private const string ModelName = "Your Model Name"; + private const string Endpoint = "https://api.groq.com/openai/v1/chat/completions"; + + private static readonly HttpClient HttpClient = new(new SocketsHttpHandler + { + PooledConnectionLifetime = TimeSpan.FromMinutes(30), + EnableMultipleHttp2Connections = true, + }) + { + DefaultRequestVersion = HttpVersion.Version30 + }; + + private static readonly JsonSerializerOptions JsonOptions = new() + { + PropertyNamingPolicy = JsonNamingPolicy.CamelCase + }; + + public GroqService() + { + if (!HttpClient.DefaultRequestHeaders.Contains("Authorization")) + { + HttpClient.DefaultRequestHeaders.Clear(); + HttpClient.DefaultRequestHeaders.Add("Authorization", $"Bearer {ApiKey}"); + } + } + + public async Task CompleteAsync(IList chatMessages) + { + var requestPayload = new GroqChatParameters + { + Model = ModelName, + Messages = chatMessages.Select(m => new Message + { + Role = m.Role == ChatRole.User ? 
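+                    // Groq's endpoint is OpenAI-compatible, so non-user messages are forwarded with the "assistant" role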
"user" : "assistant", + Content = m.Text + }).ToList(), + Stop = new() { "END_INSERTION", "NEED_INFO", "END_RESPONSE" } + }; + + var content = new StringContent(JsonSerializer.Serialize(requestPayload, JsonOptions), Encoding.UTF8, "application/json"); + + try + { + var response = await HttpClient.PostAsync(Endpoint, content); + response.EnsureSuccessStatusCode(); + + var responseString = await response.Content.ReadAsStringAsync(); + var responseObject = JsonSerializer.Deserialize(responseString, JsonOptions); + + return responseObject?.Choices?.FirstOrDefault()?.Message?.Content ?? "No response from model."; + } + catch (Exception ex) when (ex is HttpRequestException || ex is JsonException) + { + throw new InvalidOperationException("Failed to communicate with Groq API.", ex); + } + } +} + + +``` + +## Step 2: Define Request and Response Models + +To communicate effectively with the Groq API, we need to define C# classes that map to Groq's API format. + +1. Create a new file named `GroqModels.cs` in your project +2. Add the following model classes that represent the API contract + +### Key Components + +* **Message**: Represents a single chat message with role and content +* **GroqChatParameters**: The main request object sent to Groq +* **GroqResponseObject**: The response received from Groq +* **Choice**: Represents a single response option from the model + +```CSharp +public class Choice +{ + public Message Message { get; set; } +} + +public class Message +{ + public string Role { get; set; } + public string Content { get; set; } +} + +public class GroqChatParameters +{ + public string Model { get; set; } + public List Messages { get; set; } + public List Stop { get; set; } +} + +public class GroqResponseObject +{ + public string Model { get; set; } + public List Choices { get; set; } +} +``` + +## Step 3: Create a Custom AI Service + +Create a bridge between Syncfusion's Smart Components and our Groq service. This enables the Smart Components to use Groq's AI capabilities through a `IInferenceBackend` interface. + +The `IInferenceBackend` interface is part of Syncfusion's infrastructure that allows Smart Components to work with different AI providers: + +1. Create a new file named `MyCustomService.cs` +2. Add the Syncfusion namespace +3. Implement the interface as shown below + + +```CSharp +using Syncfusion.Blazor.SmartComponents; +public class MyCustomService : IInferenceBackend +{ + public GroqService _groqServices; + public MyCustomService(GroqService groqServices) { + _groqServices = groqServices; + } + public Task GetChatResponseAsync(ChatParameters options) + { + return _groqServices.CompleteAsync(options.Messages); + throw new NotImplementedException(); + } +} +``` + +## Step 4: Configure the Blazor App + +Configure your Blazor application to use the Groq AI service with Syncfusion Smart Components. This involves registering necessary services and setting up the dependency injection container. + +```CSharp + +using Syncfusion.Blazor.SmartComponents; +var builder = WebApplication.CreateBuilder(args); + +.... + +builder.Services.AddSyncfusionBlazor(); +builder.Services.AddSyncfusionSmartComponents(); +builder.Services.AddSingleton(); +builder.Services.AddSingleton(); + +var app = builder.Build(); +.... 
+ +``` + diff --git a/blazor/smart-textarea/claude-service.md b/blazor/smart-textarea/claude-service.md new file mode 100644 index 0000000000..8ecafb4f3c --- /dev/null +++ b/blazor/smart-textarea/claude-service.md @@ -0,0 +1,218 @@ +--- +layout: post +title: Custom AI Service with Syncfusion Smart Components in Blazor Web App +description: Learn how to integrate and use the Syncfusion component in a Blazor Web App with Claude AI services. +platform: Blazor +control: Smart TextArea +documentation: ug +--- + +# Getting Started with Smart Components using Claude AI + +This guide walks you through integrating Anthropic's Claude AI with Syncfusion Smart Components in your Blazor App. + +## Prerequisites + +Before you begin, ensure you have: + +* [System requirements for Blazor components](https://blazor.syncfusion.com/documentation/system-requirements) +* Claude account and API key (see setup instructions below) + +### Setting Up Claude + +1. **Create an Anthropic Account** + * Visit [Anthropic Console](https://console.anthropic.com) + * Sign up for a new account + +2. **Get Your API Key** + * Navigate to [API Keys](https://console.anthropic.com/settings/keys) + * Click "Create Key" + +### Models + +For detailed specifications and capabilities, visit the [Claude Models Documentation](https://docs.anthropic.com/claude/docs/models-overview). + +--- + +## Next Steps + +After completing this setup, you can: + +1. [Add Smart TextArea to your Blazor pages](https://blazor.syncfusion.com/documentation/smart-textarea/getting-started) +2. [Configure AI integration in Smart TextArea](https://blazor.syncfusion.com/documentation/smart-textarea/ai-integration) +3. [Customize Smart TextArea features](https://blazor.syncfusion.com/documentation/smart-textarea/customization) + +--- + +## Step 1: Create a Claude AI Service + +In this step, we'll create a service that handles all communication with Claude's AI. This service is to: + +* Manage secure API authentication +* Send prompts to Claude's models +* Process AI responses + +### Implementation Steps + +1. Create a new file named `ClaudeAIService.cs` +2. Add the required namespaces for HTTP and JSON handling +3. Implement the service class following the code below + +```csharp +using Microsoft.Extensions.AI; +using System.Net; +using System.Text; +using System.Text.Json; +public class ClaudeAIService +{ + private const string ApiKey = "Your API Key"; + private const string ModelName = "Your Model Name"; + private const string Endpoint = "https://api.anthropic.com/v1/messages"; + + private static readonly HttpClient HttpClient = new(new SocketsHttpHandler + { + PooledConnectionLifetime = TimeSpan.FromMinutes(30), + EnableMultipleHttp2Connections = true, + }) + { + DefaultRequestVersion = HttpVersion.Version30 + }; + + private static readonly JsonSerializerOptions JsonOptions = new() + { + PropertyNamingPolicy = JsonNamingPolicy.CamelCase + }; + + public ClaudeAIService() + { + if (!HttpClient.DefaultRequestHeaders.Contains("x-api-key")) + { + HttpClient.DefaultRequestHeaders.Clear(); + HttpClient.DefaultRequestHeaders.Add("x-api-key", ApiKey); + HttpClient.DefaultRequestHeaders.Add("anthropic-version", "2023-06-01"); + } + } + + public async Task CompleteAsync(IList chatMessages) + { + + var requestBody = new ClaudeChatRequest + { + Model = ModelName, + Max_tokens = 1000, + Messages = chatMessages.Select(m => new ClaudeMessage + { + Role = m.Role == ChatRole.User ? 
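+                    // Map the Microsoft.Extensions.AI ChatRole onto the two roles Claude's Messages API understands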
"user" : "assistant", + Content = m.Text + }).ToList(), + Stop_sequences = new() { "END_INSERTION", "NEED_INFO", "END_RESPONSE" } + }; + + var json = JsonSerializer.Serialize(requestBody, JsonOptions); + var content = new StringContent(json, Encoding.UTF8, "application/json"); + + try + { + var response = await HttpClient.PostAsync(Endpoint, content); + response.EnsureSuccessStatusCode(); + + var responseString = await response.Content.ReadAsStringAsync(); + var responseObject = JsonSerializer.Deserialize(responseString, JsonOptions); + + return responseObject?.Content?.FirstOrDefault()?.Text ?? "No response from Claude model."; + } + catch (Exception ex) when (ex is HttpRequestException || ex is JsonException) + { + throw new InvalidOperationException("Failed to communicate with Claude API.", ex); + } + } +} +``` + +## Step 2: Define Request and Response Models + +To effectively communicate with Claude's API, we need to define structured models for our requests and responses. These models ensure type safety and make the code more maintainable. + +### Request Models +The request models define the structure of data we send to Claude. + +### Response Models +The response models handle Claude's API responses. + +```CSharp +public class ClaudeChatRequest +{ + public string Model { get; set; } + public int Max_tokens { get; set; } + public List Messages { get; set; } + public List Stop_sequences { get; set; } +} + +public class ClaudeMessage +{ + public string Role { get; set; } // "user" or "assistant" + public string Content { get; set; } +} + +// Claude response format +public class ClaudeChatResponse +{ + public List Content { get; set; } +} + +public class ClaudeContentBlock +{ + public string Text { get; set; } +} +``` + + +## Step 3: Create a Custom AI Service + +To integrate Claude with Syncfusion Smart Components, you need to implement the `IInferenceBackend` interface. This interface acts as a bridge between Syncfusion's components and Claude's AI capabilities. + +The `IInferenceBackend` interface is designed to allow custom AI service implementations. It defines the contract for how Syncfusion components communicate with AI services: + +1. Create a new file named `MyCustomService.cs` +2. Add the Syncfusion namespace +3. Implement the interface as shown below + +```CSharp +using Syncfusion.Blazor.SmartComponents; +public class MyCustomService : IInferenceBackend +{ + private readonly ClaudeAIService _ClaudeService; + + public MyCustomService(ClaudeAIService ClaudeService) + { + _ClaudeService = ClaudeService; + } + + public Task GetChatResponseAsync(ChatParameters options) + { + return _ClaudeService.CompleteAsync(options.Messages); + } +} +``` + + +## Step 4: Configure the Blazor App + +Configure your Blazor application to use the Claude AI service with Syncfusion Smart Components. This involves registering necessary services and setting up the dependency injection container. + +```CSharp + +using Syncfusion.Blazor.SmartComponents; +var builder = WebApplication.CreateBuilder(args); + +.... + +builder.Services.AddSyncfusionBlazor(); +builder.Services.AddSyncfusionSmartComponents(); +builder.Services.AddSingleton(); +builder.Services.AddSingleton(); + +var app = builder.Build(); +.... 
+ +``` \ No newline at end of file diff --git a/blazor/smart-textarea/custom-inference-backend.md b/blazor/smart-textarea/custom-inference-backend.md new file mode 100644 index 0000000000..14bf769f49 --- /dev/null +++ b/blazor/smart-textarea/custom-inference-backend.md @@ -0,0 +1,52 @@ +--- +layout: post +title: Custom AI Service Integration with Smart Components +description: Learn how to use IInferenceBackend to integrate custom AI services with Syncfusion Smart Components +platform: Blazor +control: Smart TextArea +documentation: ug +--- + +# Custom AI Service Integration + +## Overview + +Syncfusion Smart Components provide built-in support for OpenAI and Azure OpenAI services. However, you can also integrate other AI services using the `IInferenceBackend` interface, which acts as a bridge between Smart Components and your custom AI service. + + +## IInferenceBackend Interface + +The `IInferenceBackend` interface defines a simple contract for AI service integration: + +```csharp +public interface IInferenceBackend +{ + Task GetChatResponseAsync(ChatParameters options); +} +``` + +This interface enables: +- Consistent communication between components and AI services +- Easy switching between different AI providers + + +## Implemented AI Services + +Here are examples of AI services integrated using the `IInferenceBackend` interface: + +| Service | Description | Documentation | +|---------|-------------|---------------| +| Claude | Anthropic's Claude AI | [Claude Integration](claude-service.md) | +| DeepSeek | DeepSeek's AI platform | [DeepSeek Integration](deepseek-service.md) | +| Groq | Groq inference engine | [Groq Integration](groq-service.md) | +| Gemini | Google's Gemini AI | [Gemini Integration](gemini-service.md) | + + +## Service Registration + +Register your custom implementation in `Program.cs`: + +```csharp +builder.Services.AddSingleton(); +``` + diff --git a/blazor/smart-textarea/deepseek-service.md b/blazor/smart-textarea/deepseek-service.md new file mode 100644 index 0000000000..43727da9d6 --- /dev/null +++ b/blazor/smart-textarea/deepseek-service.md @@ -0,0 +1,201 @@ +--- +layout: post +title: Custom AI Service with Syncfusion Smart Components in Blazor Web App +description: Learn how to integrate and use the Syncfusion component in a Blazor Web App with DeepSeek AI services. +platform: Blazor +control: Smart TextArea +documentation: ug +--- + +# Getting Started with Smart Components using DeepSeek AI + +This guide demonstrates how to integrate DeepSeek's powerful AI capabilities with Syncfusion Smart Components in your Blazor App. + +## Prerequisites + +Before you begin, ensure you have: + +* [System requirements for Blazor components](https://blazor.syncfusion.com/documentation/system-requirements) +* DeepSeek account and API key (see setup instructions below) + +### Setting Up DeepSeek + +1. **DeepSeek API Access** + * Create a DeepSeek account at [platform.deepseek.com](https://platform.deepseek.com) + * Navigate to [API Keys](https://platform.deepseek.com/api_keys) + +2. **DeepSeek Models** + + For detailed specifications and pricing, visit the [DeepSeek Models Documentation](https://api-docs.deepseek.com/quick_start/pricing). + + +--- + +## Next Steps + +After completing this setup, you can: + +1. [Add Smart TextArea to your Blazor pages](https://blazor.syncfusion.com/documentation/smart-textarea/getting-started) +2. [Configure AI integration in Smart TextArea](https://blazor.syncfusion.com/documentation/smart-textarea/ai-integration) +3. 
[Customize Smart TextArea features](https://blazor.syncfusion.com/documentation/smart-textarea/customization) + +--- + +## Step 1: Create a DeepSeek AI Service + +The `DeepSeekAIService` class is responsible for managing all interactions with the DeepSeek API. This service: + +### Implementation Steps + +1. Create a new file named `DeepSeekAIService.cs` +2. Add the following using statements for required dependencies +3. Implement the service class as shown below + +```csharp +using System.Text; +using System.Text.Json; +using System.Net; +using Microsoft.Extensions.AI; +public class DeepSeekAIService +{ + private const string ApiKey = "Your API Key"; + private const string ModelName = "Your Model Name"; + private const string Endpoint = "https://api.deepseek.com/v1/chat/completions"; + + private static readonly HttpClient HttpClient = new(new SocketsHttpHandler + { + PooledConnectionLifetime = TimeSpan.FromMinutes(30), + EnableMultipleHttp2Connections = true, + }) + { + DefaultRequestVersion = HttpVersion.Version30 + }; + + private static readonly JsonSerializerOptions JsonOptions = new() + { + PropertyNamingPolicy = JsonNamingPolicy.CamelCase + }; + + public DeepSeekAIService() + { + if (!HttpClient.DefaultRequestHeaders.Contains("Authorization")) + { + HttpClient.DefaultRequestHeaders.Clear(); + HttpClient.DefaultRequestHeaders.Add("Authorization", $"Bearer {ApiKey}"); + } + } + + public async Task CompleteAsync(IList chatMessages) + { + var requestBody = new DeepSeekChatRequest + { + Model = ModelName, + Temperature = 0.7f, + Messages = chatMessages.Select(m => new DeepSeekMessage + { + Role = m.Role == ChatRole.User ? "user" : "system", + Content = m.Text + }).ToList() + }; + + + var json = JsonSerializer.Serialize(requestBody, JsonOptions); + var content = new StringContent(json, Encoding.UTF8, "application/json"); + + try + { + var response = await HttpClient.PostAsync(Endpoint, content); + response.EnsureSuccessStatusCode(); + + var responseString = await response.Content.ReadAsStringAsync(); + var responseObject = JsonSerializer.Deserialize(responseString, JsonOptions); + + return responseObject?.Choices?.FirstOrDefault()?.Message?.Content ?? "No response from DeepSeek."; + } + catch (Exception ex) when (ex is HttpRequestException || ex is JsonException) + { + + throw new InvalidOperationException("Failed to communicate with DeepSeek API.", ex); + } + } +} +``` + +## Step 2: Define Request and Response Models + +To effectively communicate with DeepSeek's API, we need to create strongly-typed models that represent the request and response data structures. + +Create a new file named `DeepSeekModels.cs` with the following models: + +```CSharp +public class DeepSeekMessage +{ + public string Role { get; set; } + public string Content { get; set; } +} + +public class DeepSeekChatRequest +{ + public string Model { get; set; } + public float Temperature { get; set; } + public List Messages { get; set; } +} + +public class DeepSeekChatResponse +{ + public List Choices { get; set; } +} + +public class DeepSeekChoice +{ + public DeepSeekMessage Message { get; set; } +} +``` + +## Step 3: Create a Custom AI Service + +To integrate DeepSeek with Syncfusion Smart Components, we'll create a custom implementation of the `IInferenceBackend` interface. This interface acts as a bridge between Syncfusion components and your AI service. + +The `IInferenceBackend` interface is the bridge between Syncfusion Smart Components and AI services: + +1. Create a new file named `MyCustomService.cs` +2. 
Add the following implementation: + +```csharp +using Syncfusion.Blazor.SmartComponents; +public class MyCustomService : IInferenceBackend +{ + private readonly DeepSeekAIService _DeepSeekService; + + public MyCustomService(DeepSeekAIService DeepSeekService) + { + _DeepSeekService = DeepSeekService; + } + + public Task GetChatResponseAsync(ChatParameters options) + { + return _DeepSeekService.CompleteAsync(options.Messages); + } +} +``` + +## Step 4: Configure the Blazor App + +Configure your Blazor application to use the DeepSeek AI service with Syncfusion Smart Components. This involves registering necessary services and setting up the dependency injection container. + +```CSharp + +using Syncfusion.Blazor.SmartComponents; +var builder = WebApplication.CreateBuilder(args); + +.... + +builder.Services.AddSyncfusionBlazor(); +builder.Services.AddSyncfusionSmartComponents(); +builder.Services.AddSingleton(); +builder.Services.AddSingleton(); + +var app = builder.Build(); +.... + +``` diff --git a/blazor/smart-textarea/gemini-service.md b/blazor/smart-textarea/gemini-service.md new file mode 100644 index 0000000000..adbae07fd8 --- /dev/null +++ b/blazor/smart-textarea/gemini-service.md @@ -0,0 +1,281 @@ +--- +layout: post +title: Gemini AI with Syncfusion Smart Components in Blazor App | Syncfusion +description: Learn how to implement a custom AI service using Google's Gemini API with Syncfusion Smart Components in a Blazor App. +control: Smart TextArea +documentation: ug +--- + +# Getting Started with Smart Components using Gemini AI Service + +This guide provides step-by-step instructions for integrating and using Syncfusion's Smart Components with Gemini AI services in your Blazor App. + +## Prerequisites + +Before you begin, ensure you have: + +* [System requirements for Blazor components](https://blazor.syncfusion.com/documentation/system-requirements) +* [Gemini API Key](https://ai.google.dev/gemini-api/docs/api-key) - Obtain an API key from Google AI Studio + + +## Models + +For a complete list of models and their capabilities, visit the [Gemini Models Documentation](https://ai.google.dev/gemini-api/docs/models). + +## Next Steps + +After completing this setup, you can: + +1. [Add Smart TextArea to your Blazor pages](https://blazor.syncfusion.com/documentation/smart-textarea/getting-started) +2. [Configure AI integration in Smart TextArea](https://blazor.syncfusion.com/documentation/smart-textarea/ai-integration) +3. [Customize Smart TextArea features](https://blazor.syncfusion.com/documentation/smart-textarea/customization) + +--- + +## Step 1: Create a Gemini AI Service + +The `GeminiService` class serves as the foundation for integrating Gemini AI into your Blazor application. This service manages: + +* API communication with Gemini endpoints +* Request/response handling +* Message formatting +* Safety settings configuration + +### Implementation Steps + +1. Create a new class file named `GeminiService.cs` in your project +2. 
Add the following implementation: + +```csharp +public class GeminiService +{ + // HTTP client configuration for optimal performance + private static readonly Version _httpVersion = HttpVersion.Version30; + private static readonly HttpClient HttpClient = new HttpClient(new SocketsHttpHandler + { + PooledConnectionLifetime = TimeSpan.FromMinutes(30), + EnableMultipleHttp2Connections = true, + }) + { + DefaultRequestVersion = _httpVersion + }; + + // Configuration settings + private const string ApiKey = "YOUR_API_KEY_HERE"; + private const string ModelName = "YOUR_MODEL_NAME"; + + // JSON serialization settings + private static readonly JsonSerializerOptions JsonOptions = new() + { + PropertyNamingPolicy = JsonNamingPolicy.CamelCase + }; + + public GeminiService() + { + // Set up authentication headers + HttpClient.DefaultRequestHeaders.Clear(); + HttpClient.DefaultRequestHeaders.Add("x-goog-api-key", ApiKey); + } + + // Main method for interacting with Gemini API + public async Task CompleteAsync(IList chatMessages) + { + // Construct the API endpoint URL + var requestUri = $"https://generativelanguage.googleapis.com/v1beta/models/{ModelName}:generateContent"; + + // Prepare the request parameters + var parameters = BuildGeminiChatParameters(chatMessages); + var payload = new StringContent( + JsonSerializer.Serialize(parameters, JsonOptions), + Encoding.UTF8, + "application/json" + ); + + try + { + // Send request and process response + using var response = await HttpClient.PostAsync(requestUri, payload); + response.EnsureSuccessStatusCode(); + + var json = await response.Content.ReadAsStringAsync(); + var result = JsonSerializer.Deserialize(json, JsonOptions); + + // Extract and return the generated text + return result?.Candidates?.FirstOrDefault()?.Content?.Parts?.FirstOrDefault()?.Text + ?? "No response from model."; + } + catch (Exception ex) when (ex is HttpRequestException or JsonException) + { + throw new InvalidOperationException("Gemini API error.", ex); + } + } + + private GeminiChatParameters BuildGeminiChatParameters(IList messages) + { + // Convert chat messages to Gemini's format + var contents = messages.Select(m => new ResponseContent( + m.Text, + m.Role == ChatRole.User ? "user" : "model" + )).ToList(); + + // Configure request parameters including safety settings + return new GeminiChatParameters + { + Contents = contents, + GenerationConfig = new GenerationConfig + { + MaxOutputTokens = 2000, + StopSequences = new() { "END_INSERTION", "NEED_INFO", "END_RESPONSE" } + }, + SafetySettings = new() + { + new() { Category = "HARM_CATEGORY_HARASSMENT", Threshold = "BLOCK_ONLY_HIGH" }, + new() { Category = "HARM_CATEGORY_HATE_SPEECH", Threshold = "BLOCK_ONLY_HIGH" }, + new() { Category = "HARM_CATEGORY_SEXUALLY_EXPLICIT", Threshold = "BLOCK_ONLY_HIGH" }, + new() { Category = "HARM_CATEGORY_DANGEROUS_CONTENT", Threshold = "BLOCK_ONLY_HIGH" } + } + }; + } +} +``` + +## Step 2: Define Request and Response Models + +To efficiently communicate with the Gemini AI API, we need to define a set of C# classes that map to Gemini's JSON request and response format. These models ensure type safety and provide a clean interface for working with the API. + +1. Create a new file named `GeminiModels.cs` in your project +2. 
Add the following model classes: + +```csharp +// Represents a text segment in the API communication +public class Part +{ + public string Text { get; set; } +} + +// Contains an array of text parts +public class Content +{ + public Part[] Parts { get; init; } = Array.Empty(); +} + +// Represents a generated response candidate +public class Candidate +{ + public Content Content { get; init; } = new(); +} + +// The main response object from Gemini API +public class GeminiResponseObject +{ + public Candidate[] Candidates { get; init; } = Array.Empty(); +} + +// Represents a message in the chat conversation +public class ResponseContent +{ + public List Parts { get; init; } + public string Role { get; init; } // "user" or "model" + + public ResponseContent(string text, string role) + { + Parts = new List { new Part { Text = text } }; + Role = role; + } +} + +// Configuration for text generation +public class GenerationConfig +{ + // Controls randomness (0.0 to 1.0) + public int Temperature { get; init; } = 0; + + // Limits token consideration (1 to 40) + public int TopK { get; init; } = 0; + + // Nucleus sampling threshold (0.0 to 1.0) + public int TopP { get; init; } = 0; + + // Maximum tokens in response + public int MaxOutputTokens { get; init; } = 2048; + + // Sequences that stop generation + public List StopSequences { get; init; } = new(); +} + +// Controls content filtering +public class SafetySetting +{ + // Harm category to filter + public string Category { get; init; } = string.Empty; + + // Filtering threshold level + public string Threshold { get; init; } = string.Empty; +} + +// Main request parameters for Gemini API +public class GeminiChatParameters +{ + // Chat message history + public List Contents { get; init; } = new(); + + // Generation settings + public GenerationConfig GenerationConfig { get; init; } = new(); + + // Content safety filters + public List SafetySettings { get; init; } = new(); +} +``` + + +## Step 3: Create a Custom AI Service + +The Syncfusion Smart Components are designed to work with different AI backends through the `IInferenceBackend` interface. This section shows you how to create a custom implementation that connects the Smart Components to the Gemini AI service. + +### Understanding the Interface + +The `IInferenceBackend` interface is the bridge between Syncfusion Smart Components and AI services: + +1. Create a new file named `MyCustomService.cs` +2. Add the following implementation: + +```csharp +using Syncfusion.Blazor.SmartComponents; + +public class MyCustomService : IInferenceBackend +{ + private readonly GeminiService _geminiService; + + public MyCustomService(GeminiService geminiService) + { + _geminiService = geminiService; + } + + public Task GetChatResponseAsync(ChatParameters options) + { + // Forward the chat parameters to our Gemini service + return _geminiService.CompleteAsync(options.Messages); + } +} +``` + +## Step 4: Configure the Blazor App + +Configure your Blazor application to use the Gemini AI service with Syncfusion Smart Components. This involves registering necessary services and setting up the dependency injection container. + +```CSharp + +using Syncfusion.Blazor.SmartComponents; +var builder = WebApplication.CreateBuilder(args); + +.... + +builder.Services.AddSyncfusionBlazor(); +builder.Services.AddSyncfusionSmartComponents(); +builder.Services.AddSingleton(); +builder.Services.AddSingleton(); + +var app = builder.Build(); +.... 
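+// Because the Gemini-backed service is a singleton, the stop sequences and safety settings
+// configured in BuildGeminiChatParameters apply to every Smart TextArea suggestion request.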
+ +``` diff --git a/blazor/smart-textarea/groq-service.md b/blazor/smart-textarea/groq-service.md new file mode 100644 index 0000000000..eaa5c13409 --- /dev/null +++ b/blazor/smart-textarea/groq-service.md @@ -0,0 +1,217 @@ +--- +layout: post +title: Gemini AI with Syncfusion Smart Components in Blazor App | Syncfusion +description: Learn how to implement a custom AI service using Groq API with Syncfusion Smart Components in a Blazor App. +control: Smart TextArea +documentation: ug +--- + +# Getting Started with Smart Components using Groq AI Service + +This guide provides step-by-step instructions for integrating and using Syncfusion's Smart Components with Groq AI services in your Blazor App. + +## Prerequisites + +Before you begin, ensure you have: + +* [System requirements for Blazor components](https://blazor.syncfusion.com/documentation/system-requirements) +* Groq account and API key (see setup instructions below) + +### Setting Up Groq + +1. **Create a Groq Account** + * Visit [Groq Cloud Console](https://console.groq.com) + * Sign up for a new account or sign in + * Complete the verification process + +2. **Get Your API Key** + * Navigate to [API Keys](https://console.groq.com/keys) in the Groq Console + * Click "Create API Key" + +### Models + +For detailed model specifications and capabilities, visit the [Groq Models Documentation](https://console.groq.com/docs/models). + +## Next Steps + +After completing this setup, you can: + +1. [Add Smart TextArea to your Blazor pages](https://blazor.syncfusion.com/documentation/smart-textarea/getting-started) +2. [Configure AI integration in Smart TextArea](https://blazor.syncfusion.com/documentation/smart-textarea/ai-integration) +3. [Customize Smart TextArea features](https://blazor.syncfusion.com/documentation/smart-textarea/customization) + +--- + +## Step 1: Create a Groq AI Service + +In this step, we'll create a service class that handles all interactions with the Groq API. This service will: + +* Manage API authentication +* Send chat messages to Groq's LLM models +* Process responses for use in your application + +### Implementation Steps + +1. Create a new file named `GroqService.cs` in your project's `Services` folder +2. Add the required namespaces for HTTP and JSON handling +3. Implement the service class following the code below + + +```csharp + +using System.Net; +using System.Text; +using System.Text.Json; +using Microsoft.Extensions.AI; +public class GroqService +{ + private const string ApiKey = "Your API key"; + private const string ModelName = "Your Model Name"; + private const string Endpoint = "https://api.groq.com/openai/v1/chat/completions"; + + private static readonly HttpClient HttpClient = new(new SocketsHttpHandler + { + PooledConnectionLifetime = TimeSpan.FromMinutes(30), + EnableMultipleHttp2Connections = true, + }) + { + DefaultRequestVersion = HttpVersion.Version30 + }; + + private static readonly JsonSerializerOptions JsonOptions = new() + { + PropertyNamingPolicy = JsonNamingPolicy.CamelCase + }; + + public GroqService() + { + if (!HttpClient.DefaultRequestHeaders.Contains("Authorization")) + { + HttpClient.DefaultRequestHeaders.Clear(); + HttpClient.DefaultRequestHeaders.Add("Authorization", $"Bearer {ApiKey}"); + } + } + + public async Task CompleteAsync(IList chatMessages) + { + var requestPayload = new GroqChatParameters + { + Model = ModelName, + Messages = chatMessages.Select(m => new Message + { + Role = m.Role == ChatRole.User ? 
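+                    // Only "user" and "assistant" roles are forwarded; any other ChatRole value falls back to "assistant"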
"user" : "assistant", + Content = m.Text + }).ToList(), + Stop = new() { "END_INSERTION", "NEED_INFO", "END_RESPONSE" } + }; + + var content = new StringContent(JsonSerializer.Serialize(requestPayload, JsonOptions), Encoding.UTF8, "application/json"); + + try + { + var response = await HttpClient.PostAsync(Endpoint, content); + response.EnsureSuccessStatusCode(); + + var responseString = await response.Content.ReadAsStringAsync(); + var responseObject = JsonSerializer.Deserialize(responseString, JsonOptions); + + return responseObject?.Choices?.FirstOrDefault()?.Message?.Content ?? "No response from model."; + } + catch (Exception ex) when (ex is HttpRequestException || ex is JsonException) + { + throw new InvalidOperationException("Failed to communicate with Groq API.", ex); + } + } +} + + +``` + +## Step 2: Define Request and Response Models + +To communicate effectively with the Groq API, we need to define C# classes that map to Groq's API format. + +1. Create a new file named `GroqModels.cs` in your project +2. Add the following model classes that represent the API contract + +### Key Components + +* **Message**: Represents a single chat message with role and content +* **GroqChatParameters**: The main request object sent to Groq +* **GroqResponseObject**: The response received from Groq +* **Choice**: Represents a single response option from the model + +```CSharp +public class Choice +{ + public Message Message { get; set; } +} + +public class Message +{ + public string Role { get; set; } + public string Content { get; set; } +} + +public class GroqChatParameters +{ + public string Model { get; set; } + public List Messages { get; set; } + public List Stop { get; set; } +} + +public class GroqResponseObject +{ + public string Model { get; set; } + public List Choices { get; set; } +} +``` + +## Step 3: Create a Custom AI Service + +Create a bridge between Syncfusion's Smart Components and our Groq service. This enables the Smart Components to use Groq's AI capabilities through a `IInferenceBackend` interface. + +The `IInferenceBackend` interface is part of Syncfusion's infrastructure that allows Smart Components to work with different AI providers: + +1. Create a new file named `MyCustomService.cs` +2. Add the Syncfusion namespace +3. Implement the interface as shown below + + +```CSharp +using Syncfusion.Blazor.SmartComponents; +public class MyCustomService : IInferenceBackend +{ + public GroqService _groqServices; + public MyCustomService(GroqService groqServices) { + _groqServices = groqServices; + } + public Task GetChatResponseAsync(ChatParameters options) + { + return _groqServices.CompleteAsync(options.Messages); + throw new NotImplementedException(); + } +} +``` + +## Step 4: Configure the Blazor App + +Configure your Blazor application to use the Groq AI service with Syncfusion Smart Components. This involves registering necessary services and setting up the dependency injection container. + +```CSharp + +using Syncfusion.Blazor.SmartComponents; +var builder = WebApplication.CreateBuilder(args); + +.... + +builder.Services.AddSyncfusionBlazor(); +builder.Services.AddSyncfusionSmartComponents(); +builder.Services.AddSingleton(); +builder.Services.AddSingleton(); + +var app = builder.Build(); +.... + +``` +