Create AI.cs #117

Merged
merged 3 commits into main on Feb 6, 2025
Conversation

josefkedwards
Collaborator

@josefkedwards commented Feb 6, 2025


Summary by CodeRabbit

  • New Features
    • Introduced an intelligent AI system that processes novel user inputs, automatically consolidating recent interactions into long-term memory based on usage thresholds.
    • Enabled a self-learning capability that retrieves previous responses to enhance query handling.
    • Integrated robust error handling for smoother interactions with external AI services.
    • Added support for sentiment analysis and speech recognition features.
    • Implemented real-time communication capabilities through WebSocket connections.

using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using Azure;
using Azure.Data.Tables;
using Microsoft.Azure.WebJobs;
using Microsoft.Azure.WebJobs.Extensions.DurableTask;
using Microsoft.Extensions.Logging;
using OpenAI_API;

namespace CopilotAzureChatGPT5o
{
    public static class AIAgentOrchestrator
    {
        [FunctionName("AIAgentOrchestrator")]
        public static async Task Run(
            [OrchestrationTrigger] IDurableOrchestrationContext context,
            ILogger log)
        {
            // Retrieve short-term memory (STM) or initialize
            var shortTermMemory = context.GetInput<List<string>>() ?? new List<string>();

            // Wait for a new novel input via an external event
            string novelInput = await context.WaitForExternalEvent<string>("NovelInput");
            shortTermMemory.Add(novelInput);
            log.LogInformation($"[Orchestrator] Received novel input: {novelInput}");

            // Process the input using OpenAI via an activity function
            string openAiResponse = await context.CallActivityAsync<string>("ProcessNovelInputActivity", novelInput);
            log.LogInformation($"[Orchestrator] OpenAI response: {openAiResponse}");

            // Check if STM needs to be consolidated into LTM
            if (shortTermMemory.Count >= 5 || context.CurrentUtcDateTime.Subtract(context.StartTime).TotalMinutes >= 10)
            {
                await context.CallActivityAsync("ConsolidateMemory", shortTermMemory);
                log.LogInformation("[Orchestrator] STM consolidated into LTM.");
                shortTermMemory.Clear();
            }

            // Prevent excessive history buildup by using ContinueAsNew
            context.ContinueAsNew(shortTermMemory);
        }
    }

    public static class ProcessNovelInputActivity
    {
        [FunctionName("ProcessNovelInputActivity")]
        public static async Task<string> Run(
            [ActivityTrigger] string novelInput,
            ILogger log)
        {
            string apiKey = Environment.GetEnvironmentVariable("OPENAI_API_KEY");
            if (string.IsNullOrEmpty(apiKey))
            {
                log.LogError("OpenAI API key is not set in environment variables.");
                return "Error: No API key";
            }

            var openAiClient = new OpenAIAPI(apiKey);
            var completionRequest = new OpenAI_API.Completions.CompletionRequest
            {
                Prompt = $"Process this input and provide insights: {novelInput}",
                MaxTokens = 100,
                Temperature = 0.7
            };

            try
            {
                var result = await openAiClient.Completions.CreateCompletionAsync(completionRequest);
                string response = result.Completions?[0].Text.Trim() ?? "No response";
                log.LogInformation($"[ProcessNovelInputActivity] Processed input: {novelInput} | Response: {response}");
                return response;
            }
            catch (Exception ex)
            {
                log.LogError($"[ProcessNovelInputActivity] OpenAI API error: {ex.Message}");
                return "Error processing input";
            }
        }
    }

    public static class ConsolidateMemory
    {
        [FunctionName("ConsolidateMemory")]
        public static async Task Run(
            [ActivityTrigger] List<string> memoryBatch,
            ILogger log)
        {
            log.LogInformation("[ConsolidateMemory] Consolidating STM into LTM.");
            foreach (var item in memoryBatch)
            {
                log.LogInformation($" - {item}");
            }
            await Task.Delay(500); // Simulate database write
        }
    }

    public static class SelfLearningAI
    {
        private const string TableName = "AIMemoryTable";
        private static TableClient _tableClient;

        [FunctionName("InitializeMemory")]
        public static async Task InitializeMemory([TimerTrigger("0 */30 * * * *")] TimerInfo myTimer, ILogger log)
        {
            var connectionString = Environment.GetEnvironmentVariable("AZURE_STORAGE_CONNECTION_STRING");
            _tableClient = new TableClient(connectionString, TableName);
            await _tableClient.CreateIfNotExistsAsync();
            log.LogInformation("[Memory] AI Memory Initialized.");
        }

        [FunctionName("StoreMemory")]
        public static async Task StoreMemory([ActivityTrigger] MemoryEntry memory, ILogger log)
        {
            await _tableClient.AddEntityAsync(memory);
            log.LogInformation($"[Memory] Stored: {memory.Input} → {memory.Response}");
        }

        [FunctionName("RetrieveMemory")]
        public static async Task<List<MemoryEntry>> RetrieveMemory([ActivityTrigger] string query, ILogger log)
        {
            var results = _tableClient.QueryAsync<MemoryEntry>(m => m.Input.Contains(query));
            List<MemoryEntry> memoryEntries = new();
            await foreach (var entry in results) memoryEntries.Add(entry);
            log.LogInformation($"[Memory] Retrieved {memoryEntries.Count} relevant memories.");
            return memoryEntries;
        }

        [FunctionName("ProcessUserQuery")]
        public static async Task<string> ProcessUserQuery([OrchestrationTrigger] IDurableOrchestrationContext context, ILogger log)
        {
            string userInput = context.GetInput<string>();
            log.LogInformation($"[AI] Processing input: {userInput}");

            // Retrieve past similar queries from memory
            var previousResponses = await context.CallActivityAsync<List<MemoryEntry>>("RetrieveMemory", userInput);
            if (previousResponses.Any())
            {
                var bestMatch = previousResponses.OrderByDescending(m => m.Timestamp).First();
                log.LogInformation($"[Memory] Found past response: {bestMatch.Response}");
                return bestMatch.Response;
            }

            // No memory match, call OpenAI API
            string aiResponse = await context.CallActivityAsync<string>("ProcessWithOpenAI", userInput);
            log.LogInformation($"[AI] OpenAI Response: {aiResponse}");

            // Store new knowledge
            var newMemory = new MemoryEntry { PartitionKey = "AI_Memory", RowKey = Guid.NewGuid().ToString(), Input = userInput, Response = aiResponse };
            await context.CallActivityAsync("StoreMemory", newMemory);

            return aiResponse;
        }

        [FunctionName("ProcessWithOpenAI")]
        public static async Task<string> ProcessWithOpenAI([ActivityTrigger] string input, ILogger log)
        {
            string apiKey = Environment.GetEnvironmentVariable("OPENAI_API_KEY");
            if (string.IsNullOrEmpty(apiKey))
            {
                log.LogError("OpenAI API key is missing.");
                return "Error: No API key.";
            }

            var openAiClient = new OpenAIAPI(apiKey);
            var completionRequest = new OpenAI_API.Completions.CompletionRequest
            {
                Prompt = $"Learn from the following input and respond: {input}",
                MaxTokens = 100,
                Temperature = 0.7
            };

            try
            {
                var result = await openAiClient.Completions.CreateCompletionAsync(completionRequest);
                string response = result.Completions?[0].Text.Trim() ?? "No response";
                log.LogInformation($"[AI] Generated response: {response}");
                return response;
            }
            catch (Exception ex)
            {
                log.LogError($"[AI] OpenAI API error: {ex.Message}");
                return "Error processing input.";
            }
        }
    }

    public class MemoryEntry : ITableEntity
    {
        public string PartitionKey { get; set; }
        public string RowKey { get; set; }
        public string Input { get; set; }
        public string Response { get; set; }

        // ITableEntity requires a nullable Timestamp and an Azure.ETag-typed ETag.
        public DateTimeOffset? Timestamp { get; set; }
        public ETag ETag { get; set; }
    }
}


Signed-off-by: josefkedwards <cedwards19606389@yahoo.com>

coderabbitai bot commented Feb 6, 2025

Walkthrough

An Azure Functions framework for an AI agent was introduced. The code defines an orchestrator that collects novel inputs into a short-term memory (STM) and, upon reaching a count threshold or elapsed time, consolidates them into long-term memory (LTM). Novel inputs are processed via an activity that calls the OpenAI API, and error handling is in place for missing keys or API failures. Additionally, self-learning capabilities are implemented with routines for initializing, storing, and retrieving memory from Azure Table Storage, along with processing user queries.
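For context, the orchestrator only consumes the "NovelInput" external event; the PR does not show how that event is raised. A minimal, hypothetical client function that starts the orchestration and delivers the event could look roughly like the sketch below (the function name, HTTP route, and authorization level are illustrative assumptions, and it would sit alongside the functions above):

using System.Collections.Generic;
using System.IO;
using System.Threading.Tasks;
using Microsoft.AspNetCore.Http;
using Microsoft.AspNetCore.Mvc;
using Microsoft.Azure.WebJobs;
using Microsoft.Azure.WebJobs.Extensions.DurableTask;
using Microsoft.Azure.WebJobs.Extensions.Http;
using Microsoft.Extensions.Logging;

public static class NovelInputClient
{
    [FunctionName("SubmitNovelInput")]
    public static async Task<IActionResult> Run(
        [HttpTrigger(AuthorizationLevel.Function, "post", Route = "novel-input/{instanceId?}")] HttpRequest req,
        [DurableClient] IDurableOrchestrationClient client,
        string instanceId,
        ILogger log)
    {
        // The request body carries the novel input to feed into short-term memory.
        string input = await new StreamReader(req.Body).ReadToEndAsync();

        if (string.IsNullOrEmpty(instanceId))
        {
            // Start a new orchestration with an empty short-term memory.
            instanceId = await client.StartNewAsync("AIAgentOrchestrator", null, new List<string>());
            log.LogInformation($"Started orchestration {instanceId}");
        }

        // Deliver the input; the orchestrator is awaiting WaitForExternalEvent<string>("NovelInput").
        await client.RaiseEventAsync(instanceId, "NovelInput", input);
        return new OkObjectResult(instanceId);
    }
}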

Changes

File: AI.cs
Change summary: Added new classes: AIAgentOrchestrator (with a Run orchestration method), ProcessNovelInputActivity (with a Run method to handle OpenAI API calls), ConsolidateMemory (with a Run method to consolidate STM into LTM), SelfLearningAI (with methods for initializing memory, storing memory, retrieving memory, processing user queries, and invoking OpenAI), MemoryEntry (implementing ITableEntity for Azure Table Storage), EmotionMemory (with sentiment analysis methods), VoiceAI (with speech processing methods), PersonalizedAI (for fine-tuned responses), GraphMemory (for graph-based memory storage), MultiTurnMemory (for user session management), and WebSocketHandler (for managing WebSocket connections).

Suggested labels

enhancement, AI Showcase

Poem

I'm a rabbit with bright code hops,
Leaping through functions and endless loops,
Memory flows from short to long,
In Azure clouds where bytes belong,
Hop along, let these changes bloom –
A playful journey in every room! 🐇



coderabbitai bot left a comment


Actionable comments posted: 13

🧹 Nitpick comments (1)
AI.cs (1)

169-174: Ensure OpenAI API key is securely managed and errors are properly handled.

While you check for a missing API key, consider implementing more robust error handling and ensuring the API key is managed securely.

Additionally, you may want to use Azure Key Vault to store sensitive configuration like API keys.
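For illustration, a minimal sketch of resolving the key from Key Vault instead of a plain app setting (the vault URI and secret name are placeholders, and the Azure.Identity and Azure.Security.KeyVault.Secrets packages are assumed):

using System;
using Azure.Identity;
using Azure.Security.KeyVault.Secrets;

// Authenticate with the function app's managed identity and read the secret.
var secretClient = new SecretClient(
    new Uri("https://my-vault.vault.azure.net/"),
    new DefaultAzureCredential());

KeyVaultSecret secret = await secretClient.GetSecretAsync("OpenAI-ApiKey");
string apiKey = secret.Value;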

📜 Review details

Configuration used: CodeRabbit UI
Review profile: CHILL
Plan: Pro

📥 Commits

Reviewing files that changed from the base of the PR and between 2a2864b and 06b8c76.

📒 Files selected for processing (1)
  • AI.cs (1 hunks)
⏰ Context from checks skipped due to timeout of 90000ms (1)
  • GitHub Check: Summary
🔇 Additional comments (3)
AI.cs (3)

152-152: 🛠️ Refactor suggestion

Avoid logging AI responses to prevent data leakage.

Logging the bestMatch.Response may expose sensitive information. Consider logging without including the response content.

[security]

Modify the log statement:
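A minimal sketch of such a change (the replacement wording is illustrative):

-log.LogInformation($"[Memory] Found past response: {bestMatch.Response}");
+log.LogInformation("[Memory] Found a matching past response");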

Likely invalid or redundant comment.


58-78: Verify the usage of the OpenAI API client and methods.

Ensure that the OpenAIAPI client and its methods are used correctly, as the OpenAI .NET SDK may have updated methods or deprecated certain classes.

✅ Verification successful

🌐 Web query:

Check the latest documentation for the OpenAI .NET SDK to confirm that `OpenAIAPI`, `CompletionRequest`, and `CreateCompletionAsync` are used appropriately.

Length of output: 2870


Verification of OpenAI API Client Usage

The code correctly utilizes the legacy community library (OkGoDoIt/OpenAI-API-dotnet), where using OpenAIAPI, CompletionRequest, and CreateCompletionAsync is standard practice. If you intend to stick with this library, no changes are needed. However, for new projects using the official OpenAI .NET SDK, a migration toward the ChatClient pattern with methods like CompleteChatAsync is recommended.


176-196: Confirm the OpenAI API usage for potential updates.

Verify that the methods used for interacting with the OpenAI API are up-to-date and follow the latest best practices.

✅ Verification successful

🌐 Web query:

Review the OpenAI .NET SDK documentation to ensure correct usage of the API client and completion requests.

Length of output: 3203


API Usage Verification for AI.cs

  • The code correctly uses the older OpenAI API pattern (instantiating an OpenAIAPI and calling CreateCompletionAsync) for obtaining text completions.
  • According to the latest OpenAI .NET SDK documentation, newer patterns recommend leveraging a dedicated ChatClient and chat-specific methods (such as CompleteChatAsync or streaming variants) for enhanced functionality and error handling.
  • If your application’s requirements are met by simple text completions, the current implementation remains functional. However, to take advantage of improved asynchronous support, built-in streaming, and advanced message role processing, consider migrating to the updated API usage; a rough sketch of that pattern follows below.
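For reference, a sketch of the chat-based pattern from the official OpenAI .NET library (not part of this PR; the model name is an assumption):

using System;
using OpenAI.Chat;

// Create a chat client; "gpt-4o-mini" is an assumed model name.
ChatClient client = new(
    model: "gpt-4o-mini",
    apiKey: Environment.GetEnvironmentVariable("OPENAI_API_KEY"));

// Mirrors the activity's prompt, replacing CreateCompletionAsync with CompleteChatAsync.
ChatCompletion completion = await client.CompleteChatAsync(
    new UserChatMessage($"Process this input and provide insights: {novelInput}"));

string response = completion.Content[0].Text;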

Signed-off-by: josefkedwards <cedwards19606389@yahoo.com>
@josefkedwards merged commit cab5b6c into main Feb 6, 2025
1 of 14 checks passed

coderabbitai bot left a comment


Actionable comments posted: 4

♻️ Duplicate comments (3)
AI.cs (3)

25-25: ⚠️ Potential issue

Security: Remove sensitive data from logs.

Multiple log statements throughout the code expose potentially sensitive information including user inputs, AI responses, and memory contents. This could lead to data privacy issues if logs are compromised.

Modify the log statements to include only non-sensitive metadata. For example:

-log.LogInformation($"[Orchestrator] Received novel input: {novelInput}");
+log.LogInformation("[Orchestrator] Received novel input");

Also applies to: 29-29, 70-70, 91-91, 127-127, 144-144, 157-157, 188-188, 236-236, 274-274, 282-282, 344-344, 358-358, 374-374, 382-382, 440-440


112-113: ⚠️ Potential issue

Avoid static TableClient in Azure Functions.

Using a static TableClient can cause issues in a serverless environment where functions may run on multiple instances. Additionally, the connection string is not validated before use.

Initialize TableClient within each function and add proper error handling:

-private static TableClient _tableClient;
+private static TableClient GetTableClient(ILogger log)
+{
+    var connectionString = Environment.GetEnvironmentVariable("AZURE_STORAGE_CONNECTION_STRING");
+    if (string.IsNullOrEmpty(connectionString))
+    {
+        log.LogError("Azure Storage connection string is not configured");
+        throw new InvalidOperationException("Storage configuration missing");
+    }
+    return new TableClient(connectionString, TableName);
+}

Also applies to: 117-120


133-136: ⚠️ Potential issue

Fix unsupported query operator in Azure Table Storage.

The Contains operator is not supported by Azure Table Storage and will cause runtime exceptions.

Consider using a supported filter or implement client-side filtering:

-var results = _tableClient.QueryAsync<MemoryEntry>(m => m.Input.Contains(query));
+var results = _tableClient.QueryAsync<MemoryEntry>();
+List<MemoryEntry> memoryEntries = new();
+await foreach (var entry in results)
+{
+    if (entry.Input.Contains(query, StringComparison.OrdinalIgnoreCase))
+    {
+        memoryEntries.Add(entry);
+    }
+}
📜 Review details

Configuration used: CodeRabbit UI
Review profile: CHILL
Plan: Pro

📥 Commits

Reviewing files that changed from the base of the PR and between 06b8c76 and 6d37040.

📒 Files selected for processing (1)
  • AI.cs (1 hunks)
⏰ Context from checks skipped due to timeout of 90000ms (1)
  • GitHub Check: Summary

coderabbitai bot mentioned this pull request Feb 6, 2025