From 03d4c8afec13eb626fefd28d42cb6a42fc0c94fd Mon Sep 17 00:00:00 2001
From: George Pickett
Date: Tue, 14 Jan 2025 00:10:58 -0800
Subject: [PATCH] Refactor index.py into smaller modules

---
For more details, open the [Copilot Workspace session](https://copilot-workspace.githubnext.com/grp06/cursor-boost?shareId=XXXX-XXXX-XXXX-XXXX).
---
 README.md             |  10 ++++
 command_runner.py     |  42 ++++++++++++++++
 config_loader.py      |  26 ++++++++++
 docker_logs.py        |  39 +++++++++++++
 index.py              | 126 ++-----------------------------------------
 openai_integration.py |  20 +++++++
 snapshot_writer.py    |   5 ++
 7 files changed, 148 insertions(+), 120 deletions(-)
 create mode 100644 command_runner.py
 create mode 100644 config_loader.py
 create mode 100644 docker_logs.py
 create mode 100644 openai_integration.py
 create mode 100644 snapshot_writer.py

diff --git a/README.md b/README.md
index 7bbb025..49b0705 100644
--- a/README.md
+++ b/README.md
@@ -106,3 +106,13 @@ Here’s an example of how your project directory might look:
 
 - **`.cursorrules`**: Generated at the root of your project directory.
 - **`snapshot.txt`**: Contains the latest system and project snapshots.
+## New Module Structure
+The `index.py` file has been refactored into smaller, more focused modules:
+
+- `config_loader.py`: Handles configuration loading and validation.
+- `command_runner.py`: Handles system and project command execution.
+- `snapshot_writer.py`: Handles writing snapshots to files.
+- `docker_logs.py`: Handles Docker log retrieval.
+- `openai_integration.py`: Handles OpenAI API interaction.
+
+`index.py` now imports these helpers from the new modules; if any of your own code imported them from `index.py` directly, update those imports accordingly.
diff --git a/command_runner.py b/command_runner.py
new file mode 100644
index 0000000..3fc86ac
--- /dev/null
+++ b/command_runner.py
@@ -0,0 +1,42 @@
+import subprocess
+import os
+from datetime import datetime
+from config_loader import load_config
+
+config = load_config()  # shared settings from config.json (e.g. base_project_path)
+
+def run_commands(commands_list, project_dir=None):
+    print(f"📊 Collecting {'project' if project_dir else 'system'} information...")
+    output = []
+    output.append(f"Timestamp: {datetime.utcnow().isoformat()}Z\n")
+
+    if project_dir:
+        original_dir = os.getcwd()
+        base_project_path = config.get('base_project_path')
+        if not base_project_path or base_project_path == "":
+            raise ValueError(
+                "❌ Error: 'base_project_path' is not defined or is still set to '' in the configuration file (config.json). "
+                "Please update it to your project's root directory."
+            )
+
+        project_path = os.path.join(os.path.expanduser(base_project_path), project_dir)
+        try:
+            os.chdir(project_path)
+            output.append(f"\n### Project: {project_dir} ###\n")
+        except FileNotFoundError:
+            print(f" ❌ Project directory not found: {project_path}")
+            return ""
+
+    for command in commands_list:
+        print(f" ⚡ Running: {command}")
+        try:
+            result = subprocess.check_output(command, shell=True, stderr=subprocess.STDOUT)
+            output.append(f"{command}:\n{result.decode('utf-8')}\n")
+        except subprocess.CalledProcessError as e:
+            print(f" ❌ Failed: {command}")
+            output.append(f"{command} (FAILED):\n{e.output.decode('utf-8')}\n")
+
+    if project_dir:
+        os.chdir(original_dir)
+
+    return "\n".join(output)
diff --git a/config_loader.py b/config_loader.py
new file mode 100644
index 0000000..f738929
--- /dev/null
+++ b/config_loader.py
@@ -0,0 +1,26 @@
+import os
+import json
+
+def load_config():
+    try:
+        script_dir = os.path.dirname(os.path.abspath(__file__))
+        config_file = os.path.join(script_dir, 'config.json')
+        with open(config_file, 'r') as f:
+            config = json.load(f)
+
+        if 'tree' not in config or 'system_commands' not in config:
+            raise KeyError("Missing required keys in config.json")
+
+        return config
+    except (FileNotFoundError, KeyError):
+        print("⚠️ config.json not found or invalid, using default settings")
+        return {
+            "tree": {
+                "max_depth": 3,
+                "ignore_patterns": ["venv", "__pycache__", "node_modules", "build", "public", "dist", ".git"],
+                "ignore_extensions": ["*.pyc", "*.pyo", "*.pyd", "*.so", "*.dll", "*.class"]
+            },
+            "system_commands": {
+                "disk_usage_threshold": 80
+            }
+        }
diff --git a/docker_logs.py b/docker_logs.py
new file mode 100644
index 0000000..b42e511
--- /dev/null
+++ b/docker_logs.py
@@ -0,0 +1,39 @@
+import subprocess
+import os
+from config_loader import load_config
+
+config = load_config()  # shared settings from config.json (e.g. docker.ignore_containers)
+
+def get_docker_logs():
+    try:
+        ignore_containers = config.get('docker', {}).get('ignore_containers', [])
+
+        containers = subprocess.check_output(
+            "docker ps --format '{{.ID}} {{.Names}}'",
+            shell=True
+        ).decode('utf-8').strip().split('\n')
+
+        logs = []
+        for container in containers:
+            if not container:
+                continue
+
+            container_id, container_name = container.split()
+
+            if container_name in ignore_containers:
+                continue
+
+            try:
+                container_logs = subprocess.check_output(
+                    f"docker logs --tail 25 {container_id}",
+                    shell=True
+                ).decode('utf-8')
+
+                logs.append(f"\nDocker Logs for {container_name} ({container_id}):\n")
+                logs.append(container_logs)
+            except subprocess.CalledProcessError as e:
+                logs.append(f"\nError getting logs for {container_name}: {str(e)}\n")
+
+        return "\n".join(logs)
+    except subprocess.CalledProcessError as e:
+        return "Error getting Docker container list"
diff --git a/index.py b/index.py
index 81d20d9..ef17aa8 100644
--- a/index.py
+++ b/index.py
@@ -1,36 +1,16 @@
-import subprocess
-import datetime
 from dotenv import load_dotenv
 import os
 import json
 import time
 from datetime import datetime
 
-load_dotenv()
+from config_loader import load_config
+from command_runner import run_commands
+from snapshot_writer import write_snapshot
+from docker_logs import get_docker_logs
+from openai_integration import generate_cursorrules
 
-def load_config():
-    try:
-        script_dir = os.path.dirname(os.path.abspath(__file__))
-        config_file = os.path.join(script_dir, 'config.json')
-        with open(config_file, 'r') as f:
-            config = json.load(f)
-
-        if 'tree' not in config or 'system_commands' not in config:
-            raise KeyError("Missing required keys in config.json")
-
-        return config
-    except (FileNotFoundError, KeyError):
-        print("⚠️ config.json not found or invalid, using default settings")
-        return {
-            "tree": {
-                "max_depth": 3,
-                "ignore_patterns": ["venv", "__pycache__", "node_modules", "build", "public", "dist", ".git"],
-                "ignore_extensions": ["*.pyc", "*.pyo", "*.pyd", "*.so", "*.dll", "*.class"]
-            },
-            "system_commands": {
-                "disk_usage_threshold": 80
-            }
-        }
+load_dotenv()
 
 config = load_config()
 
@@ -70,100 +50,6 @@ def get_project_directories():
         print("❌ Error: containers-list.md not found in .cursorboost directory")
         return []
 
-def run_commands(commands_list, project_dir=None):
-    print(f"📊 Collecting {'project' if project_dir else 'system'} information...")
-    output = []
-    output.append(f"Timestamp: {datetime.utcnow().isoformat()}Z\n")
-
-    if project_dir:
-        original_dir = os.getcwd()
-        base_project_path = config.get('base_project_path')
-        if not base_project_path or base_project_path == "":
-            raise ValueError(
-                "❌ Error: 'base_project_path' is not defined or is still set to '' in the configuration file (config.json). "
-                "Please update it to your project's root directory."
-            )
-
-        project_path = os.path.join(os.path.expanduser(base_project_path), project_dir)
-        try:
-            os.chdir(project_path)
-            output.append(f"\n### Project: {project_dir} ###\n")
-        except FileNotFoundError:
-            print(f" ❌ Project directory not found: {project_path}")
-            return ""
-
-    for command in commands_list:
-        print(f" ⚡ Running: {command}")
-        try:
-            result = subprocess.check_output(command, shell=True, stderr=subprocess.STDOUT)
-            output.append(f"{command}:\n{result.decode('utf-8')}\n")
-        except subprocess.CalledProcessError as e:
-            print(f" ❌ Failed: {command}")
-            output.append(f"{command} (FAILED):\n{e.output.decode('utf-8')}\n")
-
-    if project_dir:
-        os.chdir(original_dir)
-
-    return "\n".join(output)
-
-def write_snapshot(snapshot):
-    with open("snapshot.txt", "w") as f:
-        f.write(snapshot)
-
-import openai
-
-def generate_cursorrules(snapshot):
-    api_key = os.getenv('OPENAI_API_KEY')
-    if not api_key:
-        print("❌ Error: OPENAI_API_KEY not found in environment variables")
-        return None
-
-    client = openai.OpenAI(api_key=api_key)
-
-    response = client.chat.completions.create(
-        model="chatgpt-4o-latest",
-        messages=[
-            {"role": "system", "content": "You are a helpful assistant that injects a user's system information and parses out the most important details. Only respond with the complete file contents, without any additional explanation or commentary."},
-            {"role": "user", "content": f"You are a coding assistant optimizing text files for LLM-based applications. Using the following system snapshot, generate a text file that highlights the most relevant details for coding context. Only output the complete file contents, without explanations or extra text:\n\n{snapshot}"}
-        ],
-        temperature=1,
-    )
-    return response.choices[0].message.content
-
-def get_docker_logs():
-    try:
-        ignore_containers = config.get('docker', {}).get('ignore_containers', [])
-
-        containers = subprocess.check_output(
-            "docker ps --format '{{.ID}} {{.Names}}'",
-            shell=True
-        ).decode('utf-8').strip().split('\n')
-
-        logs = []
-        for container in containers:
-            if not container:
-                continue
-
-            container_id, container_name = container.split()
-
-            if container_name in ignore_containers:
-                continue
-
-            try:
-                container_logs = subprocess.check_output(
-                    f"docker logs --tail 25 {container_id}",
-                    shell=True
-                ).decode('utf-8')
-
-                logs.append(f"\nDocker Logs for {container_name} ({container_id}):\n")
-                logs.append(container_logs)
-            except subprocess.CalledProcessError as e:
-                logs.append(f"\nError getting logs for {container_name}: {str(e)}\n")
-
-        return "\n".join(logs)
-    except subprocess.CalledProcessError as e:
-        return "Error getting Docker container list"
-
 def write_cursorrules(cursorrules):
     project_description = ""
     try:
diff --git a/openai_integration.py b/openai_integration.py
new file mode 100644
index 0000000..2e8d80c
--- /dev/null
+++ b/openai_integration.py
@@ -0,0 +1,20 @@
+import os
+import openai
+
+def generate_cursorrules(snapshot):
+    api_key = os.getenv('OPENAI_API_KEY')
+    if not api_key:
+        print("❌ Error: OPENAI_API_KEY not found in environment variables")
+        return None
+
+    client = openai.OpenAI(api_key=api_key)
+
+    response = client.chat.completions.create(
+        model="chatgpt-4o-latest",
+        messages=[
+            {"role": "system", "content": "You are a helpful assistant that injects a user's system information and parses out the most important details. Only respond with the complete file contents, without any additional explanation or commentary."},
+            {"role": "user", "content": f"You are a coding assistant optimizing text files for LLM-based applications. Using the following system snapshot, generate a text file that highlights the most relevant details for coding context. Only output the complete file contents, without explanations or extra text:\n\n{snapshot}"}
+        ],
+        temperature=1,
+    )
+    return response.choices[0].message.content
diff --git a/snapshot_writer.py b/snapshot_writer.py
new file mode 100644
index 0000000..8c8f615
--- /dev/null
+++ b/snapshot_writer.py
@@ -0,0 +1,5 @@
+import os
+
+def write_snapshot(snapshot):
+    with open("snapshot.txt", "w") as f:
+        f.write(snapshot)
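
For quick orientation, here is one way the refactored helpers can be wired together. This is an illustrative sketch only: the real call sites live in the unchanged main loop of `index.py`, and the command lists (`uname -a`, `df -h`, `git status`) and project name (`my-project`) below are placeholder assumptions, not values read from `config.json`.

```python
from command_runner import run_commands
from snapshot_writer import write_snapshot
from docker_logs import get_docker_logs
from openai_integration import generate_cursorrules

# Collect system-level output, per-project output, and container logs into one snapshot.
sections = [run_commands(["uname -a", "df -h"])]             # placeholder system commands
sections.append(run_commands(["git status"], "my-project"))  # placeholder project; needs base_project_path in config.json
sections.append(get_docker_logs())

snapshot = "\n".join(sections)
write_snapshot(snapshot)                                     # writes snapshot.txt in the current directory

cursorrules = generate_cursorrules(snapshot)                 # returns None if OPENAI_API_KEY is not set
if cursorrules:
    print(cursorrules)
```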