diff --git a/.github/workflows/bundle-desktop-intel.yml b/.github/workflows/bundle-desktop-intel.yml
index b6b9d714338a..594f2d311278 100644
--- a/.github/workflows/bundle-desktop-intel.yml
+++ b/.github/workflows/bundle-desktop-intel.yml
@@ -82,7 +82,8 @@ jobs:
       - name: Checkout code
         uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
         with:
-          ref: ${{ inputs.ref }}
+          # Only pass ref if it's explicitly set, otherwise let checkout action use its default behavior
+          ref: ${{ inputs.ref != '' && inputs.ref || '' }}
           fetch-depth: 0

       # Update versions before build
diff --git a/.github/workflows/bundle-desktop-linux.yml b/.github/workflows/bundle-desktop-linux.yml
index f31f80b4c42b..50ca82af832d 100644
--- a/.github/workflows/bundle-desktop-linux.yml
+++ b/.github/workflows/bundle-desktop-linux.yml
@@ -28,7 +28,8 @@ jobs:
       - name: Checkout repository
         uses: actions/checkout@f43a0e5ff2bd294095638e18286ca9a3d1956744
         with:
-          ref: ${{ inputs.ref }}
+          # Only pass ref if it's explicitly set, otherwise let checkout action use its default behavior
+          ref: ${{ inputs.ref != '' && inputs.ref || '' }}
           fetch-depth: 0

       # 2) Update versions before build
@@ -264,4 +265,4 @@ jobs:
           path: |
             ui/desktop/out/make/deb/x64/*.deb
             ui/desktop/out/make/rpm/x64/*.rpm
-          if-no-files-found: error
\ No newline at end of file
+          if-no-files-found: error
diff --git a/.github/workflows/bundle-desktop-windows.yml b/.github/workflows/bundle-desktop-windows.yml
index 37b2cbbf98eb..3ebcdb5b1123 100644
--- a/.github/workflows/bundle-desktop-windows.yml
+++ b/.github/workflows/bundle-desktop-windows.yml
@@ -17,10 +17,10 @@ on:
         type: boolean
         default: false
       ref:
-        description: 'Git ref to checkout'
+        description: 'Git ref to checkout (branch, tag, or SHA). Defaults to main branch if not specified.'
        required: false
         type: string
-        default: 'refs/heads/main'
+        default: ''
     secrets:
       WINDOWS_CODESIGN_CERTIFICATE:
         required: false
@@ -41,11 +41,34 @@ jobs:
     runs-on: ubuntu-latest # Use Ubuntu for cross-compilation
     steps:
+      # Debug information about the workflow and inputs
+      - name: Debug workflow info
+        env:
+          WORKFLOW_NAME: ${{ github.workflow }}
+          WORKFLOW_REF: ${{ github.ref }}
+          EVENT_NAME: ${{ github.event_name }}
+          REPOSITORY: ${{ github.repository }}
+          INPUT_REF: ${{ inputs.ref }}
+          INPUT_VERSION: ${{ inputs.version }}
+          INPUT_SIGNING: ${{ inputs.signing }}
+        run: |
+          echo "=== Workflow Information ==="
+          echo "Workflow: ${WORKFLOW_NAME}"
+          echo "Ref: ${WORKFLOW_REF}"
+          echo "Event: ${EVENT_NAME}"
+          echo "Repo: ${REPOSITORY}"
+          echo ""
+          echo "=== Input Parameters ==="
+          echo "Build ref: ${INPUT_REF:-}"
+          echo "Version: ${INPUT_VERSION:-not set}"
+          echo "Signing: ${INPUT_SIGNING:-false}"
+
       # 1) Check out source
       - name: Checkout repository
         uses: actions/checkout@f43a0e5ff2bd294095638e18286ca9a3d1956744
         with:
-          ref: ${{ inputs.ref }}
+          # Only pass ref if it's explicitly set, otherwise let checkout action use its default behavior
+          ref: ${{ inputs.ref != '' && inputs.ref || '' }}
           fetch-depth: 0

       # 2) Configure AWS credentials for code signing
@@ -53,7 +76,7 @@ jobs:
         if: inputs.signing && inputs.signing == true
         uses: aws-actions/configure-aws-credentials@e3dd6a429d7300a6a4c196c26e071d42e0343502 # ratchet:aws-actions/configure-aws-credentials@v4
         with:
-          role-to-assume: ${{ github.ref == 'refs/heads/main' && secrets.WINDOW_SIGNING_ROLE || secrets.WINDOW_SIGNING_ROLE_TAG }}
+          role-to-assume: ${{ (inputs.ref == '' || inputs.ref == 'refs/heads/main') && secrets.WINDOW_SIGNING_ROLE || secrets.WINDOW_SIGNING_ROLE_TAG }}
           aws-region: us-west-2

       # 2) Set up Node.js
@@ -99,10 +122,10 @@ jobs:
       - name: Build Windows executable using Docker cross-compilation with enhanced caching
         run: |
           echo "🚀 Building Windows executable with enhanced GitHub Actions caching..."
-          
+
           # Create cache directories
           mkdir -p ~/.cargo/registry ~/.cargo/git
-          
+
           # Use enhanced caching with GitHub Actions cache mounts
           docker run --rm \
             -v "$(pwd)":/usr/src/myapp \
@@ -115,7 +138,7 @@ jobs:
              echo '=== Setting up Rust environment with caching ==='
              export CARGO_HOME=/usr/local/cargo
              export PATH=/usr/local/cargo/bin:\$PATH
-              
+
              # Check if Windows target is already installed in cache
              if rustup target list --installed | grep -q x86_64-pc-windows-gnu; then
                echo '✅ Windows cross-compilation target already installed'
@@ -123,11 +146,11 @@ jobs:
                echo '📦 Installing Windows cross-compilation target...'
                rustup target add x86_64-pc-windows-gnu
              fi
-              
+
              echo '=== Setting up build dependencies ==='
              apt-get update
              apt-get install -y mingw-w64 protobuf-compiler cmake time
-              
+
              echo '=== Setting up cross-compilation environment ==='
              export CC_x86_64_pc_windows_gnu=x86_64-w64-mingw32-gcc
              export CXX_x86_64_pc_windows_gnu=x86_64-w64-mingw32-g++
@@ -135,7 +158,7 @@ jobs:
              export CARGO_TARGET_X86_64_PC_WINDOWS_GNU_LINKER=x86_64-w64-mingw32-gcc
              export PKG_CONFIG_ALLOW_CROSS=1
              export PROTOC=/usr/bin/protoc
-              
+
              echo '=== Optimized Cargo configuration ==='
              mkdir -p .cargo
              echo '[build]' > .cargo/config.toml
@@ -157,7 +180,7 @@ jobs:
              echo '' >> .cargo/config.toml
              echo '[registries.crates-io]' >> .cargo/config.toml
              echo 'protocol = \"sparse\"' >> .cargo/config.toml
-              
+
              echo '=== Building with cached dependencies ==='
              # Check if we have cached build artifacts
              if [ -d target/x86_64-pc-windows-gnu/release/deps ] && [ \"\$(ls -A target/x86_64-pc-windows-gnu/release/deps)\" ]; then
@@ -167,31 +190,31 @@ jobs:
                echo '🔨 No cached artifacts found, performing full build...'
                CARGO_INCREMENTAL=0
              fi
-              
+
              echo '🔨 Building Windows executable...'
              CARGO_INCREMENTAL=\$CARGO_INCREMENTAL \
              CARGO_NET_RETRY=3 \
              CARGO_HTTP_TIMEOUT=60 \
              RUST_BACKTRACE=1 \
              cargo build --release --target x86_64-pc-windows-gnu --jobs 4
-              
+
              echo '=== Copying Windows runtime DLLs ==='
              GCC_DIR=\$(ls -d /usr/lib/gcc/x86_64-w64-mingw32/*/ | head -n 1)
              cp \"\$GCC_DIR/libstdc++-6.dll\" target/x86_64-pc-windows-gnu/release/
              cp \"\$GCC_DIR/libgcc_s_seh-1.dll\" target/x86_64-pc-windows-gnu/release/
              cp /usr/x86_64-w64-mingw32/lib/libwinpthread-1.dll target/x86_64-pc-windows-gnu/release/
-              
+
              echo '✅ Build completed successfully!'
              ls -la target/x86_64-pc-windows-gnu/release/
            "
-          
+
           # Verify build succeeded
           if [ ! -f "./target/x86_64-pc-windows-gnu/release/goosed.exe" ]; then
             echo "❌ Windows binary not found."
             ls -la ./target/x86_64-pc-windows-gnu/release/ || echo "Release directory doesn't exist"
             exit 1
           fi
-          
+
           echo "✅ Windows binary found!"
           ls -la ./target/x86_64-pc-windows-gnu/release/goosed.exe
           ls -la ./target/x86_64-pc-windows-gnu/release/*.dll
@@ -229,31 +252,31 @@ jobs:
             echo "Windows binary not found."
             exit 1
           fi
-          
+
           if [ ! -f "./temporal-service/temporal-service.exe" ]; then
             echo "temporal-service.exe not found."
             exit 1
           fi
-          
+
           if [ ! -f "./temporal.exe" ]; then
             echo "temporal.exe not found."
             exit 1
           fi
-          
+
           echo "Cleaning destination directory..."
           rm -rf ./ui/desktop/src/bin
           mkdir -p ./ui/desktop/src/bin
-          
+
           echo "Copying Windows binary and DLLs..."
           cp -f ./target/x86_64-pc-windows-gnu/release/goosed.exe ./ui/desktop/src/bin/
           cp -f ./target/x86_64-pc-windows-gnu/release/*.dll ./ui/desktop/src/bin/
-          
+
           echo "Copying temporal-service.exe..."
           cp -f ./temporal-service/temporal-service.exe ./ui/desktop/src/bin/
-          
+
           echo "Copying temporal.exe..."
           cp -f ./temporal.exe ./ui/desktop/src/bin/
-          
+
           # Copy Windows platform files (tools, scripts, etc.)
           if [ -d "./ui/desktop/src/platform/windows/bin" ]; then
             echo "Copying Windows platform files..."
@@ -262,7 +285,7 @@ jobs:
                cp -f "$file" ./ui/desktop/src/bin/
              fi
            done
-          
+
            if [ -d "./ui/desktop/src/platform/windows/bin/goose-npm" ]; then
              echo "Setting up npm environment..."
              rsync -a --delete ./ui/desktop/src/platform/windows/bin/goose-npm/ ./ui/desktop/src/bin/goose-npm/
@@ -274,18 +297,18 @@ jobs:
       - name: Build desktop UI with npm
         run: |
           cd ui/desktop
-          
+
           # Fix for rollup native module issue (npm optional dependencies bug)
           echo "🔧 Fixing npm optional dependencies issue..."
           rm -rf node_modules package-lock.json
           npm install
-          
+
           # Verify rollup native module is installed
           if [ ! -d "node_modules/@rollup/rollup-linux-x64-gnu" ]; then
             echo "⚠️ Rollup native module missing, installing manually..."
             npm install @rollup/rollup-linux-x64-gnu --save-optional
           fi
-          
+
           npm run bundle:windows

       # 7) Copy exe/dll to final out folder and prepare flat distribution
@@ -294,11 +317,11 @@ jobs:
           cd ui/desktop
           mkdir -p ./out/Goose-win32-x64/resources/bin
           rsync -av src/bin/ out/Goose-win32-x64/resources/bin/
-          
+
           # Create flat distribution structure
           mkdir -p ./dist-windows
           cp -r ./out/Goose-win32-x64/* ./dist-windows/
-          
+
           # Verify the final structure
           echo "📋 Final flat distribution structure:"
           ls -la ./dist-windows/
@@ -311,25 +334,25 @@ jobs:
         run: |
           set -exuo pipefail
           echo "🔐 Starting Windows code signing with jsign + AWS KMS..."
-          
+
           # Create certificate file from secret
           echo "📝 Creating certificate file from GitHub secret..."
           echo "${{ secrets.WINDOWS_CODESIGN_CERTIFICATE }}" > block-codesign-cert.pem
-          
+
           # Install Java (required for jsign)
           echo "☕ Installing Java runtime..."
           sudo apt-get update
           sudo apt-get install -y openjdk-11-jre-headless osslsigncode
-          
+
           # Download jsign
           echo "📥 Downloading jsign..."
           wget -q https://github.com/ebourg/jsign/releases/download/6.0/jsign-6.0.jar -O jsign.jar
           echo "05ca18d4ab7b8c2183289b5378d32860f0ea0f3bdab1f1b8cae5894fb225fa8a  jsign.jar" | sha256sum -c
-          
+
           # Sign the main Electron executable (Goose.exe)
           echo "🔐 Signing main Electron executable: Goose.exe"
           cd ui/desktop/dist-windows/
-          
+
           java -jar ${GITHUB_WORKSPACE}/jsign.jar \
             --storetype AWS \
             --keystore us-west-2 \
@@ -343,11 +366,11 @@ jobs:
           osslsigncode verify Goose.exe
           echo "✅ Main executable Goose.exe signed successfully"
-          
+
           # Sign the backend executable (goosed.exe)
           echo "🔐 Signing backend executable: goosed.exe"
           cd resources/bin/
-          
+
           java -jar ${GITHUB_WORKSPACE}/jsign.jar \
             --storetype AWS \
             --keystore us-west-2 \
@@ -369,7 +392,7 @@ jobs:
           sha256sum Goose.exe
           ls -la resources/bin/goosed.exe
           sha256sum resources/bin/goosed.exe
-          
+
           # Clean up certificate file
           rm -f ${GITHUB_WORKSPACE}/block-codesign-cert.pem
@@ -382,7 +405,7 @@ jobs:
           ls -la ui/desktop/dist-windows/Goose.exe
           osslsigncode verify ui/desktop/dist-windows/Goose.exe
           echo "✅ Main executable signature verification passed"
-          
+
           echo "Backend executable:"
           ls -la ui/desktop/dist-windows/resources/bin/goosed.exe
           osslsigncode verify ui/desktop/dist-windows/resources/bin/goosed.exe
@@ -393,13 +416,13 @@ jobs:
         run: |
           cd ui/desktop
           echo "📦 Creating Windows zip package..."
-          
+
           # Create a zip file from the dist-windows directory
           zip -r "Goose-win32-x64.zip" dist-windows/
-          
+
           echo "✅ Windows zip package created:"
           ls -la Goose-win32-x64.zip
-          
+
           # Also create the zip in the expected output structure for consistency
           mkdir -p out/Goose-win32-x64/
           cp Goose-win32-x64.zip out/Goose-win32-x64/
diff --git a/Cargo.lock b/Cargo.lock
index 9541e0b7dbf9..755607202056 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -3650,6 +3650,7 @@ dependencies = [
 "serde_with",
 "serial_test",
 "shellexpand",
+ "similar",
 "sysinfo 0.32.1",
 "tempfile",
 "thiserror 1.0.69",
@@ -7745,6 +7746,12 @@ dependencies = [
 "quote",
]

+[[package]]
+name = "similar"
+version = "2.7.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "bbbb5d9659141646ae647b42fe094daf6c6192d1620870b449d9557f748b2daa"
+
[[package]]
name = "simple_asn1"
version = "0.6.3"
diff --git a/crates/goose-mcp/Cargo.toml b/crates/goose-mcp/Cargo.toml
index b6b77d0182ac..1305f73d517b 100644
--- a/crates/goose-mcp/Cargo.toml
+++ b/crates/goose-mcp/Cargo.toml
@@ -35,6 +35,7 @@ reqwest = { version = "0.11", features = [
 ], default-features = false }
 async-trait = "0.1"
 chrono = { version = "0.4.38", features = ["serde"] }
+similar = { version = "2.4", features = ["inline"] }
 etcetera = "0.8.0"
 tempfile = "3.8"
 include_dir = "0.7.4"
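The `similar` dependency added above powers the checkpoint diffs introduced in `developer/mod.rs` below. A minimal standalone sketch of the same `TextDiff` pattern (hunk grouping plus per-line change tags); the input strings here are made up:

```rust
use similar::{ChangeTag, TextDiff};

fn main() {
    let old = "fn main() {\n    println!(\"hello\");\n}\n";
    let new = "fn main() {\n    println!(\"hello, world\");\n}\n";

    let diff = TextDiff::from_lines(old, new);
    // grouped_ops(n) clusters changes into hunks with n lines of context —
    // the same call that smart_group_ops builds on in the diff below.
    for group in diff.grouped_ops(3) {
        for op in group {
            for change in diff.iter_changes(&op) {
                let sign = match change.tag() {
                    ChangeTag::Delete => "-",
                    ChangeTag::Insert => "+",
                    ChangeTag::Equal => " ",
                };
                print!("{}{}", sign, change.value());
            }
        }
    }
}
```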
"checkpoint_path"], + "properties": { + "path": { + "type": "string", + "description": "Absolute path to the file to restore" + }, + "checkpoint_path": { + "type": "string", + "description": "Path to the checkpoint file to restore from" + } + } + }), + Some(ToolAnnotations { + title: Some("Restore from checkpoint".to_string()), + read_only_hint: false, + destructive_hint: true, + idempotent_hint: false, + open_world_hint: false, + }), + ); + // Get base instructions and working directory let cwd = std::env::current_dir().expect("should have a current working dir"); let os = std::env::consts::OS; @@ -480,6 +539,9 @@ impl DeveloperRouter { let ignore_patterns = builder.build().expect("Failed to build ignore patterns"); + let chk = Self::get_checkpoints_dir(); + std::fs::create_dir_all(&chk).ok(); + Self { tools: vec![ bash_tool, @@ -487,12 +549,16 @@ impl DeveloperRouter { list_windows_tool, screen_capture_tool, image_processor_tool, + list_checkpoints_tool, + restore_checkpoint_tool, ], prompts: Arc::new(load_prompt_files()), instructions, file_history: Arc::new(Mutex::new(HashMap::new())), ignore_patterns: Arc::new(ignore_patterns), editor_model, + checkpoint_dir: chk, + checkpoint_index: Arc::new(Mutex::new(HashMap::new())), } } @@ -519,6 +585,308 @@ impl DeveloperRouter { } } + // Helper function to create a checkpoint of a file + fn create_checkpoint(&self, file: &Path) -> Result<(PathBuf, String), ToolError> { + if !file.exists() { + return Err(ToolError::NotFound(format!( + "{} does not exist", + file.display() + ))); + } + let ts = Utc::now().format("%Y%m%dT%H%M%S%3f").to_string(); + let rel = file + .strip_prefix(std::env::current_dir().unwrap()) + .unwrap_or(file); + let dest = self.checkpoint_dir.join(&ts).join(rel); + if let Some(p) = dest.parent() { + std::fs::create_dir_all(p).map_err(|e| { + ToolError::ExecutionError(format!("Failed to create checkpoint directory: {}", e)) + })?; + } + std::fs::copy(file, &dest).map_err(|e| { + ToolError::ExecutionError(format!("Failed to copy file to checkpoint: {}", e)) + })?; + self.checkpoint_index + .lock() + .unwrap() + .entry(file.to_path_buf()) + .or_default() + .push(dest.clone()); + Ok((dest, ts)) + } + + // Helper function to generate diff string with intelligent hunk splitting + fn diff_string(&self, old_txt: &str, new_txt: &str, file_path: &Path) -> String { + use similar::{ChangeTag, TextDiff}; + + let diff = TextDiff::from_lines(old_txt, new_txt); + let mut result = String::new(); + + // Use relative path from current directory if possible, otherwise use full path + let cwd = std::env::current_dir().unwrap_or_default(); + let display_path = file_path + .strip_prefix(&cwd) + .unwrap_or(file_path) + .to_string_lossy(); + + result.push_str(&format!("--- a/{}\n", display_path)); + result.push_str(&format!("+++ b/{}\n", display_path)); + + // Use intelligent hunk grouping based on file type and content + let groups = self.smart_group_ops(&diff, file_path); + + for (idx, group) in groups.iter().enumerate() { + if idx > 0 { + result.push('\n'); // Separate hunks with newline + } + + // Calculate hunk header + let first = group[0]; + let last = group[group.len() - 1]; + let old_range = (last.old_range().end - first.old_range().start) as i32; + let new_range = (last.new_range().end - first.new_range().start) as i32; + + result.push_str(&format!( + "@@ -{},{} +{},{} @@\n", + first.old_range().start + 1, + old_range, + first.new_range().start + 1, + new_range + )); + + for op in group { + for change in diff.iter_changes(op) { + match 
change.tag() { + ChangeTag::Delete => { + result.push_str(&format!("-{}", change.value())); + if !change.value().ends_with('\n') { + result.push('\n'); + } + } + ChangeTag::Insert => { + result.push_str(&format!("+{}", change.value())); + if !change.value().ends_with('\n') { + result.push('\n'); + } + } + ChangeTag::Equal => { + result.push_str(&format!(" {}", change.value())); + if !change.value().ends_with('\n') { + result.push('\n'); + } + } + } + } + } + } + + result + } + + // Smart hunk grouping that considers file type and content structure + fn smart_group_ops( + &self, + diff: &similar::TextDiff, + file_path: &Path, + ) -> Vec> { + use similar::DiffOp; + + // Get file extension to determine context strategy + let extension = file_path.extension().and_then(|s| s.to_str()).unwrap_or(""); + + // Determine context size based on file type + let context_lines = match extension { + // Code files - use more context to capture function boundaries + "rs" | "py" | "js" | "ts" | "tsx" | "jsx" | "java" | "cpp" | "c" | "h" => 5, + // Markup files - moderate context for section boundaries + "md" | "html" | "xml" => 4, + // Config files - less context needed + "json" | "yaml" | "yml" | "toml" | "ini" => 3, + // Default for other files + _ => 3, + }; + + let ops = diff.ops(); + if ops.is_empty() { + return vec![]; + } + + // First, try the standard grouping with increased context + let standard_groups: Vec> = diff.grouped_ops(context_lines); + + // If we only have one group and it's very large, try to split it intelligently + if standard_groups.len() == 1 && self.should_split_large_group(&standard_groups[0]) { + self.split_large_group_intelligently(&standard_groups[0]) + } else { + standard_groups + } + } + + // Check if a group is too large and should be split + fn should_split_large_group(&self, group: &[similar::DiffOp]) -> bool { + // Count total lines in the group + let total_lines: usize = group + .iter() + .map(|op| { + (op.old_range().end - op.old_range().start) + .max(op.new_range().end - op.new_range().start) + }) + .sum(); + + // Split if more than 50 lines or more than 20 changed lines + let changed_lines: usize = group + .iter() + .filter_map(|op| match op.tag() { + similar::DiffTag::Delete | similar::DiffTag::Insert => Some( + (op.old_range().end - op.old_range().start) + .max(op.new_range().end - op.new_range().start), + ), + _ => None, + }) + .sum(); + + total_lines > 50 || changed_lines > 20 + } + + // Split a large group into smaller logical chunks + fn split_large_group_intelligently( + &self, + group: &[similar::DiffOp], + ) -> Vec> { + // Implement a simple splitting strategy based on line count + let mut result = Vec::new(); + let mut current_group = Vec::new(); + let mut lines_in_group = 0; + const MAX_LINES_PER_GROUP: usize = 30; + + for op in group { + let op_lines = (op.old_range().end - op.old_range().start) + .max(op.new_range().end - op.new_range().start); + + // If adding this op would make the group too large, start a new group + if lines_in_group > 0 + && lines_in_group + op_lines > MAX_LINES_PER_GROUP + && !current_group.is_empty() + { + result.push(current_group.clone()); + current_group.clear(); + lines_in_group = 0; + } + + current_group.push(*op); + lines_in_group += op_lines; + } + + if !current_group.is_empty() { + result.push(current_group); + } + + // If we couldn't split it effectively, return the original group + if result.len() <= 1 { + vec![group.to_vec()] + } else { + result + } + } + + // List checkpoints for a file + async fn list_checkpoints(&self, 
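For orientation, checkpoints created by `create_checkpoint` above land under the app data dir as `checkpoints/<timestamp>/<relative-path>`. A minimal sketch of that path construction (home-dir handling simplified; the `%3f` chrono specifier adds millisecond precision):

```rust
use chrono::Utc;
use std::path::{Path, PathBuf};

// Sketch: mirror create_checkpoint's destination layout.
fn checkpoint_dest(checkpoint_dir: &Path, file: &Path) -> PathBuf {
    // e.g. 20240101T120000123 — timestamp down to milliseconds
    let ts = Utc::now().format("%Y%m%dT%H%M%S%3f").to_string();
    // Store the file under its path relative to the working directory
    let rel = file
        .strip_prefix(std::env::current_dir().unwrap_or_default())
        .unwrap_or(file);
    checkpoint_dir.join(ts).join(rel)
}

fn main() {
    let dest = checkpoint_dest(Path::new("/tmp/checkpoints"), Path::new("src/main.rs"));
    println!("{}", dest.display());
}
```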
+    // List checkpoints for a file
+    async fn list_checkpoints(&self, params: Value) -> Result<Vec<Content>, ToolError> {
+        let path_str = params
+            .get("path")
+            .and_then(|v| v.as_str())
+            .ok_or_else(|| ToolError::InvalidParameters("Missing 'path' parameter".into()))?;
+
+        let file = self.resolve_path(path_str)?;
+        let list = self
+            .checkpoint_index
+            .lock()
+            .unwrap()
+            .get(&file)
+            .cloned()
+            .unwrap_or_default();
+
+        Ok(vec![
+            Content::text(format!(
+                "Found {} checkpoints for {}",
+                list.len(),
+                file.display()
+            ))
+            .with_audience(vec![Role::Assistant]),
+            Content::text(serde_json::to_string_pretty(&json!({ "checkpoints": list })).unwrap())
+                .with_audience(vec![Role::User])
+                .with_priority(0.0),
+        ])
+    }
+
+    // Restore a file from a checkpoint
+    async fn restore_checkpoint(&self, params: Value) -> Result<Vec<Content>, ToolError> {
+        let path_str = params
+            .get("path")
+            .and_then(|v| v.as_str())
+            .ok_or_else(|| ToolError::InvalidParameters("Missing 'path' parameter".into()))?;
+
+        let checkpoint_path_str = params
+            .get("checkpoint_path")
+            .and_then(|v| v.as_str())
+            .ok_or_else(|| {
+                ToolError::InvalidParameters("Missing 'checkpoint_path' parameter".into())
+            })?;
+
+        let file = self.resolve_path(path_str)?;
+        let checkpoint_path = Path::new(checkpoint_path_str);
+
+        // Check if checkpoint exists
+        if !checkpoint_path.exists() {
+            return Err(ToolError::NotFound(format!(
+                "Checkpoint {} does not exist",
+                checkpoint_path.display()
+            )));
+        }
+
+        // Read checkpoint content
+        let checkpoint_content = std::fs::read_to_string(checkpoint_path)
+            .map_err(|e| ToolError::ExecutionError(format!("Failed to read checkpoint: {}", e)))?;
+
+        // Create new checkpoint of current state before restore
+        let (new_ckpt_path, ts) = self.create_checkpoint(&file)?;
+
+        // Read current content for diff
+        let current_content = std::fs::read_to_string(&file).unwrap_or_default();
+
+        // Restore from checkpoint
+        std::fs::write(&file, &checkpoint_content)
+            .map_err(|e| ToolError::ExecutionError(format!("Failed to restore file: {}", e)))?;
+
+        // Generate diff and create payload
+        let diff = self.diff_string(&current_content, &checkpoint_content, &file);
+        let payload = json!({
+            "file": file,
+            "checkpoint": new_ckpt_path,
+            "timestamp": ts,
+            "diff": diff,
+            "action": "restore"
+        });
+
+        Ok(vec![
+            Content::text(format!(
+                "Restored {} from checkpoint {}",
+                file.display(),
+                checkpoint_path.display()
+            ))
+            .with_audience(vec![Role::Assistant]),
+            Content::text(format!(
+                "File {} has been restored from checkpoint",
+                file.display()
+            ))
+            .with_audience(vec![Role::User]),
+            Content::embedded_text(
+                "goose://checkpoint",
+                serde_json::to_string(&payload).unwrap(),
+            )
+            .with_audience(vec![Role::Assistant]),
+        ])
+    }
+
     // Shell command execution with platform-specific handling
     async fn bash(
         &self,
@@ -903,6 +1271,13 @@ impl DeveloperRouter {
             normalized_text.push('\n');
         }

+        // Create checkpoint if file exists, before overwriting
+        let (ckpt_path, ts) = if path.exists() {
+            self.create_checkpoint(path)?
+        } else {
+            (PathBuf::new(), String::new()) // new file => no checkpoint
+        };
+
         // Write to the file
         std::fs::write(path, &normalized_text) // Write the potentially modified text
             .map_err(|e| ToolError::ExecutionError(format!("Failed to write file: {}", e)))?;
@@ -910,6 +1285,23 @@ impl DeveloperRouter {
         // Try to detect the language from the file extension
         let language = lang::get_language_identifier(path);

+        // Generate diff (empty old content for new files)
+        let old_content = if ckpt_path.as_os_str().is_empty() {
+            ""
+        } else {
+            &std::fs::read_to_string(&ckpt_path).map_err(|e| {
+                ToolError::ExecutionError(format!("Failed to read checkpoint: {}", e))
+            })?
+        };
+        let diff = self.diff_string(old_content, &normalized_text, path);
+
+        let payload = json!({
+            "file": path,
+            "checkpoint": ckpt_path, // may be empty for brand-new file
+            "timestamp": ts,
+            "diff": diff
+        });
+
         // The assistant output does not show the file again because the content is already in the tool request
         // but we do show it to the user here, using the final written content
         Ok(vec![
@@ -928,6 +1320,11 @@ impl DeveloperRouter {
             })
             .with_audience(vec![Role::User])
             .with_priority(0.2),
+            Content::embedded_text(
+                "goose://checkpoint",
+                serde_json::to_string(&payload).unwrap(),
+            )
+            .with_audience(vec![Role::Assistant]),
         ])
     }

@@ -951,8 +1348,9 @@ impl DeveloperRouter {

         // Check if Editor API is configured and use it as the primary path
         if let Some(ref editor) = self.editor_model {
-            // Editor API path - save history then call API directly
+            // Editor API path - save history and create checkpoint then call API directly
             self.save_file_history(path)?;
+            let (ckpt_path, ts) = self.create_checkpoint(path)?;

             match editor.edit_code(&content, old_str, new_str).await {
                 Ok(updated_content) => {
@@ -962,6 +1360,15 @@ impl DeveloperRouter {
                         ToolError::ExecutionError(format!("Failed to write file: {}", e))
                     })?;

+                    // Generate diff and create checkpoint payload
+                    let diff = self.diff_string(&content, &normalized_content, path);
+                    let payload = json!({
+                        "file": path,
+                        "checkpoint": ckpt_path,
+                        "timestamp": ts,
+                        "diff": diff
+                    });
+
                     // Simple success message for Editor API
                     return Ok(vec![
                         Content::text(format!("Successfully edited {}", path.display()))
@@ -969,6 +1376,11 @@ impl DeveloperRouter {
                         Content::text(format!("File {} has been edited", path.display()))
                             .with_audience(vec![Role::User])
                             .with_priority(0.2),
+                        Content::embedded_text(
+                            "goose://checkpoint",
+                            serde_json::to_string(&payload).unwrap(),
+                        )
+                        .with_audience(vec![Role::Assistant]),
                     ]);
                 }
                 Err(e) => {
@@ -995,8 +1407,9 @@ impl DeveloperRouter {
             ));
         }

-        // Save history for undo (original behavior - after validation)
+        // Save history and create checkpoint for undo (original behavior - after validation)
         self.save_file_history(path)?;
+        let (ckpt_path, ts) = self.create_checkpoint(path)?;

         let new_content = content.replace(old_str, new_str);
         let normalized_content = normalize_line_endings(&new_content);
@@ -1049,11 +1462,25 @@ impl DeveloperRouter {
             output
         };

+        // Generate diff and create checkpoint payload
+        let diff = self.diff_string(&content, &normalized_content, path);
+        let payload = json!({
+            "file": path,
+            "checkpoint": ckpt_path,
+            "timestamp": ts,
+            "diff": diff
+        });
+
         Ok(vec![
             Content::text(success_message).with_audience(vec![Role::Assistant]),
             Content::text(output)
                 .with_audience(vec![Role::User])
                 .with_priority(0.2),
+            Content::embedded_text(
+                "goose://checkpoint",
+                serde_json::to_string(&payload).unwrap(),
+            )
+            .with_audience(vec![Role::Assistant]),
         ])
     }

@@ -1442,6 +1869,8 @@ impl Router for DeveloperRouter {
                     "list_windows" => this.list_windows(arguments).await,
                     "screen_capture" => this.screen_capture(arguments).await,
                     "image_processor" => this.image_processor(arguments).await,
+                    "list_checkpoints" => this.list_checkpoints(arguments).await,
+                    "restore_checkpoint" => this.restore_checkpoint(arguments).await,
                     _ => Err(ToolError::NotFound(format!("Tool {} not found", tool_name))),
                 }
             })
@@ -1500,6 +1929,8 @@ impl Clone for DeveloperRouter {
             file_history: Arc::clone(&self.file_history),
             ignore_patterns: Arc::clone(&self.ignore_patterns),
             editor_model: create_editor_model(), // Recreate the editor model since it's not Clone
+            checkpoint_dir: self.checkpoint_dir.clone(),
+            checkpoint_index: Arc::clone(&self.checkpoint_index),
         }
     }
 }
@@ -1930,6 +2361,8 @@ mod tests {
             file_history: Arc::new(Mutex::new(HashMap::new())),
             ignore_patterns: Arc::new(ignore_patterns),
             editor_model: None,
+            checkpoint_dir: DeveloperRouter::get_checkpoints_dir(),
+            checkpoint_index: Arc::new(Mutex::new(HashMap::new())),
         };

         // Test basic file matching
@@ -1981,6 +2414,8 @@ mod tests {
             file_history: Arc::new(Mutex::new(HashMap::new())),
             ignore_patterns: Arc::new(ignore_patterns),
             editor_model: None,
+            checkpoint_dir: DeveloperRouter::get_checkpoints_dir(),
+            checkpoint_index: Arc::new(Mutex::new(HashMap::new())),
         };

         // Try to write to an ignored file
@@ -2041,6 +2476,8 @@ mod tests {
             file_history: Arc::new(Mutex::new(HashMap::new())),
             ignore_patterns: Arc::new(ignore_patterns),
             editor_model: None,
+            checkpoint_dir: DeveloperRouter::get_checkpoints_dir(),
+            checkpoint_index: Arc::new(Mutex::new(HashMap::new())),
         };

         // Create an ignored file
diff --git a/crates/goose-server/src/routes/mod.rs b/crates/goose-server/src/routes/mod.rs
index c5e662ec2c16..b757baa0306d 100644
--- a/crates/goose-server/src/routes/mod.rs
+++ b/crates/goose-server/src/routes/mod.rs
@@ -5,6 +5,7 @@ pub mod config_management;
 pub mod context;
 pub mod extension;
 pub mod health;
+pub mod project;
 pub mod recipe;
 pub mod reply;
 pub mod schedule;
@@ -27,4 +28,5 @@ pub fn configure(state: Arc<AppState>) -> Router {
         .merge(recipe::routes(state.clone()))
         .merge(session::routes(state.clone()))
         .merge(schedule::routes(state.clone()))
+        .merge(project::routes(state.clone()))
 }
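The `goose://checkpoint` embedded resources emitted above carry a small JSON payload. A hypothetical deserialization sketch for a consumer of that payload — the struct and field names are inferred from the `json!` blocks in this diff, and `action` is only present on restores:

```rust
use serde::Deserialize;
use std::path::PathBuf;

// Hypothetical mirror of the payload built with json!() in developer/mod.rs.
#[derive(Debug, Deserialize)]
struct CheckpointPayload {
    file: PathBuf,
    checkpoint: PathBuf, // may be empty for a brand-new file
    timestamp: String,
    diff: String,
    action: Option<String>, // "restore" when emitted by restore_checkpoint
}

fn main() -> Result<(), serde_json::Error> {
    let raw = r#"{"file":"src/main.rs","checkpoint":"/tmp/ckpt/20240101T000000000/src/main.rs","timestamp":"20240101T000000000","diff":"--- a/src/main.rs\n+++ b/src/main.rs\n"}"#;
    let payload: CheckpointPayload = serde_json::from_str(raw)?;
    println!("{} changed at {}", payload.file.display(), payload.timestamp);
    Ok(())
}
```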
diff --git a/crates/goose-server/src/routes/project.rs b/crates/goose-server/src/routes/project.rs
new file mode 100644
index 000000000000..a83c1a0101e4
--- /dev/null
+++ b/crates/goose-server/src/routes/project.rs
@@ -0,0 +1,358 @@
+use super::utils::verify_secret_key;
+use std::sync::Arc;
+
+use crate::state::AppState;
+use axum::{
+    extract::{Path, State},
+    http::{HeaderMap, StatusCode},
+    routing::{delete, get, post, put},
+    Json, Router,
+};
+use goose::project::{Project, ProjectMetadata};
+use serde::{Deserialize, Serialize};
+use utoipa::ToSchema;
+
+#[derive(Deserialize, ToSchema)]
+#[serde(rename_all = "camelCase")]
+pub struct CreateProjectRequest {
+    /// Display name of the project
+    pub name: String,
+    /// Optional description of the project
+    pub description: Option<String>,
+    /// Default working directory for sessions in this project
+    #[schema(value_type = String)]
+    pub default_directory: std::path::PathBuf,
+}
+
+#[derive(Deserialize, ToSchema)]
+#[serde(rename_all = "camelCase")]
+pub struct UpdateProjectRequest {
+    /// Display name of the project
+    pub name: Option<String>,
+    /// Optional description of the project
+    pub description: Option<Option<String>>,
+    /// Default working directory for sessions in this project
+    #[schema(value_type = String)]
+    pub default_directory: Option<std::path::PathBuf>,
+}
+
+#[derive(Serialize, ToSchema)]
+#[serde(rename_all = "camelCase")]
+pub struct ProjectListResponse {
+    /// List of available project metadata objects
+    pub projects: Vec<ProjectMetadata>,
+}
+
+#[derive(Serialize, ToSchema)]
+#[serde(rename_all = "camelCase")]
+pub struct ProjectResponse {
+    /// Project details
+    pub project: Project,
+}
+
+#[utoipa::path(
+    get,
+    path = "/projects",
+    responses(
+        (status = 200, description = "List of available projects retrieved successfully", body = ProjectListResponse),
+        (status = 401, description = "Unauthorized - Invalid or missing API key"),
+        (status = 500, description = "Internal server error")
+    ),
+    security(
+        ("api_key" = [])
+    ),
+    tag = "Project Management"
+)]
+// List all available projects
+async fn list_projects(
+    State(state): State<Arc<AppState>>,
+    headers: HeaderMap,
+) -> Result<Json<ProjectListResponse>, StatusCode> {
+    verify_secret_key(&headers, &state)?;
+
+    let projects =
+        goose::project::list_projects().map_err(|_| StatusCode::INTERNAL_SERVER_ERROR)?;
+
+    Ok(Json(ProjectListResponse { projects }))
+}
+
+#[utoipa::path(
+    get,
+    path = "/projects/{project_id}",
+    params(
+        ("project_id" = String, Path, description = "Unique identifier for the project")
+    ),
+    responses(
+        (status = 200, description = "Project details retrieved successfully", body = ProjectResponse),
+        (status = 401, description = "Unauthorized - Invalid or missing API key"),
+        (status = 404, description = "Project not found"),
+        (status = 500, description = "Internal server error")
+    ),
+    security(
+        ("api_key" = [])
+    ),
+    tag = "Project Management"
+)]
+// Get a specific project's details
+async fn get_project_details(
+    State(state): State<Arc<AppState>>,
+    headers: HeaderMap,
+    Path(project_id): Path<String>,
+) -> Result<Json<ProjectResponse>, StatusCode> {
+    verify_secret_key(&headers, &state)?;
+
+    let project = goose::project::get_project(&project_id).map_err(|e| {
+        if e.to_string().contains("not found") {
+            StatusCode::NOT_FOUND
+        } else {
+            StatusCode::INTERNAL_SERVER_ERROR
+        }
+    })?;
+
+    Ok(Json(ProjectResponse { project }))
+}
+
+#[utoipa::path(
+    post,
+    path = "/projects",
+    request_body = CreateProjectRequest,
+    responses(
+        (status = 201, description = "Project created successfully", body = ProjectResponse),
+        (status = 401, description = "Unauthorized - Invalid or missing API key"),
+        (status = 400, description = "Invalid request - Bad input parameters"),
+        (status = 500, description = "Internal server error")
+    ),
+    security(
+        ("api_key" = [])
+    ),
+    tag = "Project Management"
+)]
+// Create a new project
+async fn create_project(
+    State(state): State<Arc<AppState>>,
+    headers: HeaderMap,
+    Json(create_req): Json<CreateProjectRequest>,
+) -> Result<Json<ProjectResponse>, StatusCode> {
+    verify_secret_key(&headers, &state)?;
+
+    // Validate input
+    if create_req.name.trim().is_empty() {
+        return Err(StatusCode::BAD_REQUEST);
+    }
+
+    let project = goose::project::create_project(
+        create_req.name,
+        create_req.description,
+        create_req.default_directory,
+    )
+    .map_err(|_| StatusCode::INTERNAL_SERVER_ERROR)?;
+
+    Ok(Json(ProjectResponse { project }))
+}
+
+#[utoipa::path(
+    put,
+    path = "/projects/{project_id}",
+    params(
+        ("project_id" = String, Path, description = "Unique identifier for the project")
+    ),
+    request_body = UpdateProjectRequest,
+    responses(
+        (status = 200, description = "Project updated successfully", body = ProjectResponse),
+        (status = 401, description = "Unauthorized - Invalid or missing API key"),
+        (status = 404, description = "Project not found"),
+        (status = 500, description = "Internal server error")
+    ),
+    security(
+        ("api_key" = [])
+    ),
+    tag = "Project Management"
+)]
+// Update a project
+async fn update_project(
+    State(state): State<Arc<AppState>>,
+    headers: HeaderMap,
+    Path(project_id): Path<String>,
+    Json(update_req): Json<UpdateProjectRequest>,
+) -> Result<Json<ProjectResponse>, StatusCode> {
+    verify_secret_key(&headers, &state)?;
+
+    let project = goose::project::update_project(
+        &project_id,
+        update_req.name,
+        update_req.description,
+        update_req.default_directory,
+    )
+    .map_err(|e| {
+        if e.to_string().contains("not found") {
+            StatusCode::NOT_FOUND
+        } else {
+            StatusCode::INTERNAL_SERVER_ERROR
+        }
+    })?;
+
+    Ok(Json(ProjectResponse { project }))
+}
+
+#[utoipa::path(
+    delete,
+    path = "/projects/{project_id}",
+    params(
+        ("project_id" = String, Path, description = "Unique identifier for the project")
+    ),
+    responses(
+        (status = 204, description = "Project deleted successfully"),
+        (status = 401, description = "Unauthorized - Invalid or missing API key"),
+        (status = 404, description = "Project not found"),
+        (status = 500, description = "Internal server error")
+    ),
+    security(
+        ("api_key" = [])
+    ),
+    tag = "Project Management"
+)]
+// Delete a project
+async fn delete_project(
+    State(state): State<Arc<AppState>>,
+    headers: HeaderMap,
+    Path(project_id): Path<String>,
+) -> Result<StatusCode, StatusCode> {
+    verify_secret_key(&headers, &state)?;
+
+    goose::project::delete_project(&project_id).map_err(|e| {
+        if e.to_string().contains("not found") {
+            StatusCode::NOT_FOUND
+        } else {
+            StatusCode::INTERNAL_SERVER_ERROR
+        }
+    })?;
+
+    Ok(StatusCode::NO_CONTENT)
+}
+
+#[utoipa::path(
+    post,
+    path = "/projects/{project_id}/sessions/{session_id}",
+    params(
+        ("project_id" = String, Path, description = "Unique identifier for the project"),
+        ("session_id" = String, Path, description = "Unique identifier for the session to add")
+    ),
+    responses(
+        (status = 204, description = "Session added to project successfully"),
+        (status = 401, description = "Unauthorized - Invalid or missing API key"),
+        (status = 404, description = "Project or session not found"),
+        (status = 500, description = "Internal server error")
+    ),
+    security(
+        ("api_key" = [])
+    ),
+    tag = "Project Management"
+)]
+// Add session to project
+async fn add_session_to_project(
+    State(state): State<Arc<AppState>>,
+    headers: HeaderMap,
+    Path((project_id, session_id)): Path<(String, String)>,
+) -> Result<StatusCode, StatusCode> {
+    verify_secret_key(&headers, &state)?;
+
+    // Add the session to project
+    goose::project::add_session_to_project(&project_id, &session_id).map_err(|e| {
+        if e.to_string().contains("not found") {
+            StatusCode::NOT_FOUND
+        } else {
+            StatusCode::INTERNAL_SERVER_ERROR
+        }
+    })?;
+
+    // Also update session metadata to include the project_id
+    let session_path =
+        goose::session::get_path(goose::session::Identifier::Name(session_id.clone()))
+            .map_err(|_| StatusCode::NOT_FOUND)?;
+    let mut metadata =
+        goose::session::read_metadata(&session_path).map_err(|_| StatusCode::NOT_FOUND)?;
+    metadata.project_id = Some(project_id);
+
+    tokio::task::spawn(async move {
+        if let Err(e) = goose::session::update_metadata(&session_path, &metadata).await {
+            tracing::error!("Failed to update session metadata: {}", e);
+        }
+    });
+
+    Ok(StatusCode::NO_CONTENT)
+}
+
+#[utoipa::path(
+    delete,
+    path = "/projects/{project_id}/sessions/{session_id}",
+    params(
+        ("project_id" = String, Path, description = "Unique identifier for the project"),
+        ("session_id" = String, Path, description = "Unique identifier for the session to remove")
+    ),
+    responses(
+        (status = 204, description = "Session removed from project successfully"),
+        (status = 401, description = "Unauthorized - Invalid or missing API key"),
+        (status = 404, description = "Project or session not found"),
+        (status = 500, description = "Internal server error")
+    ),
+    security(
+        ("api_key" = [])
+    ),
+    tag = "Project Management"
+)]
+// Remove session from project
+async fn remove_session_from_project(
+    State(state): State<Arc<AppState>>,
+    headers: HeaderMap,
+    Path((project_id, session_id)): Path<(String, String)>,
+) -> Result<StatusCode, StatusCode> {
+    verify_secret_key(&headers, &state)?;
+
+    // Remove from project
+    goose::project::remove_session_from_project(&project_id, &session_id).map_err(|e| {
+        if e.to_string().contains("not found") {
+            StatusCode::NOT_FOUND
+        } else {
+            StatusCode::INTERNAL_SERVER_ERROR
+        }
+    })?;
+
+    // Also update session metadata to remove the project_id
+    let session_path =
+        goose::session::get_path(goose::session::Identifier::Name(session_id.clone()))
+            .map_err(|_| StatusCode::NOT_FOUND)?;
+    let mut metadata =
+        goose::session::read_metadata(&session_path).map_err(|_| StatusCode::NOT_FOUND)?;
+
+    // Only update if this session was actually in this project
+    if metadata.project_id.as_deref() == Some(&project_id) {
+        metadata.project_id = None;
+
+        tokio::task::spawn(async move {
+            if let Err(e) = goose::session::update_metadata(&session_path, &metadata).await {
+                tracing::error!("Failed to update session metadata: {}", e);
+            }
+        });
+    }
+
+    Ok(StatusCode::NO_CONTENT)
+}
+
+// Configure routes for this module
+pub fn routes(state: Arc<AppState>) -> Router {
+    Router::new()
+        .route("/projects", get(list_projects))
+        .route("/projects", post(create_project))
+        .route("/projects/{project_id}", get(get_project_details))
+        .route("/projects/{project_id}", put(update_project))
+        .route("/projects/{project_id}", delete(delete_project))
+        .route(
+            "/projects/{project_id}/sessions/{session_id}",
+            post(add_session_to_project),
+        )
+        .route(
+            "/projects/{project_id}/sessions/{session_id}",
+            delete(remove_session_from_project),
+        )
+        .with_state(state)
+}
"/sessions/insights", + responses( + (status = 200, description = "Session insights retrieved successfully", body = SessionInsights), + (status = 401, description = "Unauthorized - Invalid or missing API key"), + (status = 500, description = "Internal server error") + ), + security( + ("api_key" = []) + ), + tag = "Session Management" +)] +async fn get_session_insights( + State(state): State>, + headers: HeaderMap, +) -> Result, StatusCode> { + info!("Received request for session insights"); + + verify_secret_key(&headers, &state)?; + + let sessions = get_valid_sorted_sessions(SortOrder::Descending).map_err(|e| { + error!("Failed to get session info: {:?}", e); + StatusCode::INTERNAL_SERVER_ERROR + })?; + + // Filter out sessions without descriptions + let sessions: Vec = sessions + .into_iter() + .filter(|session| !session.metadata.description.is_empty()) + .collect(); + + info!("Found {} sessions with descriptions", sessions.len()); + + // Calculate insights + let total_sessions = sessions.len(); + + // Track directory usage + let mut dir_counts: HashMap = HashMap::new(); + let mut total_duration = 0.0; + let mut total_tokens = 0; + let mut activity_by_date: HashMap = HashMap::new(); + + for session in &sessions { + // Track directory usage + let dir = session.metadata.working_dir.to_string_lossy().to_string(); + *dir_counts.entry(dir).or_insert(0) += 1; + + // Track tokens + if let Some(tokens) = session.metadata.accumulated_total_tokens { + total_tokens += tokens as i64; + } + + // Track activity by date + if let Ok(date) = DateTime::parse_from_str(&session.modified, "%Y-%m-%d %H:%M:%S UTC") { + let date_str = date.format("%Y-%m-%d").to_string(); + *activity_by_date.entry(date_str).or_insert(0) += 1; + } + + // Calculate session duration from messages + let session_path = session::get_path(session::Identifier::Name(session.id.clone())); + if let Ok(session_path) = session_path { + if let Ok(messages) = session::read_messages(&session_path) { + if let (Some(first), Some(last)) = (messages.first(), messages.last()) { + let duration = (last.created - first.created) as f64 / 60.0; // Convert to minutes + total_duration += duration; + } + } + } + } + + // Get top 3 most active directories + let mut dir_vec: Vec<(String, usize)> = dir_counts.into_iter().collect(); + dir_vec.sort_by(|a, b| b.1.cmp(&a.1)); + let most_active_dirs = dir_vec.into_iter().take(3).collect(); + + // Calculate average session duration + let avg_session_duration = if total_sessions > 0 { + total_duration / total_sessions as f64 + } else { + 0.0 + }; + + // Get last 7 days of activity + let mut activity_vec: Vec<(String, usize)> = activity_by_date.into_iter().collect(); + activity_vec.sort_by(|a, b| b.0.cmp(&a.0)); // Sort by date descending + let recent_activity = activity_vec.into_iter().take(7).collect(); + + let insights = SessionInsights { + total_sessions, + most_active_dirs, + avg_session_duration, + total_tokens, + recent_activity, + }; + + info!("Returning insights: {:?}", insights); + Ok(Json(insights)) +} + +#[utoipa::path( + get, + path = "/sessions/activity-heatmap", + responses( + (status = 200, description = "Activity heatmap data", body = [ActivityHeatmapCell]), + (status = 401, description = "Unauthorized - Invalid or missing API key"), + (status = 500, description = "Internal server error") + ), + security(("api_key" = [])), + tag = "Session Management" +)] +async fn get_activity_heatmap( + State(state): State>, + headers: HeaderMap, +) -> Result>, StatusCode> { + verify_secret_key(&headers, &state)?; 
+ + let sessions = get_valid_sorted_sessions(SortOrder::Descending) + .map_err(|_| StatusCode::INTERNAL_SERVER_ERROR)?; + + // Only sessions with a description + let sessions: Vec = sessions + .into_iter() + .filter(|session| !session.metadata.description.is_empty()) + .collect(); + + // Map: (week, day) -> count + let mut heatmap: std::collections::HashMap<(usize, usize), usize> = + std::collections::HashMap::new(); + + for session in &sessions { + if let Ok(date) = + chrono::NaiveDateTime::parse_from_str(&session.modified, "%Y-%m-%d %H:%M:%S UTC") + { + let date = date.date(); + let week = date.iso_week().week() as usize - 1; // 0-based week + let day = date.weekday().num_days_from_sunday() as usize; // 0=Sun, 6=Sat + *heatmap.entry((week, day)).or_insert(0) += 1; + } + } + + let mut result = Vec::new(); + for ((week, day), count) in heatmap { + result.push(ActivityHeatmapCell { week, day, count }); + } + + Ok(Json(result)) +} + // Configure routes for this module pub fn routes(state: Arc) -> Router { Router::new() .route("/sessions", get(list_sessions)) .route("/sessions/{session_id}", get(get_session_history)) + .route("/sessions/insights", get(get_session_insights)) + .route("/sessions/activity-heatmap", get(get_activity_heatmap)) .with_state(state) } diff --git a/crates/goose/src/lib.rs b/crates/goose/src/lib.rs index 83c4934d76fa..490825645ce0 100644 --- a/crates/goose/src/lib.rs +++ b/crates/goose/src/lib.rs @@ -4,6 +4,7 @@ pub mod context_mgmt; pub mod message; pub mod model; pub mod permission; +pub mod project; pub mod prompt_template; pub mod providers; pub mod recipe; diff --git a/crates/goose/src/message.rs b/crates/goose/src/message.rs index 87c4ae9a247d..b764775e3334 100644 --- a/crates/goose/src/message.rs +++ b/crates/goose/src/message.rs @@ -8,7 +8,7 @@ use std::collections::HashSet; /// The content of the messages uses MCP types to avoid additional conversions /// when interacting with MCP servers. 
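The heatmap handler above buckets sessions by ISO week and weekday. A standalone sketch of that bucketing with made-up dates — note `iso_week().week()` is 1-based, hence the `- 1`:

```rust
use chrono::{Datelike, NaiveDate};
use std::collections::HashMap;

fn main() {
    let dates = ["2024-01-01", "2024-01-02", "2024-01-08"];
    let mut heatmap: HashMap<(usize, usize), usize> = HashMap::new();

    for d in dates {
        let date = NaiveDate::parse_from_str(d, "%Y-%m-%d").unwrap();
        let week = date.iso_week().week() as usize - 1; // 0-based week of year
        let day = date.weekday().num_days_from_sunday() as usize; // 0 = Sunday
        *heatmap.entry((week, day)).or_insert(0) += 1;
    }

    for ((week, day), count) in &heatmap {
        println!("week {week}, day {day}: {count}");
    }
}
```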
diff --git a/crates/goose/src/message.rs b/crates/goose/src/message.rs
index 87c4ae9a247d..b764775e3334 100644
--- a/crates/goose/src/message.rs
+++ b/crates/goose/src/message.rs
@@ -8,7 +8,7 @@ use std::collections::HashSet;
 /// The content of the messages uses MCP types to avoid additional conversions
 /// when interacting with MCP servers.
 use chrono::Utc;
-use mcp_core::content::{Content, ImageContent, TextContent};
+use mcp_core::content::{Content, EmbeddedResource, ImageContent, TextContent};
 use mcp_core::handler::ToolResult;
 use mcp_core::prompt::{PromptMessage, PromptMessageContent, PromptMessageRole};
 use mcp_core::resource::ResourceContents;
@@ -102,6 +102,7 @@ pub struct SummarizationRequested {
 pub enum MessageContent {
     Text(TextContent),
     Image(ImageContent),
+    EmbeddedResource(EmbeddedResource),
     ToolRequest(ToolRequest),
     ToolResponse(ToolResponse),
     ToolConfirmationRequest(ToolConfirmationRequest),
@@ -238,6 +239,14 @@ impl MessageContent {
         }
     }

+    /// Get the embedded resource if this is an EmbeddedResource variant
+    pub fn as_embedded_resource(&self) -> Option<&EmbeddedResource> {
+        match self {
+            MessageContent::EmbeddedResource(resource) => Some(resource),
+            _ => None,
+        }
+    }
+
     /// Get the thinking content if this is a ThinkingContent variant
     pub fn as_thinking(&self) -> Option<&ThinkingContent> {
         match self {
@@ -260,10 +269,34 @@ impl From<Content> for MessageContent {
         match content {
             Content::Text(text) => MessageContent::Text(text),
             Content::Image(image) => MessageContent::Image(image),
-            Content::Resource(resource) => MessageContent::Text(TextContent {
-                text: resource.get_text(),
-                annotations: None,
-            }),
+            Content::Resource(resource) => {
+                // Check if this is a special embedded resource that should be preserved
+                match &resource.resource {
+                    ResourceContents::TextResourceContents {
+                        uri,
+                        text,
+                        mime_type: _,
+                    } => {
+                        // For special URIs like goose://checkpoint, preserve as resource
+                        if uri.starts_with("goose://") {
+                            MessageContent::EmbeddedResource(resource)
+                        } else {
+                            // For regular resources, convert to text as before
+                            MessageContent::Text(TextContent {
+                                text: text.clone(),
+                                annotations: resource.annotations,
+                            })
+                        }
+                    }
+                    _ => {
+                        // For non-text resources, convert to text as before
+                        MessageContent::Text(TextContent {
+                            text: resource.get_text(),
+                            annotations: resource.annotations,
+                        })
+                    }
+                }
+            }
         }
     }
 }
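A quick illustration of the routing rule above — `goose://` text resources survive as `EmbeddedResource`, everything else degrades to plain text. This sketch assumes `goose` and `mcp_core` are available as path dependencies and exercises the new `From<Content>` behavior directly:

```rust
use mcp_core::content::Content;

fn main() {
    // Built the same way the developer tools emit checkpoint payloads.
    let checkpoint = Content::embedded_text("goose://checkpoint", r#"{"diff":"..."}"#);
    let ordinary = Content::embedded_text("file:///tmp/notes.txt", "plain resource");

    for c in [checkpoint, ordinary] {
        // The first converts to EmbeddedResource, the second flattens to Text.
        let converted = goose::message::MessageContent::from(c);
        println!(
            "preserved as resource: {}",
            converted.as_embedded_resource().is_some()
        );
    }
}
```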
diff --git a/crates/goose/src/project/mod.rs b/crates/goose/src/project/mod.rs
new file mode 100644
index 000000000000..601b47df01c3
--- /dev/null
+++ b/crates/goose/src/project/mod.rs
@@ -0,0 +1,68 @@
+pub mod storage;
+
+use chrono::{DateTime, Utc};
+use serde::{Deserialize, Serialize};
+use std::path::PathBuf;
+use utoipa::ToSchema;
+
+/// Main project structure that holds project metadata and associated sessions
+#[derive(Debug, Clone, Serialize, Deserialize, ToSchema)]
+#[serde(rename_all = "camelCase")]
+pub struct Project {
+    /// Unique identifier for the project
+    pub id: String,
+    /// Display name of the project
+    pub name: String,
+    /// Optional description of the project
+    pub description: Option<String>,
+    /// Default working directory for sessions in this project
+    #[schema(value_type = String, example = "/home/user/projects/my-project")]
+    pub default_directory: PathBuf,
+    /// When the project was created
+    pub created_at: DateTime<Utc>,
+    /// When the project was last updated
+    pub updated_at: DateTime<Utc>,
+    /// List of session IDs associated with this project
+    pub session_ids: Vec<String>,
+}
+
+/// Simplified project metadata for listing
+#[derive(Debug, Clone, Serialize, Deserialize, ToSchema)]
+#[serde(rename_all = "camelCase")]
+pub struct ProjectMetadata {
+    /// Unique identifier for the project
+    pub id: String,
+    /// Display name of the project
+    pub name: String,
+    /// Optional description of the project
+    pub description: Option<String>,
+    /// Default working directory for sessions in this project
+    #[schema(value_type = String)]
+    pub default_directory: PathBuf,
+    /// Number of sessions in this project
+    pub session_count: usize,
+    /// When the project was created
+    pub created_at: DateTime<Utc>,
+    /// When the project was last updated
+    pub updated_at: DateTime<Utc>,
+}
+
+impl From<&Project> for ProjectMetadata {
+    fn from(project: &Project) -> Self {
+        ProjectMetadata {
+            id: project.id.clone(),
+            name: project.name.clone(),
+            description: project.description.clone(),
+            default_directory: project.default_directory.clone(),
+            session_count: project.session_ids.len(),
+            created_at: project.created_at,
+            updated_at: project.updated_at,
+        }
+    }
+}
+
+// Re-export storage functions
+pub use storage::{
+    add_session_to_project, create_project, delete_project, ensure_project_dir, get_project,
+    list_projects, remove_session_from_project, update_project,
+};
diff --git a/crates/goose/src/project/storage.rs b/crates/goose/src/project/storage.rs
new file mode 100644
index 000000000000..ef8e70dc2465
--- /dev/null
+++ b/crates/goose/src/project/storage.rs
@@ -0,0 +1,239 @@
+use crate::project::{Project, ProjectMetadata};
+use anyhow::{anyhow, Context, Result};
+use chrono::Utc;
+use etcetera::{choose_app_strategy, AppStrategy, AppStrategyArgs};
+use serde_json;
+use std::fs::{self, File};
+use std::io::Write;
+use std::path::PathBuf;
+use tracing::{error, info};
+
+const APP_NAME: &str = "goose";
+
+/// Ensure the project directory exists and return its path
+pub fn ensure_project_dir() -> Result<PathBuf> {
+    let app_strategy = AppStrategyArgs {
+        top_level_domain: "Block".to_string(),
+        author: "Block".to_string(),
+        app_name: APP_NAME.to_string(),
+    };
+
+    let data_dir = choose_app_strategy(app_strategy)
+        .context("goose requires a home dir")?
+        .data_dir()
+        .join("projects");
+
+    if !data_dir.exists() {
+        fs::create_dir_all(&data_dir)?;
+    }
+
+    Ok(data_dir)
+}
+
+/// Generate a unique project ID
+fn generate_project_id() -> String {
+    use rand::Rng;
+    let timestamp = Utc::now().timestamp();
+    let random: u32 = rand::thread_rng().gen();
+    format!("proj_{}_{}", timestamp, random)
+}
+
+/// Get the path for a specific project file
+fn get_project_path(project_id: &str) -> Result<PathBuf> {
+    let project_dir = ensure_project_dir()?;
+    Ok(project_dir.join(format!("{}.json", project_id)))
+}
+
+/// Create a new project
+pub fn create_project(
+    name: String,
+    description: Option<String>,
+    default_directory: PathBuf,
+) -> Result<Project> {
+    let project_dir = ensure_project_dir()?;
+
+    // Validate the default directory exists
+    if !default_directory.exists() {
+        return Err(anyhow!(
+            "Default directory does not exist: {:?}",
+            default_directory
+        ));
+    }
+
+    let now = Utc::now();
+    let project = Project {
+        id: generate_project_id(),
+        name,
+        description,
+        default_directory,
+        created_at: now,
+        updated_at: now,
+        session_ids: Vec::new(),
+    };
+
+    // Save the project
+    let project_path = project_dir.join(format!("{}.json", project.id));
+    let mut file = File::create(&project_path)?;
+    let json = serde_json::to_string_pretty(&project)?;
+    file.write_all(json.as_bytes())?;
+
+    info!("Created project {} at {:?}", project.id, project_path);
+    Ok(project)
+}
+
+/// Update an existing project
+pub fn update_project(
+    project_id: &str,
+    name: Option<String>,
+    description: Option<Option<String>>,
+    default_directory: Option<PathBuf>,
+) -> Result<Project> {
+    let project_path = get_project_path(project_id)?;
+
+    if !project_path.exists() {
+        return Err(anyhow!("Project not found: {}", project_id));
+    }
+
+    // Read existing project
+    let mut project: Project = serde_json::from_reader(File::open(&project_path)?)?;
+
+    // Update fields
+    if let Some(new_name) = name {
+        project.name = new_name;
+    }
+
+    if let Some(new_description) = description {
+        project.description = new_description;
+    }
+
+    if let Some(new_directory) = default_directory {
+        if !new_directory.exists() {
+            return Err(anyhow!(
+                "Default directory does not exist: {:?}",
+                new_directory
+            ));
+        }
+        project.default_directory = new_directory;
+    }
+
+    project.updated_at = Utc::now();
+
+    // Save updated project
+    let mut file = File::create(&project_path)?;
+    let json = serde_json::to_string_pretty(&project)?;
+    file.write_all(json.as_bytes())?;
+
+    info!("Updated project {}", project_id);
+    Ok(project)
+}
+
+/// Delete a project (does not delete associated sessions)
+pub fn delete_project(project_id: &str) -> Result<()> {
+    let project_path = get_project_path(project_id)?;
+
+    if !project_path.exists() {
+        return Err(anyhow!("Project not found: {}", project_id));
+    }
+
+    fs::remove_file(&project_path)?;
+    info!("Deleted project {}", project_id);
+    Ok(())
+}
+
+/// List all projects
+pub fn list_projects() -> Result<Vec<ProjectMetadata>> {
+    let project_dir = ensure_project_dir()?;
+    let mut projects = Vec::new();
+
+    if let Ok(entries) = fs::read_dir(&project_dir) {
+        for entry in entries.flatten() {
+            let path = entry.path();
+            if path.extension().and_then(|s| s.to_str()) == Some("json") {
+                match serde_json::from_reader::<_, Project>(File::open(&path)?) {
+                    Ok(project) => {
+                        projects.push(ProjectMetadata::from(&project));
+                    }
+                    Err(e) => {
+                        error!("Failed to read project file {:?}: {}", path, e);
+                    }
+                }
+            }
+        }
+    }
+
+    // Sort by updated_at descending
+    projects.sort_by(|a, b| b.updated_at.cmp(&a.updated_at));
+
+    Ok(projects)
+}
+
+/// Get a specific project
+pub fn get_project(project_id: &str) -> Result<Project> {
+    let project_path = get_project_path(project_id)?;
+
+    if !project_path.exists() {
+        return Err(anyhow!("Project not found: {}", project_id));
+    }
+
+    let project: Project = serde_json::from_reader(File::open(&project_path)?)?;
+    Ok(project)
+}
+
+/// Add a session to a project
+pub fn add_session_to_project(project_id: &str, session_id: &str) -> Result<()> {
+    let project_path = get_project_path(project_id)?;
+
+    if !project_path.exists() {
+        return Err(anyhow!("Project not found: {}", project_id));
+    }
+
+    // Read project
+    let mut project: Project = serde_json::from_reader(File::open(&project_path)?)?;
+
+    // Check if session already exists in project
+    if project.session_ids.contains(&session_id.to_string()) {
+        return Ok(()); // Already added
+    }
+
+    // Add session and update timestamp
+    project.session_ids.push(session_id.to_string());
+    project.updated_at = Utc::now();
+
+    // Save updated project
+    let mut file = File::create(&project_path)?;
+    let json = serde_json::to_string_pretty(&project)?;
+    file.write_all(json.as_bytes())?;
+
+    info!("Added session {} to project {}", session_id, project_id);
+    Ok(())
+}
+
+/// Remove a session from a project
+pub fn remove_session_from_project(project_id: &str, session_id: &str) -> Result<()> {
+    let project_path = get_project_path(project_id)?;
+
+    if !project_path.exists() {
+        return Err(anyhow!("Project not found: {}", project_id));
+    }
+
+    // Read project
+    let mut project: Project = serde_json::from_reader(File::open(&project_path)?)?;
+
+    // Remove session
+    let original_len = project.session_ids.len();
+    project.session_ids.retain(|id| id != session_id);
+
+    if project.session_ids.len() == original_len {
+        return Ok(()); // Session wasn't in project
+    }
+
+    project.updated_at = Utc::now();
+
+    // Save updated project
+    let mut file = File::create(&project_path)?;
+    let json = serde_json::to_string_pretty(&project)?;
+    file.write_all(json.as_bytes())?;
+
+    info!("Removed session {} from project {}", session_id, project_id);
+    Ok(())
+}
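Each project is persisted as a standalone pretty-printed JSON file under the app data dir. A round-trip sketch against the storage helpers re-exported from `goose::project` above — it assumes the goose data dir is writable, and the session id string is purely illustrative:

```rust
use goose::project;

fn main() -> anyhow::Result<()> {
    // create_project validates that the default directory exists.
    let created = project::create_project(
        "demo".to_string(),
        Some("scratch project".to_string()),
        "/tmp".into(),
    )?;

    // list_projects returns ProjectMetadata sorted by updated_at descending.
    for meta in project::list_projects()? {
        println!("{} ({} sessions)", meta.name, meta.session_count);
    }

    project::add_session_to_project(&created.id, "20240101_120000")?; // hypothetical session id
    project::delete_project(&created.id)?; // sessions themselves are untouched
    Ok(())
}
```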
diff --git a/crates/goose/src/providers/formats/anthropic.rs b/crates/goose/src/providers/formats/anthropic.rs
index 661d2bce5156..b405a919145c 100644
--- a/crates/goose/src/providers/formats/anthropic.rs
+++ b/crates/goose/src/providers/formats/anthropic.rs
@@ -124,6 +124,10 @@ pub fn format_messages(messages: &[Message]) -> Vec<Value> {
                 }));
             }
         }
+        MessageContent::EmbeddedResource(_) => {
+            // Skip embedded resources - they are for UI purposes only
+            continue;
+        }
     }
 }
diff --git a/crates/goose/src/providers/formats/bedrock.rs b/crates/goose/src/providers/formats/bedrock.rs
index 29b3491585de..9898222de6e7 100644
--- a/crates/goose/src/providers/formats/bedrock.rs
+++ b/crates/goose/src/providers/formats/bedrock.rs
@@ -105,6 +105,10 @@ pub fn to_bedrock_message_content(content: &MessageContent) -> Result<bedrock::ContentBlock> {
+        MessageContent::EmbeddedResource(_) => {
+            // Skip embedded resources - they are for UI purposes only
+            bedrock::ContentBlock::Text("".to_string())
+        }
     })
 }
diff --git a/crates/goose/src/providers/formats/databricks.rs b/crates/goose/src/providers/formats/databricks.rs
index c74c4cbbe033..ab658df8c84c 100644
--- a/crates/goose/src/providers/formats/databricks.rs
+++ b/crates/goose/src/providers/formats/databricks.rs
@@ -215,6 +215,10 @@ pub fn format_messages(messages: &[Message], image_format: &ImageFormat) -> Vec<
                 }));
             }
         }
+        MessageContent::EmbeddedResource(_) => {
+            // Skip embedded resources - they are for UI purposes only
+            continue;
+        }
     }
 }
diff --git a/crates/goose/src/providers/formats/google.rs b/crates/goose/src/providers/formats/google.rs
index 47b774df5a65..8ede502704f1 100644
--- a/crates/goose/src/providers/formats/google.rs
+++ b/crates/goose/src/providers/formats/google.rs
@@ -111,6 +111,10 @@ pub fn format_messages(messages: &[Message]) -> Vec<Value> {
             }
         }
+        MessageContent::EmbeddedResource(_) => {
+            // Skip embedded resources - they are for UI purposes only
+        }
+        _ => {}
     }
 }
diff --git a/crates/goose/src/providers/formats/openai.rs b/crates/goose/src/providers/formats/openai.rs
index 402e09cfee99..b82691c9abc6 100644
--- a/crates/goose/src/providers/formats/openai.rs
+++ b/crates/goose/src/providers/formats/openai.rs
@@ -183,6 +183,10 @@ pub fn format_messages(messages: &[Message], image_format: &ImageFormat) -> Vec<
                 }));
             }
         },
+        MessageContent::EmbeddedResource(_) => {
+            // Skip embedded resources - they are for UI purposes only
+            continue;
+        }
     }
 }
diff --git a/crates/goose/src/providers/formats/snowflake.rs b/crates/goose/src/providers/formats/snowflake.rs
index c7cfe5924bb4..abe47dd7bea5 100644
--- a/crates/goose/src/providers/formats/snowflake.rs
+++ b/crates/goose/src/providers/formats/snowflake.rs
@@ -73,6 +73,10 @@ pub fn format_messages(messages: &[Message]) -> Vec<Value> {
         MessageContent::FrontendToolRequest(_tool_request) => {
             // Skip frontend tool requests
         }
+        MessageContent::EmbeddedResource(_) => {
+            // Skip embedded resources - they are for UI purposes only
+            continue;
+        }
     }
 }
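The six provider formats above all apply the same rule: `EmbeddedResource` content is UI-only and must never reach a provider payload. A simplified sketch of the shared pattern, where `payload` and `render` are illustrative stand-ins for each provider's accumulator and conversion, not names from the crate:

    // Simplified shape of the format_messages() loops after this change.
    for content in &message.content {
        match content {
            MessageContent::EmbeddedResource(_) => {
                // UI-only content: contribute nothing to the provider request.
                continue;
            }
            other => payload.push(render(other)),
        }
    }

Bedrock is the one exception: its converter must return a content block for every item, so it substitutes an empty text block instead of using `continue`.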
diff --git a/crates/goose/src/scheduler.rs b/crates/goose/src/scheduler.rs
index 64648722545c..95b4e75291c5 100644
--- a/crates/goose/src/scheduler.rs
+++ b/crates/goose/src/scheduler.rs
@@ -1267,6 +1267,7 @@ async fn run_scheduled_job_internal(
             working_dir: current_dir.clone(),
             description: String::new(),
             schedule_id: Some(job.id.clone()),
+            project_id: None,
             message_count: all_session_messages.len(),
             total_tokens: None,
             input_tokens: None,
diff --git a/crates/goose/src/session/storage.rs b/crates/goose/src/session/storage.rs
index b176511f300b..3f321597dd95 100644
--- a/crates/goose/src/session/storage.rs
+++ b/crates/goose/src/session/storage.rs
@@ -40,6 +40,8 @@ pub struct SessionMetadata {
     pub description: String,
     /// ID of the schedule that triggered this session, if any
     pub schedule_id: Option<String>,
+    /// ID of the project this session belongs to, if any
+    pub project_id: Option<String>,
     /// Number of messages in the session
     pub message_count: usize,
     /// The total number of tokens used in the session. Retrieved from the provider's last usage.
@@ -67,6 +69,7 @@ impl<'de> Deserialize<'de> for SessionMetadata {
             description: String,
             message_count: usize,
             schedule_id: Option<String>, // For backward compatibility
+            project_id: Option<String>, // For backward compatibility
             total_tokens: Option<i32>,
             input_tokens: Option<i32>,
             output_tokens: Option<i32>,
@@ -88,6 +91,7 @@ impl<'de> Deserialize<'de> for SessionMetadata {
             description: helper.description,
             message_count: helper.message_count,
             schedule_id: helper.schedule_id,
+            project_id: helper.project_id,
             total_tokens: helper.total_tokens,
             input_tokens: helper.input_tokens,
             output_tokens: helper.output_tokens,
@@ -112,6 +116,7 @@ impl SessionMetadata {
             working_dir,
             description: String::new(),
             schedule_id: None,
+            project_id: None,
             message_count: 0,
             total_tokens: None,
             input_tokens: None,
@@ -125,7 +130,7 @@ impl SessionMetadata {
 
 impl Default for SessionMetadata {
     fn default() -> Self {
-        Self::new(get_home_dir())
+        Self::new(std::env::current_dir().unwrap_or_else(|_| get_home_dir()))
     }
 }
diff --git a/crates/goose/tests/test_support.rs b/crates/goose/tests/test_support.rs
index cfea855b1085..a2a3a2e5f6e8 100644
--- a/crates/goose/tests/test_support.rs
+++ b/crates/goose/tests/test_support.rs
@@ -32,6 +32,7 @@ pub struct ConfigurableMockScheduler {
     sessions_data: Arc>>>,
 }
 
+#[allow(dead_code)]
 impl ConfigurableMockScheduler {
     pub fn new() -> Self {
         Self {
@@ -404,6 +405,7 @@ pub fn create_test_session_metadata(message_count: usize, working_dir: &str) ->
         working_dir: PathBuf::from(working_dir),
         description: "Test session".to_string(),
         schedule_id: Some("test_job".to_string()),
+        project_id: None,
        total_tokens: Some(100),
         input_tokens: Some(50),
         output_tokens: Some(50),
diff --git a/ui/desktop/.goosehints b/ui/desktop/.goosehints
index ab102a4c257a..08f8e40a4492 100644
--- a/ui/desktop/.goosehints
+++ b/ui/desktop/.goosehints
@@ -52,7 +52,7 @@ The Goose Desktop App is an Electron application built with TypeScript, React, a
 2. Create a main component file (e.g., `YourFeatureView.tsx`)
 3. Add your view type to the `View` type in `App.tsx`
 4. Import and add your component to the render section in `App.tsx`
-5. Add navigation to your view from other components (e.g., adding a button in `BottomMenu.tsx` or `MoreMenu.tsx`)
+5. Add navigation to your view from other components (e.g., adding a new route or button in `App.tsx`)
 
 ## State Management
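The custom `Deserialize` impl in storage.rs above is what keeps old session files readable: `project_id` is optional, and serde maps an absent key to `None` for `Option` fields, so metadata written before this change loads unchanged. A self-contained sketch of that behavior, with a trimmed-down, illustrative field set rather than the real struct:

    use serde::Deserialize;

    #[derive(Deserialize, Debug)]
    struct MetadataSketch {
        description: String,
        // Absent in files written before this change -> deserializes to None.
        project_id: Option<String>,
    }

    fn main() {
        // An "old" session header without project_id still parses.
        let old_file = r#"{ "description": "pre-project session" }"#;
        let meta: MetadataSketch = serde_json::from_str(old_file).unwrap();
        assert_eq!(meta.project_id, None);
    }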
diff --git a/ui/desktop/forge.config.ts b/ui/desktop/forge.config.ts
index c32ce48f2f07..827e124ff52e 100644
--- a/ui/desktop/forge.config.ts
+++ b/ui/desktop/forge.config.ts
@@ -12,14 +12,14 @@ let cfg = {
     certificateFile: process.env.WINDOWS_CERTIFICATE_FILE,
     signingRole: process.env.WINDOW_SIGNING_ROLE,
     rfc3161TimeStampServer: 'http://timestamp.digicert.com',
-    signWithParams: '/fd sha256 /tr http://timestamp.digicert.com /td sha256'
+    signWithParams: '/fd sha256 /tr http://timestamp.digicert.com /td sha256',
   },
   // Protocol registration
   protocols: [
     {
-      name: "GooseProtocol",
-      schemes: ["goose"]
-    }
+      name: 'GooseProtocol',
+      schemes: ['goose'],
+    },
   ],
   // macOS Info.plist extensions for drag-and-drop support
   extendInfo: {
@@ -44,9 +44,9 @@ let cfg = {
   osxNotarize: {
     appleId: process.env['APPLE_ID'],
     appleIdPassword: process.env['APPLE_ID_PASSWORD'],
-    teamId: process.env['APPLE_TEAM_ID']
+    teamId: process.env['APPLE_TEAM_ID'],
   },
-}
+};
 
 if (process.env['APPLE_ID'] === undefined) {
   delete cfg.osxNotarize;
@@ -62,12 +62,12 @@ module.exports = {
       config: {
         repository: {
           owner: 'block',
-          name: 'goose'
+          name: 'goose',
         },
         prerelease: false,
-        draft: true
-      }
-    }
+        draft: true,
+      },
+    },
   ],
   makers: [
     {
@@ -76,22 +76,22 @@ module.exports = {
       config: {
         arch: process.env.ELECTRON_ARCH === 'x64' ? ['x64'] : ['arm64'],
         options: {
-          icon: 'src/images/icon.ico'
-        }
-      }
+          icon: 'src/images/icon.ico',
+        },
+      },
     },
     {
       name: '@electron-forge/maker-deb',
       config: {
         name: 'Goose',
-        bin: 'Goose'
+        bin: 'Goose',
       },
     },
     {
       name: '@electron-forge/maker-rpm',
       config: {
         name: 'Goose',
-        bin: 'Goose'
+        bin: 'Goose',
       },
     },
   ],
@@ -102,17 +102,17 @@ module.exports = {
     build: [
       {
         entry: 'src/main.ts',
-        config: 'vite.main.config.ts',
+        config: 'vite.main.config.mts',
       },
       {
         entry: 'src/preload.ts',
-        config: 'vite.preload.config.ts',
+        config: 'vite.preload.config.mts',
      },
     ],
     renderer: [
       {
         name: 'main_window',
-        config: 'vite.renderer.config.ts',
+        config: 'vite.renderer.config.mts',
       },
     ],
   },
diff --git a/ui/desktop/index.html b/ui/desktop/index.html
index 564e1f0e60e3..3f8c17512877 100644
--- a/ui/desktop/index.html
+++ b/ui/desktop/index.html
@@ -2,6 +2,7 @@
+
     Goose