diff --git a/.github/actions/setup-project/action.yml b/.github/actions/setup-project/action.yml
index 0fcd1e4f3..6a29f8e76 100644
--- a/.github/actions/setup-project/action.yml
+++ b/.github/actions/setup-project/action.yml
@@ -25,17 +25,24 @@ runs:
         cache: 'npm'
         cache-dependency-path: package-lock.json

-    - name: Check for SSH URLs in lockfile
-      if: inputs.check-lockfile == 'true'
-      shell: bash
-      run: npm run lint:lockfile
-
     - name: Configure Git for HTTPS
       shell: bash
       # Convert SSH URLs to HTTPS for git dependencies (e.g., @electron/node-gyp)
      # This is needed because SSH authentication isn't available in CI
       run: git config --global url."https://github.com/".insteadOf "git@github.com:"

+    - name: Auto-fix SSH URLs in lockfile
+      if: inputs.check-lockfile == 'true'
+      shell: bash
+      # Auto-fix any git+ssh:// URLs in package-lock.json before linting
+      # This handles cases where npm reintroduces SSH URLs for git dependencies
+      run: node scripts/fix-lockfile-urls.mjs
+
+    - name: Check for SSH URLs in lockfile
+      if: inputs.check-lockfile == 'true'
+      shell: bash
+      run: npm run lint:lockfile
+
     - name: Install dependencies
       shell: bash
       # Use npm install instead of npm ci to correctly resolve platform-specific
diff --git a/.husky/pre-commit b/.husky/pre-commit
index f61fd35bd..4c156c168 100755
--- a/.husky/pre-commit
+++ b/.husky/pre-commit
@@ -38,6 +38,18 @@ else
   export PATH="$PATH:/usr/local/bin:/opt/homebrew/bin:/usr/bin"
 fi

+# Auto-fix git+ssh:// URLs in package-lock.json if it's being committed
+# This prevents CI failures from SSH URLs that npm introduces for git dependencies
+if git diff --cached --name-only | grep -q "^package-lock.json$"; then
+  if command -v node >/dev/null 2>&1; then
+    if grep -q "git+ssh://" package-lock.json 2>/dev/null; then
+      echo "Fixing git+ssh:// URLs in package-lock.json..."
+      node scripts/fix-lockfile-urls.mjs
+      git add package-lock.json
+    fi
+  fi
+fi
+
 # Run lint-staged - works with or without nvm
 # Prefer npx, fallback to npm exec, both work with system-installed Node.js
 if command -v npx >/dev/null 2>&1; then
diff --git a/apps/server/src/index.ts b/apps/server/src/index.ts
index a7ad979d2..0acea6c9b 100644
--- a/apps/server/src/index.ts
+++ b/apps/server/src/index.ts
@@ -66,6 +66,10 @@ import { createCodexRoutes } from './routes/codex/index.js';
 import { CodexUsageService } from './services/codex-usage-service.js';
 import { CodexAppServerService } from './services/codex-app-server-service.js';
 import { CodexModelCacheService } from './services/codex-model-cache-service.js';
+import { createZaiRoutes } from './routes/zai/index.js';
+import { ZaiUsageService } from './services/zai-usage-service.js';
+import { createGeminiRoutes } from './routes/gemini/index.js';
+import { GeminiUsageService } from './services/gemini-usage-service.js';
 import { createGitHubRoutes } from './routes/github/index.js';
 import { createContextRoutes } from './routes/context/index.js';
 import { createBacklogPlanRoutes } from './routes/backlog-plan/index.js';
@@ -300,7 +304,7 @@ app.use(
           callback(null, origin);
           return;
         }
-      } catch (err) {
+      } catch {
         // Ignore URL parsing errors
       }

@@ -328,6 +332,8 @@ const claudeUsageService = new ClaudeUsageService();
 const codexAppServerService = new CodexAppServerService();
 const codexModelCacheService = new CodexModelCacheService(DATA_DIR, codexAppServerService);
 const codexUsageService = new CodexUsageService(codexAppServerService);
+const zaiUsageService = new ZaiUsageService();
+const geminiUsageService = new GeminiUsageService();
 const mcpTestService = new MCPTestService(settingsService);
 const ideationService = new IdeationService(events, settingsService, featureLoader);

@@ -372,7 +378,7 @@ eventHookService.initialize(events, settingsService, eventHistoryService, featur
   let globalSettings: Awaited<ReturnType<typeof settingsService.getGlobalSettings>> | null = null;
   try {
     globalSettings = await settingsService.getGlobalSettings();
-  } catch (err) {
+  } catch {
     logger.warn('Failed to load global settings, using defaults');
   }

@@ -390,7 +396,7 @@ eventHookService.initialize(events, settingsService, eventHistoryService, featur
       const enableRequestLog = globalSettings.enableRequestLogging ?? true;
       setRequestLoggingEnabled(enableRequestLog);
       logger.info(`HTTP request logging: ${enableRequestLog ? 'enabled' : 'disabled'}`);
-    } catch (err) {
+    } catch {
       logger.warn('Failed to apply logging settings, using defaults');
     }
   }

@@ -417,6 +423,22 @@ eventHookService.initialize(events, settingsService, eventHistoryService, featur
     } else {
       logger.info('[STARTUP] Feature state reconciliation complete - no stale states found');
     }
+
+    // Resume interrupted features in the background after reconciliation.
+    // This uses the saved execution state to identify features that were running
+    // before the restart (their statuses have been reset to ready/backlog by
+    // reconciliation above). Running in background so it doesn't block startup.
+    if (totalReconciled > 0) {
+      for (const project of globalSettings.projects) {
+        autoModeService.resumeInterruptedFeatures(project.path).catch((err) => {
+          logger.warn(
+            `[STARTUP] Failed to resume interrupted features for ${project.path}:`,
+            err
+          );
+        });
+      }
+      logger.info('[STARTUP] Initiated background resume of interrupted features');
+    }
   }
 } catch (err) {
   logger.warn('[STARTUP] Failed to reconcile feature states:', err);
@@ -473,6 +495,8 @@ app.use('/api/terminal', createTerminalRoutes());
 app.use('/api/settings', createSettingsRoutes(settingsService));
 app.use('/api/claude', createClaudeRoutes(claudeUsageService));
 app.use('/api/codex', createCodexRoutes(codexUsageService, codexModelCacheService));
+app.use('/api/zai', createZaiRoutes(zaiUsageService, settingsService));
+app.use('/api/gemini', createGeminiRoutes(geminiUsageService, events));
 app.use('/api/github', createGitHubRoutes(events, settingsService));
 app.use('/api/context', createContextRoutes(settingsService));
 app.use('/api/backlog-plan', createBacklogPlanRoutes(events, settingsService));
@@ -575,7 +599,7 @@ wss.on('connection', (ws: WebSocket) => {
         logger.info('Sending event to client:', {
           type,
           messageLength: message.length,
-          sessionId: (payload as any)?.sessionId,
+          sessionId: (payload as Record<string, unknown>)?.sessionId,
         });
         ws.send(message);
       } else {
@@ -641,8 +665,15 @@ terminalWss.on('connection', (ws: WebSocket, req: import('http').IncomingMessage
   // Check if session exists
   const session = terminalService.getSession(sessionId);
   if (!session) {
-    logger.info(`Session ${sessionId} not found`);
-    ws.close(4004, 'Session not found');
+    logger.warn(
+      `Terminal session ${sessionId} not found. ` +
+        `The session may have exited, been deleted, or was never created. ` +
+        `Active terminal sessions: ${terminalService.getSessionCount()}`
+    );
+    ws.close(
+      4004,
+      'Session not found. The terminal session may have expired or been closed. Please create a new terminal.'
+    );
     return;
   }

diff --git a/apps/server/src/lib/cli-detection.ts b/apps/server/src/lib/cli-detection.ts
index eba4c68a7..a7b5b14db 100644
--- a/apps/server/src/lib/cli-detection.ts
+++ b/apps/server/src/lib/cli-detection.ts
@@ -8,9 +8,6 @@ import { spawn, execSync } from 'child_process';
 import * as fs from 'fs';
 import * as path from 'path';
 import * as os from 'os';
-import { createLogger } from '@automaker/utils';
-
-const logger = createLogger('CliDetection');

 export interface CliInfo {
   name: string;
@@ -86,7 +83,7 @@ export async function detectCli(
   options: CliDetectionOptions = {}
 ): Promise<CliInfo> {
   const config = CLI_CONFIGS[provider];
-  const { timeout = 5000, includeWsl = false, wslDistribution } = options;
+  const { timeout = 5000 } = options;

   const issues: string[] = [];
   const cliInfo: CliInfo = {
diff --git a/apps/server/src/lib/error-handler.ts b/apps/server/src/lib/error-handler.ts
index 770f26a23..d67200984 100644
--- a/apps/server/src/lib/error-handler.ts
+++ b/apps/server/src/lib/error-handler.ts
@@ -40,7 +40,7 @@ export interface ErrorClassification {
   suggestedAction?: string;
   retryable: boolean;
   provider?: string;
-  context?: Record<string, any>;
+  context?: Record<string, unknown>;
 }

 export interface ErrorPattern {
@@ -180,7 +180,7 @@ const ERROR_PATTERNS: ErrorPattern[] = [
 export function classifyError(
   error: unknown,
   provider?: string,
-  context?: Record<string, any>
+  context?: Record<string, unknown>
 ): ErrorClassification {
   const errorText = getErrorText(error);

@@ -281,18 +281,19 @@ function getErrorText(error: unknown): string {

   if (typeof error === 'object' && error !== null) {
     // Handle structured error objects
-    const errorObj = error as any;
+    const errorObj = error as Record<string, unknown>;

-    if (errorObj.message) {
+    if (typeof errorObj.message === 'string') {
       return errorObj.message;
     }

-    if (errorObj.error?.message) {
-      return errorObj.error.message;
+    const nestedError = errorObj.error;
+    if (typeof nestedError === 'object' && nestedError !== null && 'message' in nestedError) {
+      return String((nestedError as Record<string, unknown>).message);
     }

-    if (errorObj.error) {
-      return typeof errorObj.error === 'string' ? errorObj.error : JSON.stringify(errorObj.error);
+    if (nestedError) {
+      return typeof nestedError === 'string' ? nestedError : JSON.stringify(nestedError);
     }

     return JSON.stringify(error);
@@ -307,7 +308,7 @@ function getErrorText(error: unknown): string {
 export function createErrorResponse(
   error: unknown,
   provider?: string,
-  context?: Record<string, any>
+  context?: Record<string, unknown>
 ): {
   success: false;
   error: string;
@@ -335,7 +336,7 @@ export function logError(
   error: unknown,
   provider?: string,
   operation?: string,
-  additionalContext?: Record<string, any>
+  additionalContext?: Record<string, unknown>
 ): void {
   const classification = classifyError(error, provider, {
     operation,
diff --git a/apps/server/src/lib/git-log-parser.ts b/apps/server/src/lib/git-log-parser.ts
new file mode 100644
index 000000000..85b0cb58c
--- /dev/null
+++ b/apps/server/src/lib/git-log-parser.ts
@@ -0,0 +1,62 @@
+export interface CommitFields {
+  hash: string;
+  shortHash: string;
+  author: string;
+  authorEmail: string;
+  date: string;
+  subject: string;
+  body: string;
+}
+
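+/**
+ * Parse NUL-separated `git log` output into structured commit fields.
+ *
+ * Assumes one field per line in the order hash, shortHash, author,
+ * authorEmail, date, subject, followed by a free-form body, with commits
+ * separated by NUL (e.g. output produced by a format string along the
+ * lines of `--pretty=format:'%H%n%h%n%an%n%ae%n%aI%n%s%n%b%x00'`;
+ * that format string is illustrative, the exact one lives at the call site).
+ */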
+export function parseGitLogOutput(output: string): CommitFields[] {
+  const commits: CommitFields[] = [];
+
+  // Split by NUL character to separate commits
+  const commitBlocks = output.split('\0').filter((block) => block.trim());
+
+  for (const block of commitBlocks) {
+    const allLines = block.split('\n');
+
+    // Skip leading empty lines that may appear at block boundaries
+    let startIndex = 0;
+    while (startIndex < allLines.length && allLines[startIndex].trim() === '') {
+      startIndex++;
+    }
+    const fields = allLines.slice(startIndex);
+
+    // Validate we have all expected fields (at least hash, shortHash, author, authorEmail, date, subject)
+    if (fields.length < 6) {
+      continue; // Skip malformed blocks
+    }
+
+    const commit: CommitFields = {
+      hash: fields[0].trim(),
+      shortHash: fields[1].trim(),
+      author: fields[2].trim(),
+      authorEmail: fields[3].trim(),
+      date: fields[4].trim(),
+      subject: fields[5].trim(),
+      body: fields.slice(6).join('\n').trim(),
+    };
+
+    commits.push(commit);
+  }
+
+  return commits;
+}
+
+/**
+ * Creates a commit object from parsed fields, matching the expected API response format
+ */
+export function createCommitFromFields(fields: CommitFields, files?: string[]) {
+  return {
+    hash: fields.hash,
+    shortHash: fields.shortHash,
+    author: fields.author,
+    authorEmail: fields.authorEmail,
+    date: fields.date,
+    subject: fields.subject,
+    body: fields.body,
+    files: files || [],
+  };
+}
diff --git a/apps/server/src/lib/git.ts b/apps/server/src/lib/git.ts
new file mode 100644
index 000000000..697d532df
--- /dev/null
+++ b/apps/server/src/lib/git.ts
@@ -0,0 +1,208 @@
+/**
+ * Shared git command execution utilities.
+ *
+ * This module provides the canonical `execGitCommand` helper and common
+ * git utilities used across services and routes. All consumers should
+ * import from here rather than defining their own copy.
+ */
+
+import fs from 'fs/promises';
+import path from 'path';
+import { spawnProcess } from '@automaker/platform';
+import { createLogger } from '@automaker/utils';
+
+const logger = createLogger('GitLib');
+
+// ============================================================================
+// Secure Command Execution
+// ============================================================================
+
+/**
+ * Execute git command with array arguments to prevent command injection.
+ * Uses spawnProcess from @automaker/platform for secure, cross-platform execution.
+ *
+ * @param args - Array of git command arguments (e.g., ['worktree', 'add', path])
+ * @param cwd - Working directory to execute the command in
+ * @param env - Optional additional environment variables to pass to the git process.
+ *   These are merged on top of the current process environment. Pass
+ *   `{ LC_ALL: 'C' }` to force git to emit English output regardless of the
+ *   system locale so that text-based output parsing remains reliable.
+ * @param abortController - Optional AbortController to cancel the git process.
+ *   When the controller is aborted the underlying process is sent SIGTERM and
+ *   the returned promise rejects with an Error whose message is 'Process aborted'.
+ * @returns Promise resolving to stdout output
+ * @throws Error with stderr/stdout message if command fails. The thrown error
+ *   also has `stdout` and `stderr` string properties for structured access.
+ *
+ * @example
+ * ```typescript
+ * // Safe: no injection possible
+ * await execGitCommand(['branch', '-D', branchName], projectPath);
+ *
+ * // Force English output for reliable text parsing:
+ * await execGitCommand(['rebase', '--', 'main'], worktreePath, { LC_ALL: 'C' });
+ *
+ * // With a process-level timeout:
+ * const controller = new AbortController();
+ * const timerId = setTimeout(() => controller.abort(), 30_000);
+ * try {
+ *   await execGitCommand(['fetch', '--all', '--quiet'], cwd, undefined, controller);
+ * } finally {
+ *   clearTimeout(timerId);
+ * }
+ *
+ * // Instead of unsafe:
+ * // await execAsync(`git branch -D ${branchName}`, { cwd });
+ * ```
+ */
+export async function execGitCommand(
+  args: string[],
+  cwd: string,
+  env?: Record<string, string>,
+  abortController?: AbortController
+): Promise<string> {
+  const result = await spawnProcess({
+    command: 'git',
+    args,
+    cwd,
+    ...(env !== undefined ? { env } : {}),
+    ...(abortController !== undefined ? { abortController } : {}),
+  });
+
+  // spawnProcess returns { stdout, stderr, exitCode }
+  if (result.exitCode === 0) {
+    return result.stdout;
+  } else {
+    const errorMessage =
+      result.stderr || result.stdout || `Git command failed with code ${result.exitCode}`;
+    throw Object.assign(new Error(errorMessage), {
+      stdout: result.stdout,
+      stderr: result.stderr,
+    });
+  }
+}
+
+// ============================================================================
+// Common Git Utilities
+// ============================================================================
+
+/**
+ * Get the current branch name for the given worktree.
+ *
+ * This is the canonical implementation shared across services. Services
+ * should import this rather than duplicating the logic locally.
+ *
+ * @param worktreePath - Path to the git worktree
+ * @returns The current branch name (trimmed)
+ */
+export async function getCurrentBranch(worktreePath: string): Promise<string> {
+  const branchOutput = await execGitCommand(['rev-parse', '--abbrev-ref', 'HEAD'], worktreePath);
+  return branchOutput.trim();
+}
+
+// ============================================================================
+// Index Lock Recovery
+// ============================================================================
+
+/**
+ * Check whether an error message indicates a stale git index lock file.
+ *
+ * Git operations that write to the index (e.g. `git stash push`) will fail
+ * with "could not write index" or "Unable to create ... .lock" when a
+ * `.git/index.lock` file exists from a previously interrupted operation.
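+ *
+ * A quick sketch of the messages this helper is meant to match (the strings
+ * are illustrative of git's wording, not an exhaustive list):
+ *
+ * ```typescript
+ * isIndexLockError("fatal: Unable to create '/repo/.git/index.lock': File exists.");
+ * // => true
+ * isIndexLockError('error: could not write index');
+ * // => true
+ * isIndexLockError('fatal: not a git repository');
+ * // => false
+ * ```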
+ *
+ * @param errorMessage - The error string from a failed git command
+ * @returns true if the error looks like a stale index lock issue
+ */
+export function isIndexLockError(errorMessage: string): boolean {
+  const lower = errorMessage.toLowerCase();
+  return (
+    lower.includes('could not write index') ||
+    (lower.includes('unable to create') && lower.includes('index.lock')) ||
+    lower.includes('index.lock')
+  );
+}
+
+/**
+ * Attempt to remove a stale `.git/index.lock` file for the given worktree.
+ *
+ * Uses `git rev-parse --git-dir` to locate the correct `.git` directory,
+ * which works for both regular repositories and linked worktrees.
+ *
+ * @param worktreePath - Path to the git worktree (or main repo)
+ * @returns true if a lock file was found and removed, false otherwise
+ */
+export async function removeStaleIndexLock(worktreePath: string): Promise<boolean> {
+  try {
+    // Resolve the .git directory (handles worktrees correctly)
+    const gitDirRaw = await execGitCommand(['rev-parse', '--git-dir'], worktreePath);
+    const gitDir = path.resolve(worktreePath, gitDirRaw.trim());
+    const lockFilePath = path.join(gitDir, 'index.lock');
+
+    // Check if the lock file exists
+    try {
+      await fs.access(lockFilePath);
+    } catch {
+      // Lock file does not exist — nothing to remove
+      return false;
+    }
+
+    // Remove the stale lock file
+    await fs.unlink(lockFilePath);
+    logger.info('Removed stale index.lock file', { worktreePath, lockFilePath });
+    return true;
+  } catch (err) {
+    logger.warn('Failed to remove stale index.lock file', {
+      worktreePath,
+      error: err instanceof Error ? err.message : String(err),
+    });
+    return false;
+  }
+}
+
+/**
+ * Execute a git command with automatic retry when a stale index.lock is detected.
+ *
+ * If the command fails with an error indicating a locked index file, this
+ * helper will attempt to remove the stale `.git/index.lock` and retry the
+ * command exactly once.
+ *
+ * This is particularly useful for `git stash push` which writes to the
+ * index and commonly fails when a previous git operation was interrupted.
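+ *
+ * A minimal usage sketch (the stash message and `worktreePath` variable are
+ * illustrative):
+ *
+ * ```typescript
+ * // Stash working-tree changes, transparently recovering from a stale lock:
+ * await execGitCommandWithLockRetry(['stash', 'push', '-m', 'automaker'], worktreePath);
+ * ```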
+ *
+ * @param args - Array of git command arguments
+ * @param cwd - Working directory to execute the command in
+ * @param env - Optional additional environment variables
+ * @returns Promise resolving to stdout output
+ * @throws The original error if retry also fails, or a non-lock error
+ */
+export async function execGitCommandWithLockRetry(
+  args: string[],
+  cwd: string,
+  env?: Record<string, string>
+): Promise<string> {
+  try {
+    return await execGitCommand(args, cwd, env);
+  } catch (error: unknown) {
+    const err = error as { message?: string; stderr?: string };
+    const errorMessage = err.stderr || err.message || '';
+
+    if (!isIndexLockError(errorMessage)) {
+      throw error;
+    }
+
+    logger.info('Git command failed due to index lock, attempting cleanup and retry', {
+      cwd,
+      args: args.join(' '),
+    });
+
+    const removed = await removeStaleIndexLock(cwd);
+    if (!removed) {
+      // Could not remove the lock file — re-throw the original error
+      throw error;
+    }
+
+    // Retry the command once after removing the lock file
+    return await execGitCommand(args, cwd, env);
+  }
+}
diff --git a/apps/server/src/lib/permission-enforcer.ts b/apps/server/src/lib/permission-enforcer.ts
index 003608ee4..714f7d40a 100644
--- a/apps/server/src/lib/permission-enforcer.ts
+++ b/apps/server/src/lib/permission-enforcer.ts
@@ -12,11 +12,18 @@ export interface PermissionCheckResult {
   reason?: string;
 }

+/** Minimal shape of a Cursor tool call used for permission checking */
+interface CursorToolCall {
+  shellToolCall?: { args?: { command: string } };
+  readToolCall?: { args?: { path: string } };
+  writeToolCall?: { args?: { path: string } };
+}
+
 /**
  * Check if a tool call is allowed based on permissions
  */
 export function checkToolCallPermission(
-  toolCall: any,
+  toolCall: CursorToolCall,
   permissions: CursorCliConfigFile | null
 ): PermissionCheckResult {
   if (!permissions || !permissions.permissions) {
@@ -152,7 +159,11 @@ function matchesRule(toolName: string, rule: string): boolean {
 /**
  * Log permission violations
  */
-export function logPermissionViolation(toolCall: any, reason: string, sessionId?: string): void {
+export function logPermissionViolation(
+  toolCall: CursorToolCall,
+  reason: string,
+  sessionId?: string
+): void {
   const sessionIdStr = sessionId ? ` [${sessionId}]` : '';

   if (toolCall.shellToolCall?.args?.command) {
diff --git a/apps/server/src/lib/worktree-metadata.ts b/apps/server/src/lib/worktree-metadata.ts
index 4742a5b08..aa6e24870 100644
--- a/apps/server/src/lib/worktree-metadata.ts
+++ b/apps/server/src/lib/worktree-metadata.ts
@@ -78,7 +78,7 @@ export async function readWorktreeMetadata(
     const metadataPath = getWorktreeMetadataPath(projectPath, branch);
     const content = (await secureFs.readFile(metadataPath, 'utf-8')) as string;
     return JSON.parse(content) as WorktreeMetadata;
-  } catch (error) {
+  } catch (_error) {
     // File doesn't exist or can't be read
     return null;
   }
diff --git a/apps/server/src/providers/claude-provider.ts b/apps/server/src/providers/claude-provider.ts
index 3d4f88cde..aab9ec97b 100644
--- a/apps/server/src/providers/claude-provider.ts
+++ b/apps/server/src/providers/claude-provider.ts
@@ -5,11 +5,10 @@
  * with the provider architecture.
  */

-import { query, type Options } from '@anthropic-ai/claude-agent-sdk';
+import { query, type Options, type SDKUserMessage } from '@anthropic-ai/claude-agent-sdk';
 import { BaseProvider } from './base-provider.js';
 import { classifyError, getUserFriendlyErrorMessage, createLogger } from '@automaker/utils';
-
-const logger = createLogger('ClaudeProvider');
+import { getClaudeAuthIndicators } from '@automaker/platform';
 import {
   getThinkingTokenBudget,
   validateBareModelId,
@@ -17,6 +16,14 @@ import {
   type ClaudeCompatibleProvider,
   type Credentials,
 } from '@automaker/types';
+import type {
+  ExecuteOptions,
+  ProviderMessage,
+  InstallationStatus,
+  ModelDefinition,
+} from './types.js';
+
+const logger = createLogger('ClaudeProvider');

 /**
  * ProviderConfig - Union type for provider configuration
@@ -25,37 +32,6 @@ import {
  * Both share the same connection settings structure.
  */
 type ProviderConfig = ClaudeApiProfile | ClaudeCompatibleProvider;
-import type {
-  ExecuteOptions,
-  ProviderMessage,
-  InstallationStatus,
-  ModelDefinition,
-} from './types.js';
-
-// Explicit allowlist of environment variables to pass to the SDK.
-// Only these vars are passed - nothing else from process.env leaks through.
-const ALLOWED_ENV_VARS = [
-  // Authentication
-  'ANTHROPIC_API_KEY',
-  'ANTHROPIC_AUTH_TOKEN',
-  // Endpoint configuration
-  'ANTHROPIC_BASE_URL',
-  'API_TIMEOUT_MS',
-  // Model mappings
-  'ANTHROPIC_DEFAULT_HAIKU_MODEL',
-  'ANTHROPIC_DEFAULT_SONNET_MODEL',
-  'ANTHROPIC_DEFAULT_OPUS_MODEL',
-  // Traffic control
-  'CLAUDE_CODE_DISABLE_NONESSENTIAL_TRAFFIC',
-  // System vars (always from process.env)
-  'PATH',
-  'HOME',
-  'SHELL',
-  'TERM',
-  'USER',
-  'LANG',
-  'LC_ALL',
-];

 // System vars are always passed from process.env regardless of profile
 const SYSTEM_ENV_VARS = ['PATH', 'HOME', 'SHELL', 'TERM', 'USER', 'LANG', 'LC_ALL'];
@@ -258,14 +234,14 @@ export class ClaudeProvider extends BaseProvider {
     };

     // Build prompt payload
-    let promptPayload: string | AsyncIterable<any>;
+    let promptPayload: string | AsyncIterable<SDKUserMessage>;

     if (Array.isArray(prompt)) {
       // Multi-part prompt (with images)
       promptPayload = (async function* () {
-        const multiPartPrompt = {
+        const multiPartPrompt: SDKUserMessage = {
           type: 'user' as const,
-          session_id: '',
+          session_id: sdkSessionId || '',
           message: {
             role: 'user' as const,
             content: prompt,
@@ -317,12 +293,16 @@ export class ClaudeProvider extends BaseProvider {
         ? `${userMessage}\n\nTip: If you're running multiple features in auto-mode, consider reducing concurrency (maxConcurrency setting) to avoid hitting rate limits.`
         : userMessage;

-      const enhancedError = new Error(message);
-      (enhancedError as any).originalError = error;
-      (enhancedError as any).type = errorInfo.type;
+      const enhancedError = new Error(message) as Error & {
+        originalError: unknown;
+        type: string;
+        retryAfter?: number;
+      };
+      enhancedError.originalError = error;
+      enhancedError.type = errorInfo.type;

       if (errorInfo.isRateLimit) {
-        (enhancedError as any).retryAfter = errorInfo.retryAfter;
+        enhancedError.retryAfter = errorInfo.retryAfter;
       }

       throw enhancedError;
@@ -334,13 +314,37 @@ export class ClaudeProvider extends BaseProvider {
    */
   async detectInstallation(): Promise<InstallationStatus> {
     // Claude SDK is always available since it's a dependency
-    const hasApiKey = !!process.env.ANTHROPIC_API_KEY;
+    // Check all four supported auth methods, mirroring the logic in buildEnv():
+    // 1. ANTHROPIC_API_KEY environment variable
+    // 2. ANTHROPIC_AUTH_TOKEN environment variable
+    // 3. credentials?.apiKeys?.anthropic (credentials file, checked via platform indicators)
+    // 4. Claude Max CLI OAuth (SDK handles this automatically; detected via getClaudeAuthIndicators)
+    const hasEnvApiKey = !!process.env.ANTHROPIC_API_KEY;
+    const hasEnvAuthToken = !!process.env.ANTHROPIC_AUTH_TOKEN;
+
+    // Check credentials file and CLI OAuth indicators (same sources used by buildEnv)
+    let hasCredentialsApiKey = false;
+    let hasCliOAuth = false;
+    try {
+      const indicators = await getClaudeAuthIndicators();
+      hasCredentialsApiKey = !!indicators.credentials?.hasApiKey;
+      hasCliOAuth = !!(
+        indicators.credentials?.hasOAuthToken ||
+        indicators.hasStatsCacheWithActivity ||
+        (indicators.hasSettingsFile && indicators.hasProjectsSessions)
+      );
+    } catch {
+      // If we can't check indicators, fall back to env vars only
+    }
+
+    const hasApiKey = hasEnvApiKey || hasCredentialsApiKey;
+    const authenticated = hasEnvApiKey || hasEnvAuthToken || hasCredentialsApiKey || hasCliOAuth;

     const status: InstallationStatus = {
       installed: true,
       method: 'sdk',
       hasApiKey,
-      authenticated: hasApiKey,
+      authenticated,
     };

     return status;
@@ -364,6 +368,18 @@ export class ClaudeProvider extends BaseProvider {
         tier: 'premium' as const,
         default: true,
       },
+      {
+        id: 'claude-sonnet-4-6',
+        name: 'Claude Sonnet 4.6',
+        modelString: 'claude-sonnet-4-6',
+        provider: 'anthropic',
+        description: 'Balanced performance and cost with enhanced reasoning',
+        contextWindow: 200000,
+        maxOutputTokens: 64000,
+        supportsVision: true,
+        supportsTools: true,
+        tier: 'standard' as const,
+      },
       {
         id: 'claude-sonnet-4-20250514',
         name: 'Claude Sonnet 4',
diff --git a/apps/server/src/providers/codex-models.ts b/apps/server/src/providers/codex-models.ts
index 7840888b0..22839e28b 100644
--- a/apps/server/src/providers/codex-models.ts
+++ b/apps/server/src/providers/codex-models.ts
@@ -32,6 +32,19 @@ export const CODEX_MODELS: ModelDefinition[] = [
     default: true,
     hasReasoning: true,
   },
+  {
+    id: CODEX_MODEL_MAP.gpt53CodexSpark,
+    name: 'GPT-5.3-Codex-Spark',
+    modelString: CODEX_MODEL_MAP.gpt53CodexSpark,
+    provider: 'openai',
+    description: 'Near-instant real-time coding model, 1000+ tokens/sec.',
+    contextWindow: CONTEXT_WINDOW_256K,
+    maxOutputTokens: MAX_OUTPUT_32K,
+    supportsVision: true,
+    supportsTools: true,
+    tier: 'premium' as const,
+    hasReasoning: true,
+  },
   {
     id: CODEX_MODEL_MAP.gpt52Codex,
     name: 'GPT-5.2-Codex',
@@ -71,6 +84,45 @@ export const CODEX_MODELS: ModelDefinition[] = [
     tier: 'basic' as const,
     hasReasoning: false,
   },
+  {
+    id: CODEX_MODEL_MAP.gpt51Codex,
+    name: 'GPT-5.1-Codex',
+    modelString: CODEX_MODEL_MAP.gpt51Codex,
+    provider: 'openai',
+    description: 'Original GPT-5.1 Codex agentic coding model.',
+    contextWindow: CONTEXT_WINDOW_256K,
+    maxOutputTokens: MAX_OUTPUT_32K,
+    supportsVision: true,
+    supportsTools: true,
+    tier: 'standard' as const,
+    hasReasoning: true,
+  },
+  {
+    id: CODEX_MODEL_MAP.gpt5Codex,
+    name: 'GPT-5-Codex',
+    modelString: CODEX_MODEL_MAP.gpt5Codex,
+    provider: 'openai',
+    description: 'Original GPT-5 Codex model.',
+    contextWindow: CONTEXT_WINDOW_128K,
+    maxOutputTokens: MAX_OUTPUT_16K,
+    supportsVision: true,
+    supportsTools: true,
+    tier: 'standard' as const,
+    hasReasoning: true,
+  },
+  {
+    id: CODEX_MODEL_MAP.gpt5CodexMini,
+    name: 'GPT-5-Codex-Mini',
+    modelString: CODEX_MODEL_MAP.gpt5CodexMini,
+    provider: 'openai',
+    description: 'Smaller, cheaper GPT-5 Codex variant.',
+    contextWindow: CONTEXT_WINDOW_128K,
+    maxOutputTokens: MAX_OUTPUT_16K,
+    supportsVision: true,
+    supportsTools: true,
+    tier: 'basic' as const,
+    hasReasoning: false,
+  },

   // ========== General-Purpose GPT Models ==========
   {
@@ -99,6 +151,19 @@ export const CODEX_MODELS: ModelDefinition[] = [
     tier: 'standard' as const,
     hasReasoning: true,
   },
+  {
+    id: CODEX_MODEL_MAP.gpt5,
+    name: 'GPT-5',
+    modelString: CODEX_MODEL_MAP.gpt5,
+    provider: 'openai',
+    description: 'Base GPT-5 model.',
+    contextWindow: CONTEXT_WINDOW_128K,
+    maxOutputTokens: MAX_OUTPUT_16K,
+    supportsVision: true,
+    supportsTools: true,
+    tier: 'standard' as const,
+    hasReasoning: true,
+  },
 ];

 /**
diff --git a/apps/server/src/providers/codex-provider.ts b/apps/server/src/providers/codex-provider.ts
index 5c200ea54..f4211b4f6 100644
--- a/apps/server/src/providers/codex-provider.ts
+++ b/apps/server/src/providers/codex-provider.ts
@@ -30,7 +30,6 @@ import type {
   ModelDefinition,
 } from './types.js';
 import {
-  CODEX_MODEL_MAP,
   supportsReasoningEffort,
   validateBareModelId,
   calculateReasoningTimeout,
@@ -56,15 +55,10 @@ const CODEX_EXEC_SUBCOMMAND = 'exec';
 const CODEX_JSON_FLAG = '--json';
 const CODEX_MODEL_FLAG = '--model';
 const CODEX_VERSION_FLAG = '--version';
-const CODEX_SANDBOX_FLAG = '--sandbox';
-const CODEX_APPROVAL_FLAG = '--ask-for-approval';
-const CODEX_SEARCH_FLAG = '--search';
-const CODEX_OUTPUT_SCHEMA_FLAG = '--output-schema';
 const CODEX_CONFIG_FLAG = '--config';
-const CODEX_IMAGE_FLAG = '--image';
 const CODEX_ADD_DIR_FLAG = '--add-dir';
+const CODEX_OUTPUT_SCHEMA_FLAG = '--output-schema';
 const CODEX_SKIP_GIT_REPO_CHECK_FLAG = '--skip-git-repo-check';
-const CODEX_RESUME_FLAG = 'resume';
 const CODEX_REASONING_EFFORT_KEY = 'reasoning_effort';
 const CODEX_YOLO_FLAG = '--dangerously-bypass-approvals-and-sandbox';
 const OPENAI_API_KEY_ENV = 'OPENAI_API_KEY';
@@ -106,9 +100,6 @@ const TEXT_ENCODING = 'utf-8';
  */
 const CODEX_CLI_TIMEOUT_MS = DEFAULT_TIMEOUT_MS;
 const CODEX_FEATURE_GENERATION_BASE_TIMEOUT_MS = 300000; // 5 minutes for feature generation
-const CONTEXT_WINDOW_256K = 256000;
-const MAX_OUTPUT_32K = 32000;
-const MAX_OUTPUT_16K = 16000;
 const SYSTEM_PROMPT_SEPARATOR = '\n\n';
 const CODEX_INSTRUCTIONS_DIR = '.codex';
 const CODEX_INSTRUCTIONS_SECTION = 'Codex Project Instructions';
@@ -210,16 +201,42 @@ function isSdkEligible(options: ExecuteOptions): boolean {
   return isNoToolsRequested(options) && !hasMcpServersConfigured(options);
 }

+function isSdkEligibleWithApiKey(options: ExecuteOptions): boolean {
+  // When using an API key (not CLI OAuth), prefer SDK over CLI to avoid OAuth issues.
+  // SDK mode is used when MCP servers are not configured (MCP requires CLI).
+  // Tool requests are handled by the SDK, so we allow SDK mode even with tools.
+  return !hasMcpServersConfigured(options);
+}
+
 async function resolveCodexExecutionPlan(options: ExecuteOptions): Promise {
   const cliPath = await findCodexCliPath();
   const authIndicators = await getCodexAuthIndicators();
   const openAiApiKey = await resolveOpenAiApiKey();
   const hasApiKey = Boolean(openAiApiKey);
-  const cliAuthenticated = authIndicators.hasOAuthToken || authIndicators.hasApiKey || hasApiKey;
-  const sdkEligible = isSdkEligible(options);
   const cliAvailable = Boolean(cliPath);

+  // CLI OAuth login takes priority: if the user has logged in via `codex login`,
+  // use the CLI regardless of whether an API key is also stored.
+  // hasOAuthToken = OAuth session from `codex login`
+  // authIndicators.hasApiKey = API key stored in Codex's own auth file (via `codex login --api-key`)
+  // Both are "CLI-native" auth — distinct from an API key stored in Automaker's credentials.
+  const hasCliNativeAuth = authIndicators.hasOAuthToken || authIndicators.hasApiKey;
+  const sdkEligible = isSdkEligible(options);
+
+  // If CLI is available and the user authenticated via the CLI (`codex login`),
+  // prefer CLI mode over SDK. This ensures `codex login` sessions take priority
+  // over API keys stored in Automaker's credentials.
+  if (cliAvailable && hasCliNativeAuth) {
+    return {
+      mode: CODEX_EXECUTION_MODE_CLI,
+      cliPath,
+      openAiApiKey,
+    };
+  }

-  if (hasApiKey) {
+  // No CLI-native auth — prefer SDK when an API key is available.
+  // Using SDK with an API key avoids OAuth issues that can arise with the CLI.
+  // MCP servers still require CLI mode since the SDK doesn't support MCP.
+  if (hasApiKey && isSdkEligibleWithApiKey(options)) {
     return {
       mode: CODEX_EXECUTION_MODE_SDK,
       cliPath,
@@ -227,6 +244,16 @@ async function resolveCodexExecutionPlan(options: ExecuteOptions): Promise
 ): string | null {
@@ -758,15 +779,12 @@ export class CodexProvider extends BaseProvider {
       options.cwd,
       codexSettings.sandboxMode !== 'danger-full-access'
     );
-    const resolvedSandboxMode = sandboxCheck.enabled
-      ? codexSettings.sandboxMode
-      : 'danger-full-access';

     if (!sandboxCheck.enabled && sandboxCheck.message) {
       console.warn(`[CodexProvider] ${sandboxCheck.message}`);
     }

     const searchEnabled =
       codexSettings.enableWebSearch || resolveSearchEnabled(resolvedAllowedTools, restrictTools);
-    const outputSchemaPath = await writeOutputSchemaFile(options.cwd, options.outputFormat);
+    const schemaPath = await writeOutputSchemaFile(options.cwd, options.outputFormat);
     const imageBlocks = codexSettings.enableImages ? extractImageBlocks(options.prompt) : [];
     const imagePaths = await writeImageFiles(options.cwd, imageBlocks);
     const approvalPolicy =
@@ -801,7 +819,7 @@ export class CodexProvider extends BaseProvider {
       overrides.push({ key: 'features.web_search_request', value: true });
     }

-    const configOverrides = buildConfigOverrides(overrides);
+    const configOverrideArgs = buildConfigOverrides(overrides);

     const preExecArgs: string[] = [];

     // Add additional directories with write access
@@ -811,6 +829,12 @@ export class CodexProvider extends BaseProvider {
       }
     }

+    // If images were written to disk, add the image directory so the CLI can access them
+    if (imagePaths.length > 0) {
+      const imageDir = path.join(options.cwd, CODEX_INSTRUCTIONS_DIR, IMAGE_TEMP_DIR);
+      preExecArgs.push(CODEX_ADD_DIR_FLAG, imageDir);
+    }
+
     // Model is already bare (no prefix) - validated by executeQuery
     const args = [
       CODEX_EXEC_SUBCOMMAND,
@@ -820,6 +844,8 @@
       CODEX_MODEL_FLAG,
       options.model,
       CODEX_JSON_FLAG,
+      ...configOverrideArgs,
+      ...(schemaPath ? [CODEX_OUTPUT_SCHEMA_FLAG, schemaPath] : []),
       '-', // Read prompt from stdin to avoid shell escaping issues
     ];
@@ -866,16 +892,36 @@ export class CodexProvider extends BaseProvider {
     // Enhance error message with helpful context
     let enhancedError = errorText;
-    if (errorText.toLowerCase().includes('rate limit')) {
+    const errorLower = errorText.toLowerCase();
+    if (errorLower.includes('rate limit')) {
       enhancedError = `${errorText}\n\nTip: You're being rate limited. Try reducing concurrent tasks or waiting a few minutes before retrying.`;
+    } else if (errorLower.includes('authentication') || errorLower.includes('unauthorized')) {
+      enhancedError = `${errorText}\n\nTip: Check that your OPENAI_API_KEY is set correctly or run 'codex login' to authenticate.`;
+    } else if (
+      errorLower.includes('model does not exist') ||
+      errorLower.includes('requested model does not exist') ||
+      errorLower.includes('do not have access') ||
+      errorLower.includes('model_not_found') ||
+      errorLower.includes('invalid_model')
+    ) {
+      enhancedError =
+        `${errorText}\n\nTip: The model '${options.model}' may not be available on your OpenAI plan. ` +
+        `See https://platform.openai.com/docs/models for available models. ` +
+        `Some models require a ChatGPT Pro/Plus subscription—authenticate with 'codex login' instead of an API key.`;
     } else if (
-      errorText.toLowerCase().includes('authentication') ||
-      errorText.toLowerCase().includes('unauthorized')
+      errorLower.includes('stream disconnected') ||
+      errorLower.includes('stream ended') ||
+      errorLower.includes('connection reset')
     ) {
-      enhancedError = `${errorText}\n\nTip: Check that your OPENAI_API_KEY is set correctly or run 'codex auth login' to authenticate.`;
+      enhancedError =
+        `${errorText}\n\nTip: The connection to OpenAI was interrupted. This can happen due to:\n` +
+        `- Network instability\n` +
+        `- The model not being available on your plan\n` +
+        `- Server-side timeouts for long-running requests\n` +
+        `Try again, or switch to a different model.`;
     } else if (
-      errorText.toLowerCase().includes('not found') ||
-      errorText.toLowerCase().includes('command not found')
+      errorLower.includes('command not found') ||
+      errorLower.includes('is not recognized as an internal or external command')
     ) {
       enhancedError = `${errorText}\n\nTip: Make sure the Codex CLI is installed. Run 'npm install -g @openai/codex-cli' to install.`;
     }
@@ -1033,7 +1079,6 @@ export class CodexProvider extends BaseProvider {
   async detectInstallation(): Promise<InstallationStatus> {
     const cliPath = await findCodexCliPath();
     const hasApiKey = Boolean(await resolveOpenAiApiKey());
-    const authIndicators = await getCodexAuthIndicators();
     const installed = !!cliPath;

     let version = '';
     if (installed) {
       try {
         const result = await spawnProcess({
           command: cliPath,
           args: [CODEX_VERSION_FLAG],
           cwd: process.cwd(),
         });
         version = result.stdout.trim();
-      } catch (error) {
+      } catch {
         version = '';
       }
     }
diff --git a/apps/server/src/providers/codex-sdk-client.ts b/apps/server/src/providers/codex-sdk-client.ts
index 51f7c0d20..bc885c721 100644
--- a/apps/server/src/providers/codex-sdk-client.ts
+++ b/apps/server/src/providers/codex-sdk-client.ts
@@ -15,6 +15,9 @@ const SDK_HISTORY_HEADER = 'Current request:\n';
 const DEFAULT_RESPONSE_TEXT = '';
 const SDK_ERROR_DETAILS_LABEL = 'Details:';

+type SdkReasoningEffort = 'minimal' | 'low' | 'medium' | 'high' | 'xhigh';
+const SDK_REASONING_EFFORTS = new Set<string>(['minimal', 'low', 'medium', 'high', 'xhigh']);
+
 type PromptBlock = {
   type: string;
   text?: string;
@@ -99,38 +102,52 @@ export async function* executeCodexSdkQuery(
   const apiKey = resolveApiKey();
   const codex = new Codex({ apiKey });

+  // Build thread options with model
+  // The model must be passed to startThread/resumeThread so the SDK
+  // knows which model to use for the conversation. Without this,
+  // the SDK may use a default model that the user doesn't have access to.
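+  // For example (values illustrative), a request for model 'gpt-5.2-codex' with
+  // reasoningEffort 'medium' ends up as
+  //   { model: 'gpt-5.2-codex', modelReasoningEffort: 'medium' }
+  // after the guards below run.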
+  const threadOptions: {
+    model?: string;
+    modelReasoningEffort?: SdkReasoningEffort;
+  } = {};
+
+  if (options.model) {
+    threadOptions.model = options.model;
+  }
+
+  // Add reasoning effort to thread options if model supports it
+  if (
+    options.reasoningEffort &&
+    options.model &&
+    supportsReasoningEffort(options.model) &&
+    options.reasoningEffort !== 'none' &&
+    SDK_REASONING_EFFORTS.has(options.reasoningEffort)
+  ) {
+    threadOptions.modelReasoningEffort = options.reasoningEffort as SdkReasoningEffort;
+  }
+
   // Resume existing thread or start new one
   let thread;
   if (options.sdkSessionId) {
     try {
-      thread = codex.resumeThread(options.sdkSessionId);
+      thread = codex.resumeThread(options.sdkSessionId, threadOptions);
     } catch {
       // If resume fails, start a new thread
-      thread = codex.startThread();
+      thread = codex.startThread(threadOptions);
     }
   } else {
-    thread = codex.startThread();
+    thread = codex.startThread(threadOptions);
   }

   const promptText = buildPromptText(options, systemPrompt);

-  // Build run options with reasoning effort if supported
+  // Build run options
   const runOptions: {
     signal?: AbortSignal;
-    reasoning?: { effort: string };
   } = {
     signal: options.abortController?.signal,
   };

-  // Add reasoning effort if model supports it and reasoningEffort is specified
-  if (
-    options.reasoningEffort &&
-    supportsReasoningEffort(options.model) &&
-    options.reasoningEffort !== 'none'
-  ) {
-    runOptions.reasoning = { effort: options.reasoningEffort };
-  }
-
   // Run the query
   const result = await thread.run(promptText, runOptions);

@@ -160,10 +177,42 @@
   } catch (error) {
     const errorInfo = classifyError(error);
     const userMessage = getUserFriendlyErrorMessage(error);
-    const combinedMessage = buildSdkErrorMessage(errorInfo.message, userMessage);
+    let combinedMessage = buildSdkErrorMessage(errorInfo.message, userMessage);
+
+    // Enhance error messages with actionable tips for common Codex issues
+    // Normalize inputs to avoid crashes from nullish values
+    const errorLower = (errorInfo?.message ?? '').toLowerCase();
+    const modelLabel = options?.model ?? '';
+
+    if (
+      errorLower.includes('does not exist') ||
+      errorLower.includes('model_not_found') ||
+      errorLower.includes('invalid_model')
+    ) {
+      // Model not found - provide helpful guidance
+      combinedMessage +=
+        `\n\nTip: The model '${modelLabel}' may not be available on your OpenAI plan. ` +
+        `Some models (like gpt-5.3-codex) require a ChatGPT Pro/Plus subscription and OAuth login via 'codex login'. ` +
+        `Try using a different model (e.g., gpt-5.1 or gpt-5.2), or authenticate with 'codex login' instead of an API key.`;
+    } else if (
+      errorLower.includes('stream disconnected') ||
+      errorLower.includes('stream ended') ||
+      errorLower.includes('connection reset') ||
+      errorLower.includes('socket hang up')
+    ) {
+      // Stream disconnection - provide helpful guidance
+      combinedMessage +=
+        `\n\nTip: The connection to OpenAI was interrupted. This can happen due to:\n` +
+        `- Network instability\n` +
+        `- The model not being available on your plan (try 'codex login' for OAuth authentication)\n` +
+        `- Server-side timeouts for long-running requests\n` +
+        `Try again, or switch to a different model.`;
+    }
+
     console.error('[CodexSDK] executeQuery() error during execution:', {
       type: errorInfo.type,
       message: errorInfo.message,
+      model: options.model,
       isRateLimit: errorInfo.isRateLimit,
       retryAfter: errorInfo.retryAfter,
       stack: error instanceof Error ? error.stack : undefined,
diff --git a/apps/server/src/providers/copilot-provider.ts b/apps/server/src/providers/copilot-provider.ts
index 64423047b..34cfcbce2 100644
--- a/apps/server/src/providers/copilot-provider.ts
+++ b/apps/server/src/providers/copilot-provider.ts
@@ -42,7 +42,7 @@ import {
 const logger = createLogger('CopilotProvider');

 // Default bare model (without copilot- prefix) for SDK calls
-const DEFAULT_BARE_MODEL = 'claude-sonnet-4.5';
+const DEFAULT_BARE_MODEL = 'claude-sonnet-4.6';

 // =============================================================================
 // SDK Event Types (from @github/copilot-sdk)
@@ -85,10 +85,6 @@ interface SdkToolExecutionEndEvent extends SdkEvent {
   };
 }

-interface SdkSessionIdleEvent extends SdkEvent {
-  type: 'session.idle';
-}
-
 interface SdkSessionErrorEvent extends SdkEvent {
   type: 'session.error';
   data: {
diff --git a/apps/server/src/providers/cursor-provider.ts b/apps/server/src/providers/cursor-provider.ts
index a2e813c0c..8684417a2 100644
--- a/apps/server/src/providers/cursor-provider.ts
+++ b/apps/server/src/providers/cursor-provider.ts
@@ -31,7 +31,7 @@ import type {
 } from './types.js';
 import { validateBareModelId } from '@automaker/types';
 import { validateApiKey } from '../lib/auth-utils.js';
-import { getEffectivePermissions } from '../services/cursor-config-service.js';
+import { getEffectivePermissions, detectProfile } from '../services/cursor-config-service.js';
 import {
   type CursorStreamEvent,
   type CursorSystemEvent,
@@ -69,6 +69,7 @@ interface CursorToolHandler
  * Registry of Cursor tool handlers
  * Each handler knows how to normalize its specific tool call type
  */
+// eslint-disable-next-line @typescript-eslint/no-explicit-any -- handler registry stores heterogeneous tool type parameters
 const CURSOR_TOOL_HANDLERS: Record<string, CursorToolHandler<any>> = {
   readToolCall: {
     name: 'Read',
@@ -877,8 +878,12 @@ export class CursorProvider extends CliProvider {

     logger.debug(`CursorProvider.executeQuery called with model: "${options.model}"`);

-    // Get effective permissions for this project
+    // Get effective permissions for this project and detect the active profile
     const effectivePermissions = await getEffectivePermissions(options.cwd || process.cwd());
+    const activeProfile = detectProfile(effectivePermissions);
+    logger.debug(
+      `Active permission profile: ${activeProfile ?? 'none'}, permissions: ${JSON.stringify(effectivePermissions)}`
+    );

     // Debug: log raw events when AUTOMAKER_DEBUG_RAW_OUTPUT is enabled
     const debugRawEvents =
diff --git a/apps/server/src/providers/gemini-provider.ts b/apps/server/src/providers/gemini-provider.ts
index 09f16c16e..764c57eba 100644
--- a/apps/server/src/providers/gemini-provider.ts
+++ b/apps/server/src/providers/gemini-provider.ts
@@ -20,7 +20,6 @@ import type {
   ProviderMessage,
   InstallationStatus,
   ModelDefinition,
-  ContentBlock,
 } from './types.js';
 import { validateBareModelId } from '@automaker/types';
 import { GEMINI_MODEL_MAP, type GeminiAuthStatus } from '@automaker/types';
diff --git a/apps/server/src/providers/opencode-provider.ts b/apps/server/src/providers/opencode-provider.ts
index d2fa13d94..8c58da156 100644
--- a/apps/server/src/providers/opencode-provider.ts
+++ b/apps/server/src/providers/opencode-provider.ts
@@ -192,6 +192,28 @@ export interface OpenCodeToolErrorEvent extends OpenCodeBaseEvent {
   part?: OpenCodePart & { error: string };
 }

+/**
+ * Tool use event - The actual format emitted by OpenCode CLI when a tool is invoked.
+ * Contains the tool name, call ID, and the complete state (input, output, status).
+ * Note: OpenCode CLI emits 'tool_use' (not 'tool_call') as the event type.
+ */
+export interface OpenCodeToolUseEvent extends OpenCodeBaseEvent {
+  type: 'tool_use';
+  part: OpenCodePart & {
+    type: 'tool';
+    callID?: string;
+    tool?: string;
+    state?: {
+      status?: string;
+      input?: unknown;
+      output?: string;
+      title?: string;
+      metadata?: unknown;
+      time?: { start: number; end: number };
+    };
+  };
+}
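+
+// A sketch of the event shape the interface above models (field values are
+// illustrative, not captured CLI output):
+//   { "type": "tool_use", "sessionID": "ses_123", "part": { "type": "tool",
+//     "tool": "read", "callID": "call_1",
+//     "state": { "status": "completed", "input": { "path": "src/index.ts" },
+//                "output": "..." } } }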
+ * Contains the tool name, call ID, and the complete state (input, output, status). + * Note: OpenCode CLI emits 'tool_use' (not 'tool_call') as the event type. + */ +export interface OpenCodeToolUseEvent extends OpenCodeBaseEvent { + type: 'tool_use'; + part: OpenCodePart & { + type: 'tool'; + callID?: string; + tool?: string; + state?: { + status?: string; + input?: unknown; + output?: string; + title?: string; + metadata?: unknown; + time?: { start: number; end: number }; + }; + }; +} + /** * Union type of all OpenCode stream events */ @@ -200,6 +222,7 @@ export type OpenCodeStreamEvent = | OpenCodeStepStartEvent | OpenCodeStepFinishEvent | OpenCodeToolCallEvent + | OpenCodeToolUseEvent | OpenCodeToolResultEvent | OpenCodeErrorEvent | OpenCodeToolErrorEvent; @@ -311,8 +334,8 @@ export class OpencodeProvider extends CliProvider { * Arguments built: * - 'run' subcommand for executing queries * - '--format', 'json' for JSONL streaming output - * - '-c', '' for working directory (using opencode's -c flag) * - '--model', '' for model selection (if specified) + * - '--session', '' for continuing an existing session (if sdkSessionId is set) * * The prompt is passed via stdin (piped) to avoid shell escaping issues. * OpenCode CLI automatically reads from stdin when input is piped. @@ -326,6 +349,14 @@ export class OpencodeProvider extends CliProvider { // Add JSON output format for JSONL parsing (not 'stream-json') args.push('--format', 'json'); + // Handle session resumption for conversation continuity. + // The opencode CLI supports `--session ` to continue an existing session. + // The sdkSessionId is captured from the sessionID field in previous stream events + // and persisted by AgentService for use in follow-up messages. + if (options.sdkSessionId) { + args.push('--session', options.sdkSessionId); + } + // Handle model selection // Convert canonical prefix format (opencode-xxx) to CLI slash format (opencode/xxx) // OpenCode CLI expects provider/model format (e.g., 'opencode/big-model') @@ -398,15 +429,225 @@ export class OpencodeProvider extends CliProvider { return subprocessOptions; } + /** + * Check if an error message indicates a session-not-found condition. + * + * Centralizes the pattern matching for session errors to avoid duplication. + * Strips ANSI escape codes first since opencode CLI uses colored stderr output + * (e.g. "\x1b[91m\x1b[1mError: \x1b[0mSession not found"). + * + * IMPORTANT: Patterns must be specific enough to avoid false positives. + * Generic patterns like "notfounderror" or "resource not found" match + * non-session errors (e.g. "ProviderModelNotFoundError") which would + * trigger unnecessary retries that fail identically, producing confusing + * error messages like "OpenCode session could not be created". + * + * @param errorText - Raw error text (may contain ANSI codes) + * @returns true if the error indicates the session was not found + */ + private static isSessionNotFoundError(errorText: string): boolean { + const cleaned = OpencodeProvider.stripAnsiCodes(errorText).toLowerCase(); + + // Explicit session-related phrases — high confidence + if ( + cleaned.includes('session not found') || + cleaned.includes('session does not exist') || + cleaned.includes('invalid session') || + cleaned.includes('session expired') || + cleaned.includes('no such session') + ) { + return true; + } + + // Generic "NotFoundError" / "resource not found" are only session errors + // when the message also references a session path or session ID. 
+ // Without this guard, errors like "ProviderModelNotFoundError" or + // "Resource not found: /path/to/config.json" would false-positive. + if (cleaned.includes('notfounderror') || cleaned.includes('resource not found')) { + return cleaned.includes('/session/') || /\bsession\b/.test(cleaned); + } + + return false; + } + + /** + * Strip ANSI escape codes from a string. + * + * The OpenCode CLI uses colored stderr output (e.g. "\x1b[91m\x1b[1mError: \x1b[0m"). + * These escape codes render as garbled text like "[91m[1mError: [0m" in the UI + * when passed through as-is. This utility removes them so error messages are + * clean and human-readable. + */ + private static stripAnsiCodes(text: string): string { + return text.replace(/\x1b\[[0-9;]*m/g, ''); + } + + /** + * Clean a CLI error message for display. + * + * Strips ANSI escape codes AND removes the redundant "Error: " prefix that + * the OpenCode CLI prepends to error messages in its colored stderr output + * (e.g. "\x1b[91m\x1b[1mError: \x1b[0mSession not found" → "Session not found"). + * + * Without this, consumers that wrap the message in their own "Error: " prefix + * (like AgentService or AgentExecutor) produce garbled double-prefixed output: + * "Error: Error: Session not found". + */ + private static cleanErrorMessage(text: string): string { + let cleaned = OpencodeProvider.stripAnsiCodes(text).trim(); + // Remove leading "Error: " prefix (case-insensitive) if present. + // The CLI formats errors as: \x1b[91m\x1b[1mError: \x1b[0m + // After ANSI stripping this becomes: "Error: " + cleaned = cleaned.replace(/^Error:\s*/i, '').trim(); + return cleaned || text; + } + + /** + * Execute a query with automatic session resumption fallback. + * + * When a sdkSessionId is provided, the CLI receives `--session `. + * If the session no longer exists on disk the CLI will fail with a + * "NotFoundError" / "Resource not found" / "Session not found" error. + * + * The opencode CLI writes this to **stderr** and exits non-zero. + * `spawnJSONLProcess` collects stderr and **yields** it as + * `{ type: 'error', error: }` — it is NOT thrown. + * After `normalizeEvent`, the error becomes a yielded `ProviderMessage` + * with `type: 'error'`. A simple try/catch therefore cannot intercept it. + * + * This override iterates the parent stream, intercepts yielded error + * messages that match the session-not-found pattern, and retries the + * entire query WITHOUT the `--session` flag so a fresh session is started. + * + * Session-not-found retry is ONLY attempted when `sdkSessionId` is set. + * Without the `--session` flag the CLI always creates a fresh session, so + * retrying without it would be identical to the first attempt and would + * fail the same way — producing a confusing "session could not be created" + * message for what is actually a different error (model not found, auth + * failure, etc.). + * + * All error messages (session or not) are cleaned of ANSI codes and the + * CLI's redundant "Error: " prefix before being yielded to consumers. + * + * After a successful retry, the consumer (AgentService) will receive a new + * session_id from the fresh stream events, which it persists to metadata — + * replacing the stale sdkSessionId and preventing repeated failures. + */ + async *executeQuery(options: ExecuteOptions): AsyncGenerator { + // When no sdkSessionId is set, there is nothing to "retry without" — just + // stream normally and clean error messages as they pass through. 
+ if (!options.sdkSessionId) { + for await (const msg of super.executeQuery(options)) { + // Clean error messages so consumers don't get ANSI or double "Error:" prefix + if (msg.type === 'error' && msg.error && typeof msg.error === 'string') { + msg.error = OpencodeProvider.cleanErrorMessage(msg.error); + } + yield msg; + } + return; + } + + // sdkSessionId IS set — the CLI will receive `--session `. + // If that session no longer exists, intercept the error and retry fresh. + // + // To avoid buffering the entire stream in memory for long-lived sessions, + // we only buffer an initial window of messages until we observe a healthy + // (non-error) message. Once a healthy message is seen, we flush the buffer + // and switch to direct passthrough, while still watching for session errors + // via isSessionNotFoundError on any subsequent error messages. + const buffered: ProviderMessage[] = []; + let sessionError = false; + let seenHealthyMessage = false; + + try { + for await (const msg of super.executeQuery(options)) { + if (msg.type === 'error') { + const errorText = msg.error || ''; + if (OpencodeProvider.isSessionNotFoundError(errorText)) { + sessionError = true; + opencodeLogger.info( + `OpenCode session error detected (session "${options.sdkSessionId}") ` + + `— retrying without --session to start fresh` + ); + break; // stop consuming the failed stream + } + + // Non-session error — clean it + if (msg.error && typeof msg.error === 'string') { + msg.error = OpencodeProvider.cleanErrorMessage(msg.error); + } + } else { + // A non-error message is a healthy signal — stop buffering after this + seenHealthyMessage = true; + } + + if (seenHealthyMessage && buffered.length > 0) { + // Flush the pre-healthy buffer first, then switch to passthrough + for (const bufferedMsg of buffered) { + yield bufferedMsg; + } + buffered.length = 0; + } + + if (seenHealthyMessage) { + // Passthrough mode — yield directly without buffering + yield msg; + } else { + // Still in initial window — buffer until we see a healthy message + buffered.push(msg); + } + } + } catch (error) { + // Also handle thrown exceptions (e.g. from mapError in cli-provider) + const errMsg = error instanceof Error ? error.message : String(error); + if (OpencodeProvider.isSessionNotFoundError(errMsg)) { + sessionError = true; + opencodeLogger.info( + `OpenCode session error detected (thrown, session "${options.sdkSessionId}") ` + + `— retrying without --session to start fresh` + ); + } else { + throw error; + } + } + + if (sessionError) { + // Retry the entire query without the stale session ID. + const retryOptions = { ...options, sdkSessionId: undefined }; + opencodeLogger.info('Retrying OpenCode query without --session flag...'); + + // Stream the retry directly to the consumer. + // If the retry also fails, it's a genuine error (not session-related) + // and should be surfaced as-is rather than masked with a misleading + // "session could not be created" message. 
+ for await (const retryMsg of super.executeQuery(retryOptions)) { + if (retryMsg.type === 'error' && retryMsg.error && typeof retryMsg.error === 'string') { + retryMsg.error = OpencodeProvider.cleanErrorMessage(retryMsg.error); + } + yield retryMsg; + } + } else if (buffered.length > 0) { + // No session error and still have buffered messages (stream ended before + // any healthy message was observed) — flush them to the consumer + for (const msg of buffered) { + yield msg; + } + } + // If seenHealthyMessage is true, all messages have already been yielded + // directly in passthrough mode — nothing left to flush. + } + /** * Normalize a raw CLI event to ProviderMessage format * * Maps OpenCode event types to the standard ProviderMessage structure: * - text -> type: 'assistant', content with type: 'text' * - step_start -> null (informational, no message needed) - * - step_finish with reason 'stop' -> type: 'result', subtype: 'success' + * - step_finish with reason 'stop'/'end_turn' -> type: 'result', subtype: 'success' + * - step_finish with reason 'tool-calls' -> null (intermediate step, not final) * - step_finish with error -> type: 'error' - * - tool_call -> type: 'assistant', content with type: 'tool_use' + * - tool_use -> type: 'assistant', content with type: 'tool_use' (OpenCode CLI format) + * - tool_call -> type: 'assistant', content with type: 'tool_use' (legacy format) * - tool_result -> type: 'assistant', content with type: 'tool_result' * - error -> type: 'error' * @@ -459,7 +700,7 @@ export class OpencodeProvider extends CliProvider { return { type: 'error', session_id: finishEvent.sessionID, - error: finishEvent.part.error, + error: OpencodeProvider.cleanErrorMessage(finishEvent.part.error), }; } @@ -468,15 +709,40 @@ export class OpencodeProvider extends CliProvider { return { type: 'error', session_id: finishEvent.sessionID, - error: 'Step execution failed', + error: OpencodeProvider.cleanErrorMessage('Step execution failed'), + }; + } + + // Intermediate step completion (reason: 'tool-calls') — the agent loop + // is continuing because the model requested tool calls. Skip these so + // consumers don't mistake them for final results. + if (finishEvent.part?.reason === 'tool-calls') { + return null; + } + + // Only treat an explicit allowlist of reasons as true success. + // Reasons like 'length' (context-window truncation) or 'content-filter' + // indicate the model stopped abnormally and must not be surfaced as + // successful completions. + const SUCCESS_REASONS = new Set(['stop', 'end_turn']); + const reason = finishEvent.part?.reason; + + if (reason === undefined || SUCCESS_REASONS.has(reason)) { + // Final completion (reason: 'stop', 'end_turn', or unset) + return { + type: 'result', + subtype: 'success', + session_id: finishEvent.sessionID, + result: (finishEvent.part as OpenCodePart & { result?: string })?.result, }; } - // Successful completion (reason: 'stop' or 'end_turn') + // Non-success, non-tool-calls reason (e.g. 
'length', 'content-filter') return { type: 'result', - subtype: 'success', + subtype: 'error', session_id: finishEvent.sessionID, + error: `Step finished with non-success reason: ${reason}`, result: (finishEvent.part as OpenCodePart & { result?: string })?.result, }; } @@ -484,8 +750,10 @@ export class OpencodeProvider extends CliProvider { case 'tool_error': { const toolErrorEvent = openCodeEvent as OpenCodeBaseEvent; - // Extract error message from part.error - const errorMessage = toolErrorEvent.part?.error || 'Tool execution failed'; + // Extract error message from part.error and clean ANSI codes + const errorMessage = OpencodeProvider.cleanErrorMessage( + toolErrorEvent.part?.error || 'Tool execution failed' + ); return { type: 'error', @@ -494,6 +762,45 @@ export class OpencodeProvider extends CliProvider { }; } + // OpenCode CLI emits 'tool_use' events (not 'tool_call') when the model invokes a tool. + // The event format includes the tool name, call ID, and state with input/output. + // Handle both 'tool_use' (actual CLI format) and 'tool_call' (legacy/alternative) for robustness. + case 'tool_use': { + const toolUseEvent = openCodeEvent as OpenCodeToolUseEvent; + const part = toolUseEvent.part; + + // Generate a tool use ID if not provided + const toolUseId = part?.callID || part?.call_id || generateToolUseId(); + const toolName = part?.tool || part?.name || 'unknown'; + + const content: ContentBlock[] = [ + { + type: 'tool_use', + name: toolName, + tool_use_id: toolUseId, + input: part?.state?.input || part?.args, + }, + ]; + + // If the tool has already completed (state.status === 'completed'), also emit the result + if (part?.state?.status === 'completed' && part?.state?.output) { + content.push({ + type: 'tool_result', + tool_use_id: toolUseId, + content: part.state.output, + }); + } + + return { + type: 'assistant', + session_id: toolUseEvent.sessionID, + message: { + role: 'assistant', + content, + }, + }; + } + case 'tool_call': { const toolEvent = openCodeEvent as OpenCodeToolCallEvent; @@ -560,6 +867,13 @@ export class OpencodeProvider extends CliProvider { errorMessage = errorEvent.part.error; } + // Clean error messages: strip ANSI escape codes AND the redundant "Error: " + // prefix the CLI adds. The OpenCode CLI outputs colored stderr like: + // \x1b[91m\x1b[1mError: \x1b[0mSession not found + // Without cleaning, consumers that wrap in their own "Error: " prefix + // produce "Error: Error: Session not found". 
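+      // After cleaning, the example above is delivered as just "Session not found".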
+ errorMessage = OpencodeProvider.cleanErrorMessage(errorMessage); + return { type: 'error', session_id: errorEvent.sessionID, @@ -623,9 +937,9 @@ export class OpencodeProvider extends CliProvider { default: true, }, { - id: 'opencode/glm-4.7-free', - name: 'GLM 4.7 Free', - modelString: 'opencode/glm-4.7-free', + id: 'opencode/glm-5-free', + name: 'GLM 5 Free', + modelString: 'opencode/glm-5-free', provider: 'opencode', description: 'OpenCode free tier GLM model', supportsTools: true, @@ -643,19 +957,19 @@ export class OpencodeProvider extends CliProvider { tier: 'basic', }, { - id: 'opencode/grok-code', - name: 'Grok Code (Free)', - modelString: 'opencode/grok-code', + id: 'opencode/kimi-k2.5-free', + name: 'Kimi K2.5 Free', + modelString: 'opencode/kimi-k2.5-free', provider: 'opencode', - description: 'OpenCode free tier Grok model for coding', + description: 'OpenCode free tier Kimi model for coding', supportsTools: true, supportsVision: false, tier: 'basic', }, { - id: 'opencode/minimax-m2.1-free', - name: 'MiniMax M2.1 Free', - modelString: 'opencode/minimax-m2.1-free', + id: 'opencode/minimax-m2.5-free', + name: 'MiniMax M2.5 Free', + modelString: 'opencode/minimax-m2.5-free', provider: 'opencode', description: 'OpenCode free tier MiniMax model', supportsTools: true, @@ -777,7 +1091,7 @@ export class OpencodeProvider extends CliProvider { * * OpenCode CLI output format (one model per line): * opencode/big-pickle - * opencode/glm-4.7-free + * opencode/glm-5-free * anthropic/claude-3-5-haiku-20241022 * github-copilot/claude-3.5-sonnet * ... diff --git a/apps/server/src/providers/simple-query-service.ts b/apps/server/src/providers/simple-query-service.ts index 85c252351..5ebe4db97 100644 --- a/apps/server/src/providers/simple-query-service.ts +++ b/apps/server/src/providers/simple-query-service.ts @@ -16,8 +16,6 @@ import { ProviderFactory } from './provider-factory.js'; import type { - ProviderMessage, - ContentBlock, ThinkingLevel, ReasoningEffort, ClaudeApiProfile, @@ -96,7 +94,7 @@ export interface StreamingQueryOptions extends SimpleQueryOptions { /** * Default model to use when none specified */ -const DEFAULT_MODEL = 'claude-sonnet-4-20250514'; +const DEFAULT_MODEL = 'claude-sonnet-4-6'; /** * Execute a simple query and return the text result diff --git a/apps/server/src/routes/agent/routes/history.ts b/apps/server/src/routes/agent/routes/history.ts index 0859a1420..e11578d71 100644 --- a/apps/server/src/routes/agent/routes/history.ts +++ b/apps/server/src/routes/agent/routes/history.ts @@ -16,7 +16,7 @@ export function createHistoryHandler(agentService: AgentService) { return; } - const result = agentService.getHistory(sessionId); + const result = await agentService.getHistory(sessionId); res.json(result); } catch (error) { logError(error, 'Get history failed'); diff --git a/apps/server/src/routes/agent/routes/queue-list.ts b/apps/server/src/routes/agent/routes/queue-list.ts index 1096c7015..7299e8716 100644 --- a/apps/server/src/routes/agent/routes/queue-list.ts +++ b/apps/server/src/routes/agent/routes/queue-list.ts @@ -19,7 +19,7 @@ export function createQueueListHandler(agentService: AgentService) { return; } - const result = agentService.getQueue(sessionId); + const result = await agentService.getQueue(sessionId); res.json(result); } catch (error) { logError(error, 'List queue failed'); diff --git a/apps/server/src/routes/agent/routes/send.ts b/apps/server/src/routes/agent/routes/send.ts index 15e97f633..4f6e527cc 100644 --- a/apps/server/src/routes/agent/routes/send.ts 
+++ b/apps/server/src/routes/agent/routes/send.ts @@ -53,7 +53,15 @@ export function createSendHandler(agentService: AgentService) { thinkingLevel, }) .catch((error) => { - logger.error('Background error in sendMessage():', error); + const errorMsg = (error as Error).message || 'Unknown error'; + logger.error(`Background error in sendMessage() for session ${sessionId}:`, errorMsg); + + // Emit error via WebSocket so the UI is notified even though + // the HTTP response already returned 200. This is critical for + // session-not-found errors where sendMessage() throws before it + // can emit its own error event (no in-memory session to emit from). + agentService.emitSessionError(sessionId, errorMsg); + logError(error, 'Send message failed (background)'); }); diff --git a/apps/server/src/routes/agent/routes/start.ts b/apps/server/src/routes/agent/routes/start.ts index 1023fa389..dd9b7e419 100644 --- a/apps/server/src/routes/agent/routes/start.ts +++ b/apps/server/src/routes/agent/routes/start.ts @@ -6,7 +6,7 @@ import type { Request, Response } from 'express'; import { AgentService } from '../../../services/agent-service.js'; import { createLogger } from '@automaker/utils'; import { getErrorMessage, logError } from '../common.js'; -const logger = createLogger('Agent'); +const _logger = createLogger('Agent'); export function createStartHandler(agentService: AgentService) { return async (req: Request, res: Response): Promise => { diff --git a/apps/server/src/routes/app-spec/common.ts b/apps/server/src/routes/app-spec/common.ts index 1a48fc6a8..0731a7ddd 100644 --- a/apps/server/src/routes/app-spec/common.ts +++ b/apps/server/src/routes/app-spec/common.ts @@ -128,7 +128,7 @@ export function logAuthStatus(context: string): void { */ export function logError(error: unknown, context: string): void { logger.error(`❌ ${context}:`); - logger.error('Error name:', (error as any)?.name); + logger.error('Error name:', (error as Error)?.name); logger.error('Error message:', (error as Error)?.message); logger.error('Error stack:', (error as Error)?.stack); logger.error('Full error object:', JSON.stringify(error, Object.getOwnPropertyNames(error), 2)); diff --git a/apps/server/src/routes/app-spec/generate-features-from-spec.ts b/apps/server/src/routes/app-spec/generate-features-from-spec.ts index 6558256b9..93daeb8ef 100644 --- a/apps/server/src/routes/app-spec/generate-features-from-spec.ts +++ b/apps/server/src/routes/app-spec/generate-features-from-spec.ts @@ -30,7 +30,7 @@ const DEFAULT_MAX_FEATURES = 50; * Timeout for Codex models when generating features (5 minutes). * Codex models are slower and need more time to generate 50+ features. 
*/ -const CODEX_FEATURE_GENERATION_TIMEOUT_MS = 300000; // 5 minutes +const _CODEX_FEATURE_GENERATION_TIMEOUT_MS = 300000; // 5 minutes /** * Type for extracted features JSON response diff --git a/apps/server/src/routes/app-spec/sync-spec.ts b/apps/server/src/routes/app-spec/sync-spec.ts index d1ba139d5..53bdc91a3 100644 --- a/apps/server/src/routes/app-spec/sync-spec.ts +++ b/apps/server/src/routes/app-spec/sync-spec.ts @@ -29,7 +29,6 @@ import { updateTechnologyStack, updateRoadmapPhaseStatus, type ImplementedFeature, - type RoadmapPhase, } from '../../lib/xml-extractor.js'; import { getNotificationService } from '../../services/notification-service.js'; diff --git a/apps/server/src/routes/backlog-plan/generate-plan.ts b/apps/server/src/routes/backlog-plan/generate-plan.ts index 2bd3a6a75..c2548f24c 100644 --- a/apps/server/src/routes/backlog-plan/generate-plan.ts +++ b/apps/server/src/routes/backlog-plan/generate-plan.ts @@ -6,7 +6,7 @@ */ import type { EventEmitter } from '../../lib/events.js'; -import type { Feature, BacklogPlanResult, BacklogChange, DependencyUpdate } from '@automaker/types'; +import type { Feature, BacklogPlanResult } from '@automaker/types'; import { DEFAULT_PHASE_MODELS, isCursorModel, diff --git a/apps/server/src/routes/backlog-plan/routes/apply.ts b/apps/server/src/routes/backlog-plan/routes/apply.ts index 1a238d17f..e0fb71227 100644 --- a/apps/server/src/routes/backlog-plan/routes/apply.ts +++ b/apps/server/src/routes/backlog-plan/routes/apply.ts @@ -3,7 +3,7 @@ */ import type { Request, Response } from 'express'; -import type { BacklogPlanResult, BacklogChange, Feature } from '@automaker/types'; +import type { BacklogPlanResult } from '@automaker/types'; import { FeatureLoader } from '../../../services/feature-loader.js'; import { clearBacklogPlan, getErrorMessage, logError, logger } from '../common.js'; @@ -58,6 +58,9 @@ export function createApplyHandler() { if (feature.dependencies?.includes(change.featureId)) { const newDeps = feature.dependencies.filter((d) => d !== change.featureId); await featureLoader.update(projectPath, feature.id, { dependencies: newDeps }); + // Mutate the in-memory feature object so subsequent deletions use the updated + // dependency list and don't reintroduce already-removed dependency IDs. 
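+            // (Otherwise a later change processed in this loop would still see
+            // the stale feature.dependencies array containing change.featureId.)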
+ feature.dependencies = newDeps; logger.info( `[BacklogPlan] Removed dependency ${change.featureId} from ${feature.id}` ); diff --git a/apps/server/src/routes/features/routes/export.ts b/apps/server/src/routes/features/routes/export.ts index c767dda47..28a048b4b 100644 --- a/apps/server/src/routes/features/routes/export.ts +++ b/apps/server/src/routes/features/routes/export.ts @@ -36,7 +36,7 @@ interface ExportRequest { }; } -export function createExportHandler(featureLoader: FeatureLoader) { +export function createExportHandler(_featureLoader: FeatureLoader) { const exportService = getFeatureExportService(); return async (req: Request, res: Response): Promise => { diff --git a/apps/server/src/routes/features/routes/generate-title.ts b/apps/server/src/routes/features/routes/generate-title.ts index 4e5e0dcbe..a84680b0c 100644 --- a/apps/server/src/routes/features/routes/generate-title.ts +++ b/apps/server/src/routes/features/routes/generate-title.ts @@ -34,7 +34,7 @@ export function createGenerateTitleHandler( ): (req: Request, res: Response) => Promise { return async (req: Request, res: Response): Promise => { try { - const { description, projectPath } = req.body as GenerateTitleRequestBody; + const { description } = req.body as GenerateTitleRequestBody; if (!description || typeof description !== 'string') { const response: GenerateTitleErrorResponse = { diff --git a/apps/server/src/routes/features/routes/import.ts b/apps/server/src/routes/features/routes/import.ts index 85fb6d9be..aa8cfce14 100644 --- a/apps/server/src/routes/features/routes/import.ts +++ b/apps/server/src/routes/features/routes/import.ts @@ -33,7 +33,7 @@ interface ConflictInfo { hasConflict: boolean; } -export function createImportHandler(featureLoader: FeatureLoader) { +export function createImportHandler(_featureLoader: FeatureLoader) { const exportService = getFeatureExportService(); return async (req: Request, res: Response): Promise => { diff --git a/apps/server/src/routes/fs/index.ts b/apps/server/src/routes/fs/index.ts index 58732b3a9..9991c3461 100644 --- a/apps/server/src/routes/fs/index.ts +++ b/apps/server/src/routes/fs/index.ts @@ -19,6 +19,7 @@ import { createBrowseHandler } from './routes/browse.js'; import { createImageHandler } from './routes/image.js'; import { createSaveBoardBackgroundHandler } from './routes/save-board-background.js'; import { createDeleteBoardBackgroundHandler } from './routes/delete-board-background.js'; +import { createBrowseProjectFilesHandler } from './routes/browse-project-files.js'; export function createFsRoutes(_events: EventEmitter): Router { const router = Router(); @@ -37,6 +38,7 @@ export function createFsRoutes(_events: EventEmitter): Router { router.get('/image', createImageHandler()); router.post('/save-board-background', createSaveBoardBackgroundHandler()); router.post('/delete-board-background', createDeleteBoardBackgroundHandler()); + router.post('/browse-project-files', createBrowseProjectFilesHandler()); return router; } diff --git a/apps/server/src/routes/fs/routes/browse-project-files.ts b/apps/server/src/routes/fs/routes/browse-project-files.ts new file mode 100644 index 000000000..50afee0d2 --- /dev/null +++ b/apps/server/src/routes/fs/routes/browse-project-files.ts @@ -0,0 +1,191 @@ +/** + * POST /browse-project-files endpoint - Browse files and directories within a project + * + * Unlike /browse which only lists directories (for project folder selection), + * this endpoint lists both files and directories relative to a project root. 
+ * Used by the file selector for "Copy files to worktree" settings. + * + * Features: + * - Lists both files and directories + * - Hides .git, .worktrees, node_modules, and other build artifacts + * - Returns entries relative to the project root + * - Supports navigating into subdirectories + * - Security: prevents path traversal outside project root + */ + +import type { Request, Response } from 'express'; +import * as secureFs from '../../../lib/secure-fs.js'; +import path from 'path'; +import { PathNotAllowedError } from '@automaker/platform'; +import { getErrorMessage, logError } from '../common.js'; + +// Directories to hide from the listing (build artifacts, caches, etc.) +const HIDDEN_DIRECTORIES = new Set([ + '.git', + '.worktrees', + 'node_modules', + '.automaker', + '__pycache__', + '.cache', + '.next', + '.nuxt', + '.svelte-kit', + '.turbo', + '.vercel', + '.output', + 'coverage', + '.nyc_output', + 'dist', + 'build', + 'out', + '.tmp', + 'tmp', + '.venv', + 'venv', + 'target', + 'vendor', + '.gradle', + '.idea', + '.vscode', +]); + +interface ProjectFileEntry { + name: string; + relativePath: string; + isDirectory: boolean; + isFile: boolean; +} + +export function createBrowseProjectFilesHandler() { + return async (req: Request, res: Response): Promise => { + try { + const { projectPath, relativePath } = req.body as { + projectPath: string; + relativePath?: string; // Relative path within the project to browse (empty = project root) + }; + + if (!projectPath) { + res.status(400).json({ success: false, error: 'projectPath is required' }); + return; + } + + const resolvedProjectPath = path.resolve(projectPath); + + // Determine the target directory to browse + let targetPath = resolvedProjectPath; + let currentRelativePath = ''; + + if (relativePath) { + // Security: normalize and validate the relative path + const normalized = path.normalize(relativePath); + if (normalized.startsWith('..') || path.isAbsolute(normalized)) { + res.status(400).json({ + success: false, + error: 'Invalid relative path - must be within the project directory', + }); + return; + } + targetPath = path.join(resolvedProjectPath, normalized); + currentRelativePath = normalized; + + // Double-check the resolved path is within the project + // Use a separator-terminated prefix to prevent matching sibling dirs + // that share the same prefix (e.g. /projects/foo vs /projects/foobar). + const resolvedTarget = path.resolve(targetPath); + const projectPrefix = resolvedProjectPath.endsWith(path.sep) + ? resolvedProjectPath + : resolvedProjectPath + path.sep; + if (!resolvedTarget.startsWith(projectPrefix) && resolvedTarget !== resolvedProjectPath) { + res.status(400).json({ + success: false, + error: 'Path traversal detected', + }); + return; + } + } + + // Determine parent relative path + let parentRelativePath: string | null = null; + if (currentRelativePath) { + const parent = path.dirname(currentRelativePath); + parentRelativePath = parent === '.' ? '' : parent; + } + + try { + const stat = await secureFs.stat(targetPath); + + if (!stat.isDirectory()) { + res.status(400).json({ success: false, error: 'Path is not a directory' }); + return; + } + + // Read directory contents + const dirEntries = await secureFs.readdir(targetPath, { withFileTypes: true }); + + // Filter and map entries + const entries: ProjectFileEntry[] = dirEntries + .filter((entry) => { + // Skip hidden directories (build artifacts, etc.) 
+          if (entry.isDirectory() && HIDDEN_DIRECTORIES.has(entry.name)) {
+            return false;
+          }
+          // Keep dotfiles visible (users often need .env, .eslintrc, etc.);
+          // only the directories in HIDDEN_DIRECTORIES above are filtered out.
+          return true;
+        })
+        .map((entry) => {
+          const entryRelativePath = currentRelativePath
+            ? path.posix.join(currentRelativePath.replace(/\\/g, '/'), entry.name)
+            : entry.name;
+
+          return {
+            name: entry.name,
+            relativePath: entryRelativePath,
+            isDirectory: entry.isDirectory(),
+            isFile: entry.isFile(),
+          };
+        })
+        // Sort: directories first, then files, alphabetically within each group
+        .sort((a, b) => {
+          if (a.isDirectory !== b.isDirectory) {
+            return a.isDirectory ? -1 : 1;
+          }
+          return a.name.localeCompare(b.name);
+        });
+
+      res.json({
+        success: true,
+        currentRelativePath,
+        parentRelativePath,
+        entries,
+      });
+      } catch (error) {
+        const errorMessage = error instanceof Error ? error.message : 'Failed to read directory';
+        const isPermissionError = errorMessage.includes('EPERM') || errorMessage.includes('EACCES');
+
+        if (isPermissionError) {
+          res.json({
+            success: true,
+            currentRelativePath,
+            parentRelativePath,
+            entries: [],
+            warning: 'Permission denied - unable to read this directory',
+          });
+        } else {
+          res.status(400).json({
+            success: false,
+            error: errorMessage,
+          });
+        }
+      }
+    } catch (error) {
+      if (error instanceof PathNotAllowedError) {
+        res.status(403).json({ success: false, error: getErrorMessage(error) });
+        return;
+      }
+
+      logError(error, 'Browse project files failed');
+      res.status(500).json({ success: false, error: getErrorMessage(error) });
+    }
+  };
+}
diff --git a/apps/server/src/routes/fs/routes/mkdir.ts b/apps/server/src/routes/fs/routes/mkdir.ts
index 04d0a8362..f813abcd6 100644
--- a/apps/server/src/routes/fs/routes/mkdir.ts
+++ b/apps/server/src/routes/fs/routes/mkdir.ts
@@ -35,9 +35,9 @@ export function createMkdirHandler() {
         error: 'Path exists and is not a directory',
       });
       return;
-    } catch (statError: any) {
+    } catch (statError: unknown) {
       // ENOENT means path doesn't exist - we should create it
-      if (statError.code !== 'ENOENT') {
+      if ((statError as NodeJS.ErrnoException).code !== 'ENOENT') {
         // Some other error (could be ELOOP in parent path)
         throw statError;
       }
@@ -47,7 +47,7 @@
       await secureFs.mkdir(resolvedPath, { recursive: true });

       res.json({ success: true });
-    } catch (error: any) {
+    } catch (error: unknown) {
       // Path not allowed - return 403 Forbidden
       if (error instanceof PathNotAllowedError) {
         res.status(403).json({ success: false, error: getErrorMessage(error) });
@@ -55,7 +55,7 @@
       }

       // Handle ELOOP specifically
-      if (error.code === 'ELOOP') {
+      if ((error as NodeJS.ErrnoException).code === 'ELOOP') {
         logError(error, 'Create directory failed - symlink loop detected');
         res.status(400).json({
           success: false,
diff --git a/apps/server/src/routes/fs/routes/resolve-directory.ts b/apps/server/src/routes/fs/routes/resolve-directory.ts
index 5e4147db9..be5a5b0d2 100644
--- a/apps/server/src/routes/fs/routes/resolve-directory.ts
+++ b/apps/server/src/routes/fs/routes/resolve-directory.ts
@@ -10,7 +10,11 @@ import { getErrorMessage, logError } from '../common.js';
 export function createResolveDirectoryHandler() {
   return async (req: Request, res: Response): Promise<void> => {
     try {
-      const { directoryName, sampleFiles, fileCount } = req.body as {
+      const {
+        directoryName,
+        sampleFiles,
+        fileCount: _fileCount,
+      } = req.body as {
directoryName: string; sampleFiles?: string[]; fileCount?: number; diff --git a/apps/server/src/routes/fs/routes/save-board-background.ts b/apps/server/src/routes/fs/routes/save-board-background.ts index a0c2164a8..e8b82169a 100644 --- a/apps/server/src/routes/fs/routes/save-board-background.ts +++ b/apps/server/src/routes/fs/routes/save-board-background.ts @@ -11,10 +11,9 @@ import { getBoardDir } from '@automaker/platform'; export function createSaveBoardBackgroundHandler() { return async (req: Request, res: Response): Promise => { try { - const { data, filename, mimeType, projectPath } = req.body as { + const { data, filename, projectPath } = req.body as { data: string; filename: string; - mimeType: string; projectPath: string; }; diff --git a/apps/server/src/routes/fs/routes/save-image.ts b/apps/server/src/routes/fs/routes/save-image.ts index 695a8dedf..4d48661cf 100644 --- a/apps/server/src/routes/fs/routes/save-image.ts +++ b/apps/server/src/routes/fs/routes/save-image.ts @@ -12,10 +12,9 @@ import { sanitizeFilename } from '@automaker/utils'; export function createSaveImageHandler() { return async (req: Request, res: Response): Promise => { try { - const { data, filename, mimeType, projectPath } = req.body as { + const { data, filename, projectPath } = req.body as { data: string; filename: string; - mimeType: string; projectPath: string; }; diff --git a/apps/server/src/routes/fs/routes/validate-path.ts b/apps/server/src/routes/fs/routes/validate-path.ts index 8659eb5ac..9405e0c1e 100644 --- a/apps/server/src/routes/fs/routes/validate-path.ts +++ b/apps/server/src/routes/fs/routes/validate-path.ts @@ -5,7 +5,7 @@ import type { Request, Response } from 'express'; import * as secureFs from '../../../lib/secure-fs.js'; import path from 'path'; -import { isPathAllowed, PathNotAllowedError, getAllowedRootDirectory } from '@automaker/platform'; +import { isPathAllowed, getAllowedRootDirectory } from '@automaker/platform'; import { getErrorMessage, logError } from '../common.js'; export function createValidatePathHandler() { diff --git a/apps/server/src/routes/gemini/index.ts b/apps/server/src/routes/gemini/index.ts new file mode 100644 index 000000000..f49ef634c --- /dev/null +++ b/apps/server/src/routes/gemini/index.ts @@ -0,0 +1,66 @@ +import { Router, Request, Response } from 'express'; +import { GeminiProvider } from '../../providers/gemini-provider.js'; +import { GeminiUsageService } from '../../services/gemini-usage-service.js'; +import { createLogger } from '@automaker/utils'; +import type { EventEmitter } from '../../lib/events.js'; + +const logger = createLogger('Gemini'); + +export function createGeminiRoutes( + usageService: GeminiUsageService, + _events: EventEmitter +): Router { + const router = Router(); + + // Get current usage/quota data from Google Cloud API + router.get('/usage', async (_req: Request, res: Response) => { + try { + const usageData = await usageService.fetchUsageData(); + + res.json(usageData); + } catch (error) { + const message = error instanceof Error ? 
error.message : 'Unknown error'; + logger.error('Error fetching Gemini usage:', error); + + // Return error in a format the UI expects + res.status(200).json({ + authenticated: false, + authMethod: 'none', + usedPercent: 0, + remainingPercent: 100, + lastUpdated: new Date().toISOString(), + error: `Failed to fetch Gemini usage: ${message}`, + }); + } + }); + + // Check if Gemini is available + router.get('/status', async (_req: Request, res: Response) => { + try { + const provider = new GeminiProvider(); + const status = await provider.detectInstallation(); + + // Derive authMethod from typed InstallationStatus fields + const authMethod = status.authenticated + ? status.hasApiKey + ? 'api_key' + : 'cli_login' + : 'none'; + + res.json({ + success: true, + installed: status.installed, + version: status.version || null, + path: status.path || null, + authenticated: status.authenticated || false, + authMethod, + hasCredentialsFile: false, + }); + } catch (error) { + const message = error instanceof Error ? error.message : 'Unknown error'; + res.status(500).json({ success: false, error: message }); + } + }); + + return router; +} diff --git a/apps/server/src/routes/git/index.ts b/apps/server/src/routes/git/index.ts index 5e959ec97..e6bf5a0c2 100644 --- a/apps/server/src/routes/git/index.ts +++ b/apps/server/src/routes/git/index.ts @@ -6,12 +6,18 @@ import { Router } from 'express'; import { validatePathParams } from '../../middleware/validate-paths.js'; import { createDiffsHandler } from './routes/diffs.js'; import { createFileDiffHandler } from './routes/file-diff.js'; +import { createStageFilesHandler } from './routes/stage-files.js'; export function createGitRoutes(): Router { const router = Router(); router.post('/diffs', validatePathParams('projectPath'), createDiffsHandler()); router.post('/file-diff', validatePathParams('projectPath', 'filePath'), createFileDiffHandler()); + router.post( + '/stage-files', + validatePathParams('projectPath', 'files[]'), + createStageFilesHandler() + ); return router; } diff --git a/apps/server/src/routes/git/routes/stage-files.ts b/apps/server/src/routes/git/routes/stage-files.ts new file mode 100644 index 000000000..98ca44c1a --- /dev/null +++ b/apps/server/src/routes/git/routes/stage-files.ts @@ -0,0 +1,67 @@ +/** + * POST /stage-files endpoint - Stage or unstage files in the main project + */ + +import type { Request, Response } from 'express'; +import { getErrorMessage, logError } from '../common.js'; +import { stageFiles, StageFilesValidationError } from '../../../services/stage-files-service.js'; + +export function createStageFilesHandler() { + return async (req: Request, res: Response): Promise => { + try { + const { projectPath, files, operation } = req.body as { + projectPath: string; + files: string[]; + operation: 'stage' | 'unstage'; + }; + + if (!projectPath) { + res.status(400).json({ + success: false, + error: 'projectPath required', + }); + return; + } + + if (!Array.isArray(files) || files.length === 0) { + res.status(400).json({ + success: false, + error: 'files array required and must not be empty', + }); + return; + } + + for (const file of files) { + if (typeof file !== 'string' || file.trim() === '') { + res.status(400).json({ + success: false, + error: 'Each element of files must be a non-empty string', + }); + return; + } + } + + if (operation !== 'stage' && operation !== 'unstage') { + res.status(400).json({ + success: false, + error: 'operation must be "stage" or "unstage"', + }); + return; + } + + const result = await 
stageFiles(projectPath, files, operation); + + res.json({ + success: true, + result, + }); + } catch (error) { + if (error instanceof StageFilesValidationError) { + res.status(400).json({ success: false, error: error.message }); + return; + } + logError(error, `${(req.body as { operation?: string })?.operation ?? 'stage'} files failed`); + res.status(500).json({ success: false, error: getErrorMessage(error) }); + } + }; +} diff --git a/apps/server/src/routes/github/routes/validation-endpoints.ts b/apps/server/src/routes/github/routes/validation-endpoints.ts index 218597371..1f3c23161 100644 --- a/apps/server/src/routes/github/routes/validation-endpoints.ts +++ b/apps/server/src/routes/github/routes/validation-endpoints.ts @@ -6,7 +6,6 @@ import type { Request, Response } from 'express'; import type { EventEmitter } from '../../../lib/events.js'; import type { IssueValidationEvent } from '@automaker/types'; import { - isValidationRunning, getValidationStatus, getRunningValidations, abortValidation, @@ -15,7 +14,6 @@ import { logger, } from './validation-common.js'; import { - readValidation, getAllValidations, getValidationWithFreshness, deleteValidation, diff --git a/apps/server/src/routes/models/routes/providers.ts b/apps/server/src/routes/models/routes/providers.ts index 174a1faca..fa4d2828d 100644 --- a/apps/server/src/routes/models/routes/providers.ts +++ b/apps/server/src/routes/models/routes/providers.ts @@ -12,7 +12,7 @@ export function createProvidersHandler() { // Get installation status from all providers const statuses = await ProviderFactory.checkAllProviders(); - const providers: Record = { + const providers: Record> = { anthropic: { available: statuses.claude?.installed || false, hasApiKey: !!process.env.ANTHROPIC_API_KEY, diff --git a/apps/server/src/routes/settings/routes/update-global.ts b/apps/server/src/routes/settings/routes/update-global.ts index 817b5c1da..2bc1c2fa2 100644 --- a/apps/server/src/routes/settings/routes/update-global.ts +++ b/apps/server/src/routes/settings/routes/update-global.ts @@ -46,16 +46,14 @@ export function createUpdateGlobalHandler(settingsService: SettingsService) { } // Minimal debug logging to help diagnose accidental wipes. - const projectsLen = Array.isArray((updates as any).projects) - ? (updates as any).projects.length - : undefined; - const trashedLen = Array.isArray((updates as any).trashedProjects) - ? (updates as any).trashedProjects.length + const projectsLen = Array.isArray(updates.projects) ? updates.projects.length : undefined; + const trashedLen = Array.isArray(updates.trashedProjects) + ? updates.trashedProjects.length : undefined; logger.info( `[SERVER_SETTINGS_UPDATE] Request received: projects=${projectsLen ?? 'n/a'}, trashedProjects=${trashedLen ?? 'n/a'}, theme=${ - (updates as any).theme ?? 'n/a' - }, localStorageMigrated=${(updates as any).localStorageMigrated ?? 'n/a'}` + updates.theme ?? 'n/a' + }, localStorageMigrated=${updates.localStorageMigrated ?? 
'n/a'}` ); // Get old settings to detect theme changes diff --git a/apps/server/src/routes/setup/routes/auth-claude.ts b/apps/server/src/routes/setup/routes/auth-claude.ts index 97a170f48..9eac09895 100644 --- a/apps/server/src/routes/setup/routes/auth-claude.ts +++ b/apps/server/src/routes/setup/routes/auth-claude.ts @@ -4,13 +4,9 @@ import type { Request, Response } from 'express'; import { getErrorMessage, logError } from '../common.js'; -import { exec } from 'child_process'; -import { promisify } from 'util'; import * as fs from 'fs'; import * as path from 'path'; -const execAsync = promisify(exec); - export function createAuthClaudeHandler() { return async (_req: Request, res: Response): Promise => { try { diff --git a/apps/server/src/routes/setup/routes/auth-opencode.ts b/apps/server/src/routes/setup/routes/auth-opencode.ts index 7d7f35e25..dce314bf3 100644 --- a/apps/server/src/routes/setup/routes/auth-opencode.ts +++ b/apps/server/src/routes/setup/routes/auth-opencode.ts @@ -4,13 +4,9 @@ import type { Request, Response } from 'express'; import { logError, getErrorMessage } from '../common.js'; -import { exec } from 'child_process'; -import { promisify } from 'util'; import * as fs from 'fs'; import * as path from 'path'; -const execAsync = promisify(exec); - export function createAuthOpencodeHandler() { return async (_req: Request, res: Response): Promise => { try { diff --git a/apps/server/src/routes/setup/routes/copilot-models.ts b/apps/server/src/routes/setup/routes/copilot-models.ts index 5a3da128c..08b9eda90 100644 --- a/apps/server/src/routes/setup/routes/copilot-models.ts +++ b/apps/server/src/routes/setup/routes/copilot-models.ts @@ -10,9 +10,6 @@ import type { Request, Response } from 'express'; import { CopilotProvider } from '../../../providers/copilot-provider.js'; import { getErrorMessage, logError } from '../common.js'; import type { ModelDefinition } from '@automaker/types'; -import { createLogger } from '@automaker/utils'; - -const logger = createLogger('CopilotModelsRoute'); // Singleton provider instance for caching let providerInstance: CopilotProvider | null = null; diff --git a/apps/server/src/routes/setup/routes/opencode-models.ts b/apps/server/src/routes/setup/routes/opencode-models.ts index a3b2b7bee..e7909bf99 100644 --- a/apps/server/src/routes/setup/routes/opencode-models.ts +++ b/apps/server/src/routes/setup/routes/opencode-models.ts @@ -14,9 +14,6 @@ import { } from '../../../providers/opencode-provider.js'; import { getErrorMessage, logError } from '../common.js'; import type { ModelDefinition } from '@automaker/types'; -import { createLogger } from '@automaker/utils'; - -const logger = createLogger('OpenCodeModelsRoute'); // Singleton provider instance for caching let providerInstance: OpencodeProvider | null = null; diff --git a/apps/server/src/routes/setup/routes/verify-claude-auth.ts b/apps/server/src/routes/setup/routes/verify-claude-auth.ts index 7df27c3dc..18a40bf81 100644 --- a/apps/server/src/routes/setup/routes/verify-claude-auth.ts +++ b/apps/server/src/routes/setup/routes/verify-claude-auth.ts @@ -110,6 +110,7 @@ export function createVerifyClaudeAuthHandler() { let authenticated = false; let errorMessage = ''; let receivedAnyContent = false; + let cleanupEnv: (() => void) | undefined; // Create secure auth session const sessionId = `claude-auth-${Date.now()}-${Math.random().toString(36).substr(2, 9)}`; @@ -151,13 +152,13 @@ export function createVerifyClaudeAuthHandler() { AuthSessionManager.createSession(sessionId, authMethod || 
'api_key', apiKey, 'anthropic'); // Create temporary environment override for SDK call - const cleanupEnv = createTempEnvOverride(authEnv); + cleanupEnv = createTempEnvOverride(authEnv); // Run a minimal query to verify authentication const stream = query({ prompt: "Reply with only the word 'ok'", options: { - model: 'claude-sonnet-4-20250514', + model: 'claude-sonnet-4-6', maxTurns: 1, allowedTools: [], abortController, @@ -194,8 +195,10 @@ export function createVerifyClaudeAuthHandler() { } // Check specifically for assistant messages with text content - if (msg.type === 'assistant' && (msg as any).message?.content) { - const content = (msg as any).message.content; + const msgRecord = msg as Record; + const msgMessage = msgRecord.message as Record | undefined; + if (msg.type === 'assistant' && msgMessage?.content) { + const content = msgMessage.content; if (Array.isArray(content)) { for (const block of content) { if (block.type === 'text' && block.text) { @@ -311,6 +314,8 @@ export function createVerifyClaudeAuthHandler() { } } finally { clearTimeout(timeoutId); + // Restore process.env to its original state + cleanupEnv?.(); // Clean up the auth session AuthSessionManager.destroySession(sessionId); } diff --git a/apps/server/src/routes/terminal/common.ts b/apps/server/src/routes/terminal/common.ts index 6121e3453..5e8b6b329 100644 --- a/apps/server/src/routes/terminal/common.ts +++ b/apps/server/src/routes/terminal/common.ts @@ -5,7 +5,6 @@ import { randomBytes } from 'crypto'; import { createLogger } from '@automaker/utils'; import type { Request, Response, NextFunction } from 'express'; -import { getTerminalService } from '../../services/terminal-service.js'; const logger = createLogger('Terminal'); diff --git a/apps/server/src/routes/terminal/routes/auth.ts b/apps/server/src/routes/terminal/routes/auth.ts index 1d6156bd1..0aa29b345 100644 --- a/apps/server/src/routes/terminal/routes/auth.ts +++ b/apps/server/src/routes/terminal/routes/auth.ts @@ -9,7 +9,6 @@ import { generateToken, addToken, getTokenExpiryMs, - getErrorMessage, } from '../common.js'; export function createAuthHandler() { diff --git a/apps/server/src/routes/worktree/common.ts b/apps/server/src/routes/worktree/common.ts index 75c3a437f..43c66bce8 100644 --- a/apps/server/src/routes/worktree/common.ts +++ b/apps/server/src/routes/worktree/common.ts @@ -2,59 +2,21 @@ * Common utilities for worktree routes */ -import { createLogger } from '@automaker/utils'; -import { spawnProcess } from '@automaker/platform'; +import { createLogger, isValidBranchName, MAX_BRANCH_NAME_LENGTH } from '@automaker/utils'; import { exec } from 'child_process'; import { promisify } from 'util'; import { getErrorMessage as getErrorMessageShared, createLogError } from '../common.js'; +// Re-export execGitCommand from the canonical shared module so any remaining +// consumers that import from this file continue to work. +export { execGitCommand } from '../../lib/git.js'; + const logger = createLogger('Worktree'); export const execAsync = promisify(exec); -// ============================================================================ -// Secure Command Execution -// ============================================================================ - -/** - * Execute git command with array arguments to prevent command injection. - * Uses spawnProcess from @automaker/platform for secure, cross-platform execution. 
- * - * @param args - Array of git command arguments (e.g., ['worktree', 'add', path]) - * @param cwd - Working directory to execute the command in - * @returns Promise resolving to stdout output - * @throws Error with stderr message if command fails - * - * @example - * ```typescript - * // Safe: no injection possible - * await execGitCommand(['branch', '-D', branchName], projectPath); - * - * // Instead of unsafe: - * // await execAsync(`git branch -D ${branchName}`, { cwd }); - * ``` - */ -export async function execGitCommand(args: string[], cwd: string): Promise { - const result = await spawnProcess({ - command: 'git', - args, - cwd, - }); - - // spawnProcess returns { stdout, stderr, exitCode } - if (result.exitCode === 0) { - return result.stdout; - } else { - const errorMessage = result.stderr || `Git command failed with code ${result.exitCode}`; - throw new Error(errorMessage); - } -} - -// ============================================================================ -// Constants -// ============================================================================ - -/** Maximum allowed length for git branch names */ -export const MAX_BRANCH_NAME_LENGTH = 250; +// Re-export git validation utilities from the canonical shared module so +// existing consumers that import from this file continue to work. +export { isValidBranchName, MAX_BRANCH_NAME_LENGTH }; // ============================================================================ // Extended PATH configuration for Electron apps @@ -98,17 +60,23 @@ export const execEnv = { PATH: extendedPath, }; -// ============================================================================ -// Validation utilities -// ============================================================================ - /** - * Validate branch name to prevent command injection. - * Git branch names cannot contain: space, ~, ^, :, ?, *, [, \, or control chars. - * We also reject shell metacharacters for safety. + * Validate git remote name to prevent command injection. + * Matches the strict validation used in add-remote.ts: + * - Rejects empty strings and names that are too long + * - Disallows names that start with '-' or '.' + * - Forbids the substring '..' 
+ * - Rejects '/' characters + * - Rejects NUL bytes + * - Must consist only of alphanumerics, hyphens, underscores, and dots */ -export function isValidBranchName(name: string): boolean { - return /^[a-zA-Z0-9._\-/]+$/.test(name) && name.length < MAX_BRANCH_NAME_LENGTH; +export function isValidRemoteName(name: string): boolean { + if (!name || name.length === 0 || name.length >= MAX_BRANCH_NAME_LENGTH) return false; + if (name.startsWith('-') || name.startsWith('.')) return false; + if (name.includes('..')) return false; + if (name.includes('/')) return false; + if (name.includes('\0')) return false; + return /^[a-zA-Z0-9._-]+$/.test(name); } /** diff --git a/apps/server/src/routes/worktree/index.ts b/apps/server/src/routes/worktree/index.ts index a7df37bb3..a788bb483 100644 --- a/apps/server/src/routes/worktree/index.ts +++ b/apps/server/src/routes/worktree/index.ts @@ -51,9 +51,22 @@ import { createDeleteInitScriptHandler, createRunInitScriptHandler, } from './routes/init-script.js'; +import { createCommitLogHandler } from './routes/commit-log.js'; import { createDiscardChangesHandler } from './routes/discard-changes.js'; import { createListRemotesHandler } from './routes/list-remotes.js'; import { createAddRemoteHandler } from './routes/add-remote.js'; +import { createStashPushHandler } from './routes/stash-push.js'; +import { createStashListHandler } from './routes/stash-list.js'; +import { createStashApplyHandler } from './routes/stash-apply.js'; +import { createStashDropHandler } from './routes/stash-drop.js'; +import { createCherryPickHandler } from './routes/cherry-pick.js'; +import { createBranchCommitLogHandler } from './routes/branch-commit-log.js'; +import { createGeneratePRDescriptionHandler } from './routes/generate-pr-description.js'; +import { createRebaseHandler } from './routes/rebase.js'; +import { createAbortOperationHandler } from './routes/abort-operation.js'; +import { createContinueOperationHandler } from './routes/continue-operation.js'; +import { createStageFilesHandler } from './routes/stage-files.js'; +import { createCheckChangesHandler } from './routes/check-changes.js'; import type { SettingsService } from '../../services/settings-service.js'; export function createWorktreeRoutes( @@ -71,9 +84,13 @@ export function createWorktreeRoutes( '/merge', validatePathParams('projectPath'), requireValidProject, - createMergeHandler() + createMergeHandler(events) + ); + router.post( + '/create', + validatePathParams('projectPath'), + createCreateHandler(events, settingsService) ); - router.post('/create', validatePathParams('projectPath'), createCreateHandler(events)); router.post('/delete', validatePathParams('projectPath', 'worktreePath'), createDeleteHandler()); router.post('/create-pr', createCreatePRHandler()); router.post('/pr-info', createPRInfoHandler()); @@ -105,7 +122,13 @@ export function createWorktreeRoutes( '/checkout-branch', validatePathParams('worktreePath'), requireValidWorktree, - createCheckoutBranchHandler() + createCheckoutBranchHandler(events) + ); + router.post( + '/check-changes', + validatePathParams('worktreePath'), + requireGitRepoOnly, + createCheckChangesHandler() ); router.post( '/list-branches', @@ -113,7 +136,12 @@ export function createWorktreeRoutes( requireValidWorktree, createListBranchesHandler() ); - router.post('/switch-branch', requireValidWorktree, createSwitchBranchHandler()); + router.post( + '/switch-branch', + validatePathParams('worktreePath'), + requireValidWorktree, + createSwitchBranchHandler(events) + ); 
router.post('/open-in-editor', validatePathParams('worktreePath'), createOpenInEditorHandler()); router.post( '/open-in-terminal', @@ -192,5 +220,95 @@ export function createWorktreeRoutes( createAddRemoteHandler() ); + // Commit log route + router.post( + '/commit-log', + validatePathParams('worktreePath'), + requireValidWorktree, + createCommitLogHandler(events) + ); + + // Stash routes + router.post( + '/stash-push', + validatePathParams('worktreePath'), + requireGitRepoOnly, + createStashPushHandler(events) + ); + router.post( + '/stash-list', + validatePathParams('worktreePath'), + requireGitRepoOnly, + createStashListHandler(events) + ); + router.post( + '/stash-apply', + validatePathParams('worktreePath'), + requireGitRepoOnly, + createStashApplyHandler(events) + ); + router.post( + '/stash-drop', + validatePathParams('worktreePath'), + requireGitRepoOnly, + createStashDropHandler(events) + ); + + // Cherry-pick route + router.post( + '/cherry-pick', + validatePathParams('worktreePath'), + requireValidWorktree, + createCherryPickHandler(events) + ); + + // Generate PR description route + router.post( + '/generate-pr-description', + validatePathParams('worktreePath'), + requireGitRepoOnly, + createGeneratePRDescriptionHandler(settingsService) + ); + + // Branch commit log route (get commits from a specific branch) + router.post( + '/branch-commit-log', + validatePathParams('worktreePath'), + requireValidWorktree, + createBranchCommitLogHandler(events) + ); + + // Rebase route + router.post( + '/rebase', + validatePathParams('worktreePath'), + requireValidWorktree, + createRebaseHandler(events) + ); + + // Abort in-progress merge/rebase/cherry-pick + router.post( + '/abort-operation', + validatePathParams('worktreePath'), + requireGitRepoOnly, + createAbortOperationHandler(events) + ); + + // Continue in-progress merge/rebase/cherry-pick after resolving conflicts + router.post( + '/continue-operation', + validatePathParams('worktreePath'), + requireGitRepoOnly, + createContinueOperationHandler(events) + ); + + // Stage/unstage files route + router.post( + '/stage-files', + validatePathParams('worktreePath', 'files[]'), + requireGitRepoOnly, + createStageFilesHandler() + ); + return router; } diff --git a/apps/server/src/routes/worktree/routes/abort-operation.ts b/apps/server/src/routes/worktree/routes/abort-operation.ts new file mode 100644 index 000000000..297e2ac8a --- /dev/null +++ b/apps/server/src/routes/worktree/routes/abort-operation.ts @@ -0,0 +1,117 @@ +/** + * POST /abort-operation endpoint - Abort an in-progress merge, rebase, or cherry-pick + * + * Detects which operation (merge, rebase, or cherry-pick) is in progress + * and aborts it, returning the repository to a clean state. 
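+ *
+ * Detection inspects the worktree's git dir for the standard marker files:
+ * rebase-merge/ or rebase-apply/ (rebase), MERGE_HEAD (merge), and
+ * CHERRY_PICK_HEAD (cherry-pick); see detectOperation below.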
+ */ + +import type { Request, Response } from 'express'; +import path from 'path'; +import * as fs from 'fs/promises'; +import { getErrorMessage, logError, execAsync } from '../common.js'; +import type { EventEmitter } from '../../../lib/events.js'; + +/** + * Detect what type of conflict operation is currently in progress + */ +async function detectOperation( + worktreePath: string +): Promise<'merge' | 'rebase' | 'cherry-pick' | null> { + try { + const { stdout: gitDirRaw } = await execAsync('git rev-parse --git-dir', { + cwd: worktreePath, + }); + const gitDir = path.resolve(worktreePath, gitDirRaw.trim()); + + const [rebaseMergeExists, rebaseApplyExists, mergeHeadExists, cherryPickHeadExists] = + await Promise.all([ + fs + .access(path.join(gitDir, 'rebase-merge')) + .then(() => true) + .catch(() => false), + fs + .access(path.join(gitDir, 'rebase-apply')) + .then(() => true) + .catch(() => false), + fs + .access(path.join(gitDir, 'MERGE_HEAD')) + .then(() => true) + .catch(() => false), + fs + .access(path.join(gitDir, 'CHERRY_PICK_HEAD')) + .then(() => true) + .catch(() => false), + ]); + + if (rebaseMergeExists || rebaseApplyExists) return 'rebase'; + if (mergeHeadExists) return 'merge'; + if (cherryPickHeadExists) return 'cherry-pick'; + return null; + } catch { + return null; + } +} + +export function createAbortOperationHandler(events: EventEmitter) { + return async (req: Request, res: Response): Promise => { + try { + const { worktreePath } = req.body as { + worktreePath: string; + }; + + if (!worktreePath) { + res.status(400).json({ + success: false, + error: 'worktreePath is required', + }); + return; + } + + const resolvedWorktreePath = path.resolve(worktreePath); + + // Detect what operation is in progress + const operation = await detectOperation(resolvedWorktreePath); + + if (!operation) { + res.status(400).json({ + success: false, + error: 'No merge, rebase, or cherry-pick in progress', + }); + return; + } + + // Abort the operation + let abortCommand: string; + switch (operation) { + case 'merge': + abortCommand = 'git merge --abort'; + break; + case 'rebase': + abortCommand = 'git rebase --abort'; + break; + case 'cherry-pick': + abortCommand = 'git cherry-pick --abort'; + break; + } + + await execAsync(abortCommand, { cwd: resolvedWorktreePath }); + + // Emit event + events.emit('conflict:aborted', { + worktreePath: resolvedWorktreePath, + operation, + }); + + res.json({ + success: true, + result: { + operation, + message: `${operation.charAt(0).toUpperCase() + operation.slice(1)} aborted successfully`, + }, + }); + } catch (error) { + logError(error, 'Abort operation failed'); + res.status(500).json({ success: false, error: getErrorMessage(error) }); + } + }; +} diff --git a/apps/server/src/routes/worktree/routes/branch-commit-log.ts b/apps/server/src/routes/worktree/routes/branch-commit-log.ts new file mode 100644 index 000000000..60562d975 --- /dev/null +++ b/apps/server/src/routes/worktree/routes/branch-commit-log.ts @@ -0,0 +1,92 @@ +/** + * POST /branch-commit-log endpoint - Get recent commit history for a specific branch + * + * Similar to commit-log but allows specifying a branch name to get commits from + * any branch, not just the currently checked out one. Useful for cherry-pick workflows + * where you need to browse commits from other branches. + * + * The handler only validates input, invokes the service, streams lifecycle events + * via the EventEmitter, and sends the final JSON response. 
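+ *
+ * Lifecycle events emitted: branchCommitLog:start, branchCommitLog:progress,
+ * branchCommitLog:done, and branchCommitLog:error.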
+ *
+ * Note: Git repository validation (isGitRepo, hasCommits) is handled by
+ * the requireValidWorktree middleware in index.ts
+ */
+
+import type { Request, Response } from 'express';
+import type { EventEmitter } from '../../../lib/events.js';
+import { getErrorMessage, logError } from '../common.js';
+import { getBranchCommitLog } from '../../../services/branch-commit-log-service.js';
+import { isValidBranchName } from '@automaker/utils';
+
+export function createBranchCommitLogHandler(events: EventEmitter) {
+  return async (req: Request, res: Response): Promise<void> => {
+    try {
+      const {
+        worktreePath,
+        branchName,
+        limit = 20,
+      } = req.body as {
+        worktreePath: string;
+        branchName?: string;
+        limit?: number;
+      };
+
+      if (!worktreePath) {
+        res.status(400).json({
+          success: false,
+          error: 'worktreePath required',
+        });
+        return;
+      }
+
+      // Validate branchName before forwarding to execGitCommand.
+      // Reject values that start with '-', contain NUL, contain path-traversal
+      // sequences, or include characters outside the safe whitelist.
+      // An absent branchName is allowed (the service defaults it to HEAD).
+      if (branchName !== undefined && !isValidBranchName(branchName)) {
+        res.status(400).json({
+          success: false,
+          error: 'Invalid branchName: value contains unsafe characters or sequences',
+        });
+        return;
+      }
+
+      // Emit start event so the frontend can observe progress
+      events.emit('branchCommitLog:start', {
+        worktreePath,
+        branchName: branchName || 'HEAD',
+        limit,
+      });
+
+      // Delegate all Git work to the service
+      const result = await getBranchCommitLog(worktreePath, branchName, limit);
+
+      // Emit progress with the number of commits fetched
+      events.emit('branchCommitLog:progress', {
+        worktreePath,
+        branchName: result.branch,
+        commitsLoaded: result.total,
+      });
+
+      // Emit done event
+      events.emit('branchCommitLog:done', {
+        worktreePath,
+        branchName: result.branch,
+        total: result.total,
+      });
+
+      res.json({
+        success: true,
+        result,
+      });
+    } catch (error) {
+      // Emit error event so the frontend can react
+      events.emit('branchCommitLog:error', {
+        error: getErrorMessage(error),
+      });
+
+      logError(error, 'Get branch commit log failed');
+      res.status(500).json({ success: false, error: getErrorMessage(error) });
+    }
+  };
+}
diff --git a/apps/server/src/routes/worktree/routes/branch-tracking.ts b/apps/server/src/routes/worktree/routes/branch-tracking.ts
index 1c9f069a3..4144b94a3 100644
--- a/apps/server/src/routes/worktree/routes/branch-tracking.ts
+++ b/apps/server/src/routes/worktree/routes/branch-tracking.ts
@@ -31,8 +31,8 @@ export async function getTrackedBranches(projectPath: string): Promise<…> {
[…]
diff --git a/apps/server/src/routes/worktree/routes/check-changes.ts b/apps/server/src/routes/worktree/routes/check-changes.ts
new file mode 100644
[…]
+import type { Request, Response } from 'express';
+import { getErrorMessage, logError } from '../common.js';
+import { execGitCommand } from '../../../lib/git.js';
+
+function parseStatusOutput(stdout: string): {
+  staged: string[];
+  unstaged: string[];
+  untracked: string[];
+} {
+  const staged: string[] = [];
+  const unstaged: string[] = [];
+  const untracked: string[] = [];
+
+  for (const line of stdout.split('\n')) {
+    if (line.length < 3) continue;
+    // Porcelain format: "XY path" where X is the staged status and Y the unstaged status
+    const x = line[0];
+    const y = line[1];
+    // Renamed entries appear as "old -> new"; keep the path after the " -> " separator
+    const rawPath = line.slice(3);
+    const filePath = rawPath.includes(' -> ') ? rawPath.split(' -> ')[1] : rawPath;
+
+    if (x === '?'
&& y === '?') { + untracked.push(filePath); + } else { + if (x !== ' ' && x !== '?') { + staged.push(filePath); + } + if (y !== ' ' && y !== '?') { + unstaged.push(filePath); + } + } + } + + return { staged, unstaged, untracked }; +} + +export function createCheckChangesHandler() { + return async (req: Request, res: Response): Promise => { + try { + const { worktreePath } = req.body as { + worktreePath: string; + }; + + if (!worktreePath) { + res.status(400).json({ + success: false, + error: 'worktreePath required', + }); + return; + } + + // Get porcelain status (includes staged, unstaged, and untracked files) + const stdout = await execGitCommand(['status', '--porcelain'], worktreePath); + + const { staged, unstaged, untracked } = parseStatusOutput(stdout); + + const hasChanges = staged.length > 0 || unstaged.length > 0 || untracked.length > 0; + + // Deduplicate file paths across staged, unstaged, and untracked arrays + // to avoid double-counting partially staged files + const uniqueFilePaths = new Set([...staged, ...unstaged, ...untracked]); + + res.json({ + success: true, + result: { + hasChanges, + staged, + unstaged, + untracked, + totalFiles: uniqueFilePaths.size, + }, + }); + } catch (error) { + logError(error, 'Check changes failed'); + res.status(500).json({ success: false, error: getErrorMessage(error) }); + } + }; +} diff --git a/apps/server/src/routes/worktree/routes/checkout-branch.ts b/apps/server/src/routes/worktree/routes/checkout-branch.ts index 239634801..d8a9d828c 100644 --- a/apps/server/src/routes/worktree/routes/checkout-branch.ts +++ b/apps/server/src/routes/worktree/routes/checkout-branch.ts @@ -1,6 +1,14 @@ /** * POST /checkout-branch endpoint - Create and checkout a new branch * + * Supports automatic stash handling: when `stashChanges` is true, local changes + * are stashed before creating the branch and reapplied after. If the stash pop + * results in merge conflicts, returns a special response so the UI can create a + * conflict resolution task. + * + * Git business logic is delegated to checkout-branch-service.ts when stash + * handling is requested. Otherwise, falls back to the original simple flow. + * * Note: Git repository validation (isGitRepo, hasCommits) is handled by * the requireValidWorktree middleware in index.ts. 
* Path validation (ALLOWED_ROOT_DIRECTORY) is handled by validatePathParams @@ -10,14 +18,22 @@ import type { Request, Response } from 'express'; import path from 'path'; import { stat } from 'fs/promises'; -import { getErrorMessage, logError, isValidBranchName, execGitCommand } from '../common.js'; +import { getErrorMessage, logError, isValidBranchName } from '../common.js'; +import { execGitCommand } from '../../../lib/git.js'; +import type { EventEmitter } from '../../../lib/events.js'; +import { performCheckoutBranch } from '../../../services/checkout-branch-service.js'; -export function createCheckoutBranchHandler() { +export function createCheckoutBranchHandler(events?: EventEmitter) { return async (req: Request, res: Response): Promise => { try { - const { worktreePath, branchName } = req.body as { + const { worktreePath, branchName, baseBranch, stashChanges, includeUntracked } = req.body as { worktreePath: string; branchName: string; + baseBranch?: string; + /** When true, stash local changes before checkout and reapply after */ + stashChanges?: boolean; + /** When true, include untracked files in the stash (defaults to true) */ + includeUntracked?: boolean; }; if (!worktreePath) { @@ -46,9 +62,17 @@ export function createCheckoutBranchHandler() { return; } + // Validate base branch if provided + if (baseBranch && !isValidBranchName(baseBranch) && baseBranch !== 'HEAD') { + res.status(400).json({ + success: false, + error: + 'Invalid base branch name. Must contain only letters, numbers, dots, dashes, underscores, or slashes.', + }); + return; + } + // Resolve and validate worktreePath to prevent traversal attacks. - // The validatePathParams middleware checks against ALLOWED_ROOT_DIRECTORY, - // but we also resolve the path and verify it exists as a directory. const resolvedPath = path.resolve(worktreePath); try { const stats = await stat(resolvedPath); @@ -67,7 +91,42 @@ export function createCheckoutBranchHandler() { return; } - // Get current branch for reference (using argument array to avoid shell injection) + // Use the service for stash-aware checkout + if (stashChanges) { + const result = await performCheckoutBranch( + resolvedPath, + branchName, + baseBranch, + { + stashChanges: true, + includeUntracked: includeUntracked ?? true, + }, + events + ); + + if (!result.success) { + const statusCode = isBranchError(result.error) ? 
400 : 500; + res.status(statusCode).json({ + success: false, + error: result.error, + ...(result.stashPopConflicts !== undefined && { + stashPopConflicts: result.stashPopConflicts, + }), + ...(result.stashPopConflictMessage && { + stashPopConflictMessage: result.stashPopConflictMessage, + }), + }); + return; + } + + res.json({ + success: true, + result: result.result, + }); + return; + } + + // Original simple flow (no stash handling) const currentBranchOutput = await execGitCommand( ['rev-parse', '--abbrev-ref', 'HEAD'], resolvedPath @@ -77,7 +136,6 @@ export function createCheckoutBranchHandler() { // Check if branch already exists try { await execGitCommand(['rev-parse', '--verify', branchName], resolvedPath); - // Branch exists res.status(400).json({ success: false, error: `Branch '${branchName}' already exists`, @@ -87,8 +145,25 @@ export function createCheckoutBranchHandler() { // Branch doesn't exist, good to create } - // Create and checkout the new branch (using argument array to avoid shell injection) - await execGitCommand(['checkout', '-b', branchName], resolvedPath); + // If baseBranch is provided, verify it exists before using it + if (baseBranch) { + try { + await execGitCommand(['rev-parse', '--verify', baseBranch], resolvedPath); + } catch { + res.status(400).json({ + success: false, + error: `Base branch '${baseBranch}' does not exist`, + }); + return; + } + } + + // Create and checkout the new branch + const checkoutArgs = ['checkout', '-b', branchName]; + if (baseBranch) { + checkoutArgs.push(baseBranch); + } + await execGitCommand(checkoutArgs, resolvedPath); res.json({ success: true, @@ -99,8 +174,22 @@ export function createCheckoutBranchHandler() { }, }); } catch (error) { + events?.emit('switch:error', { + error: getErrorMessage(error), + }); + logError(error, 'Checkout branch failed'); res.status(500).json({ success: false, error: getErrorMessage(error) }); } }; } + +/** + * Determine whether an error message represents a client error (400). + * Stash failures are server-side errors and are intentionally excluded here + * so they are returned as HTTP 500 rather than HTTP 400. + */ +function isBranchError(error?: string): boolean { + if (!error) return false; + return error.includes('already exists') || error.includes('does not exist'); +} diff --git a/apps/server/src/routes/worktree/routes/cherry-pick.ts b/apps/server/src/routes/worktree/routes/cherry-pick.ts new file mode 100644 index 000000000..8f404a0f5 --- /dev/null +++ b/apps/server/src/routes/worktree/routes/cherry-pick.ts @@ -0,0 +1,107 @@ +/** + * POST /cherry-pick endpoint - Cherry-pick one or more commits into the current branch + * + * Applies commits from another branch onto the current branch. + * Supports single or multiple commit cherry-picks. + * + * Git business logic is delegated to cherry-pick-service.ts. + * Events are emitted at key lifecycle points for WebSocket subscribers. + * The global event emitter is passed into the service so all lifecycle + * events (started, success, conflict, abort, verify-failed) are broadcast + * to WebSocket clients. 
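From the client's perspective, the new `stashChanges` flag and the conflict fields on the error response could be consumed as follows. This is a minimal sketch: the `/api/worktree/checkout-branch` route prefix and the bare `fetch` call are assumptions, not part of this diff.

```ts
// Hypothetical client call exercising the stash-aware checkout flow.
async function checkoutWithStash(worktreePath: string, branchName: string) {
  const res = await fetch('/api/worktree/checkout-branch', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ worktreePath, branchName, stashChanges: true }),
  });
  const data = await res.json();
  if (!data.success && data.stashPopConflicts !== undefined) {
    // The stash pop hit merge conflicts: hand off to conflict resolution
    // instead of treating this as a plain failure.
    console.warn(data.stashPopConflictMessage);
  }
  return data;
}
```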
+ *
+ * Note: Git repository validation (isGitRepo, hasCommits) is handled by
+ * the requireValidWorktree middleware in index.ts
+ */
+
+import type { Request, Response } from 'express';
+import path from 'path';
+import { getErrorMessage, logError } from '../common.js';
+import type { EventEmitter } from '../../../lib/events.js';
+import { verifyCommits, runCherryPick } from '../../../services/cherry-pick-service.js';
+
+export function createCherryPickHandler(events: EventEmitter) {
+  return async (req: Request, res: Response): Promise<void> => {
+    try {
+      const { worktreePath, commitHashes, options } = req.body as {
+        worktreePath: string;
+        commitHashes: string[];
+        options?: {
+          noCommit?: boolean;
+        };
+      };
+
+      if (!worktreePath) {
+        res.status(400).json({
+          success: false,
+          error: 'worktreePath is required',
+        });
+        return;
+      }
+
+      // Normalize the path to prevent path traversal and ensure consistent paths
+      const resolvedWorktreePath = path.resolve(worktreePath);
+
+      if (!commitHashes || !Array.isArray(commitHashes) || commitHashes.length === 0) {
+        res.status(400).json({
+          success: false,
+          error: 'commitHashes array is required and must contain at least one commit hash',
+        });
+        return;
+      }
+
+      // Validate each commit hash format (should be hex string)
+      for (const hash of commitHashes) {
+        if (!/^[a-fA-F0-9]+$/.test(hash)) {
+          res.status(400).json({
+            success: false,
+            error: `Invalid commit hash format: "${hash}"`,
+          });
+          return;
+        }
+      }
+
+      // Verify each commit exists via the service; emits cherry-pick:verify-failed if any hash is missing
+      const invalidHash = await verifyCommits(resolvedWorktreePath, commitHashes, events);
+      if (invalidHash !== null) {
+        res.status(400).json({
+          success: false,
+          error: `Commit "${invalidHash}" does not exist`,
+        });
+        return;
+      }
+
+      // Execute the cherry-pick via the service.
+      // The service emits: cherry-pick:started, cherry-pick:success, cherry-pick:conflict,
+      // and cherry-pick:abort at the appropriate lifecycle points.
+      const result = await runCherryPick(resolvedWorktreePath, commitHashes, options, events);
+
+      if (result.success) {
+        res.json({
+          success: true,
+          result: {
+            cherryPicked: result.cherryPicked,
+            commitHashes: result.commitHashes,
+            branch: result.branch,
+            message: result.message,
+          },
+        });
+      } else if (result.hasConflicts) {
+        res.status(409).json({
+          success: false,
+          error: result.error,
+          hasConflicts: true,
+          aborted: result.aborted,
+        });
+      } else {
+        // Failure without conflicts (e.g. the pick failed after verification):
+        // respond explicitly so the request never hangs without a reply
+        res.status(500).json({
+          success: false,
+          error: result.error,
+        });
+      }
+    } catch (error) {
+      // Emit failure event for unexpected (non-conflict) errors
+      events.emit('cherry-pick:failure', {
+        error: getErrorMessage(error),
+      });
+
+      logError(error, 'Cherry-pick failed');
+      res.status(500).json({ success: false, error: getErrorMessage(error) });
+    }
+  };
+}
diff --git a/apps/server/src/routes/worktree/routes/commit-log.ts b/apps/server/src/routes/worktree/routes/commit-log.ts
new file mode 100644
index 000000000..dbdce1c31
--- /dev/null
+++ b/apps/server/src/routes/worktree/routes/commit-log.ts
@@ -0,0 +1,72 @@
+/**
+ * POST /commit-log endpoint - Get recent commit history for a worktree
+ *
+ * The handler only validates input, invokes the service, streams lifecycle
+ * events via the EventEmitter, and sends the final JSON response.
+ *
+ * Git business logic is delegated to commit-log-service.ts.
+ * Events are emitted at key lifecycle points for WebSocket subscribers.
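`verifyCommits` itself lives in cherry-pick-service.ts and is outside this hunk. A plausible shape for it — a sketch only, using plain child_process helpers like the ones used elsewhere in these routes, and omitting the event emission the real service performs:

```ts
import { execFile } from 'child_process';
import { promisify } from 'util';

const execFileAsync = promisify(execFile);

// Sketch: returns the first hash that does not resolve to a commit, or
// null when every hash checks out. `git cat-file -e <hash>^{commit}` exits
// non-zero for missing objects and for objects that are not commits.
// (The real verifyCommits also emits cherry-pick:verify-failed via events.)
async function verifyCommitsSketch(cwd: string, hashes: string[]): Promise<string | null> {
  for (const hash of hashes) {
    try {
      await execFileAsync('git', ['cat-file', '-e', `${hash}^{commit}`], { cwd });
    } catch {
      return hash;
    }
  }
  return null;
}
```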
+ * + * Note: Git repository validation (isGitRepo, hasCommits) is handled by + * the requireValidWorktree middleware in index.ts + */ + +import type { Request, Response } from 'express'; +import type { EventEmitter } from '../../../lib/events.js'; +import { getErrorMessage, logError } from '../common.js'; +import { getCommitLog } from '../../../services/commit-log-service.js'; + +export function createCommitLogHandler(events: EventEmitter) { + return async (req: Request, res: Response): Promise => { + try { + const { worktreePath, limit = 20 } = req.body as { + worktreePath: string; + limit?: number; + }; + + if (!worktreePath) { + res.status(400).json({ + success: false, + error: 'worktreePath required', + }); + return; + } + + // Emit start event so the frontend can observe progress + events.emit('commitLog:start', { + worktreePath, + limit, + }); + + // Delegate all Git work to the service + const result = await getCommitLog(worktreePath, limit); + + // Emit progress with the number of commits fetched + events.emit('commitLog:progress', { + worktreePath, + branch: result.branch, + commitsLoaded: result.total, + }); + + // Emit complete event + events.emit('commitLog:complete', { + worktreePath, + branch: result.branch, + total: result.total, + }); + + res.json({ + success: true, + result, + }); + } catch (error) { + // Emit error event so the frontend can react + events.emit('commitLog:error', { + error: getErrorMessage(error), + }); + + logError(error, 'Get commit log failed'); + res.status(500).json({ success: false, error: getErrorMessage(error) }); + } + }; +} diff --git a/apps/server/src/routes/worktree/routes/commit.ts b/apps/server/src/routes/worktree/routes/commit.ts index f33cd94b7..1bfbfd583 100644 --- a/apps/server/src/routes/worktree/routes/commit.ts +++ b/apps/server/src/routes/worktree/routes/commit.ts @@ -6,18 +6,20 @@ */ import type { Request, Response } from 'express'; -import { exec } from 'child_process'; +import { exec, execFile } from 'child_process'; import { promisify } from 'util'; import { getErrorMessage, logError } from '../common.js'; const execAsync = promisify(exec); +const execFileAsync = promisify(execFile); export function createCommitHandler() { return async (req: Request, res: Response): Promise => { try { - const { worktreePath, message } = req.body as { + const { worktreePath, message, files } = req.body as { worktreePath: string; message: string; + files?: string[]; }; if (!worktreePath || !message) { @@ -44,11 +46,21 @@ export function createCommitHandler() { return; } - // Stage all changes - await execAsync('git add -A', { cwd: worktreePath }); + // Stage changes - either specific files or all changes + if (files && files.length > 0) { + // Reset any previously staged changes first + await execFileAsync('git', ['reset', 'HEAD'], { cwd: worktreePath }).catch(() => { + // Ignore errors from reset (e.g., if nothing is staged) + }); + // Stage only the selected files (args array avoids shell injection) + await execFileAsync('git', ['add', ...files], { cwd: worktreePath }); + } else { + // Stage all changes (original behavior) + await execFileAsync('git', ['add', '-A'], { cwd: worktreePath }); + } - // Create commit - await execAsync(`git commit -m "${message.replace(/"/g, '\\"')}"`, { + // Create commit (pass message as arg to avoid shell injection) + await execFileAsync('git', ['commit', '-m', message], { cwd: worktreePath, }); diff --git a/apps/server/src/routes/worktree/routes/continue-operation.ts 
b/apps/server/src/routes/worktree/routes/continue-operation.ts new file mode 100644 index 000000000..e7582c02a --- /dev/null +++ b/apps/server/src/routes/worktree/routes/continue-operation.ts @@ -0,0 +1,151 @@ +/** + * POST /continue-operation endpoint - Continue an in-progress merge, rebase, or cherry-pick + * + * After conflicts have been resolved, this endpoint continues the operation. + * For merge: performs git commit (merge is auto-committed after conflict resolution) + * For rebase: runs git rebase --continue + * For cherry-pick: runs git cherry-pick --continue + */ + +import type { Request, Response } from 'express'; +import path from 'path'; +import * as fs from 'fs/promises'; +import { getErrorMessage, logError, execAsync } from '../common.js'; +import type { EventEmitter } from '../../../lib/events.js'; + +/** + * Detect what type of conflict operation is currently in progress + */ +async function detectOperation( + worktreePath: string +): Promise<'merge' | 'rebase' | 'cherry-pick' | null> { + try { + const { stdout: gitDirRaw } = await execAsync('git rev-parse --git-dir', { + cwd: worktreePath, + }); + const gitDir = path.resolve(worktreePath, gitDirRaw.trim()); + + const [rebaseMergeExists, rebaseApplyExists, mergeHeadExists, cherryPickHeadExists] = + await Promise.all([ + fs + .access(path.join(gitDir, 'rebase-merge')) + .then(() => true) + .catch(() => false), + fs + .access(path.join(gitDir, 'rebase-apply')) + .then(() => true) + .catch(() => false), + fs + .access(path.join(gitDir, 'MERGE_HEAD')) + .then(() => true) + .catch(() => false), + fs + .access(path.join(gitDir, 'CHERRY_PICK_HEAD')) + .then(() => true) + .catch(() => false), + ]); + + if (rebaseMergeExists || rebaseApplyExists) return 'rebase'; + if (mergeHeadExists) return 'merge'; + if (cherryPickHeadExists) return 'cherry-pick'; + return null; + } catch { + return null; + } +} + +/** + * Check if there are still unmerged paths (unresolved conflicts) + */ +async function hasUnmergedPaths(worktreePath: string): Promise { + try { + const { stdout: statusOutput } = await execAsync('git status --porcelain', { + cwd: worktreePath, + }); + return statusOutput.split('\n').some((line) => /^(UU|AA|DD|AU|UA|DU|UD)/.test(line)); + } catch { + return false; + } +} + +export function createContinueOperationHandler(events: EventEmitter) { + return async (req: Request, res: Response): Promise => { + try { + const { worktreePath } = req.body as { + worktreePath: string; + }; + + if (!worktreePath) { + res.status(400).json({ + success: false, + error: 'worktreePath is required', + }); + return; + } + + const resolvedWorktreePath = path.resolve(worktreePath); + + // Detect what operation is in progress + const operation = await detectOperation(resolvedWorktreePath); + + if (!operation) { + res.status(400).json({ + success: false, + error: 'No merge, rebase, or cherry-pick in progress', + }); + return; + } + + // Check for unresolved conflicts + if (await hasUnmergedPaths(resolvedWorktreePath)) { + res.status(409).json({ + success: false, + error: + 'There are still unresolved conflicts. 
Please resolve all conflicts before continuing.', + hasUnresolvedConflicts: true, + }); + return; + } + + // Stage all resolved files first + await execAsync('git add -A', { cwd: resolvedWorktreePath }); + + // Continue the operation + let continueCommand: string; + switch (operation) { + case 'merge': + // For merge, we need to commit after resolving conflicts + continueCommand = 'git commit --no-edit'; + break; + case 'rebase': + continueCommand = 'git rebase --continue'; + break; + case 'cherry-pick': + continueCommand = 'git cherry-pick --continue'; + break; + } + + await execAsync(continueCommand, { + cwd: resolvedWorktreePath, + env: { ...process.env, GIT_EDITOR: 'true' }, // Prevent editor from opening + }); + + // Emit event + events.emit('conflict:resolved', { + worktreePath: resolvedWorktreePath, + operation, + }); + + res.json({ + success: true, + result: { + operation, + message: `${operation.charAt(0).toUpperCase() + operation.slice(1)} continued successfully`, + }, + }); + } catch (error) { + logError(error, 'Continue operation failed'); + res.status(500).json({ success: false, error: getErrorMessage(error) }); + } + }; +} diff --git a/apps/server/src/routes/worktree/routes/create-pr.ts b/apps/server/src/routes/worktree/routes/create-pr.ts index 87777c69f..af608cef5 100644 --- a/apps/server/src/routes/worktree/routes/create-pr.ts +++ b/apps/server/src/routes/worktree/routes/create-pr.ts @@ -9,8 +9,11 @@ import { execAsync, execEnv, isValidBranchName, + isValidRemoteName, isGhCliAvailable, } from '../common.js'; +import { execGitCommand } from '../../../lib/git.js'; +import { spawnProcess } from '@automaker/platform'; import { updateWorktreePRInfo } from '../../../lib/worktree-metadata.js'; import { createLogger } from '@automaker/utils'; import { validatePRState } from '@automaker/types'; @@ -20,16 +23,25 @@ const logger = createLogger('CreatePR'); export function createCreatePRHandler() { return async (req: Request, res: Response): Promise => { try { - const { worktreePath, projectPath, commitMessage, prTitle, prBody, baseBranch, draft } = - req.body as { - worktreePath: string; - projectPath?: string; - commitMessage?: string; - prTitle?: string; - prBody?: string; - baseBranch?: string; - draft?: boolean; - }; + const { + worktreePath, + projectPath, + commitMessage, + prTitle, + prBody, + baseBranch, + draft, + remote, + } = req.body as { + worktreePath: string; + projectPath?: string; + commitMessage?: string; + prTitle?: string; + prBody?: string; + baseBranch?: string; + draft?: boolean; + remote?: string; + }; if (!worktreePath) { res.status(400).json({ @@ -82,12 +94,9 @@ export function createCreatePRHandler() { logger.debug(`Running: git add -A`); await execAsync('git add -A', { cwd: worktreePath, env: execEnv }); - // Create commit + // Create commit — pass message as a separate arg to avoid shell injection logger.debug(`Running: git commit`); - await execAsync(`git commit -m "${message.replace(/"/g, '\\"')}"`, { - cwd: worktreePath, - env: execEnv, - }); + await execGitCommand(['commit', '-m', message], worktreePath); // Get commit hash const { stdout: hashOutput } = await execAsync('git rev-parse HEAD', { @@ -110,17 +119,27 @@ export function createCreatePRHandler() { } } - // Push the branch to remote + // Validate remote name before use to prevent command injection + if (remote !== undefined && !isValidRemoteName(remote)) { + res.status(400).json({ + success: false, + error: 'Invalid remote name contains unsafe characters', + }); + return; + } + + // Push the 
branch to remote (use selected remote or default to 'origin') + const pushRemote = remote || 'origin'; let pushError: string | null = null; try { - await execAsync(`git push -u origin ${branchName}`, { + await execAsync(`git push ${pushRemote} ${branchName}`, { cwd: worktreePath, env: execEnv, }); - } catch (error: unknown) { + } catch { // If push fails, try with --set-upstream try { - await execAsync(`git push --set-upstream origin ${branchName}`, { + await execAsync(`git push --set-upstream ${pushRemote} ${branchName}`, { cwd: worktreePath, env: execEnv, }); @@ -195,7 +214,7 @@ export function createCreatePRHandler() { } } } - } catch (error) { + } catch { // Couldn't parse remotes - will try fallback } @@ -216,7 +235,7 @@ export function createCreatePRHandler() { originOwner = owner; repoUrl = `https://github.com/${owner}/${repo}`; } - } catch (error) { + } catch { // Failed to get repo URL from config } } @@ -291,27 +310,35 @@ export function createCreatePRHandler() { // Only create a new PR if one doesn't already exist if (!prUrl) { try { - // Build gh pr create command - let prCmd = `gh pr create --base "${base}"`; + // Build gh pr create args as an array to avoid shell injection on + // title/body (backticks, $, \ were unsafe with string interpolation) + const prArgs = ['pr', 'create', '--base', base]; // If this is a fork (has upstream remote), specify the repo and head if (upstreamRepo && originOwner) { // For forks: --repo specifies where to create PR, --head specifies source - prCmd += ` --repo "${upstreamRepo}" --head "${originOwner}:${branchName}"`; + prArgs.push('--repo', upstreamRepo, '--head', `${originOwner}:${branchName}`); } else { // Not a fork, just specify the head branch - prCmd += ` --head "${branchName}"`; + prArgs.push('--head', branchName); } - prCmd += ` --title "${title.replace(/"/g, '\\"')}" --body "${body.replace(/"/g, '\\"')}" ${draftFlag}`; - prCmd = prCmd.trim(); + prArgs.push('--title', title, '--body', body); + if (draft) prArgs.push('--draft'); - logger.debug(`Creating PR with command: ${prCmd}`); - const { stdout: prOutput } = await execAsync(prCmd, { + logger.debug(`Creating PR with args: gh ${prArgs.join(' ')}`); + const prResult = await spawnProcess({ + command: 'gh', + args: prArgs, cwd: worktreePath, env: execEnv, }); - prUrl = prOutput.trim(); + if (prResult.exitCode !== 0) { + throw Object.assign(new Error(prResult.stderr || 'gh pr create failed'), { + stderr: prResult.stderr, + }); + } + prUrl = prResult.stdout.trim(); logger.info(`PR created: ${prUrl}`); // Extract PR number and store metadata for newly created PR diff --git a/apps/server/src/routes/worktree/routes/create.ts b/apps/server/src/routes/worktree/routes/create.ts index 061fa8015..81243d812 100644 --- a/apps/server/src/routes/worktree/routes/create.ts +++ b/apps/server/src/routes/worktree/routes/create.ts @@ -13,6 +13,8 @@ import { promisify } from 'util'; import path from 'path'; import * as secureFs from '../../../lib/secure-fs.js'; import type { EventEmitter } from '../../../lib/events.js'; +import type { SettingsService } from '../../../services/settings-service.js'; +import { WorktreeService } from '../../../services/worktree-service.js'; import { isGitRepo } from '@automaker/git-utils'; import { getErrorMessage, @@ -20,8 +22,8 @@ import { normalizePath, ensureInitialCommit, isValidBranchName, - execGitCommand, } from '../common.js'; +import { execGitCommand } from '../../../lib/git.js'; import { trackBranch } from './branch-tracking.js'; import { createLogger } from 
'@automaker/utils'; import { runInitScript } from '../../../services/init-script-service.js'; @@ -81,7 +83,9 @@ async function findExistingWorktreeForBranch( } } -export function createCreateHandler(events: EventEmitter) { +export function createCreateHandler(events: EventEmitter, settingsService?: SettingsService) { + const worktreeService = new WorktreeService(); + return async (req: Request, res: Response): Promise => { try { const { projectPath, branchName, baseBranch } = req.body as { @@ -200,6 +204,20 @@ export function createCreateHandler(events: EventEmitter) { // normalizePath converts to forward slashes for API consistency const absoluteWorktreePath = path.resolve(worktreePath); + // Copy configured files into the new worktree before responding + // This runs synchronously to ensure files are in place before any init script + try { + await worktreeService.copyConfiguredFiles( + projectPath, + absoluteWorktreePath, + settingsService, + events + ); + } catch (copyErr) { + // Log but don't fail worktree creation – files may be partially copied + logger.warn('Some configured files failed to copy to worktree:', copyErr); + } + // Respond immediately (non-blocking) res.json({ success: true, diff --git a/apps/server/src/routes/worktree/routes/delete.ts b/apps/server/src/routes/worktree/routes/delete.ts index 6814add91..06703ff13 100644 --- a/apps/server/src/routes/worktree/routes/delete.ts +++ b/apps/server/src/routes/worktree/routes/delete.ts @@ -6,7 +6,8 @@ import type { Request, Response } from 'express'; import { exec } from 'child_process'; import { promisify } from 'util'; import { isGitRepo } from '@automaker/git-utils'; -import { getErrorMessage, logError, isValidBranchName, execGitCommand } from '../common.js'; +import { getErrorMessage, logError, isValidBranchName } from '../common.js'; +import { execGitCommand } from '../../../lib/git.js'; import { createLogger } from '@automaker/utils'; const execAsync = promisify(exec); @@ -51,7 +52,7 @@ export function createDeleteHandler() { // Remove the worktree (using array arguments to prevent injection) try { await execGitCommand(['worktree', 'remove', worktreePath, '--force'], projectPath); - } catch (error) { + } catch { // Try with prune if remove fails await execGitCommand(['worktree', 'prune'], projectPath); } diff --git a/apps/server/src/routes/worktree/routes/discard-changes.ts b/apps/server/src/routes/worktree/routes/discard-changes.ts index 4f15e0537..914eff677 100644 --- a/apps/server/src/routes/worktree/routes/discard-changes.ts +++ b/apps/server/src/routes/worktree/routes/discard-changes.ts @@ -1,27 +1,63 @@ /** - * POST /discard-changes endpoint - Discard all uncommitted changes in a worktree + * POST /discard-changes endpoint - Discard uncommitted changes in a worktree * - * This performs a destructive operation that: - * 1. Resets staged changes (git reset HEAD) - * 2. Discards modified tracked files (git checkout .) - * 3. Removes untracked files and directories (git clean -fd) + * Supports two modes: + * 1. Discard ALL changes (when no files array is provided) + * - Resets staged changes (git reset HEAD) + * - Discards modified tracked files (git checkout .) + * - Removes untracked files and directories (git clean -fd) + * + * 2. 
Discard SELECTED files (when files array is provided) + * - Unstages selected staged files (git reset HEAD -- ) + * - Reverts selected tracked file changes (git checkout -- ) + * - Removes selected untracked files (git clean -fd -- ) * * Note: Git repository validation (isGitRepo) is handled by * the requireGitRepoOnly middleware in index.ts */ import type { Request, Response } from 'express'; -import { exec } from 'child_process'; -import { promisify } from 'util'; -import { getErrorMessage, logError } from '../common.js'; +import * as path from 'path'; +import * as fs from 'fs'; +import { getErrorMessage, logError } from '@automaker/utils'; +import { execGitCommand } from '../../../lib/git.js'; -const execAsync = promisify(exec); +/** + * Validate that a file path does not escape the worktree directory. + * Prevents path traversal attacks (e.g., ../../etc/passwd) and + * rejects symlinks inside the worktree that point outside of it. + */ +function validateFilePath(filePath: string, worktreePath: string): boolean { + // Resolve the full path relative to the worktree (lexical resolution) + const resolved = path.resolve(worktreePath, filePath); + const normalizedWorktree = path.resolve(worktreePath); + + // First, perform lexical prefix check + const lexicalOk = + resolved.startsWith(normalizedWorktree + path.sep) || resolved === normalizedWorktree; + if (!lexicalOk) { + return false; + } + + // Then, attempt symlink-aware validation using realpath. + // This catches symlinks inside the worktree that point outside of it. + try { + const realResolved = fs.realpathSync(resolved); + const realWorktree = fs.realpathSync(normalizedWorktree); + return realResolved.startsWith(realWorktree + path.sep) || realResolved === realWorktree; + } catch { + // If realpath fails (e.g., target doesn't exist yet for untracked files), + // fall back to the lexical startsWith check which already passed above. + return true; + } +} export function createDiscardChangesHandler() { return async (req: Request, res: Response): Promise => { try { - const { worktreePath } = req.body as { + const { worktreePath, files } = req.body as { worktreePath: string; + files?: string[]; }; if (!worktreePath) { @@ -33,9 +69,7 @@ export function createDiscardChangesHandler() { } // Check for uncommitted changes first - const { stdout: status } = await execAsync('git status --porcelain', { - cwd: worktreePath, - }); + const status = await execGitCommand(['status', '--porcelain'], worktreePath); if (!status.trim()) { res.json({ @@ -48,61 +82,195 @@ export function createDiscardChangesHandler() { return; } - // Count the files that will be affected - const lines = status.trim().split('\n').filter(Boolean); - const fileCount = lines.length; - // Get branch name before discarding - const { stdout: branchOutput } = await execAsync('git rev-parse --abbrev-ref HEAD', { - cwd: worktreePath, - }); + const branchOutput = await execGitCommand( + ['rev-parse', '--abbrev-ref', 'HEAD'], + worktreePath + ); const branchName = branchOutput.trim(); - // Discard all changes: - // 1. 
Reset any staged changes - await execAsync('git reset HEAD', { cwd: worktreePath }).catch(() => { - // Ignore errors - might fail if there's nothing staged + // Parse the status output to categorize files + // Git --porcelain format: XY PATH where X=index status, Y=worktree status + // Preserve the exact two-character XY status (no trim) to keep index vs worktree info + const statusLines = status.trim().split('\n').filter(Boolean); + const allFiles = statusLines.map((line) => { + const fileStatus = line.substring(0, 2); + const filePath = line.slice(3).trim(); + return { status: fileStatus, path: filePath }; }); - // 2. Discard changes in tracked files - await execAsync('git checkout .', { cwd: worktreePath }).catch(() => { - // Ignore errors - might fail if there are no tracked changes - }); + // Determine which files to discard + const isSelectiveDiscard = files && files.length > 0 && files.length < allFiles.length; - // 3. Remove untracked files and directories - await execAsync('git clean -fd', { cwd: worktreePath }).catch(() => { - // Ignore errors - might fail if there are no untracked files - }); + if (isSelectiveDiscard) { + // Selective discard: only discard the specified files + const filesToDiscard = new Set(files); - // Verify all changes were discarded - const { stdout: finalStatus } = await execAsync('git status --porcelain', { - cwd: worktreePath, - }); + // Validate all requested file paths stay within the worktree + const invalidPaths = files.filter((f) => !validateFilePath(f, worktreePath)); + if (invalidPaths.length > 0) { + res.status(400).json({ + success: false, + error: `Invalid file paths detected (path traversal): ${invalidPaths.join(', ')}`, + }); + return; + } + + // Separate files into categories for proper git operations + const trackedModified: string[] = []; // Modified/deleted tracked files + const stagedFiles: string[] = []; // Files that are staged + const untrackedFiles: string[] = []; // Untracked files (?) + const warnings: string[] = []; + + for (const file of allFiles) { + if (!filesToDiscard.has(file.path)) continue; + + // file.status is the raw two-character XY git porcelain status (no trim) + // X = index/staging status, Y = worktree status + const xy = file.status.substring(0, 2); + const indexStatus = xy.charAt(0); + const workTreeStatus = xy.charAt(1); + + if (indexStatus === '?' && workTreeStatus === '?') { + untrackedFiles.push(file.path); + } else if (indexStatus === 'A') { + // Staged-new file: must be reset (unstaged) then cleaned (deleted). + // Never pass to trackedModified — the file has no HEAD version to + // check out, so `git checkout --` would fail or do nothing. + stagedFiles.push(file.path); + untrackedFiles.push(file.path); + } else { + // Check if the file has staged changes (index status X) + if (indexStatus !== ' ' && indexStatus !== '?') { + stagedFiles.push(file.path); + } + // Check for working tree changes (worktree status Y): handles MM, MD, etc. + if (workTreeStatus !== ' ' && workTreeStatus !== '?') { + trackedModified.push(file.path); + } + } + } + + // 1. Unstage selected staged files (using execFile to bypass shell) + if (stagedFiles.length > 0) { + try { + await execGitCommand(['reset', 'HEAD', '--', ...stagedFiles], worktreePath); + } catch (error) { + const msg = getErrorMessage(error); + logError(error, `Failed to unstage files: ${msg}`); + warnings.push(`Failed to unstage some files: ${msg}`); + } + } + + // 2. 
Revert selected tracked file changes + if (trackedModified.length > 0) { + try { + await execGitCommand(['checkout', '--', ...trackedModified], worktreePath); + } catch (error) { + const msg = getErrorMessage(error); + logError(error, `Failed to revert tracked files: ${msg}`); + warnings.push(`Failed to revert some tracked files: ${msg}`); + } + } + + // 3. Remove selected untracked files + if (untrackedFiles.length > 0) { + try { + await execGitCommand(['clean', '-fd', '--', ...untrackedFiles], worktreePath); + } catch (error) { + const msg = getErrorMessage(error); + logError(error, `Failed to clean untracked files: ${msg}`); + warnings.push(`Failed to remove some untracked files: ${msg}`); + } + } + + const fileCount = files.length; + + // Verify the remaining state + const finalStatus = await execGitCommand(['status', '--porcelain'], worktreePath); + + const remainingCount = finalStatus.trim() + ? finalStatus.trim().split('\n').filter(Boolean).length + : 0; + const actualDiscarded = allFiles.length - remainingCount; + + let message = + actualDiscarded < fileCount + ? `Discarded ${actualDiscarded} of ${fileCount} selected files, ${remainingCount} files remaining` + : `Discarded ${actualDiscarded} ${actualDiscarded === 1 ? 'file' : 'files'}`; - if (finalStatus.trim()) { - // Some changes couldn't be discarded (possibly ignored files or permission issues) - const remainingCount = finalStatus.trim().split('\n').filter(Boolean).length; res.json({ success: true, result: { discarded: true, - filesDiscarded: fileCount - remainingCount, + filesDiscarded: actualDiscarded, filesRemaining: remainingCount, branch: branchName, - message: `Discarded ${fileCount - remainingCount} files, ${remainingCount} files could not be removed`, + message, + ...(warnings.length > 0 && { warnings }), }, }); } else { - res.json({ - success: true, - result: { - discarded: true, - filesDiscarded: fileCount, - filesRemaining: 0, - branch: branchName, - message: `Discarded ${fileCount} ${fileCount === 1 ? 'file' : 'files'}`, - }, - }); + // Discard ALL changes (original behavior) + const fileCount = allFiles.length; + const warnings: string[] = []; + + // 1. Reset any staged changes + try { + await execGitCommand(['reset', 'HEAD'], worktreePath); + } catch (error) { + const msg = getErrorMessage(error); + logError(error, `git reset HEAD failed: ${msg}`); + warnings.push(`Failed to unstage changes: ${msg}`); + } + + // 2. Discard changes in tracked files + try { + await execGitCommand(['checkout', '.'], worktreePath); + } catch (error) { + const msg = getErrorMessage(error); + logError(error, `git checkout . failed: ${msg}`); + warnings.push(`Failed to revert tracked changes: ${msg}`); + } + + // 3. 
Remove untracked files and directories + try { + await execGitCommand(['clean', '-fd'], worktreePath); + } catch (error) { + const msg = getErrorMessage(error); + logError(error, `git clean -fd failed: ${msg}`); + warnings.push(`Failed to remove untracked files: ${msg}`); + } + + // Verify all changes were discarded + const finalStatus = await execGitCommand(['status', '--porcelain'], worktreePath); + + if (finalStatus.trim()) { + const remainingCount = finalStatus.trim().split('\n').filter(Boolean).length; + res.json({ + success: true, + result: { + discarded: true, + filesDiscarded: fileCount - remainingCount, + filesRemaining: remainingCount, + branch: branchName, + message: `Discarded ${fileCount - remainingCount} files, ${remainingCount} files could not be removed`, + ...(warnings.length > 0 && { warnings }), + }, + }); + } else { + res.json({ + success: true, + result: { + discarded: true, + filesDiscarded: fileCount, + filesRemaining: 0, + branch: branchName, + message: `Discarded ${fileCount} ${fileCount === 1 ? 'file' : 'files'}`, + ...(warnings.length > 0 && { warnings }), + }, + }); + } } } catch (error) { logError(error, 'Discard changes failed'); diff --git a/apps/server/src/routes/worktree/routes/generate-pr-description.ts b/apps/server/src/routes/worktree/routes/generate-pr-description.ts new file mode 100644 index 000000000..0f272e71e --- /dev/null +++ b/apps/server/src/routes/worktree/routes/generate-pr-description.ts @@ -0,0 +1,453 @@ +/** + * POST /worktree/generate-pr-description endpoint - Generate an AI PR description from git diff + * + * Uses the configured model (via phaseModels.commitMessageModel) to generate a pull request + * title and description from the branch's changes compared to the base branch. + * Defaults to Claude Haiku for speed. + */ + +import type { Request, Response } from 'express'; +import { execFile } from 'child_process'; +import { promisify } from 'util'; +import { existsSync } from 'fs'; +import { join } from 'path'; +import { createLogger } from '@automaker/utils'; +import { isCursorModel, stripProviderPrefix } from '@automaker/types'; +import { resolvePhaseModel } from '@automaker/model-resolver'; +import { ProviderFactory } from '../../../providers/provider-factory.js'; +import type { SettingsService } from '../../../services/settings-service.js'; +import { getErrorMessage, logError } from '../common.js'; +import { getPhaseModelWithOverrides } from '../../../lib/settings-helpers.js'; + +const logger = createLogger('GeneratePRDescription'); +const execFileAsync = promisify(execFile); + +/** Timeout for AI provider calls in milliseconds (30 seconds) */ +const AI_TIMEOUT_MS = 30_000; + +/** Max diff size to send to AI (characters) */ +const MAX_DIFF_SIZE = 15_000; + +const PR_DESCRIPTION_SYSTEM_PROMPT = `You are a pull request description generator. Your task is to create a clear, well-structured PR title and description based on the git diff and branch information provided. 
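The marker format this prompt mandates is exactly what the handler parses further down with two regexes. A response that satisfies the contract, with invented content, would look like this:

```ts
// Hypothetical model output demonstrating the ---TITLE---/---BODY--- contract.
const sampleResponse = `---TITLE---
Add stash-aware branch checkout
---BODY---
## Summary
- Stash local changes before checkout and reapply them afterwards`;

const title = sampleResponse.match(/---TITLE---\s*\n([\s\S]*?)(?=---BODY---|$)/)?.[1].trim();
const body = sampleResponse.match(/---BODY---\s*\n([\s\S]*?)$/)?.[1].trim();
// title === 'Add stash-aware branch checkout'
// body starts with '## Summary'
```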
+ +Output your response in EXACTLY this format (including the markers): +---TITLE--- + +---BODY--- +## Summary +<1-3 bullet points describing the key changes> + +## Changes + + +Rules: +- The title should be concise and descriptive (50-72 characters) +- Use imperative mood for the title (e.g., "Add dark mode toggle" not "Added dark mode toggle") +- The description should explain WHAT changed and WHY +- Group related changes together +- Use markdown formatting for the body +- Do NOT include the branch name in the title +- Focus on the user-facing impact when possible +- If there are breaking changes, mention them prominently +- The diff may include both committed changes and uncommitted working directory changes. Treat all changes as part of the PR since uncommitted changes will be committed when the PR is created +- Do NOT distinguish between committed and uncommitted changes in the output - describe all changes as a unified set of PR changes`; + +/** + * Wraps an async generator with a timeout. + */ +async function* withTimeout( + generator: AsyncIterable, + timeoutMs: number +): AsyncGenerator { + let timerId: ReturnType | undefined; + + const timeoutPromise = new Promise((_, reject) => { + timerId = setTimeout( + () => reject(new Error(`AI provider timed out after ${timeoutMs}ms`)), + timeoutMs + ); + }); + + const iterator = generator[Symbol.asyncIterator](); + let done = false; + + try { + while (!done) { + const result = await Promise.race([iterator.next(), timeoutPromise]).catch(async (err) => { + // Timeout (or other error) — attempt to gracefully close the source generator + await iterator.return?.(); + throw err; + }); + if (result.done) { + done = true; + } else { + yield result.value; + } + } + } finally { + clearTimeout(timerId); + } +} + +interface GeneratePRDescriptionRequestBody { + worktreePath: string; + baseBranch?: string; +} + +interface GeneratePRDescriptionSuccessResponse { + success: true; + title: string; + body: string; +} + +interface GeneratePRDescriptionErrorResponse { + success: false; + error: string; +} + +export function createGeneratePRDescriptionHandler( + settingsService?: SettingsService +): (req: Request, res: Response) => Promise { + return async (req: Request, res: Response): Promise => { + try { + const { worktreePath, baseBranch } = req.body as GeneratePRDescriptionRequestBody; + + if (!worktreePath || typeof worktreePath !== 'string') { + const response: GeneratePRDescriptionErrorResponse = { + success: false, + error: 'worktreePath is required and must be a string', + }; + res.status(400).json(response); + return; + } + + // Validate that the directory exists + if (!existsSync(worktreePath)) { + const response: GeneratePRDescriptionErrorResponse = { + success: false, + error: 'worktreePath does not exist', + }; + res.status(400).json(response); + return; + } + + // Validate that it's a git repository + const gitPath = join(worktreePath, '.git'); + if (!existsSync(gitPath)) { + const response: GeneratePRDescriptionErrorResponse = { + success: false, + error: 'worktreePath is not a git repository', + }; + res.status(400).json(response); + return; + } + + // Validate baseBranch to allow only safe branch name characters + if (baseBranch !== undefined && !/^[\w.\-/]+$/.test(baseBranch)) { + const response: GeneratePRDescriptionErrorResponse = { + success: false, + error: 'baseBranch contains invalid characters', + }; + res.status(400).json(response); + return; + } + + logger.info(`Generating PR description for worktree: ${worktreePath}`); + + // Get 
current branch name + const { stdout: branchOutput } = await execFileAsync( + 'git', + ['rev-parse', '--abbrev-ref', 'HEAD'], + { cwd: worktreePath } + ); + const branchName = branchOutput.trim(); + + // Determine the base branch for comparison + const base = baseBranch || 'main'; + + // Get the diff between current branch and base branch (committed changes) + // Track whether the diff method used only includes committed changes. + // `git diff base...HEAD` and `git diff origin/base...HEAD` only show committed changes, + // while the fallback methods (`git diff HEAD`, `git diff --cached + git diff`) already + // include uncommitted working directory changes. + let diff = ''; + let diffIncludesUncommitted = false; + try { + // First, try to get diff against the base branch + const { stdout: branchDiff } = await execFileAsync('git', ['diff', `${base}...HEAD`], { + cwd: worktreePath, + maxBuffer: 1024 * 1024 * 5, // 5MB buffer + }); + diff = branchDiff; + // git diff base...HEAD only shows committed changes + diffIncludesUncommitted = false; + } catch { + // If branch comparison fails (e.g., base branch doesn't exist locally), + // try fetching and comparing against remote base + try { + const { stdout: remoteDiff } = await execFileAsync( + 'git', + ['diff', `origin/${base}...HEAD`], + { + cwd: worktreePath, + maxBuffer: 1024 * 1024 * 5, + } + ); + diff = remoteDiff; + // git diff origin/base...HEAD only shows committed changes + diffIncludesUncommitted = false; + } catch { + // Fall back to getting all uncommitted + committed changes + try { + const { stdout: allDiff } = await execFileAsync('git', ['diff', 'HEAD'], { + cwd: worktreePath, + maxBuffer: 1024 * 1024 * 5, + }); + diff = allDiff; + // git diff HEAD includes uncommitted changes + diffIncludesUncommitted = true; + } catch { + // Last resort: get staged + unstaged changes + const { stdout: stagedDiff } = await execFileAsync('git', ['diff', '--cached'], { + cwd: worktreePath, + maxBuffer: 1024 * 1024 * 5, + }); + const { stdout: unstagedDiff } = await execFileAsync('git', ['diff'], { + cwd: worktreePath, + maxBuffer: 1024 * 1024 * 5, + }); + diff = stagedDiff + unstagedDiff; + // These already include uncommitted changes + diffIncludesUncommitted = true; + } + } + } + + // Check for uncommitted changes (staged + unstaged) to include in the description. + // When creating a PR, uncommitted changes will be auto-committed, so they should be + // reflected in the generated description. We only need to fetch uncommitted diffs + // when the primary diff method (base...HEAD) was used, since it only shows committed changes. 
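The three-dot range in the primary attempt is what keeps that diff committed-only: `git diff base...HEAD` compares HEAD against the merge-base with `base`, never against the working tree. A sketch of the equivalence, reusing the `execFileAsync` helper defined in this file:

```ts
// Sketch: `git diff ${base}...HEAD` produces the same output as diffing
// HEAD against the merge-base, which is why uncommitted work never shows up.
async function diffAgainstMergeBase(worktreePath: string, base: string): Promise<string> {
  const { stdout: mergeBase } = await execFileAsync('git', ['merge-base', base, 'HEAD'], {
    cwd: worktreePath,
  });
  const { stdout } = await execFileAsync('git', ['diff', mergeBase.trim(), 'HEAD'], {
    cwd: worktreePath,
  });
  return stdout;
}
```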
+ let hasUncommittedChanges = false; + try { + const { stdout: statusOutput } = await execFileAsync('git', ['status', '--porcelain'], { + cwd: worktreePath, + }); + hasUncommittedChanges = statusOutput.trim().length > 0; + + if (hasUncommittedChanges && !diffIncludesUncommitted) { + logger.info('Uncommitted changes detected, including in PR description context'); + + let uncommittedDiff = ''; + + // Get staged changes + try { + const { stdout: stagedDiff } = await execFileAsync('git', ['diff', '--cached'], { + cwd: worktreePath, + maxBuffer: 1024 * 1024 * 5, + }); + if (stagedDiff.trim()) { + uncommittedDiff += stagedDiff; + } + } catch { + // Ignore staged diff errors + } + + // Get unstaged changes (tracked files only) + try { + const { stdout: unstagedDiff } = await execFileAsync('git', ['diff'], { + cwd: worktreePath, + maxBuffer: 1024 * 1024 * 5, + }); + if (unstagedDiff.trim()) { + uncommittedDiff += unstagedDiff; + } + } catch { + // Ignore unstaged diff errors + } + + // Get list of untracked files for context + const untrackedFiles = statusOutput + .split('\n') + .filter((line) => line.startsWith('??')) + .map((line) => line.substring(3).trim()); + + if (untrackedFiles.length > 0) { + // Add a summary of untracked (new) files as context + uncommittedDiff += `\n# New untracked files:\n${untrackedFiles.map((f) => `# + ${f}`).join('\n')}\n`; + } + + // Append uncommitted changes to the committed diff + if (uncommittedDiff.trim()) { + diff = diff + uncommittedDiff; + } + } + } catch { + // Ignore errors checking for uncommitted changes + } + + // Also get the commit log for context + let commitLog = ''; + try { + const { stdout: logOutput } = await execFileAsync( + 'git', + ['log', `${base}..HEAD`, '--oneline', '--no-decorate'], + { + cwd: worktreePath, + maxBuffer: 1024 * 1024, + } + ); + commitLog = logOutput.trim(); + } catch { + // If comparing against base fails, fall back to recent commits + try { + const { stdout: logOutput } = await execFileAsync( + 'git', + ['log', '--oneline', '-10', '--no-decorate'], + { + cwd: worktreePath, + maxBuffer: 1024 * 1024, + } + ); + commitLog = logOutput.trim(); + } catch { + // Ignore commit log errors + } + } + + if (!diff.trim() && !commitLog.trim()) { + const response: GeneratePRDescriptionErrorResponse = { + success: false, + error: 'No changes found to generate a PR description from', + }; + res.status(400).json(response); + return; + } + + // Truncate diff if too long + const truncatedDiff = + diff.length > MAX_DIFF_SIZE + ? diff.substring(0, MAX_DIFF_SIZE) + '\n\n[... diff truncated ...]' + : diff; + + // Build the user prompt + let userPrompt = `Generate a pull request title and description for the following changes.\n\nBranch: ${branchName}\nBase Branch: ${base}\n`; + + if (commitLog) { + userPrompt += `\nCommit History:\n${commitLog}\n`; + } + + if (hasUncommittedChanges) { + userPrompt += `\nNote: This branch has uncommitted changes that will be included in the PR.\n`; + } + + if (truncatedDiff) { + userPrompt += `\n\`\`\`diff\n${truncatedDiff}\n\`\`\``; + } + + // Get model from phase settings with provider info + const { + phaseModel: phaseModelEntry, + provider: claudeCompatibleProvider, + credentials, + } = await getPhaseModelWithOverrides( + 'commitMessageModel', + settingsService, + worktreePath, + '[GeneratePRDescription]' + ); + const { model, thinkingLevel } = resolvePhaseModel(phaseModelEntry); + + logger.info( + `Using model for PR description: ${model}`, + claudeCompatibleProvider ? 
`via provider: ${claudeCompatibleProvider.name}` : 'direct API' + ); + + // Get provider for the model type + const aiProvider = ProviderFactory.getProviderForModel(model); + const bareModel = stripProviderPrefix(model); + + // For Cursor models, combine prompts + const effectivePrompt = isCursorModel(model) + ? `${PR_DESCRIPTION_SYSTEM_PROMPT}\n\n${userPrompt}` + : userPrompt; + const effectiveSystemPrompt = isCursorModel(model) ? undefined : PR_DESCRIPTION_SYSTEM_PROMPT; + + logger.info(`Using ${aiProvider.getName()} provider for model: ${model}`); + + let responseText = ''; + const stream = aiProvider.executeQuery({ + prompt: effectivePrompt, + model: bareModel, + cwd: worktreePath, + systemPrompt: effectiveSystemPrompt, + maxTurns: 1, + allowedTools: [], + readOnly: true, + thinkingLevel, + claudeCompatibleProvider, + credentials, + }); + + // Wrap with timeout + for await (const msg of withTimeout(stream, AI_TIMEOUT_MS)) { + if (msg.type === 'assistant' && msg.message?.content) { + for (const block of msg.message.content) { + if (block.type === 'text' && block.text) { + responseText += block.text; + } + } + } else if (msg.type === 'result' && msg.subtype === 'success' && msg.result) { + responseText = msg.result; + } + } + + const fullResponse = responseText.trim(); + + if (!fullResponse || fullResponse.length === 0) { + logger.warn('Received empty response from model'); + const response: GeneratePRDescriptionErrorResponse = { + success: false, + error: 'Failed to generate PR description - empty response', + }; + res.status(500).json(response); + return; + } + + // Parse the response to extract title and body + let title = ''; + let body = ''; + + const titleMatch = fullResponse.match(/---TITLE---\s*\n([\s\S]*?)(?=---BODY---|$)/); + const bodyMatch = fullResponse.match(/---BODY---\s*\n([\s\S]*?)$/); + + if (titleMatch && bodyMatch) { + title = titleMatch[1].trim(); + body = bodyMatch[1].trim(); + } else { + // Fallback: treat first line as title, rest as body + const lines = fullResponse.split('\n'); + title = lines[0].trim(); + body = lines.slice(1).join('\n').trim(); + } + + // Clean up title - remove any markdown or quotes + title = title.replace(/^#+\s*/, '').replace(/^["']|["']$/g, ''); + + logger.info(`Generated PR title: ${title.substring(0, 100)}...`); + + const response: GeneratePRDescriptionSuccessResponse = { + success: true, + title, + body, + }; + res.json(response); + } catch (error) { + logError(error, 'Generate PR description failed'); + const response: GeneratePRDescriptionErrorResponse = { + success: false, + error: getErrorMessage(error), + }; + res.status(500).json(response); + } + }; +} diff --git a/apps/server/src/routes/worktree/routes/list-branches.ts b/apps/server/src/routes/worktree/routes/list-branches.ts index 2e6a34f50..68e0bca82 100644 --- a/apps/server/src/routes/worktree/routes/list-branches.ts +++ b/apps/server/src/routes/worktree/routes/list-branches.ts @@ -6,11 +6,12 @@ */ import type { Request, Response } from 'express'; -import { exec } from 'child_process'; +import { exec, execFile } from 'child_process'; import { promisify } from 'util'; import { getErrorMessage, logWorktreeError } from '../common.js'; const execAsync = promisify(exec); +const execFileAsync = promisify(execFile); interface BranchInfo { name: string; @@ -92,6 +93,9 @@ export function createListBranchesHandler() { // Skip HEAD pointers like "origin/HEAD" if (cleanName.includes('/HEAD')) return; + // Skip bare remote names without a branch (e.g. 
"origin" by itself) + if (!cleanName.includes('/')) return; + // Only add remote branches if a branch with the exact same name isn't already // in the list. This avoids duplicates if a local branch is named like a remote one. // Note: We intentionally include remote branches even when a local branch with the @@ -128,15 +132,17 @@ export function createListBranchesHandler() { let hasRemoteBranch = false; try { // First check if there's a remote tracking branch - const { stdout: upstreamOutput } = await execAsync( - `git rev-parse --abbrev-ref ${currentBranch}@{upstream}`, + const { stdout: upstreamOutput } = await execFileAsync( + 'git', + ['rev-parse', '--abbrev-ref', `${currentBranch}@{upstream}`], { cwd: worktreePath } ); if (upstreamOutput.trim()) { hasRemoteBranch = true; - const { stdout: aheadBehindOutput } = await execAsync( - `git rev-list --left-right --count ${currentBranch}@{upstream}...HEAD`, + const { stdout: aheadBehindOutput } = await execFileAsync( + 'git', + ['rev-list', '--left-right', '--count', `${currentBranch}@{upstream}...HEAD`], { cwd: worktreePath } ); const [behind, ahead] = aheadBehindOutput.trim().split(/\s+/).map(Number); @@ -147,8 +153,9 @@ export function createListBranchesHandler() { // No upstream branch set - check if the branch exists on any remote try { // Check if there's a matching branch on origin (most common remote) - const { stdout: remoteBranchOutput } = await execAsync( - `git ls-remote --heads origin ${currentBranch}`, + const { stdout: remoteBranchOutput } = await execFileAsync( + 'git', + ['ls-remote', '--heads', 'origin', currentBranch], { cwd: worktreePath, timeout: 5000 } ); hasRemoteBranch = remoteBranchOutput.trim().length > 0; diff --git a/apps/server/src/routes/worktree/routes/list.ts b/apps/server/src/routes/worktree/routes/list.ts index 0f8021f11..333ba7c21 100644 --- a/apps/server/src/routes/worktree/routes/list.ts +++ b/apps/server/src/routes/worktree/routes/list.ts @@ -58,6 +58,90 @@ interface WorktreeInfo { hasChanges?: boolean; changedFilesCount?: number; pr?: WorktreePRInfo; // PR info if a PR has been created for this branch + /** Whether there are actual unresolved conflict files (conflictFiles.length > 0) */ + hasConflicts?: boolean; + /** Type of git operation in progress (merge/rebase/cherry-pick), set independently of hasConflicts */ + conflictType?: 'merge' | 'rebase' | 'cherry-pick'; + /** List of files with conflicts */ + conflictFiles?: string[]; +} + +/** + * Detect if a merge, rebase, or cherry-pick is in progress for a worktree. + * Checks for the presence of state files/directories that git creates + * during these operations. 
+ */ +async function detectConflictState(worktreePath: string): Promise<{ + hasConflicts: boolean; + conflictType?: 'merge' | 'rebase' | 'cherry-pick'; + conflictFiles?: string[]; +}> { + try { + // Find the canonical .git directory for this worktree + const { stdout: gitDirRaw } = await execAsync('git rev-parse --git-dir', { + cwd: worktreePath, + timeout: 15000, + }); + const gitDir = path.resolve(worktreePath, gitDirRaw.trim()); + + // Check for merge, rebase, and cherry-pick state files/directories + const [mergeHeadExists, rebaseMergeExists, rebaseApplyExists, cherryPickHeadExists] = + await Promise.all([ + secureFs + .access(path.join(gitDir, 'MERGE_HEAD')) + .then(() => true) + .catch(() => false), + secureFs + .access(path.join(gitDir, 'rebase-merge')) + .then(() => true) + .catch(() => false), + secureFs + .access(path.join(gitDir, 'rebase-apply')) + .then(() => true) + .catch(() => false), + secureFs + .access(path.join(gitDir, 'CHERRY_PICK_HEAD')) + .then(() => true) + .catch(() => false), + ]); + + let conflictType: 'merge' | 'rebase' | 'cherry-pick' | undefined; + if (rebaseMergeExists || rebaseApplyExists) { + conflictType = 'rebase'; + } else if (mergeHeadExists) { + conflictType = 'merge'; + } else if (cherryPickHeadExists) { + conflictType = 'cherry-pick'; + } + + if (!conflictType) { + return { hasConflicts: false }; + } + + // Get list of conflicted files using machine-readable git status + let conflictFiles: string[] = []; + try { + const { stdout: statusOutput } = await execAsync('git diff --name-only --diff-filter=U', { + cwd: worktreePath, + timeout: 15000, + }); + conflictFiles = statusOutput + .trim() + .split('\n') + .filter((f) => f.trim().length > 0); + } catch { + // Fall back to empty list if diff fails + } + + return { + hasConflicts: conflictFiles.length > 0, + conflictType, + conflictFiles, + }; + } catch { + // If anything fails, assume no conflicts + return { hasConflicts: false }; + } } async function getCurrentBranch(cwd: string): Promise { @@ -373,7 +457,7 @@ export function createListHandler() { // Read all worktree metadata to get PR info const allMetadata = await readAllWorktreeMetadata(projectPath); - // If includeDetails is requested, fetch change status for each worktree + // If includeDetails is requested, fetch change status and conflict state for each worktree if (includeDetails) { for (const worktree of worktrees) { try { @@ -390,6 +474,21 @@ export function createListHandler() { worktree.hasChanges = false; worktree.changedFilesCount = 0; } + + // Detect merge/rebase/cherry-pick in progress + try { + const conflictState = await detectConflictState(worktree.path); + // Always propagate conflictType so callers know an operation is in progress, + // even when there are no unresolved conflict files yet. 
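Since `conflictType` is set whenever an operation is in progress while `hasConflicts` requires actual unresolved files, a consumer can tell "mid-rebase but fully resolved" apart from "blocked on conflicts". An illustrative helper (the field names come from this diff; the function itself does not):

```ts
// Illustrative only: summarizes the new WorktreeInfo conflict fields.
function describeConflictState(w: {
  hasConflicts?: boolean;
  conflictType?: 'merge' | 'rebase' | 'cherry-pick';
  conflictFiles?: string[];
}): string {
  if (!w.conflictType) return 'no operation in progress';
  if (!w.hasConflicts) return `${w.conflictType} in progress, ready to continue`;
  return `${w.conflictType} blocked by ${w.conflictFiles?.length ?? 0} conflicted file(s)`;
}
```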
+ if (conflictState.conflictType) { + worktree.conflictType = conflictState.conflictType; + } + // hasConflicts is true only when there are actual unresolved files + worktree.hasConflicts = conflictState.hasConflicts; + worktree.conflictFiles = conflictState.conflictFiles; + } catch { + // Ignore conflict detection errors + } } } diff --git a/apps/server/src/routes/worktree/routes/merge.ts b/apps/server/src/routes/worktree/routes/merge.ts index 48df7893c..9f8b3bb4e 100644 --- a/apps/server/src/routes/worktree/routes/merge.ts +++ b/apps/server/src/routes/worktree/routes/merge.ts @@ -8,15 +8,11 @@ */ import type { Request, Response } from 'express'; -import { exec } from 'child_process'; -import { promisify } from 'util'; -import { getErrorMessage, logError, isValidBranchName, execGitCommand } from '../common.js'; -import { createLogger } from '@automaker/utils'; +import { getErrorMessage, logError } from '../common.js'; +import type { EventEmitter } from '../../../lib/events.js'; +import { performMerge } from '../../../services/merge-service.js'; -const execAsync = promisify(exec); -const logger = createLogger('Worktree'); - -export function createMergeHandler() { +export function createMergeHandler(events: EventEmitter) { return async (req: Request, res: Response): Promise => { try { const { projectPath, branchName, worktreePath, targetBranch, options } = req.body as { @@ -38,102 +34,41 @@ export function createMergeHandler() { // Determine the target branch (default to 'main') const mergeTo = targetBranch || 'main'; - // Validate source branch exists - try { - await execAsync(`git rev-parse --verify ${branchName}`, { cwd: projectPath }); - } catch { - res.status(400).json({ - success: false, - error: `Branch "${branchName}" does not exist`, - }); - return; - } - - // Validate target branch exists - try { - await execAsync(`git rev-parse --verify ${mergeTo}`, { cwd: projectPath }); - } catch { - res.status(400).json({ - success: false, - error: `Target branch "${mergeTo}" does not exist`, - }); - return; - } - - // Merge the feature branch into the target branch - const mergeCmd = options?.squash - ? `git merge --squash ${branchName}` - : `git merge ${branchName} -m "${options?.message || `Merge ${branchName} into ${mergeTo}`}"`; - - try { - await execAsync(mergeCmd, { cwd: projectPath }); - } catch (mergeError: unknown) { - // Check if this is a merge conflict - const err = mergeError as { stdout?: string; stderr?: string; message?: string }; - const output = `${err.stdout || ''} ${err.stderr || ''} ${err.message || ''}`; - const hasConflicts = - output.includes('CONFLICT') || output.includes('Automatic merge failed'); - - if (hasConflicts) { + // Delegate all merge logic to the service + const result = await performMerge( + projectPath, + branchName, + worktreePath, + mergeTo, + options, + events + ); + + if (!result.success) { + if (result.hasConflicts) { // Return conflict-specific error message that frontend can detect res.status(409).json({ success: false, - error: `Merge CONFLICT: Automatic merge of "${branchName}" into "${mergeTo}" failed. Please resolve conflicts manually.`, + error: result.error, hasConflicts: true, + conflictFiles: result.conflictFiles, }); return; } - // Re-throw non-conflict errors to be handled by outer catch - throw mergeError; - } - - // If squash merge, need to commit - if (options?.squash) { - await execAsync(`git commit -m "${options?.message || `Merge ${branchName} (squash)`}"`, { - cwd: projectPath, + // Non-conflict service errors (e.g. 
branch not found, invalid name) + res.status(400).json({ + success: false, + error: result.error, }); - } - - // Optionally delete the worktree and branch after merging - let worktreeDeleted = false; - let branchDeleted = false; - - if (options?.deleteWorktreeAndBranch) { - // Remove the worktree - try { - await execGitCommand(['worktree', 'remove', worktreePath, '--force'], projectPath); - worktreeDeleted = true; - } catch { - // Try with prune if remove fails - try { - await execGitCommand(['worktree', 'prune'], projectPath); - worktreeDeleted = true; - } catch { - logger.warn(`Failed to remove worktree: ${worktreePath}`); - } - } - - // Delete the branch (but not main/master) - if (branchName !== 'main' && branchName !== 'master') { - if (!isValidBranchName(branchName)) { - logger.warn(`Invalid branch name detected, skipping deletion: ${branchName}`); - } else { - try { - await execGitCommand(['branch', '-D', branchName], projectPath); - branchDeleted = true; - } catch { - logger.warn(`Failed to delete branch: ${branchName}`); - } - } - } + return; } res.json({ success: true, - mergedBranch: branchName, - targetBranch: mergeTo, - deleted: options?.deleteWorktreeAndBranch ? { worktreeDeleted, branchDeleted } : undefined, + mergedBranch: result.mergedBranch, + targetBranch: result.targetBranch, + deleted: result.deleted, }); } catch (error) { logError(error, 'Merge worktree failed'); diff --git a/apps/server/src/routes/worktree/routes/pull.ts b/apps/server/src/routes/worktree/routes/pull.ts index 7b9229949..7f157faf2 100644 --- a/apps/server/src/routes/worktree/routes/pull.ts +++ b/apps/server/src/routes/worktree/routes/pull.ts @@ -1,22 +1,33 @@ /** * POST /pull endpoint - Pull latest changes for a worktree/branch * + * Enhanced pull flow with stash management and conflict detection: + * 1. Checks for uncommitted local changes (staged and unstaged) + * 2. If local changes exist AND stashIfNeeded is true, automatically stashes them + * 3. Performs the git pull + * 4. If changes were stashed, attempts to reapply via git stash pop + * 5. Detects merge conflicts from both pull and stash reapplication + * 6. Returns structured conflict information for AI-assisted resolution + * + * Git business logic is delegated to pull-service.ts. 
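+ *
+ * Illustrative request/response sketch (path and values assumed; field names
+ * follow the PullResult mapping below):
+ *   POST /pull { worktreePath: '/repo/.worktrees/feat-x', stashIfNeeded: true }
+ *   -> 200 { success: true, result: { branch: 'feat-x', pulled: true,
+ *            stashed: true, stashRestored: true, hasConflicts: false } }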
+ * * Note: Git repository validation (isGitRepo, hasCommits) is handled by * the requireValidWorktree middleware in index.ts */ import type { Request, Response } from 'express'; -import { exec } from 'child_process'; -import { promisify } from 'util'; import { getErrorMessage, logError } from '../common.js'; - -const execAsync = promisify(exec); +import { performPull } from '../../../services/pull-service.js'; +import type { PullResult } from '../../../services/pull-service.js'; export function createPullHandler() { return async (req: Request, res: Response): Promise => { try { - const { worktreePath } = req.body as { + const { worktreePath, remote, stashIfNeeded } = req.body as { worktreePath: string; + remote?: string; + /** When true, automatically stash local changes before pulling and reapply after */ + stashIfNeeded?: boolean; }; if (!worktreePath) { @@ -27,67 +38,66 @@ export function createPullHandler() { return; } - // Get current branch name - const { stdout: branchOutput } = await execAsync('git rev-parse --abbrev-ref HEAD', { - cwd: worktreePath, - }); - const branchName = branchOutput.trim(); - - // Fetch latest from remote - await execAsync('git fetch origin', { cwd: worktreePath }); - - // Check if there are local changes that would be overwritten - const { stdout: status } = await execAsync('git status --porcelain', { - cwd: worktreePath, - }); - const hasLocalChanges = status.trim().length > 0; - - if (hasLocalChanges) { - res.status(400).json({ - success: false, - error: 'You have local changes. Please commit them before pulling.', - }); - return; - } - - // Pull latest changes - try { - const { stdout: pullOutput } = await execAsync(`git pull origin ${branchName}`, { - cwd: worktreePath, - }); - - // Check if we pulled any changes - const alreadyUpToDate = pullOutput.includes('Already up to date'); - - res.json({ - success: true, - result: { - branch: branchName, - pulled: !alreadyUpToDate, - message: alreadyUpToDate ? 'Already up to date' : 'Pulled latest changes', - }, - }); - } catch (pullError: unknown) { - const err = pullError as { stderr?: string; message?: string }; - const errorMsg = err.stderr || err.message || 'Pull failed'; + // Execute the pull via the service + const result = await performPull(worktreePath, { remote, stashIfNeeded }); - // Check for common errors - if (errorMsg.includes('no tracking information')) { - res.status(400).json({ - success: false, - error: `Branch '${branchName}' has no upstream branch. Push it first or set upstream with: git branch --set-upstream-to=origin/${branchName}`, - }); - return; - } - - res.status(500).json({ - success: false, - error: errorMsg, - }); - } + // Map service result to HTTP response + mapResultToResponse(res, result); } catch (error) { logError(error, 'Pull failed'); res.status(500).json({ success: false, error: getErrorMessage(error) }); } }; } + +/** + * Map a PullResult from the service to the appropriate HTTP response. + * + * - Successful results (including local-changes-detected info) → 200 + * - Validation/state errors (detached HEAD, no upstream) → 400 + * - Operational errors (fetch/stash/pull failures) → 500 + */ +function mapResultToResponse(res: Response, result: PullResult): void { + if (!result.success && result.error) { + // Determine the appropriate HTTP status for errors + const statusCode = isClientError(result.error) ? 
400 : 500; + res.status(statusCode).json({ + success: false, + error: result.error, + ...(result.stashRecoveryFailed && { stashRecoveryFailed: true }), + }); + return; + } + + // Success case (includes partial success like local changes detected, conflicts, etc.) + res.json({ + success: true, + result: { + branch: result.branch, + pulled: result.pulled, + hasLocalChanges: result.hasLocalChanges, + localChangedFiles: result.localChangedFiles, + hasConflicts: result.hasConflicts, + conflictSource: result.conflictSource, + conflictFiles: result.conflictFiles, + stashed: result.stashed, + stashRestored: result.stashRestored, + message: result.message, + }, + }); +} + +/** + * Determine whether an error message represents a client error (400) + * vs a server error (500). + * + * Client errors are validation issues or invalid git state that the user + * needs to resolve (e.g. detached HEAD, no upstream, no tracking info). + */ +function isClientError(errorMessage: string): boolean { + return ( + errorMessage.includes('detached HEAD') || + errorMessage.includes('has no upstream branch') || + errorMessage.includes('no tracking information') + ); +} diff --git a/apps/server/src/routes/worktree/routes/rebase.ts b/apps/server/src/routes/worktree/routes/rebase.ts new file mode 100644 index 000000000..7efb2c4b5 --- /dev/null +++ b/apps/server/src/routes/worktree/routes/rebase.ts @@ -0,0 +1,124 @@ +/** + * POST /rebase endpoint - Rebase the current branch onto a target branch + * + * Rebases the current worktree branch onto a specified target branch + * (e.g., origin/main) for a linear history. Detects conflicts and + * returns structured conflict information for AI-assisted resolution. + * + * Git business logic is delegated to rebase-service.ts. + * Events are emitted at key lifecycle points for WebSocket subscribers. 
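+ *
+ * Lifecycle sketch (event names match the handler below):
+ *   rebase:started -> runRebase() -> rebase:success | rebase:conflict | rebase:failure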
+ * + * Note: Git repository validation (isGitRepo, hasCommits) is handled by + * the requireValidWorktree middleware in index.ts + */ + +import type { Request, Response } from 'express'; +import path from 'path'; +import { getErrorMessage, logError, isValidBranchName } from '../common.js'; +import type { EventEmitter } from '../../../lib/events.js'; +import { runRebase } from '../../../services/rebase-service.js'; + +export function createRebaseHandler(events: EventEmitter) { + return async (req: Request, res: Response): Promise<void> => { + try { + const { worktreePath, ontoBranch } = req.body as { + worktreePath: string; + /** The branch/ref to rebase onto (e.g., 'origin/main', 'main') */ + ontoBranch: string; + }; + + if (!worktreePath) { + res.status(400).json({ + success: false, + error: 'worktreePath is required', + }); + return; + } + + if (!ontoBranch) { + res.status(400).json({ + success: false, + error: 'ontoBranch is required', + }); + return; + } + + // Normalize the path to prevent path traversal and ensure consistent paths + const resolvedWorktreePath = path.resolve(worktreePath); + + // Validate the branch name (allow remote refs like origin/main) + if (!isValidBranchName(ontoBranch)) { + res.status(400).json({ + success: false, + error: `Invalid branch name: "${ontoBranch}"`, + }); + return; + } + + // Emit started event + events.emit('rebase:started', { + worktreePath: resolvedWorktreePath, + ontoBranch, + }); + + // Execute the rebase via the service + const result = await runRebase(resolvedWorktreePath, ontoBranch); + + if (result.success) { + // Emit success event + events.emit('rebase:success', { + worktreePath: resolvedWorktreePath, + branch: result.branch, + ontoBranch: result.ontoBranch, + }); + + res.json({ + success: true, + result: { + branch: result.branch, + ontoBranch: result.ontoBranch, + message: result.message, + }, + }); + } else if (result.hasConflicts) { + // Emit conflict event + events.emit('rebase:conflict', { + worktreePath: resolvedWorktreePath, + ontoBranch, + conflictFiles: result.conflictFiles, + aborted: result.aborted, + }); + + res.status(409).json({ + success: false, + error: result.error, + hasConflicts: true, + conflictFiles: result.conflictFiles, + aborted: result.aborted, + }); + } else { + // Emit failure event for non-conflict failures + events.emit('rebase:failure', { + worktreePath: resolvedWorktreePath, + branch: result.branch, + ontoBranch: result.ontoBranch, + error: result.error, + }); + + res.status(500).json({ + success: false, + error: result.error ?? 'Rebase failed', + hasConflicts: false, + }); + } + } catch (error) { + // Emit failure event + events.emit('rebase:failure', { + error: getErrorMessage(error), + }); + + logError(error, 'Rebase failed'); + res.status(500).json({ success: false, error: getErrorMessage(error) }); + } + }; +}
diff --git a/apps/server/src/routes/worktree/routes/stage-files.ts b/apps/server/src/routes/worktree/routes/stage-files.ts new file mode 100644 index 000000000..d04813e7e --- /dev/null +++ b/apps/server/src/routes/worktree/routes/stage-files.ts @@ -0,0 +1,74 @@ +/** + * POST /stage-files endpoint - Stage or unstage files in a worktree + * + * Supports two operations: + * 1. Stage files: `git add <files>` (adds files to the staging area) + * 2. Unstage files: `git reset HEAD -- <files>` (removes files from staging area) + * + * Note: Git repository validation (isGitRepo) is handled by + * the requireGitRepoOnly middleware in index.ts + */ + +import type { Request, Response } from 'express'; +import { getErrorMessage, logError } from '../common.js'; +import { stageFiles, StageFilesValidationError } from '../../../services/stage-files-service.js'; + +export function createStageFilesHandler() { + return async (req: Request, res: Response): Promise<void> => { + try { + const { worktreePath, files, operation } = req.body as { + worktreePath: string; + files: string[]; + operation: 'stage' | 'unstage'; + }; + + if (!worktreePath) { + res.status(400).json({ + success: false, + error: 'worktreePath required', + }); + return; + } + + if (!Array.isArray(files) || files.length === 0) { + res.status(400).json({ + success: false, + error: 'files array required and must not be empty', + }); + return; + } + + for (const file of files) { + if (typeof file !== 'string' || file.trim() === '') { + res.status(400).json({ + success: false, + error: 'Each element of files must be a non-empty string', + }); + return; + } + } + + if (operation !== 'stage' && operation !== 'unstage') { + res.status(400).json({ + success: false, + error: 'operation must be "stage" or "unstage"', + }); + return; + } + + const result = await stageFiles(worktreePath, files, operation); + + res.json({ + success: true, + result, + }); + } catch (error) { + if (error instanceof StageFilesValidationError) { + res.status(400).json({ success: false, error: error.message }); + return; + } + logError(error, `${(req.body as { operation?: string })?.operation ?? 'stage'} files failed`); + res.status(500).json({ success: false, error: getErrorMessage(error) }); + } + }; +} diff --git a/apps/server/src/routes/worktree/routes/stash-apply.ts b/apps/server/src/routes/worktree/routes/stash-apply.ts new file mode 100644 index 000000000..f854edd3f --- /dev/null +++ b/apps/server/src/routes/worktree/routes/stash-apply.ts @@ -0,0 +1,78 @@ +/** + * POST /stash-apply endpoint - Apply or pop a stash in a worktree + * + * Applies a specific stash entry to the working directory. + * Can either "apply" (keep stash) or "pop" (remove stash after applying). + * + * All git operations and conflict detection are delegated to StashService. + * + * Note: Git repository validation (isGitRepo) is handled by + * the requireGitRepoOnly middleware in index.ts + */ + +import type { Request, Response } from 'express'; +import type { EventEmitter } from '../../../lib/events.js'; +import { getErrorMessage, logError } from '../common.js'; +import { applyOrPop } from '../../../services/stash-service.js'; + +export function createStashApplyHandler(events: EventEmitter) { + return async (req: Request, res: Response): Promise<void> => { + try { + const { worktreePath, stashIndex, pop } = req.body as { + worktreePath: string; + stashIndex: number; + pop?: boolean; + }; + + if (!worktreePath) { + res.status(400).json({ + success: false, + error: 'worktreePath required', + }); + return; + } + + if (stashIndex === undefined || stashIndex === null) { + res.status(400).json({ + success: false, + error: 'stashIndex required', + }); + return; + } + + const idx = typeof stashIndex === 'string' ?
Number(stashIndex) : stashIndex; + + if (!Number.isInteger(idx) || idx < 0) { + res.status(400).json({ + success: false, + error: 'stashIndex must be a non-negative integer', + }); + return; + } + + // Delegate all stash apply/pop logic to the service + const result = await applyOrPop(worktreePath, idx, { pop }, events); + + if (!result.success) { + // applyOrPop already logs the error internally via logError — no need to double-log here + res.status(500).json({ success: false, error: result.error }); + return; + } + + res.json({ + success: true, + result: { + applied: result.applied, + hasConflicts: result.hasConflicts, + conflictFiles: result.conflictFiles, + operation: result.operation, + stashIndex: result.stashIndex, + message: result.message, + }, + }); + } catch (error) { + logError(error, 'Stash apply failed'); + res.status(500).json({ success: false, error: getErrorMessage(error) }); + } + }; +} diff --git a/apps/server/src/routes/worktree/routes/stash-drop.ts b/apps/server/src/routes/worktree/routes/stash-drop.ts new file mode 100644 index 000000000..a05985ee8 --- /dev/null +++ b/apps/server/src/routes/worktree/routes/stash-drop.ts @@ -0,0 +1,83 @@ +/** + * POST /stash-drop endpoint - Drop (delete) a stash entry + * + * The handler only validates input, invokes the service, streams lifecycle + * events via the EventEmitter, and sends the final JSON response. + * + * Git business logic is delegated to stash-service.ts. + * Events are emitted at key lifecycle points for WebSocket subscribers. + * + * Note: Git repository validation (isGitRepo) is handled by + * the requireGitRepoOnly middleware in index.ts + */ + +import type { Request, Response } from 'express'; +import type { EventEmitter } from '../../../lib/events.js'; +import { getErrorMessage, logError } from '../common.js'; +import { dropStash } from '../../../services/stash-service.js'; + +export function createStashDropHandler(events: EventEmitter) { + return async (req: Request, res: Response): Promise => { + try { + const { worktreePath, stashIndex } = req.body as { + worktreePath: string; + stashIndex: number; + }; + + if (!worktreePath) { + res.status(400).json({ + success: false, + error: 'worktreePath required', + }); + return; + } + + if (!Number.isInteger(stashIndex) || stashIndex < 0) { + res.status(400).json({ + success: false, + error: 'stashIndex required', + }); + return; + } + + // Emit start event so the frontend can observe progress + events.emit('stash:start', { + worktreePath, + stashIndex, + stashRef: `stash@{${stashIndex}}`, + operation: 'drop', + }); + + // Delegate all Git work to the service + const result = await dropStash(worktreePath, stashIndex); + + // Emit success event + events.emit('stash:success', { + worktreePath, + stashIndex, + operation: 'drop', + dropped: result.dropped, + }); + + res.json({ + success: true, + result: { + dropped: result.dropped, + stashIndex: result.stashIndex, + message: result.message, + }, + }); + } catch (error) { + // Emit error event so the frontend can react + events.emit('stash:failure', { + worktreePath: req.body?.worktreePath, + stashIndex: req.body?.stashIndex, + operation: 'drop', + error: getErrorMessage(error), + }); + + logError(error, 'Stash drop failed'); + res.status(500).json({ success: false, error: getErrorMessage(error) }); + } + }; +} diff --git a/apps/server/src/routes/worktree/routes/stash-list.ts b/apps/server/src/routes/worktree/routes/stash-list.ts new file mode 100644 index 000000000..c34b38783 --- /dev/null +++ 
b/apps/server/src/routes/worktree/routes/stash-list.ts @@ -0,0 +1,76 @@ +/** + * POST /stash-list endpoint - List all stashes in a worktree + * + * The handler only validates input, invokes the service, streams lifecycle + * events via the EventEmitter, and sends the final JSON response. + * + * Git business logic is delegated to stash-service.ts. + * Events are emitted at key lifecycle points for WebSocket subscribers. + * + * Note: Git repository validation (isGitRepo) is handled by + * the requireGitRepoOnly middleware in index.ts + */ + +import type { Request, Response } from 'express'; +import type { EventEmitter } from '../../../lib/events.js'; +import { getErrorMessage, logError } from '../common.js'; +import { listStash } from '../../../services/stash-service.js'; + +export function createStashListHandler(events: EventEmitter) { + return async (req: Request, res: Response): Promise => { + try { + const { worktreePath } = req.body as { + worktreePath: string; + }; + + if (!worktreePath) { + res.status(400).json({ + success: false, + error: 'worktreePath required', + }); + return; + } + + // Emit start event so the frontend can observe progress + events.emit('stash:start', { + worktreePath, + operation: 'list', + }); + + // Delegate all Git work to the service + const result = await listStash(worktreePath); + + // Emit progress with stash count + events.emit('stash:progress', { + worktreePath, + operation: 'list', + total: result.total, + }); + + // Emit success event + events.emit('stash:success', { + worktreePath, + operation: 'list', + total: result.total, + }); + + res.json({ + success: true, + result: { + stashes: result.stashes, + total: result.total, + }, + }); + } catch (error) { + // Emit error event so the frontend can react + events.emit('stash:failure', { + worktreePath: req.body?.worktreePath, + operation: 'list', + error: getErrorMessage(error), + }); + + logError(error, 'Stash list failed'); + res.status(500).json({ success: false, error: getErrorMessage(error) }); + } + }; +} diff --git a/apps/server/src/routes/worktree/routes/stash-push.ts b/apps/server/src/routes/worktree/routes/stash-push.ts new file mode 100644 index 000000000..d2be6701d --- /dev/null +++ b/apps/server/src/routes/worktree/routes/stash-push.ts @@ -0,0 +1,81 @@ +/** + * POST /stash-push endpoint - Stash changes in a worktree + * + * The handler only validates input, invokes the service, streams lifecycle + * events via the EventEmitter, and sends the final JSON response. + * + * Git business logic is delegated to stash-service.ts. + * Events are emitted at key lifecycle points for WebSocket subscribers. 
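+ *
+ * Lifecycle sketch (event names match the handler below):
+ *   stash:start -> pushStash() -> stash:progress -> stash:success (or stash:failure on error)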
+ * + * Note: Git repository validation (isGitRepo) is handled by + * the requireGitRepoOnly middleware in index.ts + */ + +import type { Request, Response } from 'express'; +import type { EventEmitter } from '../../../lib/events.js'; +import { getErrorMessage, logError } from '../common.js'; +import { pushStash } from '../../../services/stash-service.js'; + +export function createStashPushHandler(events: EventEmitter) { + return async (req: Request, res: Response): Promise => { + try { + const { worktreePath, message, files } = req.body as { + worktreePath: string; + message?: string; + files?: string[]; + }; + + if (!worktreePath) { + res.status(400).json({ + success: false, + error: 'worktreePath required', + }); + return; + } + + // Emit start event so the frontend can observe progress + events.emit('stash:start', { + worktreePath, + operation: 'push', + }); + + // Delegate all Git work to the service + const result = await pushStash(worktreePath, { message, files }); + + // Emit progress with stash result + events.emit('stash:progress', { + worktreePath, + operation: 'push', + stashed: result.stashed, + branch: result.branch, + }); + + // Emit success event + events.emit('stash:success', { + worktreePath, + operation: 'push', + stashed: result.stashed, + branch: result.branch, + }); + + res.json({ + success: true, + result: { + stashed: result.stashed, + branch: result.branch, + message: result.message, + }, + }); + } catch (error) { + // Emit error event so the frontend can react + events.emit('stash:failure', { + worktreePath: req.body?.worktreePath, + operation: 'push', + error: getErrorMessage(error), + }); + + logError(error, 'Stash push failed'); + res.status(500).json({ success: false, error: getErrorMessage(error) }); + } + }; +} diff --git a/apps/server/src/routes/worktree/routes/switch-branch.ts b/apps/server/src/routes/worktree/routes/switch-branch.ts index 63be752bb..9e873aacc 100644 --- a/apps/server/src/routes/worktree/routes/switch-branch.ts +++ b/apps/server/src/routes/worktree/routes/switch-branch.ts @@ -1,67 +1,29 @@ /** * POST /switch-branch endpoint - Switch to an existing branch * - * Simple branch switching. - * If there are uncommitted changes, the switch will fail and - * the user should commit first. + * Handles branch switching with automatic stash/reapply of local changes. + * If there are uncommitted changes, they are stashed before switching and + * reapplied after. If the stash pop results in merge conflicts, returns + * a special response code so the UI can create a conflict resolution task. + * + * For remote branches (e.g., "origin/feature"), automatically creates a + * local tracking branch and checks it out. + * + * Also fetches the latest remote refs after switching. + * + * Git business logic is delegated to worktree-branch-service.ts. + * Events are emitted at key lifecycle points for WebSocket subscribers. * * Note: Git repository validation (isGitRepo, hasCommits) is handled by * the requireValidWorktree middleware in index.ts */ import type { Request, Response } from 'express'; -import { exec } from 'child_process'; -import { promisify } from 'util'; -import { getErrorMessage, logError } from '../common.js'; - -const execAsync = promisify(exec); - -function isUntrackedLine(line: string): boolean { - return line.startsWith('?? 
'); -} - -function isExcludedWorktreeLine(line: string): boolean { - return line.includes('.worktrees/') || line.endsWith('.worktrees'); -} - -function isBlockingChangeLine(line: string): boolean { - if (!line.trim()) return false; - if (isExcludedWorktreeLine(line)) return false; - if (isUntrackedLine(line)) return false; - return true; -} - -/** - * Check if there are uncommitted changes in the working directory - * Excludes .worktrees/ directory which is created by automaker - */ -async function hasUncommittedChanges(cwd: string): Promise { - try { - const { stdout } = await execAsync('git status --porcelain', { cwd }); - const lines = stdout.trim().split('\n').filter(isBlockingChangeLine); - return lines.length > 0; - } catch { - return false; - } -} - -/** - * Get a summary of uncommitted changes for user feedback - * Excludes .worktrees/ directory - */ -async function getChangesSummary(cwd: string): Promise { - try { - const { stdout } = await execAsync('git status --short', { cwd }); - const lines = stdout.trim().split('\n').filter(isBlockingChangeLine); - if (lines.length === 0) return ''; - if (lines.length <= 5) return lines.join(', '); - return `${lines.slice(0, 5).join(', ')} and ${lines.length - 5} more files`; - } catch { - return 'unknown changes'; - } -} +import { getErrorMessage, logError, isValidBranchName } from '../common.js'; +import type { EventEmitter } from '../../../lib/events.js'; +import { performSwitchBranch } from '../../../services/worktree-branch-service.js'; -export function createSwitchBranchHandler() { +export function createSwitchBranchHandler(events?: EventEmitter) { return async (req: Request, res: Response): Promise => { try { const { worktreePath, branchName } = req.body as { @@ -85,62 +47,58 @@ export function createSwitchBranchHandler() { return; } - // Get current branch - const { stdout: currentBranchOutput } = await execAsync('git rev-parse --abbrev-ref HEAD', { - cwd: worktreePath, - }); - const previousBranch = currentBranchOutput.trim(); - - if (previousBranch === branchName) { - res.json({ - success: true, - result: { - previousBranch, - currentBranch: branchName, - message: `Already on branch '${branchName}'`, - }, - }); - return; - } - - // Check if branch exists - try { - await execAsync(`git rev-parse --verify ${branchName}`, { - cwd: worktreePath, - }); - } catch { + // Validate branch name using shared allowlist to prevent Git option injection + if (!isValidBranchName(branchName)) { res.status(400).json({ success: false, - error: `Branch '${branchName}' does not exist`, + error: 'Invalid branch name', }); return; } - // Check for uncommitted changes - if (await hasUncommittedChanges(worktreePath)) { - const summary = await getChangesSummary(worktreePath); - res.status(400).json({ + // Execute the branch switch via the service + const result = await performSwitchBranch(worktreePath, branchName, events); + + // Map service result to HTTP response + if (!result.success) { + // Determine status code based on error type + const statusCode = isBranchNotFoundError(result.error) ? 400 : 500; + res.status(statusCode).json({ success: false, - error: `Cannot switch branches: you have uncommitted changes (${summary}). 
Please commit your changes first.`, - code: 'UNCOMMITTED_CHANGES', + error: result.error, + ...(result.stashPopConflicts !== undefined && { + stashPopConflicts: result.stashPopConflicts, + }), + ...(result.stashPopConflictMessage && { + stashPopConflictMessage: result.stashPopConflictMessage, + }), }); return; } - // Switch to the target branch - await execAsync(`git checkout "${branchName}"`, { cwd: worktreePath }); - res.json({ success: true, - result: { - previousBranch, - currentBranch: branchName, - message: `Switched to branch '${branchName}'`, - }, + result: result.result, }); } catch (error) { + events?.emit('switch:error', { + error: getErrorMessage(error), + }); + logError(error, 'Switch branch failed'); res.status(500).json({ success: false, error: getErrorMessage(error) }); } }; } + +/** + * Determine whether an error message represents a client error (400) + * vs a server error (500). + * + * Client errors are validation issues like non-existent branches or + * unparseable remote branch names. + */ +function isBranchNotFoundError(error?: string): boolean { + if (!error) return false; + return error.includes('does not exist') || error.includes('Failed to parse remote branch name'); +} diff --git a/apps/server/src/routes/zai/index.ts b/apps/server/src/routes/zai/index.ts new file mode 100644 index 000000000..4e5b874cc --- /dev/null +++ b/apps/server/src/routes/zai/index.ts @@ -0,0 +1,159 @@ +import { Router, Request, Response } from 'express'; +import { ZaiUsageService } from '../../services/zai-usage-service.js'; +import type { SettingsService } from '../../services/settings-service.js'; +import { createLogger } from '@automaker/utils'; + +const logger = createLogger('Zai'); + +export function createZaiRoutes( + usageService: ZaiUsageService, + settingsService: SettingsService +): Router { + const router = Router(); + + // Initialize z.ai API token from credentials on startup + (async () => { + try { + const credentials = await settingsService.getCredentials(); + if (credentials.apiKeys?.zai) { + usageService.setApiToken(credentials.apiKeys.zai); + logger.info('[init] Loaded z.ai API key from credentials'); + } + } catch (error) { + logger.error('[init] Failed to load z.ai API key from credentials:', error); + } + })(); + + // Get current usage (fetches from z.ai API) + router.get('/usage', async (_req: Request, res: Response) => { + try { + // Check if z.ai API is configured + const isAvailable = usageService.isAvailable(); + if (!isAvailable) { + // Use a 200 + error payload so the UI doesn't interpret it as session auth error + res.status(200).json({ + error: 'z.ai API not configured', + message: 'Set Z_AI_API_KEY environment variable to enable z.ai usage tracking', + }); + return; + } + + const usage = await usageService.fetchUsageData(); + res.json(usage); + } catch (error) { + const message = error instanceof Error ? 
error.message : 'Unknown error'; + + if (message.includes('not configured') || message.includes('API token')) { + res.status(200).json({ + error: 'API token required', + message: 'Set Z_AI_API_KEY environment variable to enable z.ai usage tracking', + }); + } else if (message.includes('failed') || message.includes('request')) { + res.status(200).json({ + error: 'API request failed', + message: message, + }); + } else { + logger.error('Error fetching z.ai usage:', error); + res.status(500).json({ error: message }); + } + } + }); + + // Configure API token (for settings page) + router.post('/configure', async (req: Request, res: Response) => { + try { + const { apiToken, apiHost } = req.body; + + // Validate apiToken: must be present and a string + if (apiToken === undefined || apiToken === null || typeof apiToken !== 'string') { + res.status(400).json({ + success: false, + error: 'Invalid request: apiToken is required and must be a string', + }); + return; + } + + // Validate apiHost if provided: must be a string and a well-formed URL + if (apiHost !== undefined && apiHost !== null) { + if (typeof apiHost !== 'string') { + res.status(400).json({ + success: false, + error: 'Invalid request: apiHost must be a string', + }); + return; + } + // Validate that apiHost is a well-formed URL + try { + const parsedUrl = new URL(apiHost); + if (parsedUrl.protocol !== 'http:' && parsedUrl.protocol !== 'https:') { + res.status(400).json({ + success: false, + error: 'Invalid request: apiHost must be a valid HTTP or HTTPS URL', + }); + return; + } + } catch { + res.status(400).json({ + success: false, + error: 'Invalid request: apiHost must be a well-formed URL', + }); + return; + } + } + + // Pass only the sanitized values to the service + const sanitizedToken = apiToken.trim(); + const sanitizedHost = typeof apiHost === 'string' ? apiHost.trim() : undefined; + + const result = await usageService.configure( + { apiToken: sanitizedToken, apiHost: sanitizedHost }, + settingsService + ); + res.json(result); + } catch (error) { + const message = error instanceof Error ? error.message : 'Unknown error'; + logger.error('Error configuring z.ai:', error); + res.status(500).json({ error: message }); + } + }); + + // Verify API key without storing it (for testing in settings) + router.post('/verify', async (req: Request, res: Response) => { + try { + const { apiKey } = req.body; + const result = await usageService.verifyApiKey(apiKey); + res.json(result); + } catch (error) { + const message = error instanceof Error ? error.message : 'Unknown error'; + logger.error('Error verifying z.ai API key:', error); + res.json({ + success: false, + authenticated: false, + error: `Network error: ${message}`, + }); + } + }); + + // Check if z.ai is available + router.get('/status', async (_req: Request, res: Response) => { + try { + const isAvailable = usageService.isAvailable(); + const hasEnvApiKey = Boolean(process.env.Z_AI_API_KEY); + const hasApiKey = usageService.getApiToken() !== null; + + res.json({ + success: true, + available: isAvailable, + hasApiKey, + hasEnvApiKey, + message: isAvailable ? 'z.ai API is configured' : 'z.ai API token not configured', + }); + } catch (error) { + const message = error instanceof Error ? 
error.message : 'Unknown error'; + res.status(500).json({ success: false, error: message }); + } + }); + + return router; +} diff --git a/apps/server/src/services/agent-executor.ts b/apps/server/src/services/agent-executor.ts index 5d0498046..3ed38da2c 100644 --- a/apps/server/src/services/agent-executor.ts +++ b/apps/server/src/services/agent-executor.ts @@ -42,6 +42,27 @@ export class AgentExecutor { private static readonly WRITE_DEBOUNCE_MS = 500; private static readonly STREAM_HEARTBEAT_MS = 15_000; + /** + * Sanitize a provider error value into clean text. + * Coalesces to string, removes ANSI codes, strips leading "Error:" prefix, + * trims, and returns 'Unknown error' when empty. + */ + private static sanitizeProviderError(input: string | { error?: string } | undefined): string { + let raw: string; + if (typeof input === 'string') { + raw = input; + } else if (input && typeof input === 'object' && typeof input.error === 'string') { + raw = input.error; + } else { + raw = ''; + } + const cleaned = raw + .replace(/\x1b\[[0-9;]*m/g, '') + .replace(/^Error:\s*/i, '') + .trim(); + return cleaned || 'Unknown error'; + } + constructor( private eventBus: TypedEventBus, private featureStateManager: FeatureStateManager, @@ -255,7 +276,7 @@ export class AgentExecutor { } } } else if (msg.type === 'error') { - throw new Error(msg.error || 'Unknown error'); + throw new Error(AgentExecutor.sanitizeProviderError(msg.error)); } else if (msg.type === 'result' && msg.subtype === 'success') scheduleWrite(); } await writeToFile(); @@ -390,9 +411,15 @@ export class AgentExecutor { input: b.input, }); } - } else if (msg.type === 'error') - throw new Error(msg.error || `Error during task ${task.id}`); - else if (msg.type === 'result' && msg.subtype === 'success') { + } else if (msg.type === 'error') { + // Clean the error: strip ANSI codes and redundant "Error: " prefix + const cleanedError = + (msg.error || `Error during task ${task.id}`) + .replace(/\x1b\[[0-9;]*m/g, '') + .replace(/^Error:\s*/i, '') + .trim() || `Error during task ${task.id}`; + throw new Error(cleanedError); + } else if (msg.type === 'result' && msg.subtype === 'success') { taskOutput += msg.result || ''; responseText += msg.result || ''; } @@ -444,17 +471,11 @@ export class AgentExecutor { callbacks: AgentExecutorCallbacks ): Promise<{ responseText: string; tasksCompleted: number }> { const { - workDir, featureId, projectPath, - abortController, branchName = null, planningMode = 'skip', provider, - effectiveBareModel, - credentials, - claudeCompatibleProvider, - mcpServers, sdkOptions, } = options; let responseText = initialResponseText, @@ -562,7 +583,14 @@ export class AgentExecutor { content: b.text, }); } - if (msg.type === 'error') throw new Error(msg.error || 'Error during plan revision'); + if (msg.type === 'error') { + const cleanedError = + (msg.error || 'Error during plan revision') + .replace(/\x1b\[[0-9;]*m/g, '') + .replace(/^Error:\s*/i, '') + .trim() || 'Error during plan revision'; + throw new Error(cleanedError); + } if (msg.type === 'result' && msg.subtype === 'success') revText += msg.result || ''; } const mi = revText.indexOf('[SPEC_GENERATED]'); @@ -680,9 +708,15 @@ export class AgentExecutor { input: b.input, }); } - else if (msg.type === 'error') - throw new Error(msg.error || 'Unknown error during implementation'); - else if (msg.type === 'result' && msg.subtype === 'success') responseText += msg.result || ''; + else if (msg.type === 'error') { + const cleanedError = + (msg.error || 'Unknown error during 
implementation') + .replace(/\x1b\[[0-9;]*m/g, '') + .replace(/^Error:\s*/i, '') + .trim() || 'Unknown error during implementation'; + throw new Error(cleanedError); + } else if (msg.type === 'result' && msg.subtype === 'success') + responseText += msg.result || ''; } return { responseText }; } diff --git a/apps/server/src/services/agent-service.ts b/apps/server/src/services/agent-service.ts index 0ecec44e7..b1fec9419 100644 --- a/apps/server/src/services/agent-service.ts +++ b/apps/server/src/services/agent-service.ts @@ -15,11 +15,9 @@ import { loadContextFiles, createLogger, classifyError, - getUserFriendlyErrorMessage, } from '@automaker/utils'; import { ProviderFactory } from '../providers/provider-factory.js'; import { createChatOptions, validateWorkingDirectory } from '../lib/sdk-options.js'; -import { PathNotAllowedError } from '@automaker/platform'; import type { SettingsService } from './settings-service.js'; import { getAutoLoadClaudeMdSetting, @@ -98,6 +96,20 @@ export class AgentService { await secureFs.mkdir(this.stateDir, { recursive: true }); } + /** + * Detect provider-side session errors (session not found, expired, etc.). + * Used to decide whether to clear a stale sdkSessionId. + */ + private isStaleSessionError(rawErrorText: string): boolean { + const errorLower = rawErrorText.toLowerCase(); + return ( + errorLower.includes('session not found') || + errorLower.includes('session expired') || + errorLower.includes('invalid session') || + errorLower.includes('no such session') + ); + } + /** * Start or resume a conversation */ @@ -108,32 +120,26 @@ export class AgentService { sessionId: string; workingDirectory?: string; }) { - if (!this.sessions.has(sessionId)) { - const messages = await this.loadSession(sessionId); - const metadata = await this.loadMetadata(); - const sessionMetadata = metadata[sessionId]; - - // Determine the effective working directory + // ensureSession handles loading from disk if not in memory. + // For startConversation, we always want to create a session even if + // metadata doesn't exist yet (new session), so we fall back to creating one. + let session = await this.ensureSession(sessionId, workingDirectory); + if (!session) { + // Session doesn't exist on disk either — create a fresh in-memory session. const effectiveWorkingDirectory = workingDirectory || process.cwd(); const resolvedWorkingDirectory = path.resolve(effectiveWorkingDirectory); - - // Validate that the working directory is allowed using centralized validation validateWorkingDirectory(resolvedWorkingDirectory); - // Load persisted queue - const promptQueue = await this.loadQueueState(sessionId); - - this.sessions.set(sessionId, { - messages, + session = { + messages: [], isRunning: false, abortController: null, workingDirectory: resolvedWorkingDirectory, - sdkSessionId: sessionMetadata?.sdkSessionId, // Load persisted SDK session ID - promptQueue, - }); + promptQueue: [], + }; + this.sessions.set(sessionId, session); } - const session = this.sessions.get(sessionId)!; return { success: true, messages: session.messages, @@ -141,6 +147,98 @@ export class AgentService { }; } + /** + * Ensure a session is loaded into memory. + * + * Sessions may exist on disk (in metadata and session files) but not be + * present in the in-memory Map — for example after a server restart, or + * when a client calls sendMessage before explicitly calling startConversation. 
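+ *
+ * Recovery sketch (session ID illustrative): after a restart, sendMessage('abc123')
+ * finds no in-memory entry, so ensureSession reloads the messages, sdkSessionId,
+ * and prompt queue from disk and re-registers the session before execution continues.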
+ * + * This helper transparently loads the session from disk when it is missing + * from memory, eliminating "session not found" errors for sessions that + * were previously created but not yet initialized in memory. + * + * If both metadata and session files are missing, the session truly doesn't + * exist. A detailed diagnostic log is emitted so developers can track down + * how the invalid session ID was generated. + * + * @returns The in-memory Session object, or null if the session doesn't exist at all + */ + private async ensureSession( + sessionId: string, + workingDirectory?: string + ): Promise { + const existing = this.sessions.get(sessionId); + if (existing) { + return existing; + } + + // Try to load from disk — the session may have been created earlier + // (e.g. via createSession) but never initialized in memory. + let metadata: Record; + let messages: Message[]; + try { + [metadata, messages] = await Promise.all([this.loadMetadata(), this.loadSession(sessionId)]); + } catch (error) { + // Disk read failure should not be treated as "session not found" — + // it's a transient I/O problem. Log and return null so callers can + // surface an appropriate error message. + this.logger.error( + `Failed to load session ${sessionId} from disk (I/O error — NOT a missing session):`, + error + ); + return null; + } + + const sessionMetadata = metadata[sessionId]; + + // If there's no metadata AND no persisted messages, the session truly doesn't exist. + // Log diagnostic info to help track down how we ended up with an invalid session ID. + if (!sessionMetadata && messages.length === 0) { + this.logger.warn( + `Session "${sessionId}" not found: no metadata and no persisted messages. ` + + `This can happen when a session ID references a deleted/expired session, ` + + `or when the server restarted and the session was never persisted to disk. ` + + `Available session IDs in metadata: [${Object.keys(metadata).slice(0, 10).join(', ')}${Object.keys(metadata).length > 10 ? '...' : ''}]` + ); + return null; + } + + const effectiveWorkingDirectory = + workingDirectory || sessionMetadata?.workingDirectory || process.cwd(); + const resolvedWorkingDirectory = path.resolve(effectiveWorkingDirectory); + + // Validate that the working directory is allowed using centralized validation + try { + validateWorkingDirectory(resolvedWorkingDirectory); + } catch (validationError) { + this.logger.warn( + `Session "${sessionId}": working directory "${resolvedWorkingDirectory}" is not allowed — ` + + `returning null so callers treat it as a missing session. Error: ${(validationError as Error).message}` + ); + return null; + } + + // Load persisted queue + const promptQueue = await this.loadQueueState(sessionId); + + const session: Session = { + messages, + isRunning: false, + abortController: null, + workingDirectory: resolvedWorkingDirectory, + sdkSessionId: sessionMetadata?.sdkSessionId, + promptQueue, + }; + + this.sessions.set(sessionId, session); + this.logger.info( + `Auto-initialized session ${sessionId} from disk ` + + `(${messages.length} messages, sdkSessionId: ${sessionMetadata?.sdkSessionId ? 
'present' : 'none'})` + ); + return session; + } + /** * Send a message to the agent and stream responses */ @@ -161,10 +259,18 @@ export class AgentService { thinkingLevel?: ThinkingLevel; reasoningEffort?: ReasoningEffort; }) { - const session = this.sessions.get(sessionId); + const session = await this.ensureSession(sessionId, workingDirectory); if (!session) { - this.logger.error('ERROR: Session not found:', sessionId); - throw new Error(`Session ${sessionId} not found`); + this.logger.error( + `Session not found: ${sessionId}. ` + + `The session may have been deleted, never created, or lost after a server restart. ` + + `In-memory sessions: ${this.sessions.size}, requested ID: ${sessionId}` + ); + throw new Error( + `Session ${sessionId} not found. ` + + `The session may have been deleted or expired. ` + + `Please create a new session and try again.` + ); } if (session.isRunning) { @@ -327,7 +433,7 @@ export class AgentService { // When using a custom provider (GLM, MiniMax), use resolved Claude model for SDK config // (thinking level budgets, allowedTools) but we MUST pass the provider's model ID - // (e.g. "GLM-4.7") to the API - not "claude-sonnet-4-20250514" which causes "model not found" + // (e.g. "GLM-4.7") to the API - not "claude-sonnet-4-6" which causes "model not found" const modelForSdk = providerResolvedModel || model; const sessionModelForSdk = providerResolvedModel ? undefined : session.model; @@ -441,8 +547,13 @@ export class AgentService { const toolUses: Array<{ name: string; input: unknown }> = []; for await (const msg of stream) { - // Capture SDK session ID from any message and persist it - if (msg.session_id && !session.sdkSessionId) { + // Capture SDK session ID from any message and persist it. + // Update when: + // - No session ID set yet (first message in a new session) + // - The provider returned a *different* session ID (e.g., after a + // "Session not found" recovery where the provider started a fresh + // session — the stale ID must be replaced with the new one) + if (msg.session_id && msg.session_id !== session.sdkSessionId) { session.sdkSessionId = msg.session_id; // Persist the SDK session ID to ensure conversation continuity across server restarts await this.updateSession(sessionId, { sdkSessionId: msg.session_id }); @@ -505,12 +616,36 @@ export class AgentService { // streamed error messages instead of throwing. Handle these here so the // Agent Runner UX matches the Claude/Cursor behavior without changing // their provider implementations. - const rawErrorText = + + // Clean error text: strip ANSI escape codes and the redundant "Error: " + // prefix that CLI providers (especially OpenCode) add to stderr output. + // The OpenCode provider strips these in normalizeEvent/executeQuery, but + // we also strip here as a defense-in-depth measure. + // + // Without stripping the "Error: " prefix, the wrapping at line ~647 + // (`content: \`Error: ${enhancedText}\``) produces double-prefixed text: + // "Error: Error: Session not found" — confusing for the user. 
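+      // Example (sample input assumed): '\x1b[31mError: Session not found\x1b[0m'
+      // becomes 'Session not found' after the ANSI strip and prefix removal below.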
+ const rawMsgError = (typeof msg.error === 'string' && msg.error.trim()) || 'Unexpected error from provider during agent execution.'; + let rawErrorText = rawMsgError.replace(/\x1b\[[0-9;]*m/g, '').trim() || rawMsgError; + // Remove the CLI's "Error: " prefix to prevent double-wrapping + rawErrorText = rawErrorText.replace(/^Error:\s*/i, '').trim() || rawErrorText; const errorInfo = classifyError(new Error(rawErrorText)); + // Detect provider-side session errors and proactively clear the stale + // sdkSessionId so the next attempt starts a fresh provider session. + // This handles providers that don't have built-in session recovery + // (unlike OpenCode which auto-retries without the session flag). + if (session.sdkSessionId && this.isStaleSessionError(rawErrorText)) { + this.logger.info( + `Clearing stale sdkSessionId for session ${sessionId} after provider session error` + ); + session.sdkSessionId = undefined; + await this.clearSdkSessionId(sessionId); + } + // Keep the provider-supplied text intact (Codex already includes helpful tips), // only add a small rate-limit hint when we can detect it. const enhancedText = errorInfo.isRateLimit @@ -571,13 +706,30 @@ export class AgentService { this.logger.error('Error:', error); + // Strip ANSI escape codes and the "Error: " prefix from thrown error + // messages so the UI receives clean text without double-prefixing. + let rawThrownMsg = ((error as Error).message || '').replace(/\x1b\[[0-9;]*m/g, '').trim(); + rawThrownMsg = rawThrownMsg.replace(/^Error:\s*/i, '').trim() || rawThrownMsg; + const thrownErrorMsg = rawThrownMsg.toLowerCase(); + + // Check if the thrown error is a provider-side session error. + // Clear the stale sdkSessionId so the next retry starts fresh. + if (session.sdkSessionId && this.isStaleSessionError(rawThrownMsg)) { + this.logger.info( + `Clearing stale sdkSessionId for session ${sessionId} after thrown session error` + ); + session.sdkSessionId = undefined; + await this.clearSdkSessionId(sessionId); + } + session.isRunning = false; session.abortController = null; + const cleanErrorMsg = rawThrownMsg || (error as Error).message; const errorMessage: Message = { id: this.generateId(), role: 'assistant', - content: `Error: ${(error as Error).message}`, + content: `Error: ${cleanErrorMsg}`, timestamp: new Date().toISOString(), isError: true, }; @@ -587,7 +739,7 @@ export class AgentService { this.emitAgentEvent(sessionId, { type: 'error', - error: (error as Error).message, + error: cleanErrorMsg, message: errorMessage, }); @@ -598,8 +750,8 @@ export class AgentService { /** * Get conversation history */ - getHistory(sessionId: string) { - const session = this.sessions.get(sessionId); + async getHistory(sessionId: string) { + const session = await this.ensureSession(sessionId); if (!session) { return { success: false, error: 'Session not found' }; } @@ -615,7 +767,7 @@ export class AgentService { * Stop current agent execution */ async stopExecution(sessionId: string) { - const session = this.sessions.get(sessionId); + const session = await this.ensureSession(sessionId); if (!session) { return { success: false, error: 'Session not found' }; } @@ -637,9 +789,16 @@ export class AgentService { if (session) { session.messages = []; session.isRunning = false; + session.sdkSessionId = undefined; // Clear stale provider session ID to prevent "Session not found" errors await this.saveSession(sessionId, []); } + // Clear the sdkSessionId from persisted metadata so it doesn't get + // reloaded by ensureSession() after a server 
restart. + // This prevents "Session not found" errors when the provider-side session + // no longer exists (e.g., OpenCode CLI sessions expire on disk). + await this.clearSdkSessionId(sessionId); + return { success: true }; } @@ -796,6 +955,23 @@ export class AgentService { return true; } + /** + * Clear the sdkSessionId from persisted metadata. + * + * This removes the provider-side session ID so that the next message + * starts a fresh provider session instead of trying to resume a stale one. + * Prevents "Session not found" errors from CLI providers like OpenCode + * when the provider-side session has been deleted or expired. + */ + async clearSdkSessionId(sessionId: string): Promise { + const metadata = await this.loadMetadata(); + if (metadata[sessionId] && metadata[sessionId].sdkSessionId) { + delete metadata[sessionId].sdkSessionId; + metadata[sessionId].updatedAt = new Date().toISOString(); + await this.saveMetadata(metadata); + } + } + // Queue management methods /** @@ -810,7 +986,7 @@ export class AgentService { thinkingLevel?: ThinkingLevel; } ): Promise<{ success: boolean; queuedPrompt?: QueuedPrompt; error?: string }> { - const session = this.sessions.get(sessionId); + const session = await this.ensureSession(sessionId); if (!session) { return { success: false, error: 'Session not found' }; } @@ -839,8 +1015,10 @@ export class AgentService { /** * Get the current queue for a session */ - getQueue(sessionId: string): { success: boolean; queue?: QueuedPrompt[]; error?: string } { - const session = this.sessions.get(sessionId); + async getQueue( + sessionId: string + ): Promise<{ success: boolean; queue?: QueuedPrompt[]; error?: string }> { + const session = await this.ensureSession(sessionId); if (!session) { return { success: false, error: 'Session not found' }; } @@ -854,7 +1032,7 @@ export class AgentService { sessionId: string, promptId: string ): Promise<{ success: boolean; error?: string }> { - const session = this.sessions.get(sessionId); + const session = await this.ensureSession(sessionId); if (!session) { return { success: false, error: 'Session not found' }; } @@ -879,7 +1057,7 @@ export class AgentService { * Clear all prompts from the queue */ async clearQueue(sessionId: string): Promise<{ success: boolean; error?: string }> { - const session = this.sessions.get(sessionId); + const session = await this.ensureSession(sessionId); if (!session) { return { success: false, error: 'Session not found' }; } @@ -962,10 +1140,24 @@ export class AgentService { } } + /** + * Emit an event to the agent stream (private, used internally). + */ private emitAgentEvent(sessionId: string, data: Record): void { this.events.emit('agent:stream', { sessionId, ...data }); } + /** + * Emit an error event for a session. + * + * Public method so that route handlers can surface errors to the UI + * even when sendMessage() throws before it can emit its own error event + * (e.g., when the session is not found and no in-memory session exists). 
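+ *
+ * Illustrative call site (route shape assumed):
+ *   agentService.emitSessionError(sessionId, `Session ${sessionId} not found`);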
+ */ + emitSessionError(sessionId: string, error: string): void { + this.events.emit('agent:stream', { sessionId, type: 'error', error }); + } + private async getSystemPrompt(): Promise { // Load from settings (no caching - allows hot reload of custom prompts) const prompts = await getPromptCustomization(this.settingsService, '[AgentService]'); diff --git a/apps/server/src/services/auto-loop-coordinator.ts b/apps/server/src/services/auto-loop-coordinator.ts index ddc666d50..29c365585 100644 --- a/apps/server/src/services/auto-loop-coordinator.ts +++ b/apps/server/src/services/auto-loop-coordinator.ts @@ -4,6 +4,7 @@ import type { Feature } from '@automaker/types'; import { createLogger, classifyError } from '@automaker/utils'; +import { areDependenciesSatisfied } from '@automaker/dependency-resolver'; import type { TypedEventBus } from './typed-event-bus.js'; import type { ConcurrencyManager } from './concurrency-manager.js'; import type { SettingsService } from './settings-service.js'; @@ -64,6 +65,7 @@ export type ClearExecutionStateFn = ( ) => Promise; export type ResetStuckFeaturesFn = (projectPath: string) => Promise; export type IsFeatureFinishedFn = (feature: Feature) => boolean; +export type LoadAllFeaturesFn = (projectPath: string) => Promise; export class AutoLoopCoordinator { private autoLoopsByProject = new Map(); @@ -78,7 +80,8 @@ export class AutoLoopCoordinator { private clearExecutionStateFn: ClearExecutionStateFn, private resetStuckFeaturesFn: ResetStuckFeaturesFn, private isFeatureFinishedFn: IsFeatureFinishedFn, - private isFeatureRunningFn: (featureId: string) => boolean + private isFeatureRunningFn: (featureId: string) => boolean, + private loadAllFeaturesFn?: LoadAllFeaturesFn ) {} /** @@ -158,10 +161,7 @@ export class AutoLoopCoordinator { const projectState = this.autoLoopsByProject.get(worktreeKey); if (!projectState) return; const { projectPath, branchName } = projectState.config; - let iterationCount = 0; - while (projectState.isRunning && !projectState.abortController.signal.aborted) { - iterationCount++; try { const runningCount = await this.getRunningCountForWorktree(projectPath, branchName); if (runningCount >= projectState.config.maxConcurrency) { @@ -181,9 +181,34 @@ export class AutoLoopCoordinator { await this.sleep(10000, projectState.abortController.signal); continue; } - const nextFeature = pendingFeatures.find( - (f) => !this.isFeatureRunningFn(f.id) && !this.isFeatureFinishedFn(f) + + // Load all features for dependency checking (if callback provided) + const allFeatures = this.loadAllFeaturesFn + ? await this.loadAllFeaturesFn(projectPath) + : undefined; + + // Filter to eligible features: not running, not finished, and dependencies satisfied. + // When loadAllFeaturesFn is not provided, allFeatures is undefined and we bypass + // dependency checks (returning true) to avoid false negatives caused by completed + // features being absent from pendingFeatures. + const eligibleFeatures = pendingFeatures.filter( + (f) => + !this.isFeatureRunningFn(f.id) && + !this.isFeatureFinishedFn(f) && + (this.loadAllFeaturesFn ? areDependenciesSatisfied(f, allFeatures!) : true) ); + + // Sort eligible features by priority (lower number = higher priority, default 2) + eligibleFeatures.sort((a, b) => (a.priority ?? 2) - (b.priority ?? 2)); + + const nextFeature = eligibleFeatures[0] ?? null; + + if (nextFeature) { + logger.info( + `Auto-loop selected feature "${nextFeature.title || nextFeature.id}" ` + + `(priority=${nextFeature.priority ?? 
2}) from ${eligibleFeatures.length} eligible features` + ); + } if (nextFeature) { projectState.hasEmittedIdleEvent = false; this.executeFeatureFn( @@ -390,6 +415,10 @@ export class AutoLoopCoordinator { const projectId = settings.projects?.find((p) => p.path === projectPath)?.id; const autoModeByWorktree = settings.autoModeByWorktree; if (projectId && autoModeByWorktree && typeof autoModeByWorktree === 'object') { + // Normalize both null and 'main' to '__main__' to match the same + // canonicalization used by getWorktreeAutoLoopKey, ensuring that + // lookups for the primary branch always use the '__main__' sentinel + // regardless of whether the caller passed null or the string 'main'. const normalizedBranch = branchName === null || branchName === 'main' ? '__main__' : branchName; const worktreeId = `${projectId}::${normalizedBranch}`; diff --git a/apps/server/src/services/auto-mode/facade.ts b/apps/server/src/services/auto-mode/facade.ts index e31543b41..2d8e9c9ea 100644 --- a/apps/server/src/services/auto-mode/facade.ts +++ b/apps/server/src/services/auto-mode/facade.ts @@ -15,12 +15,14 @@ import path from 'path'; import { exec } from 'child_process'; import { promisify } from 'util'; import type { Feature, PlanningMode, ThinkingLevel } from '@automaker/types'; -import { DEFAULT_MAX_CONCURRENCY, stripProviderPrefix } from '@automaker/types'; +import { DEFAULT_MAX_CONCURRENCY, DEFAULT_MODELS, stripProviderPrefix } from '@automaker/types'; +import { resolveModelString } from '@automaker/model-resolver'; import { createLogger, loadContextFiles, classifyError } from '@automaker/utils'; -import { getFeatureDir, spawnProcess } from '@automaker/platform'; +import { getFeatureDir } from '@automaker/platform'; import * as secureFs from '../../lib/secure-fs.js'; import { validateWorkingDirectory } from '../../lib/sdk-options.js'; import { getPromptCustomization, getProviderByModelId } from '../../lib/settings-helpers.js'; +import { execGitCommand } from '@automaker/git-utils'; import { TypedEventBus } from '../typed-event-bus.js'; import { ConcurrencyManager } from '../concurrency-manager.js'; import { WorktreeResolver } from '../worktree-resolver.js'; @@ -49,24 +51,6 @@ import type { const execAsync = promisify(exec); const logger = createLogger('AutoModeServiceFacade'); -/** - * Execute git command with array arguments to prevent command injection. - */ -async function execGitCommand(args: string[], cwd: string): Promise { - const result = await spawnProcess({ - command: 'git', - args, - cwd, - }); - - if (result.exitCode === 0) { - return result.stdout; - } else { - const errorMessage = result.stderr || `Git command failed with code ${result.exitCode}`; - throw new Error(errorMessage); - } -} - /** * AutoModeServiceFacade provides a clean interface for auto-mode functionality. 
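 * Illustrative wiring (names from the hunks below): a single createRunAgentFn()
 * helper is shared by PipelineOrchestrator and ExecutionService, so model
 * resolution and custom-provider lookup happen in exactly one place.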
diff --git a/apps/server/src/services/auto-mode/facade.ts b/apps/server/src/services/auto-mode/facade.ts
index e31543b41..2d8e9c9ea 100644
--- a/apps/server/src/services/auto-mode/facade.ts
+++ b/apps/server/src/services/auto-mode/facade.ts
@@ -15,12 +15,14 @@
 import path from 'path';
 import { exec } from 'child_process';
 import { promisify } from 'util';
 import type { Feature, PlanningMode, ThinkingLevel } from '@automaker/types';
-import { DEFAULT_MAX_CONCURRENCY, stripProviderPrefix } from '@automaker/types';
+import { DEFAULT_MAX_CONCURRENCY, DEFAULT_MODELS, stripProviderPrefix } from '@automaker/types';
+import { resolveModelString } from '@automaker/model-resolver';
 import { createLogger, loadContextFiles, classifyError } from '@automaker/utils';
-import { getFeatureDir, spawnProcess } from '@automaker/platform';
+import { getFeatureDir } from '@automaker/platform';
 import * as secureFs from '../../lib/secure-fs.js';
 import { validateWorkingDirectory } from '../../lib/sdk-options.js';
 import { getPromptCustomization, getProviderByModelId } from '../../lib/settings-helpers.js';
+import { execGitCommand } from '@automaker/git-utils';
 import { TypedEventBus } from '../typed-event-bus.js';
 import { ConcurrencyManager } from '../concurrency-manager.js';
 import { WorktreeResolver } from '../worktree-resolver.js';
@@ -49,24 +51,6 @@ import type {
 const execAsync = promisify(exec);
 const logger = createLogger('AutoModeServiceFacade');

-/**
- * Execute git command with array arguments to prevent command injection.
- */
-async function execGitCommand(args: string[], cwd: string): Promise<string> {
-  const result = await spawnProcess({
-    command: 'git',
-    args,
-    cwd,
-  });
-
-  if (result.exitCode === 0) {
-    return result.stdout;
-  } else {
-    const errorMessage = result.stderr || `Git command failed with code ${result.exitCode}`;
-    throw new Error(errorMessage);
-  }
-}
-
 /**
  * AutoModeServiceFacade provides a clean interface for auto-mode functionality.
  *
@@ -198,23 +182,18 @@ export class AutoModeServiceFacade {
     return facadeInstance;
   };

-  // PipelineOrchestrator - runAgentFn is a stub; routes use AutoModeService directly
-  const pipelineOrchestrator = new PipelineOrchestrator(
-    eventBus,
-    featureStateManager,
-    agentExecutor,
-    testRunnerService,
-    worktreeResolver,
-    concurrencyManager,
-    settingsService,
-    // Callbacks
-    (pPath, featureId, status) =>
-      featureStateManager.updateFeatureStatus(pPath, featureId, status),
-    loadContextFiles,
-    buildFeaturePrompt,
-    (pPath, featureId, useWorktrees, _isAutoMode, _model, opts) =>
-      getFacade().executeFeature(featureId, useWorktrees, false, undefined, opts),
-    // runAgentFn - delegates to AgentExecutor
+  /**
+   * Shared agent-run helper used by both PipelineOrchestrator and ExecutionService.
+   *
+   * Resolves the model string, looks up the custom provider/credentials via
+   * getProviderByModelId, then delegates to agentExecutor.execute with the
+   * full payload. The opts parameter uses an index-signature union so it
+   * accepts both the typed ExecutionService opts object and the looser
+   * Record<string, unknown> used by PipelineOrchestrator without requiring
+   * type casts at the call sites.
+   */
+  const createRunAgentFn =
+    () =>
     async (
       workDir: string,
       featureId: string,
@@ -223,9 +202,18 @@
       prompt: string,
       abortController: AbortController,
       pPath: string,
       imagePaths?: string[],
       model?: string,
-      opts?: Record<string, unknown>
-    ) => {
-      const resolvedModel = model || 'claude-sonnet-4-20250514';
+      opts?: {
+        planningMode?: PlanningMode;
+        requirePlanApproval?: boolean;
+        previousContent?: string;
+        systemPrompt?: string;
+        autoLoadClaudeMd?: boolean;
+        thinkingLevel?: ThinkingLevel;
+        branchName?: string | null;
+        [key: string]: unknown;
+      }
+    ): Promise<void> => {
+      const resolvedModel = resolveModelString(model, DEFAULT_MODELS.claude);
       const provider = ProviderFactory.getProviderForModel(resolvedModel);
       const effectiveBareModel = stripProviderPrefix(resolvedModel);
@@ -234,7 +222,7 @@
         | import('@automaker/types').ClaudeCompatibleProvider
         | undefined;
       let credentials: import('@automaker/types').Credentials | undefined;
-      if (resolvedModel && settingsService) {
+      if (settingsService) {
         const providerResult = await getProviderByModelId(
           resolvedModel,
           settingsService,
@@ -275,7 +263,7 @@
             featureStateManager.saveFeatureSummary(projPath, fId, summary),
           buildTaskPrompt: (task, allTasks, taskIndex, _planContent, template, feedback) => {
             let taskPrompt = template
-              .replace(/\{\{taskName\}\}/g, task.description)
+              .replace(/\{\{taskName\}\}/g, task.description || `Task ${task.id}`)
               .replace(/\{\{taskIndex\}\}/g, String(taskIndex + 1))
               .replace(/\{\{totalTasks\}\}/g, String(allTasks.length))
               .replace(/\{\{taskDescription\}\}/g, task.description || `Task ${task.id}`);
@@ -286,7 +274,25 @@
         },
       }
     );
-  }
+  };
+
+  // PipelineOrchestrator - runAgentFn delegates to AgentExecutor via shared helper
+  const pipelineOrchestrator = new PipelineOrchestrator(
+    eventBus,
+    featureStateManager,
+    agentExecutor,
+    testRunnerService,
+    worktreeResolver,
+    concurrencyManager,
+    settingsService,
+    // Callbacks
+    (pPath, featureId, status) =>
+      featureStateManager.updateFeatureStatus(pPath, featureId, status),
+    loadContextFiles,
+    buildFeaturePrompt,
+    (pPath, featureId, useWorktrees, _isAutoMode, _model, opts) =>
+      getFacade().executeFeature(featureId, useWorktrees, false, undefined, opts),
+    createRunAgentFn()
   );

   // AutoLoopCoordinator - ALWAYS create new with proper execution callbacks
@@ -324,95 +330,17 @@
       feature.status === 'completed' ||
       feature.status === 'verified' ||
       feature.status === 'waiting_approval',
-    (featureId) => concurrencyManager.isRunning(featureId)
+    (featureId) => concurrencyManager.isRunning(featureId),
+    async (pPath) => featureLoader.getAll(pPath)
   );

-  // ExecutionService - runAgentFn calls AgentExecutor.execute
+  // ExecutionService - runAgentFn delegates to AgentExecutor via shared helper
   const executionService = new ExecutionService(
     eventBus,
     concurrencyManager,
     worktreeResolver,
     settingsService,
-    // runAgentFn - delegates to AgentExecutor
-    async (
-      workDir: string,
-      featureId: string,
-      prompt: string,
-      abortController: AbortController,
-      pPath: string,
-      imagePaths?: string[],
-      model?: string,
-      opts?: {
-        projectPath?: string;
-        planningMode?: PlanningMode;
-        requirePlanApproval?: boolean;
-        systemPrompt?: string;
-        autoLoadClaudeMd?: boolean;
-        thinkingLevel?: ThinkingLevel;
-        branchName?: string | null;
-      }
-    ) => {
-      const resolvedModel = model || 'claude-sonnet-4-20250514';
-      const provider = ProviderFactory.getProviderForModel(resolvedModel);
-      const effectiveBareModel = stripProviderPrefix(resolvedModel);
-
-      // Resolve custom provider (GLM, MiniMax, etc.) for baseUrl and credentials
-      let claudeCompatibleProvider:
-        | import('@automaker/types').ClaudeCompatibleProvider
-        | undefined;
-      let credentials: import('@automaker/types').Credentials | undefined;
-      if (resolvedModel && settingsService) {
-        const providerResult = await getProviderByModelId(
-          resolvedModel,
-          settingsService,
-          '[AutoModeFacade]'
-        );
-        if (providerResult.provider) {
-          claudeCompatibleProvider = providerResult.provider;
-          credentials = providerResult.credentials;
-        }
-      }
-
-      await agentExecutor.execute(
-        {
-          workDir,
-          featureId,
-          prompt,
-          projectPath: pPath,
-          abortController,
-          imagePaths,
-          model: resolvedModel,
-          planningMode: opts?.planningMode,
-          requirePlanApproval: opts?.requirePlanApproval,
-          systemPrompt: opts?.systemPrompt,
-          autoLoadClaudeMd: opts?.autoLoadClaudeMd,
-          thinkingLevel: opts?.thinkingLevel,
-          branchName: opts?.branchName,
-          provider,
-          effectiveBareModel,
-          credentials,
-          claudeCompatibleProvider,
-        },
-        {
-          waitForApproval: (fId, projPath) => planApprovalService.waitForApproval(fId, projPath),
-          saveFeatureSummary: (projPath, fId, summary) =>
-            featureStateManager.saveFeatureSummary(projPath, fId, summary),
-          updateFeatureSummary: (projPath, fId, summary) =>
-            featureStateManager.saveFeatureSummary(projPath, fId, summary),
-          buildTaskPrompt: (task, allTasks, taskIndex, planContent, template, feedback) => {
-            let taskPrompt = template
-              .replace(/\{\{taskName\}\}/g, task.description)
-              .replace(/\{\{taskIndex\}\}/g, String(taskIndex + 1))
-              .replace(/\{\{totalTasks\}\}/g, String(allTasks.length))
-              .replace(/\{\{taskDescription\}\}/g, task.description || task.description);
-            if (feedback) {
-              taskPrompt = taskPrompt.replace(/\{\{userFeedback\}\}/g, feedback);
-            }
-            return taskPrompt;
-          },
-        }
-      );
-    },
+    createRunAgentFn(),
     (context) => pipelineOrchestrator.executePipeline(context),
     (pPath, featureId, status) =>
       featureStateManager.updateFeatureStatus(pPath, featureId, status),
@@ -591,12 +519,22 @@
     useWorktrees = false,
     _calledInternally = false
   ): Promise<void> {
-    return this.recoveryService.resumeFeature(
-      this.projectPath,
-      featureId,
-      useWorktrees,
-      _calledInternally
-    );
+    // Note: ExecutionService.executeFeature catches its own errors internally and
+    // does NOT re-throw them (it emits auto_mode_error and returns normally).
+    // Therefore, errors that reach this catch block are pre-execution failures
+    // (e.g., feature not found, context read error) that ExecutionService never
+    // handled — so calling handleFacadeError here does NOT produce duplicate events.
+    try {
+      return await this.recoveryService.resumeFeature(
+        this.projectPath,
+        featureId,
+        useWorktrees,
+        _calledInternally
+      );
+    } catch (error) {
+      this.handleFacadeError(error, 'resumeFeature', featureId);
+      throw error;
+    }
   }
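
The index-signature union mentioned in the helper's docstring is a standard TypeScript technique: an options type that combines known optional properties with a [key: string]: unknown index signature keeps the typed keys strongly typed while still accepting object literals that carry extra keys, so neither caller needs a cast. A minimal illustration, with types invented for the example rather than taken from the codebase:

type AgentOptsSketch = {
  planningMode?: string;
  thinkingLevel?: string;
  [key: string]: unknown; // extra keys are allowed and surface as `unknown`
};

function runAgentSketch(opts?: AgentOptsSketch): void {
  // Known keys keep their specific types; unknown keys are still reachable.
  console.log(opts?.planningMode, opts?.['customFlag']);
}

// Typed caller (ExecutionService-style): known keys only.
runAgentSketch({ planningMode: 'plan-first', thinkingLevel: 'high' });

// Loose caller (PipelineOrchestrator-style): extra keys compile without a cast
// because the index signature suppresses excess-property errors.
runAgentSketch({ planningMode: 'skip', customFlag: true, retries: 3 });
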

   /**
diff --git a/apps/server/src/services/auto-mode/global-service.ts b/apps/server/src/services/auto-mode/global-service.ts
index 90576f8c2..459562ebc 100644
--- a/apps/server/src/services/auto-mode/global-service.ts
+++ b/apps/server/src/services/auto-mode/global-service.ts
@@ -10,7 +10,6 @@
  */

 import path from 'path';
-import type { Feature } from '@automaker/types';
 import { createLogger } from '@automaker/utils';
 import type { EventEmitter } from '../../lib/events.js';
 import { TypedEventBus } from '../typed-event-bus.js';
diff --git a/apps/server/src/services/branch-commit-log-service.ts b/apps/server/src/services/branch-commit-log-service.ts
new file mode 100644
index 000000000..9666f98cf
--- /dev/null
+++ b/apps/server/src/services/branch-commit-log-service.ts
@@ -0,0 +1,172 @@
+/**
+ * Service for fetching branch commit log data.
+ *
+ * Extracts the heavy Git command execution and parsing logic from the
+ * branch-commit-log route handler so the handler only validates input,
+ * invokes this service, streams lifecycle events, and sends the response.
+ */
+
+import { execGitCommand } from '../lib/git.js';
+
+// ============================================================================
+// Types
+// ============================================================================
+
+export interface BranchCommit {
+  hash: string;
+  shortHash: string;
+  author: string;
+  authorEmail: string;
+  date: string;
+  subject: string;
+  body: string;
+  files: string[];
+}
+
+export interface BranchCommitLogResult {
+  branch: string;
+  commits: BranchCommit[];
+  total: number;
+}
+
+// ============================================================================
+// Service
+// ============================================================================
+
+/**
+ * Fetch the commit log for a specific branch (or HEAD).
+ *
+ * Runs a single `git log --name-only` invocation (plus `git rev-parse`
+ * when branchName is omitted) inside the given worktree path and
+ * returns a structured result.
+ *
+ * @param worktreePath - Absolute path to the worktree / repository
+ * @param branchName - Branch to query (omit or pass undefined for HEAD)
+ * @param limit - Maximum number of commits to return (clamped 1-100)
+ */
+export async function getBranchCommitLog(
+  worktreePath: string,
+  branchName: string | undefined,
+  limit: number
+): Promise<BranchCommitLogResult> {
+  // Clamp limit to a reasonable range
+  const parsedLimit = Number(limit);
+  const commitLimit = Math.min(Math.max(1, Number.isFinite(parsedLimit) ? parsedLimit : 20), 100);
+
+  // Use the specified branch or default to HEAD
+  const targetRef = branchName || 'HEAD';
+
+  // Fetch commit metadata AND file lists in a single git call.
+  // Uses custom record separators so we can parse both metadata and
+  // --name-only output from one invocation, eliminating the previous
+  // N+1 pattern that spawned a separate `git diff-tree` per commit.
+  //
+  // -m causes merge commits to be diffed against each parent so all
+  // files touched by the merge are listed (without -m, --name-only
+  // produces no file output for merge commits because they have 2+ parents).
+  // This means merge commits appear multiple times in the output (once per
+  // parent), so we deduplicate by hash below and merge their file lists.
+  // We over-fetch (2× the limit) to compensate for -m duplicating merge
+  // commit entries, then trim the result to the requested limit.
+  // Use ASCII control characters as record separators – these cannot appear in
+  // git commit messages, so these delimiters are safe regardless of commit
+  // body content. %x00 and %x01 in git's format string emit literal NUL /
+  // SOH bytes respectively.
+  //
+  // COMMIT_SEP (\x00) – marks the start of each commit record.
+  // META_END (\x01) – separates commit metadata from the --name-only file list.
+  //
+  // Full per-commit layout emitted by git:
+  //   \x00\n<hash>\n<shortHash>\n<author>\n<authorEmail>\n<date>\n<subject>\n<body>\x01<file list>
+  const COMMIT_SEP = '\x00';
+  const META_END = '\x01';
+  const fetchLimit = commitLimit * 2;
+
+  const logOutput = await execGitCommand(
+    [
+      'log',
+      targetRef,
+      `--max-count=${fetchLimit}`,
+      '-m',
+      '--name-only',
+      `--format=%x00%n%H%n%h%n%an%n%ae%n%aI%n%s%n%b%x01`,
+    ],
+    worktreePath
+  );
+
+  // Split output into per-commit blocks and drop the empty first chunk
+  // (the output starts with a NUL commit separator).
+  const commitBlocks = logOutput.split(COMMIT_SEP).filter((block) => block.trim());
+
+  // Use a Map to deduplicate merge commit entries (which appear once per
+  // parent when -m is used) while preserving insertion order.
+  const commitMap = new Map<string, BranchCommit>();
+
+  for (const block of commitBlocks) {
+    const metaEndIdx = block.indexOf(META_END);
+    if (metaEndIdx === -1) continue; // malformed block, skip
+
+    // --- Parse metadata (everything before the META_END delimiter) ---
+    const metaRaw = block.substring(0, metaEndIdx);
+    const metaLines = metaRaw.split('\n');
+
+    // The first line may be empty (newline right after COMMIT_SEP), skip it
+    const nonEmptyStart = metaLines.findIndex((l) => l.trim() !== '');
+    if (nonEmptyStart === -1) continue;
+
+    const fields = metaLines.slice(nonEmptyStart);
+    if (fields.length < 6) continue; // need at least hash..subject
+
+    const hash = fields[0].trim();
+    if (!hash) continue; // defensive: skip if hash is empty
+    const shortHash = fields[1]?.trim() ?? '';
+    const author = fields[2]?.trim() ?? '';
+    const authorEmail = fields[3]?.trim() ?? '';
+    const date = fields[4]?.trim() ?? '';
+    const subject = fields[5]?.trim() ?? '';
+    const body = fields.slice(6).join('\n').trim();
+
+    // --- Parse file list (everything after the META_END delimiter) ---
+    const filesRaw = block.substring(metaEndIdx + META_END.length);
+    const blockFiles = filesRaw
+      .trim()
+      .split('\n')
+      .filter((f) => f.trim());
+
+    // Merge file lists for duplicate entries (merge commits with -m)
+    const existing = commitMap.get(hash);
+    if (existing) {
+      // Add new files to the existing entry's file set
+      const fileSet = new Set(existing.files);
+      for (const f of blockFiles) fileSet.add(f);
+      existing.files = [...fileSet];
+    } else {
+      commitMap.set(hash, {
+        hash,
+        shortHash,
+        author,
+        authorEmail,
+        date,
+        subject,
+        body,
+        files: [...new Set(blockFiles)],
+      });
+    }
+  }
+
+  // Trim to the requested limit (we over-fetched to account for -m duplicates)
+  const commits = [...commitMap.values()].slice(0, commitLimit);
+
+  // If branchName wasn't specified, get current branch for display
+  let displayBranch = branchName;
+  if (!displayBranch) {
+    const branchOutput = await execGitCommand(['rev-parse', '--abbrev-ref', 'HEAD'], worktreePath);
+    displayBranch = branchOutput.trim();
+  }
+
+  return {
+    branch: displayBranch,
+    commits,
+    total: commits.length,
+  };
+}
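
To make the separator scheme concrete, here is a small self-contained sketch of how one record of that git log output splits back into metadata and a file list. The sample string mirrors the %x00/%x01 format described above; the parsing mirrors the service's loop in simplified form (illustration only).

const COMMIT_SEP = '\x00';
const META_END = '\x01';

// One synthetic record shaped like the --format output described above.
const sample =
  `${COMMIT_SEP}\n` +
  'abc123def456\nabc123\nJane Doe\njane@example.com\n' +
  '2024-01-01T00:00:00+00:00\nFix parser\nLonger body text\n' +
  `${META_END}\nsrc/a.ts\nsrc/b.ts\n`;

for (const block of sample.split(COMMIT_SEP).filter((b) => b.trim())) {
  const metaEndIdx = block.indexOf(META_END);
  const fields = block
    .substring(0, metaEndIdx)
    .split('\n')
    .filter((line, i) => i > 0 || line !== ''); // drop the empty first line
  const [hash, shortHash, author, email, date, subject, ...bodyLines] = fields;
  const files = block
    .substring(metaEndIdx + META_END.length)
    .trim()
    .split('\n')
    .filter(Boolean);
  console.log({ hash, shortHash, author, email, date, subject, body: bodyLines.join('\n').trim(), files });
}
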
diff --git a/apps/server/src/services/branch-utils.ts b/apps/server/src/services/branch-utils.ts
new file mode 100644
index 000000000..9a618da06
--- /dev/null
+++ b/apps/server/src/services/branch-utils.ts
@@ -0,0 +1,170 @@
+/**
+ * branch-utils - Shared git branch helper utilities
+ *
+ * Provides common git operations used by both checkout-branch-service and
+ * worktree-branch-service. Extracted to avoid duplication and ensure
+ * consistent behaviour across branch-related services.
+ */
+
+import { createLogger, getErrorMessage } from '@automaker/utils';
+import { execGitCommand, execGitCommandWithLockRetry } from '../lib/git.js';
+
+const logger = createLogger('BranchUtils');
+
+// ============================================================================
+// Types
+// ============================================================================
+
+export interface HasAnyChangesOptions {
+  /**
+   * When true, lines that refer to worktree-internal paths (containing
+   * ".worktrees/" or ending with ".worktrees") are excluded from the count.
+   * Use this in contexts where worktree directory entries should not be
+   * considered as real working-tree changes (e.g. worktree-branch-service).
+   */
+  excludeWorktreePaths?: boolean;
+  /**
+   * When true (default), untracked files (lines starting with "??") are
+   * included in the change count. When false, untracked files are ignored so
+   * that hasAnyChanges() is consistent with stashChanges() called without
+   * --include-untracked.
+   */
+  includeUntracked?: boolean;
+}
+
+// ============================================================================
+// Helpers
+// ============================================================================
+
+/**
+ * Returns true when a `git status --porcelain` output line refers to a
+ * worktree-internal path that should be ignored when deciding whether there
+ * are "real" local changes.
+ */
+function isExcludedWorktreeLine(line: string): boolean {
+  return line.includes('.worktrees/') || line.endsWith('.worktrees');
+}
+
+// ============================================================================
+// Exported Utilities
+// ============================================================================
+
+/**
+ * Check if there are any changes that should be stashed.
+ *
+ * @param cwd - Working directory of the git repository / worktree
+ * @param options - Optional flags controlling which lines are counted
+ * @param options.excludeWorktreePaths - When true, lines matching worktree
+ *   internal paths are excluded so they are not mistaken for real changes
+ * @param options.includeUntracked - When false, untracked files (lines
+ *   starting with "??") are excluded so this is consistent with a
+ *   stashChanges() call that does not pass --include-untracked.
+ *   Defaults to true.
+ */
+export async function hasAnyChanges(cwd: string, options?: HasAnyChangesOptions): Promise<boolean> {
+  try {
+    const includeUntracked = options?.includeUntracked ?? true;
+    const stdout = await execGitCommand(['status', '--porcelain'], cwd);
+    const lines = stdout
+      .trim()
+      .split('\n')
+      .filter((line) => {
+        if (!line.trim()) return false;
+        if (options?.excludeWorktreePaths && isExcludedWorktreeLine(line)) return false;
+        if (!includeUntracked && line.startsWith('??')) return false;
+        return true;
+      });
+    return lines.length > 0;
+  } catch (err) {
+    logger.error('hasAnyChanges: execGitCommand failed — returning false', {
+      cwd,
+      error: getErrorMessage(err),
+    });
+    return false;
+  }
+}
+
+/**
+ * Stash all local changes (including untracked files if requested).
+ * Returns true if a stash was created, false if there was nothing to stash.
+ * Throws on unexpected errors so callers abort rather than proceeding silently.
+ *
+ * @param cwd - Working directory of the git repository / worktree
+ * @param message - Stash message
+ * @param includeUntracked - When true, passes `--include-untracked` to git stash
+ */
+export async function stashChanges(
+  cwd: string,
+  message: string,
+  includeUntracked: boolean = true
+): Promise<boolean> {
+  try {
+    const args = ['stash', 'push'];
+    if (includeUntracked) {
+      args.push('--include-untracked');
+    }
+    args.push('-m', message);
+
+    const stdout = await execGitCommandWithLockRetry(args, cwd);
+
+    // git exits 0 but prints a benign message when there is nothing to stash
+    const stdoutLower = stdout.toLowerCase();
+    if (
+      stdoutLower.includes('no local changes to save') ||
+      stdoutLower.includes('nothing to stash')
+    ) {
+      logger.debug('stashChanges: nothing to stash', { cwd, message, stdout });
+      return false;
+    }
+
+    return true;
+  } catch (error) {
+    const errorMsg = getErrorMessage(error);
+
+    // Unexpected error – log full details and re-throw so the caller aborts
+    // rather than proceeding with an un-stashed working tree
+    logger.error('stashChanges: unexpected error during stash', {
+      cwd,
+      message,
+      error: errorMsg,
+    });
+    throw new Error(`Failed to stash changes in ${cwd}: ${errorMsg}`);
+  }
+}
+
+/**
+ * Pop the most recent stash entry.
+ * Returns an object indicating success and whether there were conflicts.
+ *
+ * @param cwd - Working directory of the git repository / worktree
+ */
+export async function popStash(
+  cwd: string
+): Promise<{ success: boolean; hasConflicts: boolean; error?: string }> {
+  try {
+    await execGitCommandWithLockRetry(['stash', 'pop'], cwd);
+    // If execGitCommandWithLockRetry succeeds (zero exit code), there are no conflicts
+    return { success: true, hasConflicts: false };
+  } catch (error) {
+    const errorMsg = getErrorMessage(error);
+    if (errorMsg.includes('CONFLICT') || errorMsg.includes('Merge conflict')) {
+      return { success: false, hasConflicts: true, error: errorMsg };
+    }
+    return { success: false, hasConflicts: false, error: errorMsg };
+  }
+}
+
+/**
+ * Check if a local branch already exists.
+ *
+ * @param cwd - Working directory of the git repository / worktree
+ * @param branchName - The branch name to look up (without refs/heads/ prefix)
+ */
+export async function localBranchExists(cwd: string, branchName: string): Promise<boolean> {
+  try {
+    await execGitCommand(['rev-parse', '--verify', `refs/heads/${branchName}`], cwd);
+    return true;
+  } catch {
+    return false;
+  }
+}
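
A minimal composition sketch showing how these helpers are meant to fit together, assuming a hypothetical repoPath and callback; note how the boolean returned by stashChanges gates the later pop, the same pattern the checkout service below uses.

import { hasAnyChanges, stashChanges, popStash } from './branch-utils.js';

async function withStashedChanges(repoPath: string, work: () => Promise<void>): Promise<void> {
  let didStash = false;
  if (await hasAnyChanges(repoPath, { includeUntracked: true })) {
    didStash = await stashChanges(repoPath, 'temporary stash', true);
  }
  try {
    await work();
  } finally {
    if (didStash) {
      const pop = await popStash(repoPath);
      if (pop.hasConflicts) {
        // Surface conflicts to the caller instead of silently continuing.
        throw new Error(`Stash pop produced conflicts: ${pop.error ?? 'unknown'}`);
      }
    }
  }
}
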
diff --git a/apps/server/src/services/checkout-branch-service.ts b/apps/server/src/services/checkout-branch-service.ts
new file mode 100644
index 000000000..35fa8f21c
--- /dev/null
+++ b/apps/server/src/services/checkout-branch-service.ts
@@ -0,0 +1,343 @@
+/**
+ * CheckoutBranchService - Create and checkout a new branch with stash handling
+ *
+ * Handles new branch creation with automatic stash/reapply of local changes.
+ * If there are uncommitted changes and the caller requests stashing, they are
+ * stashed before creating the branch and reapplied after. If the stash pop
+ * results in merge conflicts, returns a special response so the UI can create
+ * a conflict resolution task.
+ *
+ * Follows the same pattern as worktree-branch-service.ts (performSwitchBranch).
+ *
+ * The workflow:
+ * 1. Validate inputs (branch name, base branch)
+ * 2. Get current branch name
+ * 3. Check if target branch already exists
+ * 4. Optionally stash local changes
+ * 5. Create and checkout the new branch
+ * 6. Reapply stashed changes (detect conflicts)
+ * 7. Handle error recovery (restore stash if checkout fails)
+ */
+
+import { getErrorMessage } from '@automaker/utils';
+import { execGitCommand } from '../lib/git.js';
+import type { EventEmitter } from '../lib/events.js';
+import { hasAnyChanges, stashChanges, popStash, localBranchExists } from './branch-utils.js';
+
+// ============================================================================
+// Types
+// ============================================================================
+
+export interface CheckoutBranchOptions {
+  /** When true, stash local changes before checkout and reapply after */
+  stashChanges?: boolean;
+  /** When true, include untracked files in the stash */
+  includeUntracked?: boolean;
+}
+
+export interface CheckoutBranchResult {
+  success: boolean;
+  error?: string;
+  result?: {
+    previousBranch: string;
+    newBranch: string;
+    message: string;
+    hasConflicts?: boolean;
+    stashedChanges?: boolean;
+  };
+  /** Set when checkout fails and stash pop produced conflicts during recovery */
+  stashPopConflicts?: boolean;
+  /** Human-readable message when stash pop conflicts occur during error recovery */
+  stashPopConflictMessage?: string;
+}
+
+// ============================================================================
+// Main Service Function
+// ============================================================================
+
+/**
+ * Create and checkout a new branch, optionally stashing and restoring local changes.
+ *
+ * @param worktreePath - Path to the git worktree
+ * @param branchName - Name of the new branch to create
+ * @param baseBranch - Optional base branch to create from (defaults to current HEAD)
+ * @param options - Stash handling options
+ * @param events - Optional event emitter for lifecycle events
+ * @returns CheckoutBranchResult with detailed status information
+ */
+export async function performCheckoutBranch(
+  worktreePath: string,
+  branchName: string,
+  baseBranch?: string,
+  options?: CheckoutBranchOptions,
+  events?: EventEmitter
+): Promise<CheckoutBranchResult> {
+  const shouldStash = options?.stashChanges ?? false;
+  const includeUntracked = options?.includeUntracked ?? true;
+
+  // Emit start event
+  events?.emit('switch:start', { worktreePath, branchName, operation: 'checkout' });
+
+  // 1. Get current branch
+  let previousBranch: string;
+  try {
+    const currentBranchOutput = await execGitCommand(
+      ['rev-parse', '--abbrev-ref', 'HEAD'],
+      worktreePath
+    );
+    previousBranch = currentBranchOutput.trim();
+  } catch (branchError) {
+    const branchErrorMsg = getErrorMessage(branchError);
+    events?.emit('switch:error', {
+      worktreePath,
+      branchName,
+      error: branchErrorMsg,
+    });
+    return {
+      success: false,
+      error: `Failed to determine current branch: ${branchErrorMsg}`,
+    };
+  }
+
+  // 2. Check if branch already exists
+  if (await localBranchExists(worktreePath, branchName)) {
+    events?.emit('switch:error', {
+      worktreePath,
+      branchName,
+      error: `Branch '${branchName}' already exists`,
+    });
+    return {
+      success: false,
+      error: `Branch '${branchName}' already exists`,
+    };
+  }
+
+  // 3. Validate base branch if provided
+  if (baseBranch) {
+    try {
+      await execGitCommand(['rev-parse', '--verify', baseBranch], worktreePath);
+    } catch {
+      events?.emit('switch:error', {
+        worktreePath,
+        branchName,
+        error: `Base branch '${baseBranch}' does not exist`,
+      });
+      return {
+        success: false,
+        error: `Base branch '${baseBranch}' does not exist`,
+      };
+    }
+  }
+
+  // 4. Stash local changes if requested and there are changes
+  let didStash = false;
+
+  if (shouldStash) {
+    const hadChanges = await hasAnyChanges(worktreePath, { includeUntracked });
+    if (hadChanges) {
+      events?.emit('switch:stash', {
+        worktreePath,
+        previousBranch,
+        targetBranch: branchName,
+        action: 'push',
+      });
+
+      const stashMessage = `Auto-stash before switching to ${branchName}`;
+      try {
+        didStash = await stashChanges(worktreePath, stashMessage, includeUntracked);
+      } catch (stashError) {
+        const stashErrorMsg = getErrorMessage(stashError);
+        events?.emit('switch:error', {
+          worktreePath,
+          branchName,
+          error: `Failed to stash local changes: ${stashErrorMsg}`,
+        });
+        return {
+          success: false,
+          error: `Failed to stash local changes before creating branch: ${stashErrorMsg}`,
+        };
+      }
+    }
+  }
+
+  try {
+    // 5. Create and checkout the new branch
+    events?.emit('switch:checkout', {
+      worktreePath,
+      targetBranch: branchName,
+      isRemote: false,
+      previousBranch,
+    });
+
+    const checkoutArgs = ['checkout', '-b', branchName];
+    if (baseBranch) {
+      checkoutArgs.push(baseBranch);
+    }
+    await execGitCommand(checkoutArgs, worktreePath);
+
+    // 6. Reapply stashed changes if we stashed earlier
+    let hasConflicts = false;
+    let conflictMessage = '';
+    let stashReapplied = false;
+
+    if (didStash) {
+      events?.emit('switch:pop', {
+        worktreePath,
+        targetBranch: branchName,
+        action: 'pop',
+      });
+
+      // Isolate the pop in its own try/catch so a thrown exception does not
+      // propagate to the outer catch block, which would attempt a second pop.
+      try {
+        const popResult = await popStash(worktreePath);
+        // Mark didStash false so the outer error-recovery path cannot pop again.
+        didStash = false;
+        hasConflicts = popResult.hasConflicts;
+        if (popResult.hasConflicts) {
+          conflictMessage = `Created branch '${branchName}' but merge conflicts occurred when reapplying your local changes. Please resolve the conflicts.`;
+        } else if (!popResult.success) {
+          conflictMessage = `Created branch '${branchName}' but failed to reapply stashed changes: ${popResult.error}. Your changes are still in the stash.`;
+        } else {
+          stashReapplied = true;
+        }
+      } catch (popError) {
+        // Pop threw an unexpected exception. Record the error and clear didStash
+        // so the outer catch does not attempt a second pop.
+        didStash = false;
+        conflictMessage = `Created branch '${branchName}' but an error occurred while reapplying stashed changes: ${getErrorMessage(popError)}. Your changes may still be in the stash.`;
+        events?.emit('switch:pop', {
+          worktreePath,
+          targetBranch: branchName,
+          action: 'pop',
+          error: getErrorMessage(popError),
+        });
+      }
+    }
+
+    if (hasConflicts) {
+      events?.emit('switch:done', {
+        worktreePath,
+        previousBranch,
+        currentBranch: branchName,
+        hasConflicts: true,
+      });
+      return {
+        success: true,
+        result: {
+          previousBranch,
+          newBranch: branchName,
+          message: conflictMessage,
+          hasConflicts: true,
+          stashedChanges: true,
+        },
+      };
+    } else if (conflictMessage && !stashReapplied) {
+      // Stash pop failed for a non-conflict reason — stash is still present.
+      // (Checked via conflictMessage because didStash is always cleared after
+      // the pop attempt above.)
+      events?.emit('switch:done', {
+        worktreePath,
+        previousBranch,
+        currentBranch: branchName,
+        stashPopFailed: true,
+      });
+      return {
+        success: true,
+        result: {
+          previousBranch,
+          newBranch: branchName,
+          message: conflictMessage,
+          hasConflicts: false,
+          stashedChanges: true,
+        },
+      };
+    } else {
+      const stashNote = stashReapplied ? ' (local changes stashed and reapplied)' : '';
+      events?.emit('switch:done', {
+        worktreePath,
+        previousBranch,
+        currentBranch: branchName,
+        stashReapplied,
+      });
+      return {
+        success: true,
+        result: {
+          previousBranch,
+          newBranch: branchName,
+          message: `Created and checked out branch '${branchName}'${stashNote}`,
+          hasConflicts: false,
+          stashedChanges: stashReapplied,
+        },
+      };
+    }
+  } catch (checkoutError) {
+    // 7. If checkout failed and we stashed, try to restore the stash
+    if (didStash) {
+      try {
+        const popResult = await popStash(worktreePath);
+        if (popResult.hasConflicts) {
+          const checkoutErrorMsg = getErrorMessage(checkoutError);
+          events?.emit('switch:error', {
+            worktreePath,
+            branchName,
+            error: checkoutErrorMsg,
+            stashPopConflicts: true,
+          });
+          return {
+            success: false,
+            error: checkoutErrorMsg,
+            stashPopConflicts: true,
+            stashPopConflictMessage:
+              'Stash pop resulted in conflicts: your stashed changes were partially reapplied ' +
+              'but produced merge conflicts. Please resolve the conflicts before retrying.',
+          };
+        } else if (!popResult.success) {
+          const checkoutErrorMsg = getErrorMessage(checkoutError);
+          const combinedMessage =
+            `${checkoutErrorMsg}. Additionally, restoring your stashed changes failed: ` +
+            `${popResult.error ?? 'unknown error'} — your changes are still saved in the stash.`;
+          events?.emit('switch:error', {
+            worktreePath,
+            branchName,
+            error: combinedMessage,
+          });
+          return {
+            success: false,
+            error: combinedMessage,
+            stashPopConflicts: false,
+          };
+        }
+        // popResult.success === true: stash was cleanly restored
+      } catch (popError) {
+        // popStash itself threw — build a failure result rather than letting
+        // the exception propagate and produce an unhandled rejection.
+        const checkoutErrorMsg = getErrorMessage(checkoutError);
+        const popErrorMsg = getErrorMessage(popError);
+        const combinedMessage =
+          `${checkoutErrorMsg}. Additionally, an error occurred while attempting to restore ` +
+          `your stashed changes: ${popErrorMsg} — your changes may still be saved in the stash.`;
+        events?.emit('switch:error', {
+          worktreePath,
+          branchName,
+          error: combinedMessage,
+        });
+        return {
+          success: false,
+          error: combinedMessage,
+          stashPopConflicts: false,
+          stashPopConflictMessage: combinedMessage,
+        };
+      }
+    }
+    const checkoutErrorMsg = getErrorMessage(checkoutError);
+    events?.emit('switch:error', {
+      worktreePath,
+      branchName,
+      error: checkoutErrorMsg,
+    });
+    return {
+      success: false,
+      error: checkoutErrorMsg,
+      stashPopConflicts: false,
+    };
+  }
+}
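
A usage sketch showing the result variants a caller must distinguish; the path and branch names are placeholders for illustration.

import { performCheckoutBranch } from './checkout-branch-service.js';

async function createFeatureBranch(): Promise<void> {
  const res = await performCheckoutBranch('/repos/app', 'feature/login', 'main', {
    stashChanges: true,
    includeUntracked: true,
  });

  if (!res.success) {
    // Either a plain failure or a failed recovery (the stash pop conflicted).
    console.error(res.stashPopConflicts ? res.stashPopConflictMessage : res.error);
  } else if (res.result?.hasConflicts) {
    console.warn(res.result.message); // branch created, but reapplying the stash conflicted
  } else {
    console.log(res.result?.message); // clean success
  }
}
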
diff --git a/apps/server/src/services/cherry-pick-service.ts b/apps/server/src/services/cherry-pick-service.ts
new file mode 100644
index 000000000..be5dbac2d
--- /dev/null
+++ b/apps/server/src/services/cherry-pick-service.ts
@@ -0,0 +1,179 @@
+/**
+ * CherryPickService - Cherry-pick git operations without HTTP
+ *
+ * Extracted from worktree cherry-pick route to encapsulate all git
+ * cherry-pick business logic in a single service. Follows the same
+ * pattern as merge-service.ts.
+ */
+
+import { createLogger } from '@automaker/utils';
+import { execGitCommand, getCurrentBranch } from '../lib/git.js';
+import { type EventEmitter } from '../lib/events.js';
+
+const logger = createLogger('CherryPickService');
+
+// ============================================================================
+// Types
+// ============================================================================
+
+export interface CherryPickOptions {
+  noCommit?: boolean;
+}
+
+export interface CherryPickResult {
+  success: boolean;
+  error?: string;
+  hasConflicts?: boolean;
+  aborted?: boolean;
+  cherryPicked?: boolean;
+  commitHashes?: string[];
+  branch?: string;
+  message?: string;
+}
+
+// ============================================================================
+// Service Functions
+// ============================================================================
+
+/**
+ * Verify that each commit hash exists in the repository.
+ *
+ * @param worktreePath - Path to the git worktree
+ * @param commitHashes - Array of commit hashes to verify
+ * @param emitter - Optional event emitter for lifecycle events
+ * @returns The first invalid commit hash, or null if all are valid
+ */
+export async function verifyCommits(
+  worktreePath: string,
+  commitHashes: string[],
+  emitter?: EventEmitter
+): Promise<string | null> {
+  for (const hash of commitHashes) {
+    try {
+      await execGitCommand(['rev-parse', '--verify', hash], worktreePath);
+    } catch {
+      emitter?.emit('cherry-pick:verify-failed', { worktreePath, hash });
+      return hash;
+    }
+  }
+  return null;
+}
+
+/**
+ * Run the cherry-pick operation on the given worktree.
+ *
+ * @param worktreePath - Path to the git worktree
+ * @param commitHashes - Array of commit hashes to cherry-pick (in order)
+ * @param options - Cherry-pick options (e.g., noCommit)
+ * @param emitter - Optional event emitter for lifecycle events
+ * @returns CherryPickResult with success/failure information
+ */
+export async function runCherryPick(
+  worktreePath: string,
+  commitHashes: string[],
+  options?: CherryPickOptions,
+  emitter?: EventEmitter
+): Promise<CherryPickResult> {
+  const args = ['cherry-pick'];
+  if (options?.noCommit) {
+    args.push('--no-commit');
+  }
+  args.push(...commitHashes);
+
+  emitter?.emit('cherry-pick:started', { worktreePath, commitHashes });
+
+  try {
+    await execGitCommand(args, worktreePath);
+
+    const branch = await getCurrentBranch(worktreePath);
+
+    if (options?.noCommit) {
+      const result: CherryPickResult = {
+        success: true,
+        cherryPicked: false,
+        commitHashes,
+        branch,
+        message: `Staged changes from ${commitHashes.length} commit(s); no commit created due to --no-commit`,
+      };
+      emitter?.emit('cherry-pick:success', { worktreePath, commitHashes, branch });
+      return result;
+    }
+
+    const result: CherryPickResult = {
+      success: true,
+      cherryPicked: true,
+      commitHashes,
+      branch,
+      message: `Successfully cherry-picked ${commitHashes.length} commit(s)`,
+    };
+    emitter?.emit('cherry-pick:success', { worktreePath, commitHashes, branch });
+    return result;
+  } catch (cherryPickError: unknown) {
+    // Check if this is a cherry-pick conflict
+    const err = cherryPickError as { stdout?: string; stderr?: string; message?: string };
+    const output = `${err.stdout || ''} ${err.stderr || ''} ${err.message || ''}`;
+    const hasConflicts =
+      output.includes('CONFLICT') ||
+      output.includes('cherry-pick failed') ||
+      output.includes('could not apply');
+
+    if (hasConflicts) {
+      // Abort the cherry-pick to leave the repo in a clean state
+      const aborted = await abortCherryPick(worktreePath, emitter);
+
+      if (!aborted) {
+        logger.error(
+          'Failed to abort cherry-pick after conflict; repository may be in a dirty state',
+          { worktreePath }
+        );
+      }
+
+      emitter?.emit('cherry-pick:conflict', {
+        worktreePath,
+        commitHashes,
+        aborted,
+        stdout: err.stdout,
+        stderr: err.stderr,
+      });
+
+      return {
+        success: false,
+        error: aborted
+          ? 'Cherry-pick aborted due to conflicts; no changes were applied.'
+          : 'Cherry-pick failed due to conflicts and the abort also failed; repository may be in a dirty state.',
+        hasConflicts: true,
+        aborted,
+      };
+    }
+
+    // Non-conflict error - propagate
+    throw cherryPickError;
+  }
+}
+
+/**
+ * Abort an in-progress cherry-pick operation.
+ *
+ * @param worktreePath - Path to the git worktree
+ * @param emitter - Optional event emitter for lifecycle events
+ * @returns true if abort succeeded, false if it failed (logged as warning)
+ */
+export async function abortCherryPick(
+  worktreePath: string,
+  emitter?: EventEmitter
+): Promise<boolean> {
+  try {
+    await execGitCommand(['cherry-pick', '--abort'], worktreePath);
+    emitter?.emit('cherry-pick:abort', { worktreePath, aborted: true });
+    return true;
+  } catch (err: unknown) {
+    const error = err as { message?: string };
+    logger.warn('Failed to abort cherry-pick after conflict');
+    emitter?.emit('cherry-pick:abort', {
+      worktreePath,
+      aborted: false,
+      error: error.message ?? 'Unknown error during cherry-pick abort',
+    });
+    return false;
+  }
+}
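
A caller-side sketch of the conflict/abort contract described above; the worktree path and hashes are placeholders.

import { verifyCommits, runCherryPick } from './cherry-pick-service.js';

async function cherryPickInto(worktree: string, hashes: string[]): Promise<void> {
  const invalid = await verifyCommits(worktree, hashes);
  if (invalid) throw new Error(`Unknown commit: ${invalid}`);

  const result = await runCherryPick(worktree, hashes, { noCommit: false });
  if (!result.success && result.hasConflicts) {
    // runCherryPick already attempted `git cherry-pick --abort`;
    // `aborted` says whether the worktree was restored to a clean state.
    console.error(result.error, { aborted: result.aborted });
  }
}
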
diff --git a/apps/server/src/services/claude-usage-service.ts b/apps/server/src/services/claude-usage-service.ts
index 40cffd7f7..ffb076319 100644
--- a/apps/server/src/services/claude-usage-service.ts
+++ b/apps/server/src/services/claude-usage-service.ts
@@ -295,7 +295,6 @@
     }
     // Don't fail if we have data - return it instead
     // Check cleaned output since raw output has ANSI codes between words
-    // eslint-disable-next-line no-control-regex
     const cleanedForCheck = output
       .replace(/\x1B\[(\d+)C/g, (_m: string, n: string) => ' '.repeat(parseInt(n, 10)))
       .replace(/\x1B\[[0-9;?]*[A-Za-z@]/g, '');
@@ -332,7 +331,6 @@
     // Convert cursor forward (ESC[nC) to spaces first to preserve word boundaries,
     // then strip remaining ANSI sequences. Without this, the Claude CLI TUI output
     // like "Current week (all models)" becomes "Currentweek(allmodels)".
-    // eslint-disable-next-line no-control-regex
     const cleanOutput = output
       .replace(/\x1B\[(\d+)C/g, (_match: string, n: string) => ' '.repeat(parseInt(n, 10)))
       .replace(/\x1B\[[0-9;?]*[A-Za-z@]/g, '');
@@ -492,7 +490,6 @@
     // First, convert cursor movement sequences to whitespace to preserve word boundaries.
     // The Claude CLI TUI uses ESC[nC (cursor forward) instead of actual spaces between words.
     // Without this, "Current week (all models)" becomes "Currentweek(allmodels)" after stripping.
-    // eslint-disable-next-line no-control-regex
     let clean = text
       // Cursor forward (CSI n C): replace with n spaces to preserve word separation
       .replace(/\x1B\[(\d+)C/g, (_match, n) => ' '.repeat(parseInt(n, 10)))
diff --git a/apps/server/src/services/commit-log-service.ts b/apps/server/src/services/commit-log-service.ts
new file mode 100644
index 000000000..14cb21d0b
--- /dev/null
+++ b/apps/server/src/services/commit-log-service.ts
@@ -0,0 +1,161 @@
+/**
+ * Service for fetching commit log data from a worktree.
+ *
+ * Extracts the heavy Git command execution and parsing logic from the
+ * commit-log route handler so the handler only validates input,
+ * invokes this service, streams lifecycle events, and sends the response.
+ *
+ * Follows the same approach as branch-commit-log-service: a single
+ * `git log --name-only` call with custom separators to fetch both
+ * commit metadata and file lists, avoiding N+1 git invocations.
+ */
+
+import { execGitCommand } from '../lib/git.js';
+
+// ============================================================================
+// Types
+// ============================================================================
+
+export interface CommitLogEntry {
+  hash: string;
+  shortHash: string;
+  author: string;
+  authorEmail: string;
+  date: string;
+  subject: string;
+  body: string;
+  files: string[];
+}
+
+export interface CommitLogResult {
+  branch: string;
+  commits: CommitLogEntry[];
+  total: number;
+}
+
+// ============================================================================
+// Service
+// ============================================================================
+
+/**
+ * Fetch the commit log for a worktree (HEAD).
+ *
+ * Runs a single `git log --name-only` invocation plus `git rev-parse`
+ * inside the given worktree path and returns a structured result.
+ *
+ * @param worktreePath - Absolute path to the worktree / repository
+ * @param limit - Maximum number of commits to return (clamped 1-100)
+ */
+export async function getCommitLog(worktreePath: string, limit: number): Promise<CommitLogResult> {
+  // Clamp limit to a reasonable range
+  const parsedLimit = Number(limit);
+  const commitLimit = Math.min(Math.max(1, Number.isFinite(parsedLimit) ? parsedLimit : 20), 100);
+
+  // Use custom separators to parse both metadata and file lists from
+  // a single git log invocation (same approach as branch-commit-log-service).
+  //
+  // -m causes merge commits to be diffed against each parent so all
+  // files touched by the merge are listed (without -m, --name-only
+  // produces no file output for merge commits because they have 2+ parents).
+  // This means merge commits appear multiple times in the output (once per
+  // parent), so we deduplicate by hash below and merge their file lists.
+  // We over-fetch (2x the limit) to compensate for -m duplicating merge
+  // commit entries, then trim the result to the requested limit.
+  // Use ASCII control characters as record separators – these cannot appear in
+  // git commit messages, so these delimiters are safe regardless of commit
+  // body content. %x00 and %x01 in git's format string emit literal NUL /
+  // SOH bytes respectively.
+  //
+  // COMMIT_SEP (\x00) – marks the start of each commit record.
+  // META_END (\x01) – separates commit metadata from the --name-only file list.
+  //
+  // Full per-commit layout emitted by git:
+  //   \x00\n<hash>\n<shortHash>\n<author>\n<authorEmail>\n<date>\n<subject>\n<body>\x01<file list>
+  const COMMIT_SEP = '\x00';
+  const META_END = '\x01';
+  const fetchLimit = commitLimit * 2;
+
+  const logOutput = await execGitCommand(
+    [
+      'log',
+      `--max-count=${fetchLimit}`,
+      '-m',
+      '--name-only',
+      `--format=%x00%n%H%n%h%n%an%n%ae%n%aI%n%s%n%b%x01`,
+    ],
+    worktreePath
+  );
+
+  // Split output into per-commit blocks and drop the empty first chunk
+  // (the output starts with a NUL commit separator).
+  const commitBlocks = logOutput.split(COMMIT_SEP).filter((block) => block.trim());
+
+  // Use a Map to deduplicate merge commit entries (which appear once per
+  // parent when -m is used) while preserving insertion order.
+  const commitMap = new Map<string, CommitLogEntry>();
+
+  for (const block of commitBlocks) {
+    const metaEndIdx = block.indexOf(META_END);
+    if (metaEndIdx === -1) continue; // malformed block, skip
+
+    // --- Parse metadata (everything before the META_END delimiter) ---
+    const metaRaw = block.substring(0, metaEndIdx);
+    const metaLines = metaRaw.split('\n');
+
+    // The first line may be empty (newline right after COMMIT_SEP), skip it
+    const nonEmptyStart = metaLines.findIndex((l) => l.trim() !== '');
+    if (nonEmptyStart === -1) continue;
+
+    const fields = metaLines.slice(nonEmptyStart);
+    if (fields.length < 6) continue; // need at least hash..subject
+
+    const hash = fields[0].trim();
+    if (!hash) continue; // defensive: skip if hash is empty
+    const shortHash = fields[1]?.trim() ?? '';
+    const author = fields[2]?.trim() ?? '';
+    const authorEmail = fields[3]?.trim() ?? '';
+    const date = fields[4]?.trim() ?? '';
+    const subject = fields[5]?.trim() ?? '';
+    const body = fields.slice(6).join('\n').trim();
+
+    // --- Parse file list (everything after the META_END delimiter) ---
+    const filesRaw = block.substring(metaEndIdx + META_END.length);
+    const blockFiles = filesRaw
+      .trim()
+      .split('\n')
+      .filter((f) => f.trim());
+
+    // Merge file lists for duplicate entries (merge commits with -m)
+    const existing = commitMap.get(hash);
+    if (existing) {
+      // Add new files to the existing entry's file set
+      const fileSet = new Set(existing.files);
+      for (const f of blockFiles) fileSet.add(f);
+      existing.files = [...fileSet];
+    } else {
+      commitMap.set(hash, {
+        hash,
+        shortHash,
+        author,
+        authorEmail,
+        date,
+        subject,
+        body,
+        files: [...new Set(blockFiles)],
+      });
+    }
+  }
+
+  // Trim to the requested limit (we over-fetched to account for -m duplicates)
+  const commits = [...commitMap.values()].slice(0, commitLimit);
+
+  // Get current branch name
+  const branchOutput = await execGitCommand(['rev-parse', '--abbrev-ref', 'HEAD'], worktreePath);
+  const branch = branchOutput.trim();
+
+  return {
+    branch,
+    commits,
+    total: commits.length,
+  };
+}
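
A brief usage sketch; note that out-of-range or non-numeric limits are clamped rather than rejected (the path is illustrative).

import { getCommitLog } from './commit-log-service.js';

async function printRecentCommits(): Promise<void> {
  // The limit is clamped to [1, 100]; a non-numeric value falls back to 20.
  const log = await getCommitLog('/repos/app', 500); // effectively 100
  console.log(`${log.total} commits on ${log.branch}`);
  for (const c of log.commits) {
    console.log(`${c.shortHash} ${c.subject} (${c.files.length} files)`);
  }
}
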
diff --git a/apps/server/src/services/dev-server-service.ts b/apps/server/src/services/dev-server-service.ts
index 76cf31748..13281dc1b 100644
--- a/apps/server/src/services/dev-server-service.ts
+++ b/apps/server/src/services/dev-server-service.ts
@@ -246,7 +246,7 @@
           // No process found on port, which is fine
         }
       }
-    } catch (error) {
+    } catch {
       // Ignore errors - port might not have any process
       logger.debug(`No process to kill on port ${port}`);
     }
diff --git a/apps/server/src/services/event-history-service.ts b/apps/server/src/services/event-history-service.ts
index b983af09b..a70917251 100644
--- a/apps/server/src/services/event-history-service.ts
+++ b/apps/server/src/services/event-history-service.ts
@@ -13,12 +13,7 @@
 import { createLogger } from '@automaker/utils';
 import * as secureFs from '../lib/secure-fs.js';
-import {
-  getEventHistoryDir,
-  getEventHistoryIndexPath,
-  getEventPath,
-  ensureEventHistoryDir,
-} from '@automaker/platform';
+import { getEventHistoryIndexPath, getEventPath, ensureEventHistoryDir } from '@automaker/platform';
 import type {
   StoredEvent,
   StoredEventIndex,
diff --git a/apps/server/src/services/execution-service.ts b/apps/server/src/services/execution-service.ts
index f7a51ace9..3ebce443d 100644
--- a/apps/server/src/services/execution-service.ts
+++ b/apps/server/src/services/execution-service.ts
@@ -20,7 +20,6 @@
 import type { TypedEventBus } from './typed-event-bus.js';
 import type { ConcurrencyManager, RunningFeature } from './concurrency-manager.js';
 import type { WorktreeResolver } from './worktree-resolver.js';
 import type { SettingsService } from './settings-service.js';
-import type { PipelineContext } from './pipeline-orchestrator.js';
 import { pipelineService } from './pipeline-service.js';

 // Re-export callback types from execution-types.ts for backward compatibility
diff --git a/apps/server/src/services/feature-export-service.ts b/apps/server/src/services/feature-export-service.ts
index a58b65276..bd741dc22 100644
--- a/apps/server/src/services/feature-export-service.ts
+++ b/apps/server/src/services/feature-export-service.ts
@@ -205,7 +205,6 @@
     importData: FeatureImport
   ): Promise {
     const warnings: string[] = [];
-    const errors: string[] = [];

     try {
       // Extract feature from data (handle both raw Feature and wrapped FeatureExport)
diff --git a/apps/server/src/services/feature-loader.ts b/apps/server/src/services/feature-loader.ts
index b40a85f07..941194b71 100644
--- a/apps/server/src/services/feature-loader.ts
+++ b/apps/server/src/services/feature-loader.ts
@@ -195,9 +195,10 @@
     }

     // Read all feature directories
+    // secureFs.readdir returns Dirent[] but typed as generic; cast to access isDirectory()
     const entries = (await secureFs.readdir(featuresDir, {
       withFileTypes: true,
-    })) as any[];
+    })) as import('fs').Dirent[];
     const featureDirs = entries.filter((entry) => entry.isDirectory());

     // Load all features concurrently with automatic recovery from backups
diff --git a/apps/server/src/services/gemini-usage-service.ts b/apps/server/src/services/gemini-usage-service.ts
new file mode 100644
index 000000000..fba8bda34
--- /dev/null
+++ b/apps/server/src/services/gemini-usage-service.ts
@@ -0,0 +1,817 @@
+/**
+ * Gemini Usage Service
+ *
+ * Service for tracking Gemini CLI usage and quota.
+ * Uses the internal Google Cloud quota API (same as CodexBar).
+ * See: https://github.com/steipete/CodexBar/blob/main/docs/gemini.md
+ *
+ * OAuth credentials are extracted from the Gemini CLI installation,
+ * not hardcoded, to ensure compatibility with CLI updates.
+ */
+
+import { createLogger } from '@automaker/utils';
+import * as fs from 'fs';
+import * as path from 'path';
+import * as os from 'os';
+import { execFileSync } from 'child_process';
+
+const logger = createLogger('GeminiUsage');
+
+// Quota API endpoint (internal Google Cloud API)
+const QUOTA_API_URL = 'https://cloudcode-pa.googleapis.com/v1internal:retrieveUserQuota';
+
+// Code Assist endpoint for getting project ID and tier info
+const CODE_ASSIST_URL = 'https://cloudcode-pa.googleapis.com/v1internal:loadCodeAssist';
+
+// Google OAuth endpoints for token refresh
+const GOOGLE_TOKEN_URL = 'https://oauth2.googleapis.com/token';
+
+/** Default timeout for fetch requests in milliseconds */
+const FETCH_TIMEOUT_MS = 10_000;
+
+/** TTL for cached credentials in milliseconds (5 minutes) */
+const CREDENTIALS_CACHE_TTL_MS = 5 * 60 * 1000;
+
+export interface GeminiQuotaBucket {
+  /** Model ID this quota applies to */
+  modelId: string;
+  /** Remaining fraction (0-1) */
+  remainingFraction: number;
+  /** ISO-8601 reset time */
+  resetTime: string;
+}
+
+/** Simplified quota info for a model tier (Flash or Pro) */
+export interface GeminiTierQuota {
+  /** Used percentage (0-100) */
+  usedPercent: number;
+  /** Remaining percentage (0-100) */
+  remainingPercent: number;
+  /** Reset time as human-readable string */
+  resetText?: string;
+  /** ISO-8601 reset time */
+  resetTime?: string;
+}
+
+export interface GeminiUsageData {
+  /** Whether authenticated via CLI */
+  authenticated: boolean;
+  /** Authentication method */
+  authMethod: 'cli_login' | 'api_key' | 'none';
+  /** Usage percentage (100 - remainingFraction * 100) - overall most constrained */
+  usedPercent: number;
+  /** Remaining percentage - overall most constrained */
+  remainingPercent: number;
+  /** Reset time as human-readable string */
+  resetText?: string;
+  /** ISO-8601 reset time */
+  resetTime?: string;
+  /** Model ID with lowest remaining quota */
+  constrainedModel?: string;
+  /** Flash tier quota (aggregated from all flash models) */
+  flashQuota?: GeminiTierQuota;
+  /** Pro tier quota (aggregated from all pro models) */
+  proQuota?: GeminiTierQuota;
+  /** Raw quota buckets for detailed view */
+  quotaBuckets?: GeminiQuotaBucket[];
+  /** When this data was last fetched */
+  lastUpdated: string;
+  /** Optional error message */
+  error?: string;
+}
+
+interface OAuthCredentials {
+  access_token?: string;
+  id_token?: string;
+  refresh_token?: string;
+  token_type?: string;
+  expiry_date?: number;
+  client_id?: string;
+  client_secret?: string;
+}
+
+interface OAuthClientCredentials {
+  clientId: string;
+  clientSecret: string;
+}
+
+interface QuotaResponse {
+  // The actual API returns 'buckets', not 'quotaBuckets'
+  buckets?: Array<{
+    modelId?: string;
+    remainingFraction?: number;
+    resetTime?: string;
+    tokenType?: string;
+  }>;
+  // Legacy field name (in case API changes)
+  quotaBuckets?: Array<{
+    modelId?: string;
+    remainingFraction?: number;
+    resetTime?: string;
+    tokenType?: string;
+  }>;
+}
+
+/**
+ * Gemini Usage Service
+ *
+ * Provides real usage/quota data for Gemini CLI users.
+ * Extracts OAuth credentials from the Gemini CLI installation.
+ */
+export class GeminiUsageService {
+  private cachedCredentials: OAuthCredentials | null = null;
+  private cachedCredentialsAt: number | null = null;
+  private cachedClientCredentials: OAuthClientCredentials | null = null;
+  private credentialsPath: string;
+  /** The actual path from which credentials were loaded (for write-back) */
+  private loadedCredentialsPath: string | null = null;
+
+  constructor() {
+    // Default credentials path for Gemini CLI
+    this.credentialsPath = path.join(os.homedir(), '.gemini', 'oauth_creds.json');
+  }
+
+  /**
+   * Check if Gemini CLI is authenticated
+   */
+  async isAvailable(): Promise<boolean> {
+    const creds = await this.loadCredentials();
+    return Boolean(creds?.access_token || creds?.refresh_token);
+  }
+
+  /**
+   * Fetch quota/usage data from Google Cloud API
+   */
+  async fetchUsageData(): Promise<GeminiUsageData> {
+    logger.info('[fetchUsageData] Starting...');
+
+    const creds = await this.loadCredentials();
+
+    if (!creds || (!creds.access_token && !creds.refresh_token)) {
+      logger.info('[fetchUsageData] No credentials found');
+      return {
+        authenticated: false,
+        authMethod: 'none',
+        usedPercent: 0,
+        remainingPercent: 100,
+        lastUpdated: new Date().toISOString(),
+        error: 'Not authenticated. Run "gemini auth login" to authenticate.',
+      };
+    }
+
+    try {
+      // Get a valid access token (refresh if needed)
+      const accessToken = await this.getValidAccessToken(creds);
+
+      if (!accessToken) {
+        return {
+          authenticated: false,
+          authMethod: 'none',
+          usedPercent: 0,
+          remainingPercent: 100,
+          lastUpdated: new Date().toISOString(),
+          error: 'Failed to obtain access token. Try running "gemini auth login" again.',
+        };
+      }
+
+      // First, get the project ID from loadCodeAssist endpoint
+      // This is required to get accurate quota data
+      let projectId: string | undefined;
+      try {
+        const codeAssistResponse = await fetch(CODE_ASSIST_URL, {
+          method: 'POST',
+          headers: {
+            Authorization: `Bearer ${accessToken}`,
+            'Content-Type': 'application/json',
+          },
+          body: JSON.stringify({}),
+          signal: AbortSignal.timeout(FETCH_TIMEOUT_MS),
+        });
+
+        if (codeAssistResponse.ok) {
+          const codeAssistData = (await codeAssistResponse.json()) as {
+            cloudaicompanionProject?: string;
+            currentTier?: { id?: string; name?: string };
+          };
+          projectId = codeAssistData.cloudaicompanionProject;
+          logger.debug('[fetchUsageData] Got project ID:', projectId);
+        }
+      } catch (e) {
+        logger.debug('[fetchUsageData] Failed to get project ID:', e);
+      }
+
+      // Fetch quota from Google Cloud API
+      // Pass project ID to get accurate quota (without it, returns default 100%)
+      const response = await fetch(QUOTA_API_URL, {
+        method: 'POST',
+        headers: {
+          Authorization: `Bearer ${accessToken}`,
+          'Content-Type': 'application/json',
+        },
+        body: JSON.stringify(projectId ? { project: projectId } : {}),
+        signal: AbortSignal.timeout(FETCH_TIMEOUT_MS),
+      });
+
+      if (!response.ok) {
+        const errorText = await response.text().catch(() => '');
+        logger.error('[fetchUsageData] Quota API error:', response.status, errorText);
+
+        // Still authenticated, but quota API failed
+        return {
+          authenticated: true,
+          authMethod: 'cli_login',
+          usedPercent: 0,
+          remainingPercent: 100,
+          lastUpdated: new Date().toISOString(),
+          error: `Quota API unavailable (${response.status})`,
+        };
+      }
+
+      const data = (await response.json()) as QuotaResponse;
+
+      // API returns 'buckets', with fallback to 'quotaBuckets' for compatibility
+      const apiBuckets = data.buckets || data.quotaBuckets;
+
+      logger.debug('[fetchUsageData] Raw buckets:', JSON.stringify(apiBuckets));
+
+      if (!apiBuckets || apiBuckets.length === 0) {
+        return {
+          authenticated: true,
+          authMethod: 'cli_login',
+          usedPercent: 0,
+          remainingPercent: 100,
+          lastUpdated: new Date().toISOString(),
+        };
+      }
+
+      // Group buckets into Flash and Pro tiers
+      // Flash: any model with "flash" in the name
+      // Pro: any model with "pro" in the name
+      let flashLowestRemaining = 1.0;
+      let flashResetTime: string | undefined;
+      let hasFlashModels = false;
+      let proLowestRemaining = 1.0;
+      let proResetTime: string | undefined;
+      let hasProModels = false;
+      let overallLowestRemaining = 1.0;
+      let constrainedModel: string | undefined;
+      let overallResetTime: string | undefined;
+
+      const quotaBuckets: GeminiQuotaBucket[] = apiBuckets.map((bucket) => {
+        const remaining = bucket.remainingFraction ?? 1.0;
+        const modelId = bucket.modelId?.toLowerCase() || '';
+
+        // Track overall lowest
+        if (remaining < overallLowestRemaining) {
+          overallLowestRemaining = remaining;
+          constrainedModel = bucket.modelId;
+          overallResetTime = bucket.resetTime;
+        }
+
+        // Group into Flash or Pro tier
+        if (modelId.includes('flash')) {
+          hasFlashModels = true;
+          if (remaining < flashLowestRemaining) {
+            flashLowestRemaining = remaining;
+            flashResetTime = bucket.resetTime;
+          }
+          // Also track reset time even if at 100%
+          if (!flashResetTime && bucket.resetTime) {
+            flashResetTime = bucket.resetTime;
+          }
+        } else if (modelId.includes('pro')) {
+          hasProModels = true;
+          if (remaining < proLowestRemaining) {
+            proLowestRemaining = remaining;
+            proResetTime = bucket.resetTime;
+          }
+          // Also track reset time even if at 100%
+          if (!proResetTime && bucket.resetTime) {
+            proResetTime = bucket.resetTime;
+          }
+        }
+
+        return {
+          modelId: bucket.modelId || 'unknown',
+          remainingFraction: remaining,
+          resetTime: bucket.resetTime || '',
+        };
+      });
+
+      const usedPercent = Math.round((1 - overallLowestRemaining) * 100);
+      const remainingPercent = Math.round(overallLowestRemaining * 100);
+
+      // Build tier quotas (only include if we found models for that tier)
+      const flashQuota: GeminiTierQuota | undefined = hasFlashModels
+        ? {
+            usedPercent: Math.round((1 - flashLowestRemaining) * 100),
+            remainingPercent: Math.round(flashLowestRemaining * 100),
+            resetText: flashResetTime ? this.formatResetTime(flashResetTime) : undefined,
+            resetTime: flashResetTime,
+          }
+        : undefined;
+
+      const proQuota: GeminiTierQuota | undefined = hasProModels
+        ? {
+            usedPercent: Math.round((1 - proLowestRemaining) * 100),
+            remainingPercent: Math.round(proLowestRemaining * 100),
+            resetText: proResetTime ? this.formatResetTime(proResetTime) : undefined,
+            resetTime: proResetTime,
+          }
+        : undefined;
+
+      return {
+        authenticated: true,
+        authMethod: 'cli_login',
+        usedPercent,
+        remainingPercent,
+        resetText: overallResetTime ? this.formatResetTime(overallResetTime) : undefined,
+        resetTime: overallResetTime,
+        constrainedModel,
+        flashQuota,
+        proQuota,
+        quotaBuckets,
+        lastUpdated: new Date().toISOString(),
+      };
+    } catch (error) {
+      const errorMsg = error instanceof Error ? error.message : 'Unknown error';
+      logger.error('[fetchUsageData] Error:', errorMsg);
+
+      return {
+        authenticated: true,
+        authMethod: 'cli_login',
+        usedPercent: 0,
+        remainingPercent: 100,
+        lastUpdated: new Date().toISOString(),
+        error: `Failed to fetch quota: ${errorMsg}`,
+      };
+    }
+  }
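
The flash/pro grouping above reduces to a small fold over the buckets: track the minimum remaining fraction per tier and overall. A condensed sketch of that aggregation, with the bucket shape simplified for illustration:

interface BucketSketch {
  modelId?: string;
  remainingFraction?: number;
  resetTime?: string;
}

function lowestRemaining(buckets: BucketSketch[], tier: 'flash' | 'pro'): number | null {
  const matching = buckets.filter((b) => (b.modelId ?? '').toLowerCase().includes(tier));
  if (matching.length === 0) return null;
  // A missing remainingFraction is treated as a full quota (1.0), as above.
  return Math.min(...matching.map((b) => b.remainingFraction ?? 1.0));
}

const sampleBuckets: BucketSketch[] = [
  { modelId: 'gemini-2.5-flash', remainingFraction: 0.8 },
  { modelId: 'gemini-2.5-pro', remainingFraction: 0.25 },
];

const proRemaining = lowestRemaining(sampleBuckets, 'pro'); // 0.25
if (proRemaining !== null) {
  console.log(`Pro used: ${Math.round((1 - proRemaining) * 100)}%`); // Pro used: 75%
}
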
this.formatResetTime(proResetTime) : undefined, + resetTime: proResetTime, + } + : undefined; + + return { + authenticated: true, + authMethod: 'cli_login', + usedPercent, + remainingPercent, + resetText: overallResetTime ? this.formatResetTime(overallResetTime) : undefined, + resetTime: overallResetTime, + constrainedModel, + flashQuota, + proQuota, + quotaBuckets, + lastUpdated: new Date().toISOString(), + }; + } catch (error) { + const errorMsg = error instanceof Error ? error.message : 'Unknown error'; + logger.error('[fetchUsageData] Error:', errorMsg); + + return { + authenticated: true, + authMethod: 'cli_login', + usedPercent: 0, + remainingPercent: 100, + lastUpdated: new Date().toISOString(), + error: `Failed to fetch quota: ${errorMsg}`, + }; + } + } + + /** + * Load OAuth credentials from file. + * Implements TTL-based cache invalidation and file mtime checks. + */ + private async loadCredentials(): Promise<OAuthCredentials | null> { + // Check if cached credentials are still valid + if (this.cachedCredentials && this.cachedCredentialsAt) { + const now = Date.now(); + const cacheAge = now - this.cachedCredentialsAt; + + if (cacheAge < CREDENTIALS_CACHE_TTL_MS) { + // Cache is within TTL - also check file mtime + const sourcePath = this.loadedCredentialsPath || this.credentialsPath; + try { + const stat = fs.statSync(sourcePath); + if (stat.mtimeMs <= this.cachedCredentialsAt) { + // File hasn't been modified since we cached - use cache + return this.cachedCredentials; + } + // File has been modified, fall through to re-read + logger.debug('[loadCredentials] File modified since cache, re-reading'); + } catch { + // File doesn't exist or can't stat - use cache + return this.cachedCredentials; + } + } else { + // Cache TTL expired, discard + logger.debug('[loadCredentials] Cache TTL expired, re-reading'); + } + + // Invalidate cached credentials + this.cachedCredentials = null; + this.cachedCredentialsAt = null; + } + + // Build unique possible paths (deduplicate) + const rawPaths = [ + this.credentialsPath, + path.join(os.homedir(), '.config', 'gemini', 'oauth_creds.json'), + ]; + const possiblePaths = [...new Set(rawPaths)]; + + for (const credPath of possiblePaths) { + try { + if (fs.existsSync(credPath)) { + const content = fs.readFileSync(credPath, 'utf8'); + const creds = JSON.parse(content); + + // Handle different credential formats + if (creds.access_token || creds.refresh_token) { + this.cachedCredentials = creds; + this.cachedCredentialsAt = Date.now(); + this.loadedCredentialsPath = credPath; + logger.info('[loadCredentials] Loaded from:', credPath); + return creds; + } + + // Some formats nest credentials under 'web' or 'installed' + if (creds.web?.client_id || creds.installed?.client_id) { + const clientCreds = creds.web || creds.installed; + this.cachedCredentials = { + client_id: clientCreds.client_id, + client_secret: clientCreds.client_secret, + }; + this.cachedCredentialsAt = Date.now(); + this.loadedCredentialsPath = credPath; + return this.cachedCredentials; + } + } + } catch (error) { + logger.debug('[loadCredentials] Failed to load from', credPath, error); + } + } + + return null; + } + + /** + * Find the Gemini CLI binary path + */ + private findGeminiBinaryPath(): string | null { + // Try 'which' on Unix-like systems, 'where' on Windows + const whichCmd = process.platform === 'win32' ? 
'where' : 'which'; + try { + const whichResult = execFileSync(whichCmd, ['gemini'], { + encoding: 'utf8', + timeout: 5000, + stdio: ['pipe', 'pipe', 'pipe'], + }).trim(); + // 'where' on Windows may return multiple lines; take the first + const firstLine = whichResult.split('\n')[0]?.trim(); + if (firstLine && fs.existsSync(firstLine)) { + return firstLine; + } + } catch { + // Ignore errors from 'which'/'where' + } + + // Check common installation paths + const possiblePaths = [ + // npm global installs + path.join(os.homedir(), '.npm-global', 'bin', 'gemini'), + '/usr/local/bin/gemini', + '/usr/bin/gemini', + // Homebrew + '/opt/homebrew/bin/gemini', + '/usr/local/opt/gemini/bin/gemini', + // nvm/fnm node installs + path.join(os.homedir(), '.nvm', 'versions', 'node'), + path.join(os.homedir(), '.fnm', 'node-versions'), + // Windows + path.join(os.homedir(), 'AppData', 'Roaming', 'npm', 'gemini.cmd'), + path.join(os.homedir(), 'AppData', 'Roaming', 'npm', 'gemini'), + ]; + + for (const p of possiblePaths) { + if (fs.existsSync(p)) { + return p; + } + } + + return null; + } + + /** + * Extract OAuth client credentials from Gemini CLI installation + * This mimics CodexBar's approach of finding oauth2.js in the CLI + */ + private extractOAuthClientCredentials(): OAuthClientCredentials | null { + if (this.cachedClientCredentials) { + return this.cachedClientCredentials; + } + + const geminiBinary = this.findGeminiBinaryPath(); + if (!geminiBinary) { + logger.debug('[extractOAuthClientCredentials] Gemini binary not found'); + return null; + } + + // Resolve symlinks to find actual location + let resolvedPath = geminiBinary; + try { + resolvedPath = fs.realpathSync(geminiBinary); + } catch { + // Use original path if realpath fails + } + + const baseDir = path.dirname(resolvedPath); + logger.debug('[extractOAuthClientCredentials] Base dir:', baseDir); + + // Possible locations for oauth2.js relative to the binary + // Based on CodexBar's search patterns + const possibleOAuth2Paths = [ + // npm global install structure + path.join( + baseDir, + '..', + 'lib', + 'node_modules', + '@google', + 'gemini-cli', + 'dist', + 'src', + 'code_assist', + 'oauth2.js' + ), + path.join( + baseDir, + '..', + 'lib', + 'node_modules', + '@google', + 'gemini-cli-core', + 'dist', + 'src', + 'code_assist', + 'oauth2.js' + ), + // Homebrew/libexec structure + path.join( + baseDir, + '..', + 'libexec', + 'lib', + 'node_modules', + '@google', + 'gemini-cli', + 'dist', + 'src', + 'code_assist', + 'oauth2.js' + ), + path.join( + baseDir, + '..', + 'libexec', + 'lib', + 'node_modules', + '@google', + 'gemini-cli-core', + 'dist', + 'src', + 'code_assist', + 'oauth2.js' + ), + // Direct sibling + path.join(baseDir, '..', 'gemini-cli-core', 'dist', 'src', 'code_assist', 'oauth2.js'), + path.join(baseDir, '..', 'gemini-cli', 'dist', 'src', 'code_assist', 'oauth2.js'), + // Alternative node_modules structures + path.join( + baseDir, + '..', + '..', + 'lib', + 'node_modules', + '@google', + 'gemini-cli', + 'dist', + 'src', + 'code_assist', + 'oauth2.js' + ), + path.join( + baseDir, + '..', + '..', + 'lib', + 'node_modules', + '@google', + 'gemini-cli-core', + 'dist', + 'src', + 'code_assist', + 'oauth2.js' + ), + ]; + + for (const oauth2Path of possibleOAuth2Paths) { + try { + const normalizedPath = path.normalize(oauth2Path); + if (fs.existsSync(normalizedPath)) { + logger.debug('[extractOAuthClientCredentials] Found oauth2.js at:', normalizedPath); + const content = fs.readFileSync(normalizedPath, 'utf8'); + const creds = 
this.parseOAuthCredentialsFromSource(content); + if (creds) { + this.cachedClientCredentials = creds; + logger.info('[extractOAuthClientCredentials] Extracted credentials from CLI'); + return creds; + } + } + } catch (error) { + logger.debug('[extractOAuthClientCredentials] Failed to read', oauth2Path, error); + } + } + + // Try finding oauth2.js by searching in node_modules (POSIX only) + if (process.platform !== 'win32') { + try { + const searchBase = path.resolve(baseDir, '..'); + const searchResult = execFileSync( + 'find', + [searchBase, '-name', 'oauth2.js', '-path', '*gemini*', '-path', '*code_assist*'], + { encoding: 'utf8', timeout: 5000, stdio: ['pipe', 'pipe', 'pipe'] } + ) + .trim() + .split('\n')[0]; // Take first result + + if (searchResult && fs.existsSync(searchResult)) { + logger.debug('[extractOAuthClientCredentials] Found via search:', searchResult); + const content = fs.readFileSync(searchResult, 'utf8'); + const creds = this.parseOAuthCredentialsFromSource(content); + if (creds) { + this.cachedClientCredentials = creds; + logger.info( + '[extractOAuthClientCredentials] Extracted credentials from CLI (via search)' + ); + return creds; + } + } + } catch { + // Ignore search errors + } + } + + logger.warn('[extractOAuthClientCredentials] Could not extract credentials from CLI'); + return null; + } + + /** + * Parse OAuth client credentials from oauth2.js source code + */ + private parseOAuthCredentialsFromSource(content: string): OAuthClientCredentials | null { + // Patterns based on CodexBar's regex extraction + // Look for: OAUTH_CLIENT_ID = "..." or const clientId = "..." + const clientIdPatterns = [ + /OAUTH_CLIENT_ID\s*=\s*["']([^"']+)["']/, + /clientId\s*[:=]\s*["']([^"']+)["']/, + /client_id\s*[:=]\s*["']([^"']+)["']/, + /"clientId"\s*:\s*["']([^"']+)["']/, + ]; + + const clientSecretPatterns = [ + /OAUTH_CLIENT_SECRET\s*=\s*["']([^"']+)["']/, + /clientSecret\s*[:=]\s*["']([^"']+)["']/, + /client_secret\s*[:=]\s*["']([^"']+)["']/, + /"clientSecret"\s*:\s*["']([^"']+)["']/, + ]; + + let clientId: string | null = null; + let clientSecret: string | null = null; + + for (const pattern of clientIdPatterns) { + const match = content.match(pattern); + if (match && match[1]) { + clientId = match[1]; + break; + } + } + + for (const pattern of clientSecretPatterns) { + const match = content.match(pattern); + if (match && match[1]) { + clientSecret = match[1]; + break; + } + } + + if (clientId && clientSecret) { + logger.debug('[parseOAuthCredentialsFromSource] Found client credentials'); + return { clientId, clientSecret }; + } + + return null; + } + + /** + * Get a valid access token, refreshing if necessary + */ + private async getValidAccessToken(creds: OAuthCredentials): Promise<string | null> { + // Check if current token is still valid (with 5 min buffer) + if (creds.access_token && creds.expiry_date) { + const now = Date.now(); + if (creds.expiry_date > now + 5 * 60 * 1000) { + logger.debug('[getValidAccessToken] Using existing token (not expired)'); + return creds.access_token; + } + } + + // If we have a refresh token, try to refresh + if (creds.refresh_token) { + // Try to extract credentials from CLI first + const extractedCreds = this.extractOAuthClientCredentials(); + + // Use extracted credentials, then fall back to credentials in file + const clientId = extractedCreds?.clientId || creds.client_id; + const clientSecret = extractedCreds?.clientSecret || creds.client_secret; + + if (!clientId || !clientSecret) { + logger.error('[getValidAccessToken] No client credentials 
available for token refresh'); + // Return existing token even if expired - it might still work + return creds.access_token || null; + } + + try { + logger.debug('[getValidAccessToken] Refreshing token...'); + const response = await fetch(GOOGLE_TOKEN_URL, { + method: 'POST', + headers: { + 'Content-Type': 'application/x-www-form-urlencoded', + }, + body: new URLSearchParams({ + client_id: clientId, + client_secret: clientSecret, + refresh_token: creds.refresh_token, + grant_type: 'refresh_token', + }), + signal: AbortSignal.timeout(FETCH_TIMEOUT_MS), + }); + + if (response.ok) { + const data = (await response.json()) as { access_token?: string; expires_in?: number }; + const newAccessToken = data.access_token; + const expiresIn = data.expires_in || 3600; + + if (newAccessToken) { + logger.info('[getValidAccessToken] Token refreshed successfully'); + + // Update cached credentials + this.cachedCredentials = { + ...creds, + access_token: newAccessToken, + expiry_date: Date.now() + expiresIn * 1000, + }; + this.cachedCredentialsAt = Date.now(); + + // Save back to the file the credentials were loaded from + const writePath = this.loadedCredentialsPath || this.credentialsPath; + try { + fs.writeFileSync(writePath, JSON.stringify(this.cachedCredentials, null, 2)); + } catch (e) { + logger.debug('[getValidAccessToken] Could not save refreshed token:', e); + } + + return newAccessToken; + } + } else { + const errorText = await response.text().catch(() => ''); + logger.error('[getValidAccessToken] Token refresh failed:', response.status, errorText); + } + } catch (error) { + logger.error('[getValidAccessToken] Token refresh error:', error); + } + } + + // Return current access token even if it might be expired + return creds.access_token || null; + } + + /** + * Format reset time as human-readable string + */ + private formatResetTime(isoTime: string): string { + try { + const resetDate = new Date(isoTime); + const now = new Date(); + const diff = resetDate.getTime() - now.getTime(); + + if (diff < 0) { + return 'Resetting soon'; + } + + const minutes = Math.floor(diff / 60000); + const hours = Math.floor(minutes / 60); + + if (hours > 0) { + const remainingMins = minutes % 60; + return remainingMins > 0 ? 
`Resets in ${hours}h ${remainingMins}m` : `Resets in ${hours}h`; + } + + return `Resets in ${minutes}m`; + } catch { + return ''; + } + } + + /** + * Clear cached credentials (useful after logout) + */ + clearCache(): void { + this.cachedCredentials = null; + this.cachedCredentialsAt = null; + this.cachedClientCredentials = null; + } +} + +// Singleton instance +let usageServiceInstance: GeminiUsageService | null = null; + +/** + * Get the singleton instance of GeminiUsageService + */ +export function getGeminiUsageService(): GeminiUsageService { + if (!usageServiceInstance) { + usageServiceInstance = new GeminiUsageService(); + } + return usageServiceInstance; +} diff --git a/apps/server/src/services/ideation-service.ts b/apps/server/src/services/ideation-service.ts index 0d43252fb..9bbea03b7 100644 --- a/apps/server/src/services/ideation-service.ts +++ b/apps/server/src/services/ideation-service.ts @@ -27,7 +27,6 @@ import type { } from '@automaker/types'; import { DEFAULT_IDEATION_CONTEXT_SOURCES } from '@automaker/types'; import { - getIdeationDir, getIdeasDir, getIdeaDir, getIdeaPath, @@ -407,7 +406,9 @@ export class IdeationService { return []; } - const entries = (await secureFs.readdir(ideasDir, { withFileTypes: true })) as any[]; + const entries = (await secureFs.readdir(ideasDir, { + withFileTypes: true, + })) as import('fs').Dirent[]; const ideaDirs = entries.filter((entry) => entry.isDirectory()); const ideas: Idea[] = []; @@ -855,15 +856,26 @@ ${contextSection}${existingWorkSection}`; } return parsed - .map((item: any, index: number) => ({ - id: this.generateId('sug'), - category, - title: item.title || `Suggestion ${index + 1}`, - description: item.description || '', - rationale: item.rationale || '', - priority: item.priority || 'medium', - relatedFiles: item.relatedFiles || [], - })) + .map( + ( + item: { + title?: string; + description?: string; + rationale?: string; + priority?: 'low' | 'medium' | 'high'; + relatedFiles?: string[]; + }, + index: number + ) => ({ + id: this.generateId('sug'), + category, + title: item.title || `Suggestion ${index + 1}`, + description: item.description || '', + rationale: item.rationale || '', + priority: item.priority || ('medium' as const), + relatedFiles: item.relatedFiles || [], + }) + ) .slice(0, count); } catch (error) { logger.warn('Failed to parse JSON response:', error); @@ -1705,7 +1717,9 @@ ${contextSection}${existingWorkSection}`; const results: AnalysisFileInfo[] = []; try { - const entries = (await secureFs.readdir(dirPath, { withFileTypes: true })) as any[]; + const entries = (await secureFs.readdir(dirPath, { + withFileTypes: true, + })) as import('fs').Dirent[]; for (const entry of entries) { if (entry.isDirectory()) { diff --git a/apps/server/src/services/merge-service.ts b/apps/server/src/services/merge-service.ts index 087aa8013..1a814acc8 100644 --- a/apps/server/src/services/merge-service.ts +++ b/apps/server/src/services/merge-service.ts @@ -4,8 +4,9 @@ * Extracted from worktree merge route to allow internal service calls. 
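+ * + * @example + * // Hypothetical usage sketch; paths and branch names are placeholders: + * const result = await performMerge('/repo', 'feature/x', '/repo/.worktrees/feature-x'); + * if (!result.success && result.hasConflicts) { + *   console.log('Conflicts in:', result.conflictFiles); + * }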
*/ -import { createLogger } from '@automaker/utils'; -import { spawnProcess } from '@automaker/platform'; +import { createLogger, isValidBranchName } from '@automaker/utils'; +import { type EventEmitter } from '../lib/events.js'; +import { execGitCommand } from '@automaker/git-utils'; const logger = createLogger('MergeService'); export interface MergeOptions { @@ -18,6 +19,7 @@ export interface MergeServiceResult { success: boolean; error?: string; hasConflicts?: boolean; + conflictFiles?: string[]; mergedBranch?: string; targetBranch?: string; deleted?: { @@ -26,31 +28,6 @@ }; } -/** - * Execute git command with array arguments to prevent command injection. - */ -async function execGitCommand(args: string[], cwd: string): Promise<string> { - const result = await spawnProcess({ - command: 'git', - args, - cwd, - }); - - if (result.exitCode === 0) { - return result.stdout; - } else { - const errorMessage = result.stderr || `Git command failed with code ${result.exitCode}`; - throw new Error(errorMessage); - } -} - -/** - * Validate branch name to prevent command injection. - */ -function isValidBranchName(name: string): boolean { - return /^[a-zA-Z0-9._\-/]+$/.test(name) && name.length < 250; -} - /** * Perform a git merge operation directly without HTTP. * @@ -65,7 +42,8 @@ export async function performMerge( branchName: string, worktreePath: string, targetBranch: string = 'main', - options?: MergeOptions + options?: MergeOptions, + emitter?: EventEmitter ): Promise<MergeServiceResult> { if (!projectPath || !branchName || !worktreePath) { return { @@ -110,6 +88,9 @@ }; } + // Emit merge:start after validating inputs + emitter?.emit('merge:start', { branchName, targetBranch: mergeTo, worktreePath }); + // Merge the feature branch into the target branch (using safe array-based commands) const mergeMessage = options?.message || `Merge ${branchName} into ${mergeTo}`; const mergeArgs = options?.squash @@ -117,21 +98,107 @@ : ['merge', branchName, '-m', mergeMessage]; try { - await execGitCommand(mergeArgs, projectPath); + // Set LC_ALL=C so git always emits English output regardless of the system + // locale, making text-based conflict detection reliable. + await execGitCommand(mergeArgs, projectPath, { LC_ALL: 'C' }); } catch (mergeError: unknown) { - // Check if this is a merge conflict + // Check if this is a merge conflict. We use a multi-layer strategy so + // that detection is reliable even when locale settings vary or git's text + // output changes across versions: + // + // 1. Primary (text-based): scan the error output for well-known English + // conflict markers. Because we pass LC_ALL=C above these strings are + // always in English, but we keep the check as one layer among several. + // + // 2. Unmerged-path check: run `git diff --name-only --diff-filter=U` + // (locale-stable) and treat any non-empty output as a conflict + // indicator, capturing the file list at the same time. + // + // 3. Fallback status check: run `git status --porcelain` and look for + // lines whose first two characters indicate an unmerged state + // (UU, AA, DD, AU, UA, DU, UD). + // + // hasConflicts is true when ANY of the three layers returns positive. 
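+ // For reference, an illustrative porcelain line for a both-modified file is: + //   UU src/example.ts + // (hypothetical path); the two-character prefix is what Layer 3 matches below.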
const err = mergeError as { stdout?: string; stderr?: string; message?: string }; const output = `${err.stdout || ''} ${err.stderr || ''} ${err.message || ''}`; - const hasConflicts = output.includes('CONFLICT') || output.includes('Automatic merge failed'); + + // Layer 1 – text matching (locale-safe because we set LC_ALL=C above). + const textIndicatesConflict = + output.includes('CONFLICT') || output.includes('Automatic merge failed'); + + // Layers 2 & 3 – repository state inspection (locale-independent). + // Layer 2: get conflicted files via diff (also locale-stable output). + let conflictFiles: string[] | undefined; + let diffIndicatesConflict = false; + try { + const diffOutput = await execGitCommand( + ['diff', '--name-only', '--diff-filter=U'], + projectPath, + { LC_ALL: 'C' } + ); + const files = diffOutput + .trim() + .split('\n') + .filter((f) => f.trim().length > 0); + if (files.length > 0) { + diffIndicatesConflict = true; + conflictFiles = files; + } + } catch { + // If we can't get the file list, leave conflictFiles undefined so callers + // can distinguish "no conflicts" (empty array) from "unknown due to diff failure" (undefined) + } + + // Layer 3: check for unmerged paths via machine-readable git status. + let hasUnmergedPaths = false; + try { + const statusOutput = await execGitCommand(['status', '--porcelain'], projectPath, { + LC_ALL: 'C', + }); + // Unmerged status codes occupy the first two characters of each line. + // Standard unmerged codes: UU, AA, DD, AU, UA, DU, UD. + const unmergedLines = statusOutput + .split('\n') + .filter((line) => /^(UU|AA|DD|AU|UA|DU|UD)/.test(line)); + hasUnmergedPaths = unmergedLines.length > 0; + + // If Layer 2 did not populate conflictFiles (e.g. diff failed or returned + // nothing) but Layer 3 does detect unmerged paths, parse the status lines + // to extract filenames and assign them to conflictFiles so callers always + // receive an accurate file list when conflicts are present. + if (hasUnmergedPaths && conflictFiles === undefined) { + const parsedFiles = unmergedLines + .map((line) => line.slice(2).trim()) + .filter((f) => f.length > 0); + // Deduplicate (e.g. rename entries can appear twice) + conflictFiles = [...new Set(parsedFiles)]; + } + } catch { + // git status failing is itself a sign something is wrong; leave + // hasUnmergedPaths as false and rely on the other layers. + } + + const hasConflicts = textIndicatesConflict || diffIndicatesConflict || hasUnmergedPaths; if (hasConflicts) { + // Emit merge:conflict event with conflict details + emitter?.emit('merge:conflict', { branchName, targetBranch: mergeTo, conflictFiles }); + return { success: false, error: `Merge CONFLICT: Automatic merge of "${branchName}" into "${mergeTo}" failed. 
Please resolve conflicts manually.`, hasConflicts: true, + conflictFiles, }; } + // Emit merge:error for non-conflict errors before re-throwing + emitter?.emit('merge:error', { + branchName, + targetBranch: mergeTo, + error: err.message || String(mergeError), + }); + // Re-throw non-conflict errors throw mergeError; } @@ -139,7 +206,18 @@ export async function performMerge( // If squash merge, need to commit (using safe array-based command) if (options?.squash) { const squashMessage = options?.message || `Merge ${branchName} (squash)`; - await execGitCommand(['commit', '-m', squashMessage], projectPath); + try { + await execGitCommand(['commit', '-m', squashMessage], projectPath); + } catch (commitError: unknown) { + const err = commitError as { message?: string }; + // Emit merge:error so subscribers always receive either merge:success or merge:error + emitter?.emit('merge:error', { + branchName, + targetBranch: mergeTo, + error: err.message || String(commitError), + }); + throw commitError; + } } // Optionally delete the worktree and branch after merging @@ -163,19 +241,22 @@ export async function performMerge( // Delete the branch (but not main/master) if (branchName !== 'main' && branchName !== 'master') { - if (!isValidBranchName(branchName)) { - logger.warn(`Invalid branch name detected, skipping deletion: ${branchName}`); - } else { - try { - await execGitCommand(['branch', '-D', branchName], projectPath); - branchDeleted = true; - } catch { - logger.warn(`Failed to delete branch: ${branchName}`); - } + try { + await execGitCommand(['branch', '-D', branchName], projectPath); + branchDeleted = true; + } catch { + logger.warn(`Failed to delete branch: ${branchName}`); } } } + // Emit merge:success with merged branch, target branch, and deletion info + emitter?.emit('merge:success', { + mergedBranch: branchName, + targetBranch: mergeTo, + deleted: options?.deleteWorktreeAndBranch ? { worktreeDeleted, branchDeleted } : undefined, + }); + return { success: true, mergedBranch: branchName, diff --git a/apps/server/src/services/pipeline-orchestrator.ts b/apps/server/src/services/pipeline-orchestrator.ts index 4308825b8..ba30e65f6 100644 --- a/apps/server/src/services/pipeline-orchestrator.ts +++ b/apps/server/src/services/pipeline-orchestrator.ts @@ -519,7 +519,8 @@ export class PipelineOrchestrator { targetBranch || 'main', { deleteWorktreeAndBranch: false, - } + }, + this.eventBus.getUnderlyingEmitter() ); if (!result.success) { diff --git a/apps/server/src/services/pull-service.ts b/apps/server/src/services/pull-service.ts new file mode 100644 index 000000000..ab217c2bd --- /dev/null +++ b/apps/server/src/services/pull-service.ts @@ -0,0 +1,452 @@ +/** + * PullService - Pull git operations without HTTP + * + * Encapsulates the full git pull workflow including: + * - Branch name and detached HEAD detection + * - Fetching from remote + * - Status parsing and local change detection + * - Stash push/pop logic + * - Upstream verification (rev-parse / --verify) + * - Pull execution and conflict detection + * - Conflict file list collection + * + * Extracted from the worktree pull route to improve organization + * and testability. Follows the same pattern as rebase-service.ts + * and cherry-pick-service.ts. 
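+ * + * @example + * // Hypothetical usage sketch; the worktree path is a placeholder: + * const result = await performPull('/repo/.worktrees/feature-x', { stashIfNeeded: true }); + * if (result.hasConflicts) { + *   console.log(`Conflicts from ${result.conflictSource}:`, result.conflictFiles); + * }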
+ */ + +import { createLogger, getErrorMessage } from '@automaker/utils'; +import { execGitCommand, getConflictFiles } from '@automaker/git-utils'; +import { execGitCommandWithLockRetry, getCurrentBranch } from '../lib/git.js'; + +const logger = createLogger('PullService'); + +// ============================================================================ +// Types +// ============================================================================ + +export interface PullOptions { + /** Remote name to pull from (defaults to 'origin') */ + remote?: string; + /** When true, automatically stash local changes before pulling and reapply after */ + stashIfNeeded?: boolean; +} + +export interface PullResult { + success: boolean; + error?: string; + branch?: string; + pulled?: boolean; + hasLocalChanges?: boolean; + localChangedFiles?: string[]; + stashed?: boolean; + stashRestored?: boolean; + stashRecoveryFailed?: boolean; + hasConflicts?: boolean; + conflictSource?: 'pull' | 'stash'; + conflictFiles?: string[]; + message?: string; +} + +// ============================================================================ +// Helper Functions +// ============================================================================ + +/** + * Fetch the latest refs from a remote. + * + * @param worktreePath - Path to the git worktree + * @param remote - Remote name (e.g. 'origin') + */ +export async function fetchRemote(worktreePath: string, remote: string): Promise<void> { + await execGitCommand(['fetch', remote], worktreePath); +} + +/** + * Parse `git status --porcelain` output into a list of changed file paths. + * + * @param worktreePath - Path to the git worktree + * @returns Object with hasLocalChanges flag and list of changed file paths + */ +export async function getLocalChanges( + worktreePath: string +): Promise<{ hasLocalChanges: boolean; localChangedFiles: string[] }> { + const statusOutput = await execGitCommand(['status', '--porcelain'], worktreePath); + const hasLocalChanges = statusOutput.trim().length > 0; + + let localChangedFiles: string[] = []; + if (hasLocalChanges) { + localChangedFiles = statusOutput + .trim() + .split('\n') + .filter((line) => line.trim().length > 0) + .map((line) => { + const entry = line.substring(3).trim(); + const arrowIndex = entry.indexOf(' -> '); + return arrowIndex !== -1 ? entry.substring(arrowIndex + 4).trim() : entry; + }); + } + + return { hasLocalChanges, localChangedFiles }; +} + +/** + * Stash local changes with a descriptive message. + * + * @param worktreePath - Path to the git worktree + * @param branchName - Current branch name (used in stash message) + * @returns Promise<void> — resolves on success, throws on failure + */ +export async function stashChanges(worktreePath: string, branchName: string): Promise<void> { + const stashMessage = `automaker-pull-stash: Pre-pull stash on ${branchName}`; + await execGitCommandWithLockRetry( + ['stash', 'push', '--include-untracked', '-m', stashMessage], + worktreePath + ); +} + +/** + * Pop the top stash entry. + * + * @param worktreePath - Path to the git worktree + * @returns The stdout from stash pop + */ +export async function popStash(worktreePath: string): Promise<string> { + return await execGitCommandWithLockRetry(['stash', 'pop'], worktreePath); +} + +/** + * Try to pop the stash, returning whether the pop succeeded. 
+ * + * @param worktreePath - Path to the git worktree + * @returns true if stash pop succeeded, false if it failed + */ +async function tryPopStash(worktreePath: string): Promise<boolean> { + try { + await execGitCommandWithLockRetry(['stash', 'pop'], worktreePath); + return true; + } catch (stashPopError) { + // Stash pop failed - leave it in stash list for manual recovery + logger.error('Failed to reapply stash during error recovery', { + worktreePath, + error: getErrorMessage(stashPopError), + }); + return false; + } +} + +/** + * Result of the upstream/remote branch check. + * - 'tracking': the branch has a configured upstream tracking ref + * - 'remote': no tracking ref, but the remote branch exists + * - 'none': neither a tracking ref nor a remote branch was found + */ +export type UpstreamStatus = 'tracking' | 'remote' | 'none'; + +/** + * Check whether the branch has an upstream tracking ref, or whether + * the remote branch exists. + * + * @param worktreePath - Path to the git worktree + * @param branchName - Current branch name + * @param remote - Remote name + * @returns UpstreamStatus indicating tracking ref, remote branch, or neither + */ +export async function hasUpstreamOrRemoteBranch( + worktreePath: string, + branchName: string, + remote: string +): Promise<UpstreamStatus> { + try { + await execGitCommand(['rev-parse', '--abbrev-ref', `${branchName}@{upstream}`], worktreePath); + return 'tracking'; + } catch { + // No upstream tracking - check if the remote branch exists + try { + await execGitCommand(['rev-parse', '--verify', `${remote}/${branchName}`], worktreePath); + return 'remote'; + } catch { + return 'none'; + } + } +} + +/** + * Check whether an error output string indicates a merge conflict. + */ +function isConflictError(errorOutput: string): boolean { + return errorOutput.includes('CONFLICT') || errorOutput.includes('Automatic merge failed'); +} + +/** + * Check whether an output string indicates a stash conflict. + */ +function isStashConflict(output: string): boolean { + return output.includes('CONFLICT') || output.includes('Merge conflict'); +} + +// ============================================================================ +// Main Service Function +// ============================================================================ + +/** + * Perform a full git pull workflow on the given worktree. + * + * The workflow: + * 1. Get current branch name (detect detached HEAD) + * 2. Fetch from remote + * 3. Check for local changes + * 4. If local changes and stashIfNeeded, stash them + * 5. Verify upstream tracking or remote branch exists + * 6. Execute `git pull` + * 7. If stash was created and pull succeeded, reapply stash + * 8. Detect and report conflicts from pull or stash reapplication + * + * @param worktreePath - Path to the git worktree + * @param options - Pull options (remote, stashIfNeeded) + * @returns PullResult with detailed status information + */ +export async function performPull( + worktreePath: string, + options?: PullOptions +): Promise<PullResult> { + const targetRemote = options?.remote || 'origin'; + const stashIfNeeded = options?.stashIfNeeded ?? false; + + // 1. Get current branch name + let branchName: string; + try { + branchName = await getCurrentBranch(worktreePath); + } catch (err) { + return { + success: false, + error: `Failed to get current branch: ${getErrorMessage(err)}`, + }; + } + + // 2. Check for detached HEAD state + if (branchName === 'HEAD') { + return { + success: false, + error: 'Cannot pull in detached HEAD state. 
Please checkout a branch first.', + }; + } + + // 3. Fetch latest from remote + try { + await fetchRemote(worktreePath, targetRemote); + } catch (fetchError) { + return { + success: false, + error: `Failed to fetch from remote '${targetRemote}': ${getErrorMessage(fetchError)}`, + }; + } + + // 4. Check for local changes + let hasLocalChanges: boolean; + let localChangedFiles: string[]; + try { + ({ hasLocalChanges, localChangedFiles } = await getLocalChanges(worktreePath)); + } catch (err) { + return { + success: false, + error: `Failed to get local changes: ${getErrorMessage(err)}`, + }; + } + + // 5. If there are local changes and stashIfNeeded is not requested, return info + if (hasLocalChanges && !stashIfNeeded) { + return { + success: true, + branch: branchName, + pulled: false, + hasLocalChanges: true, + localChangedFiles, + message: + 'Local changes detected. Use stashIfNeeded to automatically stash and reapply changes.', + }; + } + + // 6. Stash local changes if needed + let didStash = false; + if (hasLocalChanges && stashIfNeeded) { + try { + await stashChanges(worktreePath, branchName); + didStash = true; + } catch (stashError) { + return { + success: false, + error: `Failed to stash local changes: ${getErrorMessage(stashError)}`, + }; + } + } + + // 7. Verify upstream tracking or remote branch exists + const upstreamStatus = await hasUpstreamOrRemoteBranch(worktreePath, branchName, targetRemote); + if (upstreamStatus === 'none') { + let stashRecoveryFailed = false; + if (didStash) { + const stashPopped = await tryPopStash(worktreePath); + stashRecoveryFailed = !stashPopped; + } + return { + success: false, + error: `Branch '${branchName}' has no upstream branch on remote '${targetRemote}'. Push it first or set upstream with: git branch --set-upstream-to=${targetRemote}/${branchName}${stashRecoveryFailed ? ' Local changes remain stashed and need manual recovery (run: git stash pop).' : ''}`, + stashRecoveryFailed: stashRecoveryFailed ? stashRecoveryFailed : undefined, + }; + } + + // 8. Pull latest changes + // When the branch has a configured upstream tracking ref, let Git use it automatically. + // When only the remote branch exists (no tracking ref), explicitly specify remote and branch. + const pullArgs = upstreamStatus === 'tracking' ? ['pull'] : ['pull', targetRemote, branchName]; + let pullConflict = false; + let pullConflictFiles: string[] = []; + try { + const pullOutput = await execGitCommand(pullArgs, worktreePath); + + const alreadyUpToDate = pullOutput.includes('Already up to date'); + + // If no stash to reapply, return success + if (!didStash) { + return { + success: true, + branch: branchName, + pulled: !alreadyUpToDate, + hasLocalChanges: false, + stashed: false, + stashRestored: false, + message: alreadyUpToDate ? 
'Already up to date' : 'Pulled latest changes', + }; + } + } catch (pullError: unknown) { + const err = pullError as { stderr?: string; stdout?: string; message?: string }; + const errorOutput = `${err.stderr || ''} ${err.stdout || ''} ${err.message || ''}`; + + if (isConflictError(errorOutput)) { + pullConflict = true; + try { + pullConflictFiles = await getConflictFiles(worktreePath); + } catch { + pullConflictFiles = []; + } + } else { + // Non-conflict pull error + let stashRecoveryFailed = false; + if (didStash) { + const stashPopped = await tryPopStash(worktreePath); + stashRecoveryFailed = !stashPopped; + } + + // Check for common errors + const errorMsg = err.stderr || err.message || 'Pull failed'; + if (errorMsg.includes('no tracking information')) { + return { + success: false, + error: `Branch '${branchName}' has no upstream branch. Push it first or set upstream with: git branch --set-upstream-to=${targetRemote}/${branchName}${stashRecoveryFailed ? ' Local changes remain stashed and need manual recovery (run: git stash pop).' : ''}`, + stashRecoveryFailed: stashRecoveryFailed ? stashRecoveryFailed : undefined, + }; + } + + return { + success: false, + error: `${errorMsg}${stashRecoveryFailed ? ' Local changes remain stashed and need manual recovery (run: git stash pop).' : ''}`, + stashRecoveryFailed: stashRecoveryFailed ? stashRecoveryFailed : undefined, + }; + } + } + + // 9. If pull had conflicts, return conflict info (don't try stash pop) + if (pullConflict) { + return { + success: false, + branch: branchName, + pulled: true, + hasConflicts: true, + conflictSource: 'pull', + conflictFiles: pullConflictFiles, + stashed: didStash, + stashRestored: false, + message: + `Pull resulted in merge conflicts. ${didStash ? 'Your local changes are still stashed.' : ''}`.trim(), + }; + } + + // 10. Pull succeeded, now try to reapply stash + if (didStash) { + return await reapplyStash(worktreePath, branchName); + } + + // Shouldn't reach here, but return a safe default + return { + success: true, + branch: branchName, + pulled: true, + message: 'Pulled latest changes', + }; +} + +/** + * Attempt to reapply stashed changes after a successful pull. + * Handles both clean reapplication and conflict scenarios. 
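+ * + * Note: a conflicted reapply is still reported with success: true because the + * pull itself completed; callers should inspect hasConflicts and conflictSource.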
+ * + * @param worktreePath - Path to the git worktree + * @param branchName - Current branch name + * @returns PullResult reflecting stash reapplication status + */ +async function reapplyStash(worktreePath: string, branchName: string): Promise<PullResult> { + try { + await popStash(worktreePath); + + // Stash pop succeeded cleanly (popStash throws on non-zero exit) + return { + success: true, + branch: branchName, + pulled: true, + hasConflicts: false, + stashed: true, + stashRestored: true, + message: 'Pulled latest changes and restored your stashed changes.', + }; + } catch (stashPopError: unknown) { + const err = stashPopError as { stderr?: string; stdout?: string; message?: string }; + const errorOutput = `${err.stderr || ''} ${err.stdout || ''} ${err.message || ''}`; + + // Check if stash pop failed due to conflicts + // The stash remains in the stash list when conflicts occur, so stashRestored is false + if (isStashConflict(errorOutput)) { + let stashConflictFiles: string[] = []; + try { + stashConflictFiles = await getConflictFiles(worktreePath); + } catch { + stashConflictFiles = []; + } + + return { + success: true, + branch: branchName, + pulled: true, + hasConflicts: true, + conflictSource: 'stash', + conflictFiles: stashConflictFiles, + stashed: true, + stashRestored: false, + message: 'Pull succeeded but reapplying your stashed changes resulted in merge conflicts.', + }; + } + + // Non-conflict stash pop error - stash is still in the stash list + logger.warn('Failed to reapply stash after pull', { worktreePath, error: errorOutput }); + + return { + success: true, + branch: branchName, + pulled: true, + hasConflicts: false, + stashed: true, + stashRestored: false, + message: + 'Pull succeeded but failed to reapply stashed changes. Your changes are still in the stash list.', + }; + } +} diff --git a/apps/server/src/services/rebase-service.ts b/apps/server/src/services/rebase-service.ts new file mode 100644 index 000000000..14f806e9e --- /dev/null +++ b/apps/server/src/services/rebase-service.ts @@ -0,0 +1,223 @@ +/** + * RebaseService - Rebase git operations without HTTP + * + * Handles git rebase operations with conflict detection and reporting. + * Follows the same pattern as merge-service.ts and cherry-pick-service.ts. + */ + +import fs from 'fs/promises'; +import path from 'path'; +import { createLogger, getErrorMessage } from '@automaker/utils'; +import { execGitCommand, getCurrentBranch, getConflictFiles } from '@automaker/git-utils'; + +const logger = createLogger('RebaseService'); + +// ============================================================================ +// Types +// ============================================================================ + +export interface RebaseResult { + success: boolean; + error?: string; + hasConflicts?: boolean; + conflictFiles?: string[]; + aborted?: boolean; + branch?: string; + ontoBranch?: string; + message?: string; +} + +// ============================================================================ +// Service Functions +// ============================================================================ + +/** + * Run a git rebase operation on the given worktree. + * + * @param worktreePath - Path to the git worktree + * @param ontoBranch - The branch to rebase onto (e.g., 'origin/main') + * @returns RebaseResult with success/failure information + */ +export async function runRebase(worktreePath: string, ontoBranch: string): Promise<RebaseResult> { + // Reject empty, whitespace-only, or dash-prefixed branch names. 
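+ // e.g. '', '   ', and '--force' are all rejected here, while 'origin/main' passes.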
+ const normalizedOntoBranch = ontoBranch?.trim() ?? ''; + if (normalizedOntoBranch === '' || normalizedOntoBranch.startsWith('-')) { + return { + success: false, + error: `Invalid branch name: "${ontoBranch}" must not be empty or start with a dash.`, + }; + } + + // Get current branch name before rebase + let currentBranch: string; + try { + currentBranch = await getCurrentBranch(worktreePath); + } catch (branchErr) { + return { + success: false, + error: `Failed to resolve current branch for worktree "${worktreePath}": ${getErrorMessage(branchErr)}`, + }; + } + + try { + // Pass ontoBranch after '--' so git treats it as a ref, not an option. + // Set LC_ALL=C so git always emits English output regardless of the system + // locale, making text-based conflict detection reliable. + await execGitCommand(['rebase', '--', normalizedOntoBranch], worktreePath, { LC_ALL: 'C' }); + + return { + success: true, + branch: currentBranch, + ontoBranch: normalizedOntoBranch, + message: `Successfully rebased ${currentBranch} onto ${normalizedOntoBranch}`, + }; + } catch (rebaseError: unknown) { + // Check if this is a rebase conflict. We use a multi-layer strategy so + // that detection is reliable even when locale settings vary or git's text + // output changes across versions: + // + // 1. Primary (text-based): scan the error output for well-known English + // conflict markers. Because we pass LC_ALL=C above these strings are + // always in English, but we keep the check as one layer among several. + // + // 2. Repository-state check: run `git rev-parse --git-dir` to find the + // actual .git directory, then verify whether the in-progress rebase + // state directories (.git/rebase-merge or .git/rebase-apply) exist. + // These are created by git at the start of a rebase and are the most + // reliable indicator that a rebase is still in progress (i.e. stopped + // due to conflicts). + // + // 3. Unmerged-path check: run `git status --porcelain` (machine-readable, + // locale-independent) and look for lines whose first two characters + // indicate an unmerged state (UU, AA, DD, AU, UA, DU, UD). + // + // hasConflicts is true when ANY of the three layers returns positive. + const err = rebaseError as { stdout?: string; stderr?: string; message?: string }; + const output = `${err.stdout || ''} ${err.stderr || ''} ${err.message || ''}`; + + // Layer 1 – text matching (locale-safe because we set LC_ALL=C above). + const textIndicatesConflict = + output.includes('CONFLICT') || + output.includes('could not apply') || + output.includes('Resolve all conflicts') || + output.includes('fix conflicts'); + + // Layers 2 & 3 – repository state inspection (locale-independent). + let rebaseStateExists = false; + let hasUnmergedPaths = false; + try { + // Find the canonical .git directory for this worktree. + const gitDir = (await execGitCommand(['rev-parse', '--git-dir'], worktreePath)).trim(); + // git rev-parse --git-dir returns a path relative to cwd when the repo is + // a worktree, so we resolve it against worktreePath. + const resolvedGitDir = path.resolve(worktreePath, gitDir); + + // Layer 2: check for rebase state directories. 
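+ // (git's merge backend uses .git/rebase-merge and the am backend uses + // .git/rebase-apply; either directory existing means a rebase is in progress.)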
+ const rebaseMergeDir = path.join(resolvedGitDir, 'rebase-merge'); + const rebaseApplyDir = path.join(resolvedGitDir, 'rebase-apply'); + const [rebaseMergeExists, rebaseApplyExists] = await Promise.all([ + fs + .access(rebaseMergeDir) + .then(() => true) + .catch(() => false), + fs + .access(rebaseApplyDir) + .then(() => true) + .catch(() => false), + ]); + rebaseStateExists = rebaseMergeExists || rebaseApplyExists; + } catch { + // If rev-parse fails the repo may be in an unexpected state; fall back to + // text-based detection only. + } + + try { + // Layer 3: check for unmerged paths via machine-readable git status. + const statusOutput = await execGitCommand(['status', '--porcelain'], worktreePath, { + LC_ALL: 'C', + }); + // Unmerged status codes occupy the first two characters of each line. + // Standard unmerged codes: UU, AA, DD, AU, UA, DU, UD. + hasUnmergedPaths = statusOutput + .split('\n') + .some((line) => /^(UU|AA|DD|AU|UA|DU|UD)/.test(line)); + } catch { + // git status failing is itself a sign something is wrong; leave + // hasUnmergedPaths as false and rely on the other layers. + } + + const hasConflicts = textIndicatesConflict || rebaseStateExists || hasUnmergedPaths; + + if (hasConflicts) { + // Attempt to fetch the list of conflicted files. We wrap this in its + // own try/catch so that a failure here does NOT prevent abortRebase from + // running – keeping the repository in a clean state is the priority. + let conflictFiles: string[] | undefined; + let conflictFilesError: unknown; + try { + conflictFiles = await getConflictFiles(worktreePath); + } catch (getConflictFilesError: unknown) { + conflictFilesError = getConflictFilesError; + logger.warn('Failed to retrieve conflict files after rebase conflict', { + worktreePath, + error: getErrorMessage(getConflictFilesError), + }); + } + + // Abort the rebase to leave the repo in a clean state. This must + // always run regardless of whether getConflictFiles succeeded. + const aborted = await abortRebase(worktreePath); + + if (!aborted) { + logger.error('Failed to abort rebase after conflict; repository may be in a dirty state', { + worktreePath, + }); + } + + // Re-throw a composed error so callers retain both the original rebase + // failure context and any conflict-file lookup failure. + if (conflictFilesError !== undefined) { + const composedMessage = [ + `Rebase of "${currentBranch}" onto "${normalizedOntoBranch}" failed due to conflicts.`, + `Original rebase error: ${getErrorMessage(rebaseError)}`, + `Additionally, fetching conflict files failed: ${getErrorMessage(conflictFilesError)}`, + aborted + ? 'The rebase was aborted; no changes were applied.' + : 'The rebase abort also failed; repository may be in a dirty state.', + ].join(' '); + throw new Error(composedMessage); + } + + return { + success: false, + error: aborted + ? `Rebase of "${currentBranch}" onto "${normalizedOntoBranch}" aborted due to conflicts; no changes were applied.` + : `Rebase of "${currentBranch}" onto "${normalizedOntoBranch}" failed due to conflicts and the abort also failed; repository may be in a dirty state.`, + hasConflicts: true, + conflictFiles, + aborted, + branch: currentBranch, + ontoBranch: normalizedOntoBranch, + }; + } + + // Non-conflict error - propagate + throw rebaseError; + } +} + +/** + * Abort an in-progress rebase operation. 
+ * + * @param worktreePath - Path to the git worktree + * @returns true if abort succeeded, false if it failed (logged as warning) + */ +export async function abortRebase(worktreePath: string): Promise<boolean> { + try { + await execGitCommand(['rebase', '--abort'], worktreePath); + return true; + } catch (err) { + logger.warn('Failed to abort rebase after conflict', err instanceof Error ? err.message : err); + return false; + } +} diff --git a/apps/server/src/services/recovery-service.ts b/apps/server/src/services/recovery-service.ts index d575f1da6..d08f5a8e5 100644 --- a/apps/server/src/services/recovery-service.ts +++ b/apps/server/src/services/recovery-service.ts @@ -250,6 +250,14 @@ async resumeInterruptedFeatures(projectPath: string): Promise<void> { const featuresDir = getFeaturesDir(projectPath); try { + // Load execution state to find features that were running before restart. + // This is critical because reconcileAllFeatureStates() runs at server startup + // and resets in_progress/interrupted/pipeline_* features to ready/backlog + // BEFORE the UI connects and calls this method. Without checking execution state, + // we would find no features to resume since their statuses have already been reset. + const executionState = await this.loadExecutionState(projectPath); + const previouslyRunningIds = new Set(executionState.runningFeatureIds ?? []); + const entries = await secureFs.readdir(featuresDir, { withFileTypes: true }); const featuresWithContext: Feature[] = []; const featuresWithoutContext: Feature[] = []; @@ -263,18 +271,37 @@ logRecoveryWarning(result, `Feature ${entry.name}`, logger); const feature = result.data; if (!feature) continue; - if ( + + // Check if the feature should be resumed: + // 1. Features still in active states (in_progress, pipeline_*) - not yet reconciled + // 2. Features in interrupted state - explicitly marked for resume + // 3. Features that were previously running (from execution state) and are now + // in ready/backlog due to reconciliation resetting their status + const isActiveState = feature.status === 'in_progress' || - (feature.status && feature.status.startsWith('pipeline_')) - ) { - (await this.contextExists(projectPath, feature.id)) ? 
featuresWithContext.push(feature) - : featuresWithoutContext.push(feature); + feature.status === 'interrupted' || + (feature.status && feature.status.startsWith('pipeline_')); + const wasReconciledFromRunning = + previouslyRunningIds.has(feature.id) && + (feature.status === 'ready' || feature.status === 'backlog'); + + if (isActiveState || wasReconciledFromRunning) { + if (await this.contextExists(projectPath, feature.id)) { + featuresWithContext.push(feature); + } else { + featuresWithoutContext.push(feature); + } } } } const allInterruptedFeatures = [...featuresWithContext, ...featuresWithoutContext]; if (allInterruptedFeatures.length === 0) return; + + logger.info( + `[resumeInterruptedFeatures] Found ${allInterruptedFeatures.length} feature(s) to resume ` + + `(${previouslyRunningIds.size} from execution state, statuses: ${allInterruptedFeatures.map((f) => `${f.id}=${f.status}`).join(', ')})` + ); + this.eventBus.emitAutoModeEvent('auto_mode_resuming_features', { message: `Resuming ${allInterruptedFeatures.length} interrupted feature(s)`, projectPath, @@ -295,6 +322,10 @@ export class RecoveryService { /* continue */ } } + + // Clear execution state after successful resume to prevent + // re-resuming the same features on subsequent calls + await this.clearExecutionState(projectPath); } catch { /* ignore */ } diff --git a/apps/server/src/services/settings-service.ts b/apps/server/src/services/settings-service.ts index 6ffdd4882..6a3d804eb 100644 --- a/apps/server/src/services/settings-service.ts +++ b/apps/server/src/services/settings-service.ts @@ -729,6 +729,7 @@ export class SettingsService { anthropic: { configured: boolean; masked: string }; google: { configured: boolean; masked: string }; openai: { configured: boolean; masked: string }; + zai: { configured: boolean; masked: string }; }> { const credentials = await this.getCredentials(); @@ -750,6 +751,10 @@ export class SettingsService { configured: !!credentials.apiKeys.openai, masked: maskKey(credentials.apiKeys.openai), }, + zai: { + configured: !!credentials.apiKeys.zai, + masked: maskKey(credentials.apiKeys.zai), + }, }; } @@ -1018,6 +1023,7 @@ export class SettingsService { anthropic: apiKeys.anthropic || '', google: apiKeys.google || '', openai: apiKeys.openai || '', + zai: '', }, }); migratedCredentials = true; diff --git a/apps/server/src/services/stage-files-service.ts b/apps/server/src/services/stage-files-service.ts new file mode 100644 index 000000000..e155b3ee1 --- /dev/null +++ b/apps/server/src/services/stage-files-service.ts @@ -0,0 +1,117 @@ +/** + * stageFilesService - Path validation and git staging/unstaging operations + * + * Extracted from createStageFilesHandler to centralise path canonicalization, + * path-traversal validation, and git invocation so they can be tested and + * reused independently of the HTTP layer. + */ + +import path from 'path'; +import fs from 'fs/promises'; +import { execGitCommand } from '../lib/git.js'; + +/** + * Result returned by `stageFiles` on success. + */ +export interface StageFilesResult { + operation: string; + filesCount: number; +} + +/** + * Error thrown when one or more file paths fail validation (e.g. absolute + * paths, path-traversal attempts, or paths that resolve outside the worktree + * root, or when the worktree path itself does not exist). + * + * Handlers can catch this to return an HTTP 400 response instead of 500. 
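+ * + * @example + * // Hypothetical handler sketch (res is an Express-style response object): + * try { + *   await stageFiles(worktreePath, files, 'stage'); + * } catch (err) { + *   if (err instanceof StageFilesValidationError) { + *     res.status(400).json({ error: err.message }); + *   } else { + *     throw err; + *   } + * }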
+ */ +export class StageFilesValidationError extends Error { + constructor(message: string) { + super(message); + this.name = 'StageFilesValidationError'; + } +} + +/** + * Resolve the canonical path of the worktree root, validate every file path + * against it to prevent path-traversal attacks, and then invoke the + * appropriate git command (`add` or `reset`) to stage or unstage the files. + * + * @param worktreePath - Absolute path to the git worktree root directory. + * @param files - Relative file paths to stage or unstage. + * @param operation - `'stage'` runs `git add`, `'unstage'` runs `git reset HEAD`. + * + * @returns An object containing the operation name and the number of files + * that were staged/unstaged. + * + * @throws {StageFilesValidationError} When `worktreePath` is inaccessible or + * any entry in `files` fails the path-traversal checks. + * @throws {Error} When the underlying git command fails. + */ +export async function stageFiles( + worktreePath: string, + files: string[], + operation: 'stage' | 'unstage' +): Promise<StageFilesResult> { + // Canonicalize the worktree root by resolving symlinks so that + // path-traversal checks are reliable even when symlinks are involved. + let canonicalRoot: string; + try { + canonicalRoot = await fs.realpath(worktreePath); + } catch { + throw new StageFilesValidationError('worktreePath does not exist or is not accessible'); + } + + // Validate and sanitize each file path to prevent path traversal attacks. + // Each file entry is resolved against the canonicalized worktree root and + // must remain within that root directory. + const base = canonicalRoot + path.sep; + const sanitizedFiles: string[] = []; + for (const file of files) { + // Reject empty or whitespace-only paths — path.resolve(canonicalRoot, '') + // returns canonicalRoot itself, so without this guard an empty string would + // pass all subsequent checks and be forwarded to git unchanged. + if (file.trim() === '') { + throw new StageFilesValidationError( + 'Invalid file path (empty or whitespace-only paths not allowed)' + ); + } + // Reject absolute paths + if (path.isAbsolute(file)) { + throw new StageFilesValidationError( + `Invalid file path (absolute paths not allowed): ${file}` + ); + } + // Reject entries containing '..' + if (file.includes('..')) { + throw new StageFilesValidationError( + `Invalid file path (path traversal not allowed): ${file}` + ); + } + // Resolve the file path against the canonicalized worktree root and + // ensure the result stays within the worktree directory. + const resolved = path.resolve(canonicalRoot, file); + if (resolved !== canonicalRoot && !resolved.startsWith(base)) { + throw new StageFilesValidationError( + `Invalid file path (outside worktree directory): ${file}` + ); + } + // Forward only the original relative path to git — git interprets + // paths relative to its working directory (canonicalRoot / worktreePath), + // so we do not need to pass the resolved absolute path. 
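+ // e.g. 'src/app.ts' is forwarded as-is rather than as a resolved absolute path.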
+ sanitizedFiles.push(file); + } + + if (operation === 'stage') { + // Stage the specified files + await execGitCommand(['add', '--', ...sanitizedFiles], worktreePath); + } else { + // Unstage the specified files + await execGitCommand(['reset', 'HEAD', '--', ...sanitizedFiles], worktreePath); + } + + return { + operation, + filesCount: sanitizedFiles.length, + }; +} diff --git a/apps/server/src/services/stash-service.ts b/apps/server/src/services/stash-service.ts new file mode 100644 index 000000000..dd0a8737c --- /dev/null +++ b/apps/server/src/services/stash-service.ts @@ -0,0 +1,461 @@ +/** + * StashService - Stash operations without HTTP + * + * Encapsulates stash workflows including: + * - Push (create) stashes with optional message and file selection + * - List all stash entries with metadata and changed files + * - Apply or pop a stash entry with conflict detection + * - Drop (delete) a stash entry + * - Conflict detection from command output and git diff + * - Lifecycle event emission (start, progress, conflicts, success, failure) + * + * Extracted from the worktree stash route handlers to improve organisation + * and testability. Follows the same pattern as pull-service.ts and + * merge-service.ts. + */ + +import { createLogger, getErrorMessage } from '@automaker/utils'; +import type { EventEmitter } from '../lib/events.js'; +import { execGitCommand, execGitCommandWithLockRetry } from '../lib/git.js'; + +const logger = createLogger('StashService'); + +// ============================================================================ +// Types +// ============================================================================ + +export interface StashApplyOptions { + /** When true, remove the stash entry after applying (git stash pop) */ + pop?: boolean; +} + +export interface StashApplyResult { + success: boolean; + error?: string; + applied?: boolean; + hasConflicts?: boolean; + conflictFiles?: string[]; + operation?: 'apply' | 'pop'; + stashIndex?: number; + message?: string; +} + +export interface StashPushResult { + success: boolean; + error?: string; + stashed: boolean; + branch?: string; + message?: string; +} + +export interface StashEntry { + index: number; + message: string; + branch: string; + date: string; + files: string[]; +} + +export interface StashListResult { + success: boolean; + error?: string; + stashes: StashEntry[]; + total: number; +} + +export interface StashDropResult { + success: boolean; + error?: string; + dropped: boolean; + stashIndex?: number; + message?: string; +} + +// ============================================================================ +// Helper Functions +// ============================================================================ + +/** + * Retrieve the list of files with unmerged (conflicted) entries using git diff. + * + * @param worktreePath - Path to the git worktree + * @returns Array of file paths that have unresolved conflicts + */ +export async function getConflictedFiles(worktreePath: string): Promise<string[]> { + try { + const diffOutput = await execGitCommand( + ['diff', '--name-only', '--diff-filter=U'], + worktreePath + ); + return diffOutput + .trim() + .split('\n') + .filter((f) => f.trim().length > 0); + } catch { + // If we cannot get the file list, return an empty array + return []; + } +} + +/** + * Determine whether command output indicates a merge conflict. 
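+ * + * @example + * // Illustrative outputs (hypothetical file path): + * isConflictOutput('CONFLICT (content): Merge conflict in src/a.ts'); // true + * isConflictOutput('Already up to date.'); // false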
+
+/**
+ * Determine whether command output indicates a merge conflict.
+ */
+function isConflictOutput(output: string): boolean {
+  return output.includes('CONFLICT') || output.includes('Merge conflict');
+}
+
+/**
+ * Build a conflict result from stash apply/pop, emit events, and return.
+ * Extracted to avoid duplicating conflict handling in the try and catch paths.
+ */
+async function handleStashConflicts(
+  worktreePath: string,
+  stashIndex: number,
+  operation: 'apply' | 'pop',
+  events?: EventEmitter
+): Promise<StashApplyResult> {
+  const conflictFiles = await getConflictedFiles(worktreePath);
+
+  events?.emit('stash:conflicts', {
+    worktreePath,
+    stashIndex,
+    operation,
+    conflictFiles,
+  });
+
+  const result: StashApplyResult = {
+    success: true,
+    applied: true,
+    hasConflicts: true,
+    conflictFiles,
+    operation,
+    stashIndex,
+    message: `Stash ${operation === 'pop' ? 'popped' : 'applied'} with conflicts. Please resolve the conflicts.`,
+  };
+
+  events?.emit('stash:success', {
+    worktreePath,
+    stashIndex,
+    operation,
+    hasConflicts: true,
+    conflictFiles,
+  });
+
+  return result;
+}
+
+// ============================================================================
+// Main Service Function
+// ============================================================================
+
+/**
+ * Apply or pop a stash entry in the given worktree.
+ *
+ * The workflow:
+ * 1. Validate inputs
+ * 2. Emit stash:start event
+ * 3. Run `git stash apply` or `git stash pop`
+ * 4. Emit stash:progress event with raw command output
+ * 5. Check output for conflict markers; if conflicts found, collect files and
+ *    emit stash:conflicts event
+ * 6. Emit stash:success or stash:failure depending on outcome
+ * 7. Return a structured StashApplyResult
+ *
+ * @param worktreePath - Absolute path to the git worktree
+ * @param stashIndex - Zero-based stash index (stash@{N})
+ * @param options - Optional flags (pop)
+ * @returns StashApplyResult with detailed status information
+ */
+export async function applyOrPop(
+  worktreePath: string,
+  stashIndex: number,
+  options?: StashApplyOptions,
+  events?: EventEmitter
+): Promise<StashApplyResult> {
+  const operation: 'apply' | 'pop' = options?.pop ? 'pop' : 'apply';
+  const stashRef = `stash@{${stashIndex}}`;
+
+  logger.info(`[StashService] ${operation} ${stashRef} in ${worktreePath}`);
+
+  // 1. Emit start event
+  events?.emit('stash:start', { worktreePath, stashIndex, stashRef, operation });
+
+  try {
+    // 2. Run git stash apply / pop
+    let stdout = '';
+
+    try {
+      stdout = await execGitCommand(['stash', operation, stashRef], worktreePath);
+    } catch (gitError: unknown) {
+      const err = gitError as { stdout?: string; stderr?: string; message?: string };
+      const errStdout = err.stdout || '';
+      const errStderr = err.stderr || err.message || '';
+
+      const combinedOutput = `${errStdout}\n${errStderr}`;
+
+      // 3. Emit progress with raw output
+      events?.emit('stash:progress', {
+        worktreePath,
+        stashIndex,
+        operation,
+        output: combinedOutput,
+      });
+
+      // 4. Check if the error is a conflict
+      if (isConflictOutput(combinedOutput)) {
+        return handleStashConflicts(worktreePath, stashIndex, operation, events);
+      }
+
+      // 5. Non-conflict git error – re-throw so the outer catch logs and handles it
+      throw gitError;
+    }
+
+    // 6. Command succeeded – check stdout for conflict markers (some git versions
+    // exit 0 even when conflicts occur during apply)
+    const combinedOutput = stdout;
+
+    events?.emit('stash:progress', { worktreePath, stashIndex, operation, output: combinedOutput });
+
+    if (isConflictOutput(combinedOutput)) {
+      return handleStashConflicts(worktreePath, stashIndex, operation, events);
+    }
+
+    // 7. Clean success
+    const result: StashApplyResult = {
+      success: true,
+      applied: true,
+      hasConflicts: false,
+      operation,
+      stashIndex,
+      message: `Stash ${operation === 'pop' ? 'popped' : 'applied'} successfully`,
+    };
+
+    events?.emit('stash:success', {
+      worktreePath,
+      stashIndex,
+      operation,
+      hasConflicts: false,
+    });
+
+    return result;
+  } catch (error) {
+    const errorMessage = getErrorMessage(error);
+
+    logger.error(`Stash ${operation} failed`, { error: getErrorMessage(error) });
+
+    events?.emit('stash:failure', {
+      worktreePath,
+      stashIndex,
+      operation,
+      error: errorMessage,
+    });
+
+    return {
+      success: false,
+      error: errorMessage,
+      applied: false,
+      operation,
+      stashIndex,
+    };
+  }
+}
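+
+/*
+ * Usage sketch (illustrative): popping the newest stash and branching on the
+ * conflict flag. The `events` emitter is optional and omitted here.
+ *
+ *   const result = await applyOrPop(worktreePath, 0, { pop: true });
+ *   if (result.success && result.hasConflicts) {
+ *     // Surface result.conflictFiles so the user (or a task) can resolve them.
+ *   }
+ */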
+
+// ============================================================================
+// Push Stash
+// ============================================================================
+
+/**
+ * Stash uncommitted changes (including untracked files) with an optional
+ * message and optional file selection.
+ *
+ * Workflow:
+ * 1. Check for uncommitted changes via `git status --porcelain`
+ * 2. If no changes, return early with stashed: false
+ * 3. Build and run `git stash push --include-untracked [-m message] [-- files]`
+ * 4. Retrieve the current branch name
+ * 5. Return a structured StashPushResult
+ *
+ * @param worktreePath - Absolute path to the git worktree
+ * @param options - Optional message and files to selectively stash
+ * @returns StashPushResult with stash status and branch info
+ */
+export async function pushStash(
+  worktreePath: string,
+  options?: { message?: string; files?: string[] },
+  events?: EventEmitter
+): Promise<StashPushResult> {
+  const message = options?.message;
+  const files = options?.files;
+
+  logger.info(`[StashService] push stash in ${worktreePath}`);
+  events?.emit('stash:start', { worktreePath, operation: 'push' });
+
+  // 1. Check for any changes to stash
+  const status = await execGitCommand(['status', '--porcelain'], worktreePath);
+
+  if (!status.trim()) {
+    events?.emit('stash:success', { worktreePath, operation: 'push', stashed: false });
+    return {
+      success: true,
+      stashed: false,
+      message: 'No changes to stash',
+    };
+  }
+
+  // 2. Build stash push command args
+  const args = ['stash', 'push', '--include-untracked'];
+  if (message && message.trim()) {
+    args.push('-m', message.trim());
+  }
+
+  // If specific files are provided, add them as pathspecs after '--'
+  if (files && files.length > 0) {
+    args.push('--');
+    args.push(...files);
+  }
+
+  // 3. Execute stash push (with automatic index.lock cleanup and retry)
+  await execGitCommandWithLockRetry(args, worktreePath);
+
+  // 4. Get current branch name
+  const branchOutput = await execGitCommand(['rev-parse', '--abbrev-ref', 'HEAD'], worktreePath);
+  const branchName = branchOutput.trim();
+
+  events?.emit('stash:success', {
+    worktreePath,
+    operation: 'push',
+    stashed: true,
+    branch: branchName,
+  });
+
+  return {
+    success: true,
+    stashed: true,
+    branch: branchName,
+    message: message?.trim() || `WIP on ${branchName}`,
+  };
+}
+
+// ============================================================================
+// List Stashes
+// ============================================================================
+
+/**
+ * List all stash entries for a worktree with metadata and changed files.
+ *
+ * Workflow:
+ * 1. Run `git stash list` with a custom format to get index, message, and date
+ * 2. Parse each stash line into a structured StashEntry
+ * 3. For each entry, fetch the list of files changed via `git stash show`
+ * 4. Return the full list as a StashListResult
+ *
+ * @param worktreePath - Absolute path to the git worktree
+ * @returns StashListResult with all stash entries and their metadata
+ */
+export async function listStash(worktreePath: string): Promise<StashListResult> {
+  logger.info(`[StashService] list stashes in ${worktreePath}`);
+
+  // 1. Get stash list with format: index, message, date
+  // Use %aI (strict ISO 8601) instead of %ai to ensure cross-browser compatibility
+  const stashOutput = await execGitCommand(
+    ['stash', 'list', '--format=%gd|||%s|||%aI'],
+    worktreePath
+  );
+
+  if (!stashOutput.trim()) {
+    return {
+      success: true,
+      stashes: [],
+      total: 0,
+    };
+  }
+
+  const stashLines = stashOutput
+    .trim()
+    .split('\n')
+    .filter((l) => l.trim());
+  const stashes: StashEntry[] = [];
+
+  for (const line of stashLines) {
+    const parts = line.split('|||');
+    if (parts.length < 3) continue;
+
+    const refSpec = parts[0].trim(); // e.g., "stash@{0}"
+    const stashMessage = parts[1].trim();
+    const date = parts[2].trim();
+
+    // Extract index from stash@{N}; skip entries that don't match the expected format
+    const indexMatch = refSpec.match(/stash@\{(\d+)\}/);
+    if (!indexMatch) continue;
+    const index = parseInt(indexMatch[1], 10);
+
+    // Extract branch name from message (format: "WIP on branch: hash message" or "On branch: hash message")
+    let branch = '';
+    const branchMatch = stashMessage.match(/^(?:WIP on|On) ([^:]+):/);
+    if (branchMatch) {
+      branch = branchMatch[1];
+    }
+
+    // Get list of files in this stash
+    let files: string[] = [];
+    try {
+      const filesOutput = await execGitCommand(
+        ['stash', 'show', refSpec, '--name-only'],
+        worktreePath
+      );
+      files = filesOutput
+        .trim()
+        .split('\n')
+        .filter((f) => f.trim());
+    } catch {
+      // Ignore errors getting file list
+    }
+
+    stashes.push({
+      index,
+      message: stashMessage,
+      branch,
+      date,
+      files,
+    });
+  }
+
+  return {
+    success: true,
+    stashes,
+    total: stashes.length,
+  };
+}
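+
+/*
+ * Workflow sketch (illustrative): push, inspect, then drop the newest entry.
+ *
+ *   await pushStash(worktreePath, { message: 'WIP: refactor' });
+ *   const { stashes } = await listStash(worktreePath);
+ *   if (stashes.length > 0) {
+ *     await dropStash(worktreePath, stashes[0].index);
+ *   }
+ */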
+
+// ============================================================================
+// Drop Stash
+// ============================================================================
+
+/**
+ * Drop (delete) a stash entry by index.
+ *
+ * @param worktreePath - Absolute path to the git worktree
+ * @param stashIndex - Zero-based stash index (stash@{N})
+ * @returns StashDropResult with drop status
+ */
+export async function dropStash(
+  worktreePath: string,
+  stashIndex: number,
+  events?: EventEmitter
+): Promise<StashDropResult> {
+  const stashRef = `stash@{${stashIndex}}`;
+
+  logger.info(`[StashService] drop ${stashRef} in ${worktreePath}`);
+  events?.emit('stash:start', { worktreePath, stashIndex, stashRef, operation: 'drop' });
+
+  await execGitCommand(['stash', 'drop', stashRef], worktreePath);
+
+  events?.emit('stash:success', { worktreePath, stashIndex, stashRef, operation: 'drop' });
+
+  return {
+    success: true,
+    dropped: true,
+    stashIndex,
+    message: `Stash ${stashRef} dropped successfully`,
+  };
+}
diff --git a/apps/server/src/services/worktree-branch-service.ts b/apps/server/src/services/worktree-branch-service.ts
new file mode 100644
index 000000000..af6bfedc0
--- /dev/null
+++ b/apps/server/src/services/worktree-branch-service.ts
@@ -0,0 +1,401 @@
+/**
+ * WorktreeBranchService - Switch branch operations without HTTP
+ *
+ * Handles branch switching with automatic stash/reapply of local changes.
+ * If there are uncommitted changes, they are stashed before switching and
+ * reapplied after. If the stash pop results in merge conflicts, returns
+ * a special response so the UI can create a conflict resolution task.
+ *
+ * For remote branches (e.g., "origin/feature"), automatically creates a
+ * local tracking branch and checks it out.
+ *
+ * Also fetches the latest remote refs after switching.
+ *
+ * Extracted from the worktree switch-branch route to improve organization
+ * and testability. Follows the same pattern as pull-service.ts and
+ * rebase-service.ts.
+ */
+
+import { createLogger, getErrorMessage } from '@automaker/utils';
+import { execGitCommand } from '../lib/git.js';
+import type { EventEmitter } from '../lib/events.js';
+import { hasAnyChanges, stashChanges, popStash, localBranchExists } from './branch-utils.js';
+
+const logger = createLogger('WorktreeBranchService');
+
+// ============================================================================
+// Types
+// ============================================================================
+
+export interface SwitchBranchResult {
+  success: boolean;
+  error?: string;
+  result?: {
+    previousBranch: string;
+    currentBranch: string;
+    message: string;
+    hasConflicts?: boolean;
+    stashedChanges?: boolean;
+  };
+  /** Set when checkout fails and stash pop produced conflicts during recovery */
+  stashPopConflicts?: boolean;
+  /** Human-readable message when stash pop conflicts occur during error recovery */
+  stashPopConflictMessage?: string;
+}
+
+// ============================================================================
+// Local Helpers
+// ============================================================================
+
+/** Timeout for git fetch operations (30 seconds) */
+const FETCH_TIMEOUT_MS = 30_000;
+
+/**
+ * Fetch latest from all remotes (silently, with timeout).
+ *
+ * A process-level timeout is enforced via an AbortController so that a
+ * slow or unresponsive remote does not block the branch-switch flow
+ * indefinitely. Timeout errors are logged and treated as non-fatal
+ * (the same as network-unavailable errors) so the rest of the workflow
+ * continues normally.
+ */
+async function fetchRemotes(cwd: string): Promise<void> {
+  const controller = new AbortController();
+  const timerId = setTimeout(() => controller.abort(), FETCH_TIMEOUT_MS);
+
+  try {
+    await execGitCommand(['fetch', '--all', '--quiet'], cwd, undefined, controller);
+  } catch (error) {
+    if (error instanceof Error && error.message === 'Process aborted') {
+      // Fetch timed out - log and continue; callers should not be blocked by a slow remote
+      logger.warn(
+        `fetchRemotes timed out after ${FETCH_TIMEOUT_MS}ms - continuing without latest remote refs`
+      );
+    }
+    // Ignore all fetch errors (timeout or otherwise) - we may be offline or the
+    // remote may be temporarily unavailable. The branch switch itself has
+    // already succeeded at this point.
+  } finally {
+    clearTimeout(timerId);
+  }
+}
+
+/**
+ * Parse a remote branch name like "origin/feature-branch" into its parts.
+ * Splits on the first slash so the remote is the segment before the first '/'
+ * and the branch is everything after it (preserving any subsequent slashes).
+ * For example, "origin/feature/my-branch" → { remote: "origin", branch: "feature/my-branch" }.
+ * Returns null if the input contains no slash.
+ */
+function parseRemoteBranch(branchName: string): { remote: string; branch: string } | null {
+  const firstSlash = branchName.indexOf('/');
+  if (firstSlash === -1) return null;
+  return {
+    remote: branchName.substring(0, firstSlash),
+    branch: branchName.substring(firstSlash + 1),
+  };
+}
+
+/**
+ * Check if a branch name refers to a remote branch
+ */
+async function isRemoteBranch(cwd: string, branchName: string): Promise<boolean> {
+  try {
+    const stdout = await execGitCommand(['branch', '-r', '--format=%(refname:short)'], cwd);
+    const remoteBranches = stdout
+      .trim()
+      .split('\n')
+      .map((b) => b.trim().replace(/^['"]|['"]$/g, ''))
+      .filter((b) => b);
+    return remoteBranches.includes(branchName);
+  } catch (err) {
+    logger.error('isRemoteBranch: failed to list remote branches — returning false', {
+      branchName,
+      cwd,
+      error: getErrorMessage(err),
+    });
+    return false;
+  }
+}
+
+// ============================================================================
+// Main Service Function
+// ============================================================================
+
+/**
+ * Perform a full branch switch workflow on the given worktree.
+ *
+ * The workflow:
+ * 1. Get current branch name
+ * 2. Detect remote vs local branch and determine target
+ * 3. Return early if already on target branch
+ * 4. Validate branch existence
+ * 5. Stash local changes if any
+ * 6. Checkout the target branch
+ * 7. Fetch latest from remotes
+ * 8. Reapply stashed changes (detect conflicts)
+ * 9. Handle error recovery (restore stash if checkout fails)
+ *
+ * @param worktreePath - Path to the git worktree
+ * @param branchName - Branch to switch to (can be local or remote like "origin/feature")
+ * @param events - Optional event emitter for lifecycle events
+ * @returns SwitchBranchResult with detailed status information
+ */
+export async function performSwitchBranch(
+  worktreePath: string,
+  branchName: string,
+  events?: EventEmitter
+): Promise<SwitchBranchResult> {
+  // Emit start event
+  events?.emit('switch:start', { worktreePath, branchName });
+
+  // 1. Get current branch
+  const currentBranchOutput = await execGitCommand(
+    ['rev-parse', '--abbrev-ref', 'HEAD'],
+    worktreePath
+  );
+  const previousBranch = currentBranchOutput.trim();
+
+  // 2.
Determine the actual target branch name for checkout + let targetBranch = branchName; + let isRemote = false; + + // Check if this is a remote branch (e.g., "origin/feature-branch") + let parsedRemote: { remote: string; branch: string } | null = null; + if (await isRemoteBranch(worktreePath, branchName)) { + isRemote = true; + parsedRemote = parseRemoteBranch(branchName); + if (parsedRemote) { + targetBranch = parsedRemote.branch; + } else { + events?.emit('switch:error', { + worktreePath, + branchName, + error: `Failed to parse remote branch name '${branchName}'`, + }); + return { + success: false, + error: `Failed to parse remote branch name '${branchName}'`, + }; + } + } + + // 3. Return early if already on the target branch + if (previousBranch === targetBranch) { + events?.emit('switch:done', { + worktreePath, + previousBranch, + currentBranch: targetBranch, + alreadyOnBranch: true, + }); + return { + success: true, + result: { + previousBranch, + currentBranch: targetBranch, + message: `Already on branch '${targetBranch}'`, + }, + }; + } + + // 4. Check if target branch exists as a local branch + if (!isRemote) { + if (!(await localBranchExists(worktreePath, branchName))) { + events?.emit('switch:error', { + worktreePath, + branchName, + error: `Branch '${branchName}' does not exist`, + }); + return { + success: false, + error: `Branch '${branchName}' does not exist`, + }; + } + } + + // 5. Stash local changes if any exist + const hadChanges = await hasAnyChanges(worktreePath, { excludeWorktreePaths: true }); + let didStash = false; + + if (hadChanges) { + events?.emit('switch:stash', { + worktreePath, + previousBranch, + targetBranch, + action: 'push', + }); + const stashMessage = `automaker-branch-switch: ${previousBranch} → ${targetBranch}`; + try { + didStash = await stashChanges(worktreePath, stashMessage, true); + } catch (stashError) { + const stashErrorMsg = getErrorMessage(stashError); + events?.emit('switch:error', { + worktreePath, + branchName, + error: `Failed to stash local changes: ${stashErrorMsg}`, + }); + return { + success: false, + error: `Failed to stash local changes before switching branches: ${stashErrorMsg}`, + }; + } + } + + try { + // 6. Switch to the target branch + events?.emit('switch:checkout', { + worktreePath, + targetBranch, + isRemote, + previousBranch, + }); + + if (isRemote) { + if (!parsedRemote) { + throw new Error(`Failed to parse remote branch name '${branchName}'`); + } + if (await localBranchExists(worktreePath, parsedRemote.branch)) { + // Local branch exists, just checkout + await execGitCommand(['checkout', parsedRemote.branch], worktreePath); + } else { + // Create local tracking branch from remote + await execGitCommand(['checkout', '-b', parsedRemote.branch, branchName], worktreePath); + } + } else { + await execGitCommand(['checkout', targetBranch], worktreePath); + } + + // 7. Fetch latest from remotes after switching + await fetchRemotes(worktreePath); + + // 8. Reapply stashed changes if we stashed earlier + let hasConflicts = false; + let conflictMessage = ''; + let stashReapplied = false; + + if (didStash) { + events?.emit('switch:pop', { + worktreePath, + targetBranch, + action: 'pop', + }); + + const popResult = await popStash(worktreePath); + hasConflicts = popResult.hasConflicts; + if (popResult.hasConflicts) { + conflictMessage = `Switched to branch '${targetBranch}' but merge conflicts occurred when reapplying your local changes. 
Please resolve the conflicts.`; + } else if (!popResult.success) { + // Stash pop failed for a non-conflict reason - the stash is still there + conflictMessage = `Switched to branch '${targetBranch}' but failed to reapply stashed changes: ${popResult.error}. Your changes are still in the stash.`; + } else { + stashReapplied = true; + } + } + + if (hasConflicts) { + events?.emit('switch:done', { + worktreePath, + previousBranch, + currentBranch: targetBranch, + hasConflicts: true, + }); + return { + success: true, + result: { + previousBranch, + currentBranch: targetBranch, + message: conflictMessage, + hasConflicts: true, + stashedChanges: true, + }, + }; + } else if (didStash && !stashReapplied) { + // Stash pop failed for a non-conflict reason — stash is still present + events?.emit('switch:done', { + worktreePath, + previousBranch, + currentBranch: targetBranch, + stashPopFailed: true, + }); + return { + success: true, + result: { + previousBranch, + currentBranch: targetBranch, + message: conflictMessage, + hasConflicts: false, + stashedChanges: true, + }, + }; + } else { + const stashNote = stashReapplied ? ' (local changes stashed and reapplied)' : ''; + events?.emit('switch:done', { + worktreePath, + previousBranch, + currentBranch: targetBranch, + stashReapplied, + }); + return { + success: true, + result: { + previousBranch, + currentBranch: targetBranch, + message: `Switched to branch '${targetBranch}'${stashNote}`, + hasConflicts: false, + stashedChanges: stashReapplied, + }, + }; + } + } catch (checkoutError) { + // 9. If checkout failed and we stashed, try to restore the stash + if (didStash) { + const popResult = await popStash(worktreePath); + if (popResult.hasConflicts) { + // Stash pop itself produced merge conflicts — the working tree is now in a + // conflicted state even though the checkout failed. Surface this clearly so + // the caller can prompt the user (or AI) to resolve conflicts rather than + // simply retrying the branch switch. + const checkoutErrorMsg = getErrorMessage(checkoutError); + events?.emit('switch:error', { + worktreePath, + branchName, + error: checkoutErrorMsg, + stashPopConflicts: true, + }); + return { + success: false, + error: checkoutErrorMsg, + stashPopConflicts: true, + stashPopConflictMessage: + 'Stash pop resulted in conflicts: your stashed changes were partially reapplied ' + + 'but produced merge conflicts. Please resolve the conflicts before retrying the branch switch.', + }; + } else if (!popResult.success) { + // Stash pop failed for a non-conflict reason; the stash entry is still intact. + // Include this detail alongside the original checkout error. + const checkoutErrorMsg = getErrorMessage(checkoutError); + const combinedMessage = + `${checkoutErrorMsg}. Additionally, restoring your stashed changes failed: ` + + `${popResult.error ?? 
'unknown error'} — your changes are still saved in the stash.`;
+        events?.emit('switch:error', {
+          worktreePath,
+          branchName,
+          error: combinedMessage,
+        });
+        return {
+          success: false,
+          error: combinedMessage,
+          stashPopConflicts: false,
+        };
+      }
+      // popResult.success === true: stash was cleanly restored, re-throw the checkout error
+    }
+    const checkoutErrorMsg = getErrorMessage(checkoutError);
+    events?.emit('switch:error', {
+      worktreePath,
+      branchName,
+      error: checkoutErrorMsg,
+    });
+    throw checkoutError;
+  }
+}
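+
+/*
+ * Usage sketch (illustrative): switching to a remote branch and reacting to
+ * the two stash-related outcomes the result can carry.
+ *
+ *   const res = await performSwitchBranch(worktreePath, 'origin/feature/login');
+ *   if (!res.success && res.stashPopConflicts) {
+ *     // Checkout failed AND restoring the stash produced conflicts.
+ *   } else if (res.success && res.result?.hasConflicts) {
+ *     // Switched, but reapplying local changes produced conflicts.
+ *   }
+ */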
diff --git a/apps/server/src/services/worktree-service.ts b/apps/server/src/services/worktree-service.ts
new file mode 100644
index 000000000..8490c1082
--- /dev/null
+++ b/apps/server/src/services/worktree-service.ts
@@ -0,0 +1,127 @@
+/**
+ * WorktreeService - File-system operations for git worktrees
+ *
+ * Extracted from the worktree create route to centralize file-copy logic,
+ * surface errors through an EventEmitter instead of swallowing them, and
+ * make the behavior testable in isolation.
+ */
+
+import path from 'path';
+import fs from 'fs/promises';
+import type { EventEmitter } from '../lib/events.js';
+import type { SettingsService } from './settings-service.js';
+
+/**
+ * Error thrown when one or more file copy operations fail during
+ * `copyConfiguredFiles`. The caller can inspect `failures` for details.
+ */
+export class CopyFilesError extends Error {
+  constructor(public readonly failures: Array<{ path: string; error: string }>) {
+    super(`Failed to copy ${failures.length} file(s): ${failures.map((f) => f.path).join(', ')}`);
+    this.name = 'CopyFilesError';
+  }
+}
+
+/**
+ * WorktreeService encapsulates file-system operations that run against
+ * git worktrees (e.g. copying project-configured files into a new worktree).
+ *
+ * All operations emit typed events so the frontend can stream progress to the
+ * user. Errors are collected and surfaced to the caller rather than silently
+ * swallowed.
+ */
+export class WorktreeService {
+  /**
+   * Copy files / directories listed in the project's `worktreeCopyFiles`
+   * setting from `projectPath` into `worktreePath`.
+   *
+   * Security: paths containing `..` segments or absolute paths are rejected.
+   *
+   * Events emitted via `emitter`:
+   * - `worktree:copy-files:copied` – a file or directory was successfully copied
+   * - `worktree:copy-files:skipped` – a source file was not found (ENOENT)
+   * - `worktree:copy-files:failed` – an unexpected error occurred copying a file
+   *
+   * @throws {CopyFilesError} if any copy operation fails for a reason other
+   *   than ENOENT (missing source file).
+   */
+  async copyConfiguredFiles(
+    projectPath: string,
+    worktreePath: string,
+    settingsService: SettingsService | undefined,
+    emitter: EventEmitter
+  ): Promise<void> {
+    if (!settingsService) return;
+
+    const projectSettings = await settingsService.getProjectSettings(projectPath);
+    const copyFiles = projectSettings.worktreeCopyFiles;
+
+    if (!copyFiles || copyFiles.length === 0) return;
+
+    const failures: Array<{ path: string; error: string }> = [];
+
+    for (const relativePath of copyFiles) {
+      // Security: prevent path traversal
+      const normalized = path.normalize(relativePath);
+      if (normalized === '' || normalized === '.') {
+        const reason = 'Suspicious path rejected (empty or current-dir)';
+        emitter.emit('worktree:copy-files:skipped', {
+          path: relativePath,
+          reason,
+        });
+        continue;
+      }
+      if (normalized.startsWith('..') || path.isAbsolute(normalized)) {
+        const reason = 'Suspicious path rejected (traversal or absolute)';
+        emitter.emit('worktree:copy-files:skipped', {
+          path: relativePath,
+          reason,
+        });
+        continue;
+      }
+
+      const sourcePath = path.join(projectPath, normalized);
+      const destPath = path.join(worktreePath, normalized);
+
+      try {
+        // Check if source exists
+        const stat = await fs.stat(sourcePath);
+
+        // Ensure destination directory exists
+        const destDir = path.dirname(destPath);
+        await fs.mkdir(destDir, { recursive: true });
+
+        if (stat.isDirectory()) {
+          // Recursively copy directory
+          await fs.cp(sourcePath, destPath, { recursive: true, force: true });
+        } else {
+          // Copy single file
+          await fs.copyFile(sourcePath, destPath);
+        }
+
+        emitter.emit('worktree:copy-files:copied', {
+          path: normalized,
+          type: stat.isDirectory() ? 'directory' : 'file',
+        });
+      } catch (err) {
+        if ((err as NodeJS.ErrnoException).code === 'ENOENT') {
+          emitter.emit('worktree:copy-files:skipped', {
+            path: normalized,
+            reason: 'File not found in project root',
+          });
+        } else {
+          const errorMessage = err instanceof Error ?
err.message : String(err); + emitter.emit('worktree:copy-files:failed', { + path: normalized, + error: errorMessage, + }); + failures.push({ path: normalized, error: errorMessage }); + } + } + } + + if (failures.length > 0) { + throw new CopyFilesError(failures); + } + } +} diff --git a/apps/server/src/services/zai-usage-service.ts b/apps/server/src/services/zai-usage-service.ts new file mode 100644 index 000000000..5a9d4dd86 --- /dev/null +++ b/apps/server/src/services/zai-usage-service.ts @@ -0,0 +1,582 @@ +import { createLogger } from '@automaker/utils'; +import { createEventEmitter } from '../lib/events.js'; +import type { SettingsService } from './settings-service.js'; + +const logger = createLogger('ZaiUsage'); + +/** Default timeout for fetch requests in milliseconds */ +const FETCH_TIMEOUT_MS = 10_000; + +/** + * z.ai quota limit entry from the API + */ +export interface ZaiQuotaLimit { + limitType: 'TOKENS_LIMIT' | 'TIME_LIMIT' | string; + limit: number; + used: number; + remaining: number; + usedPercent: number; + nextResetTime: number; // epoch milliseconds +} + +/** + * z.ai usage details by model (for MCP tracking) + */ +export interface ZaiUsageDetail { + modelId: string; + used: number; + limit: number; +} + +/** + * z.ai plan types + */ +export type ZaiPlanType = 'free' | 'basic' | 'standard' | 'professional' | 'enterprise' | 'unknown'; + +/** + * z.ai usage data structure + */ +export interface ZaiUsageData { + quotaLimits: { + tokens?: ZaiQuotaLimit; + mcp?: ZaiQuotaLimit; + planType: ZaiPlanType; + } | null; + usageDetails?: ZaiUsageDetail[]; + lastUpdated: string; +} + +/** + * z.ai API limit entry - supports multiple field naming conventions + */ +interface ZaiApiLimit { + // Type field (z.ai uses 'type', others might use 'limitType') + type?: string; + limitType?: string; + // Limit value (z.ai uses 'usage' for total limit, others might use 'limit') + usage?: number; + limit?: number; + // Used value (z.ai uses 'currentValue', others might use 'used') + currentValue?: number; + used?: number; + // Remaining + remaining?: number; + // Percentage (z.ai uses 'percentage', others might use 'usedPercent') + percentage?: number; + usedPercent?: number; + // Reset time + nextResetTime?: number; + // Additional z.ai fields + unit?: number; + number?: number; + usageDetails?: Array<{ modelCode: string; usage: number }>; +} + +/** + * z.ai API response structure + * Flexible to handle various possible response formats + */ +interface ZaiApiResponse { + code?: number; + success?: boolean; + data?: { + limits?: ZaiApiLimit[]; + // Alternative: limits might be an object instead of array + tokensLimit?: { + limit: number; + used: number; + remaining?: number; + usedPercent?: number; + nextResetTime?: number; + }; + timeLimit?: { + limit: number; + used: number; + remaining?: number; + usedPercent?: number; + nextResetTime?: number; + }; + // Quota-style fields + quota?: number; + quotaUsed?: number; + quotaRemaining?: number; + planName?: string; + plan?: string; + plan_type?: string; + packageName?: string; + usageDetails?: Array<{ + modelId: string; + used: number; + limit: number; + }>; + }; + // Root-level alternatives + limits?: ZaiApiLimit[]; + quota?: number; + quotaUsed?: number; + message?: string; +} + +/** Result from configure method */ +interface ConfigureResult { + success: boolean; + message: string; + isAvailable: boolean; +} + +/** Result from verifyApiKey method */ +interface VerifyResult { + success: boolean; + authenticated: boolean; + message?: string; + 
error?: string;
+}
+
+/**
+ * z.ai Usage Service
+ *
+ * Fetches usage quota data from the z.ai API.
+ * Uses API token authentication stored via environment variable or settings.
+ */
+export class ZaiUsageService {
+  private apiToken: string | null = null;
+  private apiHost: string = 'https://api.z.ai';
+
+  /**
+   * Set the API token for authentication
+   */
+  setApiToken(token: string): void {
+    this.apiToken = token;
+    logger.info('[setApiToken] API token configured');
+  }
+
+  /**
+   * Get the current API token
+   */
+  getApiToken(): string | null {
+    // Priority: 1. Instance token, 2. Environment variable
+    return this.apiToken || process.env.Z_AI_API_KEY || null;
+  }
+
+  /**
+   * Set the API host (for BigModel CN region support)
+   */
+  setApiHost(host: string): void {
+    this.apiHost = host.startsWith('http') ? host : `https://${host}`;
+    logger.info(`[setApiHost] API host set to: ${this.apiHost}`);
+  }
+
+  /**
+   * Get the API host
+   */
+  getApiHost(): string {
+    // Priority: 1. Instance host, 2. Z_AI_API_HOST env, 3. Default
+    if (process.env.Z_AI_API_HOST) {
+      const envHost = process.env.Z_AI_API_HOST.trim();
+      return envHost.startsWith('http') ? envHost : `https://${envHost}`;
+    }
+    return this.apiHost;
+  }
+
+  /**
+   * Check if z.ai API is available (has token configured)
+   */
+  isAvailable(): boolean {
+    const token = this.getApiToken();
+    return Boolean(token && token.length > 0);
+  }
+
+  /**
+   * Configure z.ai API token and host.
+   * Persists the token via settingsService and updates in-memory state.
+   */
+  async configure(
+    options: { apiToken?: string; apiHost?: string },
+    settingsService: SettingsService
+  ): Promise<ConfigureResult> {
+    const emitter = createEventEmitter();
+
+    if (options.apiToken !== undefined) {
+      // Set in-memory token
+      this.setApiToken(options.apiToken || '');
+
+      // Persist to credentials
+      try {
+        await settingsService.updateCredentials({
+          apiKeys: { zai: options.apiToken || '' },
+        } as Parameters<SettingsService['updateCredentials']>[0]);
+        logger.info('[configure] Saved z.ai API key to credentials');
+      } catch (persistError) {
+        logger.error('[configure] Failed to persist z.ai API key:', persistError);
+      }
+    }
+
+    if (options.apiHost) {
+      this.setApiHost(options.apiHost);
+    }
+
+    const result: ConfigureResult = {
+      success: true,
+      message: 'z.ai configuration updated',
+      isAvailable: this.isAvailable(),
+    };
+
+    emitter.emit('notification:created', {
+      type: 'zai.configured',
+      success: result.success,
+      isAvailable: result.isAvailable,
+    });
+
+    return result;
+  }
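+
+  /*
+   * Usage sketch (illustrative; `settingsService` is supplied by the caller):
+   *
+   *   const zai = new ZaiUsageService();
+   *   await zai.configure({ apiToken: process.env.Z_AI_API_KEY }, settingsService);
+   *   if (zai.isAvailable()) {
+   *     const usage = await zai.fetchUsageData();
+   *     console.log(usage.quotaLimits?.tokens?.usedPercent);
+   *   }
+   */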
+
+  /**
+   * Verify an API key without storing it.
+   * Makes a test request to the z.ai quota URL with the given key.
+   */
+  async verifyApiKey(apiKey: string | undefined): Promise<VerifyResult> {
+    const emitter = createEventEmitter();
+
+    if (!apiKey || typeof apiKey !== 'string' || apiKey.trim().length === 0) {
+      return {
+        success: false,
+        authenticated: false,
+        error: 'Please provide an API key to test.',
+      };
+    }
+
+    const quotaUrl =
+      process.env.Z_AI_QUOTA_URL || `${this.getApiHost()}/api/monitor/usage/quota/limit`;
+
+    logger.info(`[verify] Testing API key against: ${quotaUrl}`);
+
+    try {
+      const response = await fetch(quotaUrl, {
+        method: 'GET',
+        headers: {
+          Authorization: `Bearer ${apiKey.trim()}`,
+          Accept: 'application/json',
+        },
+        signal: AbortSignal.timeout(FETCH_TIMEOUT_MS),
+      });
+
+      let result: VerifyResult;
+
+      if (response.ok) {
+        result = {
+          success: true,
+          authenticated: true,
+          message: 'Connection successful! z.ai API responded.',
+        };
+      } else if (response.status === 401 || response.status === 403) {
+        result = {
+          success: false,
+          authenticated: false,
+          error: 'Invalid API key. Please check your key and try again.',
+        };
+      } else {
+        result = {
+          success: false,
+          authenticated: false,
+          error: `API request failed: ${response.status} ${response.statusText}`,
+        };
+      }
+
+      emitter.emit('notification:created', {
+        type: 'zai.verify.result',
+        success: result.success,
+        authenticated: result.authenticated,
+      });
+
+      return result;
+    } catch (error) {
+      // Handle abort/timeout errors specifically
+      if (error instanceof Error && error.name === 'AbortError') {
+        const result: VerifyResult = {
+          success: false,
+          authenticated: false,
+          error: 'Request timed out. The z.ai API did not respond in time.',
+        };
+        emitter.emit('notification:created', {
+          type: 'zai.verify.result',
+          success: false,
+          error: 'timeout',
+        });
+        return result;
+      }
+
+      const message = error instanceof Error ? error.message : 'Unknown error';
+      logger.error('Error verifying z.ai API key:', error);
+
+      emitter.emit('notification:created', {
+        type: 'zai.verify.result',
+        success: false,
+        error: message,
+      });
+
+      return {
+        success: false,
+        authenticated: false,
+        error: `Network error: ${message}`,
+      };
+    }
+  }
+
+  /**
+   * Fetch usage data from z.ai API
+   */
+  async fetchUsageData(): Promise<ZaiUsageData> {
+    logger.info('[fetchUsageData] Starting...');
+    const emitter = createEventEmitter();
+
+    emitter.emit('notification:created', { type: 'zai.usage.start' });
+
+    const token = this.getApiToken();
+    if (!token) {
+      logger.error('[fetchUsageData] No API token configured');
+      const error = new Error(
+        'z.ai API token not configured. Set Z_AI_API_KEY environment variable.'
+      );
+      emitter.emit('notification:created', {
+        type: 'zai.usage.error',
+        error: error.message,
+      });
+      throw error;
+    }
+
+    const quotaUrl =
+      process.env.Z_AI_QUOTA_URL || `${this.getApiHost()}/api/monitor/usage/quota/limit`;
+
+    logger.info(`[fetchUsageData] Fetching from: ${quotaUrl}`);
+
+    try {
+      const controller = new AbortController();
+      const timeoutId = setTimeout(() => controller.abort(), FETCH_TIMEOUT_MS);
+
+      try {
+        const response = await fetch(quotaUrl, {
+          method: 'GET',
+          headers: {
+            Authorization: `Bearer ${token}`,
+            Accept: 'application/json',
+          },
+          signal: controller.signal,
+        });
+
+        clearTimeout(timeoutId);
+
+        if (!response.ok) {
+          logger.error(`[fetchUsageData] HTTP ${response.status}: ${response.statusText}`);
+          throw new Error(`z.ai API request failed: ${response.status} ${response.statusText}`);
+        }
+
+        const data = (await response.json()) as unknown as ZaiApiResponse;
+        logger.info('[fetchUsageData] Response received:', JSON.stringify(data, null, 2));
+
+        const result = this.parseApiResponse(data);
+
+        emitter.emit('notification:created', {
+          type: 'zai.usage.success',
+          data: result,
+        });
+
+        return result;
+      } finally {
+        clearTimeout(timeoutId);
+      }
+    } catch (error) {
+      // Handle abort/timeout errors
+      if (error instanceof Error && error.name === 'AbortError') {
+        const timeoutError = new Error(`z.ai API request timed out after ${FETCH_TIMEOUT_MS}ms`);
+        emitter.emit('notification:created', {
+          type: 'zai.usage.error',
+          error: timeoutError.message,
+        });
+        throw timeoutError;
+      }
+
+      if (error instanceof Error && error.message.includes('z.ai API')) {
+        emitter.emit('notification:created', {
+          type: 'zai.usage.error',
+          error: error.message,
+        });
+        throw error;
+      }
+
+      logger.error('[fetchUsageData] Failed to fetch:', error);
+
const fetchError = new Error( + `Failed to fetch z.ai usage data: ${error instanceof Error ? error.message : String(error)}` + ); + emitter.emit('notification:created', { + type: 'zai.usage.error', + error: fetchError.message, + }); + throw fetchError; + } + } + + /** + * Parse the z.ai API response into our data structure + * Handles multiple possible response formats from z.ai API + */ + private parseApiResponse(response: ZaiApiResponse): ZaiUsageData { + const result: ZaiUsageData = { + quotaLimits: { + planType: 'unknown', + }, + lastUpdated: new Date().toISOString(), + }; + + logger.info('[parseApiResponse] Raw response:', JSON.stringify(response, null, 2)); + + // Try to find data - could be in response.data or at root level + let data = response.data; + + // Check for root-level limits array + if (!data && response.limits) { + logger.info('[parseApiResponse] Found limits at root level'); + data = { limits: response.limits }; + } + + // Check for root-level quota fields + if (!data && (response.quota !== undefined || response.quotaUsed !== undefined)) { + logger.info('[parseApiResponse] Found quota fields at root level'); + data = { quota: response.quota, quotaUsed: response.quotaUsed }; + } + + if (!data) { + logger.warn('[parseApiResponse] No data found in response'); + return result; + } + + logger.info('[parseApiResponse] Data keys:', Object.keys(data)); + + // Parse plan type from various possible field names + const planName = data.planName || data.plan || data.plan_type || data.packageName; + + if (planName) { + const normalizedPlan = String(planName).toLowerCase(); + if (['free', 'basic', 'standard', 'professional', 'enterprise'].includes(normalizedPlan)) { + result.quotaLimits!.planType = normalizedPlan as ZaiPlanType; + } + logger.info(`[parseApiResponse] Plan type: ${result.quotaLimits!.planType}`); + } + + // Parse quota limits from array format + if (data.limits && Array.isArray(data.limits)) { + logger.info('[parseApiResponse] Parsing limits array with', data.limits.length, 'entries'); + for (const limit of data.limits) { + logger.info('[parseApiResponse] Processing limit:', JSON.stringify(limit)); + + // Handle different field naming conventions from z.ai API: + // - 'usage' is the total limit, 'currentValue' is the used amount + // - OR 'limit' is the total limit, 'used' is the used amount + const limitVal = limit.usage ?? limit.limit ?? 0; + const usedVal = limit.currentValue ?? limit.used ?? 0; + + // Get percentage from 'percentage' or 'usedPercent' field, or calculate it + const apiPercent = limit.percentage ?? limit.usedPercent; + const calculatedPercent = limitVal > 0 ? (usedVal / limitVal) * 100 : 0; + const usedPercent = + apiPercent !== undefined && apiPercent > 0 ? apiPercent : calculatedPercent; + + // Get limit type from 'type' or 'limitType' field + const rawLimitType = limit.type ?? limit.limitType ?? ''; + + const quotaLimit: ZaiQuotaLimit = { + limitType: rawLimitType || 'TOKENS_LIMIT', + limit: limitVal, + used: usedVal, + remaining: limit.remaining ?? limitVal - usedVal, + usedPercent, + nextResetTime: limit.nextResetTime ?? 
0, + }; + + // Match various possible limitType values + const limitType = String(rawLimitType).toUpperCase(); + if (limitType.includes('TOKEN') || limitType === 'TOKENS_LIMIT') { + result.quotaLimits!.tokens = quotaLimit; + logger.info( + `[parseApiResponse] Tokens: ${quotaLimit.used}/${quotaLimit.limit} (${quotaLimit.usedPercent.toFixed(1)}%)` + ); + } else if (limitType.includes('TIME') || limitType === 'TIME_LIMIT') { + result.quotaLimits!.mcp = quotaLimit; + logger.info( + `[parseApiResponse] MCP: ${quotaLimit.used}/${quotaLimit.limit} (${quotaLimit.usedPercent.toFixed(1)}%)` + ); + } else { + // If limitType is unknown, use as tokens by default (first one) + if (!result.quotaLimits!.tokens) { + quotaLimit.limitType = 'TOKENS_LIMIT'; + result.quotaLimits!.tokens = quotaLimit; + logger.info(`[parseApiResponse] Unknown limit type '${rawLimitType}', using as tokens`); + } + } + } + } + + // Parse alternative object-style limits + if (data.tokensLimit) { + const t = data.tokensLimit; + const limitVal = t.limit ?? 0; + const usedVal = t.used ?? 0; + const calculatedPercent = limitVal > 0 ? (usedVal / limitVal) * 100 : 0; + result.quotaLimits!.tokens = { + limitType: 'TOKENS_LIMIT', + limit: limitVal, + used: usedVal, + remaining: t.remaining ?? limitVal - usedVal, + usedPercent: + t.usedPercent !== undefined && t.usedPercent > 0 ? t.usedPercent : calculatedPercent, + nextResetTime: t.nextResetTime ?? 0, + }; + logger.info('[parseApiResponse] Parsed tokensLimit object'); + } + + if (data.timeLimit) { + const t = data.timeLimit; + const limitVal = t.limit ?? 0; + const usedVal = t.used ?? 0; + const calculatedPercent = limitVal > 0 ? (usedVal / limitVal) * 100 : 0; + result.quotaLimits!.mcp = { + limitType: 'TIME_LIMIT', + limit: limitVal, + used: usedVal, + remaining: t.remaining ?? limitVal - usedVal, + usedPercent: + t.usedPercent !== undefined && t.usedPercent > 0 ? t.usedPercent : calculatedPercent, + nextResetTime: t.nextResetTime ?? 0, + }; + logger.info('[parseApiResponse] Parsed timeLimit object'); + } + + // Parse simple quota/quotaUsed format as tokens + if (data.quota !== undefined && data.quotaUsed !== undefined && !result.quotaLimits!.tokens) { + const limitVal = Number(data.quota) || 0; + const usedVal = Number(data.quotaUsed) || 0; + result.quotaLimits!.tokens = { + limitType: 'TOKENS_LIMIT', + limit: limitVal, + used: usedVal, + remaining: + data.quotaRemaining !== undefined ? Number(data.quotaRemaining) : limitVal - usedVal, + usedPercent: limitVal > 0 ? 
(usedVal / limitVal) * 100 : 0,
+        nextResetTime: 0,
+      };
+      logger.info('[parseApiResponse] Parsed simple quota format');
+    }
+
+    // Parse usage details (MCP tracking)
+    if (data.usageDetails && Array.isArray(data.usageDetails)) {
+      result.usageDetails = data.usageDetails.map((detail) => ({
+        modelId: detail.modelId,
+        used: detail.used,
+        limit: detail.limit,
+      }));
+      logger.info(`[parseApiResponse] Usage details for ${result.usageDetails.length} models`);
+    }
+
+    logger.info('[parseApiResponse] Final result:', JSON.stringify(result, null, 2));
+    return result;
+  }
+}
diff --git a/apps/server/src/tests/cli-integration.test.ts b/apps/server/src/tests/cli-integration.test.ts
index 7e84eb54b..695e8ea06 100644
--- a/apps/server/src/tests/cli-integration.test.ts
+++ b/apps/server/src/tests/cli-integration.test.ts
@@ -5,7 +5,7 @@
  * across all providers (Claude, Codex, Cursor)
  */
 
-import { describe, it, expect, beforeEach, afterEach } from 'vitest';
+import { describe, it, expect } from 'vitest';
 import {
   detectCli,
   detectAllCLis,
@@ -64,7 +64,7 @@
   });
 
   it('should handle unsupported platform', () => {
-    const instructions = getInstallInstructions('claude', 'unknown-platform' as any);
+    const instructions = getInstallInstructions('claude', 'unknown-platform' as NodeJS.Platform);
     expect(instructions).toContain('No installation instructions available');
   });
 });
@@ -270,7 +270,7 @@
     expect(results).toHaveProperty('cursor');
 
     // Should provide error information for failures
-    Object.entries(results).forEach(([provider, result]) => {
+    Object.entries(results).forEach(([_provider, result]) => {
       if (!result.detected && result.issues.length > 0) {
         expect(result.issues.length).toBeGreaterThan(0);
         expect(result.issues[0]).toBeTruthy();
@@ -339,15 +339,17 @@
 // Edge Cases
 describe('Edge Cases', () => {
   it('should handle empty CLI names', async () => {
-    await expect(detectCli('' as any)).rejects.toThrow();
+    await expect(detectCli('' as unknown as Parameters<typeof detectCli>[0])).rejects.toThrow();
   });
 
   it('should handle null CLI names', async () => {
-    await expect(detectCli(null as any)).rejects.toThrow();
+    await expect(detectCli(null as unknown as Parameters<typeof detectCli>[0])).rejects.toThrow();
   });
 
   it('should handle undefined CLI names', async () => {
-    await expect(detectCli(undefined as any)).rejects.toThrow();
+    await expect(
+      detectCli(undefined as unknown as Parameters<typeof detectCli>[0])
+    ).rejects.toThrow();
   });
 
   it('should handle malformed error objects', () => {
diff --git a/apps/server/test/git-log-parser.test.js b/apps/server/test/git-log-parser.test.js
new file mode 100644
index 000000000..c81c49584
--- /dev/null
+++ b/apps/server/test/git-log-parser.test.js
@@ -0,0 +1,196 @@
+import { describe, it, expect } from 'vitest';
+import { parseGitLogOutput } from '../src/lib/git-log-parser.js';
+
+// Mock data: fields within each commit are newline-separated,
+// commits are NUL-separated (matching the parser contract).
+const mockGitOutput = [ + 'a1b2c3d4e5f67890abcd1234567890abcd1234\na1b2c3\nJohn Doe\njohn@example.com\n2023-01-01T12:00:00Z\nInitial commit\nThis is the commit body', + 'e5f6g7h8i9j0klmnoprstuv\ne5f6g7\nJane Smith\njane@example.com\n2023-01-02T12:00:00Z\nFix bug\nFixed the bug with ---END--- in the message', + 'q1w2e3r4t5y6u7i8o9p0asdfghjkl\nq1w2e3\nBob Johnson\nbob@example.com\n2023-01-03T12:00:00Z\nAnother commit\nEmpty body', +].join('\0'); + +// Mock data where commit bodies contain ---END--- markers +const mockOutputWithEndMarker = [ + 'a1b2c3d4e5f67890abcd1234567890abcd1234\na1b2c3\nJohn Doe\njohn@example.com\n2023-01-01T12:00:00Z\nInitial commit\nThis is the commit body\n---END--- is in this message', + 'e5f6g7h8i9j0klmnoprstuv\ne5f6g7\nJane Smith\njane@example.com\n2023-01-02T12:00:00Z\nFix bug\nFixed the bug with ---END--- in the message', + 'q1w2e3r4t5y6u7i8o9p0asdfghjkl\nq1w2e3\nBob Johnson\nbob@example.com\n2023-01-03T12:00:00Z\nAnother commit\nEmpty body', +].join('\0'); + +// Single-commit mock: fields newline-separated, no trailing NUL needed +const singleCommitOutput = + 'a1b2c3d4e5f67890abcd1234567890abcd1234\na1b2c3\nJohn Doe\njohn@example.com\n2023-01-01T12:00:00Z\nSingle commit\nSingle commit body'; + +describe('parseGitLogOutput', () => { + describe('normal parsing (three commits)', () => { + it('returns the correct number of commits', () => { + const commits = parseGitLogOutput(mockGitOutput); + expect(commits.length).toBe(3); + }); + + it('parses the first commit fields correctly', () => { + const commits = parseGitLogOutput(mockGitOutput); + expect(commits[0].hash).toBe('a1b2c3d4e5f67890abcd1234567890abcd1234'); + expect(commits[0].shortHash).toBe('a1b2c3'); + expect(commits[0].author).toBe('John Doe'); + expect(commits[0].authorEmail).toBe('john@example.com'); + expect(commits[0].date).toBe('2023-01-01T12:00:00Z'); + expect(commits[0].subject).toBe('Initial commit'); + expect(commits[0].body).toBe('This is the commit body'); + }); + + it('parses the second commit fields correctly', () => { + const commits = parseGitLogOutput(mockGitOutput); + expect(commits[1].hash).toBe('e5f6g7h8i9j0klmnoprstuv'); + expect(commits[1].shortHash).toBe('e5f6g7'); + expect(commits[1].author).toBe('Jane Smith'); + expect(commits[1].subject).toBe('Fix bug'); + expect(commits[1].body).toMatch(/---END---/); + }); + + it('parses the third commit fields correctly', () => { + const commits = parseGitLogOutput(mockGitOutput); + expect(commits[2].hash).toBe('q1w2e3r4t5y6u7i8o9p0asdfghjkl'); + expect(commits[2].shortHash).toBe('q1w2e3'); + expect(commits[2].author).toBe('Bob Johnson'); + expect(commits[2].subject).toBe('Another commit'); + expect(commits[2].body).toBe('Empty body'); + }); + }); + + describe('parsing with ---END--- in commit messages', () => { + it('returns the correct number of commits', () => { + const commits = parseGitLogOutput(mockOutputWithEndMarker); + expect(commits.length).toBe(3); + }); + + it('preserves ---END--- text in the body of the first commit', () => { + const commits = parseGitLogOutput(mockOutputWithEndMarker); + expect(commits[0].subject).toBe('Initial commit'); + expect(commits[0].body).toMatch(/---END---/); + }); + + it('preserves ---END--- text in the body of the second commit', () => { + const commits = parseGitLogOutput(mockOutputWithEndMarker); + expect(commits[1].subject).toBe('Fix bug'); + expect(commits[1].body).toMatch(/---END---/); + }); + + it('parses the third commit without ---END--- interference', () => { + const commits = 
parseGitLogOutput(mockOutputWithEndMarker); + expect(commits[2].subject).toBe('Another commit'); + expect(commits[2].body).toBe('Empty body'); + }); + }); + + describe('empty output', () => { + it('returns an empty array for an empty string', () => { + const commits = parseGitLogOutput(''); + expect(commits).toEqual([]); + expect(commits.length).toBe(0); + }); + }); + + describe('single-commit output', () => { + it('returns exactly one commit', () => { + const commits = parseGitLogOutput(singleCommitOutput); + expect(commits.length).toBe(1); + }); + + it('parses the single commit fields correctly', () => { + const commits = parseGitLogOutput(singleCommitOutput); + expect(commits[0].hash).toBe('a1b2c3d4e5f67890abcd1234567890abcd1234'); + expect(commits[0].shortHash).toBe('a1b2c3'); + expect(commits[0].author).toBe('John Doe'); + expect(commits[0].authorEmail).toBe('john@example.com'); + expect(commits[0].date).toBe('2023-01-01T12:00:00Z'); + expect(commits[0].subject).toBe('Single commit'); + expect(commits[0].body).toBe('Single commit body'); + }); + }); + + describe('multi-line commit body', () => { + // Test vector from test-proper-nul-format.js: commit with a 3-line body + const multiLineBodyOutput = + [ + 'abc123\nabc1\nJohn Doe\njohn@example.com\n2023-01-01T12:00:00Z\nInitial commit\nThis is a normal commit body', + 'def456\ndef4\nJane Smith\njane@example.com\n2023-01-02T12:00:00Z\nFix bug\nFixed the bug with ---END--- in this message', + 'ghi789\nghi7\nBob Johnson\nbob@example.com\n2023-01-03T12:00:00Z\nAnother commit\nThis body has multiple lines\nSecond line\nThird line', + ].join('\0') + '\0'; + + it('returns 3 commits', () => { + const commits = parseGitLogOutput(multiLineBodyOutput); + expect(commits.length).toBe(3); + }); + + it('parses the first commit correctly', () => { + const commits = parseGitLogOutput(multiLineBodyOutput); + expect(commits[0].hash).toBe('abc123'); + expect(commits[0].shortHash).toBe('abc1'); + expect(commits[0].author).toBe('John Doe'); + expect(commits[0].authorEmail).toBe('john@example.com'); + expect(commits[0].date).toBe('2023-01-01T12:00:00Z'); + expect(commits[0].subject).toBe('Initial commit'); + expect(commits[0].body).toBe('This is a normal commit body'); + }); + + it('parses the second commit with ---END--- in body correctly', () => { + const commits = parseGitLogOutput(multiLineBodyOutput); + expect(commits[1].hash).toBe('def456'); + expect(commits[1].shortHash).toBe('def4'); + expect(commits[1].author).toBe('Jane Smith'); + expect(commits[1].subject).toBe('Fix bug'); + expect(commits[1].body).toContain('---END---'); + }); + + it('parses the third commit with a multi-line body correctly', () => { + const commits = parseGitLogOutput(multiLineBodyOutput); + expect(commits[2].hash).toBe('ghi789'); + expect(commits[2].shortHash).toBe('ghi7'); + expect(commits[2].author).toBe('Bob Johnson'); + expect(commits[2].subject).toBe('Another commit'); + expect(commits[2].body).toBe('This body has multiple lines\nSecond line\nThird line'); + }); + }); + + describe('commit with empty body (trailing blank lines after subject)', () => { + // Test vector from test-proper-nul-format.js: empty body commit + const emptyBodyOutput = + 'empty123\nempty1\nAlice Brown\nalice@example.com\n2023-01-04T12:00:00Z\nEmpty body commit\n\n\0'; + + it('returns 1 commit', () => { + const commits = parseGitLogOutput(emptyBodyOutput); + expect(commits.length).toBe(1); + }); + + it('parses the commit subject correctly', () => { + const commits = parseGitLogOutput(emptyBodyOutput); 
+ expect(commits[0].hash).toBe('empty123'); + expect(commits[0].shortHash).toBe('empty1'); + expect(commits[0].author).toBe('Alice Brown'); + expect(commits[0].subject).toBe('Empty body commit'); + }); + + it('produces an empty body string when only blank lines follow the subject', () => { + const commits = parseGitLogOutput(emptyBodyOutput); + expect(commits[0].body).toBe(''); + }); + }); + + describe('leading empty lines in a commit block', () => { + // Blocks that start with blank lines before the hash field + const outputWithLeadingBlanks = + '\n\nabc123\nabc1\nJohn Doe\njohn@example.com\n2023-01-01T12:00:00Z\nSubject here\nBody here'; + + it('returns 1 commit despite leading blank lines', () => { + const commits = parseGitLogOutput(outputWithLeadingBlanks); + expect(commits.length).toBe(1); + }); + + it('parses the commit fields correctly when block has leading empty lines', () => { + const commits = parseGitLogOutput(outputWithLeadingBlanks); + expect(commits[0].hash).toBe('abc123'); + expect(commits[0].subject).toBe('Subject here'); + expect(commits[0].body).toBe('Body here'); + }); + }); +}); diff --git a/apps/server/tests/unit/lib/git-log-parser.test.ts b/apps/server/tests/unit/lib/git-log-parser.test.ts new file mode 100644 index 000000000..53c5342c9 --- /dev/null +++ b/apps/server/tests/unit/lib/git-log-parser.test.ts @@ -0,0 +1,196 @@ +import { describe, it, expect } from 'vitest'; +import { parseGitLogOutput } from '../../../src/lib/git-log-parser.js'; + +// Mock data: fields within each commit are newline-separated, +// commits are NUL-separated (matching the parser contract). +const mockGitOutput = [ + 'a1b2c3d4e5f67890abcd1234567890abcd1234\na1b2c3\nJohn Doe\njohn@example.com\n2023-01-01T12:00:00Z\nInitial commit\nThis is the commit body', + 'e5f6g7h8i9j0klmnoprstuv\ne5f6g7\nJane Smith\njane@example.com\n2023-01-02T12:00:00Z\nFix bug\nFixed the bug with ---END--- in the message', + 'q1w2e3r4t5y6u7i8o9p0asdfghjkl\nq1w2e3\nBob Johnson\nbob@example.com\n2023-01-03T12:00:00Z\nAnother commit\nEmpty body', +].join('\0'); + +// Mock data where commit bodies contain ---END--- markers +const mockOutputWithEndMarker = [ + 'a1b2c3d4e5f67890abcd1234567890abcd1234\na1b2c3\nJohn Doe\njohn@example.com\n2023-01-01T12:00:00Z\nInitial commit\nThis is the commit body\n---END--- is in this message', + 'e5f6g7h8i9j0klmnoprstuv\ne5f6g7\nJane Smith\njane@example.com\n2023-01-02T12:00:00Z\nFix bug\nFixed the bug with ---END--- in the message', + 'q1w2e3r4t5y6u7i8o9p0asdfghjkl\nq1w2e3\nBob Johnson\nbob@example.com\n2023-01-03T12:00:00Z\nAnother commit\nEmpty body', +].join('\0'); + +// Single-commit mock: fields newline-separated, no trailing NUL needed +const singleCommitOutput = + 'a1b2c3d4e5f67890abcd1234567890abcd1234\na1b2c3\nJohn Doe\njohn@example.com\n2023-01-01T12:00:00Z\nSingle commit\nSingle commit body'; + +describe('parseGitLogOutput', () => { + describe('normal parsing (three commits)', () => { + it('returns the correct number of commits', () => { + const commits = parseGitLogOutput(mockGitOutput); + expect(commits.length).toBe(3); + }); + + it('parses the first commit fields correctly', () => { + const commits = parseGitLogOutput(mockGitOutput); + expect(commits[0].hash).toBe('a1b2c3d4e5f67890abcd1234567890abcd1234'); + expect(commits[0].shortHash).toBe('a1b2c3'); + expect(commits[0].author).toBe('John Doe'); + expect(commits[0].authorEmail).toBe('john@example.com'); + expect(commits[0].date).toBe('2023-01-01T12:00:00Z'); + expect(commits[0].subject).toBe('Initial commit'); + 
expect(commits[0].body).toBe('This is the commit body'); + }); + + it('parses the second commit fields correctly', () => { + const commits = parseGitLogOutput(mockGitOutput); + expect(commits[1].hash).toBe('e5f6g7h8i9j0klmnoprstuv'); + expect(commits[1].shortHash).toBe('e5f6g7'); + expect(commits[1].author).toBe('Jane Smith'); + expect(commits[1].subject).toBe('Fix bug'); + expect(commits[1].body).toMatch(/---END---/); + }); + + it('parses the third commit fields correctly', () => { + const commits = parseGitLogOutput(mockGitOutput); + expect(commits[2].hash).toBe('q1w2e3r4t5y6u7i8o9p0asdfghjkl'); + expect(commits[2].shortHash).toBe('q1w2e3'); + expect(commits[2].author).toBe('Bob Johnson'); + expect(commits[2].subject).toBe('Another commit'); + expect(commits[2].body).toBe('Empty body'); + }); + }); + + describe('parsing with ---END--- in commit messages', () => { + it('returns the correct number of commits', () => { + const commits = parseGitLogOutput(mockOutputWithEndMarker); + expect(commits.length).toBe(3); + }); + + it('preserves ---END--- text in the body of the first commit', () => { + const commits = parseGitLogOutput(mockOutputWithEndMarker); + expect(commits[0].subject).toBe('Initial commit'); + expect(commits[0].body).toMatch(/---END---/); + }); + + it('preserves ---END--- text in the body of the second commit', () => { + const commits = parseGitLogOutput(mockOutputWithEndMarker); + expect(commits[1].subject).toBe('Fix bug'); + expect(commits[1].body).toMatch(/---END---/); + }); + + it('parses the third commit without ---END--- interference', () => { + const commits = parseGitLogOutput(mockOutputWithEndMarker); + expect(commits[2].subject).toBe('Another commit'); + expect(commits[2].body).toBe('Empty body'); + }); + }); + + describe('empty output', () => { + it('returns an empty array for an empty string', () => { + const commits = parseGitLogOutput(''); + expect(commits).toEqual([]); + expect(commits.length).toBe(0); + }); + }); + + describe('single-commit output', () => { + it('returns exactly one commit', () => { + const commits = parseGitLogOutput(singleCommitOutput); + expect(commits.length).toBe(1); + }); + + it('parses the single commit fields correctly', () => { + const commits = parseGitLogOutput(singleCommitOutput); + expect(commits[0].hash).toBe('a1b2c3d4e5f67890abcd1234567890abcd1234'); + expect(commits[0].shortHash).toBe('a1b2c3'); + expect(commits[0].author).toBe('John Doe'); + expect(commits[0].authorEmail).toBe('john@example.com'); + expect(commits[0].date).toBe('2023-01-01T12:00:00Z'); + expect(commits[0].subject).toBe('Single commit'); + expect(commits[0].body).toBe('Single commit body'); + }); + }); + + describe('multi-line commit body', () => { + // Test vector from test-proper-nul-format.js: commit with a 3-line body + const multiLineBodyOutput = + [ + 'abc123\nabc1\nJohn Doe\njohn@example.com\n2023-01-01T12:00:00Z\nInitial commit\nThis is a normal commit body', + 'def456\ndef4\nJane Smith\njane@example.com\n2023-01-02T12:00:00Z\nFix bug\nFixed the bug with ---END--- in this message', + 'ghi789\nghi7\nBob Johnson\nbob@example.com\n2023-01-03T12:00:00Z\nAnother commit\nThis body has multiple lines\nSecond line\nThird line', + ].join('\0') + '\0'; + + it('returns 3 commits', () => { + const commits = parseGitLogOutput(multiLineBodyOutput); + expect(commits.length).toBe(3); + }); + + it('parses the first commit correctly', () => { + const commits = parseGitLogOutput(multiLineBodyOutput); + expect(commits[0].hash).toBe('abc123'); + 
expect(commits[0].shortHash).toBe('abc1'); + expect(commits[0].author).toBe('John Doe'); + expect(commits[0].authorEmail).toBe('john@example.com'); + expect(commits[0].date).toBe('2023-01-01T12:00:00Z'); + expect(commits[0].subject).toBe('Initial commit'); + expect(commits[0].body).toBe('This is a normal commit body'); + }); + + it('parses the second commit with ---END--- in body correctly', () => { + const commits = parseGitLogOutput(multiLineBodyOutput); + expect(commits[1].hash).toBe('def456'); + expect(commits[1].shortHash).toBe('def4'); + expect(commits[1].author).toBe('Jane Smith'); + expect(commits[1].subject).toBe('Fix bug'); + expect(commits[1].body).toContain('---END---'); + }); + + it('parses the third commit with a multi-line body correctly', () => { + const commits = parseGitLogOutput(multiLineBodyOutput); + expect(commits[2].hash).toBe('ghi789'); + expect(commits[2].shortHash).toBe('ghi7'); + expect(commits[2].author).toBe('Bob Johnson'); + expect(commits[2].subject).toBe('Another commit'); + expect(commits[2].body).toBe('This body has multiple lines\nSecond line\nThird line'); + }); + }); + + describe('commit with empty body (trailing blank lines after subject)', () => { + // Test vector from test-proper-nul-format.js: empty body commit + const emptyBodyOutput = + 'empty123\nempty1\nAlice Brown\nalice@example.com\n2023-01-04T12:00:00Z\nEmpty body commit\n\n\0'; + + it('returns 1 commit', () => { + const commits = parseGitLogOutput(emptyBodyOutput); + expect(commits.length).toBe(1); + }); + + it('parses the commit subject correctly', () => { + const commits = parseGitLogOutput(emptyBodyOutput); + expect(commits[0].hash).toBe('empty123'); + expect(commits[0].shortHash).toBe('empty1'); + expect(commits[0].author).toBe('Alice Brown'); + expect(commits[0].subject).toBe('Empty body commit'); + }); + + it('produces an empty body string when only blank lines follow the subject', () => { + const commits = parseGitLogOutput(emptyBodyOutput); + expect(commits[0].body).toBe(''); + }); + }); + + describe('leading empty lines in a commit block', () => { + // Blocks that start with blank lines before the hash field + const outputWithLeadingBlanks = + '\n\nabc123\nabc1\nJohn Doe\njohn@example.com\n2023-01-01T12:00:00Z\nSubject here\nBody here'; + + it('returns 1 commit despite leading blank lines', () => { + const commits = parseGitLogOutput(outputWithLeadingBlanks); + expect(commits.length).toBe(1); + }); + + it('parses the commit fields correctly when block has leading empty lines', () => { + const commits = parseGitLogOutput(outputWithLeadingBlanks); + expect(commits[0].hash).toBe('abc123'); + expect(commits[0].subject).toBe('Subject here'); + expect(commits[0].body).toBe('Body here'); + }); + }); +}); diff --git a/apps/server/tests/unit/lib/nul-delimiter.test.ts b/apps/server/tests/unit/lib/nul-delimiter.test.ts new file mode 100644 index 000000000..5cf20bdc7 --- /dev/null +++ b/apps/server/tests/unit/lib/nul-delimiter.test.ts @@ -0,0 +1,83 @@ +// Automated tests for NUL character behavior in git commit parsing + +import { describe, it, expect } from 'vitest'; + +describe('NUL character behavior', () => { + // Create a string with NUL characters + const str1 = + 'abc123\x00abc1\x00John Doe\x00john@example.com\x002023-01-01T12:00:00Z\x00Initial commit\x00This is a normal commit body\x00'; + + describe('split on NUL character', () => { + const parts = str1.split('\0'); + + it('should produce the expected number of parts', () => { + // 7 fields + 1 trailing empty string from the trailing 
\x00 + expect(parts.length).toBe(8); + }); + + it('should contain the expected part values', () => { + expect(parts[0]).toBe('abc123'); + expect(parts[1]).toBe('abc1'); + expect(parts[2]).toBe('John Doe'); + expect(parts[3]).toBe('john@example.com'); + expect(parts[4]).toBe('2023-01-01T12:00:00Z'); + expect(parts[5]).toBe('Initial commit'); + expect(parts[6]).toBe('This is a normal commit body'); + expect(parts[7]).toBe(''); + }); + + it('should have correct lengths for each part', () => { + expect(parts[0].length).toBe(6); // 'abc123' + expect(parts[1].length).toBe(4); // 'abc1' + expect(parts[2].length).toBe(8); // 'John Doe' + expect(parts[3].length).toBe(16); // 'john@example.com' + expect(parts[4].length).toBe(20); // '2023-01-01T12:00:00Z' + expect(parts[5].length).toBe(14); // 'Initial commit' + expect(parts[6].length).toBe(28); // 'This is a normal commit body' + expect(parts[7].length).toBe(0); // trailing empty + }); + }); + + describe('git format split and filter', () => { + const gitFormat = `abc123\x00abc1\x00John Doe\x00john@example.com\x002023-01-01T12:00:00Z\x00Initial commit\x00Body text here\x00def456\x00def4\x00Jane Smith\x00jane@example.com\x002023-01-02T12:00:00Z\x00Second commit\x00Body with ---END--- text\x00`; + + const gitParts = gitFormat.split('\0').filter((block) => block.trim()); + + it('should produce the expected number of non-empty parts after filtering', () => { + // 14 non-empty field strings (7 fields per commit × 2 commits); trailing empty is filtered out + expect(gitParts.length).toBe(14); + }); + + it('should contain correct field values for the first commit', () => { + const fields = gitParts.slice(0, 7); + expect(fields.length).toBe(7); + expect(fields[0]).toBe('abc123'); // hash + expect(fields[1]).toBe('abc1'); // shortHash + expect(fields[2]).toBe('John Doe'); // author + expect(fields[3]).toBe('john@example.com'); // authorEmail + expect(fields[4]).toBe('2023-01-01T12:00:00Z'); // date + expect(fields[5]).toBe('Initial commit'); // subject + expect(fields[6]).toBe('Body text here'); // body + }); + + it('should contain correct field values for the second commit', () => { + const fields = gitParts.slice(7, 14); + expect(fields.length).toBe(7); + expect(fields[0]).toBe('def456'); // hash + expect(fields[1]).toBe('def4'); // shortHash + expect(fields[2]).toBe('Jane Smith'); // author + expect(fields[3]).toBe('jane@example.com'); // authorEmail + expect(fields[4]).toBe('2023-01-02T12:00:00Z'); // date + expect(fields[5]).toBe('Second commit'); // subject + expect(fields[6]).toBe('Body with ---END--- text'); // body (---END--- handled correctly) + }); + + it('each part should have the expected number of newline-delimited fields', () => { + // Each gitPart is a single field value (no internal newlines), so split('\n') yields 1 field + gitParts.forEach((block) => { + const fields = block.split('\n'); + expect(fields.length).toBe(1); + }); + }); + }); +}); diff --git a/apps/server/tests/unit/lib/sdk-options.test.ts b/apps/server/tests/unit/lib/sdk-options.test.ts index 69d697946..f552efd95 100644 --- a/apps/server/tests/unit/lib/sdk-options.test.ts +++ b/apps/server/tests/unit/lib/sdk-options.test.ts @@ -50,15 +50,15 @@ describe('sdk-options.ts', () => { describe('getModelForUseCase', () => { it('should return explicit model when provided', async () => { const { getModelForUseCase } = await import('@/lib/sdk-options.js'); - const result = getModelForUseCase('spec', 'claude-sonnet-4-20250514'); - expect(result).toBe('claude-sonnet-4-20250514'); + const 
result = getModelForUseCase('spec', 'claude-sonnet-4-6'); + expect(result).toBe('claude-sonnet-4-6'); }); it('should use environment variable for spec model', async () => { - process.env.AUTOMAKER_MODEL_SPEC = 'claude-sonnet-4-20250514'; + process.env.AUTOMAKER_MODEL_SPEC = 'claude-sonnet-4-6'; const { getModelForUseCase } = await import('@/lib/sdk-options.js'); const result = getModelForUseCase('spec'); - expect(result).toBe('claude-sonnet-4-20250514'); + expect(result).toBe('claude-sonnet-4-6'); }); it('should use default model for spec when no override', async () => { @@ -71,10 +71,10 @@ describe('sdk-options.ts', () => { it('should fall back to AUTOMAKER_MODEL_DEFAULT', async () => { delete process.env.AUTOMAKER_MODEL_SPEC; - process.env.AUTOMAKER_MODEL_DEFAULT = 'claude-sonnet-4-20250514'; + process.env.AUTOMAKER_MODEL_DEFAULT = 'claude-sonnet-4-6'; const { getModelForUseCase } = await import('@/lib/sdk-options.js'); const result = getModelForUseCase('spec'); - expect(result).toBe('claude-sonnet-4-20250514'); + expect(result).toBe('claude-sonnet-4-6'); }); }); @@ -203,10 +203,10 @@ describe('sdk-options.ts', () => { const options = createChatOptions({ cwd: '/test/path', - sessionModel: 'claude-sonnet-4-20250514', + sessionModel: 'claude-sonnet-4-6', }); - expect(options.model).toBe('claude-sonnet-4-20250514'); + expect(options.model).toBe('claude-sonnet-4-6'); }); }); diff --git a/apps/server/tests/unit/providers/claude-provider.test.ts b/apps/server/tests/unit/providers/claude-provider.test.ts index a2ebd72aa..69e0c2602 100644 --- a/apps/server/tests/unit/providers/claude-provider.test.ts +++ b/apps/server/tests/unit/providers/claude-provider.test.ts @@ -5,6 +5,17 @@ import { collectAsyncGenerator } from '../../utils/helpers.js'; vi.mock('@anthropic-ai/claude-agent-sdk'); +vi.mock('@automaker/platform', () => ({ + getClaudeAuthIndicators: vi.fn().mockResolvedValue({ + hasCredentialsFile: false, + hasSettingsFile: false, + hasStatsCacheWithActivity: false, + hasProjectsSessions: false, + credentials: null, + checks: {}, + }), +})); + describe('claude-provider.ts', () => { let provider: ClaudeProvider; @@ -360,10 +371,10 @@ describe('claude-provider.ts', () => { }); describe('getAvailableModels', () => { - it('should return 4 Claude models', () => { + it('should return 5 Claude models', () => { const models = provider.getAvailableModels(); - expect(models).toHaveLength(4); + expect(models).toHaveLength(5); }); it('should include Claude Opus 4.6', () => { @@ -375,12 +386,12 @@ describe('claude-provider.ts', () => { expect(opus?.provider).toBe('anthropic'); }); - it('should include Claude Sonnet 4', () => { + it('should include Claude Sonnet 4.6', () => { const models = provider.getAvailableModels(); - const sonnet = models.find((m) => m.id === 'claude-sonnet-4-20250514'); + const sonnet = models.find((m) => m.id === 'claude-sonnet-4-6'); expect(sonnet).toBeDefined(); - expect(sonnet?.name).toBe('Claude Sonnet 4'); + expect(sonnet?.name).toBe('Claude Sonnet 4.6'); }); it('should include Claude 3.5 Sonnet', () => { diff --git a/apps/server/tests/unit/providers/codex-provider.test.ts b/apps/server/tests/unit/providers/codex-provider.test.ts index a0bd25f60..03cd5591d 100644 --- a/apps/server/tests/unit/providers/codex-provider.test.ts +++ b/apps/server/tests/unit/providers/codex-provider.test.ts @@ -247,6 +247,12 @@ describe('codex-provider.ts', () => { it('uses the SDK when no tools are requested and an API key is present', async () => { process.env[OPENAI_API_KEY_ENV] = 'sk-test'; + // 
Override auth indicators so CLI-native auth doesn't take priority over API key + vi.mocked(getCodexAuthIndicators).mockResolvedValue({ + hasAuthFile: false, + hasOAuthToken: false, + hasApiKey: false, + }); codexRunMock.mockResolvedValue({ finalResponse: 'Hello from SDK' }); const results = await collectAsyncGenerator( @@ -264,6 +270,12 @@ describe('codex-provider.ts', () => { it('uses the SDK when API key is present, even for tool requests (to avoid OAuth issues)', async () => { process.env[OPENAI_API_KEY_ENV] = 'sk-test'; + // Override auth indicators so CLI-native auth doesn't take priority over API key + vi.mocked(getCodexAuthIndicators).mockResolvedValue({ + hasAuthFile: false, + hasOAuthToken: false, + hasApiKey: false, + }); vi.mocked(spawnJSONLProcess).mockReturnValue((async function* () {})()); await collectAsyncGenerator( diff --git a/apps/server/tests/unit/providers/opencode-provider.test.ts b/apps/server/tests/unit/providers/opencode-provider.test.ts index 641838efc..a3a0d7269 100644 --- a/apps/server/tests/unit/providers/opencode-provider.test.ts +++ b/apps/server/tests/unit/providers/opencode-provider.test.ts @@ -69,19 +69,19 @@ describe('opencode-provider.ts', () => { it('should include free tier GLM model', () => { const models = provider.getAvailableModels(); - const glm = models.find((m) => m.id === 'opencode/glm-4.7-free'); + const glm = models.find((m) => m.id === 'opencode/glm-5-free'); expect(glm).toBeDefined(); - expect(glm?.name).toBe('GLM 4.7 Free'); + expect(glm?.name).toBe('GLM 5 Free'); expect(glm?.tier).toBe('basic'); }); it('should include free tier MiniMax model', () => { const models = provider.getAvailableModels(); - const minimax = models.find((m) => m.id === 'opencode/minimax-m2.1-free'); + const minimax = models.find((m) => m.id === 'opencode/minimax-m2.5-free'); expect(minimax).toBeDefined(); - expect(minimax?.name).toBe('MiniMax M2.1 Free'); + expect(minimax?.name).toBe('MiniMax M2.5 Free'); expect(minimax?.tier).toBe('basic'); }); diff --git a/apps/server/tests/unit/providers/provider-factory.test.ts b/apps/server/tests/unit/providers/provider-factory.test.ts index b9aef9281..f92c7256b 100644 --- a/apps/server/tests/unit/providers/provider-factory.test.ts +++ b/apps/server/tests/unit/providers/provider-factory.test.ts @@ -59,8 +59,8 @@ describe('provider-factory.ts', () => { expect(provider).toBeInstanceOf(ClaudeProvider); }); - it('should return ClaudeProvider for claude-sonnet-4-20250514', () => { - const provider = ProviderFactory.getProviderForModel('claude-sonnet-4-20250514'); + it('should return ClaudeProvider for claude-sonnet-4-6', () => { + const provider = ProviderFactory.getProviderForModel('claude-sonnet-4-6'); expect(provider).toBeInstanceOf(ClaudeProvider); }); diff --git a/apps/server/tests/unit/routes/worktree/switch-branch.test.ts b/apps/server/tests/unit/routes/worktree/switch-branch.test.ts index 2cd868c68..c599fd07e 100644 --- a/apps/server/tests/unit/routes/worktree/switch-branch.test.ts +++ b/apps/server/tests/unit/routes/worktree/switch-branch.test.ts @@ -1,27 +1,15 @@ -import { describe, it, expect, vi, beforeEach, type Mock } from 'vitest'; +import { describe, it, expect, vi, beforeEach } from 'vitest'; import type { Request, Response } from 'express'; import { createMockExpressContext } from '../../../utils/mocks.js'; -vi.mock('child_process', async (importOriginal) => { - const actual = await importOriginal(); - return { - ...actual, - exec: vi.fn(), - }; -}); - -vi.mock('util', async (importOriginal) => { - const actual = 
await importOriginal(); - return { - ...actual, - promisify: (fn: unknown) => fn, - }; -}); +vi.mock('@/services/worktree-branch-service.js', () => ({ + performSwitchBranch: vi.fn(), +})); -import { exec } from 'child_process'; +import { performSwitchBranch } from '@/services/worktree-branch-service.js'; import { createSwitchBranchHandler } from '@/routes/worktree/routes/switch-branch.js'; -const mockExec = exec as Mock; +const mockPerformSwitchBranch = vi.mocked(performSwitchBranch); describe('switch-branch route', () => { let req: Request; @@ -34,26 +22,77 @@ describe('switch-branch route', () => { res = context.res; }); + it('should return 400 when branchName is missing', async () => { + req.body = { worktreePath: '/repo/path' }; + + const handler = createSwitchBranchHandler(); + await handler(req, res); + + expect(res.status).toHaveBeenCalledWith(400); + expect(res.json).toHaveBeenCalledWith({ + success: false, + error: 'branchName required', + }); + expect(mockPerformSwitchBranch).not.toHaveBeenCalled(); + }); + + it('should return 400 when branchName starts with a dash', async () => { + req.body = { worktreePath: '/repo/path', branchName: '-flag' }; + + const handler = createSwitchBranchHandler(); + await handler(req, res); + + expect(res.status).toHaveBeenCalledWith(400); + expect(res.json).toHaveBeenCalledWith({ + success: false, + error: 'Invalid branch name', + }); + expect(mockPerformSwitchBranch).not.toHaveBeenCalled(); + }); + + it('should return 400 when branchName starts with double dash', async () => { + req.body = { worktreePath: '/repo/path', branchName: '--option' }; + + const handler = createSwitchBranchHandler(); + await handler(req, res); + + expect(res.status).toHaveBeenCalledWith(400); + expect(res.json).toHaveBeenCalledWith({ + success: false, + error: 'Invalid branch name', + }); + expect(mockPerformSwitchBranch).not.toHaveBeenCalled(); + }); + + it('should return 400 when branchName contains invalid characters', async () => { + req.body = { worktreePath: '/repo/path', branchName: 'branch name with spaces' }; + + const handler = createSwitchBranchHandler(); + await handler(req, res); + + expect(res.status).toHaveBeenCalledWith(400); + expect(res.json).toHaveBeenCalledWith({ + success: false, + error: 'Invalid branch name', + }); + expect(mockPerformSwitchBranch).not.toHaveBeenCalled(); + }); + it('should allow switching when only untracked files exist', async () => { req.body = { worktreePath: '/repo/path', branchName: 'feature/test', }; - mockExec.mockImplementation(async (command: string) => { - if (command === 'git rev-parse --abbrev-ref HEAD') { - return { stdout: 'main\n', stderr: '' }; - } - if (command === 'git rev-parse --verify feature/test') { - return { stdout: 'abc123\n', stderr: '' }; - } - if (command === 'git status --porcelain') { - return { stdout: '?? .automaker/\n?? 
notes.txt\n', stderr: '' }; - } - if (command === 'git checkout "feature/test"') { - return { stdout: '', stderr: '' }; - } - return { stdout: '', stderr: '' }; + mockPerformSwitchBranch.mockResolvedValue({ + success: true, + result: { + previousBranch: 'main', + currentBranch: 'feature/test', + message: "Switched to branch 'feature/test'", + hasConflicts: false, + stashedChanges: false, + }, }); const handler = createSwitchBranchHandler(); @@ -65,42 +104,42 @@ describe('switch-branch route', () => { previousBranch: 'main', currentBranch: 'feature/test', message: "Switched to branch 'feature/test'", + hasConflicts: false, + stashedChanges: false, }, }); - expect(mockExec).toHaveBeenCalledWith('git checkout "feature/test"', { cwd: '/repo/path' }); + expect(mockPerformSwitchBranch).toHaveBeenCalledWith('/repo/path', 'feature/test', undefined); }); - it('should block switching when tracked files are modified', async () => { + it('should stash changes and switch when tracked files are modified', async () => { req.body = { worktreePath: '/repo/path', branchName: 'feature/test', }; - mockExec.mockImplementation(async (command: string) => { - if (command === 'git rev-parse --abbrev-ref HEAD') { - return { stdout: 'main\n', stderr: '' }; - } - if (command === 'git rev-parse --verify feature/test') { - return { stdout: 'abc123\n', stderr: '' }; - } - if (command === 'git status --porcelain') { - return { stdout: ' M src/index.ts\n?? notes.txt\n', stderr: '' }; - } - if (command === 'git status --short') { - return { stdout: ' M src/index.ts\n?? notes.txt\n', stderr: '' }; - } - return { stdout: '', stderr: '' }; + mockPerformSwitchBranch.mockResolvedValue({ + success: true, + result: { + previousBranch: 'main', + currentBranch: 'feature/test', + message: "Switched to branch 'feature/test' (local changes stashed and reapplied)", + hasConflicts: false, + stashedChanges: true, + }, }); const handler = createSwitchBranchHandler(); await handler(req, res); - expect(res.status).toHaveBeenCalledWith(400); expect(res.json).toHaveBeenCalledWith({ - success: false, - error: - 'Cannot switch branches: you have uncommitted changes (M src/index.ts). 
Please commit your changes first.', - code: 'UNCOMMITTED_CHANGES', + success: true, + result: { + previousBranch: 'main', + currentBranch: 'feature/test', + message: "Switched to branch 'feature/test' (local changes stashed and reapplied)", + hasConflicts: false, + stashedChanges: true, + }, }); }); }); diff --git a/apps/server/tests/unit/services/agent-executor.test.ts b/apps/server/tests/unit/services/agent-executor.test.ts index 983144882..09f12cf44 100644 --- a/apps/server/tests/unit/services/agent-executor.test.ts +++ b/apps/server/tests/unit/services/agent-executor.test.ts @@ -129,7 +129,7 @@ describe('AgentExecutor', () => { projectPath: '/project', abortController: new AbortController(), provider: {} as BaseProvider, - effectiveBareModel: 'claude-sonnet-4-20250514', + effectiveBareModel: 'claude-sonnet-4-6', }; expect(options.featureId).toBe('test-feature'); }); @@ -166,7 +166,7 @@ describe('AgentExecutor', () => { projectPath: '/test/project', abortController: new AbortController(), provider: {} as BaseProvider, - effectiveBareModel: 'claude-sonnet-4-20250514', + effectiveBareModel: 'claude-sonnet-4-6', }; expect(options.workDir).toBe('/test/workdir'); @@ -174,7 +174,7 @@ describe('AgentExecutor', () => { expect(options.prompt).toBe('Test prompt'); expect(options.projectPath).toBe('/test/project'); expect(options.abortController).toBeInstanceOf(AbortController); - expect(options.effectiveBareModel).toBe('claude-sonnet-4-20250514'); + expect(options.effectiveBareModel).toBe('claude-sonnet-4-6'); }); it('should accept optional options', () => { @@ -185,10 +185,10 @@ describe('AgentExecutor', () => { projectPath: '/test/project', abortController: new AbortController(), provider: {} as BaseProvider, - effectiveBareModel: 'claude-sonnet-4-20250514', + effectiveBareModel: 'claude-sonnet-4-6', // Optional fields imagePaths: ['/image1.png', '/image2.png'], - model: 'claude-sonnet-4-20250514', + model: 'claude-sonnet-4-6', planningMode: 'spec', requirePlanApproval: true, previousContent: 'Previous content', @@ -419,7 +419,7 @@ describe('AgentExecutor', () => { projectPath: '/project', abortController, provider: mockProvider, - effectiveBareModel: 'claude-sonnet-4-20250514', + effectiveBareModel: 'claude-sonnet-4-6', planningMode: 'skip', }; @@ -461,7 +461,7 @@ describe('AgentExecutor', () => { projectPath: '/project', abortController: new AbortController(), provider: mockProvider, - effectiveBareModel: 'claude-sonnet-4-20250514', + effectiveBareModel: 'claude-sonnet-4-6', previousContent: 'Previous context from earlier session', }; @@ -507,7 +507,7 @@ describe('AgentExecutor', () => { projectPath: '/project', abortController: new AbortController(), provider: mockProvider, - effectiveBareModel: 'claude-sonnet-4-20250514', + effectiveBareModel: 'claude-sonnet-4-6', planningMode: 'skip', // No spec detection in skip mode }; @@ -558,7 +558,7 @@ describe('AgentExecutor', () => { projectPath: '/project', abortController: new AbortController(), provider: mockProvider, - effectiveBareModel: 'claude-sonnet-4-20250514', + effectiveBareModel: 'claude-sonnet-4-6', planningMode: 'skip', }; @@ -618,7 +618,7 @@ describe('AgentExecutor', () => { projectPath: '/project', abortController: new AbortController(), provider: mockProvider, - effectiveBareModel: 'claude-sonnet-4-20250514', + effectiveBareModel: 'claude-sonnet-4-6', planningMode: 'skip', }; @@ -671,7 +671,7 @@ describe('AgentExecutor', () => { projectPath: '/project', abortController: new AbortController(), provider: mockProvider, - 
effectiveBareModel: 'claude-sonnet-4-20250514', + effectiveBareModel: 'claude-sonnet-4-6', planningMode: 'skip', }; @@ -712,7 +712,7 @@ describe('AgentExecutor', () => { projectPath: '/project', abortController: new AbortController(), provider: mockProvider, - effectiveBareModel: 'claude-sonnet-4-20250514', + effectiveBareModel: 'claude-sonnet-4-6', planningMode: 'skip', }; @@ -763,7 +763,7 @@ describe('AgentExecutor', () => { projectPath: '/project', abortController: new AbortController(), provider: mockProvider, - effectiveBareModel: 'claude-sonnet-4-20250514', + effectiveBareModel: 'claude-sonnet-4-6', planningMode: 'skip', }; @@ -810,7 +810,7 @@ describe('AgentExecutor', () => { projectPath: '/project', abortController: new AbortController(), provider: mockProvider, - effectiveBareModel: 'claude-sonnet-4-20250514', + effectiveBareModel: 'claude-sonnet-4-6', planningMode: 'skip', }; @@ -855,7 +855,7 @@ describe('AgentExecutor', () => { projectPath: '/project', abortController: new AbortController(), provider: mockProvider, - effectiveBareModel: 'claude-sonnet-4-20250514', + effectiveBareModel: 'claude-sonnet-4-6', planningMode: 'skip', branchName: 'feature/my-feature', }; @@ -906,7 +906,7 @@ describe('AgentExecutor', () => { projectPath: '/project', abortController: new AbortController(), provider: mockProvider, - effectiveBareModel: 'claude-sonnet-4-20250514', + effectiveBareModel: 'claude-sonnet-4-6', planningMode: 'skip', }; diff --git a/apps/server/tests/unit/services/agent-service.test.ts b/apps/server/tests/unit/services/agent-service.test.ts index fed1eae35..96090d2b9 100644 --- a/apps/server/tests/unit/services/agent-service.test.ts +++ b/apps/server/tests/unit/services/agent-service.test.ts @@ -123,9 +123,10 @@ describe('agent-service.ts', () => { }); expect(result.success).toBe(true); - // First call reads session file, metadata file, and queue state file (3 calls) + // First call reads metadata file and session file via ensureSession (2 calls) + // Since no metadata or messages exist, a fresh session is created without loading queue state. 
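+ // (If queue state had existed on disk, restoring it would add a third read here.)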
// Second call should reuse in-memory session (no additional calls) - expect(fs.readFile).toHaveBeenCalledTimes(3); + expect(fs.readFile).toHaveBeenCalledTimes(2); }); }); @@ -271,10 +272,10 @@ describe('agent-service.ts', () => { await service.sendMessage({ sessionId: 'session-1', message: 'Hello', - model: 'claude-sonnet-4-20250514', + model: 'claude-sonnet-4-6', }); - expect(ProviderFactory.getProviderForModel).toHaveBeenCalledWith('claude-sonnet-4-20250514'); + expect(ProviderFactory.getProviderForModel).toHaveBeenCalledWith('claude-sonnet-4-6'); }); it('should save session messages', async () => { @@ -330,15 +331,18 @@ describe('agent-service.ts', () => { sessionId: 'session-1', }); - const history = service.getHistory('session-1'); + const history = await service.getHistory('session-1'); expect(history).toBeDefined(); expect(history?.messages).toEqual([]); }); - it('should handle non-existent session', () => { - const history = service.getHistory('nonexistent'); - expect(history).toBeDefined(); // Returns error object + it('should handle non-existent session', async () => { + const history = await service.getHistory('nonexistent'); + expect(history).toBeDefined(); + expect(history.success).toBe(false); + expect(history.error).toBeDefined(); + expect(typeof history.error).toBe('string'); }); }); @@ -356,10 +360,108 @@ describe('agent-service.ts', () => { await service.clearSession('session-1'); - const history = service.getHistory('session-1'); + const history = await service.getHistory('session-1'); expect(history?.messages).toEqual([]); expect(fs.writeFile).toHaveBeenCalled(); }); + + it('should clear sdkSessionId from persisted metadata to prevent stale session errors', async () => { + // Setup: Session exists in metadata with an sdkSessionId (simulating + // a session that previously communicated with a CLI provider like OpenCode) + const metadata = { + 'session-1': { + id: 'session-1', + name: 'Test Session', + workingDirectory: '/test/dir', + createdAt: '2024-01-01T00:00:00Z', + updatedAt: '2024-01-01T00:00:00Z', + sdkSessionId: 'stale-opencode-session-id', + }, + }; + + vi.mocked(fs.readFile).mockResolvedValue(JSON.stringify(metadata)); + vi.mocked(fs.writeFile).mockResolvedValue(undefined); + vi.mocked(fs.mkdir).mockResolvedValue(undefined); + + // Start the session (loads from disk metadata) + await service.startConversation({ + sessionId: 'session-1', + workingDirectory: '/test/dir', + }); + + // Clear the session + await service.clearSession('session-1'); + + // Verify that the LAST writeFile call to sessions-metadata.json + // (from clearSdkSessionId) has sdkSessionId removed. + // Earlier writes may still include it (e.g., from updateSessionTimestamp). 
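+ // Each recorded writeFile call is a [path, data] tuple, so filtering on call[0] below isolates the sessions-metadata.json writes.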
+ const writeFileCalls = vi.mocked(fs.writeFile).mock.calls; + const metadataWriteCalls = writeFileCalls.filter( + (call) => + typeof call[0] === 'string' && (call[0] as string).includes('sessions-metadata.json') + ); + + expect(metadataWriteCalls.length).toBeGreaterThan(0); + const lastMetadataWriteCall = metadataWriteCalls[metadataWriteCalls.length - 1]; + const savedMetadata = JSON.parse(lastMetadataWriteCall[1] as string); + expect(savedMetadata['session-1'].sdkSessionId).toBeUndefined(); + }); + }); + + describe('clearSdkSessionId', () => { + it('should remove sdkSessionId from persisted metadata', async () => { + const metadata = { + 'session-1': { + id: 'session-1', + name: 'Test Session', + workingDirectory: '/test/dir', + createdAt: '2024-01-01T00:00:00Z', + updatedAt: '2024-01-01T00:00:00Z', + sdkSessionId: 'old-provider-session-id', + }, + }; + + vi.mocked(fs.readFile).mockResolvedValue(JSON.stringify(metadata)); + vi.mocked(fs.writeFile).mockResolvedValue(undefined); + + await service.clearSdkSessionId('session-1'); + + const writeFileCalls = vi.mocked(fs.writeFile).mock.calls; + expect(writeFileCalls.length).toBeGreaterThan(0); + + const savedMetadata = JSON.parse(writeFileCalls[0][1] as string); + expect(savedMetadata['session-1'].sdkSessionId).toBeUndefined(); + expect(savedMetadata['session-1'].updatedAt).not.toBe('2024-01-01T00:00:00Z'); + }); + + it('should do nothing if session has no sdkSessionId', async () => { + const metadata = { + 'session-1': { + id: 'session-1', + name: 'Test Session', + workingDirectory: '/test/dir', + createdAt: '2024-01-01T00:00:00Z', + updatedAt: '2024-01-01T00:00:00Z', + }, + }; + + vi.mocked(fs.readFile).mockResolvedValue(JSON.stringify(metadata)); + vi.mocked(fs.writeFile).mockResolvedValue(undefined); + + await service.clearSdkSessionId('session-1'); + + // writeFile should not have been called since there's no sdkSessionId to clear + expect(fs.writeFile).not.toHaveBeenCalled(); + }); + + it('should do nothing if session does not exist in metadata', async () => { + vi.mocked(fs.readFile).mockResolvedValue('{}'); + vi.mocked(fs.writeFile).mockResolvedValue(undefined); + + await service.clearSdkSessionId('nonexistent'); + + expect(fs.writeFile).not.toHaveBeenCalled(); + }); }); describe('createSession', () => { @@ -431,13 +533,13 @@ describe('agent-service.ts', () => { it('should set model for existing session', async () => { vi.mocked(fs.readFile).mockResolvedValue('{"session-1": {}}'); - const result = await service.setSessionModel('session-1', 'claude-sonnet-4-20250514'); + const result = await service.setSessionModel('session-1', 'claude-sonnet-4-6'); expect(result).toBe(true); }); it('should return false for non-existent session', async () => { - const result = await service.setSessionModel('nonexistent', 'claude-sonnet-4-20250514'); + const result = await service.setSessionModel('nonexistent', 'claude-sonnet-4-6'); expect(result).toBe(false); }); @@ -620,7 +722,7 @@ describe('agent-service.ts', () => { const result = await service.addToQueue('session-1', { message: 'Test prompt', imagePaths: ['/test/image.png'], - model: 'claude-sonnet-4-20250514', + model: 'claude-sonnet-4-6', }); expect(result.success).toBe(true); @@ -654,15 +756,15 @@ describe('agent-service.ts', () => { it('should return queue for session', async () => { await service.addToQueue('session-1', { message: 'Test prompt' }); - const result = service.getQueue('session-1'); + const result = await service.getQueue('session-1'); expect(result.success).toBe(true); 
expect(result.queue).toBeDefined(); expect(result.queue?.length).toBe(1); }); - it('should return error for non-existent session', () => { - const result = service.getQueue('nonexistent'); + it('should return error for non-existent session', async () => { + const result = await service.getQueue('nonexistent'); expect(result.success).toBe(false); expect(result.error).toBe('Session not found'); @@ -686,7 +788,7 @@ describe('agent-service.ts', () => { }); it('should remove prompt from queue', async () => { - const queueResult = service.getQueue('session-1'); + const queueResult = await service.getQueue('session-1'); const promptId = queueResult.queue![0].id; const result = await service.removeFromQueue('session-1', promptId); @@ -731,7 +833,7 @@ describe('agent-service.ts', () => { const result = await service.clearQueue('session-1'); expect(result.success).toBe(true); - const queueResult = service.getQueue('session-1'); + const queueResult = await service.getQueue('session-1'); expect(queueResult.queue?.length).toBe(0); expect(mockEvents.emit).toHaveBeenCalled(); }); diff --git a/apps/server/tests/unit/services/auto-loop-coordinator.test.ts b/apps/server/tests/unit/services/auto-loop-coordinator.test.ts index 31a117fe2..e9d10932d 100644 --- a/apps/server/tests/unit/services/auto-loop-coordinator.test.ts +++ b/apps/server/tests/unit/services/auto-loop-coordinator.test.ts @@ -6,6 +6,7 @@ import { type ProjectAutoLoopState, type ExecuteFeatureFn, type LoadPendingFeaturesFn, + type LoadAllFeaturesFn, type SaveExecutionStateFn, type ClearExecutionStateFn, type ResetStuckFeaturesFn, @@ -25,6 +26,7 @@ describe('auto-loop-coordinator.ts', () => { // Callback mocks let mockExecuteFeature: ExecuteFeatureFn; let mockLoadPendingFeatures: LoadPendingFeaturesFn; + let mockLoadAllFeatures: LoadAllFeaturesFn; let mockSaveExecutionState: SaveExecutionStateFn; let mockClearExecutionState: ClearExecutionStateFn; let mockResetStuckFeatures: ResetStuckFeaturesFn; @@ -65,6 +67,7 @@ describe('auto-loop-coordinator.ts', () => { // Callback mocks mockExecuteFeature = vi.fn().mockResolvedValue(undefined); mockLoadPendingFeatures = vi.fn().mockResolvedValue([]); + mockLoadAllFeatures = vi.fn().mockResolvedValue([]); mockSaveExecutionState = vi.fn().mockResolvedValue(undefined); mockClearExecutionState = vi.fn().mockResolvedValue(undefined); mockResetStuckFeatures = vi.fn().mockResolvedValue(undefined); @@ -81,7 +84,8 @@ describe('auto-loop-coordinator.ts', () => { mockClearExecutionState, mockResetStuckFeatures, mockIsFeatureFinished, - mockIsFeatureRunning + mockIsFeatureRunning, + mockLoadAllFeatures ); }); @@ -326,6 +330,282 @@ describe('auto-loop-coordinator.ts', () => { }); }); + describe('priority-based feature selection', () => { + it('selects highest priority feature first (lowest number)', async () => { + const lowPriority: Feature = { + ...testFeature, + id: 'feature-low', + priority: 3, + title: 'Low Priority', + }; + const highPriority: Feature = { + ...testFeature, + id: 'feature-high', + priority: 1, + title: 'High Priority', + }; + const medPriority: Feature = { + ...testFeature, + id: 'feature-med', + priority: 2, + title: 'Med Priority', + }; + + // Return features in non-priority order + vi.mocked(mockLoadPendingFeatures).mockResolvedValue([ + lowPriority, + medPriority, + highPriority, + ]); + vi.mocked(mockLoadAllFeatures).mockResolvedValue([lowPriority, medPriority, highPriority]); + vi.mocked(mockConcurrencyManager.getRunningCountForWorktree).mockResolvedValue(0); + + await 
coordinator.startAutoLoopForProject('/test/project', null, 1); + await vi.advanceTimersByTimeAsync(3000); + await coordinator.stopAutoLoopForProject('/test/project', null); + + // Should execute the highest priority feature (priority=1) + expect(mockExecuteFeature).toHaveBeenCalledWith('/test/project', 'feature-high', true, true); + }); + + it('uses default priority of 2 when not specified', async () => { + const noPriority: Feature = { ...testFeature, id: 'feature-none', title: 'No Priority' }; + const highPriority: Feature = { + ...testFeature, + id: 'feature-high', + priority: 1, + title: 'High Priority', + }; + + vi.mocked(mockLoadPendingFeatures).mockResolvedValue([noPriority, highPriority]); + vi.mocked(mockLoadAllFeatures).mockResolvedValue([noPriority, highPriority]); + vi.mocked(mockConcurrencyManager.getRunningCountForWorktree).mockResolvedValue(0); + + await coordinator.startAutoLoopForProject('/test/project', null, 1); + await vi.advanceTimersByTimeAsync(3000); + await coordinator.stopAutoLoopForProject('/test/project', null); + + // High priority (1) should be selected over default priority (2) + expect(mockExecuteFeature).toHaveBeenCalledWith('/test/project', 'feature-high', true, true); + }); + + it('selects first feature when priorities are equal', async () => { + const featureA: Feature = { + ...testFeature, + id: 'feature-a', + priority: 2, + title: 'Feature A', + }; + const featureB: Feature = { + ...testFeature, + id: 'feature-b', + priority: 2, + title: 'Feature B', + }; + + vi.mocked(mockLoadPendingFeatures).mockResolvedValue([featureA, featureB]); + vi.mocked(mockLoadAllFeatures).mockResolvedValue([featureA, featureB]); + vi.mocked(mockConcurrencyManager.getRunningCountForWorktree).mockResolvedValue(0); + + await coordinator.startAutoLoopForProject('/test/project', null, 1); + await vi.advanceTimersByTimeAsync(3000); + await coordinator.stopAutoLoopForProject('/test/project', null); + + // When priorities equal, the first feature from the filtered list should be chosen + expect(mockExecuteFeature).toHaveBeenCalledWith('/test/project', 'feature-a', true, true); + }); + }); + + describe('dependency-aware feature selection', () => { + it('skips features with unsatisfied dependencies', async () => { + const depFeature: Feature = { + ...testFeature, + id: 'feature-dep', + status: 'in_progress', + title: 'Dependency Feature', + }; + const blockedFeature: Feature = { + ...testFeature, + id: 'feature-blocked', + dependencies: ['feature-dep'], + priority: 1, + title: 'Blocked Feature', + }; + const readyFeature: Feature = { + ...testFeature, + id: 'feature-ready', + priority: 2, + title: 'Ready Feature', + }; + + // Pending features (backlog/ready status) + vi.mocked(mockLoadPendingFeatures).mockResolvedValue([blockedFeature, readyFeature]); + // All features (including the in-progress dependency) + vi.mocked(mockLoadAllFeatures).mockResolvedValue([depFeature, blockedFeature, readyFeature]); + vi.mocked(mockConcurrencyManager.getRunningCountForWorktree).mockResolvedValue(0); + + await coordinator.startAutoLoopForProject('/test/project', null, 1); + await vi.advanceTimersByTimeAsync(3000); + await coordinator.stopAutoLoopForProject('/test/project', null); + + // Should skip blocked feature (dependency not complete) and execute ready feature + expect(mockExecuteFeature).toHaveBeenCalledWith('/test/project', 'feature-ready', true, true); + expect(mockExecuteFeature).not.toHaveBeenCalledWith( + '/test/project', + 'feature-blocked', + true, + true + ); + }); + + it('picks features 
whose dependencies are completed', async () => { + const completedDep: Feature = { + ...testFeature, + id: 'feature-dep', + status: 'completed', + title: 'Completed Dependency', + }; + const unblockedFeature: Feature = { + ...testFeature, + id: 'feature-unblocked', + dependencies: ['feature-dep'], + priority: 1, + title: 'Unblocked Feature', + }; + + vi.mocked(mockLoadPendingFeatures).mockResolvedValue([unblockedFeature]); + vi.mocked(mockLoadAllFeatures).mockResolvedValue([completedDep, unblockedFeature]); + vi.mocked(mockConcurrencyManager.getRunningCountForWorktree).mockResolvedValue(0); + + await coordinator.startAutoLoopForProject('/test/project', null, 1); + await vi.advanceTimersByTimeAsync(3000); + await coordinator.stopAutoLoopForProject('/test/project', null); + + // Should execute the unblocked feature since its dependency is completed + expect(mockExecuteFeature).toHaveBeenCalledWith( + '/test/project', + 'feature-unblocked', + true, + true + ); + }); + + it('picks features whose dependencies are verified', async () => { + const verifiedDep: Feature = { + ...testFeature, + id: 'feature-dep', + status: 'verified', + title: 'Verified Dependency', + }; + const unblockedFeature: Feature = { + ...testFeature, + id: 'feature-unblocked', + dependencies: ['feature-dep'], + priority: 1, + title: 'Unblocked Feature', + }; + + vi.mocked(mockLoadPendingFeatures).mockResolvedValue([unblockedFeature]); + vi.mocked(mockLoadAllFeatures).mockResolvedValue([verifiedDep, unblockedFeature]); + vi.mocked(mockConcurrencyManager.getRunningCountForWorktree).mockResolvedValue(0); + + await coordinator.startAutoLoopForProject('/test/project', null, 1); + await vi.advanceTimersByTimeAsync(3000); + await coordinator.stopAutoLoopForProject('/test/project', null); + + expect(mockExecuteFeature).toHaveBeenCalledWith( + '/test/project', + 'feature-unblocked', + true, + true + ); + }); + + it('respects both priority and dependencies together', async () => { + const completedDep: Feature = { + ...testFeature, + id: 'feature-dep', + status: 'completed', + title: 'Completed Dep', + }; + const blockedHighPriority: Feature = { + ...testFeature, + id: 'feature-blocked-hp', + dependencies: ['feature-not-done'], + priority: 1, + title: 'Blocked High Priority', + }; + const unblockedLowPriority: Feature = { + ...testFeature, + id: 'feature-unblocked-lp', + dependencies: ['feature-dep'], + priority: 3, + title: 'Unblocked Low Priority', + }; + const unblockedMedPriority: Feature = { + ...testFeature, + id: 'feature-unblocked-mp', + priority: 2, + title: 'Unblocked Med Priority', + }; + + vi.mocked(mockLoadPendingFeatures).mockResolvedValue([ + blockedHighPriority, + unblockedLowPriority, + unblockedMedPriority, + ]); + vi.mocked(mockLoadAllFeatures).mockResolvedValue([ + completedDep, + blockedHighPriority, + unblockedLowPriority, + unblockedMedPriority, + ]); + vi.mocked(mockConcurrencyManager.getRunningCountForWorktree).mockResolvedValue(0); + + await coordinator.startAutoLoopForProject('/test/project', null, 1); + await vi.advanceTimersByTimeAsync(3000); + await coordinator.stopAutoLoopForProject('/test/project', null); + + // Should skip blocked high-priority and pick the unblocked medium-priority + expect(mockExecuteFeature).toHaveBeenCalledWith( + '/test/project', + 'feature-unblocked-mp', + true, + true + ); + expect(mockExecuteFeature).not.toHaveBeenCalledWith( + '/test/project', + 'feature-blocked-hp', + true, + true + ); + }); + + it('handles features with no dependencies (always eligible)', async () => { + 
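+ // A feature without a `dependencies` array skips the dependency check entirely, so it is always eligible once its status and the concurrency limit allow. +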
const noDeps: Feature = { + ...testFeature, + id: 'feature-no-deps', + priority: 2, + title: 'No Dependencies', + }; + + vi.mocked(mockLoadPendingFeatures).mockResolvedValue([noDeps]); + vi.mocked(mockLoadAllFeatures).mockResolvedValue([noDeps]); + vi.mocked(mockConcurrencyManager.getRunningCountForWorktree).mockResolvedValue(0); + + await coordinator.startAutoLoopForProject('/test/project', null, 1); + await vi.advanceTimersByTimeAsync(3000); + await coordinator.stopAutoLoopForProject('/test/project', null); + + expect(mockExecuteFeature).toHaveBeenCalledWith( + '/test/project', + 'feature-no-deps', + true, + true + ); + }); + }); + describe('failure tracking', () => { it('trackFailureAndCheckPauseForProject returns true after threshold', async () => { await coordinator.startAutoLoopForProject('/test/project', null, 1); @@ -606,5 +886,72 @@ expect.anything() ); }); + + it('bypasses dependency checks when loadAllFeaturesFn is omitted', async () => { + // Create a dependency feature that is NOT completed (in_progress) + const inProgressDep: Feature = { + ...testFeature, + id: 'dep-feature', + status: 'in_progress', + title: 'In-Progress Dependency', + }; + // Create a pending feature that depends on the in-progress dep + const pendingFeatureWithDep: Feature = { + ...testFeature, + id: 'feature-with-dep', + dependencies: ['dep-feature'], + status: 'ready', + title: 'Feature With Dependency', + }; + + // loadAllFeaturesFn is NOT provided, so dependency checks are bypassed entirely + // (the coordinator returns true instead of calling areDependenciesSatisfied) + const coordWithoutLoadAll = new AutoLoopCoordinator( + mockEventBus, + mockConcurrencyManager, + mockSettingsService, + mockExecuteFeature, + mockLoadPendingFeatures, + mockSaveExecutionState, + mockClearExecutionState, + mockResetStuckFeatures, + mockIsFeatureFinished, + mockIsFeatureRunning + // loadAllFeaturesFn omitted + ); + + // pendingFeatures includes the in-progress dep and the pending feature; + // since loadAllFeaturesFn is absent, dependency checks are bypassed, + // so pendingFeatureWithDep is eligible even though its dependency is not completed + vi.mocked(mockLoadPendingFeatures).mockResolvedValue([inProgressDep, pendingFeatureWithDep]); + vi.mocked(mockConcurrencyManager.getRunningCountForWorktree).mockResolvedValue(0); + // Neither feature is finished, so both pass the isFeatureFinished filter; but only + // pendingFeatureWithDep should be executed, because dep-feature is marked as running + // below to prevent it from being picked + vi.mocked(mockIsFeatureFinished).mockReturnValue(false); + vi.mocked(mockIsFeatureRunning as ReturnType<typeof vi.fn>).mockImplementation( + (id: string) => id === 'dep-feature' + ); + + await coordWithoutLoadAll.startAutoLoopForProject('/test/project', null, 1); + await vi.advanceTimersByTimeAsync(3000); + await coordWithoutLoadAll.stopAutoLoopForProject('/test/project', null); + + // pendingFeatureWithDep executes despite its dependency not being completed, + // because dependency checks are bypassed when loadAllFeaturesFn is omitted + expect(mockExecuteFeature).toHaveBeenCalledWith( + '/test/project', + 'feature-with-dep', + true, + true + ); + // dep-feature is not executed because it is marked as running + expect(mockExecuteFeature).not.toHaveBeenCalledWith( + '/test/project', + 'dep-feature', + true, + true + ); + }); }); }); diff --git a/apps/server/tests/unit/services/ideation-service.test.ts 
b/apps/server/tests/unit/services/ideation-service.test.ts index 1be24cbed..7004362a2 100644 --- a/apps/server/tests/unit/services/ideation-service.test.ts +++ b/apps/server/tests/unit/services/ideation-service.test.ts @@ -25,7 +25,7 @@ const mockLogger = vi.hoisted(() => ({ const mockCreateChatOptions = vi.hoisted(() => vi.fn(() => ({ - model: 'claude-sonnet-4-20250514', + model: 'claude-sonnet-4-6', systemPrompt: 'test prompt', })) ); diff --git a/apps/server/tests/unit/services/pipeline-orchestrator.test.ts b/apps/server/tests/unit/services/pipeline-orchestrator.test.ts index aa543afbd..ff039d4cc 100644 --- a/apps/server/tests/unit/services/pipeline-orchestrator.test.ts +++ b/apps/server/tests/unit/services/pipeline-orchestrator.test.ts @@ -137,6 +137,7 @@ describe('PipelineOrchestrator', () => { mockEventBus = { emitAutoModeEvent: vi.fn(), + getUnderlyingEmitter: vi.fn().mockReturnValue({}), } as unknown as TypedEventBus; mockFeatureStateManager = { @@ -492,7 +493,8 @@ describe('PipelineOrchestrator', () => { 'feature/test-1', '/test/worktree', 'main', - { deleteWorktreeAndBranch: false } + { deleteWorktreeAndBranch: false }, + expect.anything() ); }); @@ -792,7 +794,8 @@ describe('PipelineOrchestrator', () => { 'feature/test-1', '/test/project', // Falls back to projectPath when worktreePath is null 'main', - { deleteWorktreeAndBranch: false } + { deleteWorktreeAndBranch: false }, + expect.anything() ); }); }); @@ -845,7 +848,8 @@ describe('PipelineOrchestrator', () => { 'feature/test-1', '/test/custom-worktree', 'main', - { deleteWorktreeAndBranch: false } + { deleteWorktreeAndBranch: false }, + expect.anything() ); }); @@ -861,7 +865,8 @@ describe('PipelineOrchestrator', () => { 'feature/custom-branch', '/test/worktree', 'main', - { deleteWorktreeAndBranch: false } + { deleteWorktreeAndBranch: false }, + expect.anything() ); }); diff --git a/apps/server/tests/unit/services/recovery-service.test.ts b/apps/server/tests/unit/services/recovery-service.test.ts index 90be3eb21..cd99fc088 100644 --- a/apps/server/tests/unit/services/recovery-service.test.ts +++ b/apps/server/tests/unit/services/recovery-service.test.ts @@ -491,6 +491,32 @@ describe('recovery-service.ts', () => { ); }); + it('finds features with interrupted status', async () => { + vi.mocked(secureFs.readdir).mockResolvedValueOnce([ + { name: 'feature-1', isDirectory: () => true } as any, + ]); + vi.mocked(utils.readJsonWithRecovery).mockResolvedValueOnce({ + data: { id: 'feature-1', title: 'Feature 1', status: 'interrupted' }, + wasRecovered: false, + }); + + mockLoadFeature.mockResolvedValue({ + id: 'feature-1', + title: 'Feature 1', + status: 'interrupted', + description: 'Test', + }); + + await service.resumeInterruptedFeatures('/test/project'); + + expect(mockEventBus.emitAutoModeEvent).toHaveBeenCalledWith( + 'auto_mode_resuming_features', + expect.objectContaining({ + featureIds: ['feature-1'], + }) + ); + }); + it('finds features with pipeline_* status', async () => { vi.mocked(secureFs.readdir).mockResolvedValueOnce([ { name: 'feature-1', isDirectory: () => true } as any, @@ -519,6 +545,100 @@ describe('recovery-service.ts', () => { ); }); + it('finds reconciled features using execution state (ready/backlog from previously running)', async () => { + // Simulate execution state with previously running feature IDs + const executionState = { + version: 1, + autoLoopWasRunning: true, + maxConcurrency: 2, + projectPath: '/test/project', + branchName: null, + runningFeatureIds: ['feature-1', 'feature-2'], + savedAt: 
'2026-01-27T12:00:00Z', + }; + vi.mocked(secureFs.readFile).mockResolvedValueOnce(JSON.stringify(executionState)); + + vi.mocked(secureFs.readdir).mockResolvedValueOnce([ + { name: 'feature-1', isDirectory: () => true } as any, + { name: 'feature-2', isDirectory: () => true } as any, + { name: 'feature-3', isDirectory: () => true } as any, + ]); + // feature-1 was reconciled from in_progress to ready + // feature-2 was reconciled from in_progress to backlog + // feature-3 is in backlog but was NOT previously running + vi.mocked(utils.readJsonWithRecovery) + .mockResolvedValueOnce({ + data: { id: 'feature-1', title: 'Feature 1', status: 'ready' }, + wasRecovered: false, + }) + .mockResolvedValueOnce({ + data: { id: 'feature-2', title: 'Feature 2', status: 'backlog' }, + wasRecovered: false, + }) + .mockResolvedValueOnce({ + data: { id: 'feature-3', title: 'Feature 3', status: 'backlog' }, + wasRecovered: false, + }); + + mockLoadFeature + .mockResolvedValueOnce({ + id: 'feature-1', + title: 'Feature 1', + status: 'ready', + description: 'Test', + }) + .mockResolvedValueOnce({ + id: 'feature-2', + title: 'Feature 2', + status: 'backlog', + description: 'Test', + }); + + await service.resumeInterruptedFeatures('/test/project'); + + // Should resume feature-1 and feature-2 (from execution state) but NOT feature-3 + expect(mockEventBus.emitAutoModeEvent).toHaveBeenCalledWith( + 'auto_mode_resuming_features', + expect.objectContaining({ + featureIds: ['feature-1', 'feature-2'], + }) + ); + }); + + it('clears execution state after successful resume', async () => { + // Simulate execution state + const executionState = { + version: 1, + autoLoopWasRunning: true, + maxConcurrency: 1, + projectPath: '/test/project', + branchName: null, + runningFeatureIds: ['feature-1'], + savedAt: '2026-01-27T12:00:00Z', + }; + vi.mocked(secureFs.readFile).mockResolvedValueOnce(JSON.stringify(executionState)); + + vi.mocked(secureFs.readdir).mockResolvedValueOnce([ + { name: 'feature-1', isDirectory: () => true } as any, + ]); + vi.mocked(utils.readJsonWithRecovery).mockResolvedValueOnce({ + data: { id: 'feature-1', title: 'Feature 1', status: 'ready' }, + wasRecovered: false, + }); + + mockLoadFeature.mockResolvedValue({ + id: 'feature-1', + title: 'Feature 1', + status: 'ready', + description: 'Test', + }); + + await service.resumeInterruptedFeatures('/test/project'); + + // Should clear execution state after resuming + expect(secureFs.unlink).toHaveBeenCalledWith('/test/project/.automaker/execution-state.json'); + }); + it('distinguishes features with/without context', async () => { vi.mocked(secureFs.readdir).mockResolvedValueOnce([ { name: 'feature-with', isDirectory: () => true } as any, diff --git a/apps/ui/eslint.config.mjs b/apps/ui/eslint.config.mjs index 2400404fe..2b40bd8f2 100644 --- a/apps/ui/eslint.config.mjs +++ b/apps/ui/eslint.config.mjs @@ -2,6 +2,7 @@ import { defineConfig, globalIgnores } from 'eslint/config'; import js from '@eslint/js'; import ts from '@typescript-eslint/eslint-plugin'; import tsParser from '@typescript-eslint/parser'; +import reactHooks from 'eslint-plugin-react-hooks'; const eslintConfig = defineConfig([ js.configs.recommended, @@ -51,6 +52,7 @@ const eslintConfig = defineConfig([ getComputedStyle: 'readonly', requestAnimationFrame: 'readonly', cancelAnimationFrame: 'readonly', + requestIdleCallback: 'readonly', alert: 'readonly', // DOM Element Types HTMLElement: 'readonly', @@ -62,6 +64,8 @@ const eslintConfig = defineConfig([ HTMLHeadingElement: 'readonly', 
HTMLParagraphElement: 'readonly', HTMLImageElement: 'readonly', + HTMLLinkElement: 'readonly', + HTMLScriptElement: 'readonly', Element: 'readonly', SVGElement: 'readonly', SVGSVGElement: 'readonly', @@ -76,6 +80,7 @@ MouseEvent: 'readonly', UIEvent: 'readonly', MediaQueryListEvent: 'readonly', + PageTransitionEvent: 'readonly', DataTransfer: 'readonly', // Web APIs ResizeObserver: 'readonly', @@ -91,6 +96,7 @@ Response: 'readonly', RequestInit: 'readonly', RequestCache: 'readonly', + ServiceWorkerRegistration: 'readonly', // Timers setTimeout: 'readonly', setInterval: 'readonly', @@ -112,13 +118,17 @@ console: 'readonly', // Vite defines __APP_VERSION__: 'readonly', + __APP_BUILD_HASH__: 'readonly', }, }, plugins: { '@typescript-eslint': ts, + 'react-hooks': reactHooks, }, rules: { ...ts.configs.recommended.rules, + 'react-hooks/rules-of-hooks': 'error', + 'react-hooks/exhaustive-deps': 'warn', '@typescript-eslint/no-unused-vars': [ 'warn', { @@ -138,6 +148,32 @@ ], }, }, + { + files: ['public/sw.js'], + languageOptions: { + globals: { + // Service Worker globals + self: 'readonly', + caches: 'readonly', + fetch: 'readonly', + Headers: 'readonly', + Response: 'readonly', + URL: 'readonly', + setTimeout: 'readonly', + console: 'readonly', + // Built-in globals used in sw.js + Date: 'readonly', + Promise: 'readonly', + Set: 'readonly', + JSON: 'readonly', + String: 'readonly', + Array: 'readonly', + }, + }, + rules: { + 'no-unused-vars': ['warn', { argsIgnorePattern: '^_', caughtErrorsIgnorePattern: '^_' }], + }, + }, globalIgnores([ 'dist/**', 'dist-electron/**', diff --git a/apps/ui/index.html b/apps/ui/index.html index 49a7aa1ee..3f12c9b0e 100644 --- a/apps/ui/index.html +++ b/apps/ui/index.html @@ -4,18 +4,84 @@ Automaker - Autonomous AI Development Studio [the HTML markup in this hunk did not survive extraction; only the title text and the bare +/- line markers remained]