diff --git a/.claude_settings.json b/.claude_settings.json
index 246fb900a..969f1214b 100644
--- a/.claude_settings.json
+++ b/.claude_settings.json
@@ -21,4 +21,4 @@
"mcp__puppeteer__puppeteer_evaluate"
]
}
-}
\ No newline at end of file
+}
diff --git a/.github/actions/setup-project/action.yml b/.github/actions/setup-project/action.yml
index 8ef0f33bd..75030b905 100644
--- a/.github/actions/setup-project/action.yml
+++ b/.github/actions/setup-project/action.yml
@@ -1,28 +1,28 @@
-name: "Setup Project"
-description: "Common setup steps for CI workflows - checkout, Node.js, dependencies, and native modules"
+name: 'Setup Project'
+description: 'Common setup steps for CI workflows - checkout, Node.js, dependencies, and native modules'
inputs:
node-version:
- description: "Node.js version to use"
+ description: 'Node.js version to use'
required: false
- default: "22"
+ default: '22'
check-lockfile:
- description: "Run lockfile lint check for SSH URLs"
+ description: 'Run lockfile lint check for SSH URLs'
required: false
- default: "false"
+ default: 'false'
rebuild-node-pty-path:
- description: "Working directory for node-pty rebuild (empty = root)"
+ description: 'Working directory for node-pty rebuild (empty = root)'
required: false
- default: ""
+ default: ''
runs:
- using: "composite"
+ using: 'composite'
steps:
- name: Setup Node.js
uses: actions/setup-node@v4
with:
node-version: ${{ inputs.node-version }}
- cache: "npm"
+ cache: 'npm'
cache-dependency-path: package-lock.json
- name: Check for SSH URLs in lockfile
diff --git a/.github/scripts/upload-to-r2.js b/.github/scripts/upload-to-r2.js
index 4749bda17..b54d4b191 100644
--- a/.github/scripts/upload-to-r2.js
+++ b/.github/scripts/upload-to-r2.js
@@ -1,15 +1,11 @@
-const {
- S3Client,
- PutObjectCommand,
- GetObjectCommand,
-} = require("@aws-sdk/client-s3");
-const fs = require("fs");
-const path = require("path");
-const https = require("https");
-const { pipeline } = require("stream/promises");
+const { S3Client, PutObjectCommand, GetObjectCommand } = require('@aws-sdk/client-s3');
+const fs = require('fs');
+const path = require('path');
+const https = require('https');
+const { pipeline } = require('stream/promises');
const s3Client = new S3Client({
- region: "auto",
+ region: 'auto',
endpoint: process.env.R2_ENDPOINT,
credentials: {
accessKeyId: process.env.R2_ACCESS_KEY_ID,
@@ -28,14 +24,14 @@ async function fetchExistingReleases() {
const response = await s3Client.send(
new GetObjectCommand({
Bucket: BUCKET,
- Key: "releases.json",
+ Key: 'releases.json',
})
);
const body = await response.Body.transformToString();
return JSON.parse(body);
} catch (error) {
- if (error.name === "NoSuchKey" || error.$metadata?.httpStatusCode === 404) {
- console.log("No existing releases.json found, creating new one");
+ if (error.name === 'NoSuchKey' || error.$metadata?.httpStatusCode === 404) {
+ console.log('No existing releases.json found, creating new one');
return { latestVersion: null, releases: [] };
}
throw error;
@@ -85,7 +81,7 @@ async function checkUrlAccessible(url, maxRetries = 10, initialDelay = 1000) {
resolve({
accessible: false,
statusCode,
- error: "Redirect without location header",
+ error: 'Redirect without location header',
});
return;
}
@@ -93,18 +89,16 @@ async function checkUrlAccessible(url, maxRetries = 10, initialDelay = 1000) {
return https
.get(redirectUrl, { timeout: 10000 }, (redirectResponse) => {
const redirectStatus = redirectResponse.statusCode;
- const contentType =
- redirectResponse.headers["content-type"] || "";
+ const contentType = redirectResponse.headers['content-type'] || '';
// Check if it's actually a file (zip/tar.gz) and not HTML
const isFile =
- contentType.includes("application/zip") ||
- contentType.includes("application/gzip") ||
- contentType.includes("application/x-gzip") ||
- contentType.includes("application/x-tar") ||
- redirectUrl.includes(".zip") ||
- redirectUrl.includes(".tar.gz");
- const isGood =
- redirectStatus >= 200 && redirectStatus < 300 && isFile;
+ contentType.includes('application/zip') ||
+ contentType.includes('application/gzip') ||
+ contentType.includes('application/x-gzip') ||
+ contentType.includes('application/x-tar') ||
+ redirectUrl.includes('.zip') ||
+ redirectUrl.includes('.tar.gz');
+ const isGood = redirectStatus >= 200 && redirectStatus < 300 && isFile;
redirectResponse.destroy();
resolve({
accessible: isGood,
@@ -113,38 +107,38 @@ async function checkUrlAccessible(url, maxRetries = 10, initialDelay = 1000) {
contentType,
});
})
- .on("error", (error) => {
+ .on('error', (error) => {
resolve({
accessible: false,
statusCode,
error: error.message,
});
})
- .on("timeout", function () {
+ .on('timeout', function () {
this.destroy();
resolve({
accessible: false,
statusCode,
- error: "Timeout following redirect",
+ error: 'Timeout following redirect',
});
});
}
// Check if status is good (200-299 range) and it's actually a file
- const contentType = response.headers["content-type"] || "";
+ const contentType = response.headers['content-type'] || '';
const isFile =
- contentType.includes("application/zip") ||
- contentType.includes("application/gzip") ||
- contentType.includes("application/x-gzip") ||
- contentType.includes("application/x-tar") ||
- url.includes(".zip") ||
- url.includes(".tar.gz");
+ contentType.includes('application/zip') ||
+ contentType.includes('application/gzip') ||
+ contentType.includes('application/x-gzip') ||
+ contentType.includes('application/x-tar') ||
+ url.includes('.zip') ||
+ url.includes('.tar.gz');
const isGood = statusCode >= 200 && statusCode < 300 && isFile;
response.destroy();
resolve({ accessible: isGood, statusCode, contentType });
});
- request.on("error", (error) => {
+ request.on('error', (error) => {
resolve({
accessible: false,
statusCode: null,
@@ -152,12 +146,12 @@ async function checkUrlAccessible(url, maxRetries = 10, initialDelay = 1000) {
});
});
- request.on("timeout", () => {
+ request.on('timeout', () => {
request.destroy();
resolve({
accessible: false,
statusCode: null,
- error: "Request timeout",
+ error: 'Request timeout',
});
});
});
@@ -168,22 +162,14 @@ async function checkUrlAccessible(url, maxRetries = 10, initialDelay = 1000) {
`✓ URL ${url} is now accessible after ${attempt} retries (status: ${result.statusCode})`
);
} else {
- console.log(
- `✓ URL ${url} is accessible (status: ${result.statusCode})`
- );
+ console.log(`✓ URL ${url} is accessible (status: ${result.statusCode})`);
}
return result.finalUrl || url; // Return the final URL (after redirects) if available
} else {
- const errorMsg = result.error ? ` - ${result.error}` : "";
- const statusMsg = result.statusCode
- ? ` (status: ${result.statusCode})`
- : "";
- const contentTypeMsg = result.contentType
- ? ` [content-type: ${result.contentType}]`
- : "";
- console.log(
- `✗ URL ${url} not accessible${statusMsg}${contentTypeMsg}${errorMsg}`
- );
+ const errorMsg = result.error ? ` - ${result.error}` : '';
+ const statusMsg = result.statusCode ? ` (status: ${result.statusCode})` : '';
+ const contentTypeMsg = result.contentType ? ` [content-type: ${result.contentType}]` : '';
+ console.log(`✗ URL ${url} not accessible${statusMsg}${contentTypeMsg}${errorMsg}`);
}
} catch (error) {
console.log(`✗ URL ${url} check failed: ${error.message}`);
@@ -191,9 +177,7 @@ async function checkUrlAccessible(url, maxRetries = 10, initialDelay = 1000) {
if (attempt < maxRetries - 1) {
const delay = initialDelay * Math.pow(2, attempt);
- console.log(
- ` Retrying in ${delay}ms... (attempt ${attempt + 1}/${maxRetries})`
- );
+ console.log(` Retrying in ${delay}ms... (attempt ${attempt + 1}/${maxRetries})`);
await new Promise((resolve) => setTimeout(resolve, delay));
}
}
@@ -207,12 +191,7 @@ async function downloadFromGitHub(url, outputPath) {
const statusCode = response.statusCode;
// Follow redirects (all redirect types)
- if (
- statusCode === 301 ||
- statusCode === 302 ||
- statusCode === 307 ||
- statusCode === 308
- ) {
+ if (statusCode === 301 || statusCode === 302 || statusCode === 307 || statusCode === 308) {
const redirectUrl = response.headers.location;
response.destroy();
if (!redirectUrl) {
@@ -220,39 +199,33 @@ async function downloadFromGitHub(url, outputPath) {
return;
}
// Resolve relative redirects
- const finalRedirectUrl = redirectUrl.startsWith("http")
+ const finalRedirectUrl = redirectUrl.startsWith('http')
? redirectUrl
: new URL(redirectUrl, url).href;
console.log(` Following redirect: ${finalRedirectUrl}`);
- return downloadFromGitHub(finalRedirectUrl, outputPath)
- .then(resolve)
- .catch(reject);
+ return downloadFromGitHub(finalRedirectUrl, outputPath).then(resolve).catch(reject);
}
if (statusCode !== 200) {
response.destroy();
- reject(
- new Error(
- `Failed to download ${url}: ${statusCode} ${response.statusMessage}`
- )
- );
+ reject(new Error(`Failed to download ${url}: ${statusCode} ${response.statusMessage}`));
return;
}
const fileStream = fs.createWriteStream(outputPath);
response.pipe(fileStream);
- fileStream.on("finish", () => {
+ fileStream.on('finish', () => {
fileStream.close();
resolve();
});
- fileStream.on("error", (error) => {
+ fileStream.on('error', (error) => {
response.destroy();
reject(error);
});
});
- request.on("error", reject);
- request.on("timeout", () => {
+ request.on('error', reject);
+ request.on('timeout', () => {
request.destroy();
reject(new Error(`Request timeout for ${url}`));
});
@@ -260,8 +233,8 @@ async function downloadFromGitHub(url, outputPath) {
}
async function main() {
- const artifactsDir = "artifacts";
- const tempDir = path.join(artifactsDir, "temp");
+ const artifactsDir = 'artifacts';
+ const tempDir = path.join(artifactsDir, 'temp');
// Create temp directory for downloaded GitHub archives
if (!fs.existsSync(tempDir)) {
@@ -292,40 +265,30 @@ async function main() {
// Find all artifacts
const artifacts = {
- windows: findArtifacts(path.join(artifactsDir, "windows-builds"), /\.exe$/),
- macos: findArtifacts(path.join(artifactsDir, "macos-builds"), /-x64\.dmg$/),
- macosArm: findArtifacts(
- path.join(artifactsDir, "macos-builds"),
- /-arm64\.dmg$/
- ),
- linux: findArtifacts(
- path.join(artifactsDir, "linux-builds"),
- /\.AppImage$/
- ),
+ windows: findArtifacts(path.join(artifactsDir, 'windows-builds'), /\.exe$/),
+ macos: findArtifacts(path.join(artifactsDir, 'macos-builds'), /-x64\.dmg$/),
+ macosArm: findArtifacts(path.join(artifactsDir, 'macos-builds'), /-arm64\.dmg$/),
+ linux: findArtifacts(path.join(artifactsDir, 'linux-builds'), /\.AppImage$/),
sourceZip: [sourceZipPath],
sourceTarGz: [sourceTarGzPath],
};
- console.log("Found artifacts:");
+ console.log('Found artifacts:');
for (const [platform, files] of Object.entries(artifacts)) {
console.log(
- ` ${platform}: ${
- files.length > 0
- ? files.map((f) => path.basename(f)).join(", ")
- : "none"
- }`
+ ` ${platform}: ${files.length > 0 ? files.map((f) => path.basename(f)).join(', ') : 'none'}`
);
}
// Upload each artifact to R2
const assets = {};
const contentTypes = {
- windows: "application/x-msdownload",
- macos: "application/x-apple-diskimage",
- macosArm: "application/x-apple-diskimage",
- linux: "application/x-executable",
- sourceZip: "application/zip",
- sourceTarGz: "application/gzip",
+ windows: 'application/x-msdownload',
+ macos: 'application/x-apple-diskimage',
+ macosArm: 'application/x-apple-diskimage',
+ linux: 'application/x-executable',
+ sourceZip: 'application/zip',
+ sourceTarGz: 'application/gzip',
};
for (const [platform, files] of Object.entries(artifacts)) {
@@ -345,11 +308,11 @@ async function main() {
filename,
size,
arch:
- platform === "macosArm"
- ? "arm64"
- : platform === "sourceZip" || platform === "sourceTarGz"
- ? "source"
- : "x64",
+ platform === 'macosArm'
+ ? 'arm64'
+ : platform === 'sourceZip' || platform === 'sourceTarGz'
+ ? 'source'
+ : 'x64',
};
}
@@ -364,9 +327,7 @@ async function main() {
};
// Remove existing entry for this version if re-running
- releasesData.releases = releasesData.releases.filter(
- (r) => r.version !== VERSION
- );
+ releasesData.releases = releasesData.releases.filter((r) => r.version !== VERSION);
// Prepend new release
releasesData.releases.unshift(newRelease);
@@ -376,19 +337,19 @@ async function main() {
await s3Client.send(
new PutObjectCommand({
Bucket: BUCKET,
- Key: "releases.json",
+ Key: 'releases.json',
Body: JSON.stringify(releasesData, null, 2),
- ContentType: "application/json",
- CacheControl: "public, max-age=60",
+ ContentType: 'application/json',
+ CacheControl: 'public, max-age=60',
})
);
- console.log("Successfully updated releases.json");
+ console.log('Successfully updated releases.json');
console.log(`Latest version: ${VERSION}`);
console.log(`Total releases: ${releasesData.releases.length}`);
}
main().catch((err) => {
- console.error("Failed to upload to R2:", err);
+ console.error('Failed to upload to R2:', err);
process.exit(1);
});
diff --git a/.github/workflows/claude.yml b/.github/workflows/claude.yml
index d300267f1..9471a0591 100644
--- a/.github/workflows/claude.yml
+++ b/.github/workflows/claude.yml
@@ -47,4 +47,3 @@ jobs:
# See https://github.com/anthropics/claude-code-action/blob/main/docs/usage.md
# or https://code.claude.com/docs/en/cli-reference for available options
# claude_args: '--allowed-tools Bash(gh pr:*)'
-
diff --git a/.github/workflows/e2e-tests.yml b/.github/workflows/e2e-tests.yml
index 9f8e49a8c..a4064bdab 100644
--- a/.github/workflows/e2e-tests.yml
+++ b/.github/workflows/e2e-tests.yml
@@ -3,7 +3,7 @@ name: E2E Tests
on:
pull_request:
branches:
- - "*"
+ - '*'
push:
branches:
- main
@@ -21,8 +21,8 @@ jobs:
- name: Setup project
uses: ./.github/actions/setup-project
with:
- check-lockfile: "true"
- rebuild-node-pty-path: "apps/server"
+ check-lockfile: 'true'
+ rebuild-node-pty-path: 'apps/server'
- name: Install Playwright browsers
run: npx playwright install --with-deps chromium
@@ -58,7 +58,7 @@ jobs:
env:
CI: true
VITE_SERVER_URL: http://localhost:3008
- VITE_SKIP_SETUP: "true"
+ VITE_SKIP_SETUP: 'true'
- name: Upload Playwright report
uses: actions/upload-artifact@v4
diff --git a/.github/workflows/pr-check.yml b/.github/workflows/pr-check.yml
index 38e0c9783..4311eeb0b 100644
--- a/.github/workflows/pr-check.yml
+++ b/.github/workflows/pr-check.yml
@@ -3,7 +3,7 @@ name: PR Build Check
on:
pull_request:
branches:
- - "*"
+ - '*'
push:
branches:
- main
@@ -20,7 +20,7 @@ jobs:
- name: Setup project
uses: ./.github/actions/setup-project
with:
- check-lockfile: "true"
+ check-lockfile: 'true'
- name: Run build:electron (dir only - faster CI)
run: npm run build:electron:dir
diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml
index 84cc49414..dacea6312 100644
--- a/.github/workflows/test.yml
+++ b/.github/workflows/test.yml
@@ -3,7 +3,7 @@ name: Test Suite
on:
pull_request:
branches:
- - "*"
+ - '*'
push:
branches:
- main
@@ -20,8 +20,8 @@ jobs:
- name: Setup project
uses: ./.github/actions/setup-project
with:
- check-lockfile: "true"
- rebuild-node-pty-path: "apps/server"
+ check-lockfile: 'true'
+ rebuild-node-pty-path: 'apps/server'
- name: Run package tests
run: npm run test:packages
diff --git a/apps/app/next-env.d.ts b/apps/app/next-env.d.ts
index c4b7818fb..20e7bcfb0 100644
--- a/apps/app/next-env.d.ts
+++ b/apps/app/next-env.d.ts
@@ -1,6 +1,6 @@
/// <reference types="next" />
/// <reference types="next/image-types/global" />
-import "./.next/dev/types/routes.d.ts";
+import './.next/dev/types/routes.d.ts';
// NOTE: This file should not be edited
// see https://nextjs.org/docs/app/api-reference/config/typescript for more information.
diff --git a/apps/server/src/index.ts b/apps/server/src/index.ts
index adf39f3a5..4a19c4c74 100644
--- a/apps/server/src/index.ts
+++ b/apps/server/src/index.ts
@@ -6,53 +6,53 @@
* In web mode, this server runs on a remote host.
*/
-import express from "express";
-import cors from "cors";
-import morgan from "morgan";
-import { WebSocketServer, WebSocket } from "ws";
-import { createServer } from "http";
-import dotenv from "dotenv";
-
-import { createEventEmitter, type EventEmitter } from "./lib/events.js";
-import { initAllowedPaths } from "@automaker/platform";
-import { authMiddleware, getAuthStatus } from "./lib/auth.js";
-import { createFsRoutes } from "./routes/fs/index.js";
-import { createHealthRoutes } from "./routes/health/index.js";
-import { createAgentRoutes } from "./routes/agent/index.js";
-import { createSessionsRoutes } from "./routes/sessions/index.js";
-import { createFeaturesRoutes } from "./routes/features/index.js";
-import { createAutoModeRoutes } from "./routes/auto-mode/index.js";
-import { createEnhancePromptRoutes } from "./routes/enhance-prompt/index.js";
-import { createWorktreeRoutes } from "./routes/worktree/index.js";
-import { createGitRoutes } from "./routes/git/index.js";
-import { createSetupRoutes } from "./routes/setup/index.js";
-import { createSuggestionsRoutes } from "./routes/suggestions/index.js";
-import { createModelsRoutes } from "./routes/models/index.js";
-import { createRunningAgentsRoutes } from "./routes/running-agents/index.js";
-import { createWorkspaceRoutes } from "./routes/workspace/index.js";
-import { createTemplatesRoutes } from "./routes/templates/index.js";
+import express from 'express';
+import cors from 'cors';
+import morgan from 'morgan';
+import { WebSocketServer, WebSocket } from 'ws';
+import { createServer } from 'http';
+import dotenv from 'dotenv';
+
+import { createEventEmitter, type EventEmitter } from './lib/events.js';
+import { initAllowedPaths } from '@automaker/platform';
+import { authMiddleware, getAuthStatus } from './lib/auth.js';
+import { createFsRoutes } from './routes/fs/index.js';
+import { createHealthRoutes } from './routes/health/index.js';
+import { createAgentRoutes } from './routes/agent/index.js';
+import { createSessionsRoutes } from './routes/sessions/index.js';
+import { createFeaturesRoutes } from './routes/features/index.js';
+import { createAutoModeRoutes } from './routes/auto-mode/index.js';
+import { createEnhancePromptRoutes } from './routes/enhance-prompt/index.js';
+import { createWorktreeRoutes } from './routes/worktree/index.js';
+import { createGitRoutes } from './routes/git/index.js';
+import { createSetupRoutes } from './routes/setup/index.js';
+import { createSuggestionsRoutes } from './routes/suggestions/index.js';
+import { createModelsRoutes } from './routes/models/index.js';
+import { createRunningAgentsRoutes } from './routes/running-agents/index.js';
+import { createWorkspaceRoutes } from './routes/workspace/index.js';
+import { createTemplatesRoutes } from './routes/templates/index.js';
import {
createTerminalRoutes,
validateTerminalToken,
isTerminalEnabled,
isTerminalPasswordRequired,
-} from "./routes/terminal/index.js";
-import { createSettingsRoutes } from "./routes/settings/index.js";
-import { AgentService } from "./services/agent-service.js";
-import { FeatureLoader } from "./services/feature-loader.js";
-import { AutoModeService } from "./services/auto-mode-service.js";
-import { getTerminalService } from "./services/terminal-service.js";
-import { SettingsService } from "./services/settings-service.js";
-import { createSpecRegenerationRoutes } from "./routes/app-spec/index.js";
-import { createClaudeRoutes } from "./routes/claude/index.js";
-import { ClaudeUsageService } from "./services/claude-usage-service.js";
+} from './routes/terminal/index.js';
+import { createSettingsRoutes } from './routes/settings/index.js';
+import { AgentService } from './services/agent-service.js';
+import { FeatureLoader } from './services/feature-loader.js';
+import { AutoModeService } from './services/auto-mode-service.js';
+import { getTerminalService } from './services/terminal-service.js';
+import { SettingsService } from './services/settings-service.js';
+import { createSpecRegenerationRoutes } from './routes/app-spec/index.js';
+import { createClaudeRoutes } from './routes/claude/index.js';
+import { ClaudeUsageService } from './services/claude-usage-service.js';
// Load environment variables
dotenv.config();
-const PORT = parseInt(process.env.PORT || "3008", 10);
-const DATA_DIR = process.env.DATA_DIR || "./data";
-const ENABLE_REQUEST_LOGGING = process.env.ENABLE_REQUEST_LOGGING !== "false"; // Default to true
+const PORT = parseInt(process.env.PORT || '3008', 10);
+const DATA_DIR = process.env.DATA_DIR || './data';
+const ENABLE_REQUEST_LOGGING = process.env.ENABLE_REQUEST_LOGGING !== 'false'; // Default to true
// Check for required environment variables
const hasAnthropicKey = !!process.env.ANTHROPIC_API_KEY;
@@ -71,7 +71,7 @@ if (!hasAnthropicKey) {
╚═══════════════════════════════════════════════════════════════════════╝
`);
} else {
- console.log("[Server] ✓ ANTHROPIC_API_KEY detected (API key auth)");
+ console.log('[Server] ✓ ANTHROPIC_API_KEY detected (API key auth)');
}
// Initialize security
@@ -83,7 +83,7 @@ const app = express();
// Middleware
// Custom colored logger showing only endpoint and status code (configurable via ENABLE_REQUEST_LOGGING env var)
if (ENABLE_REQUEST_LOGGING) {
- morgan.token("status-colored", (req, res) => {
+ morgan.token('status-colored', (req, res) => {
const status = res.statusCode;
if (status >= 500) return `\x1b[31m${status}\x1b[0m`; // Red for server errors
if (status >= 400) return `\x1b[33m${status}\x1b[0m`; // Yellow for client errors
@@ -92,18 +92,18 @@ if (ENABLE_REQUEST_LOGGING) {
});
app.use(
- morgan(":method :url :status-colored", {
- skip: (req) => req.url === "/api/health", // Skip health check logs
+ morgan(':method :url :status-colored', {
+ skip: (req) => req.url === '/api/health', // Skip health check logs
})
);
}
app.use(
cors({
- origin: process.env.CORS_ORIGIN || "*",
+ origin: process.env.CORS_ORIGIN || '*',
credentials: true,
})
);
-app.use(express.json({ limit: "50mb" }));
+app.use(express.json({ limit: '50mb' }));
// Create shared event emitter for streaming
const events: EventEmitter = createEventEmitter();
@@ -118,33 +118,33 @@ const claudeUsageService = new ClaudeUsageService();
// Initialize services
(async () => {
await agentService.initialize();
- console.log("[Server] Agent service initialized");
+ console.log('[Server] Agent service initialized');
})();
// Mount API routes - health is unauthenticated for monitoring
-app.use("/api/health", createHealthRoutes());
+app.use('/api/health', createHealthRoutes());
// Apply authentication to all other routes
-app.use("/api", authMiddleware);
-
-app.use("/api/fs", createFsRoutes(events));
-app.use("/api/agent", createAgentRoutes(agentService, events));
-app.use("/api/sessions", createSessionsRoutes(agentService));
-app.use("/api/features", createFeaturesRoutes(featureLoader));
-app.use("/api/auto-mode", createAutoModeRoutes(autoModeService));
-app.use("/api/enhance-prompt", createEnhancePromptRoutes());
-app.use("/api/worktree", createWorktreeRoutes());
-app.use("/api/git", createGitRoutes());
-app.use("/api/setup", createSetupRoutes());
-app.use("/api/suggestions", createSuggestionsRoutes(events));
-app.use("/api/models", createModelsRoutes());
-app.use("/api/spec-regeneration", createSpecRegenerationRoutes(events));
-app.use("/api/running-agents", createRunningAgentsRoutes(autoModeService));
-app.use("/api/workspace", createWorkspaceRoutes());
-app.use("/api/templates", createTemplatesRoutes());
-app.use("/api/terminal", createTerminalRoutes());
-app.use("/api/settings", createSettingsRoutes(settingsService));
-app.use("/api/claude", createClaudeRoutes(claudeUsageService));
+app.use('/api', authMiddleware);
+
+app.use('/api/fs', createFsRoutes(events));
+app.use('/api/agent', createAgentRoutes(agentService, events));
+app.use('/api/sessions', createSessionsRoutes(agentService));
+app.use('/api/features', createFeaturesRoutes(featureLoader));
+app.use('/api/auto-mode', createAutoModeRoutes(autoModeService));
+app.use('/api/enhance-prompt', createEnhancePromptRoutes());
+app.use('/api/worktree', createWorktreeRoutes());
+app.use('/api/git', createGitRoutes());
+app.use('/api/setup', createSetupRoutes());
+app.use('/api/suggestions', createSuggestionsRoutes(events));
+app.use('/api/models', createModelsRoutes());
+app.use('/api/spec-regeneration', createSpecRegenerationRoutes(events));
+app.use('/api/running-agents', createRunningAgentsRoutes(autoModeService));
+app.use('/api/workspace', createWorkspaceRoutes());
+app.use('/api/templates', createTemplatesRoutes());
+app.use('/api/terminal', createTerminalRoutes());
+app.use('/api/settings', createSettingsRoutes(settingsService));
+app.use('/api/claude', createClaudeRoutes(claudeUsageService));
// Create HTTP server
const server = createServer(app);
@@ -155,19 +155,16 @@ const terminalWss = new WebSocketServer({ noServer: true });
const terminalService = getTerminalService();
// Handle HTTP upgrade requests manually to route to correct WebSocket server
-server.on("upgrade", (request, socket, head) => {
- const { pathname } = new URL(
- request.url || "",
- `http://${request.headers.host}`
- );
+server.on('upgrade', (request, socket, head) => {
+ const { pathname } = new URL(request.url || '', `http://${request.headers.host}`);
- if (pathname === "/api/events") {
+ if (pathname === '/api/events') {
wss.handleUpgrade(request, socket, head, (ws) => {
- wss.emit("connection", ws, request);
+ wss.emit('connection', ws, request);
});
- } else if (pathname === "/api/terminal/ws") {
+ } else if (pathname === '/api/terminal/ws') {
terminalWss.handleUpgrade(request, socket, head, (ws) => {
- terminalWss.emit("connection", ws, request);
+ terminalWss.emit('connection', ws, request);
});
} else {
socket.destroy();
@@ -175,8 +172,8 @@ server.on("upgrade", (request, socket, head) => {
});
// Events WebSocket connection handler
-wss.on("connection", (ws: WebSocket) => {
- console.log("[WebSocket] Client connected");
+wss.on('connection', (ws: WebSocket) => {
+ console.log('[WebSocket] Client connected');
// Subscribe to all events and forward to this client
const unsubscribe = events.subscribe((type, payload) => {
@@ -185,13 +182,13 @@ wss.on("connection", (ws: WebSocket) => {
}
});
- ws.on("close", () => {
- console.log("[WebSocket] Client disconnected");
+ ws.on('close', () => {
+ console.log('[WebSocket] Client disconnected');
unsubscribe();
});
- ws.on("error", (error) => {
- console.error("[WebSocket] Error:", error);
+ ws.on('error', (error) => {
+ console.error('[WebSocket] Error:', error);
unsubscribe();
});
});
@@ -212,184 +209,199 @@ terminalService.onExit((sessionId) => {
});
// Terminal WebSocket connection handler
-terminalWss.on(
- "connection",
- (ws: WebSocket, req: import("http").IncomingMessage) => {
- // Parse URL to get session ID and token
- const url = new URL(req.url || "", `http://${req.headers.host}`);
- const sessionId = url.searchParams.get("sessionId");
- const token = url.searchParams.get("token");
-
- console.log(`[Terminal WS] Connection attempt for session: ${sessionId}`);
-
- // Check if terminal is enabled
- if (!isTerminalEnabled()) {
- console.log("[Terminal WS] Terminal is disabled");
- ws.close(4003, "Terminal access is disabled");
- return;
- }
+terminalWss.on('connection', (ws: WebSocket, req: import('http').IncomingMessage) => {
+ // Parse URL to get session ID and token
+ const url = new URL(req.url || '', `http://${req.headers.host}`);
+ const sessionId = url.searchParams.get('sessionId');
+ const token = url.searchParams.get('token');
+
+ console.log(`[Terminal WS] Connection attempt for session: ${sessionId}`);
+
+ // Check if terminal is enabled
+ if (!isTerminalEnabled()) {
+ console.log('[Terminal WS] Terminal is disabled');
+ ws.close(4003, 'Terminal access is disabled');
+ return;
+ }
- // Validate token if password is required
- if (
- isTerminalPasswordRequired() &&
- !validateTerminalToken(token || undefined)
- ) {
- console.log("[Terminal WS] Invalid or missing token");
- ws.close(4001, "Authentication required");
- return;
- }
+ // Validate token if password is required
+ if (isTerminalPasswordRequired() && !validateTerminalToken(token || undefined)) {
+ console.log('[Terminal WS] Invalid or missing token');
+ ws.close(4001, 'Authentication required');
+ return;
+ }
- if (!sessionId) {
- console.log("[Terminal WS] No session ID provided");
- ws.close(4002, "Session ID required");
- return;
- }
+ if (!sessionId) {
+ console.log('[Terminal WS] No session ID provided');
+ ws.close(4002, 'Session ID required');
+ return;
+ }
- // Check if session exists
- const session = terminalService.getSession(sessionId);
- if (!session) {
- console.log(`[Terminal WS] Session ${sessionId} not found`);
- ws.close(4004, "Session not found");
- return;
- }
+ // Check if session exists
+ const session = terminalService.getSession(sessionId);
+ if (!session) {
+ console.log(`[Terminal WS] Session ${sessionId} not found`);
+ ws.close(4004, 'Session not found');
+ return;
+ }
- console.log(`[Terminal WS] Client connected to session ${sessionId}`);
+ console.log(`[Terminal WS] Client connected to session ${sessionId}`);
- // Track this connection
- if (!terminalConnections.has(sessionId)) {
- terminalConnections.set(sessionId, new Set());
- }
- terminalConnections.get(sessionId)!.add(ws);
+ // Track this connection
+ if (!terminalConnections.has(sessionId)) {
+ terminalConnections.set(sessionId, new Set());
+ }
+ terminalConnections.get(sessionId)!.add(ws);
+
+ // Send initial connection success FIRST
+ ws.send(
+ JSON.stringify({
+ type: 'connected',
+ sessionId,
+ shell: session.shell,
+ cwd: session.cwd,
+ })
+ );
- // Send initial connection success FIRST
+ // Send scrollback buffer BEFORE subscribing to prevent race condition
+ // Also clear pending output buffer to prevent duplicates from throttled flush
+ const scrollback = terminalService.getScrollbackAndClearPending(sessionId);
+ if (scrollback && scrollback.length > 0) {
ws.send(
JSON.stringify({
- type: "connected",
- sessionId,
- shell: session.shell,
- cwd: session.cwd,
+ type: 'scrollback',
+ data: scrollback,
})
);
+ }
- // Send scrollback buffer BEFORE subscribing to prevent race condition
- // Also clear pending output buffer to prevent duplicates from throttled flush
- const scrollback = terminalService.getScrollbackAndClearPending(sessionId);
- if (scrollback && scrollback.length > 0) {
- ws.send(
- JSON.stringify({
- type: "scrollback",
- data: scrollback,
- })
- );
+ // NOW subscribe to terminal data (after scrollback is sent)
+ const unsubscribeData = terminalService.onData((sid, data) => {
+ if (sid === sessionId && ws.readyState === WebSocket.OPEN) {
+ ws.send(JSON.stringify({ type: 'data', data }));
}
+ });
- // NOW subscribe to terminal data (after scrollback is sent)
- const unsubscribeData = terminalService.onData((sid, data) => {
- if (sid === sessionId && ws.readyState === WebSocket.OPEN) {
- ws.send(JSON.stringify({ type: "data", data }));
- }
- });
-
- // Subscribe to terminal exit
- const unsubscribeExit = terminalService.onExit((sid, exitCode) => {
- if (sid === sessionId && ws.readyState === WebSocket.OPEN) {
- ws.send(JSON.stringify({ type: "exit", exitCode }));
- ws.close(1000, "Session ended");
- }
- });
+ // Subscribe to terminal exit
+ const unsubscribeExit = terminalService.onExit((sid, exitCode) => {
+ if (sid === sessionId && ws.readyState === WebSocket.OPEN) {
+ ws.send(JSON.stringify({ type: 'exit', exitCode }));
+ ws.close(1000, 'Session ended');
+ }
+ });
- // Handle incoming messages
- ws.on("message", (message) => {
- try {
- const msg = JSON.parse(message.toString());
+ // Handle incoming messages
+ ws.on('message', (message) => {
+ try {
+ const msg = JSON.parse(message.toString());
- switch (msg.type) {
- case "input":
- // Write user input to terminal
- terminalService.write(sessionId, msg.data);
+ switch (msg.type) {
+ case 'input':
+ // Validate input data type and length
+ if (typeof msg.data !== 'string') {
+ ws.send(JSON.stringify({ type: 'error', message: 'Invalid input type' }));
break;
+ }
+ // Limit input size to 1MB to prevent memory issues
+ if (msg.data.length > 1024 * 1024) {
+ ws.send(JSON.stringify({ type: 'error', message: 'Input too large' }));
+ break;
+ }
+ // Write user input to terminal
+ terminalService.write(sessionId, msg.data);
+ break;
+
+ case 'resize':
+ // Validate resize dimensions are positive integers within reasonable bounds
+ if (
+ typeof msg.cols !== 'number' ||
+ typeof msg.rows !== 'number' ||
+ !Number.isInteger(msg.cols) ||
+ !Number.isInteger(msg.rows) ||
+ msg.cols < 1 ||
+ msg.cols > 1000 ||
+ msg.rows < 1 ||
+ msg.rows > 500
+ ) {
+ break; // Silently ignore invalid resize requests
+ }
+ // Resize terminal with deduplication and rate limiting
+ if (msg.cols && msg.rows) {
+ const now = Date.now();
+ const lastTime = lastResizeTime.get(sessionId) || 0;
+ const lastDimensions = lastResizeDimensions.get(sessionId);
+
+ // Skip if resized too recently (prevents resize storm during splits)
+ if (now - lastTime < RESIZE_MIN_INTERVAL_MS) {
+ break;
+ }
- case "resize":
- // Resize terminal with deduplication and rate limiting
- if (msg.cols && msg.rows) {
- const now = Date.now();
- const lastTime = lastResizeTime.get(sessionId) || 0;
- const lastDimensions = lastResizeDimensions.get(sessionId);
-
- // Skip if resized too recently (prevents resize storm during splits)
- if (now - lastTime < RESIZE_MIN_INTERVAL_MS) {
- break;
- }
-
- // Check if dimensions are different from last resize
- if (
- !lastDimensions ||
- lastDimensions.cols !== msg.cols ||
- lastDimensions.rows !== msg.rows
- ) {
- // Only suppress output on subsequent resizes, not the first one
- // The first resize happens on terminal open and we don't want to drop the initial prompt
- const isFirstResize = !lastDimensions;
- terminalService.resize(sessionId, msg.cols, msg.rows, !isFirstResize);
- lastResizeDimensions.set(sessionId, {
- cols: msg.cols,
- rows: msg.rows,
- });
- lastResizeTime.set(sessionId, now);
- }
+ // Check if dimensions are different from last resize
+ if (
+ !lastDimensions ||
+ lastDimensions.cols !== msg.cols ||
+ lastDimensions.rows !== msg.rows
+ ) {
+ // Only suppress output on subsequent resizes, not the first one
+ // The first resize happens on terminal open and we don't want to drop the initial prompt
+ const isFirstResize = !lastDimensions;
+ terminalService.resize(sessionId, msg.cols, msg.rows, !isFirstResize);
+ lastResizeDimensions.set(sessionId, {
+ cols: msg.cols,
+ rows: msg.rows,
+ });
+ lastResizeTime.set(sessionId, now);
}
- break;
+ }
+ break;
- case "ping":
- // Respond to ping
- ws.send(JSON.stringify({ type: "pong" }));
- break;
+ case 'ping':
+ // Respond to ping
+ ws.send(JSON.stringify({ type: 'pong' }));
+ break;
- default:
- console.warn(`[Terminal WS] Unknown message type: ${msg.type}`);
- }
- } catch (error) {
- console.error("[Terminal WS] Error processing message:", error);
+ default:
+ console.warn(`[Terminal WS] Unknown message type: ${msg.type}`);
}
- });
+ } catch (error) {
+ console.error('[Terminal WS] Error processing message:', error);
+ }
+ });
- ws.on("close", () => {
- console.log(
- `[Terminal WS] Client disconnected from session ${sessionId}`
- );
- unsubscribeData();
- unsubscribeExit();
-
- // Remove from connections tracking
- const connections = terminalConnections.get(sessionId);
- if (connections) {
- connections.delete(ws);
- if (connections.size === 0) {
- terminalConnections.delete(sessionId);
- // DON'T delete lastResizeDimensions/lastResizeTime here!
- // The session still exists, and reconnecting clients need to know
- // this isn't the "first resize" to prevent duplicate prompts.
- // These get cleaned up when the session actually exits.
- }
+ ws.on('close', () => {
+ console.log(`[Terminal WS] Client disconnected from session ${sessionId}`);
+ unsubscribeData();
+ unsubscribeExit();
+
+ // Remove from connections tracking
+ const connections = terminalConnections.get(sessionId);
+ if (connections) {
+ connections.delete(ws);
+ if (connections.size === 0) {
+ terminalConnections.delete(sessionId);
+ // DON'T delete lastResizeDimensions/lastResizeTime here!
+ // The session still exists, and reconnecting clients need to know
+ // this isn't the "first resize" to prevent duplicate prompts.
+ // These get cleaned up when the session actually exits.
}
- });
+ }
+ });
- ws.on("error", (error) => {
- console.error(`[Terminal WS] Error on session ${sessionId}:`, error);
- unsubscribeData();
- unsubscribeExit();
- });
- }
-);
+ ws.on('error', (error) => {
+ console.error(`[Terminal WS] Error on session ${sessionId}:`, error);
+ unsubscribeData();
+ unsubscribeExit();
+ });
+});
// Start server with error handling for port conflicts
const startServer = (port: number) => {
server.listen(port, () => {
const terminalStatus = isTerminalEnabled()
? isTerminalPasswordRequired()
- ? "enabled (password protected)"
- : "enabled"
- : "disabled";
+ ? 'enabled (password protected)'
+ : 'enabled'
+ : 'disabled';
const portStr = port.toString().padEnd(4);
console.log(`
╔═══════════════════════════════════════════════════════╗
@@ -404,8 +416,8 @@ const startServer = (port: number) => {
`);
});
- server.on("error", (error: NodeJS.ErrnoException) => {
- if (error.code === "EADDRINUSE") {
+ server.on('error', (error: NodeJS.ErrnoException) => {
+ if (error.code === 'EADDRINUSE') {
console.error(`
╔═══════════════════════════════════════════════════════╗
║ ❌ ERROR: Port ${port} is already in use ║
@@ -426,7 +438,7 @@ const startServer = (port: number) => {
`);
process.exit(1);
} else {
- console.error("[Server] Error starting server:", error);
+ console.error('[Server] Error starting server:', error);
process.exit(1);
}
});
@@ -435,20 +447,20 @@ const startServer = (port: number) => {
startServer(PORT);
// Graceful shutdown
-process.on("SIGTERM", () => {
- console.log("SIGTERM received, shutting down...");
+process.on('SIGTERM', () => {
+ console.log('SIGTERM received, shutting down...');
terminalService.cleanup();
server.close(() => {
- console.log("Server closed");
+ console.log('Server closed');
process.exit(0);
});
});
-process.on("SIGINT", () => {
- console.log("SIGINT received, shutting down...");
+process.on('SIGINT', () => {
+ console.log('SIGINT received, shutting down...');
terminalService.cleanup();
server.close(() => {
- console.log("Server closed");
+ console.log('Server closed');
process.exit(0);
});
});
diff --git a/apps/server/src/lib/app-spec-format.ts b/apps/server/src/lib/app-spec-format.ts
index 523af533a..2894bbc4c 100644
--- a/apps/server/src/lib/app-spec-format.ts
+++ b/apps/server/src/lib/app-spec-format.ts
@@ -6,26 +6,26 @@
*/
// Import and re-export spec types from shared package
-export type { SpecOutput } from "@automaker/types";
-export { specOutputSchema } from "@automaker/types";
+export type { SpecOutput } from '@automaker/types';
+export { specOutputSchema } from '@automaker/types';
/**
* Escape special XML characters
*/
function escapeXml(str: string): string {
return str
-    .replace(/&/g, "&amp;")
-    .replace(/</g, "&lt;")
-    .replace(/>/g, "&gt;")
-    .replace(/"/g, "&quot;")
-    .replace(/'/g, "&apos;");
+    .replace(/&/g, '&amp;')
+    .replace(/</g, '&lt;')
+    .replace(/>/g, '&gt;')
+    .replace(/"/g, '&quot;')
+    .replace(/'/g, '&apos;');
}
/**
* Convert structured spec output to XML format
*/
-export function specToXml(spec: import("@automaker/types").SpecOutput): string {
- const indent = " ";
+export function specToXml(spec: import('@automaker/types').SpecOutput): string {
+ const indent = ' ';
let xml = `
@@ -36,11 +36,11 @@ ${indent}${indent}${escapeXml(spec.overview)}
${indent}
${indent}
-${spec.technology_stack.map((t) => `${indent}${indent}${escapeXml(t)}`).join("\n")}
+${spec.technology_stack.map((t) => `${indent}${indent}${escapeXml(t)}`).join('\n')}
${indent}
${indent}
-${spec.core_capabilities.map((c) => `${indent}${indent}${escapeXml(c)}`).join("\n")}
+${spec.core_capabilities.map((c) => `${indent}${indent}${escapeXml(c)}`).join('\n')}
${indent}
${indent}
@@ -51,13 +51,13 @@ ${indent}${indent}${indent}${escapeXml(f.name)}
${indent}${indent}${indent}${escapeXml(f.description)}${
f.file_locations && f.file_locations.length > 0
? `\n${indent}${indent}${indent}
-${f.file_locations.map((loc) => `${indent}${indent}${indent}${indent}${escapeXml(loc)}`).join("\n")}
+${f.file_locations.map((loc) => `${indent}${indent}${indent}${indent}${escapeXml(loc)}`).join('\n')}
${indent}${indent}${indent}`
- : ""
+ : ''
}
${indent}${indent}`
)
- .join("\n")}
+ .join('\n')}
${indent}`;
// Optional sections
@@ -65,7 +65,7 @@ ${indent}`;
xml += `
${indent}
-${spec.additional_requirements.map((r) => `${indent}${indent}${escapeXml(r)}`).join("\n")}
+${spec.additional_requirements.map((r) => `${indent}${indent}${escapeXml(r)}`).join('\n')}
${indent}`;
}
@@ -73,7 +73,7 @@ ${indent}`;
xml += `
${indent}
-${spec.development_guidelines.map((g) => `${indent}${indent}${escapeXml(g)}`).join("\n")}
+${spec.development_guidelines.map((g) => `${indent}${indent}${escapeXml(g)}`).join('\n')}
${indent}`;
}
@@ -89,7 +89,7 @@ ${indent}${indent}${indent}${escapeXml(r.status)}
${indent}${indent}${indent}${escapeXml(r.description)}
${indent}${indent}`
)
- .join("\n")}
+ .join('\n')}
${indent}`;
}
diff --git a/apps/server/src/lib/auth.ts b/apps/server/src/lib/auth.ts
index 331af2cf4..145c7b9dc 100644
--- a/apps/server/src/lib/auth.ts
+++ b/apps/server/src/lib/auth.ts
@@ -4,7 +4,7 @@
* Supports API key authentication via header or environment variable.
*/
-import type { Request, Response, NextFunction } from "express";
+import type { Request, Response, NextFunction } from 'express';
// API key from environment (optional - if not set, auth is disabled)
const API_KEY = process.env.AUTOMAKER_API_KEY;
@@ -23,12 +23,12 @@ export function authMiddleware(req: Request, res: Response, next: NextFunction):
}
// Check for API key in header
- const providedKey = req.headers["x-api-key"] as string | undefined;
+ const providedKey = req.headers['x-api-key'] as string | undefined;
if (!providedKey) {
res.status(401).json({
success: false,
- error: "Authentication required. Provide X-API-Key header.",
+ error: 'Authentication required. Provide X-API-Key header.',
});
return;
}
@@ -36,7 +36,7 @@ export function authMiddleware(req: Request, res: Response, next: NextFunction):
if (providedKey !== API_KEY) {
res.status(403).json({
success: false,
- error: "Invalid API key.",
+ error: 'Invalid API key.',
});
return;
}
@@ -57,6 +57,6 @@ export function isAuthEnabled(): boolean {
export function getAuthStatus(): { enabled: boolean; method: string } {
return {
enabled: !!API_KEY,
- method: API_KEY ? "api_key" : "none",
+ method: API_KEY ? 'api_key' : 'none',
};
}
diff --git a/apps/server/src/lib/events.ts b/apps/server/src/lib/events.ts
index 83b211845..c7bd15128 100644
--- a/apps/server/src/lib/events.ts
+++ b/apps/server/src/lib/events.ts
@@ -2,7 +2,7 @@
* Event emitter for streaming events to WebSocket clients
*/
-import type { EventType, EventCallback } from "@automaker/types";
+import type { EventType, EventCallback } from '@automaker/types';
// Re-export event types from shared package
export type { EventType, EventCallback };
@@ -21,7 +21,7 @@ export function createEventEmitter(): EventEmitter {
try {
callback(type, payload);
} catch (error) {
- console.error("Error in event subscriber:", error);
+ console.error('Error in event subscriber:', error);
}
}
},
diff --git a/apps/server/src/lib/secure-fs.ts b/apps/server/src/lib/secure-fs.ts
index eab1be0a5..cf927cbd7 100644
--- a/apps/server/src/lib/secure-fs.ts
+++ b/apps/server/src/lib/secure-fs.ts
@@ -3,7 +3,7 @@
* This file exists for backward compatibility with existing imports
*/
-import { secureFs } from "@automaker/platform";
+import { secureFs } from '@automaker/platform';
export const {
access,
diff --git a/apps/server/src/middleware/validate-paths.ts b/apps/server/src/middleware/validate-paths.ts
index 5973451fe..51b8ccb1a 100644
--- a/apps/server/src/middleware/validate-paths.ts
+++ b/apps/server/src/middleware/validate-paths.ts
@@ -4,8 +4,8 @@
* try-catch block in every route handler
*/
-import type { Request, Response, NextFunction } from "express";
-import { validatePath, PathNotAllowedError } from "@automaker/platform";
+import type { Request, Response, NextFunction } from 'express';
+import { validatePath, PathNotAllowedError } from '@automaker/platform';
/**
* Creates a middleware that validates specified path parameters in req.body
@@ -24,7 +24,7 @@ export function validatePathParams(...paramNames: string[]) {
try {
for (const paramName of paramNames) {
// Handle optional parameters (paramName?)
- if (paramName.endsWith("?")) {
+ if (paramName.endsWith('?')) {
const actualName = paramName.slice(0, -1);
const value = req.body[actualName];
if (value) {
@@ -34,7 +34,7 @@ export function validatePathParams(...paramNames: string[]) {
}
// Handle array parameters (paramName[])
- if (paramName.endsWith("[]")) {
+ if (paramName.endsWith('[]')) {
const actualName = paramName.slice(0, -2);
const values = req.body[actualName];
if (Array.isArray(values) && values.length > 0) {
diff --git a/apps/server/src/providers/base-provider.ts b/apps/server/src/providers/base-provider.ts
index f481b83c1..2b1880d3c 100644
--- a/apps/server/src/providers/base-provider.ts
+++ b/apps/server/src/providers/base-provider.ts
@@ -9,7 +9,7 @@ import type {
InstallationStatus,
ValidationResult,
ModelDefinition,
-} from "./types.js";
+} from './types.js';
/**
* Base provider class that all provider implementations must extend
@@ -33,9 +33,7 @@ export abstract class BaseProvider {
* @param options Execution options
* @returns AsyncGenerator yielding provider messages
*/
- abstract executeQuery(
- options: ExecuteOptions
-  ): AsyncGenerator<ProviderMessage>;
+  abstract executeQuery(options: ExecuteOptions): AsyncGenerator<ProviderMessage>;
/**
* Detect if the provider is installed and configured
@@ -59,7 +57,7 @@ export abstract class BaseProvider {
// Base validation (can be overridden)
if (!this.config) {
- errors.push("Provider config is missing");
+ errors.push('Provider config is missing');
}
return {
@@ -76,7 +74,7 @@ export abstract class BaseProvider {
*/
supportsFeature(feature: string): boolean {
// Default implementation - override in subclasses
- const commonFeatures = ["tools", "text"];
+ const commonFeatures = ['tools', 'text'];
return commonFeatures.includes(feature);
}
diff --git a/apps/server/src/providers/claude-provider.ts b/apps/server/src/providers/claude-provider.ts
index ea8471e1f..21df839ed 100644
--- a/apps/server/src/providers/claude-provider.ts
+++ b/apps/server/src/providers/claude-provider.ts
@@ -5,26 +5,24 @@
* with the provider architecture.
*/
-import { query, type Options } from "@anthropic-ai/claude-agent-sdk";
-import { BaseProvider } from "./base-provider.js";
+import { query, type Options } from '@anthropic-ai/claude-agent-sdk';
+import { BaseProvider } from './base-provider.js';
import type {
ExecuteOptions,
ProviderMessage,
InstallationStatus,
ModelDefinition,
-} from "./types.js";
+} from './types.js';
export class ClaudeProvider extends BaseProvider {
getName(): string {
- return "claude";
+ return 'claude';
}
/**
* Execute a query using Claude Agent SDK
*/
- async *executeQuery(
- options: ExecuteOptions
-  ): AsyncGenerator<ProviderMessage> {
+  async *executeQuery(options: ExecuteOptions): AsyncGenerator<ProviderMessage> {
const {
prompt,
model,
@@ -38,16 +36,7 @@ export class ClaudeProvider extends BaseProvider {
} = options;
// Build Claude SDK options
- const defaultTools = [
- "Read",
- "Write",
- "Edit",
- "Glob",
- "Grep",
- "Bash",
- "WebSearch",
- "WebFetch",
- ];
+ const defaultTools = ['Read', 'Write', 'Edit', 'Glob', 'Grep', 'Bash', 'WebSearch', 'WebFetch'];
const toolsToUse = allowedTools || defaultTools;
const sdkOptions: Options = {
@@ -56,7 +45,7 @@ export class ClaudeProvider extends BaseProvider {
maxTurns,
cwd,
allowedTools: toolsToUse,
- permissionMode: "acceptEdits",
+ permissionMode: 'acceptEdits',
sandbox: {
enabled: true,
autoAllowBashIfSandboxed: true,
@@ -75,10 +64,10 @@ export class ClaudeProvider extends BaseProvider {
// Multi-part prompt (with images)
promptPayload = (async function* () {
const multiPartPrompt = {
- type: "user" as const,
- session_id: "",
+ type: 'user' as const,
+ session_id: '',
message: {
- role: "user" as const,
+ role: 'user' as const,
content: prompt,
},
parent_tool_use_id: null,
@@ -99,10 +88,7 @@ export class ClaudeProvider extends BaseProvider {
yield msg as ProviderMessage;
}
} catch (error) {
- console.error(
- "[ClaudeProvider] executeQuery() error during execution:",
- error
- );
+ console.error('[ClaudeProvider] executeQuery() error during execution:', error);
throw error;
}
}
@@ -116,7 +102,7 @@ export class ClaudeProvider extends BaseProvider {
const status: InstallationStatus = {
installed: true,
- method: "sdk",
+ method: 'sdk',
hasApiKey,
authenticated: hasApiKey,
};
@@ -130,53 +116,53 @@ export class ClaudeProvider extends BaseProvider {
getAvailableModels(): ModelDefinition[] {
const models = [
{
- id: "claude-opus-4-5-20251101",
- name: "Claude Opus 4.5",
- modelString: "claude-opus-4-5-20251101",
- provider: "anthropic",
- description: "Most capable Claude model",
+ id: 'claude-opus-4-5-20251101',
+ name: 'Claude Opus 4.5',
+ modelString: 'claude-opus-4-5-20251101',
+ provider: 'anthropic',
+ description: 'Most capable Claude model',
contextWindow: 200000,
maxOutputTokens: 16000,
supportsVision: true,
supportsTools: true,
- tier: "premium" as const,
+ tier: 'premium' as const,
default: true,
},
{
- id: "claude-sonnet-4-20250514",
- name: "Claude Sonnet 4",
- modelString: "claude-sonnet-4-20250514",
- provider: "anthropic",
- description: "Balanced performance and cost",
+ id: 'claude-sonnet-4-20250514',
+ name: 'Claude Sonnet 4',
+ modelString: 'claude-sonnet-4-20250514',
+ provider: 'anthropic',
+ description: 'Balanced performance and cost',
contextWindow: 200000,
maxOutputTokens: 16000,
supportsVision: true,
supportsTools: true,
- tier: "standard" as const,
+ tier: 'standard' as const,
},
{
- id: "claude-3-5-sonnet-20241022",
- name: "Claude 3.5 Sonnet",
- modelString: "claude-3-5-sonnet-20241022",
- provider: "anthropic",
- description: "Fast and capable",
+ id: 'claude-3-5-sonnet-20241022',
+ name: 'Claude 3.5 Sonnet',
+ modelString: 'claude-3-5-sonnet-20241022',
+ provider: 'anthropic',
+ description: 'Fast and capable',
contextWindow: 200000,
maxOutputTokens: 8000,
supportsVision: true,
supportsTools: true,
- tier: "standard" as const,
+ tier: 'standard' as const,
},
{
- id: "claude-3-5-haiku-20241022",
- name: "Claude 3.5 Haiku",
- modelString: "claude-3-5-haiku-20241022",
- provider: "anthropic",
- description: "Fastest Claude model",
+ id: 'claude-3-5-haiku-20241022',
+ name: 'Claude 3.5 Haiku',
+ modelString: 'claude-3-5-haiku-20241022',
+ provider: 'anthropic',
+ description: 'Fastest Claude model',
contextWindow: 200000,
maxOutputTokens: 8000,
supportsVision: true,
supportsTools: true,
- tier: "basic" as const,
+ tier: 'basic' as const,
},
] satisfies ModelDefinition[];
return models;
@@ -186,7 +172,7 @@ export class ClaudeProvider extends BaseProvider {
* Check if the provider supports a specific feature
*/
supportsFeature(feature: string): boolean {
- const supportedFeatures = ["tools", "text", "vision", "thinking"];
+ const supportedFeatures = ['tools', 'text', 'vision', 'thinking'];
return supportedFeatures.includes(feature);
}
}
diff --git a/apps/server/src/providers/provider-factory.ts b/apps/server/src/providers/provider-factory.ts
index f45bf0088..0ef9b36ea 100644
--- a/apps/server/src/providers/provider-factory.ts
+++ b/apps/server/src/providers/provider-factory.ts
@@ -6,9 +6,9 @@
* new providers (Cursor, OpenCode, etc.) trivial - just add one line.
*/
-import { BaseProvider } from "./base-provider.js";
-import { ClaudeProvider } from "./claude-provider.js";
-import type { InstallationStatus } from "./types.js";
+import { BaseProvider } from './base-provider.js';
+import { ClaudeProvider } from './claude-provider.js';
+import type { InstallationStatus } from './types.js';
export class ProviderFactory {
/**
@@ -21,10 +21,7 @@ export class ProviderFactory {
const lowerModel = modelId.toLowerCase();
// Claude models (claude-*, opus, sonnet, haiku)
- if (
- lowerModel.startsWith("claude-") ||
- ["haiku", "sonnet", "opus"].includes(lowerModel)
- ) {
+ if (lowerModel.startsWith('claude-') || ['haiku', 'sonnet', 'opus'].includes(lowerModel)) {
return new ClaudeProvider();
}
@@ -37,9 +34,7 @@ export class ProviderFactory {
// }
// Default to Claude for unknown models
- console.warn(
- `[ProviderFactory] Unknown model prefix for "${modelId}", defaulting to Claude`
- );
+ console.warn(`[ProviderFactory] Unknown model prefix for "${modelId}", defaulting to Claude`);
return new ClaudeProvider();
}
@@ -58,9 +53,7 @@ export class ProviderFactory {
*
* @returns Map of provider name to installation status
*/
- static async checkAllProviders(): Promise<
-    Record<string, InstallationStatus>
-  > {
+  static async checkAllProviders(): Promise<Record<string, InstallationStatus>> {
const providers = this.getAllProviders();
    const statuses: Record<string, InstallationStatus> = {};
@@ -83,8 +76,8 @@ export class ProviderFactory {
const lowerName = name.toLowerCase();
switch (lowerName) {
- case "claude":
- case "anthropic":
+ case 'claude':
+ case 'anthropic':
return new ClaudeProvider();
// Future providers:
diff --git a/apps/server/src/providers/types.ts b/apps/server/src/providers/types.ts
index 6a05b6dfc..f3aa22d51 100644
--- a/apps/server/src/providers/types.ts
+++ b/apps/server/src/providers/types.ts
@@ -15,7 +15,7 @@ export interface ProviderConfig {
* Message in conversation history
*/
export interface ConversationMessage {
- role: "user" | "assistant";
+ role: 'user' | 'assistant';
content: string | Array<{ type: string; text?: string; source?: object }>;
}
@@ -39,7 +39,7 @@ export interface ExecuteOptions {
* Content block in a provider message (matches Claude SDK format)
*/
export interface ContentBlock {
- type: "text" | "tool_use" | "thinking" | "tool_result";
+ type: 'text' | 'tool_use' | 'thinking' | 'tool_result';
text?: string;
thinking?: string;
name?: string;
@@ -52,11 +52,11 @@ export interface ContentBlock {
* Message returned by a provider (matches Claude SDK streaming format)
*/
export interface ProviderMessage {
- type: "assistant" | "user" | "error" | "result";
- subtype?: "success" | "error";
+ type: 'assistant' | 'user' | 'error' | 'result';
+ subtype?: 'success' | 'error';
session_id?: string;
message?: {
- role: "user" | "assistant";
+ role: 'user' | 'assistant';
content: ContentBlock[];
};
result?: string;
@@ -71,7 +71,7 @@ export interface InstallationStatus {
installed: boolean;
path?: string;
version?: string;
- method?: "cli" | "npm" | "brew" | "sdk";
+ method?: 'cli' | 'npm' | 'brew' | 'sdk';
hasApiKey?: boolean;
authenticated?: boolean;
error?: string;
@@ -99,6 +99,6 @@ export interface ModelDefinition {
maxOutputTokens?: number;
supportsVision?: boolean;
supportsTools?: boolean;
- tier?: "basic" | "standard" | "premium";
+ tier?: 'basic' | 'standard' | 'premium';
default?: boolean;
}
diff --git a/apps/server/src/routes/agent/common.ts b/apps/server/src/routes/agent/common.ts
index 0eeeacf02..9b24a76a7 100644
--- a/apps/server/src/routes/agent/common.ts
+++ b/apps/server/src/routes/agent/common.ts
@@ -2,13 +2,10 @@
* Common utilities for agent routes
*/
-import { createLogger } from "@automaker/utils";
-import {
- getErrorMessage as getErrorMessageShared,
- createLogError,
-} from "../common.js";
+import { createLogger } from '@automaker/utils';
+import { getErrorMessage as getErrorMessageShared, createLogError } from '../common.js';
-const logger = createLogger("Agent");
+const logger = createLogger('Agent');
// Re-export shared utilities
export { getErrorMessageShared as getErrorMessage };
diff --git a/apps/server/src/routes/agent/index.ts b/apps/server/src/routes/agent/index.ts
index 61f346569..0ddf8741b 100644
--- a/apps/server/src/routes/agent/index.ts
+++ b/apps/server/src/routes/agent/index.ts
@@ -2,29 +2,30 @@
* Agent routes - HTTP API for Claude agent interactions
*/
-import { Router } from "express";
-import { AgentService } from "../../services/agent-service.js";
-import type { EventEmitter } from "../../lib/events.js";
-import { validatePathParams } from "../../middleware/validate-paths.js";
-import { createStartHandler } from "./routes/start.js";
-import { createSendHandler } from "./routes/send.js";
-import { createHistoryHandler } from "./routes/history.js";
-import { createStopHandler } from "./routes/stop.js";
-import { createClearHandler } from "./routes/clear.js";
-import { createModelHandler } from "./routes/model.js";
+import { Router } from 'express';
+import { AgentService } from '../../services/agent-service.js';
+import type { EventEmitter } from '../../lib/events.js';
+import { validatePathParams } from '../../middleware/validate-paths.js';
+import { createStartHandler } from './routes/start.js';
+import { createSendHandler } from './routes/send.js';
+import { createHistoryHandler } from './routes/history.js';
+import { createStopHandler } from './routes/stop.js';
+import { createClearHandler } from './routes/clear.js';
+import { createModelHandler } from './routes/model.js';
-export function createAgentRoutes(
- agentService: AgentService,
- _events: EventEmitter
-): Router {
+export function createAgentRoutes(agentService: AgentService, _events: EventEmitter): Router {
const router = Router();
- router.post("/start", validatePathParams("workingDirectory?"), createStartHandler(agentService));
- router.post("/send", validatePathParams("workingDirectory?", "imagePaths[]"), createSendHandler(agentService));
- router.post("/history", createHistoryHandler(agentService));
- router.post("/stop", createStopHandler(agentService));
- router.post("/clear", createClearHandler(agentService));
- router.post("/model", createModelHandler(agentService));
+ router.post('/start', validatePathParams('workingDirectory?'), createStartHandler(agentService));
+ router.post(
+ '/send',
+ validatePathParams('workingDirectory?', 'imagePaths[]'),
+ createSendHandler(agentService)
+ );
+ router.post('/history', createHistoryHandler(agentService));
+ router.post('/stop', createStopHandler(agentService));
+ router.post('/clear', createClearHandler(agentService));
+ router.post('/model', createModelHandler(agentService));
return router;
}
diff --git a/apps/server/src/routes/agent/routes/clear.ts b/apps/server/src/routes/agent/routes/clear.ts
index 42418331a..3ee605b60 100644
--- a/apps/server/src/routes/agent/routes/clear.ts
+++ b/apps/server/src/routes/agent/routes/clear.ts
@@ -2,9 +2,9 @@
* POST /clear endpoint - Clear conversation
*/
-import type { Request, Response } from "express";
-import { AgentService } from "../../../services/agent-service.js";
-import { getErrorMessage, logError } from "../common.js";
+import type { Request, Response } from 'express';
+import { AgentService } from '../../../services/agent-service.js';
+import { getErrorMessage, logError } from '../common.js';
export function createClearHandler(agentService: AgentService) {
  return async (req: Request, res: Response): Promise<void> => {
@@ -12,16 +12,14 @@ export function createClearHandler(agentService: AgentService) {
const { sessionId } = req.body as { sessionId: string };
if (!sessionId) {
- res
- .status(400)
- .json({ success: false, error: "sessionId is required" });
+ res.status(400).json({ success: false, error: 'sessionId is required' });
return;
}
const result = await agentService.clearSession(sessionId);
res.json(result);
} catch (error) {
- logError(error, "Clear session failed");
+ logError(error, 'Clear session failed');
res.status(500).json({ success: false, error: getErrorMessage(error) });
}
};
diff --git a/apps/server/src/routes/agent/routes/history.ts b/apps/server/src/routes/agent/routes/history.ts
index c2b23be82..0859a1420 100644
--- a/apps/server/src/routes/agent/routes/history.ts
+++ b/apps/server/src/routes/agent/routes/history.ts
@@ -2,9 +2,9 @@
* POST /history endpoint - Get conversation history
*/
-import type { Request, Response } from "express";
-import { AgentService } from "../../../services/agent-service.js";
-import { getErrorMessage, logError } from "../common.js";
+import type { Request, Response } from 'express';
+import { AgentService } from '../../../services/agent-service.js';
+import { getErrorMessage, logError } from '../common.js';
export function createHistoryHandler(agentService: AgentService) {
  return async (req: Request, res: Response): Promise<void> => {
@@ -12,16 +12,14 @@ export function createHistoryHandler(agentService: AgentService) {
const { sessionId } = req.body as { sessionId: string };
if (!sessionId) {
- res
- .status(400)
- .json({ success: false, error: "sessionId is required" });
+ res.status(400).json({ success: false, error: 'sessionId is required' });
return;
}
const result = agentService.getHistory(sessionId);
res.json(result);
} catch (error) {
- logError(error, "Get history failed");
+ logError(error, 'Get history failed');
res.status(500).json({ success: false, error: getErrorMessage(error) });
}
};
diff --git a/apps/server/src/routes/agent/routes/model.ts b/apps/server/src/routes/agent/routes/model.ts
index 2e1b933ec..8e1a1dddf 100644
--- a/apps/server/src/routes/agent/routes/model.ts
+++ b/apps/server/src/routes/agent/routes/model.ts
@@ -2,9 +2,9 @@
* POST /model endpoint - Set session model
*/
-import type { Request, Response } from "express";
-import { AgentService } from "../../../services/agent-service.js";
-import { getErrorMessage, logError } from "../common.js";
+import type { Request, Response } from 'express';
+import { AgentService } from '../../../services/agent-service.js';
+import { getErrorMessage, logError } from '../common.js';
export function createModelHandler(agentService: AgentService) {
  return async (req: Request, res: Response): Promise<void> => {
@@ -15,16 +15,14 @@ export function createModelHandler(agentService: AgentService) {
};
if (!sessionId || !model) {
- res
- .status(400)
- .json({ success: false, error: "sessionId and model are required" });
+ res.status(400).json({ success: false, error: 'sessionId and model are required' });
return;
}
const result = await agentService.setSessionModel(sessionId, model);
res.json({ success: result });
} catch (error) {
- logError(error, "Set session model failed");
+ logError(error, 'Set session model failed');
res.status(500).json({ success: false, error: getErrorMessage(error) });
}
};
diff --git a/apps/server/src/routes/agent/routes/send.ts b/apps/server/src/routes/agent/routes/send.ts
index b39ede761..0dd2f424d 100644
--- a/apps/server/src/routes/agent/routes/send.ts
+++ b/apps/server/src/routes/agent/routes/send.ts
@@ -2,28 +2,27 @@
* POST /send endpoint - Send a message
*/
-import type { Request, Response } from "express";
-import { AgentService } from "../../../services/agent-service.js";
-import { createLogger } from "@automaker/utils";
-import { getErrorMessage, logError } from "../common.js";
-const logger = createLogger("Agent");
+import type { Request, Response } from 'express';
+import { AgentService } from '../../../services/agent-service.js';
+import { createLogger } from '@automaker/utils';
+import { getErrorMessage, logError } from '../common.js';
+const logger = createLogger('Agent');
export function createSendHandler(agentService: AgentService) {
  return async (req: Request, res: Response): Promise<void> => {
try {
- const { sessionId, message, workingDirectory, imagePaths, model } =
- req.body as {
- sessionId: string;
- message: string;
- workingDirectory?: string;
- imagePaths?: string[];
- model?: string;
- };
+ const { sessionId, message, workingDirectory, imagePaths, model } = req.body as {
+ sessionId: string;
+ message: string;
+ workingDirectory?: string;
+ imagePaths?: string[];
+ model?: string;
+ };
if (!sessionId || !message) {
res.status(400).json({
success: false,
- error: "sessionId and message are required",
+ error: 'sessionId and message are required',
});
return;
}
@@ -38,13 +37,13 @@ export function createSendHandler(agentService: AgentService) {
model,
})
.catch((error) => {
- logError(error, "Send message failed (background)");
+ logError(error, 'Send message failed (background)');
});
// Return immediately - responses come via WebSocket
- res.json({ success: true, message: "Message sent" });
+ res.json({ success: true, message: 'Message sent' });
} catch (error) {
- logError(error, "Send message failed");
+ logError(error, 'Send message failed');
res.status(500).json({ success: false, error: getErrorMessage(error) });
}
};
diff --git a/apps/server/src/routes/agent/routes/start.ts b/apps/server/src/routes/agent/routes/start.ts
index 9f7d8da5d..1023fa389 100644
--- a/apps/server/src/routes/agent/routes/start.ts
+++ b/apps/server/src/routes/agent/routes/start.ts
@@ -2,11 +2,11 @@
* POST /start endpoint - Start a conversation
*/
-import type { Request, Response } from "express";
-import { AgentService } from "../../../services/agent-service.js";
-import { createLogger } from "@automaker/utils";
-import { getErrorMessage, logError } from "../common.js";
-const logger = createLogger("Agent");
+import type { Request, Response } from 'express';
+import { AgentService } from '../../../services/agent-service.js';
+import { createLogger } from '@automaker/utils';
+import { getErrorMessage, logError } from '../common.js';
+const logger = createLogger('Agent');
export function createStartHandler(agentService: AgentService) {
  return async (req: Request, res: Response): Promise<void> => {
@@ -17,9 +17,7 @@ export function createStartHandler(agentService: AgentService) {
};
if (!sessionId) {
- res
- .status(400)
- .json({ success: false, error: "sessionId is required" });
+ res.status(400).json({ success: false, error: 'sessionId is required' });
return;
}
@@ -30,7 +28,7 @@ export function createStartHandler(agentService: AgentService) {
res.json(result);
} catch (error) {
- logError(error, "Start conversation failed");
+ logError(error, 'Start conversation failed');
res.status(500).json({ success: false, error: getErrorMessage(error) });
}
};
diff --git a/apps/server/src/routes/agent/routes/stop.ts b/apps/server/src/routes/agent/routes/stop.ts
index 204c7d4ae..c5a5fe014 100644
--- a/apps/server/src/routes/agent/routes/stop.ts
+++ b/apps/server/src/routes/agent/routes/stop.ts
@@ -2,9 +2,9 @@
* POST /stop endpoint - Stop execution
*/
-import type { Request, Response } from "express";
-import { AgentService } from "../../../services/agent-service.js";
-import { getErrorMessage, logError } from "../common.js";
+import type { Request, Response } from 'express';
+import { AgentService } from '../../../services/agent-service.js';
+import { getErrorMessage, logError } from '../common.js';
export function createStopHandler(agentService: AgentService) {
  return async (req: Request, res: Response): Promise<void> => {
@@ -12,16 +12,14 @@ export function createStopHandler(agentService: AgentService) {
const { sessionId } = req.body as { sessionId: string };
if (!sessionId) {
- res
- .status(400)
- .json({ success: false, error: "sessionId is required" });
+ res.status(400).json({ success: false, error: 'sessionId is required' });
return;
}
const result = await agentService.stopExecution(sessionId);
res.json(result);
} catch (error) {
- logError(error, "Stop execution failed");
+ logError(error, 'Stop execution failed');
res.status(500).json({ success: false, error: getErrorMessage(error) });
}
};
diff --git a/apps/server/src/routes/app-spec/common.ts b/apps/server/src/routes/app-spec/common.ts
index 7d730043a..df412dc67 100644
--- a/apps/server/src/routes/app-spec/common.ts
+++ b/apps/server/src/routes/app-spec/common.ts
@@ -2,9 +2,9 @@
* Common utilities and state management for spec regeneration
*/
-import { createLogger } from "@automaker/utils";
+import { createLogger } from '@automaker/utils';
-const logger = createLogger("SpecRegeneration");
+const logger = createLogger('SpecRegeneration');
// Shared state for tracking generation status - private
let isRunning = false;
@@ -23,10 +23,7 @@ export function getSpecRegenerationStatus(): {
/**
* Set the running state and abort controller
*/
-export function setRunningState(
- running: boolean,
- controller: AbortController | null = null
-): void {
+export function setRunningState(running: boolean, controller: AbortController | null = null): void {
isRunning = running;
currentAbortController = controller;
}
@@ -40,14 +37,12 @@ export function logAuthStatus(context: string): void {
logger.info(`${context} - Auth Status:`);
logger.info(
` ANTHROPIC_API_KEY: ${
- hasApiKey
- ? "SET (" + process.env.ANTHROPIC_API_KEY?.substring(0, 20) + "...)"
- : "NOT SET"
+ hasApiKey ? 'SET (' + process.env.ANTHROPIC_API_KEY?.substring(0, 20) + '...)' : 'NOT SET'
}`
);
if (!hasApiKey) {
- logger.warn("⚠️ WARNING: No authentication configured! SDK will fail.");
+ logger.warn('⚠️ WARNING: No authentication configured! SDK will fail.');
}
}
@@ -56,16 +51,13 @@ export function logAuthStatus(context: string): void {
*/
export function logError(error: unknown, context: string): void {
logger.error(`❌ ${context}:`);
- logger.error("Error name:", (error as any)?.name);
- logger.error("Error message:", (error as Error)?.message);
- logger.error("Error stack:", (error as Error)?.stack);
- logger.error(
- "Full error object:",
- JSON.stringify(error, Object.getOwnPropertyNames(error), 2)
- );
+ logger.error('Error name:', (error as any)?.name);
+ logger.error('Error message:', (error as Error)?.message);
+ logger.error('Error stack:', (error as Error)?.stack);
+ logger.error('Full error object:', JSON.stringify(error, Object.getOwnPropertyNames(error), 2));
}
-import { getErrorMessage as getErrorMessageShared } from "../common.js";
+import { getErrorMessage as getErrorMessageShared } from '../common.js';
// Re-export shared utility
export { getErrorMessageShared as getErrorMessage };
diff --git a/apps/server/src/routes/app-spec/index.ts b/apps/server/src/routes/app-spec/index.ts
index b37907c8d..47950cd3c 100644
--- a/apps/server/src/routes/app-spec/index.ts
+++ b/apps/server/src/routes/app-spec/index.ts
@@ -2,25 +2,22 @@
* Spec Regeneration routes - HTTP API for AI-powered spec generation
*/
-import { Router } from "express";
-import type { EventEmitter } from "../../lib/events.js";
-import { createCreateHandler } from "./routes/create.js";
-import { createGenerateHandler } from "./routes/generate.js";
-import { createGenerateFeaturesHandler } from "./routes/generate-features.js";
-import { createStopHandler } from "./routes/stop.js";
-import { createStatusHandler } from "./routes/status.js";
+import { Router } from 'express';
+import type { EventEmitter } from '../../lib/events.js';
+import { createCreateHandler } from './routes/create.js';
+import { createGenerateHandler } from './routes/generate.js';
+import { createGenerateFeaturesHandler } from './routes/generate-features.js';
+import { createStopHandler } from './routes/stop.js';
+import { createStatusHandler } from './routes/status.js';
export function createSpecRegenerationRoutes(events: EventEmitter): Router {
const router = Router();
- router.post("/create", createCreateHandler(events));
- router.post("/generate", createGenerateHandler(events));
- router.post("/generate-features", createGenerateFeaturesHandler(events));
- router.post("/stop", createStopHandler());
- router.get("/status", createStatusHandler());
+ router.post('/create', createCreateHandler(events));
+ router.post('/generate', createGenerateHandler(events));
+ router.post('/generate-features', createGenerateFeaturesHandler(events));
+ router.post('/stop', createStopHandler());
+ router.get('/status', createStatusHandler());
return router;
}
-
-
-
diff --git a/apps/server/src/routes/app-spec/routes/create.ts b/apps/server/src/routes/app-spec/routes/create.ts
index 8ac211cb8..ed6f68f11 100644
--- a/apps/server/src/routes/app-spec/routes/create.ts
+++ b/apps/server/src/routes/app-spec/routes/create.ts
@@ -2,24 +2,24 @@
* POST /create endpoint - Create project spec from overview
*/
-import type { Request, Response } from "express";
-import type { EventEmitter } from "../../../lib/events.js";
-import { createLogger } from "@automaker/utils";
+import type { Request, Response } from 'express';
+import type { EventEmitter } from '../../../lib/events.js';
+import { createLogger } from '@automaker/utils';
import {
getSpecRegenerationStatus,
setRunningState,
logAuthStatus,
logError,
getErrorMessage,
-} from "../common.js";
-import { generateSpec } from "../generate-spec.js";
+} from '../common.js';
+import { generateSpec } from '../generate-spec.js';
-const logger = createLogger("SpecRegeneration");
+const logger = createLogger('SpecRegeneration');
export function createCreateHandler(events: EventEmitter) {
  return async (req: Request, res: Response): Promise<void> => {
- logger.info("========== /create endpoint called ==========");
- logger.debug("Request body:", JSON.stringify(req.body, null, 2));
+ logger.info('========== /create endpoint called ==========');
+ logger.debug('Request body:', JSON.stringify(req.body, null, 2));
try {
const { projectPath, projectOverview, generateFeatures, analyzeProject, maxFeatures } =
@@ -31,37 +31,34 @@ export function createCreateHandler(events: EventEmitter) {
maxFeatures?: number;
};
- logger.debug("Parsed params:");
- logger.debug(" projectPath:", projectPath);
- logger.debug(
- " projectOverview length:",
- `${projectOverview?.length || 0} chars`
- );
- logger.debug(" generateFeatures:", generateFeatures);
- logger.debug(" analyzeProject:", analyzeProject);
- logger.debug(" maxFeatures:", maxFeatures);
+ logger.debug('Parsed params:');
+ logger.debug(' projectPath:', projectPath);
+ logger.debug(' projectOverview length:', `${projectOverview?.length || 0} chars`);
+ logger.debug(' generateFeatures:', generateFeatures);
+ logger.debug(' analyzeProject:', analyzeProject);
+ logger.debug(' maxFeatures:', maxFeatures);
if (!projectPath || !projectOverview) {
- logger.error("Missing required parameters");
+ logger.error('Missing required parameters');
res.status(400).json({
success: false,
- error: "projectPath and projectOverview required",
+ error: 'projectPath and projectOverview required',
});
return;
}
const { isRunning } = getSpecRegenerationStatus();
if (isRunning) {
- logger.warn("Generation already running, rejecting request");
- res.json({ success: false, error: "Spec generation already running" });
+ logger.warn('Generation already running, rejecting request');
+ res.json({ success: false, error: 'Spec generation already running' });
return;
}
- logAuthStatus("Before starting generation");
+ logAuthStatus('Before starting generation');
const abortController = new AbortController();
setRunningState(true, abortController);
- logger.info("Starting background generation task...");
+ logger.info('Starting background generation task...');
// Start generation in background
generateSpec(
@@ -74,24 +71,22 @@ export function createCreateHandler(events: EventEmitter) {
maxFeatures
)
.catch((error) => {
- logError(error, "Generation failed with error");
- events.emit("spec-regeneration:event", {
- type: "spec_regeneration_error",
+ logError(error, 'Generation failed with error');
+ events.emit('spec-regeneration:event', {
+ type: 'spec_regeneration_error',
error: getErrorMessage(error),
projectPath: projectPath,
});
})
.finally(() => {
- logger.info("Generation task finished (success or error)");
+ logger.info('Generation task finished (success or error)');
setRunningState(false, null);
});
- logger.info(
- "Returning success response (generation running in background)"
- );
+ logger.info('Returning success response (generation running in background)');
res.json({ success: true });
} catch (error) {
- logError(error, "Create spec route handler failed");
+ logError(error, 'Create spec route handler failed');
res.status(500).json({ success: false, error: getErrorMessage(error) });
}
};
diff --git a/apps/server/src/routes/app-spec/routes/generate-features.ts b/apps/server/src/routes/app-spec/routes/generate-features.ts
index 0226cf153..a2e6143a6 100644
--- a/apps/server/src/routes/app-spec/routes/generate-features.ts
+++ b/apps/server/src/routes/app-spec/routes/generate-features.ts
@@ -2,24 +2,24 @@
* POST /generate-features endpoint - Generate features from existing spec
*/
-import type { Request, Response } from "express";
-import type { EventEmitter } from "../../../lib/events.js";
-import { createLogger } from "@automaker/utils";
+import type { Request, Response } from 'express';
+import type { EventEmitter } from '../../../lib/events.js';
+import { createLogger } from '@automaker/utils';
import {
getSpecRegenerationStatus,
setRunningState,
logAuthStatus,
logError,
getErrorMessage,
-} from "../common.js";
-import { generateFeaturesFromSpec } from "../generate-features-from-spec.js";
+} from '../common.js';
+import { generateFeaturesFromSpec } from '../generate-features-from-spec.js';
-const logger = createLogger("SpecRegeneration");
+const logger = createLogger('SpecRegeneration');
export function createGenerateFeaturesHandler(events: EventEmitter) {
  return async (req: Request, res: Response): Promise<void> => {
- logger.info("========== /generate-features endpoint called ==========");
- logger.debug("Request body:", JSON.stringify(req.body, null, 2));
+ logger.info('========== /generate-features endpoint called ==========');
+ logger.debug('Request body:', JSON.stringify(req.body, null, 2));
try {
const { projectPath, maxFeatures } = req.body as {
@@ -27,52 +27,45 @@ export function createGenerateFeaturesHandler(events: EventEmitter) {
maxFeatures?: number;
};
- logger.debug("projectPath:", projectPath);
- logger.debug("maxFeatures:", maxFeatures);
+ logger.debug('projectPath:', projectPath);
+ logger.debug('maxFeatures:', maxFeatures);
if (!projectPath) {
- logger.error("Missing projectPath parameter");
- res.status(400).json({ success: false, error: "projectPath required" });
+ logger.error('Missing projectPath parameter');
+ res.status(400).json({ success: false, error: 'projectPath required' });
return;
}
const { isRunning } = getSpecRegenerationStatus();
if (isRunning) {
- logger.warn("Generation already running, rejecting request");
- res.json({ success: false, error: "Generation already running" });
+ logger.warn('Generation already running, rejecting request');
+ res.json({ success: false, error: 'Generation already running' });
return;
}
- logAuthStatus("Before starting feature generation");
+ logAuthStatus('Before starting feature generation');
const abortController = new AbortController();
setRunningState(true, abortController);
- logger.info("Starting background feature generation task...");
+ logger.info('Starting background feature generation task...');
- generateFeaturesFromSpec(
- projectPath,
- events,
- abortController,
- maxFeatures
- )
+ generateFeaturesFromSpec(projectPath, events, abortController, maxFeatures)
.catch((error) => {
- logError(error, "Feature generation failed with error");
- events.emit("spec-regeneration:event", {
- type: "features_error",
+ logError(error, 'Feature generation failed with error');
+ events.emit('spec-regeneration:event', {
+ type: 'features_error',
error: getErrorMessage(error),
});
})
.finally(() => {
- logger.info("Feature generation task finished (success or error)");
+ logger.info('Feature generation task finished (success or error)');
setRunningState(false, null);
});
- logger.info(
- "Returning success response (generation running in background)"
- );
+ logger.info('Returning success response (generation running in background)');
res.json({ success: true });
} catch (error) {
- logError(error, "Generate features route handler failed");
+ logError(error, 'Generate features route handler failed');
res.status(500).json({ success: false, error: getErrorMessage(error) });
}
};
diff --git a/apps/server/src/routes/app-spec/routes/generate.ts b/apps/server/src/routes/app-spec/routes/generate.ts
index b866fa4ed..341d634d7 100644
--- a/apps/server/src/routes/app-spec/routes/generate.ts
+++ b/apps/server/src/routes/app-spec/routes/generate.ts
@@ -2,71 +2,63 @@
* POST /generate endpoint - Generate spec from project definition
*/
-import type { Request, Response } from "express";
-import type { EventEmitter } from "../../../lib/events.js";
-import { createLogger } from "@automaker/utils";
+import type { Request, Response } from 'express';
+import type { EventEmitter } from '../../../lib/events.js';
+import { createLogger } from '@automaker/utils';
import {
getSpecRegenerationStatus,
setRunningState,
logAuthStatus,
logError,
getErrorMessage,
-} from "../common.js";
-import { generateSpec } from "../generate-spec.js";
+} from '../common.js';
+import { generateSpec } from '../generate-spec.js';
-const logger = createLogger("SpecRegeneration");
+const logger = createLogger('SpecRegeneration');
export function createGenerateHandler(events: EventEmitter) {
  return async (req: Request, res: Response): Promise<void> => {
- logger.info("========== /generate endpoint called ==========");
- logger.debug("Request body:", JSON.stringify(req.body, null, 2));
+ logger.info('========== /generate endpoint called ==========');
+ logger.debug('Request body:', JSON.stringify(req.body, null, 2));
try {
- const {
- projectPath,
- projectDefinition,
- generateFeatures,
- analyzeProject,
- maxFeatures,
- } = req.body as {
- projectPath: string;
- projectDefinition: string;
- generateFeatures?: boolean;
- analyzeProject?: boolean;
- maxFeatures?: number;
- };
+ const { projectPath, projectDefinition, generateFeatures, analyzeProject, maxFeatures } =
+ req.body as {
+ projectPath: string;
+ projectDefinition: string;
+ generateFeatures?: boolean;
+ analyzeProject?: boolean;
+ maxFeatures?: number;
+ };
- logger.debug("Parsed params:");
- logger.debug(" projectPath:", projectPath);
- logger.debug(
- " projectDefinition length:",
- `${projectDefinition?.length || 0} chars`
- );
- logger.debug(" generateFeatures:", generateFeatures);
- logger.debug(" analyzeProject:", analyzeProject);
- logger.debug(" maxFeatures:", maxFeatures);
+ logger.debug('Parsed params:');
+ logger.debug(' projectPath:', projectPath);
+ logger.debug(' projectDefinition length:', `${projectDefinition?.length || 0} chars`);
+ logger.debug(' generateFeatures:', generateFeatures);
+ logger.debug(' analyzeProject:', analyzeProject);
+ logger.debug(' maxFeatures:', maxFeatures);
if (!projectPath || !projectDefinition) {
- logger.error("Missing required parameters");
+ logger.error('Missing required parameters');
res.status(400).json({
success: false,
- error: "projectPath and projectDefinition required",
+ error: 'projectPath and projectDefinition required',
});
return;
}
const { isRunning } = getSpecRegenerationStatus();
if (isRunning) {
- logger.warn("Generation already running, rejecting request");
- res.json({ success: false, error: "Spec generation already running" });
+ logger.warn('Generation already running, rejecting request');
+ res.json({ success: false, error: 'Spec generation already running' });
return;
}
- logAuthStatus("Before starting generation");
+ logAuthStatus('Before starting generation');
const abortController = new AbortController();
setRunningState(true, abortController);
- logger.info("Starting background generation task...");
+ logger.info('Starting background generation task...');
generateSpec(
projectPath,
@@ -78,24 +70,22 @@ export function createGenerateHandler(events: EventEmitter) {
maxFeatures
)
.catch((error) => {
- logError(error, "Generation failed with error");
- events.emit("spec-regeneration:event", {
- type: "spec_regeneration_error",
+ logError(error, 'Generation failed with error');
+ events.emit('spec-regeneration:event', {
+ type: 'spec_regeneration_error',
error: getErrorMessage(error),
projectPath: projectPath,
});
})
.finally(() => {
- logger.info("Generation task finished (success or error)");
+ logger.info('Generation task finished (success or error)');
setRunningState(false, null);
});
- logger.info(
- "Returning success response (generation running in background)"
- );
+ logger.info('Returning success response (generation running in background)');
res.json({ success: true });
} catch (error) {
- logError(error, "Generate spec route handler failed");
+ logError(error, 'Generate spec route handler failed');
res.status(500).json({ success: false, error: getErrorMessage(error) });
}
};
diff --git a/apps/server/src/routes/app-spec/routes/status.ts b/apps/server/src/routes/app-spec/routes/status.ts
index a3c1aac10..542dd4f30 100644
--- a/apps/server/src/routes/app-spec/routes/status.ts
+++ b/apps/server/src/routes/app-spec/routes/status.ts
@@ -2,8 +2,8 @@
* GET /status endpoint - Get generation status
*/
-import type { Request, Response } from "express";
-import { getSpecRegenerationStatus, getErrorMessage } from "../common.js";
+import type { Request, Response } from 'express';
+import { getSpecRegenerationStatus, getErrorMessage } from '../common.js';
export function createStatusHandler() {
  return async (_req: Request, res: Response): Promise<void> => {
diff --git a/apps/server/src/routes/app-spec/routes/stop.ts b/apps/server/src/routes/app-spec/routes/stop.ts
index 7c3bd5cac..0751147b9 100644
--- a/apps/server/src/routes/app-spec/routes/stop.ts
+++ b/apps/server/src/routes/app-spec/routes/stop.ts
@@ -2,12 +2,8 @@
* POST /stop endpoint - Stop generation
*/
-import type { Request, Response } from "express";
-import {
- getSpecRegenerationStatus,
- setRunningState,
- getErrorMessage,
-} from "../common.js";
+import type { Request, Response } from 'express';
+import { getSpecRegenerationStatus, setRunningState, getErrorMessage } from '../common.js';
export function createStopHandler() {
  return async (_req: Request, res: Response): Promise<void> => {
diff --git a/apps/server/src/routes/auto-mode/common.ts b/apps/server/src/routes/auto-mode/common.ts
index 048d47fae..8fe9c3ab2 100644
--- a/apps/server/src/routes/auto-mode/common.ts
+++ b/apps/server/src/routes/auto-mode/common.ts
@@ -2,13 +2,10 @@
* Common utilities for auto-mode routes
*/
-import { createLogger } from "@automaker/utils";
-import {
- getErrorMessage as getErrorMessageShared,
- createLogError,
-} from "../common.js";
+import { createLogger } from '@automaker/utils';
+import { getErrorMessage as getErrorMessageShared, createLogError } from '../common.js';
-const logger = createLogger("AutoMode");
+const logger = createLogger('AutoMode');
// Re-export shared utilities
export { getErrorMessageShared as getErrorMessage };
diff --git a/apps/server/src/routes/auto-mode/index.ts b/apps/server/src/routes/auto-mode/index.ts
index 6bdd7dbba..5f36d691a 100644
--- a/apps/server/src/routes/auto-mode/index.ts
+++ b/apps/server/src/routes/auto-mode/index.ts
@@ -4,37 +4,65 @@
* Uses the AutoModeService for real feature execution with Claude Agent SDK
*/
-import { Router } from "express";
-import type { AutoModeService } from "../../services/auto-mode-service.js";
-import { validatePathParams } from "../../middleware/validate-paths.js";
-import { createStopFeatureHandler } from "./routes/stop-feature.js";
-import { createStatusHandler } from "./routes/status.js";
-import { createRunFeatureHandler } from "./routes/run-feature.js";
-import { createVerifyFeatureHandler } from "./routes/verify-feature.js";
-import { createResumeFeatureHandler } from "./routes/resume-feature.js";
-import { createContextExistsHandler } from "./routes/context-exists.js";
-import { createAnalyzeProjectHandler } from "./routes/analyze-project.js";
-import { createFollowUpFeatureHandler } from "./routes/follow-up-feature.js";
-import { createCommitFeatureHandler } from "./routes/commit-feature.js";
-import { createApprovePlanHandler } from "./routes/approve-plan.js";
+import { Router } from 'express';
+import type { AutoModeService } from '../../services/auto-mode-service.js';
+import { validatePathParams } from '../../middleware/validate-paths.js';
+import { createStopFeatureHandler } from './routes/stop-feature.js';
+import { createStatusHandler } from './routes/status.js';
+import { createRunFeatureHandler } from './routes/run-feature.js';
+import { createVerifyFeatureHandler } from './routes/verify-feature.js';
+import { createResumeFeatureHandler } from './routes/resume-feature.js';
+import { createContextExistsHandler } from './routes/context-exists.js';
+import { createAnalyzeProjectHandler } from './routes/analyze-project.js';
+import { createFollowUpFeatureHandler } from './routes/follow-up-feature.js';
+import { createCommitFeatureHandler } from './routes/commit-feature.js';
+import { createApprovePlanHandler } from './routes/approve-plan.js';
export function createAutoModeRoutes(autoModeService: AutoModeService): Router {
const router = Router();
- router.post("/stop-feature", createStopFeatureHandler(autoModeService));
- router.post("/status", validatePathParams("projectPath?"), createStatusHandler(autoModeService));
- router.post("/run-feature", validatePathParams("projectPath"), createRunFeatureHandler(autoModeService));
- router.post("/verify-feature", validatePathParams("projectPath"), createVerifyFeatureHandler(autoModeService));
- router.post("/resume-feature", validatePathParams("projectPath"), createResumeFeatureHandler(autoModeService));
- router.post("/context-exists", validatePathParams("projectPath"), createContextExistsHandler(autoModeService));
- router.post("/analyze-project", validatePathParams("projectPath"), createAnalyzeProjectHandler(autoModeService));
- router.post(
- "/follow-up-feature",
- validatePathParams("projectPath", "imagePaths[]"),
+ router.post('/stop-feature', createStopFeatureHandler(autoModeService));
+ router.post('/status', validatePathParams('projectPath?'), createStatusHandler(autoModeService));
+ router.post(
+ '/run-feature',
+ validatePathParams('projectPath'),
+ createRunFeatureHandler(autoModeService)
+ );
+ router.post(
+ '/verify-feature',
+ validatePathParams('projectPath'),
+ createVerifyFeatureHandler(autoModeService)
+ );
+ router.post(
+ '/resume-feature',
+ validatePathParams('projectPath'),
+ createResumeFeatureHandler(autoModeService)
+ );
+ router.post(
+ '/context-exists',
+ validatePathParams('projectPath'),
+ createContextExistsHandler(autoModeService)
+ );
+ router.post(
+ '/analyze-project',
+ validatePathParams('projectPath'),
+ createAnalyzeProjectHandler(autoModeService)
+ );
+ router.post(
+ '/follow-up-feature',
+ validatePathParams('projectPath', 'imagePaths[]'),
createFollowUpFeatureHandler(autoModeService)
);
- router.post("/commit-feature", validatePathParams("projectPath", "worktreePath?"), createCommitFeatureHandler(autoModeService));
- router.post("/approve-plan", validatePathParams("projectPath"), createApprovePlanHandler(autoModeService));
+ router.post(
+ '/commit-feature',
+ validatePathParams('projectPath', 'worktreePath?'),
+ createCommitFeatureHandler(autoModeService)
+ );
+ router.post(
+ '/approve-plan',
+ validatePathParams('projectPath'),
+ createApprovePlanHandler(autoModeService)
+ );
return router;
}
diff --git a/apps/server/src/routes/auto-mode/routes/analyze-project.ts b/apps/server/src/routes/auto-mode/routes/analyze-project.ts
index 492b28b53..77c95e27d 100644
--- a/apps/server/src/routes/auto-mode/routes/analyze-project.ts
+++ b/apps/server/src/routes/auto-mode/routes/analyze-project.ts
@@ -2,12 +2,12 @@
* POST /analyze-project endpoint - Analyze project
*/
-import type { Request, Response } from "express";
-import type { AutoModeService } from "../../../services/auto-mode-service.js";
-import { createLogger } from "@automaker/utils";
-import { getErrorMessage, logError } from "../common.js";
+import type { Request, Response } from 'express';
+import type { AutoModeService } from '../../../services/auto-mode-service.js';
+import { createLogger } from '@automaker/utils';
+import { getErrorMessage, logError } from '../common.js';
-const logger = createLogger("AutoMode");
+const logger = createLogger('AutoMode');
export function createAnalyzeProjectHandler(autoModeService: AutoModeService) {
  return async (req: Request, res: Response): Promise<void> => {
@@ -15,9 +15,7 @@ export function createAnalyzeProjectHandler(autoModeService: AutoModeService) {
const { projectPath } = req.body as { projectPath: string };
if (!projectPath) {
- res
- .status(400)
- .json({ success: false, error: "projectPath is required" });
+ res.status(400).json({ success: false, error: 'projectPath is required' });
return;
}
@@ -26,9 +24,9 @@ export function createAnalyzeProjectHandler(autoModeService: AutoModeService) {
logger.error(`[AutoMode] Project analysis error:`, error);
});
- res.json({ success: true, message: "Project analysis started" });
+ res.json({ success: true, message: 'Project analysis started' });
} catch (error) {
- logError(error, "Analyze project failed");
+ logError(error, 'Analyze project failed');
res.status(500).json({ success: false, error: getErrorMessage(error) });
}
};
diff --git a/apps/server/src/routes/auto-mode/routes/approve-plan.ts b/apps/server/src/routes/auto-mode/routes/approve-plan.ts
index ce3db20be..c006e5065 100644
--- a/apps/server/src/routes/auto-mode/routes/approve-plan.ts
+++ b/apps/server/src/routes/auto-mode/routes/approve-plan.ts
@@ -2,12 +2,12 @@
* POST /approve-plan endpoint - Approve or reject a generated plan/spec
*/
-import type { Request, Response } from "express";
-import type { AutoModeService } from "../../../services/auto-mode-service.js";
-import { createLogger } from "@automaker/utils";
-import { getErrorMessage, logError } from "../common.js";
+import type { Request, Response } from 'express';
+import type { AutoModeService } from '../../../services/auto-mode-service.js';
+import { createLogger } from '@automaker/utils';
+import { getErrorMessage, logError } from '../common.js';
-const logger = createLogger("AutoMode");
+const logger = createLogger('AutoMode');
export function createApprovePlanHandler(autoModeService: AutoModeService) {
  return async (req: Request, res: Response): Promise<void> => {
@@ -23,15 +23,15 @@ export function createApprovePlanHandler(autoModeService: AutoModeService) {
if (!featureId) {
res.status(400).json({
success: false,
- error: "featureId is required",
+ error: 'featureId is required',
});
return;
}
- if (typeof approved !== "boolean") {
+ if (typeof approved !== 'boolean') {
res.status(400).json({
success: false,
- error: "approved must be a boolean",
+ error: 'approved must be a boolean',
});
return;
}
@@ -41,9 +41,9 @@ export function createApprovePlanHandler(autoModeService: AutoModeService) {
// This supports cases where the server restarted while waiting for approval
logger.info(
- `[AutoMode] Plan ${approved ? "approved" : "rejected"} for feature ${featureId}${
- editedPlan ? " (with edits)" : ""
- }${feedback ? ` - Feedback: ${feedback}` : ""}`
+ `[AutoMode] Plan ${approved ? 'approved' : 'rejected'} for feature ${featureId}${
+ editedPlan ? ' (with edits)' : ''
+ }${feedback ? ` - Feedback: ${feedback}` : ''}`
);
// Resolve the pending approval (with recovery support)
@@ -67,11 +67,11 @@ export function createApprovePlanHandler(autoModeService: AutoModeService) {
success: true,
approved,
message: approved
- ? "Plan approved - implementation will continue"
- : "Plan rejected - feature execution stopped",
+ ? 'Plan approved - implementation will continue'
+ : 'Plan rejected - feature execution stopped',
});
} catch (error) {
- logError(error, "Approve plan failed");
+ logError(error, 'Approve plan failed');
res.status(500).json({ success: false, error: getErrorMessage(error) });
}
};
diff --git a/apps/server/src/routes/auto-mode/routes/commit-feature.ts b/apps/server/src/routes/auto-mode/routes/commit-feature.ts
index aaf2e6f58..7db0ae323 100644
--- a/apps/server/src/routes/auto-mode/routes/commit-feature.ts
+++ b/apps/server/src/routes/auto-mode/routes/commit-feature.ts
@@ -2,9 +2,9 @@
* POST /commit-feature endpoint - Commit feature changes
*/
-import type { Request, Response } from "express";
-import type { AutoModeService } from "../../../services/auto-mode-service.js";
-import { getErrorMessage, logError } from "../common.js";
+import type { Request, Response } from 'express';
+import type { AutoModeService } from '../../../services/auto-mode-service.js';
+import { getErrorMessage, logError } from '../common.js';
export function createCommitFeatureHandler(autoModeService: AutoModeService) {
  return async (req: Request, res: Response): Promise<void> => {
@@ -16,23 +16,17 @@ export function createCommitFeatureHandler(autoModeService: AutoModeService) {
};
if (!projectPath || !featureId) {
- res
- .status(400)
- .json({
- success: false,
- error: "projectPath and featureId are required",
- });
+ res.status(400).json({
+ success: false,
+ error: 'projectPath and featureId are required',
+ });
return;
}
- const commitHash = await autoModeService.commitFeature(
- projectPath,
- featureId,
- worktreePath
- );
+ const commitHash = await autoModeService.commitFeature(projectPath, featureId, worktreePath);
res.json({ success: true, commitHash });
} catch (error) {
- logError(error, "Commit feature failed");
+ logError(error, 'Commit feature failed');
res.status(500).json({ success: false, error: getErrorMessage(error) });
}
};
diff --git a/apps/server/src/routes/auto-mode/routes/context-exists.ts b/apps/server/src/routes/auto-mode/routes/context-exists.ts
index 32ebb4ce5..ef028f3f3 100644
--- a/apps/server/src/routes/auto-mode/routes/context-exists.ts
+++ b/apps/server/src/routes/auto-mode/routes/context-exists.ts
@@ -2,9 +2,9 @@
* POST /context-exists endpoint - Check if context exists for a feature
*/
-import type { Request, Response } from "express";
-import type { AutoModeService } from "../../../services/auto-mode-service.js";
-import { getErrorMessage, logError } from "../common.js";
+import type { Request, Response } from 'express';
+import type { AutoModeService } from '../../../services/auto-mode-service.js';
+import { getErrorMessage, logError } from '../common.js';
export function createContextExistsHandler(autoModeService: AutoModeService) {
  return async (req: Request, res: Response): Promise<void> => {
@@ -15,22 +15,17 @@ export function createContextExistsHandler(autoModeService: AutoModeService) {
};
if (!projectPath || !featureId) {
- res
- .status(400)
- .json({
- success: false,
- error: "projectPath and featureId are required",
- });
+ res.status(400).json({
+ success: false,
+ error: 'projectPath and featureId are required',
+ });
return;
}
- const exists = await autoModeService.contextExists(
- projectPath,
- featureId
- );
+ const exists = await autoModeService.contextExists(projectPath, featureId);
res.json({ success: true, exists });
} catch (error) {
- logError(error, "Check context exists failed");
+ logError(error, 'Check context exists failed');
res.status(500).json({ success: false, error: getErrorMessage(error) });
}
};
diff --git a/apps/server/src/routes/auto-mode/routes/follow-up-feature.ts b/apps/server/src/routes/auto-mode/routes/follow-up-feature.ts
index 4560f09b5..1ed14c39c 100644
--- a/apps/server/src/routes/auto-mode/routes/follow-up-feature.ts
+++ b/apps/server/src/routes/auto-mode/routes/follow-up-feature.ts
@@ -2,29 +2,28 @@
* POST /follow-up-feature endpoint - Follow up on a feature
*/
-import type { Request, Response } from "express";
-import type { AutoModeService } from "../../../services/auto-mode-service.js";
-import { createLogger } from "@automaker/utils";
-import { getErrorMessage, logError } from "../common.js";
+import type { Request, Response } from 'express';
+import type { AutoModeService } from '../../../services/auto-mode-service.js';
+import { createLogger } from '@automaker/utils';
+import { getErrorMessage, logError } from '../common.js';
-const logger = createLogger("AutoMode");
+const logger = createLogger('AutoMode');
export function createFollowUpFeatureHandler(autoModeService: AutoModeService) {
  return async (req: Request, res: Response): Promise<void> => {
try {
- const { projectPath, featureId, prompt, imagePaths, useWorktrees } =
- req.body as {
- projectPath: string;
- featureId: string;
- prompt: string;
- imagePaths?: string[];
- useWorktrees?: boolean;
- };
+ const { projectPath, featureId, prompt, imagePaths, useWorktrees } = req.body as {
+ projectPath: string;
+ featureId: string;
+ prompt: string;
+ imagePaths?: string[];
+ useWorktrees?: boolean;
+ };
if (!projectPath || !featureId || !prompt) {
res.status(400).json({
success: false,
- error: "projectPath, featureId, and prompt are required",
+ error: 'projectPath, featureId, and prompt are required',
});
return;
}
@@ -32,18 +31,9 @@ export function createFollowUpFeatureHandler(autoModeService: AutoModeService) {
// Start follow-up in background
// followUpFeature derives workDir from feature.branchName
autoModeService
- .followUpFeature(
- projectPath,
- featureId,
- prompt,
- imagePaths,
- useWorktrees ?? true
- )
+ .followUpFeature(projectPath, featureId, prompt, imagePaths, useWorktrees ?? true)
.catch((error) => {
- logger.error(
- `[AutoMode] Follow up feature ${featureId} error:`,
- error
- );
+ logger.error(`[AutoMode] Follow up feature ${featureId} error:`, error);
})
.finally(() => {
// Release the starting slot when follow-up completes (success or error)
@@ -52,7 +42,7 @@ export function createFollowUpFeatureHandler(autoModeService: AutoModeService) {
res.json({ success: true });
} catch (error) {
- logError(error, "Follow up feature failed");
+ logError(error, 'Follow up feature failed');
res.status(500).json({ success: false, error: getErrorMessage(error) });
}
};
diff --git a/apps/server/src/routes/auto-mode/routes/resume-feature.ts b/apps/server/src/routes/auto-mode/routes/resume-feature.ts
index 12471fc49..198f24efc 100644
--- a/apps/server/src/routes/auto-mode/routes/resume-feature.ts
+++ b/apps/server/src/routes/auto-mode/routes/resume-feature.ts
@@ -2,12 +2,12 @@
* POST /resume-feature endpoint - Resume a feature
*/
-import type { Request, Response } from "express";
-import type { AutoModeService } from "../../../services/auto-mode-service.js";
-import { createLogger } from "@automaker/utils";
-import { getErrorMessage, logError } from "../common.js";
+import type { Request, Response } from 'express';
+import type { AutoModeService } from '../../../services/auto-mode-service.js';
+import { createLogger } from '@automaker/utils';
+import { getErrorMessage, logError } from '../common.js';
-const logger = createLogger("AutoMode");
+const logger = createLogger('AutoMode');
export function createResumeFeatureHandler(autoModeService: AutoModeService) {
  return async (req: Request, res: Response): Promise<void> => {
@@ -21,7 +21,7 @@ export function createResumeFeatureHandler(autoModeService: AutoModeService) {
if (!projectPath || !featureId) {
res.status(400).json({
success: false,
- error: "projectPath and featureId are required",
+ error: 'projectPath and featureId are required',
});
return;
}
@@ -36,7 +36,7 @@ export function createResumeFeatureHandler(autoModeService: AutoModeService) {
res.json({ success: true });
} catch (error) {
- logError(error, "Resume feature failed");
+ logError(error, 'Resume feature failed');
res.status(500).json({ success: false, error: getErrorMessage(error) });
}
};
diff --git a/apps/server/src/routes/auto-mode/routes/run-feature.ts b/apps/server/src/routes/auto-mode/routes/run-feature.ts
index bb6f6ef77..16ed475a7 100644
--- a/apps/server/src/routes/auto-mode/routes/run-feature.ts
+++ b/apps/server/src/routes/auto-mode/routes/run-feature.ts
@@ -2,12 +2,12 @@
* POST /run-feature endpoint - Run a single feature
*/
-import type { Request, Response } from "express";
-import type { AutoModeService } from "../../../services/auto-mode-service.js";
-import { createLogger } from "@automaker/utils";
-import { getErrorMessage, logError } from "../common.js";
+import type { Request, Response } from 'express';
+import type { AutoModeService } from '../../../services/auto-mode-service.js';
+import { createLogger } from '@automaker/utils';
+import { getErrorMessage, logError } from '../common.js';
-const logger = createLogger("AutoMode");
+const logger = createLogger('AutoMode');
export function createRunFeatureHandler(autoModeService: AutoModeService) {
  return async (req: Request, res: Response): Promise<void> => {
@@ -21,7 +21,7 @@ export function createRunFeatureHandler(autoModeService: AutoModeService) {
if (!projectPath || !featureId) {
res.status(400).json({
success: false,
- error: "projectPath and featureId are required",
+ error: 'projectPath and featureId are required',
});
return;
}
@@ -40,7 +40,7 @@ export function createRunFeatureHandler(autoModeService: AutoModeService) {
res.json({ success: true });
} catch (error) {
- logError(error, "Run feature failed");
+ logError(error, 'Run feature failed');
res.status(500).json({ success: false, error: getErrorMessage(error) });
}
};
diff --git a/apps/server/src/routes/auto-mode/routes/status.ts b/apps/server/src/routes/auto-mode/routes/status.ts
index ba0ee8a17..9a1b46903 100644
--- a/apps/server/src/routes/auto-mode/routes/status.ts
+++ b/apps/server/src/routes/auto-mode/routes/status.ts
@@ -2,9 +2,9 @@
* POST /status endpoint - Get auto mode status
*/
-import type { Request, Response } from "express";
-import type { AutoModeService } from "../../../services/auto-mode-service.js";
-import { getErrorMessage, logError } from "../common.js";
+import type { Request, Response } from 'express';
+import type { AutoModeService } from '../../../services/auto-mode-service.js';
+import { getErrorMessage, logError } from '../common.js';
export function createStatusHandler(autoModeService: AutoModeService) {
  return async (req: Request, res: Response): Promise<void> => {
@@ -15,7 +15,7 @@ export function createStatusHandler(autoModeService: AutoModeService) {
...status,
});
} catch (error) {
- logError(error, "Get status failed");
+ logError(error, 'Get status failed');
res.status(500).json({ success: false, error: getErrorMessage(error) });
}
};
diff --git a/apps/server/src/routes/auto-mode/routes/stop-feature.ts b/apps/server/src/routes/auto-mode/routes/stop-feature.ts
index 0468e9d38..bec9a4aa0 100644
--- a/apps/server/src/routes/auto-mode/routes/stop-feature.ts
+++ b/apps/server/src/routes/auto-mode/routes/stop-feature.ts
@@ -2,9 +2,9 @@
* POST /stop-feature endpoint - Stop a specific feature
*/
-import type { Request, Response } from "express";
-import type { AutoModeService } from "../../../services/auto-mode-service.js";
-import { getErrorMessage, logError } from "../common.js";
+import type { Request, Response } from 'express';
+import type { AutoModeService } from '../../../services/auto-mode-service.js';
+import { getErrorMessage, logError } from '../common.js';
export function createStopFeatureHandler(autoModeService: AutoModeService) {
  return async (req: Request, res: Response): Promise<void> => {
@@ -12,16 +12,14 @@ export function createStopFeatureHandler(autoModeService: AutoModeService) {
const { featureId } = req.body as { featureId: string };
if (!featureId) {
- res
- .status(400)
- .json({ success: false, error: "featureId is required" });
+ res.status(400).json({ success: false, error: 'featureId is required' });
return;
}
const stopped = await autoModeService.stopFeature(featureId);
res.json({ success: true, stopped });
} catch (error) {
- logError(error, "Stop feature failed");
+ logError(error, 'Stop feature failed');
res.status(500).json({ success: false, error: getErrorMessage(error) });
}
};
diff --git a/apps/server/src/routes/auto-mode/routes/verify-feature.ts b/apps/server/src/routes/auto-mode/routes/verify-feature.ts
index 456eecb27..f8f4f6f70 100644
--- a/apps/server/src/routes/auto-mode/routes/verify-feature.ts
+++ b/apps/server/src/routes/auto-mode/routes/verify-feature.ts
@@ -2,9 +2,9 @@
* POST /verify-feature endpoint - Verify a feature
*/
-import type { Request, Response } from "express";
-import type { AutoModeService } from "../../../services/auto-mode-service.js";
-import { getErrorMessage, logError } from "../common.js";
+import type { Request, Response } from 'express';
+import type { AutoModeService } from '../../../services/auto-mode-service.js';
+import { getErrorMessage, logError } from '../common.js';
export function createVerifyFeatureHandler(autoModeService: AutoModeService) {
  return async (req: Request, res: Response): Promise<void> => {
@@ -15,22 +15,17 @@ export function createVerifyFeatureHandler(autoModeService: AutoModeService) {
};
if (!projectPath || !featureId) {
- res
- .status(400)
- .json({
- success: false,
- error: "projectPath and featureId are required",
- });
+ res.status(400).json({
+ success: false,
+ error: 'projectPath and featureId are required',
+ });
return;
}
- const passes = await autoModeService.verifyFeature(
- projectPath,
- featureId
- );
+ const passes = await autoModeService.verifyFeature(projectPath, featureId);
res.json({ success: true, passes });
} catch (error) {
- logError(error, "Verify feature failed");
+ logError(error, 'Verify feature failed');
res.status(500).json({ success: false, error: getErrorMessage(error) });
}
};
diff --git a/apps/server/src/routes/claude/index.ts b/apps/server/src/routes/claude/index.ts
index f951aa34e..326b6d907 100644
--- a/apps/server/src/routes/claude/index.ts
+++ b/apps/server/src/routes/claude/index.ts
@@ -1,18 +1,18 @@
-import { Router, Request, Response } from "express";
-import { ClaudeUsageService } from "../../services/claude-usage-service.js";
+import { Router, Request, Response } from 'express';
+import { ClaudeUsageService } from '../../services/claude-usage-service.js';
export function createClaudeRoutes(service: ClaudeUsageService): Router {
const router = Router();
// Get current usage (fetches from Claude CLI)
- router.get("/usage", async (req: Request, res: Response) => {
+ router.get('/usage', async (req: Request, res: Response) => {
try {
// Check if Claude CLI is available first
const isAvailable = await service.isAvailable();
if (!isAvailable) {
res.status(503).json({
- error: "Claude CLI not found",
- message: "Please install Claude Code CLI and run 'claude login' to authenticate"
+ error: 'Claude CLI not found',
+ message: "Please install Claude Code CLI and run 'claude login' to authenticate",
});
return;
}
@@ -20,20 +20,20 @@ export function createClaudeRoutes(service: ClaudeUsageService): Router {
const usage = await service.fetchUsageData();
res.json(usage);
} catch (error) {
- const message = error instanceof Error ? error.message : "Unknown error";
+ const message = error instanceof Error ? error.message : 'Unknown error';
- if (message.includes("Authentication required") || message.includes("token_expired")) {
+ if (message.includes('Authentication required') || message.includes('token_expired')) {
res.status(401).json({
- error: "Authentication required",
- message: "Please run 'claude login' to authenticate"
+ error: 'Authentication required',
+ message: "Please run 'claude login' to authenticate",
});
- } else if (message.includes("timed out")) {
+ } else if (message.includes('timed out')) {
res.status(504).json({
- error: "Command timed out",
- message: "The Claude CLI took too long to respond"
+ error: 'Command timed out',
+ message: 'The Claude CLI took too long to respond',
});
} else {
- console.error("Error fetching usage:", error);
+ console.error('Error fetching usage:', error);
res.status(500).json({ error: message });
}
}
diff --git a/apps/server/src/routes/claude/types.ts b/apps/server/src/routes/claude/types.ts
index 2f6eb5974..bd8927462 100644
--- a/apps/server/src/routes/claude/types.ts
+++ b/apps/server/src/routes/claude/types.ts
@@ -29,7 +29,7 @@ export type ClaudeUsage = {
export type ClaudeStatus = {
indicator: {
- color: "green" | "yellow" | "orange" | "red" | "gray";
+ color: 'green' | 'yellow' | 'orange' | 'red' | 'gray';
};
description: string;
};
diff --git a/apps/server/src/routes/common.ts b/apps/server/src/routes/common.ts
index c2bc9a848..14589ffd8 100644
--- a/apps/server/src/routes/common.ts
+++ b/apps/server/src/routes/common.ts
@@ -2,7 +2,7 @@
* Common utilities shared across all route modules
*/
-import { createLogger } from "@automaker/utils";
+import { createLogger } from '@automaker/utils';
// Re-export git utilities from shared package
export {
@@ -16,7 +16,7 @@ export {
listAllFilesInDirectory,
generateDiffsForNonGitDirectory,
getGitRepositoryDiffs,
-} from "@automaker/git-utils";
+} from '@automaker/git-utils';
 type Logger = ReturnType<typeof createLogger>;
@@ -24,7 +24,7 @@ type Logger = ReturnType<typeof createLogger>;
* Get error message from error object
*/
export function getErrorMessage(error: unknown): string {
- return error instanceof Error ? error.message : "Unknown error";
+ return error instanceof Error ? error.message : 'Unknown error';
}
/**
diff --git a/apps/server/src/routes/enhance-prompt/index.ts b/apps/server/src/routes/enhance-prompt/index.ts
index bd414a5db..952bf3474 100644
--- a/apps/server/src/routes/enhance-prompt/index.ts
+++ b/apps/server/src/routes/enhance-prompt/index.ts
@@ -5,8 +5,8 @@
* with different enhancement modes (improve, expand, simplify, etc.)
*/
-import { Router } from "express";
-import { createEnhanceHandler } from "./routes/enhance.js";
+import { Router } from 'express';
+import { createEnhanceHandler } from './routes/enhance.js';
/**
* Create the enhance-prompt router
@@ -16,7 +16,7 @@ import { createEnhanceHandler } from "./routes/enhance.js";
export function createEnhancePromptRoutes(): Router {
const router = Router();
- router.post("/", createEnhanceHandler());
+ router.post('/', createEnhanceHandler());
return router;
}
diff --git a/apps/server/src/routes/enhance-prompt/routes/enhance.ts b/apps/server/src/routes/enhance-prompt/routes/enhance.ts
index 9c7611b59..e0edd515f 100644
--- a/apps/server/src/routes/enhance-prompt/routes/enhance.ts
+++ b/apps/server/src/routes/enhance-prompt/routes/enhance.ts
@@ -5,19 +5,19 @@
* Supports modes: improve, technical, simplify, acceptance
*/
-import type { Request, Response } from "express";
-import { query } from "@anthropic-ai/claude-agent-sdk";
-import { createLogger } from "@automaker/utils";
-import { resolveModelString } from "@automaker/model-resolver";
-import { CLAUDE_MODEL_MAP } from "@automaker/types";
+import type { Request, Response } from 'express';
+import { query } from '@anthropic-ai/claude-agent-sdk';
+import { createLogger } from '@automaker/utils';
+import { resolveModelString } from '@automaker/model-resolver';
+import { CLAUDE_MODEL_MAP } from '@automaker/types';
import {
getSystemPrompt,
buildUserPrompt,
isValidEnhancementMode,
type EnhancementMode,
-} from "../../../lib/enhancement-prompts.js";
+} from '../../../lib/enhancement-prompts.js';
-const logger = createLogger("EnhancePrompt");
+const logger = createLogger('EnhancePrompt');
/**
* Request body for the enhance endpoint
@@ -63,16 +63,16 @@ async function extractTextFromStream(
};
}>
): Promise<string> {
- let responseText = "";
+ let responseText = '';
for await (const msg of stream) {
- if (msg.type === "assistant" && msg.message?.content) {
+ if (msg.type === 'assistant' && msg.message?.content) {
for (const block of msg.message.content) {
- if (block.type === "text" && block.text) {
+ if (block.type === 'text' && block.text) {
responseText += block.text;
}
}
- } else if (msg.type === "result" && msg.subtype === "success") {
+ } else if (msg.type === 'result' && msg.subtype === 'success') {
responseText = msg.result || responseText;
}
}
@@ -85,29 +85,25 @@ async function extractTextFromStream(
*
* @returns Express request handler for text enhancement
*/
-export function createEnhanceHandler(): (
- req: Request,
- res: Response
-) => Promise<void> {
+export function createEnhanceHandler(): (req: Request, res: Response) => Promise<void> {
  return async (req: Request, res: Response): Promise<void> => {
try {
- const { originalText, enhancementMode, model } =
- req.body as EnhanceRequestBody;
+ const { originalText, enhancementMode, model } = req.body as EnhanceRequestBody;
// Validate required fields
- if (!originalText || typeof originalText !== "string") {
+ if (!originalText || typeof originalText !== 'string') {
const response: EnhanceErrorResponse = {
success: false,
- error: "originalText is required and must be a string",
+ error: 'originalText is required and must be a string',
};
res.status(400).json(response);
return;
}
- if (!enhancementMode || typeof enhancementMode !== "string") {
+ if (!enhancementMode || typeof enhancementMode !== 'string') {
const response: EnhanceErrorResponse = {
success: false,
- error: "enhancementMode is required and must be a string",
+ error: 'enhancementMode is required and must be a string',
};
res.status(400).json(response);
return;
@@ -118,7 +114,7 @@ export function createEnhanceHandler(): (
if (trimmedText.length === 0) {
const response: EnhanceErrorResponse = {
success: false,
- error: "originalText cannot be empty",
+ error: 'originalText cannot be empty',
};
res.status(400).json(response);
return;
@@ -128,11 +124,9 @@ export function createEnhanceHandler(): (
const normalizedMode = enhancementMode.toLowerCase();
const validMode: EnhancementMode = isValidEnhancementMode(normalizedMode)
? normalizedMode
- : "improve";
+ : 'improve';
- logger.info(
- `Enhancing text with mode: ${validMode}, length: ${trimmedText.length} chars`
- );
+ logger.info(`Enhancing text with mode: ${validMode}, length: ${trimmedText.length} chars`);
// Get the system prompt for this mode
const systemPrompt = getSystemPrompt(validMode);
@@ -155,7 +149,7 @@ export function createEnhanceHandler(): (
systemPrompt,
maxTurns: 1,
allowedTools: [],
- permissionMode: "acceptEdits",
+ permissionMode: 'acceptEdits',
},
});
@@ -163,18 +157,16 @@ export function createEnhanceHandler(): (
const enhancedText = await extractTextFromStream(stream);
if (!enhancedText || enhancedText.trim().length === 0) {
- logger.warn("Received empty response from Claude");
+ logger.warn('Received empty response from Claude');
const response: EnhanceErrorResponse = {
success: false,
- error: "Failed to generate enhanced text - empty response",
+ error: 'Failed to generate enhanced text - empty response',
};
res.status(500).json(response);
return;
}
- logger.info(
- `Enhancement complete, output length: ${enhancedText.length} chars`
- );
+ logger.info(`Enhancement complete, output length: ${enhancedText.length} chars`);
const response: EnhanceSuccessResponse = {
success: true,
@@ -182,9 +174,8 @@ export function createEnhanceHandler(): (
};
res.json(response);
} catch (error) {
- const errorMessage =
- error instanceof Error ? error.message : "Unknown error occurred";
- logger.error("Enhancement failed:", errorMessage);
+ const errorMessage = error instanceof Error ? error.message : 'Unknown error occurred';
+ logger.error('Enhancement failed:', errorMessage);
const response: EnhanceErrorResponse = {
success: false,
diff --git a/apps/server/src/routes/features/common.ts b/apps/server/src/routes/features/common.ts
index 5006586ff..7a5bf8f16 100644
--- a/apps/server/src/routes/features/common.ts
+++ b/apps/server/src/routes/features/common.ts
@@ -2,13 +2,10 @@
* Common utilities for features routes
*/
-import { createLogger } from "@automaker/utils";
-import {
- getErrorMessage as getErrorMessageShared,
- createLogError,
-} from "../common.js";
+import { createLogger } from '@automaker/utils';
+import { getErrorMessage as getErrorMessageShared, createLogError } from '../common.js';
-const logger = createLogger("Features");
+const logger = createLogger('Features');
// Re-export shared utilities
export { getErrorMessageShared as getErrorMessage };
diff --git a/apps/server/src/routes/features/index.ts b/apps/server/src/routes/features/index.ts
index dcd98f56b..5863c4d49 100644
--- a/apps/server/src/routes/features/index.ts
+++ b/apps/server/src/routes/features/index.ts
@@ -2,27 +2,27 @@
* Features routes - HTTP API for feature management
*/
-import { Router } from "express";
-import { FeatureLoader } from "../../services/feature-loader.js";
-import { validatePathParams } from "../../middleware/validate-paths.js";
-import { createListHandler } from "./routes/list.js";
-import { createGetHandler } from "./routes/get.js";
-import { createCreateHandler } from "./routes/create.js";
-import { createUpdateHandler } from "./routes/update.js";
-import { createDeleteHandler } from "./routes/delete.js";
-import { createAgentOutputHandler } from "./routes/agent-output.js";
-import { createGenerateTitleHandler } from "./routes/generate-title.js";
+import { Router } from 'express';
+import { FeatureLoader } from '../../services/feature-loader.js';
+import { validatePathParams } from '../../middleware/validate-paths.js';
+import { createListHandler } from './routes/list.js';
+import { createGetHandler } from './routes/get.js';
+import { createCreateHandler } from './routes/create.js';
+import { createUpdateHandler } from './routes/update.js';
+import { createDeleteHandler } from './routes/delete.js';
+import { createAgentOutputHandler } from './routes/agent-output.js';
+import { createGenerateTitleHandler } from './routes/generate-title.js';
export function createFeaturesRoutes(featureLoader: FeatureLoader): Router {
const router = Router();
- router.post("/list", validatePathParams("projectPath"), createListHandler(featureLoader));
- router.post("/get", validatePathParams("projectPath"), createGetHandler(featureLoader));
- router.post("/create", validatePathParams("projectPath"), createCreateHandler(featureLoader));
- router.post("/update", validatePathParams("projectPath"), createUpdateHandler(featureLoader));
- router.post("/delete", validatePathParams("projectPath"), createDeleteHandler(featureLoader));
- router.post("/agent-output", createAgentOutputHandler(featureLoader));
- router.post("/generate-title", createGenerateTitleHandler());
+ router.post('/list', validatePathParams('projectPath'), createListHandler(featureLoader));
+ router.post('/get', validatePathParams('projectPath'), createGetHandler(featureLoader));
+ router.post('/create', validatePathParams('projectPath'), createCreateHandler(featureLoader));
+ router.post('/update', validatePathParams('projectPath'), createUpdateHandler(featureLoader));
+ router.post('/delete', validatePathParams('projectPath'), createDeleteHandler(featureLoader));
+ router.post('/agent-output', createAgentOutputHandler(featureLoader));
+ router.post('/generate-title', createGenerateTitleHandler());
return router;
}
diff --git a/apps/server/src/routes/features/routes/agent-output.ts b/apps/server/src/routes/features/routes/agent-output.ts
index 62f8f50ac..f928644a2 100644
--- a/apps/server/src/routes/features/routes/agent-output.ts
+++ b/apps/server/src/routes/features/routes/agent-output.ts
@@ -2,9 +2,9 @@
* POST /agent-output endpoint - Get agent output for a feature
*/
-import type { Request, Response } from "express";
-import { FeatureLoader } from "../../../services/feature-loader.js";
-import { getErrorMessage, logError } from "../common.js";
+import type { Request, Response } from 'express';
+import { FeatureLoader } from '../../../services/feature-loader.js';
+import { getErrorMessage, logError } from '../common.js';
export function createAgentOutputHandler(featureLoader: FeatureLoader) {
  return async (req: Request, res: Response): Promise<void> => {
@@ -15,22 +15,17 @@ export function createAgentOutputHandler(featureLoader: FeatureLoader) {
};
if (!projectPath || !featureId) {
- res
- .status(400)
- .json({
- success: false,
- error: "projectPath and featureId are required",
- });
+ res.status(400).json({
+ success: false,
+ error: 'projectPath and featureId are required',
+ });
return;
}
- const content = await featureLoader.getAgentOutput(
- projectPath,
- featureId
- );
+ const content = await featureLoader.getAgentOutput(projectPath, featureId);
res.json({ success: true, content });
} catch (error) {
- logError(error, "Get agent output failed");
+ logError(error, 'Get agent output failed');
res.status(500).json({ success: false, error: getErrorMessage(error) });
}
};
diff --git a/apps/server/src/routes/features/routes/create.ts b/apps/server/src/routes/features/routes/create.ts
index cd95b4870..5f04ecdb3 100644
--- a/apps/server/src/routes/features/routes/create.ts
+++ b/apps/server/src/routes/features/routes/create.ts
@@ -2,10 +2,10 @@
* POST /create endpoint - Create a new feature
*/
-import type { Request, Response } from "express";
-import { FeatureLoader } from "../../../services/feature-loader.js";
-import type { Feature } from "@automaker/types";
-import { getErrorMessage, logError } from "../common.js";
+import type { Request, Response } from 'express';
+import { FeatureLoader } from '../../../services/feature-loader.js';
+import type { Feature } from '@automaker/types';
+import { getErrorMessage, logError } from '../common.js';
export function createCreateHandler(featureLoader: FeatureLoader) {
  return async (req: Request, res: Response): Promise<void> => {
@@ -18,7 +18,7 @@ export function createCreateHandler(featureLoader: FeatureLoader) {
if (!projectPath || !feature) {
res.status(400).json({
success: false,
- error: "projectPath and feature are required",
+ error: 'projectPath and feature are required',
});
return;
}
@@ -26,7 +26,7 @@ export function createCreateHandler(featureLoader: FeatureLoader) {
const created = await featureLoader.create(projectPath, feature);
res.json({ success: true, feature: created });
} catch (error) {
- logError(error, "Create feature failed");
+ logError(error, 'Create feature failed');
res.status(500).json({ success: false, error: getErrorMessage(error) });
}
};
diff --git a/apps/server/src/routes/features/routes/delete.ts b/apps/server/src/routes/features/routes/delete.ts
index bf5408d5b..2b6831f67 100644
--- a/apps/server/src/routes/features/routes/delete.ts
+++ b/apps/server/src/routes/features/routes/delete.ts
@@ -2,9 +2,9 @@
* POST /delete endpoint - Delete a feature
*/
-import type { Request, Response } from "express";
-import { FeatureLoader } from "../../../services/feature-loader.js";
-import { getErrorMessage, logError } from "../common.js";
+import type { Request, Response } from 'express';
+import { FeatureLoader } from '../../../services/feature-loader.js';
+import { getErrorMessage, logError } from '../common.js';
export function createDeleteHandler(featureLoader: FeatureLoader) {
  return async (req: Request, res: Response): Promise<void> => {
@@ -15,19 +15,17 @@ export function createDeleteHandler(featureLoader: FeatureLoader) {
};
if (!projectPath || !featureId) {
- res
- .status(400)
- .json({
- success: false,
- error: "projectPath and featureId are required",
- });
+ res.status(400).json({
+ success: false,
+ error: 'projectPath and featureId are required',
+ });
return;
}
const success = await featureLoader.delete(projectPath, featureId);
res.json({ success });
} catch (error) {
- logError(error, "Delete feature failed");
+ logError(error, 'Delete feature failed');
res.status(500).json({ success: false, error: getErrorMessage(error) });
}
};
diff --git a/apps/server/src/routes/features/routes/generate-title.ts b/apps/server/src/routes/features/routes/generate-title.ts
index 8c6c9a30a..1225a8256 100644
--- a/apps/server/src/routes/features/routes/generate-title.ts
+++ b/apps/server/src/routes/features/routes/generate-title.ts
@@ -4,12 +4,12 @@
* Uses Claude Haiku to generate a short, descriptive title from feature description.
*/
-import type { Request, Response } from "express";
-import { query } from "@anthropic-ai/claude-agent-sdk";
-import { createLogger } from "@automaker/utils";
-import { CLAUDE_MODEL_MAP } from "@automaker/model-resolver";
+import type { Request, Response } from 'express';
+import { query } from '@anthropic-ai/claude-agent-sdk';
+import { createLogger } from '@automaker/utils';
+import { CLAUDE_MODEL_MAP } from '@automaker/model-resolver';
-const logger = createLogger("GenerateTitle");
+const logger = createLogger('GenerateTitle');
interface GenerateTitleRequestBody {
description: string;
@@ -44,16 +44,16 @@ async function extractTextFromStream(
};
}>
): Promise<string> {
- let responseText = "";
+ let responseText = '';
for await (const msg of stream) {
- if (msg.type === "assistant" && msg.message?.content) {
+ if (msg.type === 'assistant' && msg.message?.content) {
for (const block of msg.message.content) {
- if (block.type === "text" && block.text) {
+ if (block.type === 'text' && block.text) {
responseText += block.text;
}
}
- } else if (msg.type === "result" && msg.subtype === "success") {
+ } else if (msg.type === 'result' && msg.subtype === 'success') {
responseText = msg.result || responseText;
}
}
@@ -61,18 +61,15 @@ async function extractTextFromStream(
return responseText;
}
-export function createGenerateTitleHandler(): (
- req: Request,
- res: Response
-) => Promise<void> {
+export function createGenerateTitleHandler(): (req: Request, res: Response) => Promise<void> {
  return async (req: Request, res: Response): Promise<void> => {
try {
const { description } = req.body as GenerateTitleRequestBody;
- if (!description || typeof description !== "string") {
+ if (!description || typeof description !== 'string') {
const response: GenerateTitleErrorResponse = {
success: false,
- error: "description is required and must be a string",
+ error: 'description is required and must be a string',
};
res.status(400).json(response);
return;
@@ -82,7 +79,7 @@ export function createGenerateTitleHandler(): (
if (trimmedDescription.length === 0) {
const response: GenerateTitleErrorResponse = {
success: false,
- error: "description cannot be empty",
+ error: 'description cannot be empty',
};
res.status(400).json(response);
return;
@@ -99,17 +96,17 @@ export function createGenerateTitleHandler(): (
systemPrompt: SYSTEM_PROMPT,
maxTurns: 1,
allowedTools: [],
- permissionMode: "acceptEdits",
+ permissionMode: 'acceptEdits',
},
});
const title = await extractTextFromStream(stream);
if (!title || title.trim().length === 0) {
- logger.warn("Received empty response from Claude");
+ logger.warn('Received empty response from Claude');
const response: GenerateTitleErrorResponse = {
success: false,
- error: "Failed to generate title - empty response",
+ error: 'Failed to generate title - empty response',
};
res.status(500).json(response);
return;
@@ -123,9 +120,8 @@ export function createGenerateTitleHandler(): (
};
res.json(response);
} catch (error) {
- const errorMessage =
- error instanceof Error ? error.message : "Unknown error occurred";
- logger.error("Title generation failed:", errorMessage);
+ const errorMessage = error instanceof Error ? error.message : 'Unknown error occurred';
+ logger.error('Title generation failed:', errorMessage);
const response: GenerateTitleErrorResponse = {
success: false,
diff --git a/apps/server/src/routes/features/routes/get.ts b/apps/server/src/routes/features/routes/get.ts
index 17900bb0a..96f63fb8a 100644
--- a/apps/server/src/routes/features/routes/get.ts
+++ b/apps/server/src/routes/features/routes/get.ts
@@ -2,9 +2,9 @@
* POST /get endpoint - Get a single feature
*/
-import type { Request, Response } from "express";
-import { FeatureLoader } from "../../../services/feature-loader.js";
-import { getErrorMessage, logError } from "../common.js";
+import type { Request, Response } from 'express';
+import { FeatureLoader } from '../../../services/feature-loader.js';
+import { getErrorMessage, logError } from '../common.js';
export function createGetHandler(featureLoader: FeatureLoader) {
  return async (req: Request, res: Response): Promise<void> => {
@@ -15,24 +15,22 @@ export function createGetHandler(featureLoader: FeatureLoader) {
};
if (!projectPath || !featureId) {
- res
- .status(400)
- .json({
- success: false,
- error: "projectPath and featureId are required",
- });
+ res.status(400).json({
+ success: false,
+ error: 'projectPath and featureId are required',
+ });
return;
}
const feature = await featureLoader.get(projectPath, featureId);
if (!feature) {
- res.status(404).json({ success: false, error: "Feature not found" });
+ res.status(404).json({ success: false, error: 'Feature not found' });
return;
}
res.json({ success: true, feature });
} catch (error) {
- logError(error, "Get feature failed");
+ logError(error, 'Get feature failed');
res.status(500).json({ success: false, error: getErrorMessage(error) });
}
};
diff --git a/apps/server/src/routes/features/routes/list.ts b/apps/server/src/routes/features/routes/list.ts
index cc20b1a17..00127fc98 100644
--- a/apps/server/src/routes/features/routes/list.ts
+++ b/apps/server/src/routes/features/routes/list.ts
@@ -2,9 +2,9 @@
* POST /list endpoint - List all features for a project
*/
-import type { Request, Response } from "express";
-import { FeatureLoader } from "../../../services/feature-loader.js";
-import { getErrorMessage, logError } from "../common.js";
+import type { Request, Response } from 'express';
+import { FeatureLoader } from '../../../services/feature-loader.js';
+import { getErrorMessage, logError } from '../common.js';
export function createListHandler(featureLoader: FeatureLoader) {
  return async (req: Request, res: Response): Promise<void> => {
@@ -12,16 +12,14 @@ export function createListHandler(featureLoader: FeatureLoader) {
const { projectPath } = req.body as { projectPath: string };
if (!projectPath) {
- res
- .status(400)
- .json({ success: false, error: "projectPath is required" });
+ res.status(400).json({ success: false, error: 'projectPath is required' });
return;
}
const features = await featureLoader.getAll(projectPath);
res.json({ success: true, features });
} catch (error) {
- logError(error, "List features failed");
+ logError(error, 'List features failed');
res.status(500).json({ success: false, error: getErrorMessage(error) });
}
};
diff --git a/apps/server/src/routes/features/routes/update.ts b/apps/server/src/routes/features/routes/update.ts
index 8c4c7b685..830fb21a2 100644
--- a/apps/server/src/routes/features/routes/update.ts
+++ b/apps/server/src/routes/features/routes/update.ts
@@ -2,10 +2,10 @@
* POST /update endpoint - Update a feature
*/
-import type { Request, Response } from "express";
-import { FeatureLoader } from "../../../services/feature-loader.js";
-import type { Feature } from "@automaker/types";
-import { getErrorMessage, logError } from "../common.js";
+import type { Request, Response } from 'express';
+import { FeatureLoader } from '../../../services/feature-loader.js';
+import type { Feature } from '@automaker/types';
+import { getErrorMessage, logError } from '../common.js';
export function createUpdateHandler(featureLoader: FeatureLoader) {
  return async (req: Request, res: Response): Promise<void> => {
@@ -19,19 +19,15 @@ export function createUpdateHandler(featureLoader: FeatureLoader) {
if (!projectPath || !featureId || !updates) {
res.status(400).json({
success: false,
- error: "projectPath, featureId, and updates are required",
+ error: 'projectPath, featureId, and updates are required',
});
return;
}
- const updated = await featureLoader.update(
- projectPath,
- featureId,
- updates
- );
+ const updated = await featureLoader.update(projectPath, featureId, updates);
res.json({ success: true, feature: updated });
} catch (error) {
- logError(error, "Update feature failed");
+ logError(error, 'Update feature failed');
res.status(500).json({ success: false, error: getErrorMessage(error) });
}
};
diff --git a/apps/server/src/routes/fs/common.ts b/apps/server/src/routes/fs/common.ts
index 84191451c..6386c83e5 100644
--- a/apps/server/src/routes/fs/common.ts
+++ b/apps/server/src/routes/fs/common.ts
@@ -2,13 +2,10 @@
* Common utilities for fs routes
*/
-import { createLogger } from "@automaker/utils";
-import {
- getErrorMessage as getErrorMessageShared,
- createLogError,
-} from "../common.js";
+import { createLogger } from '@automaker/utils';
+import { getErrorMessage as getErrorMessageShared, createLogError } from '../common.js';
-const logger = createLogger("FS");
+const logger = createLogger('FS');
// Re-export shared utilities
export { getErrorMessageShared as getErrorMessage };
diff --git a/apps/server/src/routes/fs/index.ts b/apps/server/src/routes/fs/index.ts
index 6fc67dadb..58732b3a9 100644
--- a/apps/server/src/routes/fs/index.ts
+++ b/apps/server/src/routes/fs/index.ts
@@ -3,40 +3,40 @@
* Provides REST API equivalents for Electron IPC file operations
*/
-import { Router } from "express";
-import type { EventEmitter } from "../../lib/events.js";
-import { createReadHandler } from "./routes/read.js";
-import { createWriteHandler } from "./routes/write.js";
-import { createMkdirHandler } from "./routes/mkdir.js";
-import { createReaddirHandler } from "./routes/readdir.js";
-import { createExistsHandler } from "./routes/exists.js";
-import { createStatHandler } from "./routes/stat.js";
-import { createDeleteHandler } from "./routes/delete.js";
-import { createValidatePathHandler } from "./routes/validate-path.js";
-import { createResolveDirectoryHandler } from "./routes/resolve-directory.js";
-import { createSaveImageHandler } from "./routes/save-image.js";
-import { createBrowseHandler } from "./routes/browse.js";
-import { createImageHandler } from "./routes/image.js";
-import { createSaveBoardBackgroundHandler } from "./routes/save-board-background.js";
-import { createDeleteBoardBackgroundHandler } from "./routes/delete-board-background.js";
+import { Router } from 'express';
+import type { EventEmitter } from '../../lib/events.js';
+import { createReadHandler } from './routes/read.js';
+import { createWriteHandler } from './routes/write.js';
+import { createMkdirHandler } from './routes/mkdir.js';
+import { createReaddirHandler } from './routes/readdir.js';
+import { createExistsHandler } from './routes/exists.js';
+import { createStatHandler } from './routes/stat.js';
+import { createDeleteHandler } from './routes/delete.js';
+import { createValidatePathHandler } from './routes/validate-path.js';
+import { createResolveDirectoryHandler } from './routes/resolve-directory.js';
+import { createSaveImageHandler } from './routes/save-image.js';
+import { createBrowseHandler } from './routes/browse.js';
+import { createImageHandler } from './routes/image.js';
+import { createSaveBoardBackgroundHandler } from './routes/save-board-background.js';
+import { createDeleteBoardBackgroundHandler } from './routes/delete-board-background.js';
export function createFsRoutes(_events: EventEmitter): Router {
const router = Router();
- router.post("/read", createReadHandler());
- router.post("/write", createWriteHandler());
- router.post("/mkdir", createMkdirHandler());
- router.post("/readdir", createReaddirHandler());
- router.post("/exists", createExistsHandler());
- router.post("/stat", createStatHandler());
- router.post("/delete", createDeleteHandler());
- router.post("/validate-path", createValidatePathHandler());
- router.post("/resolve-directory", createResolveDirectoryHandler());
- router.post("/save-image", createSaveImageHandler());
- router.post("/browse", createBrowseHandler());
- router.get("/image", createImageHandler());
- router.post("/save-board-background", createSaveBoardBackgroundHandler());
- router.post("/delete-board-background", createDeleteBoardBackgroundHandler());
+ router.post('/read', createReadHandler());
+ router.post('/write', createWriteHandler());
+ router.post('/mkdir', createMkdirHandler());
+ router.post('/readdir', createReaddirHandler());
+ router.post('/exists', createExistsHandler());
+ router.post('/stat', createStatHandler());
+ router.post('/delete', createDeleteHandler());
+ router.post('/validate-path', createValidatePathHandler());
+ router.post('/resolve-directory', createResolveDirectoryHandler());
+ router.post('/save-image', createSaveImageHandler());
+ router.post('/browse', createBrowseHandler());
+ router.get('/image', createImageHandler());
+ router.post('/save-board-background', createSaveBoardBackgroundHandler());
+ router.post('/delete-board-background', createDeleteBoardBackgroundHandler());
return router;
}
diff --git a/apps/server/src/routes/git/common.ts b/apps/server/src/routes/git/common.ts
index 4d7b9f927..5fd7013e2 100644
--- a/apps/server/src/routes/git/common.ts
+++ b/apps/server/src/routes/git/common.ts
@@ -2,13 +2,10 @@
* Common utilities for git routes
*/
-import { createLogger } from "@automaker/utils";
-import {
- getErrorMessage as getErrorMessageShared,
- createLogError,
-} from "../common.js";
+import { createLogger } from '@automaker/utils';
+import { getErrorMessage as getErrorMessageShared, createLogError } from '../common.js';
-const logger = createLogger("Git");
+const logger = createLogger('Git');
// Re-export shared utilities
export { getErrorMessageShared as getErrorMessage };
diff --git a/apps/server/src/routes/git/index.ts b/apps/server/src/routes/git/index.ts
index 25dc333dd..5e959ec97 100644
--- a/apps/server/src/routes/git/index.ts
+++ b/apps/server/src/routes/git/index.ts
@@ -2,16 +2,16 @@
* Git routes - HTTP API for git operations (non-worktree)
*/
-import { Router } from "express";
-import { validatePathParams } from "../../middleware/validate-paths.js";
-import { createDiffsHandler } from "./routes/diffs.js";
-import { createFileDiffHandler } from "./routes/file-diff.js";
+import { Router } from 'express';
+import { validatePathParams } from '../../middleware/validate-paths.js';
+import { createDiffsHandler } from './routes/diffs.js';
+import { createFileDiffHandler } from './routes/file-diff.js';
export function createGitRoutes(): Router {
const router = Router();
- router.post("/diffs", validatePathParams("projectPath"), createDiffsHandler());
- router.post("/file-diff", validatePathParams("projectPath", "filePath"), createFileDiffHandler());
+ router.post('/diffs', validatePathParams('projectPath'), createDiffsHandler());
+ router.post('/file-diff', validatePathParams('projectPath', 'filePath'), createFileDiffHandler());
return router;
}
diff --git a/apps/server/src/routes/git/routes/diffs.ts b/apps/server/src/routes/git/routes/diffs.ts
index eb532a034..ca919dcfb 100644
--- a/apps/server/src/routes/git/routes/diffs.ts
+++ b/apps/server/src/routes/git/routes/diffs.ts
@@ -2,9 +2,9 @@
* POST /diffs endpoint - Get diffs for the main project
*/
-import type { Request, Response } from "express";
-import { getErrorMessage, logError } from "../common.js";
-import { getGitRepositoryDiffs } from "../../common.js";
+import type { Request, Response } from 'express';
+import { getErrorMessage, logError } from '../common.js';
+import { getGitRepositoryDiffs } from '../../common.js';
export function createDiffsHandler() {
  return async (req: Request, res: Response): Promise<void> => {
@@ -12,7 +12,7 @@ export function createDiffsHandler() {
const { projectPath } = req.body as { projectPath: string };
if (!projectPath) {
- res.status(400).json({ success: false, error: "projectPath required" });
+ res.status(400).json({ success: false, error: 'projectPath required' });
return;
}
@@ -25,11 +25,11 @@ export function createDiffsHandler() {
hasChanges: result.hasChanges,
});
} catch (innerError) {
- logError(innerError, "Git diff failed");
- res.json({ success: true, diff: "", files: [], hasChanges: false });
+ logError(innerError, 'Git diff failed');
+ res.json({ success: true, diff: '', files: [], hasChanges: false });
}
} catch (error) {
- logError(error, "Get diffs failed");
+ logError(error, 'Get diffs failed');
res.status(500).json({ success: false, error: getErrorMessage(error) });
}
};
diff --git a/apps/server/src/routes/git/routes/file-diff.ts b/apps/server/src/routes/git/routes/file-diff.ts
index fdf66998e..6203ecc47 100644
--- a/apps/server/src/routes/git/routes/file-diff.ts
+++ b/apps/server/src/routes/git/routes/file-diff.ts
@@ -2,11 +2,11 @@
* POST /file-diff endpoint - Get diff for a specific file
*/
-import type { Request, Response } from "express";
-import { exec } from "child_process";
-import { promisify } from "util";
-import { getErrorMessage, logError } from "../common.js";
-import { generateSyntheticDiffForNewFile } from "../../common.js";
+import type { Request, Response } from 'express';
+import { exec } from 'child_process';
+import { promisify } from 'util';
+import { getErrorMessage, logError } from '../common.js';
+import { generateSyntheticDiffForNewFile } from '../../common.js';
const execAsync = promisify(exec);
@@ -19,20 +19,17 @@ export function createFileDiffHandler() {
};
if (!projectPath || !filePath) {
- res
- .status(400)
- .json({ success: false, error: "projectPath and filePath required" });
+ res.status(400).json({ success: false, error: 'projectPath and filePath required' });
return;
}
try {
// First check if the file is untracked
- const { stdout: status } = await execAsync(
- `git status --porcelain -- "${filePath}"`,
- { cwd: projectPath }
- );
+ const { stdout: status } = await execAsync(`git status --porcelain -- "${filePath}"`, {
+ cwd: projectPath,
+ });
- const isUntracked = status.trim().startsWith("??");
+ const isUntracked = status.trim().startsWith('??');
let diff: string;
if (isUntracked) {
@@ -40,23 +37,20 @@ export function createFileDiffHandler() {
diff = await generateSyntheticDiffForNewFile(projectPath, filePath);
} else {
// Use regular git diff for tracked files
- const result = await execAsync(
- `git diff HEAD -- "${filePath}"`,
- {
- cwd: projectPath,
- maxBuffer: 10 * 1024 * 1024,
- }
- );
+ const result = await execAsync(`git diff HEAD -- "${filePath}"`, {
+ cwd: projectPath,
+ maxBuffer: 10 * 1024 * 1024,
+ });
diff = result.stdout;
}
res.json({ success: true, diff, filePath });
} catch (innerError) {
- logError(innerError, "Git file diff failed");
- res.json({ success: true, diff: "", filePath });
+ logError(innerError, 'Git file diff failed');
+ res.json({ success: true, diff: '', filePath });
}
} catch (error) {
- logError(error, "Get file diff failed");
+ logError(error, 'Get file diff failed');
res.status(500).json({ success: false, error: getErrorMessage(error) });
}
};
diff --git a/apps/server/src/routes/health/common.ts b/apps/server/src/routes/health/common.ts
index 4977f8318..ac335c3d1 100644
--- a/apps/server/src/routes/health/common.ts
+++ b/apps/server/src/routes/health/common.ts
@@ -2,13 +2,10 @@
* Common utilities for health routes
*/
-import { createLogger } from "@automaker/utils";
-import {
- getErrorMessage as getErrorMessageShared,
- createLogError,
-} from "../common.js";
+import { createLogger } from '@automaker/utils';
+import { getErrorMessage as getErrorMessageShared, createLogError } from '../common.js';
-const logger = createLogger("Health");
+const logger = createLogger('Health');
// Re-export shared utilities
export { getErrorMessageShared as getErrorMessage };
diff --git a/apps/server/src/routes/health/index.ts b/apps/server/src/routes/health/index.ts
index 6ec625326..31439e664 100644
--- a/apps/server/src/routes/health/index.ts
+++ b/apps/server/src/routes/health/index.ts
@@ -2,15 +2,15 @@
* Health check routes
*/
-import { Router } from "express";
-import { createIndexHandler } from "./routes/index.js";
-import { createDetailedHandler } from "./routes/detailed.js";
+import { Router } from 'express';
+import { createIndexHandler } from './routes/index.js';
+import { createDetailedHandler } from './routes/detailed.js';
export function createHealthRoutes(): Router {
const router = Router();
- router.get("/", createIndexHandler());
- router.get("/detailed", createDetailedHandler());
+ router.get('/', createIndexHandler());
+ router.get('/detailed', createDetailedHandler());
return router;
}
diff --git a/apps/server/src/routes/health/routes/detailed.ts b/apps/server/src/routes/health/routes/detailed.ts
index 22deba78c..5aa2e6b16 100644
--- a/apps/server/src/routes/health/routes/detailed.ts
+++ b/apps/server/src/routes/health/routes/detailed.ts
@@ -2,18 +2,18 @@
* GET /detailed endpoint - Detailed health check
*/
-import type { Request, Response } from "express";
-import { getAuthStatus } from "../../../lib/auth.js";
+import type { Request, Response } from 'express';
+import { getAuthStatus } from '../../../lib/auth.js';
export function createDetailedHandler() {
return (_req: Request, res: Response): void => {
res.json({
- status: "ok",
+ status: 'ok',
timestamp: new Date().toISOString(),
- version: process.env.npm_package_version || "0.1.0",
+ version: process.env.npm_package_version || '0.1.0',
uptime: process.uptime(),
memory: process.memoryUsage(),
- dataDir: process.env.DATA_DIR || "./data",
+ dataDir: process.env.DATA_DIR || './data',
auth: getAuthStatus(),
env: {
nodeVersion: process.version,
diff --git a/apps/server/src/routes/health/routes/index.ts b/apps/server/src/routes/health/routes/index.ts
index e571b78eb..1501f6a68 100644
--- a/apps/server/src/routes/health/routes/index.ts
+++ b/apps/server/src/routes/health/routes/index.ts
@@ -2,14 +2,14 @@
* GET / endpoint - Basic health check
*/
-import type { Request, Response } from "express";
+import type { Request, Response } from 'express';
export function createIndexHandler() {
return (_req: Request, res: Response): void => {
res.json({
- status: "ok",
+ status: 'ok',
timestamp: new Date().toISOString(),
- version: process.env.npm_package_version || "0.1.0",
+ version: process.env.npm_package_version || '0.1.0',
});
};
}
diff --git a/apps/server/src/routes/models/common.ts b/apps/server/src/routes/models/common.ts
index 8baace0a9..7f30c0287 100644
--- a/apps/server/src/routes/models/common.ts
+++ b/apps/server/src/routes/models/common.ts
@@ -2,13 +2,10 @@
* Common utilities for models routes
*/
-import { createLogger } from "@automaker/utils";
-import {
- getErrorMessage as getErrorMessageShared,
- createLogError,
-} from "../common.js";
+import { createLogger } from '@automaker/utils';
+import { getErrorMessage as getErrorMessageShared, createLogError } from '../common.js';
-const logger = createLogger("Models");
+const logger = createLogger('Models');
// Re-export shared utilities
export { getErrorMessageShared as getErrorMessage };
diff --git a/apps/server/src/routes/models/index.ts b/apps/server/src/routes/models/index.ts
index 4ed1fda2f..14d0beabb 100644
--- a/apps/server/src/routes/models/index.ts
+++ b/apps/server/src/routes/models/index.ts
@@ -2,15 +2,15 @@
* Models routes - HTTP API for model providers and availability
*/
-import { Router } from "express";
-import { createAvailableHandler } from "./routes/available.js";
-import { createProvidersHandler } from "./routes/providers.js";
+import { Router } from 'express';
+import { createAvailableHandler } from './routes/available.js';
+import { createProvidersHandler } from './routes/providers.js';
export function createModelsRoutes(): Router {
const router = Router();
- router.get("/available", createAvailableHandler());
- router.get("/providers", createProvidersHandler());
+ router.get('/available', createAvailableHandler());
+ router.get('/providers', createProvidersHandler());
return router;
}
diff --git a/apps/server/src/routes/models/routes/available.ts b/apps/server/src/routes/models/routes/available.ts
index 3e26b690a..4ac4e0b18 100644
--- a/apps/server/src/routes/models/routes/available.ts
+++ b/apps/server/src/routes/models/routes/available.ts
@@ -2,8 +2,8 @@
* GET /available endpoint - Get available models
*/
-import type { Request, Response } from "express";
-import { getErrorMessage, logError } from "../common.js";
+import type { Request, Response } from 'express';
+import { getErrorMessage, logError } from '../common.js';
interface ModelDefinition {
id: string;
@@ -20,36 +20,36 @@ export function createAvailableHandler() {
try {
const models: ModelDefinition[] = [
{
- id: "claude-opus-4-5-20251101",
- name: "Claude Opus 4.5",
- provider: "anthropic",
+ id: 'claude-opus-4-5-20251101',
+ name: 'Claude Opus 4.5',
+ provider: 'anthropic',
contextWindow: 200000,
maxOutputTokens: 16384,
supportsVision: true,
supportsTools: true,
},
{
- id: "claude-sonnet-4-20250514",
- name: "Claude Sonnet 4",
- provider: "anthropic",
+ id: 'claude-sonnet-4-20250514',
+ name: 'Claude Sonnet 4',
+ provider: 'anthropic',
contextWindow: 200000,
maxOutputTokens: 16384,
supportsVision: true,
supportsTools: true,
},
{
- id: "claude-3-5-sonnet-20241022",
- name: "Claude 3.5 Sonnet",
- provider: "anthropic",
+ id: 'claude-3-5-sonnet-20241022',
+ name: 'Claude 3.5 Sonnet',
+ provider: 'anthropic',
contextWindow: 200000,
maxOutputTokens: 8192,
supportsVision: true,
supportsTools: true,
},
{
- id: "claude-3-5-haiku-20241022",
- name: "Claude 3.5 Haiku",
- provider: "anthropic",
+ id: 'claude-3-5-haiku-20241022',
+ name: 'Claude 3.5 Haiku',
+ provider: 'anthropic',
contextWindow: 200000,
maxOutputTokens: 8192,
supportsVision: true,
@@ -59,7 +59,7 @@ export function createAvailableHandler() {
res.json({ success: true, models });
} catch (error) {
- logError(error, "Get available models failed");
+ logError(error, 'Get available models failed');
res.status(500).json({ success: false, error: getErrorMessage(error) });
}
};
diff --git a/apps/server/src/routes/models/routes/providers.ts b/apps/server/src/routes/models/routes/providers.ts
index 3f140f379..b7ef1b858 100644
--- a/apps/server/src/routes/models/routes/providers.ts
+++ b/apps/server/src/routes/models/routes/providers.ts
@@ -2,9 +2,9 @@
* GET /providers endpoint - Check provider status
*/
-import type { Request, Response } from "express";
-import { ProviderFactory } from "../../../providers/provider-factory.js";
-import { getErrorMessage, logError } from "../common.js";
+import type { Request, Response } from 'express';
+import { ProviderFactory } from '../../../providers/provider-factory.js';
+import { getErrorMessage, logError } from '../common.js';
export function createProvidersHandler() {
  return async (_req: Request, res: Response): Promise<void> => {
@@ -21,7 +21,7 @@ export function createProvidersHandler() {
res.json({ success: true, providers });
} catch (error) {
- logError(error, "Get providers failed");
+ logError(error, 'Get providers failed');
res.status(500).json({ success: false, error: getErrorMessage(error) });
}
};
diff --git a/apps/server/src/routes/running-agents/common.ts b/apps/server/src/routes/running-agents/common.ts
index acb0d7e57..c2d16a185 100644
--- a/apps/server/src/routes/running-agents/common.ts
+++ b/apps/server/src/routes/running-agents/common.ts
@@ -2,13 +2,10 @@
* Common utilities for running-agents routes
*/
-import { createLogger } from "@automaker/utils";
-import {
- getErrorMessage as getErrorMessageShared,
- createLogError,
-} from "../common.js";
+import { createLogger } from '@automaker/utils';
+import { getErrorMessage as getErrorMessageShared, createLogError } from '../common.js';
-const logger = createLogger("RunningAgents");
+const logger = createLogger('RunningAgents');
// Re-export shared utilities
export { getErrorMessageShared as getErrorMessage };
diff --git a/apps/server/src/routes/running-agents/index.ts b/apps/server/src/routes/running-agents/index.ts
index cef82fea8..a1dbffcdb 100644
--- a/apps/server/src/routes/running-agents/index.ts
+++ b/apps/server/src/routes/running-agents/index.ts
@@ -2,16 +2,14 @@
* Running Agents routes - HTTP API for tracking active agent executions
*/
-import { Router } from "express";
-import type { AutoModeService } from "../../services/auto-mode-service.js";
-import { createIndexHandler } from "./routes/index.js";
+import { Router } from 'express';
+import type { AutoModeService } from '../../services/auto-mode-service.js';
+import { createIndexHandler } from './routes/index.js';
-export function createRunningAgentsRoutes(
- autoModeService: AutoModeService
-): Router {
+export function createRunningAgentsRoutes(autoModeService: AutoModeService): Router {
const router = Router();
- router.get("/", createIndexHandler(autoModeService));
+ router.get('/', createIndexHandler(autoModeService));
return router;
}
diff --git a/apps/server/src/routes/running-agents/routes/index.ts b/apps/server/src/routes/running-agents/routes/index.ts
index e2f7e14ea..72a3f8385 100644
--- a/apps/server/src/routes/running-agents/routes/index.ts
+++ b/apps/server/src/routes/running-agents/routes/index.ts
@@ -2,9 +2,9 @@
* GET / endpoint - Get all running agents
*/
-import type { Request, Response } from "express";
-import type { AutoModeService } from "../../../services/auto-mode-service.js";
-import { getErrorMessage, logError } from "../common.js";
+import type { Request, Response } from 'express';
+import type { AutoModeService } from '../../../services/auto-mode-service.js';
+import { getErrorMessage, logError } from '../common.js';
export function createIndexHandler(autoModeService: AutoModeService) {
  return async (_req: Request, res: Response): Promise<void> => {
@@ -18,7 +18,7 @@ export function createIndexHandler(autoModeService: AutoModeService) {
totalCount: runningAgents.length,
});
} catch (error) {
- logError(error, "Get running agents failed");
+ logError(error, 'Get running agents failed');
res.status(500).json({ success: false, error: getErrorMessage(error) });
}
};
diff --git a/apps/server/src/routes/sessions/common.ts b/apps/server/src/routes/sessions/common.ts
index facae6480..0d1df9b68 100644
--- a/apps/server/src/routes/sessions/common.ts
+++ b/apps/server/src/routes/sessions/common.ts
@@ -2,13 +2,10 @@
* Common utilities for sessions routes
*/
-import { createLogger } from "@automaker/utils";
-import {
- getErrorMessage as getErrorMessageShared,
- createLogError,
-} from "../common.js";
+import { createLogger } from '@automaker/utils';
+import { getErrorMessage as getErrorMessageShared, createLogError } from '../common.js';
-const logger = createLogger("Sessions");
+const logger = createLogger('Sessions');
// Re-export shared utilities
export { getErrorMessageShared as getErrorMessage };
diff --git a/apps/server/src/routes/sessions/index.ts b/apps/server/src/routes/sessions/index.ts
index 1cae202d4..e625671f3 100644
--- a/apps/server/src/routes/sessions/index.ts
+++ b/apps/server/src/routes/sessions/index.ts
@@ -2,24 +2,24 @@
* Sessions routes - HTTP API for session management
*/
-import { Router } from "express";
-import { AgentService } from "../../services/agent-service.js";
-import { createIndexHandler } from "./routes/index.js";
-import { createCreateHandler } from "./routes/create.js";
-import { createUpdateHandler } from "./routes/update.js";
-import { createArchiveHandler } from "./routes/archive.js";
-import { createUnarchiveHandler } from "./routes/unarchive.js";
-import { createDeleteHandler } from "./routes/delete.js";
+import { Router } from 'express';
+import { AgentService } from '../../services/agent-service.js';
+import { createIndexHandler } from './routes/index.js';
+import { createCreateHandler } from './routes/create.js';
+import { createUpdateHandler } from './routes/update.js';
+import { createArchiveHandler } from './routes/archive.js';
+import { createUnarchiveHandler } from './routes/unarchive.js';
+import { createDeleteHandler } from './routes/delete.js';
export function createSessionsRoutes(agentService: AgentService): Router {
const router = Router();
- router.get("/", createIndexHandler(agentService));
- router.post("/", createCreateHandler(agentService));
- router.put("/:sessionId", createUpdateHandler(agentService));
- router.post("/:sessionId/archive", createArchiveHandler(agentService));
- router.post("/:sessionId/unarchive", createUnarchiveHandler(agentService));
- router.delete("/:sessionId", createDeleteHandler(agentService));
+ router.get('/', createIndexHandler(agentService));
+ router.post('/', createCreateHandler(agentService));
+ router.put('/:sessionId', createUpdateHandler(agentService));
+ router.post('/:sessionId/archive', createArchiveHandler(agentService));
+ router.post('/:sessionId/unarchive', createUnarchiveHandler(agentService));
+ router.delete('/:sessionId', createDeleteHandler(agentService));
return router;
}
diff --git a/apps/server/src/routes/sessions/routes/archive.ts b/apps/server/src/routes/sessions/routes/archive.ts
index dd9b6aa01..3407e5cd3 100644
--- a/apps/server/src/routes/sessions/routes/archive.ts
+++ b/apps/server/src/routes/sessions/routes/archive.ts
@@ -2,9 +2,9 @@
* POST /:sessionId/archive endpoint - Archive a session
*/
-import type { Request, Response } from "express";
-import { AgentService } from "../../../services/agent-service.js";
-import { getErrorMessage, logError } from "../common.js";
+import type { Request, Response } from 'express';
+import { AgentService } from '../../../services/agent-service.js';
+import { getErrorMessage, logError } from '../common.js';
export function createArchiveHandler(agentService: AgentService) {
  return async (req: Request, res: Response): Promise<void> => {
@@ -13,13 +13,13 @@ export function createArchiveHandler(agentService: AgentService) {
const success = await agentService.archiveSession(sessionId);
if (!success) {
- res.status(404).json({ success: false, error: "Session not found" });
+ res.status(404).json({ success: false, error: 'Session not found' });
return;
}
res.json({ success: true });
} catch (error) {
- logError(error, "Archive session failed");
+ logError(error, 'Archive session failed');
res.status(500).json({ success: false, error: getErrorMessage(error) });
}
};
diff --git a/apps/server/src/routes/sessions/routes/create.ts b/apps/server/src/routes/sessions/routes/create.ts
index 7faf9e365..2917168ca 100644
--- a/apps/server/src/routes/sessions/routes/create.ts
+++ b/apps/server/src/routes/sessions/routes/create.ts
@@ -2,9 +2,9 @@
* POST / endpoint - Create a new session
*/
-import type { Request, Response } from "express";
-import { AgentService } from "../../../services/agent-service.js";
-import { getErrorMessage, logError } from "../common.js";
+import type { Request, Response } from 'express';
+import { AgentService } from '../../../services/agent-service.js';
+import { getErrorMessage, logError } from '../common.js';
export function createCreateHandler(agentService: AgentService) {
  return async (req: Request, res: Response): Promise<void> => {
@@ -17,19 +17,14 @@ export function createCreateHandler(agentService: AgentService) {
};
if (!name) {
- res.status(400).json({ success: false, error: "name is required" });
+ res.status(400).json({ success: false, error: 'name is required' });
return;
}
- const session = await agentService.createSession(
- name,
- projectPath,
- workingDirectory,
- model
- );
+ const session = await agentService.createSession(name, projectPath, workingDirectory, model);
res.json({ success: true, session });
} catch (error) {
- logError(error, "Create session failed");
+ logError(error, 'Create session failed');
res.status(500).json({ success: false, error: getErrorMessage(error) });
}
};
diff --git a/apps/server/src/routes/sessions/routes/delete.ts b/apps/server/src/routes/sessions/routes/delete.ts
index 2d4c9f4c7..91bbc39d2 100644
--- a/apps/server/src/routes/sessions/routes/delete.ts
+++ b/apps/server/src/routes/sessions/routes/delete.ts
@@ -2,9 +2,9 @@
* DELETE /:sessionId endpoint - Delete a session
*/
-import type { Request, Response } from "express";
-import { AgentService } from "../../../services/agent-service.js";
-import { getErrorMessage, logError } from "../common.js";
+import type { Request, Response } from 'express';
+import { AgentService } from '../../../services/agent-service.js';
+import { getErrorMessage, logError } from '../common.js';
export function createDeleteHandler(agentService: AgentService) {
  return async (req: Request, res: Response): Promise<void> => {
@@ -13,13 +13,13 @@ export function createDeleteHandler(agentService: AgentService) {
const success = await agentService.deleteSession(sessionId);
if (!success) {
- res.status(404).json({ success: false, error: "Session not found" });
+ res.status(404).json({ success: false, error: 'Session not found' });
return;
}
res.json({ success: true });
} catch (error) {
- logError(error, "Delete session failed");
+ logError(error, 'Delete session failed');
res.status(500).json({ success: false, error: getErrorMessage(error) });
}
};
diff --git a/apps/server/src/routes/sessions/routes/index.ts b/apps/server/src/routes/sessions/routes/index.ts
index 64b891db8..5f82bcabf 100644
--- a/apps/server/src/routes/sessions/routes/index.ts
+++ b/apps/server/src/routes/sessions/routes/index.ts
@@ -2,14 +2,14 @@
* GET / endpoint - List all sessions
*/
-import type { Request, Response } from "express";
-import { AgentService } from "../../../services/agent-service.js";
-import { getErrorMessage, logError } from "../common.js";
+import type { Request, Response } from 'express';
+import { AgentService } from '../../../services/agent-service.js';
+import { getErrorMessage, logError } from '../common.js';
export function createIndexHandler(agentService: AgentService) {
  return async (req: Request, res: Response): Promise<void> => {
try {
- const includeArchived = req.query.includeArchived === "true";
+ const includeArchived = req.query.includeArchived === 'true';
const sessionsRaw = await agentService.listSessions(includeArchived);
// Transform to match frontend SessionListItem interface
@@ -17,7 +17,7 @@ export function createIndexHandler(agentService: AgentService) {
sessionsRaw.map(async (s) => {
const messages = await agentService.loadSession(s.id);
const lastMessage = messages[messages.length - 1];
- const preview = lastMessage?.content?.slice(0, 100) || "";
+ const preview = lastMessage?.content?.slice(0, 100) || '';
return {
id: s.id,
@@ -36,7 +36,7 @@ export function createIndexHandler(agentService: AgentService) {
res.json({ success: true, sessions });
} catch (error) {
- logError(error, "List sessions failed");
+ logError(error, 'List sessions failed');
res.status(500).json({ success: false, error: getErrorMessage(error) });
}
};
diff --git a/apps/server/src/routes/sessions/routes/unarchive.ts b/apps/server/src/routes/sessions/routes/unarchive.ts
index 07e4be173..638d31509 100644
--- a/apps/server/src/routes/sessions/routes/unarchive.ts
+++ b/apps/server/src/routes/sessions/routes/unarchive.ts
@@ -2,9 +2,9 @@
* POST /:sessionId/unarchive endpoint - Unarchive a session
*/
-import type { Request, Response } from "express";
-import { AgentService } from "../../../services/agent-service.js";
-import { getErrorMessage, logError } from "../common.js";
+import type { Request, Response } from 'express';
+import { AgentService } from '../../../services/agent-service.js';
+import { getErrorMessage, logError } from '../common.js';
export function createUnarchiveHandler(agentService: AgentService) {
  return async (req: Request, res: Response): Promise<void> => {
@@ -13,13 +13,13 @@ export function createUnarchiveHandler(agentService: AgentService) {
const success = await agentService.unarchiveSession(sessionId);
if (!success) {
- res.status(404).json({ success: false, error: "Session not found" });
+ res.status(404).json({ success: false, error: 'Session not found' });
return;
}
res.json({ success: true });
} catch (error) {
- logError(error, "Unarchive session failed");
+ logError(error, 'Unarchive session failed');
res.status(500).json({ success: false, error: getErrorMessage(error) });
}
};
diff --git a/apps/server/src/routes/sessions/routes/update.ts b/apps/server/src/routes/sessions/routes/update.ts
index 2dbea4318..7705fa221 100644
--- a/apps/server/src/routes/sessions/routes/update.ts
+++ b/apps/server/src/routes/sessions/routes/update.ts
@@ -2,9 +2,9 @@
* PUT /:sessionId endpoint - Update a session
*/
-import type { Request, Response } from "express";
-import { AgentService } from "../../../services/agent-service.js";
-import { getErrorMessage, logError } from "../common.js";
+import type { Request, Response } from 'express';
+import { AgentService } from '../../../services/agent-service.js';
+import { getErrorMessage, logError } from '../common.js';
export function createUpdateHandler(agentService: AgentService) {
  return async (req: Request, res: Response): Promise<void> => {
@@ -22,13 +22,13 @@ export function createUpdateHandler(agentService: AgentService) {
model,
});
if (!session) {
- res.status(404).json({ success: false, error: "Session not found" });
+ res.status(404).json({ success: false, error: 'Session not found' });
return;
}
res.json({ success: true, session });
} catch (error) {
- logError(error, "Update session failed");
+ logError(error, 'Update session failed');
res.status(500).json({ success: false, error: getErrorMessage(error) });
}
};
diff --git a/apps/server/src/routes/settings/common.ts b/apps/server/src/routes/settings/common.ts
index 74057a4e5..d8201bfd0 100644
--- a/apps/server/src/routes/settings/common.ts
+++ b/apps/server/src/routes/settings/common.ts
@@ -5,14 +5,11 @@
* Re-exports error handling helpers from the parent routes module.
*/
-import { createLogger } from "@automaker/utils";
-import {
- getErrorMessage as getErrorMessageShared,
- createLogError,
-} from "../common.js";
+import { createLogger } from '@automaker/utils';
+import { getErrorMessage as getErrorMessageShared, createLogError } from '../common.js';
/** Logger instance for settings-related operations */
-export const logger = createLogger("Settings");
+export const logger = createLogger('Settings');
/**
* Extract user-friendly error message from error objects
diff --git a/apps/server/src/routes/settings/index.ts b/apps/server/src/routes/settings/index.ts
index 53c4556e1..cc1648569 100644
--- a/apps/server/src/routes/settings/index.ts
+++ b/apps/server/src/routes/settings/index.ts
@@ -12,17 +12,17 @@
* Mounted at /api/settings in the main server.
*/
-import { Router } from "express";
-import type { SettingsService } from "../../services/settings-service.js";
-import { validatePathParams } from "../../middleware/validate-paths.js";
-import { createGetGlobalHandler } from "./routes/get-global.js";
-import { createUpdateGlobalHandler } from "./routes/update-global.js";
-import { createGetCredentialsHandler } from "./routes/get-credentials.js";
-import { createUpdateCredentialsHandler } from "./routes/update-credentials.js";
-import { createGetProjectHandler } from "./routes/get-project.js";
-import { createUpdateProjectHandler } from "./routes/update-project.js";
-import { createMigrateHandler } from "./routes/migrate.js";
-import { createStatusHandler } from "./routes/status.js";
+import { Router } from 'express';
+import type { SettingsService } from '../../services/settings-service.js';
+import { validatePathParams } from '../../middleware/validate-paths.js';
+import { createGetGlobalHandler } from './routes/get-global.js';
+import { createUpdateGlobalHandler } from './routes/update-global.js';
+import { createGetCredentialsHandler } from './routes/get-credentials.js';
+import { createUpdateCredentialsHandler } from './routes/update-credentials.js';
+import { createGetProjectHandler } from './routes/get-project.js';
+import { createUpdateProjectHandler } from './routes/update-project.js';
+import { createMigrateHandler } from './routes/migrate.js';
+import { createStatusHandler } from './routes/status.js';
/**
* Create settings router with all endpoints
@@ -47,22 +47,30 @@ export function createSettingsRoutes(settingsService: SettingsService): Router {
const router = Router();
// Status endpoint (check if migration needed)
- router.get("/status", createStatusHandler(settingsService));
+ router.get('/status', createStatusHandler(settingsService));
// Global settings
- router.get("/global", createGetGlobalHandler(settingsService));
- router.put("/global", createUpdateGlobalHandler(settingsService));
+ router.get('/global', createGetGlobalHandler(settingsService));
+ router.put('/global', createUpdateGlobalHandler(settingsService));
// Credentials (separate for security)
- router.get("/credentials", createGetCredentialsHandler(settingsService));
- router.put("/credentials", createUpdateCredentialsHandler(settingsService));
+ router.get('/credentials', createGetCredentialsHandler(settingsService));
+ router.put('/credentials', createUpdateCredentialsHandler(settingsService));
// Project settings
- router.post("/project", validatePathParams("projectPath"), createGetProjectHandler(settingsService));
- router.put("/project", validatePathParams("projectPath"), createUpdateProjectHandler(settingsService));
+ router.post(
+ '/project',
+ validatePathParams('projectPath'),
+ createGetProjectHandler(settingsService)
+ );
+ router.put(
+ '/project',
+ validatePathParams('projectPath'),
+ createUpdateProjectHandler(settingsService)
+ );
// Migration from localStorage
- router.post("/migrate", createMigrateHandler(settingsService));
+ router.post('/migrate', createMigrateHandler(settingsService));
return router;
}
diff --git a/apps/server/src/routes/settings/routes/get-credentials.ts b/apps/server/src/routes/settings/routes/get-credentials.ts
index 2294e9c48..be15b04be 100644
--- a/apps/server/src/routes/settings/routes/get-credentials.ts
+++ b/apps/server/src/routes/settings/routes/get-credentials.ts
@@ -8,9 +8,9 @@
* Response: `{ "success": true, "credentials": { anthropic } }`
*/
-import type { Request, Response } from "express";
-import type { SettingsService } from "../../../services/settings-service.js";
-import { getErrorMessage, logError } from "../common.js";
+import type { Request, Response } from 'express';
+import type { SettingsService } from '../../../services/settings-service.js';
+import { getErrorMessage, logError } from '../common.js';
/**
* Create handler factory for GET /api/settings/credentials
@@ -28,7 +28,7 @@ export function createGetCredentialsHandler(settingsService: SettingsService) {
credentials,
});
} catch (error) {
- logError(error, "Get credentials failed");
+ logError(error, 'Get credentials failed');
res.status(500).json({ success: false, error: getErrorMessage(error) });
}
};
diff --git a/apps/server/src/routes/settings/routes/get-global.ts b/apps/server/src/routes/settings/routes/get-global.ts
index 0e71c4eb0..fa432b252 100644
--- a/apps/server/src/routes/settings/routes/get-global.ts
+++ b/apps/server/src/routes/settings/routes/get-global.ts
@@ -7,9 +7,9 @@
* Response: `{ "success": true, "settings": GlobalSettings }`
*/
-import type { Request, Response } from "express";
-import type { SettingsService } from "../../../services/settings-service.js";
-import { getErrorMessage, logError } from "../common.js";
+import type { Request, Response } from 'express';
+import type { SettingsService } from '../../../services/settings-service.js';
+import { getErrorMessage, logError } from '../common.js';
/**
* Create handler factory for GET /api/settings/global
@@ -27,7 +27,7 @@ export function createGetGlobalHandler(settingsService: SettingsService) {
settings,
});
} catch (error) {
- logError(error, "Get global settings failed");
+ logError(error, 'Get global settings failed');
res.status(500).json({ success: false, error: getErrorMessage(error) });
}
};
diff --git a/apps/server/src/routes/settings/routes/get-project.ts b/apps/server/src/routes/settings/routes/get-project.ts
index 58f6ce7e3..7cd449a2a 100644
--- a/apps/server/src/routes/settings/routes/get-project.ts
+++ b/apps/server/src/routes/settings/routes/get-project.ts
@@ -8,9 +8,9 @@
* Response: `{ "success": true, "settings": ProjectSettings }`
*/
-import type { Request, Response } from "express";
-import type { SettingsService } from "../../../services/settings-service.js";
-import { getErrorMessage, logError } from "../common.js";
+import type { Request, Response } from 'express';
+import type { SettingsService } from '../../../services/settings-service.js';
+import { getErrorMessage, logError } from '../common.js';
/**
* Create handler factory for POST /api/settings/project
@@ -23,10 +23,10 @@ export function createGetProjectHandler(settingsService: SettingsService) {
try {
const { projectPath } = req.body as { projectPath?: string };
- if (!projectPath || typeof projectPath !== "string") {
+ if (!projectPath || typeof projectPath !== 'string') {
res.status(400).json({
success: false,
- error: "projectPath is required",
+ error: 'projectPath is required',
});
return;
}
@@ -38,7 +38,7 @@ export function createGetProjectHandler(settingsService: SettingsService) {
settings,
});
} catch (error) {
- logError(error, "Get project settings failed");
+ logError(error, 'Get project settings failed');
res.status(500).json({ success: false, error: getErrorMessage(error) });
}
};
diff --git a/apps/server/src/routes/settings/routes/migrate.ts b/apps/server/src/routes/settings/routes/migrate.ts
index e95b11c0b..02145d667 100644
--- a/apps/server/src/routes/settings/routes/migrate.ts
+++ b/apps/server/src/routes/settings/routes/migrate.ts
@@ -30,9 +30,9 @@
* ```
*/
-import type { Request, Response } from "express";
-import type { SettingsService } from "../../../services/settings-service.js";
-import { getErrorMessage, logError, logger } from "../common.js";
+import type { Request, Response } from 'express';
+import type { SettingsService } from '../../../services/settings-service.js';
+import { getErrorMessage, logError, logger } from '../common.js';
/**
* Create handler factory for POST /api/settings/migrate
@@ -45,32 +45,30 @@ export function createMigrateHandler(settingsService: SettingsService) {
try {
const { data } = req.body as {
data?: {
- "automaker-storage"?: string;
- "automaker-setup"?: string;
- "worktree-panel-collapsed"?: string;
- "file-browser-recent-folders"?: string;
- "automaker:lastProjectDir"?: string;
+ 'automaker-storage'?: string;
+ 'automaker-setup'?: string;
+ 'worktree-panel-collapsed'?: string;
+ 'file-browser-recent-folders'?: string;
+ 'automaker:lastProjectDir'?: string;
};
};
- if (!data || typeof data !== "object") {
+ if (!data || typeof data !== 'object') {
res.status(400).json({
success: false,
- error: "data object is required containing localStorage data",
+ error: 'data object is required containing localStorage data',
});
return;
}
- logger.info("Starting settings migration from localStorage");
+ logger.info('Starting settings migration from localStorage');
const result = await settingsService.migrateFromLocalStorage(data);
if (result.success) {
- logger.info(
- `Migration successful: ${result.migratedProjectCount} projects migrated`
- );
+ logger.info(`Migration successful: ${result.migratedProjectCount} projects migrated`);
} else {
- logger.warn(`Migration completed with errors: ${result.errors.join(", ")}`);
+ logger.warn(`Migration completed with errors: ${result.errors.join(', ')}`);
}
res.json({
@@ -81,7 +79,7 @@ export function createMigrateHandler(settingsService: SettingsService) {
errors: result.errors,
});
} catch (error) {
- logError(error, "Migration failed");
+ logError(error, 'Migration failed');
res.status(500).json({ success: false, error: getErrorMessage(error) });
}
};
diff --git a/apps/server/src/routes/settings/routes/status.ts b/apps/server/src/routes/settings/routes/status.ts
index 0354502f6..04f016437 100644
--- a/apps/server/src/routes/settings/routes/status.ts
+++ b/apps/server/src/routes/settings/routes/status.ts
@@ -16,9 +16,9 @@
* ```
*/
-import type { Request, Response } from "express";
-import type { SettingsService } from "../../../services/settings-service.js";
-import { getErrorMessage, logError } from "../common.js";
+import type { Request, Response } from 'express';
+import type { SettingsService } from '../../../services/settings-service.js';
+import { getErrorMessage, logError } from '../common.js';
/**
* Create handler factory for GET /api/settings/status
@@ -40,7 +40,7 @@ export function createStatusHandler(settingsService: SettingsService) {
needsMigration: !hasGlobalSettings,
});
} catch (error) {
- logError(error, "Get settings status failed");
+ logError(error, 'Get settings status failed');
res.status(500).json({ success: false, error: getErrorMessage(error) });
}
};
diff --git a/apps/server/src/routes/settings/routes/update-credentials.ts b/apps/server/src/routes/settings/routes/update-credentials.ts
index 16367879c..c08b24457 100644
--- a/apps/server/src/routes/settings/routes/update-credentials.ts
+++ b/apps/server/src/routes/settings/routes/update-credentials.ts
@@ -8,10 +8,10 @@
* Response: `{ "success": true, "credentials": { anthropic } }`
*/
-import type { Request, Response } from "express";
-import type { SettingsService } from "../../../services/settings-service.js";
-import type { Credentials } from "../../../types/settings.js";
-import { getErrorMessage, logError } from "../common.js";
+import type { Request, Response } from 'express';
+import type { SettingsService } from '../../../services/settings-service.js';
+import type { Credentials } from '../../../types/settings.js';
+import { getErrorMessage, logError } from '../common.js';
/**
* Create handler factory for PUT /api/settings/credentials
@@ -19,17 +19,15 @@ import { getErrorMessage, logError } from "../common.js";
* @param settingsService - Instance of SettingsService for file I/O
* @returns Express request handler
*/
-export function createUpdateCredentialsHandler(
- settingsService: SettingsService
-) {
+export function createUpdateCredentialsHandler(settingsService: SettingsService) {
return async (req: Request, res: Response): Promise<void> => {
try {
const updates = req.body as Partial<Credentials>;
- if (!updates || typeof updates !== "object") {
+ if (!updates || typeof updates !== 'object') {
res.status(400).json({
success: false,
- error: "Invalid request body - expected credentials object",
+ error: 'Invalid request body - expected credentials object',
});
return;
}
@@ -44,7 +42,7 @@ export function createUpdateCredentialsHandler(
credentials: masked,
});
} catch (error) {
- logError(error, "Update credentials failed");
+ logError(error, 'Update credentials failed');
res.status(500).json({ success: false, error: getErrorMessage(error) });
}
};
diff --git a/apps/server/src/routes/settings/routes/update-global.ts b/apps/server/src/routes/settings/routes/update-global.ts
index 21af8dd2b..6072f2372 100644
--- a/apps/server/src/routes/settings/routes/update-global.ts
+++ b/apps/server/src/routes/settings/routes/update-global.ts
@@ -8,10 +8,10 @@
* Response: `{ "success": true, "settings": GlobalSettings }`
*/
-import type { Request, Response } from "express";
-import type { SettingsService } from "../../../services/settings-service.js";
-import type { GlobalSettings } from "../../../types/settings.js";
-import { getErrorMessage, logError } from "../common.js";
+import type { Request, Response } from 'express';
+import type { SettingsService } from '../../../services/settings-service.js';
+import type { GlobalSettings } from '../../../types/settings.js';
+import { getErrorMessage, logError } from '../common.js';
/**
* Create handler factory for PUT /api/settings/global
@@ -24,10 +24,10 @@ export function createUpdateGlobalHandler(settingsService: SettingsService) {
try {
const updates = req.body as Partial<GlobalSettings>;
- if (!updates || typeof updates !== "object") {
+ if (!updates || typeof updates !== 'object') {
res.status(400).json({
success: false,
- error: "Invalid request body - expected settings object",
+ error: 'Invalid request body - expected settings object',
});
return;
}
@@ -39,7 +39,7 @@ export function createUpdateGlobalHandler(settingsService: SettingsService) {
settings,
});
} catch (error) {
- logError(error, "Update global settings failed");
+ logError(error, 'Update global settings failed');
res.status(500).json({ success: false, error: getErrorMessage(error) });
}
};
diff --git a/apps/server/src/routes/settings/routes/update-project.ts b/apps/server/src/routes/settings/routes/update-project.ts
index 5dc38df0b..f5f639f47 100644
--- a/apps/server/src/routes/settings/routes/update-project.ts
+++ b/apps/server/src/routes/settings/routes/update-project.ts
@@ -8,10 +8,10 @@
* Response: `{ "success": true, "settings": ProjectSettings }`
*/
-import type { Request, Response } from "express";
-import type { SettingsService } from "../../../services/settings-service.js";
-import type { ProjectSettings } from "../../../types/settings.js";
-import { getErrorMessage, logError } from "../common.js";
+import type { Request, Response } from 'express';
+import type { SettingsService } from '../../../services/settings-service.js';
+import type { ProjectSettings } from '../../../types/settings.js';
+import { getErrorMessage, logError } from '../common.js';
/**
* Create handler factory for PUT /api/settings/project
@@ -27,33 +27,30 @@ export function createUpdateProjectHandler(settingsService: SettingsService) {
updates?: Partial<ProjectSettings>;
};
- if (!projectPath || typeof projectPath !== "string") {
+ if (!projectPath || typeof projectPath !== 'string') {
res.status(400).json({
success: false,
- error: "projectPath is required",
+ error: 'projectPath is required',
});
return;
}
- if (!updates || typeof updates !== "object") {
+ if (!updates || typeof updates !== 'object') {
res.status(400).json({
success: false,
- error: "updates object is required",
+ error: 'updates object is required',
});
return;
}
- const settings = await settingsService.updateProjectSettings(
- projectPath,
- updates
- );
+ const settings = await settingsService.updateProjectSettings(projectPath, updates);
res.json({
success: true,
settings,
});
} catch (error) {
- logError(error, "Update project settings failed");
+ logError(error, 'Update project settings failed');
res.status(500).json({ success: false, error: getErrorMessage(error) });
}
};
diff --git a/apps/server/src/routes/setup/common.ts b/apps/server/src/routes/setup/common.ts
index 036def1e3..097d7a6c8 100644
--- a/apps/server/src/routes/setup/common.ts
+++ b/apps/server/src/routes/setup/common.ts
@@ -2,15 +2,12 @@
* Common utilities and state for setup routes
*/
-import { createLogger } from "@automaker/utils";
-import path from "path";
-import fs from "fs/promises";
-import {
- getErrorMessage as getErrorMessageShared,
- createLogError,
-} from "../common.js";
+import { createLogger } from '@automaker/utils';
+import path from 'path';
+import fs from 'fs/promises';
+import { getErrorMessage as getErrorMessageShared, createLogError } from '../common.js';
-const logger = createLogger("Setup");
+const logger = createLogger('Setup');
// Storage for API keys (in-memory cache) - private
const apiKeys: Record<string, string> = {};
@@ -39,22 +36,19 @@ export function getAllApiKeys(): Record<string, string> {
/**
* Helper to persist API keys to .env file
*/
-export async function persistApiKeyToEnv(
- key: string,
- value: string
-): Promise<void> {
- const envPath = path.join(process.cwd(), ".env");
+export async function persistApiKeyToEnv(key: string, value: string): Promise<void> {
+ const envPath = path.join(process.cwd(), '.env');
try {
- let envContent = "";
+ let envContent = '';
try {
- envContent = await fs.readFile(envPath, "utf-8");
+ envContent = await fs.readFile(envPath, 'utf-8');
} catch {
// .env file doesn't exist, we'll create it
}
// Parse existing env content
- const lines = envContent.split("\n");
+ const lines = envContent.split('\n');
const keyRegex = new RegExp(`^${key}=`);
let found = false;
const newLines = lines.map((line) => {
@@ -70,7 +64,7 @@ export async function persistApiKeyToEnv(
newLines.push(`${key}=${value}`);
}
- await fs.writeFile(envPath, newLines.join("\n"));
+ await fs.writeFile(envPath, newLines.join('\n'));
logger.info(`[Setup] Persisted ${key} to .env file`);
} catch (error) {
logger.error(`[Setup] Failed to persist ${key} to .env:`, error);
diff --git a/apps/server/src/routes/setup/get-claude-status.ts b/apps/server/src/routes/setup/get-claude-status.ts
index 2ae072ffe..922d363f1 100644
--- a/apps/server/src/routes/setup/get-claude-status.ts
+++ b/apps/server/src/routes/setup/get-claude-status.ts
@@ -2,36 +2,36 @@
* Business logic for getting Claude CLI status
*/
-import { exec } from "child_process";
-import { promisify } from "util";
-import os from "os";
-import path from "path";
-import fs from "fs/promises";
-import { getApiKey } from "./common.js";
+import { exec } from 'child_process';
+import { promisify } from 'util';
+import os from 'os';
+import path from 'path';
+import fs from 'fs/promises';
+import { getApiKey } from './common.js';
const execAsync = promisify(exec);
export async function getClaudeStatus() {
let installed = false;
- let version = "";
- let cliPath = "";
- let method = "none";
+ let version = '';
+ let cliPath = '';
+ let method = 'none';
- const isWindows = process.platform === "win32";
+ const isWindows = process.platform === 'win32';
// Try to find Claude CLI using platform-specific command
try {
// Use 'where' on Windows, 'which' on Unix-like systems
- const findCommand = isWindows ? "where claude" : "which claude";
+ const findCommand = isWindows ? 'where claude' : 'which claude';
const { stdout } = await execAsync(findCommand);
// 'where' on Windows can return multiple paths - take the first one
cliPath = stdout.trim().split(/\r?\n/)[0];
installed = true;
- method = "path";
+ method = 'path';
// Get version
try {
- const { stdout: versionOut } = await execAsync("claude --version");
+ const { stdout: versionOut } = await execAsync('claude --version');
version = versionOut.trim();
} catch {
// Version command might not be available
@@ -40,22 +40,22 @@ export async function getClaudeStatus() {
// Not in PATH, try common locations based on platform
const commonPaths = isWindows
? (() => {
- const appData = process.env.APPDATA || path.join(os.homedir(), "AppData", "Roaming");
+ const appData = process.env.APPDATA || path.join(os.homedir(), 'AppData', 'Roaming');
return [
// Windows-specific paths
- path.join(os.homedir(), ".local", "bin", "claude.exe"),
- path.join(appData, "npm", "claude.cmd"),
- path.join(appData, "npm", "claude"),
- path.join(appData, ".npm-global", "bin", "claude.cmd"),
- path.join(appData, ".npm-global", "bin", "claude"),
+ path.join(os.homedir(), '.local', 'bin', 'claude.exe'),
+ path.join(appData, 'npm', 'claude.cmd'),
+ path.join(appData, 'npm', 'claude'),
+ path.join(appData, '.npm-global', 'bin', 'claude.cmd'),
+ path.join(appData, '.npm-global', 'bin', 'claude'),
];
})()
: [
// Unix (Linux/macOS) paths
- path.join(os.homedir(), ".local", "bin", "claude"),
- path.join(os.homedir(), ".claude", "local", "claude"),
- "/usr/local/bin/claude",
- path.join(os.homedir(), ".npm-global", "bin", "claude"),
+ path.join(os.homedir(), '.local', 'bin', 'claude'),
+ path.join(os.homedir(), '.claude', 'local', 'claude'),
+ '/usr/local/bin/claude',
+ path.join(os.homedir(), '.npm-global', 'bin', 'claude'),
];
for (const p of commonPaths) {
@@ -63,7 +63,7 @@ export async function getClaudeStatus() {
await fs.access(p);
cliPath = p;
installed = true;
- method = "local";
+ method = 'local';
// Get version from this path
try {
@@ -84,11 +84,11 @@ export async function getClaudeStatus() {
// apiKeys.anthropic stores direct API keys for pay-per-use
let auth = {
authenticated: false,
- method: "none" as string,
+ method: 'none' as string,
hasCredentialsFile: false,
hasToken: false,
- hasStoredOAuthToken: !!getApiKey("anthropic_oauth_token"),
- hasStoredApiKey: !!getApiKey("anthropic"),
+ hasStoredOAuthToken: !!getApiKey('anthropic_oauth_token'),
+ hasStoredApiKey: !!getApiKey('anthropic'),
hasEnvApiKey: !!process.env.ANTHROPIC_API_KEY,
// Additional fields for detailed status
oauthTokenValid: false,
@@ -97,13 +97,13 @@ export async function getClaudeStatus() {
hasRecentActivity: false,
};
- const claudeDir = path.join(os.homedir(), ".claude");
+ const claudeDir = path.join(os.homedir(), '.claude');
// Check for recent Claude CLI activity - indicates working authentication
// The stats-cache.json file is only populated when the CLI is working properly
- const statsCachePath = path.join(claudeDir, "stats-cache.json");
+ const statsCachePath = path.join(claudeDir, 'stats-cache.json');
try {
- const statsContent = await fs.readFile(statsCachePath, "utf-8");
+ const statsContent = await fs.readFile(statsCachePath, 'utf-8');
const stats = JSON.parse(statsContent);
// Check if there's any activity (which means the CLI is authenticated and working)
@@ -111,26 +111,26 @@ export async function getClaudeStatus() {
auth.hasRecentActivity = true;
auth.hasCliAuth = true;
auth.authenticated = true;
- auth.method = "cli_authenticated";
+ auth.method = 'cli_authenticated';
}
} catch {
// Stats file doesn't exist or is invalid
}
// Check for settings.json - indicates CLI has been set up
- const settingsPath = path.join(claudeDir, "settings.json");
+ const settingsPath = path.join(claudeDir, 'settings.json');
try {
await fs.access(settingsPath);
// If settings exist but no activity, CLI might be set up but not authenticated
if (!auth.hasCliAuth) {
// Try to check for other indicators of auth
- const sessionsDir = path.join(claudeDir, "projects");
+ const sessionsDir = path.join(claudeDir, 'projects');
try {
const sessions = await fs.readdir(sessionsDir);
if (sessions.length > 0) {
auth.hasCliAuth = true;
auth.authenticated = true;
- auth.method = "cli_authenticated";
+ auth.method = 'cli_authenticated';
}
} catch {
// Sessions directory doesn't exist
@@ -143,13 +143,13 @@ export async function getClaudeStatus() {
// Check for credentials file (OAuth tokens from claude login)
// Note: Claude CLI may use ".credentials.json" (hidden) or "credentials.json" depending on version/platform
const credentialsPaths = [
- path.join(claudeDir, ".credentials.json"),
- path.join(claudeDir, "credentials.json"),
+ path.join(claudeDir, '.credentials.json'),
+ path.join(claudeDir, 'credentials.json'),
];
for (const credentialsPath of credentialsPaths) {
try {
- const credentialsContent = await fs.readFile(credentialsPath, "utf-8");
+ const credentialsContent = await fs.readFile(credentialsPath, 'utf-8');
const credentials = JSON.parse(credentialsContent);
auth.hasCredentialsFile = true;
@@ -158,11 +158,11 @@ export async function getClaudeStatus() {
auth.hasStoredOAuthToken = true;
auth.oauthTokenValid = true;
auth.authenticated = true;
- auth.method = "oauth_token"; // Stored OAuth token from credentials file
+ auth.method = 'oauth_token'; // Stored OAuth token from credentials file
} else if (credentials.api_key) {
auth.apiKeyValid = true;
auth.authenticated = true;
- auth.method = "api_key"; // Stored API key in credentials file
+ auth.method = 'api_key'; // Stored API key in credentials file
}
break; // Found and processed credentials file
} catch {
@@ -174,25 +174,25 @@ export async function getClaudeStatus() {
if (auth.hasEnvApiKey) {
auth.authenticated = true;
auth.apiKeyValid = true;
- auth.method = "api_key_env"; // API key from ANTHROPIC_API_KEY env var
+ auth.method = 'api_key_env'; // API key from ANTHROPIC_API_KEY env var
}
// In-memory stored OAuth token (from setup wizard - subscription auth)
- if (!auth.authenticated && getApiKey("anthropic_oauth_token")) {
+ if (!auth.authenticated && getApiKey('anthropic_oauth_token')) {
auth.authenticated = true;
auth.oauthTokenValid = true;
- auth.method = "oauth_token"; // Stored OAuth token from setup wizard
+ auth.method = 'oauth_token'; // Stored OAuth token from setup wizard
}
// In-memory stored API key (from settings UI - pay-per-use)
- if (!auth.authenticated && getApiKey("anthropic")) {
+ if (!auth.authenticated && getApiKey('anthropic')) {
auth.authenticated = true;
auth.apiKeyValid = true;
- auth.method = "api_key"; // Manually stored API key
+ auth.method = 'api_key'; // Manually stored API key
}
return {
- status: installed ? "installed" : "not_installed",
+ status: installed ? 'installed' : 'not_installed',
installed,
method,
version,
diff --git a/apps/server/src/routes/setup/index.ts b/apps/server/src/routes/setup/index.ts
index 2b5db9423..3681b2fc5 100644
--- a/apps/server/src/routes/setup/index.ts
+++ b/apps/server/src/routes/setup/index.ts
@@ -2,29 +2,29 @@
* Setup routes - HTTP API for CLI detection, API keys, and platform info
*/
-import { Router } from "express";
-import { createClaudeStatusHandler } from "./routes/claude-status.js";
-import { createInstallClaudeHandler } from "./routes/install-claude.js";
-import { createAuthClaudeHandler } from "./routes/auth-claude.js";
-import { createStoreApiKeyHandler } from "./routes/store-api-key.js";
-import { createDeleteApiKeyHandler } from "./routes/delete-api-key.js";
-import { createApiKeysHandler } from "./routes/api-keys.js";
-import { createPlatformHandler } from "./routes/platform.js";
-import { createVerifyClaudeAuthHandler } from "./routes/verify-claude-auth.js";
-import { createGhStatusHandler } from "./routes/gh-status.js";
+import { Router } from 'express';
+import { createClaudeStatusHandler } from './routes/claude-status.js';
+import { createInstallClaudeHandler } from './routes/install-claude.js';
+import { createAuthClaudeHandler } from './routes/auth-claude.js';
+import { createStoreApiKeyHandler } from './routes/store-api-key.js';
+import { createDeleteApiKeyHandler } from './routes/delete-api-key.js';
+import { createApiKeysHandler } from './routes/api-keys.js';
+import { createPlatformHandler } from './routes/platform.js';
+import { createVerifyClaudeAuthHandler } from './routes/verify-claude-auth.js';
+import { createGhStatusHandler } from './routes/gh-status.js';
export function createSetupRoutes(): Router {
const router = Router();
- router.get("/claude-status", createClaudeStatusHandler());
- router.post("/install-claude", createInstallClaudeHandler());
- router.post("/auth-claude", createAuthClaudeHandler());
- router.post("/store-api-key", createStoreApiKeyHandler());
- router.post("/delete-api-key", createDeleteApiKeyHandler());
- router.get("/api-keys", createApiKeysHandler());
- router.get("/platform", createPlatformHandler());
- router.post("/verify-claude-auth", createVerifyClaudeAuthHandler());
- router.get("/gh-status", createGhStatusHandler());
+ router.get('/claude-status', createClaudeStatusHandler());
+ router.post('/install-claude', createInstallClaudeHandler());
+ router.post('/auth-claude', createAuthClaudeHandler());
+ router.post('/store-api-key', createStoreApiKeyHandler());
+ router.post('/delete-api-key', createDeleteApiKeyHandler());
+ router.get('/api-keys', createApiKeysHandler());
+ router.get('/platform', createPlatformHandler());
+ router.post('/verify-claude-auth', createVerifyClaudeAuthHandler());
+ router.get('/gh-status', createGhStatusHandler());
return router;
}
diff --git a/apps/server/src/routes/setup/routes/api-keys.ts b/apps/server/src/routes/setup/routes/api-keys.ts
index 201e4ebaf..d052c187f 100644
--- a/apps/server/src/routes/setup/routes/api-keys.ts
+++ b/apps/server/src/routes/setup/routes/api-keys.ts
@@ -2,19 +2,18 @@
* GET /api-keys endpoint - Get API keys status
*/
-import type { Request, Response } from "express";
-import { getApiKey, getErrorMessage, logError } from "../common.js";
+import type { Request, Response } from 'express';
+import { getApiKey, getErrorMessage, logError } from '../common.js';
export function createApiKeysHandler() {
return async (_req: Request, res: Response): Promise<void> => {
try {
res.json({
success: true,
- hasAnthropicKey:
- !!getApiKey("anthropic") || !!process.env.ANTHROPIC_API_KEY,
+ hasAnthropicKey: !!getApiKey('anthropic') || !!process.env.ANTHROPIC_API_KEY,
});
} catch (error) {
- logError(error, "Get API keys failed");
+ logError(error, 'Get API keys failed');
res.status(500).json({ success: false, error: getErrorMessage(error) });
}
};
diff --git a/apps/server/src/routes/setup/routes/auth-claude.ts b/apps/server/src/routes/setup/routes/auth-claude.ts
index 2ab8401d8..4531501d8 100644
--- a/apps/server/src/routes/setup/routes/auth-claude.ts
+++ b/apps/server/src/routes/setup/routes/auth-claude.ts
@@ -2,8 +2,8 @@
* POST /auth-claude endpoint - Auth Claude
*/
-import type { Request, Response } from "express";
-import { getErrorMessage, logError } from "../common.js";
+import type { Request, Response } from 'express';
+import { getErrorMessage, logError } from '../common.js';
export function createAuthClaudeHandler() {
return async (_req: Request, res: Response): Promise<void> => {
@@ -11,11 +11,11 @@ export function createAuthClaudeHandler() {
res.json({
success: true,
requiresManualAuth: true,
- command: "claude login",
+ command: 'claude login',
message: "Please run 'claude login' in your terminal to authenticate",
});
} catch (error) {
- logError(error, "Auth Claude failed");
+ logError(error, 'Auth Claude failed');
res.status(500).json({ success: false, error: getErrorMessage(error) });
}
};
diff --git a/apps/server/src/routes/setup/routes/claude-status.ts b/apps/server/src/routes/setup/routes/claude-status.ts
index 232a47bd2..f2ae4a593 100644
--- a/apps/server/src/routes/setup/routes/claude-status.ts
+++ b/apps/server/src/routes/setup/routes/claude-status.ts
@@ -2,9 +2,9 @@
* GET /claude-status endpoint - Get Claude CLI status
*/
-import type { Request, Response } from "express";
-import { getClaudeStatus } from "../get-claude-status.js";
-import { getErrorMessage, logError } from "../common.js";
+import type { Request, Response } from 'express';
+import { getClaudeStatus } from '../get-claude-status.js';
+import { getErrorMessage, logError } from '../common.js';
export function createClaudeStatusHandler() {
return async (_req: Request, res: Response): Promise<void> => {
@@ -15,7 +15,7 @@ export function createClaudeStatusHandler() {
...status,
});
} catch (error) {
- logError(error, "Get Claude status failed");
+ logError(error, 'Get Claude status failed');
res.status(500).json({ success: false, error: getErrorMessage(error) });
}
};
diff --git a/apps/server/src/routes/setup/routes/delete-api-key.ts b/apps/server/src/routes/setup/routes/delete-api-key.ts
index 4bb3d4e58..e64ff6b76 100644
--- a/apps/server/src/routes/setup/routes/delete-api-key.ts
+++ b/apps/server/src/routes/setup/routes/delete-api-key.ts
@@ -2,43 +2,43 @@
* POST /delete-api-key endpoint - Delete a stored API key
*/
-import type { Request, Response } from "express";
-import { createLogger } from "@automaker/utils";
-import path from "path";
-import fs from "fs/promises";
+import type { Request, Response } from 'express';
+import { createLogger } from '@automaker/utils';
+import path from 'path';
+import fs from 'fs/promises';
-const logger = createLogger("Setup");
+const logger = createLogger('Setup');
// In-memory storage reference (imported from common.ts pattern)
// We need to modify common.ts to export a deleteApiKey function
-import { setApiKey } from "../common.js";
+import { setApiKey } from '../common.js';
/**
* Remove an API key from the .env file
*/
async function removeApiKeyFromEnv(key: string): Promise<void> {
- const envPath = path.join(process.cwd(), ".env");
+ const envPath = path.join(process.cwd(), '.env');
try {
- let envContent = "";
+ let envContent = '';
try {
- envContent = await fs.readFile(envPath, "utf-8");
+ envContent = await fs.readFile(envPath, 'utf-8');
} catch {
// .env file doesn't exist, nothing to delete
return;
}
// Parse existing env content and remove the key
- const lines = envContent.split("\n");
+ const lines = envContent.split('\n');
const keyRegex = new RegExp(`^${key}=`);
const newLines = lines.filter((line) => !keyRegex.test(line));
// Remove empty lines at the end
- while (newLines.length > 0 && newLines[newLines.length - 1].trim() === "") {
+ while (newLines.length > 0 && newLines[newLines.length - 1].trim() === '') {
newLines.pop();
}
- await fs.writeFile(envPath, newLines.join("\n") + (newLines.length > 0 ? "\n" : ""));
+ await fs.writeFile(envPath, newLines.join('\n') + (newLines.length > 0 ? '\n' : ''));
logger.info(`[Setup] Removed ${key} from .env file`);
} catch (error) {
logger.error(`[Setup] Failed to remove ${key} from .env:`, error);
@@ -54,7 +54,7 @@ export function createDeleteApiKeyHandler() {
if (!provider) {
res.status(400).json({
success: false,
- error: "Provider is required",
+ error: 'Provider is required',
});
return;
}
@@ -63,7 +63,7 @@ export function createDeleteApiKeyHandler() {
// Map provider to env key name
const envKeyMap: Record<string, string> = {
- anthropic: "ANTHROPIC_API_KEY",
+ anthropic: 'ANTHROPIC_API_KEY',
};
const envKey = envKeyMap[provider];
@@ -76,7 +76,7 @@ export function createDeleteApiKeyHandler() {
}
// Clear from in-memory storage
- setApiKey(provider, "");
+ setApiKey(provider, '');
// Remove from environment
delete process.env[envKey];
@@ -91,14 +91,11 @@ export function createDeleteApiKeyHandler() {
message: `API key for ${provider} has been deleted`,
});
} catch (error) {
- logger.error("[Setup] Delete API key error:", error);
+ logger.error('[Setup] Delete API key error:', error);
res.status(500).json({
success: false,
- error: error instanceof Error ? error.message : "Failed to delete API key",
+ error: error instanceof Error ? error.message : 'Failed to delete API key',
});
}
};
}
-
-
-
diff --git a/apps/server/src/routes/setup/routes/gh-status.ts b/apps/server/src/routes/setup/routes/gh-status.ts
index 7dcf5d826..4d36561c9 100644
--- a/apps/server/src/routes/setup/routes/gh-status.ts
+++ b/apps/server/src/routes/setup/routes/gh-status.ts
@@ -2,24 +2,26 @@
* GET /gh-status endpoint - Get GitHub CLI status
*/
-import type { Request, Response } from "express";
-import { exec } from "child_process";
-import { promisify } from "util";
-import os from "os";
-import path from "path";
-import fs from "fs/promises";
-import { getErrorMessage, logError } from "../common.js";
+import type { Request, Response } from 'express';
+import { exec } from 'child_process';
+import { promisify } from 'util';
+import os from 'os';
+import path from 'path';
+import fs from 'fs/promises';
+import { getErrorMessage, logError } from '../common.js';
const execAsync = promisify(exec);
// Extended PATH to include common tool installation locations
const extendedPath = [
process.env.PATH,
- "/opt/homebrew/bin",
- "/usr/local/bin",
- "/home/linuxbrew/.linuxbrew/bin",
+ '/opt/homebrew/bin',
+ '/usr/local/bin',
+ '/home/linuxbrew/.linuxbrew/bin',
`${process.env.HOME}/.local/bin`,
-].filter(Boolean).join(":");
+]
+ .filter(Boolean)
+ .join(':');
const execEnv = {
...process.env,
@@ -44,11 +46,11 @@ async function getGhStatus(): Promise {
user: null,
};
- const isWindows = process.platform === "win32";
+ const isWindows = process.platform === 'win32';
// Check if gh CLI is installed
try {
- const findCommand = isWindows ? "where gh" : "command -v gh";
+ const findCommand = isWindows ? 'where gh' : 'command -v gh';
const { stdout } = await execAsync(findCommand, { env: execEnv });
status.path = stdout.trim().split(/\r?\n/)[0];
status.installed = true;
@@ -56,14 +58,14 @@ async function getGhStatus(): Promise {
// gh not in PATH, try common locations
const commonPaths = isWindows
? [
- path.join(process.env.LOCALAPPDATA || "", "Programs", "gh", "bin", "gh.exe"),
- path.join(process.env.ProgramFiles || "", "GitHub CLI", "gh.exe"),
+ path.join(process.env.LOCALAPPDATA || '', 'Programs', 'gh', 'bin', 'gh.exe'),
+ path.join(process.env.ProgramFiles || '', 'GitHub CLI', 'gh.exe'),
]
: [
- "/opt/homebrew/bin/gh",
- "/usr/local/bin/gh",
- path.join(os.homedir(), ".local", "bin", "gh"),
- "/home/linuxbrew/.linuxbrew/bin/gh",
+ '/opt/homebrew/bin/gh',
+ '/usr/local/bin/gh',
+ path.join(os.homedir(), '.local', 'bin', 'gh'),
+ '/home/linuxbrew/.linuxbrew/bin/gh',
];
for (const p of commonPaths) {
@@ -84,30 +86,31 @@ async function getGhStatus(): Promise {
// Get version
try {
- const { stdout } = await execAsync("gh --version", { env: execEnv });
+ const { stdout } = await execAsync('gh --version', { env: execEnv });
// Extract version from output like "gh version 2.40.1 (2024-01-09)"
const versionMatch = stdout.match(/gh version ([\d.]+)/);
- status.version = versionMatch ? versionMatch[1] : stdout.trim().split("\n")[0];
+ status.version = versionMatch ? versionMatch[1] : stdout.trim().split('\n')[0];
} catch {
// Version command failed
}
// Check authentication status
try {
- const { stdout } = await execAsync("gh auth status", { env: execEnv });
+ const { stdout } = await execAsync('gh auth status', { env: execEnv });
// If this succeeds without error, we're authenticated
status.authenticated = true;
// Try to extract username from output
- const userMatch = stdout.match(/Logged in to [^\s]+ account ([^\s]+)/i) ||
- stdout.match(/Logged in to [^\s]+ as ([^\s]+)/i);
+ const userMatch =
+ stdout.match(/Logged in to [^\s]+ account ([^\s]+)/i) ||
+ stdout.match(/Logged in to [^\s]+ as ([^\s]+)/i);
if (userMatch) {
status.user = userMatch[1];
}
} catch (error: unknown) {
// Auth status returns non-zero if not authenticated
const err = error as { stderr?: string };
- if (err.stderr?.includes("not logged in")) {
+ if (err.stderr?.includes('not logged in')) {
status.authenticated = false;
}
}
@@ -124,7 +127,7 @@ export function createGhStatusHandler() {
...status,
});
} catch (error) {
- logError(error, "Get GitHub CLI status failed");
+ logError(error, 'Get GitHub CLI status failed');
res.status(500).json({ success: false, error: getErrorMessage(error) });
}
};
diff --git a/apps/server/src/routes/setup/routes/install-claude.ts b/apps/server/src/routes/setup/routes/install-claude.ts
index c471fc6c4..644f5e10f 100644
--- a/apps/server/src/routes/setup/routes/install-claude.ts
+++ b/apps/server/src/routes/setup/routes/install-claude.ts
@@ -2,8 +2,8 @@
* POST /install-claude endpoint - Install Claude CLI
*/
-import type { Request, Response } from "express";
-import { getErrorMessage, logError } from "../common.js";
+import type { Request, Response } from 'express';
+import { getErrorMessage, logError } from '../common.js';
export function createInstallClaudeHandler() {
return async (_req: Request, res: Response): Promise<void> => {
@@ -13,10 +13,10 @@ export function createInstallClaudeHandler() {
res.json({
success: false,
error:
- "CLI installation requires terminal access. Please install manually using: npm install -g @anthropic-ai/claude-code",
+ 'CLI installation requires terminal access. Please install manually using: npm install -g @anthropic-ai/claude-code',
});
} catch (error) {
- logError(error, "Install Claude CLI failed");
+ logError(error, 'Install Claude CLI failed');
res.status(500).json({ success: false, error: getErrorMessage(error) });
}
};
diff --git a/apps/server/src/routes/setup/routes/platform.ts b/apps/server/src/routes/setup/routes/platform.ts
index 40788d0b0..303cdd877 100644
--- a/apps/server/src/routes/setup/routes/platform.ts
+++ b/apps/server/src/routes/setup/routes/platform.ts
@@ -2,9 +2,9 @@
* GET /platform endpoint - Get platform info
*/
-import type { Request, Response } from "express";
-import os from "os";
-import { getErrorMessage, logError } from "../common.js";
+import type { Request, Response } from 'express';
+import os from 'os';
+import { getErrorMessage, logError } from '../common.js';
export function createPlatformHandler() {
return async (_req: Request, res: Response): Promise<void> => {
@@ -15,12 +15,12 @@ export function createPlatformHandler() {
platform,
arch: os.arch(),
homeDir: os.homedir(),
- isWindows: platform === "win32",
- isMac: platform === "darwin",
- isLinux: platform === "linux",
+ isWindows: platform === 'win32',
+ isMac: platform === 'darwin',
+ isLinux: platform === 'linux',
});
} catch (error) {
- logError(error, "Get platform info failed");
+ logError(error, 'Get platform info failed');
res.status(500).json({ success: false, error: getErrorMessage(error) });
}
};
diff --git a/apps/server/src/routes/setup/routes/store-api-key.ts b/apps/server/src/routes/setup/routes/store-api-key.ts
index 71011e015..e77a697e8 100644
--- a/apps/server/src/routes/setup/routes/store-api-key.ts
+++ b/apps/server/src/routes/setup/routes/store-api-key.ts
@@ -2,16 +2,11 @@
* POST /store-api-key endpoint - Store API key
*/
-import type { Request, Response } from "express";
-import {
- setApiKey,
- persistApiKeyToEnv,
- getErrorMessage,
- logError,
-} from "../common.js";
-import { createLogger } from "@automaker/utils";
+import type { Request, Response } from 'express';
+import { setApiKey, persistApiKeyToEnv, getErrorMessage, logError } from '../common.js';
+import { createLogger } from '@automaker/utils';
-const logger = createLogger("Setup");
+const logger = createLogger('Setup');
export function createStoreApiKeyHandler() {
return async (req: Request, res: Response): Promise<void> => {
@@ -22,20 +17,18 @@ export function createStoreApiKeyHandler() {
};
if (!provider || !apiKey) {
- res
- .status(400)
- .json({ success: false, error: "provider and apiKey required" });
+ res.status(400).json({ success: false, error: 'provider and apiKey required' });
return;
}
setApiKey(provider, apiKey);
// Also set as environment variable and persist to .env
- if (provider === "anthropic" || provider === "anthropic_oauth_token") {
+ if (provider === 'anthropic' || provider === 'anthropic_oauth_token') {
// Both API key and OAuth token use ANTHROPIC_API_KEY
process.env.ANTHROPIC_API_KEY = apiKey;
- await persistApiKeyToEnv("ANTHROPIC_API_KEY", apiKey);
- logger.info("[Setup] Stored API key as ANTHROPIC_API_KEY");
+ await persistApiKeyToEnv('ANTHROPIC_API_KEY', apiKey);
+ logger.info('[Setup] Stored API key as ANTHROPIC_API_KEY');
} else {
res.status(400).json({
success: false,
@@ -46,7 +39,7 @@ export function createStoreApiKeyHandler() {
res.json({ success: true });
} catch (error) {
- logError(error, "Store API key failed");
+ logError(error, 'Store API key failed');
res.status(500).json({ success: false, error: getErrorMessage(error) });
}
};
diff --git a/apps/server/src/routes/setup/routes/verify-claude-auth.ts b/apps/server/src/routes/setup/routes/verify-claude-auth.ts
index 4b5438e3c..5debc5c7b 100644
--- a/apps/server/src/routes/setup/routes/verify-claude-auth.ts
+++ b/apps/server/src/routes/setup/routes/verify-claude-auth.ts
@@ -3,50 +3,50 @@
* Supports verifying either CLI auth or API key auth independently
*/
-import type { Request, Response } from "express";
-import { query } from "@anthropic-ai/claude-agent-sdk";
-import { createLogger } from "@automaker/utils";
-import { getApiKey } from "../common.js";
+import type { Request, Response } from 'express';
+import { query } from '@anthropic-ai/claude-agent-sdk';
+import { createLogger } from '@automaker/utils';
+import { getApiKey } from '../common.js';
-const logger = createLogger("Setup");
+const logger = createLogger('Setup');
// Known error patterns that indicate auth failure
const AUTH_ERROR_PATTERNS = [
- "OAuth token revoked",
- "Please run /login",
- "please run /login",
- "token revoked",
- "invalid_api_key",
- "authentication_error",
- "unauthorized",
- "not authenticated",
- "authentication failed",
- "invalid api key",
- "api key is invalid",
+ 'OAuth token revoked',
+ 'Please run /login',
+ 'please run /login',
+ 'token revoked',
+ 'invalid_api_key',
+ 'authentication_error',
+ 'unauthorized',
+ 'not authenticated',
+ 'authentication failed',
+ 'invalid api key',
+ 'api key is invalid',
];
// Patterns that indicate billing/credit issues - should FAIL verification
const BILLING_ERROR_PATTERNS = [
- "credit balance is too low",
- "credit balance too low",
- "insufficient credits",
- "insufficient balance",
- "no credits",
- "out of credits",
- "billing",
- "payment required",
- "add credits",
+ 'credit balance is too low',
+ 'credit balance too low',
+ 'insufficient credits',
+ 'insufficient balance',
+ 'no credits',
+ 'out of credits',
+ 'billing',
+ 'payment required',
+ 'add credits',
];
// Patterns that indicate rate/usage limits - should FAIL verification
// Users need to wait or upgrade their plan
const RATE_LIMIT_PATTERNS = [
- "limit reached",
- "rate limit",
- "rate_limit",
- "resets", // Only valid if it's a temporary reset, not a billing issue
- "/upgrade",
- "extra-usage",
+ 'limit reached',
+ 'rate limit',
+ 'rate_limit',
+ 'resets', // Only valid if it's a temporary reset, not a billing issue
+ '/upgrade',
+ 'extra-usage',
];
function isRateLimitError(text: string): boolean {
@@ -55,43 +55,33 @@ function isRateLimitError(text: string): boolean {
if (isBillingError(text)) {
return false;
}
- return RATE_LIMIT_PATTERNS.some((pattern) =>
- lowerText.includes(pattern.toLowerCase())
- );
+ return RATE_LIMIT_PATTERNS.some((pattern) => lowerText.includes(pattern.toLowerCase()));
}
function isBillingError(text: string): boolean {
const lowerText = text.toLowerCase();
- return BILLING_ERROR_PATTERNS.some((pattern) =>
- lowerText.includes(pattern.toLowerCase())
- );
+ return BILLING_ERROR_PATTERNS.some((pattern) => lowerText.includes(pattern.toLowerCase()));
}
function containsAuthError(text: string): boolean {
const lowerText = text.toLowerCase();
- return AUTH_ERROR_PATTERNS.some((pattern) =>
- lowerText.includes(pattern.toLowerCase())
- );
+ return AUTH_ERROR_PATTERNS.some((pattern) => lowerText.includes(pattern.toLowerCase()));
}
export function createVerifyClaudeAuthHandler() {
return async (req: Request, res: Response): Promise<void> => {
try {
// Get the auth method from the request body
- const { authMethod } = req.body as { authMethod?: "cli" | "api_key" };
+ const { authMethod } = req.body as { authMethod?: 'cli' | 'api_key' };
- logger.info(
- `[Setup] Verifying Claude authentication using method: ${
- authMethod || "auto"
- }`
- );
+ logger.info(`[Setup] Verifying Claude authentication using method: ${authMethod || 'auto'}`);
// Create an AbortController with a 30-second timeout
const abortController = new AbortController();
const timeoutId = setTimeout(() => abortController.abort(), 30000);
let authenticated = false;
- let errorMessage = "";
+ let errorMessage = '';
let receivedAnyContent = false;
// Save original env values
@@ -99,25 +89,23 @@ export function createVerifyClaudeAuthHandler() {
try {
// Configure environment based on auth method
- if (authMethod === "cli") {
+ if (authMethod === 'cli') {
// For CLI verification, remove any API key so it uses CLI credentials only
delete process.env.ANTHROPIC_API_KEY;
- logger.info(
- "[Setup] Cleared API key environment for CLI verification"
- );
- } else if (authMethod === "api_key") {
+ logger.info('[Setup] Cleared API key environment for CLI verification');
+ } else if (authMethod === 'api_key') {
// For API key verification, ensure we're using the stored API key
- const storedApiKey = getApiKey("anthropic");
+ const storedApiKey = getApiKey('anthropic');
if (storedApiKey) {
process.env.ANTHROPIC_API_KEY = storedApiKey;
- logger.info("[Setup] Using stored API key for verification");
+ logger.info('[Setup] Using stored API key for verification');
} else {
// Check env var
if (!process.env.ANTHROPIC_API_KEY) {
res.json({
success: true,
authenticated: false,
- error: "No API key configured. Please enter an API key first.",
+ error: 'No API key configured. Please enter an API key first.',
});
return;
}
@@ -128,7 +116,7 @@ export function createVerifyClaudeAuthHandler() {
const stream = query({
prompt: "Reply with only the word 'ok'",
options: {
- model: "claude-sonnet-4-20250514",
+ model: 'claude-sonnet-4-20250514',
maxTurns: 1,
allowedTools: [],
abortController,
@@ -141,50 +129,50 @@ export function createVerifyClaudeAuthHandler() {
for await (const msg of stream) {
const msgStr = JSON.stringify(msg);
allMessages.push(msgStr);
- logger.info("[Setup] Stream message:", msgStr.substring(0, 500));
+ logger.info('[Setup] Stream message:', msgStr.substring(0, 500));
// Check for billing errors FIRST - these should fail verification
if (isBillingError(msgStr)) {
- logger.error("[Setup] Found billing error in message");
+ logger.error('[Setup] Found billing error in message');
errorMessage =
- "Credit balance is too low. Please add credits to your Anthropic account at console.anthropic.com";
+ 'Credit balance is too low. Please add credits to your Anthropic account at console.anthropic.com';
authenticated = false;
break;
}
// Check if any part of the message contains auth errors
if (containsAuthError(msgStr)) {
- logger.error("[Setup] Found auth error in message");
- if (authMethod === "cli") {
+ logger.error('[Setup] Found auth error in message');
+ if (authMethod === 'cli') {
errorMessage =
"CLI authentication failed. Please run 'claude login' in your terminal to authenticate.";
} else {
- errorMessage = "API key is invalid or has been revoked.";
+ errorMessage = 'API key is invalid or has been revoked.';
}
break;
}
// Check specifically for assistant messages with text content
- if (msg.type === "assistant" && (msg as any).message?.content) {
+ if (msg.type === 'assistant' && (msg as any).message?.content) {
const content = (msg as any).message.content;
if (Array.isArray(content)) {
for (const block of content) {
- if (block.type === "text" && block.text) {
+ if (block.type === 'text' && block.text) {
const text = block.text;
- logger.info("[Setup] Assistant text:", text);
+ logger.info('[Setup] Assistant text:', text);
if (containsAuthError(text)) {
- if (authMethod === "cli") {
+ if (authMethod === 'cli') {
errorMessage =
"CLI authentication failed. Please run 'claude login' in your terminal to authenticate.";
} else {
- errorMessage = "API key is invalid or has been revoked.";
+ errorMessage = 'API key is invalid or has been revoked.';
}
break;
}
// Valid text response that's not an error
- if (text.toLowerCase().includes("ok") || text.length > 0) {
+ if (text.toLowerCase().includes('ok') || text.length > 0) {
receivedAnyContent = true;
}
}
@@ -193,34 +181,30 @@ export function createVerifyClaudeAuthHandler() {
}
// Check for result messages
- if (msg.type === "result") {
+ if (msg.type === 'result') {
const resultStr = JSON.stringify(msg);
// First check for billing errors - these should FAIL verification
if (isBillingError(resultStr)) {
- logger.error(
- "[Setup] Billing error detected - insufficient credits"
- );
+ logger.error('[Setup] Billing error detected - insufficient credits');
errorMessage =
- "Credit balance is too low. Please add credits to your Anthropic account at console.anthropic.com";
+ 'Credit balance is too low. Please add credits to your Anthropic account at console.anthropic.com';
authenticated = false;
break;
}
// Check if it's a rate limit error - should FAIL verification
else if (isRateLimitError(resultStr)) {
- logger.warn(
- "[Setup] Rate limit detected - treating as unverified"
- );
+ logger.warn('[Setup] Rate limit detected - treating as unverified');
errorMessage =
- "Rate limit reached. Please wait a while before trying again or upgrade your plan.";
+ 'Rate limit reached. Please wait a while before trying again or upgrade your plan.';
authenticated = false;
break;
} else if (containsAuthError(resultStr)) {
- if (authMethod === "cli") {
+ if (authMethod === 'cli') {
errorMessage =
"CLI authentication failed. Please run 'claude login' in your terminal to authenticate.";
} else {
- errorMessage = "API key is invalid or has been revoked.";
+ errorMessage = 'API key is invalid or has been revoked.';
}
} else {
// Got a result without errors
@@ -236,60 +220,48 @@ export function createVerifyClaudeAuthHandler() {
authenticated = true;
} else {
// No content received - might be an issue
- logger.warn("[Setup] No content received from stream");
- logger.warn("[Setup] All messages:", allMessages.join("\n"));
- errorMessage =
- "No response received from Claude. Please check your authentication.";
+ logger.warn('[Setup] No content received from stream');
+ logger.warn('[Setup] All messages:', allMessages.join('\n'));
+ errorMessage = 'No response received from Claude. Please check your authentication.';
}
} catch (error: unknown) {
- const errMessage =
- error instanceof Error ? error.message : String(error);
+ const errMessage = error instanceof Error ? error.message : String(error);
- logger.error("[Setup] Claude auth verification exception:", errMessage);
+ logger.error('[Setup] Claude auth verification exception:', errMessage);
// Check for billing errors FIRST - these always fail
if (isBillingError(errMessage)) {
authenticated = false;
errorMessage =
- "Credit balance is too low. Please add credits to your Anthropic account at console.anthropic.com";
+ 'Credit balance is too low. Please add credits to your Anthropic account at console.anthropic.com';
}
// Check for rate limit in exception - should FAIL verification
else if (isRateLimitError(errMessage)) {
authenticated = false;
errorMessage =
- "Rate limit reached. Please wait a while before trying again or upgrade your plan.";
- logger.warn(
- "[Setup] Rate limit in exception - treating as unverified"
- );
+ 'Rate limit reached. Please wait a while before trying again or upgrade your plan.';
+ logger.warn('[Setup] Rate limit in exception - treating as unverified');
}
// If we already determined auth was successful, keep it
else if (authenticated) {
- logger.info("[Setup] Auth already confirmed, ignoring exception");
+ logger.info('[Setup] Auth already confirmed, ignoring exception');
}
// Check for auth-related errors in exception
else if (containsAuthError(errMessage)) {
- if (authMethod === "cli") {
+ if (authMethod === 'cli') {
errorMessage =
"CLI authentication failed. Please run 'claude login' in your terminal to authenticate.";
} else {
- errorMessage = "API key is invalid or has been revoked.";
+ errorMessage = 'API key is invalid or has been revoked.';
}
- } else if (
- errMessage.includes("abort") ||
- errMessage.includes("timeout")
- ) {
- errorMessage = "Verification timed out. Please try again.";
- } else if (
- errMessage.includes("exit") &&
- errMessage.includes("code 1")
- ) {
+ } else if (errMessage.includes('abort') || errMessage.includes('timeout')) {
+ errorMessage = 'Verification timed out. Please try again.';
+ } else if (errMessage.includes('exit') && errMessage.includes('code 1')) {
// Process exited with code 1 but we might have gotten rate limit info in the stream
// Check if we received any content that indicated auth worked
if (receivedAnyContent && !errorMessage) {
authenticated = true;
- logger.info(
- "[Setup] Process exit 1 but content received - auth valid"
- );
+ logger.info('[Setup] Process exit 1 but content received - auth valid');
} else if (!errorMessage) {
errorMessage = errMessage;
}
@@ -301,13 +273,13 @@ export function createVerifyClaudeAuthHandler() {
// Restore original environment
if (originalAnthropicKey !== undefined) {
process.env.ANTHROPIC_API_KEY = originalAnthropicKey;
- } else if (authMethod === "cli") {
+ } else if (authMethod === 'cli') {
// If we cleared it and there was no original, keep it cleared
delete process.env.ANTHROPIC_API_KEY;
}
}
- logger.info("[Setup] Verification result:", {
+ logger.info('[Setup] Verification result:', {
authenticated,
errorMessage,
authMethod,
@@ -319,11 +291,11 @@ export function createVerifyClaudeAuthHandler() {
error: errorMessage || undefined,
});
} catch (error) {
- logger.error("[Setup] Verify Claude auth endpoint error:", error);
+ logger.error('[Setup] Verify Claude auth endpoint error:', error);
res.status(500).json({
success: false,
authenticated: false,
- error: error instanceof Error ? error.message : "Verification failed",
+ error: error instanceof Error ? error.message : 'Verification failed',
});
}
};
diff --git a/apps/server/src/routes/suggestions/common.ts b/apps/server/src/routes/suggestions/common.ts
index 4816ca663..e4e3dbe81 100644
--- a/apps/server/src/routes/suggestions/common.ts
+++ b/apps/server/src/routes/suggestions/common.ts
@@ -2,13 +2,10 @@
* Common utilities and state for suggestions routes
*/
-import { createLogger } from "@automaker/utils";
-import {
- getErrorMessage as getErrorMessageShared,
- createLogError,
-} from "../common.js";
+import { createLogger } from '@automaker/utils';
+import { getErrorMessage as getErrorMessageShared, createLogError } from '../common.js';
-const logger = createLogger("Suggestions");
+const logger = createLogger('Suggestions');
// Shared state for tracking generation status - private
let isRunning = false;
@@ -27,10 +24,7 @@ export function getSuggestionsStatus(): {
/**
* Set the running state and abort controller
*/
-export function setRunningState(
- running: boolean,
- controller: AbortController | null = null
-): void {
+export function setRunningState(running: boolean, controller: AbortController | null = null): void {
isRunning = running;
currentAbortController = controller;
}
diff --git a/apps/server/src/routes/suggestions/generate-suggestions.ts b/apps/server/src/routes/suggestions/generate-suggestions.ts
index d0c985d99..42514a0a2 100644
--- a/apps/server/src/routes/suggestions/generate-suggestions.ts
+++ b/apps/server/src/routes/suggestions/generate-suggestions.ts
@@ -2,43 +2,43 @@
* Business logic for generating suggestions
*/
-import { query } from "@anthropic-ai/claude-agent-sdk";
-import type { EventEmitter } from "../../lib/events.js";
-import { createLogger } from "@automaker/utils";
-import { createSuggestionsOptions } from "../../lib/sdk-options.js";
+import { query } from '@anthropic-ai/claude-agent-sdk';
+import type { EventEmitter } from '../../lib/events.js';
+import { createLogger } from '@automaker/utils';
+import { createSuggestionsOptions } from '../../lib/sdk-options.js';
-const logger = createLogger("Suggestions");
+const logger = createLogger('Suggestions');
/**
* JSON Schema for suggestions output
*/
const suggestionsSchema = {
- type: "object",
+ type: 'object',
properties: {
suggestions: {
- type: "array",
+ type: 'array',
items: {
- type: "object",
+ type: 'object',
properties: {
- id: { type: "string" },
- category: { type: "string" },
- description: { type: "string" },
+ id: { type: 'string' },
+ category: { type: 'string' },
+ description: { type: 'string' },
steps: {
- type: "array",
- items: { type: "string" },
+ type: 'array',
+ items: { type: 'string' },
},
- priority: {
- type: "number",
+ priority: {
+ type: 'number',
minimum: 1,
maximum: 3,
},
- reasoning: { type: "string" },
+ reasoning: { type: 'string' },
},
- required: ["category", "description", "steps", "priority", "reasoning"],
+ required: ['category', 'description', 'steps', 'priority', 'reasoning'],
},
},
},
- required: ["suggestions"],
+ required: ['suggestions'],
additionalProperties: false,
};
@@ -49,13 +49,10 @@ export async function generateSuggestions(
abortController: AbortController
): Promise<void> {
const typePrompts: Record<string, string> = {
- features:
- "Analyze this project and suggest new features that would add value.",
- refactoring: "Analyze this project and identify refactoring opportunities.",
- security:
- "Analyze this project for security vulnerabilities and suggest fixes.",
- performance:
- "Analyze this project for performance issues and suggest optimizations.",
+ features: 'Analyze this project and suggest new features that would add value.',
+ refactoring: 'Analyze this project and identify refactoring opportunities.',
+ security: 'Analyze this project for security vulnerabilities and suggest fixes.',
+ performance: 'Analyze this project for performance issues and suggest optimizations.',
};
const prompt = `${typePrompts[suggestionType] || typePrompts.features}
@@ -71,8 +68,8 @@ For each suggestion, provide:
The response will be automatically formatted as structured JSON.`;
- events.emit("suggestions:event", {
- type: "suggestions_progress",
+ events.emit('suggestions:event', {
+ type: 'suggestions_progress',
content: `Starting ${suggestionType} analysis...\n`,
});
@@ -80,48 +77,48 @@ The response will be automatically formatted as structured JSON.`;
cwd: projectPath,
abortController,
outputFormat: {
- type: "json_schema",
+ type: 'json_schema',
schema: suggestionsSchema,
},
});
const stream = query({ prompt, options });
- let responseText = "";
+ let responseText = '';
let structuredOutput: { suggestions: Array<Record<string, unknown>> } | null = null;
for await (const msg of stream) {
- if (msg.type === "assistant" && msg.message.content) {
+ if (msg.type === 'assistant' && msg.message.content) {
for (const block of msg.message.content) {
- if (block.type === "text") {
+ if (block.type === 'text') {
responseText += block.text;
- events.emit("suggestions:event", {
- type: "suggestions_progress",
+ events.emit('suggestions:event', {
+ type: 'suggestions_progress',
content: block.text,
});
- } else if (block.type === "tool_use") {
- events.emit("suggestions:event", {
- type: "suggestions_tool",
+ } else if (block.type === 'tool_use') {
+ events.emit('suggestions:event', {
+ type: 'suggestions_tool',
tool: block.name,
input: block.input,
});
}
}
- } else if (msg.type === "result" && msg.subtype === "success") {
+ } else if (msg.type === 'result' && msg.subtype === 'success') {
// Check for structured output
const resultMsg = msg as any;
if (resultMsg.structured_output) {
structuredOutput = resultMsg.structured_output as {
suggestions: Array<Record<string, unknown>>;
};
- logger.debug("Received structured output:", structuredOutput);
+ logger.debug('Received structured output:', structuredOutput);
}
- } else if (msg.type === "result") {
+ } else if (msg.type === 'result') {
const resultMsg = msg as any;
- if (resultMsg.subtype === "error_max_structured_output_retries") {
- logger.error("Failed to produce valid structured output after retries");
- throw new Error("Could not produce valid suggestions output");
- } else if (resultMsg.subtype === "error_max_turns") {
- logger.error("Hit max turns limit before completing suggestions generation");
+ if (resultMsg.subtype === 'error_max_structured_output_retries') {
+ logger.error('Failed to produce valid structured output after retries');
+ throw new Error('Could not produce valid suggestions output');
+ } else if (resultMsg.subtype === 'error_max_turns') {
+ logger.error('Hit max turns limit before completing suggestions generation');
logger.warn(`Response text length: ${responseText.length} chars`);
// Still try to parse what we have
}
@@ -132,49 +129,44 @@ The response will be automatically formatted as structured JSON.`;
try {
if (structuredOutput && structuredOutput.suggestions) {
// Use structured output directly
- events.emit("suggestions:event", {
- type: "suggestions_complete",
- suggestions: structuredOutput.suggestions.map(
- (s: Record<string, unknown>, i: number) => ({
- ...s,
- id: s.id || `suggestion-${Date.now()}-${i}`,
- })
- ),
+ events.emit('suggestions:event', {
+ type: 'suggestions_complete',
+ suggestions: structuredOutput.suggestions.map((s: Record<string, unknown>, i: number) => ({
+ ...s,
+ id: s.id || `suggestion-${Date.now()}-${i}`,
+ })),
});
} else {
// Fallback: try to parse from text (for backwards compatibility)
- logger.warn("No structured output received, attempting to parse from text");
+ logger.warn('No structured output received, attempting to parse from text');
const jsonMatch = responseText.match(/\{[\s\S]*"suggestions"[\s\S]*\}/);
if (jsonMatch) {
const parsed = JSON.parse(jsonMatch[0]);
- events.emit("suggestions:event", {
- type: "suggestions_complete",
- suggestions: parsed.suggestions.map(
- (s: Record<string, unknown>, i: number) => ({
- ...s,
- id: s.id || `suggestion-${Date.now()}-${i}`,
- })
- ),
+ events.emit('suggestions:event', {
+ type: 'suggestions_complete',
+ suggestions: parsed.suggestions.map((s: Record<string, unknown>, i: number) => ({
+ ...s,
+ id: s.id || `suggestion-${Date.now()}-${i}`,
+ })),
});
} else {
- throw new Error("No valid JSON found in response");
+ throw new Error('No valid JSON found in response');
}
}
} catch (error) {
// Log the parsing error for debugging
- logger.error("Failed to parse suggestions JSON from AI response:", error);
+ logger.error('Failed to parse suggestions JSON from AI response:', error);
// Return generic suggestions if parsing fails
- events.emit("suggestions:event", {
- type: "suggestions_complete",
+ events.emit('suggestions:event', {
+ type: 'suggestions_complete',
suggestions: [
{
id: `suggestion-${Date.now()}-0`,
- category: "Analysis",
- description: "Review the AI analysis output for insights",
- steps: ["Review the generated analysis"],
+ category: 'Analysis',
+ description: 'Review the AI analysis output for insights',
+ steps: ['Review the generated analysis'],
priority: 1,
- reasoning:
- "The AI provided analysis but suggestions need manual review",
+ reasoning: 'The AI provided analysis but suggestions need manual review',
},
],
});
diff --git a/apps/server/src/routes/suggestions/index.ts b/apps/server/src/routes/suggestions/index.ts
index a4b2ec20a..2ea6f9ae6 100644
--- a/apps/server/src/routes/suggestions/index.ts
+++ b/apps/server/src/routes/suggestions/index.ts
@@ -2,19 +2,19 @@
* Suggestions routes - HTTP API for AI-powered feature suggestions
*/
-import { Router } from "express";
-import type { EventEmitter } from "../../lib/events.js";
-import { validatePathParams } from "../../middleware/validate-paths.js";
-import { createGenerateHandler } from "./routes/generate.js";
-import { createStopHandler } from "./routes/stop.js";
-import { createStatusHandler } from "./routes/status.js";
+import { Router } from 'express';
+import type { EventEmitter } from '../../lib/events.js';
+import { validatePathParams } from '../../middleware/validate-paths.js';
+import { createGenerateHandler } from './routes/generate.js';
+import { createStopHandler } from './routes/stop.js';
+import { createStatusHandler } from './routes/status.js';
export function createSuggestionsRoutes(events: EventEmitter): Router {
const router = Router();
- router.post("/generate", validatePathParams("projectPath"), createGenerateHandler(events));
- router.post("/stop", createStopHandler());
- router.get("/status", createStatusHandler());
+ router.post('/generate', validatePathParams('projectPath'), createGenerateHandler(events));
+ router.post('/stop', createStopHandler());
+ router.get('/status', createStatusHandler());
return router;
}
diff --git a/apps/server/src/routes/suggestions/routes/generate.ts b/apps/server/src/routes/suggestions/routes/generate.ts
index 6a027a055..939e0cde6 100644
--- a/apps/server/src/routes/suggestions/routes/generate.ts
+++ b/apps/server/src/routes/suggestions/routes/generate.ts
@@ -2,29 +2,24 @@
* POST /generate endpoint - Generate suggestions
*/
-import type { Request, Response } from "express";
-import type { EventEmitter } from "../../../lib/events.js";
-import { createLogger } from "@automaker/utils";
-import {
- getSuggestionsStatus,
- setRunningState,
- getErrorMessage,
- logError,
-} from "../common.js";
-import { generateSuggestions } from "../generate-suggestions.js";
-
-const logger = createLogger("Suggestions");
+import type { Request, Response } from 'express';
+import type { EventEmitter } from '../../../lib/events.js';
+import { createLogger } from '@automaker/utils';
+import { getSuggestionsStatus, setRunningState, getErrorMessage, logError } from '../common.js';
+import { generateSuggestions } from '../generate-suggestions.js';
+
+const logger = createLogger('Suggestions');
export function createGenerateHandler(events: EventEmitter) {
return async (req: Request, res: Response): Promise<void> => {
try {
- const { projectPath, suggestionType = "features" } = req.body as {
+ const { projectPath, suggestionType = 'features' } = req.body as {
projectPath: string;
suggestionType?: string;
};
if (!projectPath) {
- res.status(400).json({ success: false, error: "projectPath required" });
+ res.status(400).json({ success: false, error: 'projectPath required' });
return;
}
@@ -32,7 +27,7 @@ export function createGenerateHandler(events: EventEmitter) {
if (isRunning) {
res.json({
success: false,
- error: "Suggestions generation is already running",
+ error: 'Suggestions generation is already running',
});
return;
}
@@ -44,9 +39,9 @@ export function createGenerateHandler(events: EventEmitter) {
// Start generation in background
generateSuggestions(projectPath, suggestionType, events, abortController)
.catch((error) => {
- logError(error, "Generate suggestions failed (background)");
- events.emit("suggestions:event", {
- type: "suggestions_error",
+ logError(error, 'Generate suggestions failed (background)');
+ events.emit('suggestions:event', {
+ type: 'suggestions_error',
error: getErrorMessage(error),
});
})
@@ -56,7 +51,7 @@ export function createGenerateHandler(events: EventEmitter) {
res.json({ success: true });
} catch (error) {
- logError(error, "Generate suggestions failed");
+ logError(error, 'Generate suggestions failed');
res.status(500).json({ success: false, error: getErrorMessage(error) });
}
};
diff --git a/apps/server/src/routes/suggestions/routes/status.ts b/apps/server/src/routes/suggestions/routes/status.ts
index d62dfa17d..eb135e062 100644
--- a/apps/server/src/routes/suggestions/routes/status.ts
+++ b/apps/server/src/routes/suggestions/routes/status.ts
@@ -2,8 +2,8 @@
* GET /status endpoint - Get status
*/
-import type { Request, Response } from "express";
-import { getSuggestionsStatus, getErrorMessage, logError } from "../common.js";
+import type { Request, Response } from 'express';
+import { getSuggestionsStatus, getErrorMessage, logError } from '../common.js';
export function createStatusHandler() {
return async (_req: Request, res: Response): Promise<void> => {
@@ -11,7 +11,7 @@ export function createStatusHandler() {
const { isRunning } = getSuggestionsStatus();
res.json({ success: true, isRunning });
} catch (error) {
- logError(error, "Get status failed");
+ logError(error, 'Get status failed');
res.status(500).json({ success: false, error: getErrorMessage(error) });
}
};
diff --git a/apps/server/src/routes/suggestions/routes/stop.ts b/apps/server/src/routes/suggestions/routes/stop.ts
index 3a18a0bea..f9e01fb65 100644
--- a/apps/server/src/routes/suggestions/routes/stop.ts
+++ b/apps/server/src/routes/suggestions/routes/stop.ts
@@ -2,13 +2,8 @@
* POST /stop endpoint - Stop suggestions generation
*/
-import type { Request, Response } from "express";
-import {
- getSuggestionsStatus,
- setRunningState,
- getErrorMessage,
- logError,
-} from "../common.js";
+import type { Request, Response } from 'express';
+import { getSuggestionsStatus, setRunningState, getErrorMessage, logError } from '../common.js';
export function createStopHandler() {
return async (_req: Request, res: Response): Promise<void> => {
@@ -20,7 +15,7 @@ export function createStopHandler() {
setRunningState(false, null);
res.json({ success: true });
} catch (error) {
- logError(error, "Stop suggestions failed");
+ logError(error, 'Stop suggestions failed');
res.status(500).json({ success: false, error: getErrorMessage(error) });
}
};
diff --git a/apps/server/src/routes/templates/common.ts b/apps/server/src/routes/templates/common.ts
index 4ffb9e8be..d0ee96dda 100644
--- a/apps/server/src/routes/templates/common.ts
+++ b/apps/server/src/routes/templates/common.ts
@@ -2,13 +2,10 @@
* Common utilities for templates routes
*/
-import { createLogger } from "@automaker/utils";
-import {
- getErrorMessage as getErrorMessageShared,
- createLogError,
-} from "../common.js";
+import { createLogger } from '@automaker/utils';
+import { getErrorMessage as getErrorMessageShared, createLogError } from '../common.js';
-export const logger = createLogger("Templates");
+export const logger = createLogger('Templates');
// Re-export shared utilities
export { getErrorMessageShared as getErrorMessage };
diff --git a/apps/server/src/routes/templates/index.ts b/apps/server/src/routes/templates/index.ts
index 4e7462fe6..38eb0270f 100644
--- a/apps/server/src/routes/templates/index.ts
+++ b/apps/server/src/routes/templates/index.ts
@@ -3,13 +3,13 @@
* Provides API for cloning GitHub starter templates
*/
-import { Router } from "express";
-import { createCloneHandler } from "./routes/clone.js";
+import { Router } from 'express';
+import { createCloneHandler } from './routes/clone.js';
export function createTemplatesRoutes(): Router {
const router = Router();
- router.post("/clone", createCloneHandler());
+ router.post('/clone', createCloneHandler());
return router;
}
diff --git a/apps/server/src/routes/terminal/common.ts b/apps/server/src/routes/terminal/common.ts
index eccde7563..6121e3453 100644
--- a/apps/server/src/routes/terminal/common.ts
+++ b/apps/server/src/routes/terminal/common.ts
@@ -2,11 +2,12 @@
* Common utilities and state for terminal routes
*/
-import { createLogger } from "@automaker/utils";
-import type { Request, Response, NextFunction } from "express";
-import { getTerminalService } from "../../services/terminal-service.js";
+import { randomBytes } from 'crypto';
+import { createLogger } from '@automaker/utils';
+import type { Request, Response, NextFunction } from 'express';
+import { getTerminalService } from '../../services/terminal-service.js';
-const logger = createLogger("Terminal");
+const logger = createLogger('Terminal');
// Read env variables lazily to ensure dotenv has loaded them
function getTerminalPassword(): string | undefined {
@@ -14,21 +15,17 @@ function getTerminalPassword(): string | undefined {
}
function getTerminalEnabledConfig(): boolean {
- return process.env.TERMINAL_ENABLED !== "false"; // Enabled by default
+ return process.env.TERMINAL_ENABLED !== 'false'; // Enabled by default
}
// In-memory session tokens (would use Redis in production) - private
-const validTokens: Map<string, { createdAt: Date; expiresAt: Date }> =
- new Map();
+const validTokens: Map<string, { createdAt: Date; expiresAt: Date }> = new Map();
const TOKEN_EXPIRY_MS = 24 * 60 * 60 * 1000; // 24 hours
/**
* Add a token to the valid tokens map
*/
-export function addToken(
- token: string,
- data: { createdAt: Date; expiresAt: Date }
-): void {
+export function addToken(token: string, data: { createdAt: Date; expiresAt: Date }): void {
validTokens.set(token, data);
}
@@ -42,19 +39,15 @@ export function deleteToken(token: string): void {
/**
* Get token data for a given token
*/
-export function getTokenData(
- token: string
-): { createdAt: Date; expiresAt: Date } | undefined {
+export function getTokenData(token: string): { createdAt: Date; expiresAt: Date } | undefined {
return validTokens.get(token);
}
/**
- * Generate a secure random token
+ * Generate a cryptographically secure random token
*/
export function generateToken(): string {
- return `term-${Date.now()}-${Math.random()
- .toString(36)
- .substr(2, 15)}${Math.random().toString(36).substr(2, 15)}`;
+ return `term-${randomBytes(32).toString('base64url')}`;
}
/**
@@ -107,16 +100,12 @@ export function isTerminalEnabled(): boolean {
* Terminal authentication middleware
* Checks for valid session token if password is configured
*/
-export function terminalAuthMiddleware(
- req: Request,
- res: Response,
- next: NextFunction
-): void {
+export function terminalAuthMiddleware(req: Request, res: Response, next: NextFunction): void {
// Check if terminal is enabled
if (!getTerminalEnabledConfig()) {
res.status(403).json({
success: false,
- error: "Terminal access is disabled",
+ error: 'Terminal access is disabled',
});
return;
}
@@ -128,13 +117,12 @@ export function terminalAuthMiddleware(
}
// Check for session token
- const token =
- (req.headers["x-terminal-token"] as string) || (req.query.token as string);
+ const token = (req.headers['x-terminal-token'] as string) || (req.query.token as string);
if (!validateTerminalToken(token)) {
res.status(401).json({
success: false,
- error: "Terminal authentication required",
+ error: 'Terminal authentication required',
passwordRequired: true,
});
return;
@@ -155,10 +143,7 @@ export function getTokenExpiryMs(): number {
return TOKEN_EXPIRY_MS;
}
-import {
- getErrorMessage as getErrorMessageShared,
- createLogError,
-} from "../common.js";
+import { getErrorMessage as getErrorMessageShared, createLogError } from '../common.js';
// Re-export shared utilities
export { getErrorMessageShared as getErrorMessage };
diff --git a/apps/server/src/routes/terminal/index.ts b/apps/server/src/routes/terminal/index.ts
index 7ee0e978e..380801e52 100644
--- a/apps/server/src/routes/terminal/index.ts
+++ b/apps/server/src/routes/terminal/index.ts
@@ -5,22 +5,20 @@
* WebSocket connections for real-time I/O are handled separately in index.ts.
*/
-import { Router } from "express";
+import { Router } from 'express';
import {
terminalAuthMiddleware,
validateTerminalToken,
isTerminalEnabled,
isTerminalPasswordRequired,
-} from "./common.js";
-import { createStatusHandler } from "./routes/status.js";
-import { createAuthHandler } from "./routes/auth.js";
-import { createLogoutHandler } from "./routes/logout.js";
-import {
- createSessionsListHandler,
- createSessionsCreateHandler,
-} from "./routes/sessions.js";
-import { createSessionDeleteHandler } from "./routes/session-delete.js";
-import { createSessionResizeHandler } from "./routes/session-resize.js";
+} from './common.js';
+import { createStatusHandler } from './routes/status.js';
+import { createAuthHandler } from './routes/auth.js';
+import { createLogoutHandler } from './routes/logout.js';
+import { createSessionsListHandler, createSessionsCreateHandler } from './routes/sessions.js';
+import { createSessionDeleteHandler } from './routes/session-delete.js';
+import { createSessionResizeHandler } from './routes/session-resize.js';
+import { createSettingsGetHandler, createSettingsUpdateHandler } from './routes/settings.js';
// Re-export for use in main index.ts
export { validateTerminalToken, isTerminalEnabled, isTerminalPasswordRequired };
@@ -28,17 +26,19 @@ export { validateTerminalToken, isTerminalEnabled, isTerminalPasswordRequired };
export function createTerminalRoutes(): Router {
const router = Router();
- router.get("/status", createStatusHandler());
- router.post("/auth", createAuthHandler());
- router.post("/logout", createLogoutHandler());
+ router.get('/status', createStatusHandler());
+ router.post('/auth', createAuthHandler());
+ router.post('/logout', createLogoutHandler());
// Apply terminal auth middleware to all routes below
router.use(terminalAuthMiddleware);
- router.get("/sessions", createSessionsListHandler());
- router.post("/sessions", createSessionsCreateHandler());
- router.delete("/sessions/:id", createSessionDeleteHandler());
- router.post("/sessions/:id/resize", createSessionResizeHandler());
+ router.get('/sessions', createSessionsListHandler());
+ router.post('/sessions', createSessionsCreateHandler());
+ router.delete('/sessions/:id', createSessionDeleteHandler());
+ router.post('/sessions/:id/resize', createSessionResizeHandler());
+ router.get('/settings', createSettingsGetHandler());
+ router.put('/settings', createSettingsUpdateHandler());
return router;
}
diff --git a/apps/server/src/routes/terminal/routes/auth.ts b/apps/server/src/routes/terminal/routes/auth.ts
index 234d45729..1d6156bd1 100644
--- a/apps/server/src/routes/terminal/routes/auth.ts
+++ b/apps/server/src/routes/terminal/routes/auth.ts
@@ -2,7 +2,7 @@
* POST /auth endpoint - Authenticate with password to get a session token
*/
-import type { Request, Response } from "express";
+import type { Request, Response } from 'express';
import {
getTerminalEnabledConfigValue,
getTerminalPasswordConfig,
@@ -10,14 +10,14 @@ import {
addToken,
getTokenExpiryMs,
getErrorMessage,
-} from "../common.js";
+} from '../common.js';
export function createAuthHandler() {
return (req: Request, res: Response): void => {
if (!getTerminalEnabledConfigValue()) {
res.status(403).json({
success: false,
- error: "Terminal access is disabled",
+ error: 'Terminal access is disabled',
});
return;
}
@@ -41,7 +41,7 @@ export function createAuthHandler() {
if (!password || password !== terminalPassword) {
res.status(401).json({
success: false,
- error: "Invalid password",
+ error: 'Invalid password',
});
return;
}
diff --git a/apps/server/src/routes/terminal/routes/logout.ts b/apps/server/src/routes/terminal/routes/logout.ts
index 9e3c8fa3e..2af85713c 100644
--- a/apps/server/src/routes/terminal/routes/logout.ts
+++ b/apps/server/src/routes/terminal/routes/logout.ts
@@ -2,12 +2,12 @@
* POST /logout endpoint - Invalidate a session token
*/
-import type { Request, Response } from "express";
-import { deleteToken } from "../common.js";
+import type { Request, Response } from 'express';
+import { deleteToken } from '../common.js';
export function createLogoutHandler() {
return (req: Request, res: Response): void => {
- const token = (req.headers["x-terminal-token"] as string) || req.body.token;
+ const token = (req.headers['x-terminal-token'] as string) || req.body.token;
if (token) {
deleteToken(token);
diff --git a/apps/server/src/routes/terminal/routes/session-delete.ts b/apps/server/src/routes/terminal/routes/session-delete.ts
index aa3f96cb1..dec3c6943 100644
--- a/apps/server/src/routes/terminal/routes/session-delete.ts
+++ b/apps/server/src/routes/terminal/routes/session-delete.ts
@@ -2,8 +2,8 @@
* DELETE /sessions/:id endpoint - Kill a terminal session
*/
-import type { Request, Response } from "express";
-import { getTerminalService } from "../../../services/terminal-service.js";
+import type { Request, Response } from 'express';
+import { getTerminalService } from '../../../services/terminal-service.js';
export function createSessionDeleteHandler() {
return (req: Request, res: Response): void => {
@@ -14,7 +14,7 @@ export function createSessionDeleteHandler() {
if (!killed) {
res.status(404).json({
success: false,
- error: "Session not found",
+ error: 'Session not found',
});
return;
}
diff --git a/apps/server/src/routes/terminal/routes/session-resize.ts b/apps/server/src/routes/terminal/routes/session-resize.ts
index a6a8a70d3..41db97630 100644
--- a/apps/server/src/routes/terminal/routes/session-resize.ts
+++ b/apps/server/src/routes/terminal/routes/session-resize.ts
@@ -2,8 +2,8 @@
* POST /sessions/:id/resize endpoint - Resize a terminal session
*/
-import type { Request, Response } from "express";
-import { getTerminalService } from "../../../services/terminal-service.js";
+import type { Request, Response } from 'express';
+import { getTerminalService } from '../../../services/terminal-service.js';
export function createSessionResizeHandler() {
return (req: Request, res: Response): void => {
@@ -14,7 +14,7 @@ export function createSessionResizeHandler() {
if (!cols || !rows) {
res.status(400).json({
success: false,
- error: "cols and rows are required",
+ error: 'cols and rows are required',
});
return;
}
@@ -24,7 +24,7 @@ export function createSessionResizeHandler() {
if (!resized) {
res.status(404).json({
success: false,
- error: "Session not found",
+ error: 'Session not found',
});
return;
}
diff --git a/apps/server/src/routes/terminal/routes/sessions.ts b/apps/server/src/routes/terminal/routes/sessions.ts
index c9d6133c3..a7f425093 100644
--- a/apps/server/src/routes/terminal/routes/sessions.ts
+++ b/apps/server/src/routes/terminal/routes/sessions.ts
@@ -3,12 +3,12 @@
* POST /sessions endpoint - Create a new terminal session
*/
-import type { Request, Response } from "express";
-import { getTerminalService } from "../../../services/terminal-service.js";
-import { getErrorMessage, logError } from "../common.js";
-import { createLogger } from "@automaker/utils";
+import type { Request, Response } from 'express';
+import { getTerminalService } from '../../../services/terminal-service.js';
+import { getErrorMessage, logError } from '../common.js';
+import { createLogger } from '@automaker/utils';
-const logger = createLogger("Terminal");
+const logger = createLogger('Terminal');
export function createSessionsListHandler() {
return (_req: Request, res: Response): void => {
@@ -34,6 +34,21 @@ export function createSessionsCreateHandler() {
shell,
});
+ // Check if session creation was refused due to limit
+ if (!session) {
+ const maxSessions = terminalService.getMaxSessions();
+ const currentSessions = terminalService.getSessionCount();
+ logger.warn(`Session limit reached: ${currentSessions}/${maxSessions}`);
+ res.status(429).json({
+ success: false,
+ error: 'Maximum terminal sessions reached',
+ details: `Server limit is ${maxSessions} concurrent sessions. Please close unused terminals.`,
+ currentSessions,
+ maxSessions,
+ });
+ return;
+ }
+
res.json({
success: true,
data: {
@@ -44,10 +59,10 @@ export function createSessionsCreateHandler() {
},
});
} catch (error) {
- logError(error, "Create terminal session failed");
+ logError(error, 'Create terminal session failed');
res.status(500).json({
success: false,
- error: "Failed to create terminal session",
+ error: 'Failed to create terminal session',
details: getErrorMessage(error),
});
}
diff --git a/apps/server/src/routes/terminal/routes/settings.ts b/apps/server/src/routes/terminal/routes/settings.ts
new file mode 100644
index 000000000..9d8146065
--- /dev/null
+++ b/apps/server/src/routes/terminal/routes/settings.ts
@@ -0,0 +1,83 @@
+/**
+ * GET/PUT /settings endpoint - Get/Update terminal settings
+ */
+
+import type { Request, Response } from 'express';
+import {
+ getTerminalService,
+ MIN_MAX_SESSIONS,
+ MAX_MAX_SESSIONS,
+} from '../../../services/terminal-service.js';
+import { getErrorMessage, logError } from '../common.js';
+
+export function createSettingsGetHandler() {
+ return (_req: Request, res: Response): void => {
+ try {
+ const terminalService = getTerminalService();
+ res.json({
+ success: true,
+ data: {
+ maxSessions: terminalService.getMaxSessions(),
+ currentSessions: terminalService.getSessionCount(),
+ },
+ });
+ } catch (error) {
+ logError(error, 'Get terminal settings failed');
+ res.status(500).json({
+ success: false,
+ error: 'Failed to get terminal settings',
+ details: getErrorMessage(error),
+ });
+ }
+ };
+}
+
+export function createSettingsUpdateHandler() {
+ return (req: Request, res: Response): void => {
+ try {
+ const terminalService = getTerminalService();
+ const { maxSessions } = req.body;
+
+ // Validate maxSessions if provided
+ if (maxSessions !== undefined) {
+ if (typeof maxSessions !== 'number') {
+ res.status(400).json({
+ success: false,
+ error: 'maxSessions must be a number',
+ });
+ return;
+ }
+ if (!Number.isInteger(maxSessions)) {
+ res.status(400).json({
+ success: false,
+ error: 'maxSessions must be an integer',
+ });
+ return;
+ }
+ if (maxSessions < MIN_MAX_SESSIONS || maxSessions > MAX_MAX_SESSIONS) {
+ res.status(400).json({
+ success: false,
+ error: `maxSessions must be between ${MIN_MAX_SESSIONS} and ${MAX_MAX_SESSIONS}`,
+ });
+ return;
+ }
+ terminalService.setMaxSessions(maxSessions);
+ }
+
+ res.json({
+ success: true,
+ data: {
+ maxSessions: terminalService.getMaxSessions(),
+ currentSessions: terminalService.getSessionCount(),
+ },
+ });
+ } catch (error) {
+ logError(error, 'Update terminal settings failed');
+ res.status(500).json({
+ success: false,
+ error: 'Failed to update terminal settings',
+ details: getErrorMessage(error),
+ });
+ }
+ };
+}
diff --git a/apps/server/src/routes/terminal/routes/status.ts b/apps/server/src/routes/terminal/routes/status.ts
index 014c482ae..670b405c3 100644
--- a/apps/server/src/routes/terminal/routes/status.ts
+++ b/apps/server/src/routes/terminal/routes/status.ts
@@ -2,12 +2,9 @@
* GET /status endpoint - Get terminal status
*/
-import type { Request, Response } from "express";
-import { getTerminalService } from "../../../services/terminal-service.js";
-import {
- getTerminalEnabledConfigValue,
- isTerminalPasswordRequired,
-} from "../common.js";
+import type { Request, Response } from 'express';
+import { getTerminalService } from '../../../services/terminal-service.js';
+import { getTerminalEnabledConfigValue, isTerminalPasswordRequired } from '../common.js';
export function createStatusHandler() {
return (_req: Request, res: Response): void => {
diff --git a/apps/server/src/routes/workspace/common.ts b/apps/server/src/routes/workspace/common.ts
index 10105baf8..bec656dda 100644
--- a/apps/server/src/routes/workspace/common.ts
+++ b/apps/server/src/routes/workspace/common.ts
@@ -2,13 +2,10 @@
* Common utilities for workspace routes
*/
-import { createLogger } from "@automaker/utils";
-import {
- getErrorMessage as getErrorMessageShared,
- createLogError,
-} from "../common.js";
+import { createLogger } from '@automaker/utils';
+import { getErrorMessage as getErrorMessageShared, createLogError } from '../common.js';
-const logger = createLogger("Workspace");
+const logger = createLogger('Workspace');
// Re-export shared utilities
export { getErrorMessageShared as getErrorMessage };
diff --git a/apps/server/src/routes/workspace/index.ts b/apps/server/src/routes/workspace/index.ts
index ec247a894..374249975 100644
--- a/apps/server/src/routes/workspace/index.ts
+++ b/apps/server/src/routes/workspace/index.ts
@@ -3,15 +3,15 @@
* Provides API endpoints for workspace directory management
*/
-import { Router } from "express";
-import { createConfigHandler } from "./routes/config.js";
-import { createDirectoriesHandler } from "./routes/directories.js";
+import { Router } from 'express';
+import { createConfigHandler } from './routes/config.js';
+import { createDirectoriesHandler } from './routes/directories.js';
export function createWorkspaceRoutes(): Router {
const router = Router();
- router.get("/config", createConfigHandler());
- router.get("/directories", createDirectoriesHandler());
+ router.get('/config', createConfigHandler());
+ router.get('/directories', createDirectoriesHandler());
return router;
}
diff --git a/apps/server/src/routes/worktree/index.ts b/apps/server/src/routes/worktree/index.ts
index 6d81c8540..a3780b45d 100644
--- a/apps/server/src/routes/worktree/index.ts
+++ b/apps/server/src/routes/worktree/index.ts
@@ -2,60 +2,64 @@
* Worktree routes - HTTP API for git worktree operations
*/
-import { Router } from "express";
-import { validatePathParams } from "../../middleware/validate-paths.js";
-import { createInfoHandler } from "./routes/info.js";
-import { createStatusHandler } from "./routes/status.js";
-import { createListHandler } from "./routes/list.js";
-import { createDiffsHandler } from "./routes/diffs.js";
-import { createFileDiffHandler } from "./routes/file-diff.js";
-import { createMergeHandler } from "./routes/merge.js";
-import { createCreateHandler } from "./routes/create.js";
-import { createDeleteHandler } from "./routes/delete.js";
-import { createCreatePRHandler } from "./routes/create-pr.js";
-import { createPRInfoHandler } from "./routes/pr-info.js";
-import { createCommitHandler } from "./routes/commit.js";
-import { createPushHandler } from "./routes/push.js";
-import { createPullHandler } from "./routes/pull.js";
-import { createCheckoutBranchHandler } from "./routes/checkout-branch.js";
-import { createListBranchesHandler } from "./routes/list-branches.js";
-import { createSwitchBranchHandler } from "./routes/switch-branch.js";
+import { Router } from 'express';
+import { validatePathParams } from '../../middleware/validate-paths.js';
+import { createInfoHandler } from './routes/info.js';
+import { createStatusHandler } from './routes/status.js';
+import { createListHandler } from './routes/list.js';
+import { createDiffsHandler } from './routes/diffs.js';
+import { createFileDiffHandler } from './routes/file-diff.js';
+import { createMergeHandler } from './routes/merge.js';
+import { createCreateHandler } from './routes/create.js';
+import { createDeleteHandler } from './routes/delete.js';
+import { createCreatePRHandler } from './routes/create-pr.js';
+import { createPRInfoHandler } from './routes/pr-info.js';
+import { createCommitHandler } from './routes/commit.js';
+import { createPushHandler } from './routes/push.js';
+import { createPullHandler } from './routes/pull.js';
+import { createCheckoutBranchHandler } from './routes/checkout-branch.js';
+import { createListBranchesHandler } from './routes/list-branches.js';
+import { createSwitchBranchHandler } from './routes/switch-branch.js';
import {
createOpenInEditorHandler,
createGetDefaultEditorHandler,
-} from "./routes/open-in-editor.js";
-import { createInitGitHandler } from "./routes/init-git.js";
-import { createMigrateHandler } from "./routes/migrate.js";
-import { createStartDevHandler } from "./routes/start-dev.js";
-import { createStopDevHandler } from "./routes/stop-dev.js";
-import { createListDevServersHandler } from "./routes/list-dev-servers.js";
+} from './routes/open-in-editor.js';
+import { createInitGitHandler } from './routes/init-git.js';
+import { createMigrateHandler } from './routes/migrate.js';
+import { createStartDevHandler } from './routes/start-dev.js';
+import { createStopDevHandler } from './routes/stop-dev.js';
+import { createListDevServersHandler } from './routes/list-dev-servers.js';
export function createWorktreeRoutes(): Router {
const router = Router();
- router.post("/info", validatePathParams("projectPath"), createInfoHandler());
- router.post("/status", validatePathParams("projectPath"), createStatusHandler());
- router.post("/list", createListHandler());
- router.post("/diffs", validatePathParams("projectPath"), createDiffsHandler());
- router.post("/file-diff", validatePathParams("projectPath", "filePath"), createFileDiffHandler());
- router.post("/merge", validatePathParams("projectPath"), createMergeHandler());
- router.post("/create", validatePathParams("projectPath"), createCreateHandler());
- router.post("/delete", validatePathParams("projectPath", "worktreePath"), createDeleteHandler());
- router.post("/create-pr", createCreatePRHandler());
- router.post("/pr-info", createPRInfoHandler());
- router.post("/commit", validatePathParams("worktreePath"), createCommitHandler());
- router.post("/push", validatePathParams("worktreePath"), createPushHandler());
- router.post("/pull", validatePathParams("worktreePath"), createPullHandler());
- router.post("/checkout-branch", createCheckoutBranchHandler());
- router.post("/list-branches", validatePathParams("worktreePath"), createListBranchesHandler());
- router.post("/switch-branch", createSwitchBranchHandler());
- router.post("/open-in-editor", validatePathParams("worktreePath"), createOpenInEditorHandler());
- router.get("/default-editor", createGetDefaultEditorHandler());
- router.post("/init-git", validatePathParams("projectPath"), createInitGitHandler());
- router.post("/migrate", createMigrateHandler());
- router.post("/start-dev", validatePathParams("projectPath", "worktreePath"), createStartDevHandler());
- router.post("/stop-dev", createStopDevHandler());
- router.post("/list-dev-servers", createListDevServersHandler());
+ router.post('/info', validatePathParams('projectPath'), createInfoHandler());
+ router.post('/status', validatePathParams('projectPath'), createStatusHandler());
+ router.post('/list', createListHandler());
+ router.post('/diffs', validatePathParams('projectPath'), createDiffsHandler());
+ router.post('/file-diff', validatePathParams('projectPath', 'filePath'), createFileDiffHandler());
+ router.post('/merge', validatePathParams('projectPath'), createMergeHandler());
+ router.post('/create', validatePathParams('projectPath'), createCreateHandler());
+ router.post('/delete', validatePathParams('projectPath', 'worktreePath'), createDeleteHandler());
+ router.post('/create-pr', createCreatePRHandler());
+ router.post('/pr-info', createPRInfoHandler());
+ router.post('/commit', validatePathParams('worktreePath'), createCommitHandler());
+ router.post('/push', validatePathParams('worktreePath'), createPushHandler());
+ router.post('/pull', validatePathParams('worktreePath'), createPullHandler());
+ router.post('/checkout-branch', createCheckoutBranchHandler());
+ router.post('/list-branches', validatePathParams('worktreePath'), createListBranchesHandler());
+ router.post('/switch-branch', createSwitchBranchHandler());
+ router.post('/open-in-editor', validatePathParams('worktreePath'), createOpenInEditorHandler());
+ router.get('/default-editor', createGetDefaultEditorHandler());
+ router.post('/init-git', validatePathParams('projectPath'), createInitGitHandler());
+ router.post('/migrate', createMigrateHandler());
+ router.post(
+ '/start-dev',
+ validatePathParams('projectPath', 'worktreePath'),
+ createStartDevHandler()
+ );
+ router.post('/stop-dev', createStopDevHandler());
+ router.post('/list-dev-servers', createListDevServersHandler());
return router;
}
diff --git a/apps/server/src/routes/worktree/routes/checkout-branch.ts b/apps/server/src/routes/worktree/routes/checkout-branch.ts
index 50254a69b..ef8ddc475 100644
--- a/apps/server/src/routes/worktree/routes/checkout-branch.ts
+++ b/apps/server/src/routes/worktree/routes/checkout-branch.ts
@@ -2,10 +2,10 @@
* POST /checkout-branch endpoint - Create and checkout a new branch
*/
-import type { Request, Response } from "express";
-import { exec } from "child_process";
-import { promisify } from "util";
-import { getErrorMessage, logError } from "../common.js";
+import type { Request, Response } from 'express';
+import { exec } from 'child_process';
+import { promisify } from 'util';
+import { getErrorMessage, logError } from '../common.js';
const execAsync = promisify(exec);
@@ -20,7 +20,7 @@ export function createCheckoutBranchHandler() {
if (!worktreePath) {
res.status(400).json({
success: false,
- error: "worktreePath required",
+ error: 'worktreePath required',
});
return;
}
@@ -28,7 +28,7 @@ export function createCheckoutBranchHandler() {
if (!branchName) {
res.status(400).json({
success: false,
- error: "branchName required",
+ error: 'branchName required',
});
return;
}
@@ -38,16 +38,15 @@ export function createCheckoutBranchHandler() {
if (invalidChars.test(branchName)) {
res.status(400).json({
success: false,
- error: "Branch name contains invalid characters",
+ error: 'Branch name contains invalid characters',
});
return;
}
// Get current branch for reference
- const { stdout: currentBranchOutput } = await execAsync(
- "git rev-parse --abbrev-ref HEAD",
- { cwd: worktreePath }
- );
+ const { stdout: currentBranchOutput } = await execAsync('git rev-parse --abbrev-ref HEAD', {
+ cwd: worktreePath,
+ });
const currentBranch = currentBranchOutput.trim();
// Check if branch already exists
@@ -79,7 +78,7 @@ export function createCheckoutBranchHandler() {
},
});
} catch (error) {
- logError(error, "Checkout branch failed");
+ logError(error, 'Checkout branch failed');
res.status(500).json({ success: false, error: getErrorMessage(error) });
}
};
diff --git a/apps/server/src/routes/worktree/routes/commit.ts b/apps/server/src/routes/worktree/routes/commit.ts
index 273c79645..6cdc39c1d 100644
--- a/apps/server/src/routes/worktree/routes/commit.ts
+++ b/apps/server/src/routes/worktree/routes/commit.ts
@@ -2,10 +2,10 @@
* POST /commit endpoint - Commit changes in a worktree
*/
-import type { Request, Response } from "express";
-import { exec } from "child_process";
-import { promisify } from "util";
-import { getErrorMessage, logError } from "../common.js";
+import type { Request, Response } from 'express';
+import { exec } from 'child_process';
+import { promisify } from 'util';
+import { getErrorMessage, logError } from '../common.js';
const execAsync = promisify(exec);
@@ -20,13 +20,13 @@ export function createCommitHandler() {
if (!worktreePath || !message) {
res.status(400).json({
success: false,
- error: "worktreePath and message required",
+ error: 'worktreePath and message required',
});
return;
}
// Check for uncommitted changes
- const { stdout: status } = await execAsync("git status --porcelain", {
+ const { stdout: status } = await execAsync('git status --porcelain', {
cwd: worktreePath,
});
@@ -35,14 +35,14 @@ export function createCommitHandler() {
success: true,
result: {
committed: false,
- message: "No changes to commit",
+ message: 'No changes to commit',
},
});
return;
}
// Stage all changes
- await execAsync("git add -A", { cwd: worktreePath });
+ await execAsync('git add -A', { cwd: worktreePath });
// Create commit
await execAsync(`git commit -m "${message.replace(/"/g, '\\"')}"`, {
@@ -50,16 +50,15 @@ export function createCommitHandler() {
});
// Get commit hash
- const { stdout: hashOutput } = await execAsync("git rev-parse HEAD", {
+ const { stdout: hashOutput } = await execAsync('git rev-parse HEAD', {
cwd: worktreePath,
});
const commitHash = hashOutput.trim().substring(0, 8);
// Get branch name
- const { stdout: branchOutput } = await execAsync(
- "git rev-parse --abbrev-ref HEAD",
- { cwd: worktreePath }
- );
+ const { stdout: branchOutput } = await execAsync('git rev-parse --abbrev-ref HEAD', {
+ cwd: worktreePath,
+ });
const branchName = branchOutput.trim();
res.json({
@@ -72,7 +71,7 @@ export function createCommitHandler() {
},
});
} catch (error) {
- logError(error, "Commit worktree failed");
+ logError(error, 'Commit worktree failed');
res.status(500).json({ success: false, error: getErrorMessage(error) });
}
};
diff --git a/apps/server/src/routes/worktree/routes/create-pr.ts b/apps/server/src/routes/worktree/routes/create-pr.ts
index 488fa3b5d..1e71bfab6 100644
--- a/apps/server/src/routes/worktree/routes/create-pr.ts
+++ b/apps/server/src/routes/worktree/routes/create-pr.ts
@@ -2,7 +2,7 @@
* POST /create-pr endpoint - Commit changes and create a pull request from a worktree
*/
-import type { Request, Response } from "express";
+import type { Request, Response } from 'express';
import {
getErrorMessage,
logError,
@@ -10,26 +10,27 @@ import {
execEnv,
isValidBranchName,
isGhCliAvailable,
-} from "../common.js";
-import { updateWorktreePRInfo } from "../../../lib/worktree-metadata.js";
+} from '../common.js';
+import { updateWorktreePRInfo } from '../../../lib/worktree-metadata.js';
export function createCreatePRHandler() {
return async (req: Request, res: Response): Promise<void> => {
try {
- const { worktreePath, projectPath, commitMessage, prTitle, prBody, baseBranch, draft } = req.body as {
- worktreePath: string;
- projectPath?: string;
- commitMessage?: string;
- prTitle?: string;
- prBody?: string;
- baseBranch?: string;
- draft?: boolean;
- };
+ const { worktreePath, projectPath, commitMessage, prTitle, prBody, baseBranch, draft } =
+ req.body as {
+ worktreePath: string;
+ projectPath?: string;
+ commitMessage?: string;
+ prTitle?: string;
+ prBody?: string;
+ baseBranch?: string;
+ draft?: boolean;
+ };
if (!worktreePath) {
res.status(400).json({
success: false,
- error: "worktreePath required",
+ error: 'worktreePath required',
});
return;
}
@@ -39,23 +40,23 @@ export function createCreatePRHandler() {
const effectiveProjectPath = projectPath || worktreePath;
// Get current branch name
- const { stdout: branchOutput } = await execAsync(
- "git rev-parse --abbrev-ref HEAD",
- { cwd: worktreePath, env: execEnv }
- );
+ const { stdout: branchOutput } = await execAsync('git rev-parse --abbrev-ref HEAD', {
+ cwd: worktreePath,
+ env: execEnv,
+ });
const branchName = branchOutput.trim();
// Validate branch name for security
if (!isValidBranchName(branchName)) {
res.status(400).json({
success: false,
- error: "Invalid branch name contains unsafe characters",
+ error: 'Invalid branch name contains unsafe characters',
});
return;
}
// Check for uncommitted changes
- const { stdout: status } = await execAsync("git status --porcelain", {
+ const { stdout: status } = await execAsync('git status --porcelain', {
cwd: worktreePath,
env: execEnv,
});
@@ -67,7 +68,7 @@ export function createCreatePRHandler() {
const message = commitMessage || `Changes from ${branchName}`;
// Stage all changes
- await execAsync("git add -A", { cwd: worktreePath, env: execEnv });
+ await execAsync('git add -A', { cwd: worktreePath, env: execEnv });
// Create commit
await execAsync(`git commit -m "${message.replace(/"/g, '\\"')}"`, {
@@ -76,7 +77,7 @@ export function createCreatePRHandler() {
});
// Get commit hash
- const { stdout: hashOutput } = await execAsync("git rev-parse HEAD", {
+ const { stdout: hashOutput } = await execAsync('git rev-parse HEAD', {
cwd: worktreePath,
env: execEnv,
});
@@ -100,8 +101,8 @@ export function createCreatePRHandler() {
} catch (error2: unknown) {
// Capture push error for reporting
const err = error2 as { stderr?: string; message?: string };
- pushError = err.stderr || err.message || "Push failed";
- console.error("[CreatePR] Push failed:", pushError);
+ pushError = err.stderr || err.message || 'Push failed';
+ console.error('[CreatePR] Push failed:', pushError);
}
}
@@ -115,10 +116,10 @@ export function createCreatePRHandler() {
}
// Create PR using gh CLI or provide browser fallback
- const base = baseBranch || "main";
+ const base = baseBranch || 'main';
const title = prTitle || branchName;
const body = prBody || `Changes from branch ${branchName}`;
- const draftFlag = draft ? "--draft" : "";
+ const draftFlag = draft ? '--draft' : '';
let prUrl: string | null = null;
let prError: string | null = null;
@@ -131,7 +132,7 @@ export function createCreatePRHandler() {
let upstreamRepo: string | null = null;
let originOwner: string | null = null;
try {
- const { stdout: remotes } = await execAsync("git remote -v", {
+ const { stdout: remotes } = await execAsync('git remote -v', {
cwd: worktreePath,
env: execEnv,
});
@@ -150,15 +151,17 @@ export function createCreatePRHandler() {
}
if (!match) {
// Try HTTPS format: https://github.com/owner/repo.git
- match = line.match(/^(\w+)\s+https?:\/\/[^/]+\/([^/]+)\/([^\s]+?)(?:\.git)?\s+\(fetch\)/);
+ match = line.match(
+ /^(\w+)\s+https?:\/\/[^/]+\/([^/]+)\/([^\s]+?)(?:\.git)?\s+\(fetch\)/
+ );
}
if (match) {
const [, remoteName, owner, repo] = match;
- if (remoteName === "upstream") {
+ if (remoteName === 'upstream') {
upstreamRepo = `${owner}/${repo}`;
repoUrl = `https://github.com/${owner}/${repo}`;
- } else if (remoteName === "origin") {
+ } else if (remoteName === 'origin') {
originOwner = owner;
if (!repoUrl) {
repoUrl = `https://github.com/${owner}/${repo}`;
@@ -173,7 +176,7 @@ export function createCreatePRHandler() {
// Fallback: Try to get repo URL from git config if remote parsing failed
if (!repoUrl) {
try {
- const { stdout: originUrl } = await execAsync("git config --get remote.origin.url", {
+ const { stdout: originUrl } = await execAsync('git config --get remote.origin.url', {
cwd: worktreePath,
env: execEnv,
});
@@ -217,9 +220,11 @@ export function createCreatePRHandler() {
// This is more reliable than gh pr view as it explicitly searches by branch name
// For forks, we need to use owner:branch format for the head parameter
const headRef = upstreamRepo && originOwner ? `${originOwner}:${branchName}` : branchName;
- const repoArg = upstreamRepo ? ` --repo "${upstreamRepo}"` : "";
+ const repoArg = upstreamRepo ? ` --repo "${upstreamRepo}"` : '';
- console.log(`[CreatePR] Checking for existing PR for branch: ${branchName} (headRef: ${headRef})`);
+ console.log(
+ `[CreatePR] Checking for existing PR for branch: ${branchName} (headRef: ${headRef})`
+ );
try {
const listCmd = `gh pr list${repoArg} --head "${headRef}" --json number,title,url,state --limit 1`;
console.log(`[CreatePR] Running: ${listCmd}`);
@@ -234,7 +239,9 @@ export function createCreatePRHandler() {
if (Array.isArray(existingPrs) && existingPrs.length > 0) {
const existingPr = existingPrs[0];
// PR already exists - use it and store metadata
- console.log(`[CreatePR] PR already exists for branch ${branchName}: PR #${existingPr.number}`);
+ console.log(
+ `[CreatePR] PR already exists for branch ${branchName}: PR #${existingPr.number}`
+ );
prUrl = existingPr.url;
prNumber = existingPr.number;
prAlreadyExisted = true;
@@ -244,10 +251,12 @@ export function createCreatePRHandler() {
number: existingPr.number,
url: existingPr.url,
title: existingPr.title || title,
- state: existingPr.state || "open",
+ state: existingPr.state || 'open',
createdAt: new Date().toISOString(),
});
- console.log(`[CreatePR] Stored existing PR info for branch ${branchName}: PR #${existingPr.number}`);
+ console.log(
+ `[CreatePR] Stored existing PR info for branch ${branchName}: PR #${existingPr.number}`
+ );
} else {
console.log(`[CreatePR] No existing PR found for branch ${branchName}`);
}
@@ -293,23 +302,25 @@ export function createCreatePRHandler() {
number: prNumber,
url: prUrl,
title,
- state: draft ? "draft" : "open",
+ state: draft ? 'draft' : 'open',
createdAt: new Date().toISOString(),
});
- console.log(`[CreatePR] Stored PR info for branch ${branchName}: PR #${prNumber}`);
+ console.log(
+ `[CreatePR] Stored PR info for branch ${branchName}: PR #${prNumber}`
+ );
} catch (metadataError) {
- console.error("[CreatePR] Failed to store PR metadata:", metadataError);
+ console.error('[CreatePR] Failed to store PR metadata:', metadataError);
}
}
}
} catch (ghError: unknown) {
// gh CLI failed - check if it's "already exists" error and try to fetch the PR
const err = ghError as { stderr?: string; message?: string };
- const errorMessage = err.stderr || err.message || "PR creation failed";
+ const errorMessage = err.stderr || err.message || 'PR creation failed';
console.log(`[CreatePR] gh pr create failed: ${errorMessage}`);
// If error indicates PR already exists, try to fetch it
- if (errorMessage.toLowerCase().includes("already exists")) {
+ if (errorMessage.toLowerCase().includes('already exists')) {
console.log(`[CreatePR] PR already exists error - trying to fetch existing PR`);
try {
const { stdout: viewOutput } = await execAsync(
@@ -326,13 +337,13 @@ export function createCreatePRHandler() {
number: existingPr.number,
url: existingPr.url,
title: existingPr.title || title,
- state: existingPr.state || "open",
+ state: existingPr.state || 'open',
createdAt: new Date().toISOString(),
});
console.log(`[CreatePR] Fetched and stored existing PR: #${existingPr.number}`);
}
} catch (viewError) {
- console.error("[CreatePR] Failed to fetch existing PR:", viewError);
+ console.error('[CreatePR] Failed to fetch existing PR:', viewError);
prError = errorMessage;
}
} else {
@@ -341,7 +352,7 @@ export function createCreatePRHandler() {
}
}
} else {
- prError = "gh_cli_not_available";
+ prError = 'gh_cli_not_available';
}
// Return result with browser fallback URL
@@ -362,7 +373,7 @@ export function createCreatePRHandler() {
},
});
} catch (error) {
- logError(error, "Create PR failed");
+ logError(error, 'Create PR failed');
res.status(500).json({ success: false, error: getErrorMessage(error) });
}
};
diff --git a/apps/server/src/routes/worktree/routes/delete.ts b/apps/server/src/routes/worktree/routes/delete.ts
index 419b5418d..93857f787 100644
--- a/apps/server/src/routes/worktree/routes/delete.ts
+++ b/apps/server/src/routes/worktree/routes/delete.ts
@@ -2,11 +2,11 @@
* POST /delete endpoint - Delete a git worktree
*/
-import type { Request, Response } from "express";
-import { exec } from "child_process";
-import { promisify } from "util";
-import { isGitRepo } from "@automaker/git-utils";
-import { getErrorMessage, logError } from "../common.js";
+import type { Request, Response } from 'express';
+import { exec } from 'child_process';
+import { promisify } from 'util';
+import { isGitRepo } from '@automaker/git-utils';
+import { getErrorMessage, logError } from '../common.js';
const execAsync = promisify(exec);
@@ -22,7 +22,7 @@ export function createDeleteHandler() {
if (!projectPath || !worktreePath) {
res.status(400).json({
success: false,
- error: "projectPath and worktreePath required",
+ error: 'projectPath and worktreePath required',
});
return;
}
@@ -30,7 +30,7 @@ export function createDeleteHandler() {
if (!(await isGitRepo(projectPath))) {
res.status(400).json({
success: false,
- error: "Not a git repository",
+ error: 'Not a git repository',
});
return;
}
@@ -38,7 +38,7 @@ export function createDeleteHandler() {
// Get branch name before removing worktree
let branchName: string | null = null;
try {
- const { stdout } = await execAsync("git rev-parse --abbrev-ref HEAD", {
+ const { stdout } = await execAsync('git rev-parse --abbrev-ref HEAD', {
cwd: worktreePath,
});
branchName = stdout.trim();
@@ -53,11 +53,11 @@ export function createDeleteHandler() {
});
} catch (error) {
// Try with prune if remove fails
- await execAsync("git worktree prune", { cwd: projectPath });
+ await execAsync('git worktree prune', { cwd: projectPath });
}
// Optionally delete the branch
- if (deleteBranch && branchName && branchName !== "main" && branchName !== "master") {
+ if (deleteBranch && branchName && branchName !== 'main' && branchName !== 'master') {
try {
await execAsync(`git branch -D ${branchName}`, { cwd: projectPath });
} catch {
@@ -73,7 +73,7 @@ export function createDeleteHandler() {
},
});
} catch (error) {
- logError(error, "Delete worktree failed");
+ logError(error, 'Delete worktree failed');
res.status(500).json({ success: false, error: getErrorMessage(error) });
}
};
diff --git a/apps/server/src/routes/worktree/routes/list-branches.ts b/apps/server/src/routes/worktree/routes/list-branches.ts
index 0b07eb175..5fab4aff3 100644
--- a/apps/server/src/routes/worktree/routes/list-branches.ts
+++ b/apps/server/src/routes/worktree/routes/list-branches.ts
@@ -2,10 +2,10 @@
* POST /list-branches endpoint - List all local branches
*/
-import type { Request, Response } from "express";
-import { exec } from "child_process";
-import { promisify } from "util";
-import { getErrorMessage, logWorktreeError } from "../common.js";
+import type { Request, Response } from 'express';
+import { exec } from 'child_process';
+import { promisify } from 'util';
+import { getErrorMessage, logWorktreeError } from '../common.js';
const execAsync = promisify(exec);
@@ -25,33 +25,31 @@ export function createListBranchesHandler() {
if (!worktreePath) {
res.status(400).json({
success: false,
- error: "worktreePath required",
+ error: 'worktreePath required',
});
return;
}
// Get current branch
- const { stdout: currentBranchOutput } = await execAsync(
- "git rev-parse --abbrev-ref HEAD",
- { cwd: worktreePath }
- );
+ const { stdout: currentBranchOutput } = await execAsync('git rev-parse --abbrev-ref HEAD', {
+ cwd: worktreePath,
+ });
const currentBranch = currentBranchOutput.trim();
// List all local branches
// Use double quotes around the format string for cross-platform compatibility
// Single quotes are preserved literally on Windows; double quotes work on both
- const { stdout: branchesOutput } = await execAsync(
- 'git branch --format="%(refname:short)"',
- { cwd: worktreePath }
- );
+ const { stdout: branchesOutput } = await execAsync('git branch --format="%(refname:short)"', {
+ cwd: worktreePath,
+ });
const branches: BranchInfo[] = branchesOutput
.trim()
- .split("\n")
+ .split('\n')
.filter((b) => b.trim())
.map((name) => {
// Remove any surrounding quotes (Windows git may preserve them)
- const cleanName = name.trim().replace(/^['"]|['"]$/g, "");
+ const cleanName = name.trim().replace(/^['"]|['"]$/g, '');
return {
name: cleanName,
isCurrent: cleanName === currentBranch,
@@ -93,7 +91,7 @@ export function createListBranchesHandler() {
});
} catch (error) {
const worktreePath = req.body?.worktreePath;
- logWorktreeError(error, "List branches failed", worktreePath);
+ logWorktreeError(error, 'List branches failed', worktreePath);
res.status(500).json({ success: false, error: getErrorMessage(error) });
}
};
diff --git a/apps/server/src/routes/worktree/routes/list-dev-servers.ts b/apps/server/src/routes/worktree/routes/list-dev-servers.ts
index ff5c527a4..c1093ea5c 100644
--- a/apps/server/src/routes/worktree/routes/list-dev-servers.ts
+++ b/apps/server/src/routes/worktree/routes/list-dev-servers.ts
@@ -5,9 +5,9 @@
* including their ports and URLs.
*/
-import type { Request, Response } from "express";
-import { getDevServerService } from "../../../services/dev-server-service.js";
-import { getErrorMessage, logError } from "../common.js";
+import type { Request, Response } from 'express';
+import { getDevServerService } from '../../../services/dev-server-service.js';
+import { getErrorMessage, logError } from '../common.js';
export function createListDevServersHandler() {
  return async (_req: Request, res: Response): Promise<void> => {
@@ -22,7 +22,7 @@ export function createListDevServersHandler() {
},
});
} catch (error) {
- logError(error, "List dev servers failed");
+ logError(error, 'List dev servers failed');
res.status(500).json({ success: false, error: getErrorMessage(error) });
}
};
diff --git a/apps/server/src/routes/worktree/routes/merge.ts b/apps/server/src/routes/worktree/routes/merge.ts
index f9499d856..40ac8dd48 100644
--- a/apps/server/src/routes/worktree/routes/merge.ts
+++ b/apps/server/src/routes/worktree/routes/merge.ts
@@ -2,11 +2,11 @@
* POST /merge endpoint - Merge feature (merge worktree branch into main)
*/
-import type { Request, Response } from "express";
-import { exec } from "child_process";
-import { promisify } from "util";
-import path from "path";
-import { getErrorMessage, logError } from "../common.js";
+import type { Request, Response } from 'express';
+import { exec } from 'child_process';
+import { promisify } from 'util';
+import path from 'path';
+import { getErrorMessage, logError } from '../common.js';
const execAsync = promisify(exec);
@@ -20,42 +20,34 @@ export function createMergeHandler() {
};
if (!projectPath || !featureId) {
- res
- .status(400)
- .json({
- success: false,
- error: "projectPath and featureId required",
- });
+ res.status(400).json({
+ success: false,
+ error: 'projectPath and featureId required',
+ });
return;
}
const branchName = `feature/${featureId}`;
// Git worktrees are stored in project directory
- const worktreePath = path.join(projectPath, ".worktrees", featureId);
+ const worktreePath = path.join(projectPath, '.worktrees', featureId);
// Get current branch
- const { stdout: currentBranch } = await execAsync(
- "git rev-parse --abbrev-ref HEAD",
- { cwd: projectPath }
- );
+ const { stdout: currentBranch } = await execAsync('git rev-parse --abbrev-ref HEAD', {
+ cwd: projectPath,
+ });
// Merge the feature branch
const mergeCmd = options?.squash
? `git merge --squash ${branchName}`
- : `git merge ${branchName} -m "${
- options?.message || `Merge ${branchName}`
- }"`;
+ : `git merge ${branchName} -m "${options?.message || `Merge ${branchName}`}"`;
await execAsync(mergeCmd, { cwd: projectPath });
// If squash merge, need to commit
if (options?.squash) {
- await execAsync(
- `git commit -m "${
- options?.message || `Merge ${branchName} (squash)`
- }"`,
- { cwd: projectPath }
- );
+ await execAsync(`git commit -m "${options?.message || `Merge ${branchName} (squash)`}"`, {
+ cwd: projectPath,
+ });
}
// Clean up worktree and branch
@@ -70,7 +62,7 @@ export function createMergeHandler() {
res.json({ success: true, mergedBranch: branchName });
} catch (error) {
- logError(error, "Merge worktree failed");
+ logError(error, 'Merge worktree failed');
res.status(500).json({ success: false, error: getErrorMessage(error) });
}
};
diff --git a/apps/server/src/routes/worktree/routes/migrate.ts b/apps/server/src/routes/worktree/routes/migrate.ts
index a5287a126..7165b1762 100644
--- a/apps/server/src/routes/worktree/routes/migrate.ts
+++ b/apps/server/src/routes/worktree/routes/migrate.ts
@@ -5,8 +5,8 @@
* any migration since .automaker is now stored in the project directory.
*/
-import type { Request, Response } from "express";
-import { getAutomakerDir } from "@automaker/platform";
+import type { Request, Response } from 'express';
+import { getAutomakerDir } from '@automaker/platform';
export function createMigrateHandler() {
  return async (req: Request, res: Response): Promise<void> => {
@@ -15,7 +15,7 @@ export function createMigrateHandler() {
if (!projectPath) {
res.status(400).json({
success: false,
- error: "projectPath is required",
+ error: 'projectPath is required',
});
return;
}
@@ -25,7 +25,7 @@ export function createMigrateHandler() {
res.json({
success: true,
migrated: false,
- message: "No migration needed - .automaker is stored in project directory",
+ message: 'No migration needed - .automaker is stored in project directory',
path: automakerDir,
});
};
diff --git a/apps/server/src/routes/worktree/routes/open-in-editor.ts b/apps/server/src/routes/worktree/routes/open-in-editor.ts
index 04f9815f1..40e71b004 100644
--- a/apps/server/src/routes/worktree/routes/open-in-editor.ts
+++ b/apps/server/src/routes/worktree/routes/open-in-editor.ts
@@ -3,10 +3,10 @@
* GET /default-editor endpoint - Get the name of the default code editor
*/
-import type { Request, Response } from "express";
-import { exec } from "child_process";
-import { promisify } from "util";
-import { getErrorMessage, logError } from "../common.js";
+import type { Request, Response } from 'express';
+import { exec } from 'child_process';
+import { promisify } from 'util';
+import { getErrorMessage, logError } from '../common.js';
const execAsync = promisify(exec);
@@ -29,8 +29,8 @@ async function detectDefaultEditor(): Promise {
// Try Cursor first (if user has Cursor, they probably prefer it)
try {
- await execAsync("which cursor || where cursor");
- cachedEditor = { name: "Cursor", command: "cursor" };
+ await execAsync('which cursor || where cursor');
+ cachedEditor = { name: 'Cursor', command: 'cursor' };
return cachedEditor;
} catch {
// Cursor not found
@@ -38,8 +38,8 @@ async function detectDefaultEditor(): Promise {
// Try VS Code
try {
- await execAsync("which code || where code");
- cachedEditor = { name: "VS Code", command: "code" };
+ await execAsync('which code || where code');
+ cachedEditor = { name: 'VS Code', command: 'code' };
return cachedEditor;
} catch {
// VS Code not found
@@ -47,8 +47,8 @@ async function detectDefaultEditor(): Promise {
// Try Zed
try {
- await execAsync("which zed || where zed");
- cachedEditor = { name: "Zed", command: "zed" };
+ await execAsync('which zed || where zed');
+ cachedEditor = { name: 'Zed', command: 'zed' };
return cachedEditor;
} catch {
// Zed not found
@@ -56,8 +56,8 @@ async function detectDefaultEditor(): Promise {
// Try Sublime Text
try {
- await execAsync("which subl || where subl");
- cachedEditor = { name: "Sublime Text", command: "subl" };
+ await execAsync('which subl || where subl');
+ cachedEditor = { name: 'Sublime Text', command: 'subl' };
return cachedEditor;
} catch {
// Sublime not found
@@ -65,12 +65,12 @@ async function detectDefaultEditor(): Promise {
// Fallback to file manager
const platform = process.platform;
- if (platform === "darwin") {
- cachedEditor = { name: "Finder", command: "open" };
- } else if (platform === "win32") {
- cachedEditor = { name: "Explorer", command: "explorer" };
+ if (platform === 'darwin') {
+ cachedEditor = { name: 'Finder', command: 'open' };
+ } else if (platform === 'win32') {
+ cachedEditor = { name: 'Explorer', command: 'explorer' };
} else {
- cachedEditor = { name: "File Manager", command: "xdg-open" };
+ cachedEditor = { name: 'File Manager', command: 'xdg-open' };
}
return cachedEditor;
}
@@ -87,7 +87,7 @@ export function createGetDefaultEditorHandler() {
},
});
} catch (error) {
- logError(error, "Get default editor failed");
+ logError(error, 'Get default editor failed');
res.status(500).json({ success: false, error: getErrorMessage(error) });
}
};
@@ -103,7 +103,7 @@ export function createOpenInEditorHandler() {
if (!worktreePath) {
res.status(400).json({
success: false,
- error: "worktreePath required",
+ error: 'worktreePath required',
});
return;
}
@@ -125,15 +125,15 @@ export function createOpenInEditorHandler() {
let openCommand: string;
let fallbackName: string;
- if (platform === "darwin") {
+ if (platform === 'darwin') {
openCommand = `open "${worktreePath}"`;
- fallbackName = "Finder";
- } else if (platform === "win32") {
+ fallbackName = 'Finder';
+ } else if (platform === 'win32') {
openCommand = `explorer "${worktreePath}"`;
- fallbackName = "Explorer";
+ fallbackName = 'Explorer';
} else {
openCommand = `xdg-open "${worktreePath}"`;
- fallbackName = "File Manager";
+ fallbackName = 'File Manager';
}
await execAsync(openCommand);
@@ -146,7 +146,7 @@ export function createOpenInEditorHandler() {
});
}
} catch (error) {
- logError(error, "Open in editor failed");
+ logError(error, 'Open in editor failed');
res.status(500).json({ success: false, error: getErrorMessage(error) });
}
};
diff --git a/apps/server/src/routes/worktree/routes/pr-info.ts b/apps/server/src/routes/worktree/routes/pr-info.ts
index 779e81cb6..cb64ccd91 100644
--- a/apps/server/src/routes/worktree/routes/pr-info.ts
+++ b/apps/server/src/routes/worktree/routes/pr-info.ts
@@ -2,7 +2,7 @@
* POST /pr-info endpoint - Get PR info and comments for a branch
*/
-import type { Request, Response } from "express";
+import type { Request, Response } from 'express';
import {
getErrorMessage,
logError,
@@ -10,7 +10,7 @@ import {
execEnv,
isValidBranchName,
isGhCliAvailable,
-} from "../common.js";
+} from '../common.js';
export interface PRComment {
id: number;
@@ -44,7 +44,7 @@ export function createPRInfoHandler() {
if (!worktreePath || !branchName) {
res.status(400).json({
success: false,
- error: "worktreePath and branchName required",
+ error: 'worktreePath and branchName required',
});
return;
}
@@ -53,7 +53,7 @@ export function createPRInfoHandler() {
if (!isValidBranchName(branchName)) {
res.status(400).json({
success: false,
- error: "Invalid branch name contains unsafe characters",
+ error: 'Invalid branch name contains unsafe characters',
});
return;
}
@@ -67,7 +67,7 @@ export function createPRInfoHandler() {
result: {
hasPR: false,
ghCliAvailable: false,
- error: "gh CLI not available",
+ error: 'gh CLI not available',
},
});
return;
@@ -79,7 +79,7 @@ export function createPRInfoHandler() {
let originRepo: string | null = null;
try {
- const { stdout: remotes } = await execAsync("git remote -v", {
+ const { stdout: remotes } = await execAsync('git remote -v', {
cwd: worktreePath,
env: execEnv,
});
@@ -87,21 +87,15 @@ export function createPRInfoHandler() {
const lines = remotes.split(/\r?\n/);
for (const line of lines) {
let match =
- line.match(
- /^(\w+)\s+.*[:/]([^/]+)\/([^/\s]+?)(?:\.git)?\s+\(fetch\)/
- ) ||
- line.match(
- /^(\w+)\s+git@[^:]+:([^/]+)\/([^\s]+?)(?:\.git)?\s+\(fetch\)/
- ) ||
- line.match(
- /^(\w+)\s+https?:\/\/[^/]+\/([^/]+)\/([^\s]+?)(?:\.git)?\s+\(fetch\)/
- );
+ line.match(/^(\w+)\s+.*[:/]([^/]+)\/([^/\s]+?)(?:\.git)?\s+\(fetch\)/) ||
+ line.match(/^(\w+)\s+git@[^:]+:([^/]+)\/([^\s]+?)(?:\.git)?\s+\(fetch\)/) ||
+ line.match(/^(\w+)\s+https?:\/\/[^/]+\/([^/]+)\/([^\s]+?)(?:\.git)?\s+\(fetch\)/);
if (match) {
const [, remoteName, owner, repo] = match;
- if (remoteName === "upstream") {
+ if (remoteName === 'upstream') {
upstreamRepo = `${owner}/${repo}`;
- } else if (remoteName === "origin") {
+ } else if (remoteName === 'origin') {
originOwner = owner;
originRepo = repo;
}
@@ -113,16 +107,11 @@ export function createPRInfoHandler() {
if (!originOwner || !originRepo) {
try {
- const { stdout: originUrl } = await execAsync(
- "git config --get remote.origin.url",
- {
- cwd: worktreePath,
- env: execEnv,
- }
- );
- const match = originUrl
- .trim()
- .match(/[:/]([^/]+)\/([^/\s]+?)(?:\.git)?$/);
+ const { stdout: originUrl } = await execAsync('git config --get remote.origin.url', {
+ cwd: worktreePath,
+ env: execEnv,
+ });
+ const match = originUrl.trim().match(/[:/]([^/]+)\/([^/\s]+?)(?:\.git)?$/);
if (match) {
if (!originOwner) {
originOwner = match[1];
@@ -137,21 +126,18 @@ export function createPRInfoHandler() {
}
const targetRepo =
- upstreamRepo || (originOwner && originRepo
- ? `${originOwner}/${originRepo}`
- : null);
- const repoFlag = targetRepo ? ` --repo "${targetRepo}"` : "";
- const headRef =
- upstreamRepo && originOwner ? `${originOwner}:${branchName}` : branchName;
+ upstreamRepo || (originOwner && originRepo ? `${originOwner}/${originRepo}` : null);
+ const repoFlag = targetRepo ? ` --repo "${targetRepo}"` : '';
+ const headRef = upstreamRepo && originOwner ? `${originOwner}:${branchName}` : branchName;
// Get PR info for the branch using gh CLI
try {
// First, find the PR associated with this branch
const listCmd = `gh pr list${repoFlag} --head "${headRef}" --json number,title,url,state,author,body --limit 1`;
- const { stdout: prListOutput } = await execAsync(
- listCmd,
- { cwd: worktreePath, env: execEnv }
- );
+ const { stdout: prListOutput } = await execAsync(listCmd, {
+ cwd: worktreePath,
+ env: execEnv,
+ });
const prList = JSON.parse(prListOutput);
@@ -173,25 +159,22 @@ export function createPRInfoHandler() {
let comments: PRComment[] = [];
try {
const viewCmd = `gh pr view ${prNumber}${repoFlag} --json comments`;
- const { stdout: commentsOutput } = await execAsync(
- viewCmd,
- { cwd: worktreePath, env: execEnv }
- );
+ const { stdout: commentsOutput } = await execAsync(viewCmd, {
+ cwd: worktreePath,
+ env: execEnv,
+ });
const commentsData = JSON.parse(commentsOutput);
- comments = (commentsData.comments || []).map((c: {
- id: number;
- author: { login: string };
- body: string;
- createdAt: string;
- }) => ({
- id: c.id,
- author: c.author?.login || "unknown",
- body: c.body,
- createdAt: c.createdAt,
- isReviewComment: false,
- }));
+ comments = (commentsData.comments || []).map(
+ (c: { id: number; author: { login: string }; body: string; createdAt: string }) => ({
+ id: c.id,
+ author: c.author?.login || 'unknown',
+ body: c.body,
+ createdAt: c.createdAt,
+ isReviewComment: false,
+ })
+ );
} catch (error) {
- console.warn("[PRInfo] Failed to fetch PR comments:", error);
+ console.warn('[PRInfo] Failed to fetch PR comments:', error);
}
// Get review comments (inline code comments)
@@ -201,33 +184,35 @@ export function createPRInfoHandler() {
try {
const reviewsEndpoint = `repos/${targetRepo}/pulls/${prNumber}/comments`;
const reviewsCmd = `gh api ${reviewsEndpoint}`;
- const { stdout: reviewsOutput } = await execAsync(
- reviewsCmd,
- { cwd: worktreePath, env: execEnv }
- );
+ const { stdout: reviewsOutput } = await execAsync(reviewsCmd, {
+ cwd: worktreePath,
+ env: execEnv,
+ });
const reviewsData = JSON.parse(reviewsOutput);
- reviewComments = reviewsData.map((c: {
- id: number;
- user: { login: string };
- body: string;
- path: string;
- line?: number;
- original_line?: number;
- created_at: string;
- }) => ({
- id: c.id,
- author: c.user?.login || "unknown",
- body: c.body,
- path: c.path,
- line: c.line || c.original_line,
- createdAt: c.created_at,
- isReviewComment: true,
- }));
+ reviewComments = reviewsData.map(
+ (c: {
+ id: number;
+ user: { login: string };
+ body: string;
+ path: string;
+ line?: number;
+ original_line?: number;
+ created_at: string;
+ }) => ({
+ id: c.id,
+ author: c.user?.login || 'unknown',
+ body: c.body,
+ path: c.path,
+ line: c.line || c.original_line,
+ createdAt: c.created_at,
+ isReviewComment: true,
+ })
+ );
} catch (error) {
- console.warn("[PRInfo] Failed to fetch review comments:", error);
+ console.warn('[PRInfo] Failed to fetch review comments:', error);
}
} else {
- console.warn("[PRInfo] Cannot fetch review comments: repository info not available");
+ console.warn('[PRInfo] Cannot fetch review comments: repository info not available');
}
const prInfo: PRInfo = {
@@ -235,8 +220,8 @@ export function createPRInfoHandler() {
title: pr.title,
url: pr.url,
state: pr.state,
- author: pr.author?.login || "unknown",
- body: pr.body || "",
+ author: pr.author?.login || 'unknown',
+ body: pr.body || '',
comments,
reviewComments,
};
@@ -251,7 +236,7 @@ export function createPRInfoHandler() {
});
} catch (error) {
// gh CLI failed - might not be authenticated or no remote
- logError(error, "Failed to get PR info");
+ logError(error, 'Failed to get PR info');
res.json({
success: true,
result: {
@@ -262,7 +247,7 @@ export function createPRInfoHandler() {
});
}
} catch (error) {
- logError(error, "PR info handler failed");
+ logError(error, 'PR info handler failed');
res.status(500).json({ success: false, error: getErrorMessage(error) });
}
};
diff --git a/apps/server/src/routes/worktree/routes/pull.ts b/apps/server/src/routes/worktree/routes/pull.ts
index 119192d07..4384e2070 100644
--- a/apps/server/src/routes/worktree/routes/pull.ts
+++ b/apps/server/src/routes/worktree/routes/pull.ts
@@ -2,10 +2,10 @@
* POST /pull endpoint - Pull latest changes for a worktree/branch
*/
-import type { Request, Response } from "express";
-import { exec } from "child_process";
-import { promisify } from "util";
-import { getErrorMessage, logError } from "../common.js";
+import type { Request, Response } from 'express';
+import { exec } from 'child_process';
+import { promisify } from 'util';
+import { getErrorMessage, logError } from '../common.js';
const execAsync = promisify(exec);
@@ -19,23 +19,22 @@ export function createPullHandler() {
if (!worktreePath) {
res.status(400).json({
success: false,
- error: "worktreePath required",
+ error: 'worktreePath required',
});
return;
}
// Get current branch name
- const { stdout: branchOutput } = await execAsync(
- "git rev-parse --abbrev-ref HEAD",
- { cwd: worktreePath }
- );
+ const { stdout: branchOutput } = await execAsync('git rev-parse --abbrev-ref HEAD', {
+ cwd: worktreePath,
+ });
const branchName = branchOutput.trim();
// Fetch latest from remote
- await execAsync("git fetch origin", { cwd: worktreePath });
+ await execAsync('git fetch origin', { cwd: worktreePath });
// Check if there are local changes that would be overwritten
- const { stdout: status } = await execAsync("git status --porcelain", {
+ const { stdout: status } = await execAsync('git status --porcelain', {
cwd: worktreePath,
});
const hasLocalChanges = status.trim().length > 0;
@@ -43,35 +42,34 @@ export function createPullHandler() {
if (hasLocalChanges) {
res.status(400).json({
success: false,
- error: "You have local changes. Please commit them before pulling.",
+ error: 'You have local changes. Please commit them before pulling.',
});
return;
}
// Pull latest changes
try {
- const { stdout: pullOutput } = await execAsync(
- `git pull origin ${branchName}`,
- { cwd: worktreePath }
- );
+ const { stdout: pullOutput } = await execAsync(`git pull origin ${branchName}`, {
+ cwd: worktreePath,
+ });
// Check if we pulled any changes
- const alreadyUpToDate = pullOutput.includes("Already up to date");
+ const alreadyUpToDate = pullOutput.includes('Already up to date');
res.json({
success: true,
result: {
branch: branchName,
pulled: !alreadyUpToDate,
- message: alreadyUpToDate ? "Already up to date" : "Pulled latest changes",
+ message: alreadyUpToDate ? 'Already up to date' : 'Pulled latest changes',
},
});
} catch (pullError: unknown) {
const err = pullError as { stderr?: string; message?: string };
- const errorMsg = err.stderr || err.message || "Pull failed";
+ const errorMsg = err.stderr || err.message || 'Pull failed';
// Check for common errors
- if (errorMsg.includes("no tracking information")) {
+ if (errorMsg.includes('no tracking information')) {
res.status(400).json({
success: false,
error: `Branch '${branchName}' has no upstream branch. Push it first or set upstream with: git branch --set-upstream-to=origin/${branchName}`,
@@ -85,7 +83,7 @@ export function createPullHandler() {
});
}
} catch (error) {
- logError(error, "Pull failed");
+ logError(error, 'Pull failed');
res.status(500).json({ success: false, error: getErrorMessage(error) });
}
};
diff --git a/apps/server/src/routes/worktree/routes/push.ts b/apps/server/src/routes/worktree/routes/push.ts
index d9447a2bb..c0337f43a 100644
--- a/apps/server/src/routes/worktree/routes/push.ts
+++ b/apps/server/src/routes/worktree/routes/push.ts
@@ -2,10 +2,10 @@
* POST /push endpoint - Push a worktree branch to remote
*/
-import type { Request, Response } from "express";
-import { exec } from "child_process";
-import { promisify } from "util";
-import { getErrorMessage, logError } from "../common.js";
+import type { Request, Response } from 'express';
+import { exec } from 'child_process';
+import { promisify } from 'util';
+import { getErrorMessage, logError } from '../common.js';
const execAsync = promisify(exec);
@@ -20,20 +20,19 @@ export function createPushHandler() {
if (!worktreePath) {
res.status(400).json({
success: false,
- error: "worktreePath required",
+ error: 'worktreePath required',
});
return;
}
// Get branch name
- const { stdout: branchOutput } = await execAsync(
- "git rev-parse --abbrev-ref HEAD",
- { cwd: worktreePath }
- );
+ const { stdout: branchOutput } = await execAsync('git rev-parse --abbrev-ref HEAD', {
+ cwd: worktreePath,
+ });
const branchName = branchOutput.trim();
// Push the branch
- const forceFlag = force ? "--force" : "";
+ const forceFlag = force ? '--force' : '';
try {
await execAsync(`git push -u origin ${branchName} ${forceFlag}`, {
cwd: worktreePath,
@@ -54,7 +53,7 @@ export function createPushHandler() {
},
});
} catch (error) {
- logError(error, "Push worktree failed");
+ logError(error, 'Push worktree failed');
res.status(500).json({ success: false, error: getErrorMessage(error) });
}
};
diff --git a/apps/server/src/routes/worktree/routes/start-dev.ts b/apps/server/src/routes/worktree/routes/start-dev.ts
index fcd0cec7c..13b93f9b9 100644
--- a/apps/server/src/routes/worktree/routes/start-dev.ts
+++ b/apps/server/src/routes/worktree/routes/start-dev.ts
@@ -6,9 +6,9 @@
* affecting the main dev server.
*/
-import type { Request, Response } from "express";
-import { getDevServerService } from "../../../services/dev-server-service.js";
-import { getErrorMessage, logError } from "../common.js";
+import type { Request, Response } from 'express';
+import { getDevServerService } from '../../../services/dev-server-service.js';
+import { getErrorMessage, logError } from '../common.js';
export function createStartDevHandler() {
  return async (req: Request, res: Response): Promise<void> => {
@@ -21,7 +21,7 @@ export function createStartDevHandler() {
if (!projectPath) {
res.status(400).json({
success: false,
- error: "projectPath is required",
+ error: 'projectPath is required',
});
return;
}
@@ -29,7 +29,7 @@ export function createStartDevHandler() {
if (!worktreePath) {
res.status(400).json({
success: false,
- error: "worktreePath is required",
+ error: 'worktreePath is required',
});
return;
}
@@ -50,11 +50,11 @@ export function createStartDevHandler() {
} else {
res.status(400).json({
success: false,
- error: result.error || "Failed to start dev server",
+ error: result.error || 'Failed to start dev server',
});
}
} catch (error) {
- logError(error, "Start dev server failed");
+ logError(error, 'Start dev server failed');
res.status(500).json({ success: false, error: getErrorMessage(error) });
}
};
diff --git a/apps/server/src/routes/worktree/routes/stop-dev.ts b/apps/server/src/routes/worktree/routes/stop-dev.ts
index 2c22b006e..1dbc7340b 100644
--- a/apps/server/src/routes/worktree/routes/stop-dev.ts
+++ b/apps/server/src/routes/worktree/routes/stop-dev.ts
@@ -5,9 +5,9 @@
* freeing up the ports for reuse.
*/
-import type { Request, Response } from "express";
-import { getDevServerService } from "../../../services/dev-server-service.js";
-import { getErrorMessage, logError } from "../common.js";
+import type { Request, Response } from 'express';
+import { getDevServerService } from '../../../services/dev-server-service.js';
+import { getErrorMessage, logError } from '../common.js';
export function createStopDevHandler() {
  return async (req: Request, res: Response): Promise<void> => {
@@ -19,7 +19,7 @@ export function createStopDevHandler() {
if (!worktreePath) {
res.status(400).json({
success: false,
- error: "worktreePath is required",
+ error: 'worktreePath is required',
});
return;
}
@@ -38,11 +38,11 @@ export function createStopDevHandler() {
} else {
res.status(400).json({
success: false,
- error: result.error || "Failed to stop dev server",
+ error: result.error || 'Failed to stop dev server',
});
}
} catch (error) {
- logError(error, "Stop dev server failed");
+ logError(error, 'Stop dev server failed');
res.status(500).json({ success: false, error: getErrorMessage(error) });
}
};
diff --git a/apps/server/src/routes/worktree/routes/switch-branch.ts b/apps/server/src/routes/worktree/routes/switch-branch.ts
index c3c4cdb41..3df7a3f25 100644
--- a/apps/server/src/routes/worktree/routes/switch-branch.ts
+++ b/apps/server/src/routes/worktree/routes/switch-branch.ts
@@ -6,10 +6,10 @@
* the user should commit first.
*/
-import type { Request, Response } from "express";
-import { exec } from "child_process";
-import { promisify } from "util";
-import { getErrorMessage, logError } from "../common.js";
+import type { Request, Response } from 'express';
+import { exec } from 'child_process';
+import { promisify } from 'util';
+import { getErrorMessage, logError } from '../common.js';
const execAsync = promisify(exec);
@@ -19,13 +19,16 @@ const execAsync = promisify(exec);
*/
async function hasUncommittedChanges(cwd: string): Promise<boolean> {
try {
- const { stdout } = await execAsync("git status --porcelain", { cwd });
- const lines = stdout.trim().split("\n").filter((line) => {
- if (!line.trim()) return false;
- // Exclude .worktrees/ directory (created by automaker)
- if (line.includes(".worktrees/") || line.endsWith(".worktrees")) return false;
- return true;
- });
+ const { stdout } = await execAsync('git status --porcelain', { cwd });
+ const lines = stdout
+ .trim()
+ .split('\n')
+ .filter((line) => {
+ if (!line.trim()) return false;
+ // Exclude .worktrees/ directory (created by automaker)
+ if (line.includes('.worktrees/') || line.endsWith('.worktrees')) return false;
+ return true;
+ });
return lines.length > 0;
} catch {
return false;
@@ -38,18 +41,21 @@ async function hasUncommittedChanges(cwd: string): Promise<boolean> {
*/
async function getChangesSummary(cwd: string): Promise<string> {
try {
- const { stdout } = await execAsync("git status --short", { cwd });
- const lines = stdout.trim().split("\n").filter((line) => {
- if (!line.trim()) return false;
- // Exclude .worktrees/ directory
- if (line.includes(".worktrees/") || line.endsWith(".worktrees")) return false;
- return true;
- });
- if (lines.length === 0) return "";
- if (lines.length <= 5) return lines.join(", ");
- return `${lines.slice(0, 5).join(", ")} and ${lines.length - 5} more files`;
+ const { stdout } = await execAsync('git status --short', { cwd });
+ const lines = stdout
+ .trim()
+ .split('\n')
+ .filter((line) => {
+ if (!line.trim()) return false;
+ // Exclude .worktrees/ directory
+ if (line.includes('.worktrees/') || line.endsWith('.worktrees')) return false;
+ return true;
+ });
+ if (lines.length === 0) return '';
+ if (lines.length <= 5) return lines.join(', ');
+ return `${lines.slice(0, 5).join(', ')} and ${lines.length - 5} more files`;
} catch {
- return "unknown changes";
+ return 'unknown changes';
}
}
@@ -64,7 +70,7 @@ export function createSwitchBranchHandler() {
if (!worktreePath) {
res.status(400).json({
success: false,
- error: "worktreePath required",
+ error: 'worktreePath required',
});
return;
}
@@ -72,16 +78,15 @@ export function createSwitchBranchHandler() {
if (!branchName) {
res.status(400).json({
success: false,
- error: "branchName required",
+ error: 'branchName required',
});
return;
}
// Get current branch
- const { stdout: currentBranchOutput } = await execAsync(
- "git rev-parse --abbrev-ref HEAD",
- { cwd: worktreePath }
- );
+ const { stdout: currentBranchOutput } = await execAsync('git rev-parse --abbrev-ref HEAD', {
+ cwd: worktreePath,
+ });
const previousBranch = currentBranchOutput.trim();
if (previousBranch === branchName) {
@@ -115,7 +120,7 @@ export function createSwitchBranchHandler() {
res.status(400).json({
success: false,
error: `Cannot switch branches: you have uncommitted changes (${summary}). Please commit your changes first.`,
- code: "UNCOMMITTED_CHANGES",
+ code: 'UNCOMMITTED_CHANGES',
});
return;
}
@@ -132,7 +137,7 @@ export function createSwitchBranchHandler() {
},
});
} catch (error) {
- logError(error, "Switch branch failed");
+ logError(error, 'Switch branch failed');
res.status(500).json({ success: false, error: getErrorMessage(error) });
}
};
diff --git a/apps/server/src/services/claude-usage-service.ts b/apps/server/src/services/claude-usage-service.ts
index 409437b65..946b7b23c 100644
--- a/apps/server/src/services/claude-usage-service.ts
+++ b/apps/server/src/services/claude-usage-service.ts
@@ -1,7 +1,7 @@
-import { spawn } from "child_process";
-import * as os from "os";
-import * as pty from "node-pty";
-import { ClaudeUsage } from "../routes/claude/types.js";
+import { spawn } from 'child_process';
+import * as os from 'os';
+import * as pty from 'node-pty';
+import { ClaudeUsage } from '../routes/claude/types.js';
/**
* Claude Usage Service
@@ -15,21 +15,21 @@ import { ClaudeUsage } from "../routes/claude/types.js";
* - Windows: Uses node-pty for PTY
*/
export class ClaudeUsageService {
- private claudeBinary = "claude";
+ private claudeBinary = 'claude';
private timeout = 30000; // 30 second timeout
- private isWindows = os.platform() === "win32";
+ private isWindows = os.platform() === 'win32';
/**
* Check if Claude CLI is available on the system
*/
  async isAvailable(): Promise<boolean> {
return new Promise((resolve) => {
- const checkCmd = this.isWindows ? "where" : "which";
+ const checkCmd = this.isWindows ? 'where' : 'which';
const proc = spawn(checkCmd, [this.claudeBinary]);
- proc.on("close", (code) => {
+ proc.on('close', (code) => {
resolve(code === 0);
});
- proc.on("error", () => {
+ proc.on('error', () => {
resolve(false);
});
});
@@ -59,12 +59,12 @@ export class ClaudeUsageService {
*/
  private executeClaudeUsageCommandMac(): Promise<string> {
return new Promise((resolve, reject) => {
- let stdout = "";
- let stderr = "";
+ let stdout = '';
+ let stderr = '';
let settled = false;
// Use a simple working directory (home or tmp)
- const workingDirectory = process.env.HOME || "/tmp";
+ const workingDirectory = process.env.HOME || '/tmp';
// Use 'expect' with an inline script to run claude /usage with a PTY
// Wait for "Current session" header, then wait for full output before exiting
@@ -86,11 +86,11 @@ export class ClaudeUsageService {
expect eof
`;
- const proc = spawn("expect", ["-c", expectScript], {
+ const proc = spawn('expect', ['-c', expectScript], {
cwd: workingDirectory,
env: {
...process.env,
- TERM: "xterm-256color",
+ TERM: 'xterm-256color',
},
});
@@ -98,26 +98,30 @@ export class ClaudeUsageService {
if (!settled) {
settled = true;
proc.kill();
- reject(new Error("Command timed out"));
+ reject(new Error('Command timed out'));
}
}, this.timeout);
- proc.stdout.on("data", (data) => {
+ proc.stdout.on('data', (data) => {
stdout += data.toString();
});
- proc.stderr.on("data", (data) => {
+ proc.stderr.on('data', (data) => {
stderr += data.toString();
});
- proc.on("close", (code) => {
+ proc.on('close', (code) => {
clearTimeout(timeoutId);
if (settled) return;
settled = true;
// Check for authentication errors in output
- if (stdout.includes("token_expired") || stdout.includes("authentication_error") ||
- stderr.includes("token_expired") || stderr.includes("authentication_error")) {
+ if (
+ stdout.includes('token_expired') ||
+ stdout.includes('authentication_error') ||
+ stderr.includes('token_expired') ||
+ stderr.includes('authentication_error')
+ ) {
reject(new Error("Authentication required - please run 'claude login'"));
return;
}
@@ -128,11 +132,11 @@ export class ClaudeUsageService {
} else if (code !== 0) {
reject(new Error(stderr || `Command exited with code ${code}`));
} else {
- reject(new Error("No output from claude command"));
+ reject(new Error('No output from claude command'));
}
});
- proc.on("error", (err) => {
+ proc.on('error', (err) => {
clearTimeout(timeoutId);
if (!settled) {
settled = true;
@@ -147,20 +151,20 @@ export class ClaudeUsageService {
*/
  private executeClaudeUsageCommandWindows(): Promise<string> {
return new Promise((resolve, reject) => {
- let output = "";
+ let output = '';
let settled = false;
let hasSeenUsageData = false;
- const workingDirectory = process.env.USERPROFILE || os.homedir() || "C:\\";
+ const workingDirectory = process.env.USERPROFILE || os.homedir() || 'C:\\';
- const ptyProcess = pty.spawn("cmd.exe", ["/c", "claude", "/usage"], {
- name: "xterm-256color",
+ const ptyProcess = pty.spawn('cmd.exe', ['/c', 'claude', '/usage'], {
+ name: 'xterm-256color',
cols: 120,
rows: 30,
cwd: workingDirectory,
env: {
...process.env,
- TERM: "xterm-256color",
+ TERM: 'xterm-256color',
        } as Record<string, string>,
});
@@ -168,7 +172,7 @@ export class ClaudeUsageService {
if (!settled) {
settled = true;
ptyProcess.kill();
- reject(new Error("Command timed out"));
+ reject(new Error('Command timed out'));
}
}, this.timeout);
@@ -176,21 +180,21 @@ export class ClaudeUsageService {
output += data;
// Check if we've seen the usage data (look for "Current session")
- if (!hasSeenUsageData && output.includes("Current session")) {
+ if (!hasSeenUsageData && output.includes('Current session')) {
hasSeenUsageData = true;
// Wait for full output, then send escape to exit
setTimeout(() => {
if (!settled) {
- ptyProcess.write("\x1b"); // Send escape key
+ ptyProcess.write('\x1b'); // Send escape key
}
}, 2000);
}
// Fallback: if we see "Esc to cancel" but haven't seen usage data yet
- if (!hasSeenUsageData && output.includes("Esc to cancel")) {
+ if (!hasSeenUsageData && output.includes('Esc to cancel')) {
setTimeout(() => {
if (!settled) {
- ptyProcess.write("\x1b"); // Send escape key
+ ptyProcess.write('\x1b'); // Send escape key
}
}, 3000);
}
@@ -202,7 +206,7 @@ export class ClaudeUsageService {
settled = true;
// Check for authentication errors in output
- if (output.includes("token_expired") || output.includes("authentication_error")) {
+ if (output.includes('token_expired') || output.includes('authentication_error')) {
reject(new Error("Authentication required - please run 'claude login'"));
return;
}
@@ -212,7 +216,7 @@ export class ClaudeUsageService {
} else if (exitCode !== 0) {
reject(new Error(`Command exited with code ${exitCode}`));
} else {
- reject(new Error("No output from claude command"));
+ reject(new Error('No output from claude command'));
}
});
});
@@ -223,7 +227,7 @@ export class ClaudeUsageService {
*/
private stripAnsiCodes(text: string): string {
// eslint-disable-next-line no-control-regex
- return text.replace(/\x1B\[[0-9;]*[A-Za-z]/g, "");
+ return text.replace(/\x1B\[[0-9;]*[A-Za-z]/g, '');
}
/**
@@ -248,21 +252,24 @@ export class ClaudeUsageService {
*/
private parseUsageOutput(rawOutput: string): ClaudeUsage {
const output = this.stripAnsiCodes(rawOutput);
- const lines = output.split("\n").map(l => l.trim()).filter(l => l);
+ const lines = output
+ .split('\n')
+ .map((l) => l.trim())
+ .filter((l) => l);
// Parse session usage
- const sessionData = this.parseSection(lines, "Current session", "session");
+ const sessionData = this.parseSection(lines, 'Current session', 'session');
// Parse weekly usage (all models)
- const weeklyData = this.parseSection(lines, "Current week (all models)", "weekly");
+ const weeklyData = this.parseSection(lines, 'Current week (all models)', 'weekly');
// Parse Sonnet/Opus usage - try different labels
- let sonnetData = this.parseSection(lines, "Current week (Sonnet only)", "sonnet");
+ let sonnetData = this.parseSection(lines, 'Current week (Sonnet only)', 'sonnet');
if (sonnetData.percentage === 0) {
- sonnetData = this.parseSection(lines, "Current week (Sonnet)", "sonnet");
+ sonnetData = this.parseSection(lines, 'Current week (Sonnet)', 'sonnet');
}
if (sonnetData.percentage === 0) {
- sonnetData = this.parseSection(lines, "Current week (Opus)", "sonnet");
+ sonnetData = this.parseSection(lines, 'Current week (Opus)', 'sonnet');
}
return {
@@ -294,10 +301,14 @@ export class ClaudeUsageService {
/**
* Parse a section of the usage output to extract percentage and reset time
*/
- private parseSection(lines: string[], sectionLabel: string, type: string): { percentage: number; resetTime: string; resetText: string } {
+ private parseSection(
+ lines: string[],
+ sectionLabel: string,
+ type: string
+ ): { percentage: number; resetTime: string; resetText: string } {
let percentage = 0;
let resetTime = this.getDefaultResetTime(type);
- let resetText = "";
+ let resetText = '';
// Find the LAST occurrence of the section (terminal output has multiple screen refreshes)
let sectionIndex = -1;
@@ -321,14 +332,14 @@ export class ClaudeUsageService {
const percentMatch = line.match(/(\d{1,3})\s*%\s*(left|used|remaining)/i);
if (percentMatch) {
const value = parseInt(percentMatch[1], 10);
- const isUsed = percentMatch[2].toLowerCase() === "used";
+ const isUsed = percentMatch[2].toLowerCase() === 'used';
// Convert "left" to "used" percentage (our UI shows % used)
- percentage = isUsed ? value : (100 - value);
+ percentage = isUsed ? value : 100 - value;
}
}
// Extract reset time - only take the first match
- if (!resetText && line.toLowerCase().includes("reset")) {
+ if (!resetText && line.toLowerCase().includes('reset')) {
resetText = line;
}
}
@@ -337,7 +348,7 @@ export class ClaudeUsageService {
if (resetText) {
resetTime = this.parseResetTime(resetText, type);
// Strip timezone like "(Asia/Dubai)" from the display text
- resetText = resetText.replace(/\s*\([A-Za-z_\/]+\)\s*$/, "").trim();
+ resetText = resetText.replace(/\s*\([A-Za-z_\/]+\)\s*$/, '').trim();
}
return { percentage, resetTime, resetText };
@@ -350,7 +361,9 @@ export class ClaudeUsageService {
const now = new Date();
// Try to parse duration format: "Resets in 2h 15m" or "Resets in 30m"
- const durationMatch = text.match(/(\d+)\s*h(?:ours?)?(?:\s+(\d+)\s*m(?:in)?)?|(\d+)\s*m(?:in)?/i);
+ const durationMatch = text.match(
+ /(\d+)\s*h(?:ours?)?(?:\s+(\d+)\s*m(?:in)?)?|(\d+)\s*m(?:in)?/i
+ );
if (durationMatch) {
let hours = 0;
let minutes = 0;
@@ -374,9 +387,9 @@ export class ClaudeUsageService {
const ampm = simpleTimeMatch[3].toLowerCase();
// Convert 12-hour to 24-hour
- if (ampm === "pm" && hours !== 12) {
+ if (ampm === 'pm' && hours !== 12) {
hours += 12;
- } else if (ampm === "am" && hours === 12) {
+ } else if (ampm === 'am' && hours === 12) {
hours = 0;
}
@@ -392,7 +405,9 @@ export class ClaudeUsageService {
}
// Try to parse date format: "Resets Dec 22 at 8pm" or "Resets Jan 15, 3:30pm"
- const dateMatch = text.match(/([A-Za-z]{3,})\s+(\d{1,2})(?:\s+at\s+|\s*,?\s*)(\d{1,2})(?::(\d{2}))?\s*(am|pm)/i);
+ const dateMatch = text.match(
+ /([A-Za-z]{3,})\s+(\d{1,2})(?:\s+at\s+|\s*,?\s*)(\d{1,2})(?::(\d{2}))?\s*(am|pm)/i
+ );
if (dateMatch) {
const monthName = dateMatch[1];
const day = parseInt(dateMatch[2], 10);
@@ -401,16 +416,26 @@ export class ClaudeUsageService {
const ampm = dateMatch[5].toLowerCase();
// Convert 12-hour to 24-hour
- if (ampm === "pm" && hours !== 12) {
+ if (ampm === 'pm' && hours !== 12) {
hours += 12;
- } else if (ampm === "am" && hours === 12) {
+ } else if (ampm === 'am' && hours === 12) {
hours = 0;
}
// Parse month name
      const months: Record<string, number> = {
- jan: 0, feb: 1, mar: 2, apr: 3, may: 4, jun: 5,
- jul: 6, aug: 7, sep: 8, oct: 9, nov: 10, dec: 11
+ jan: 0,
+ feb: 1,
+ mar: 2,
+ apr: 3,
+ may: 4,
+ jun: 5,
+ jul: 6,
+ aug: 7,
+ sep: 8,
+ oct: 9,
+ nov: 10,
+ dec: 11,
};
const month = months[monthName.toLowerCase().substring(0, 3)];
@@ -435,7 +460,7 @@ export class ClaudeUsageService {
private getDefaultResetTime(type: string): string {
const now = new Date();
- if (type === "session") {
+ if (type === 'session') {
// Session resets in ~5 hours
return new Date(now.getTime() + 5 * 60 * 60 * 1000).toISOString();
} else {
diff --git a/apps/server/src/services/feature-loader.ts b/apps/server/src/services/feature-loader.ts
index 41585103e..fbf86d492 100644
--- a/apps/server/src/services/feature-loader.ts
+++ b/apps/server/src/services/feature-loader.ts
@@ -3,18 +3,18 @@
* Each feature is stored in .automaker/features/{featureId}/feature.json
*/
-import path from "path";
-import type { Feature } from "@automaker/types";
-import { createLogger } from "@automaker/utils";
-import * as secureFs from "../lib/secure-fs.js";
+import path from 'path';
+import type { Feature } from '@automaker/types';
+import { createLogger } from '@automaker/utils';
+import * as secureFs from '../lib/secure-fs.js';
import {
getFeaturesDir,
getFeatureDir,
getFeatureImagesDir,
ensureAutomakerDir,
-} from "@automaker/platform";
+} from '@automaker/platform';
-const logger = createLogger("FeatureLoader");
+const logger = createLogger('FeatureLoader');
// Re-export Feature type for convenience
export type { Feature };
@@ -39,24 +39,16 @@ export class FeatureLoader {
*/
private async deleteOrphanedImages(
projectPath: string,
- oldPaths:
-      | Array<string | { path: string; [key: string]: unknown }>
- | undefined,
- newPaths:
-      | Array<string | { path: string; [key: string]: unknown }>
- | undefined
+    oldPaths: Array<string | { path: string; [key: string]: unknown }> | undefined,
+    newPaths: Array<string | { path: string; [key: string]: unknown }> | undefined
  ): Promise<void> {
if (!oldPaths || oldPaths.length === 0) {
return;
}
// Build sets of paths for comparison
- const oldPathSet = new Set(
- oldPaths.map((p) => (typeof p === "string" ? p : p.path))
- );
- const newPathSet = new Set(
- (newPaths || []).map((p) => (typeof p === "string" ? p : p.path))
- );
+ const oldPathSet = new Set(oldPaths.map((p) => (typeof p === 'string' ? p : p.path)));
+ const newPathSet = new Set((newPaths || []).map((p) => (typeof p === 'string' ? p : p.path)));
// Find images that were removed
for (const oldPath of oldPathSet) {
@@ -67,10 +59,7 @@ export class FeatureLoader {
console.log(`[FeatureLoader] Deleted orphaned image: ${oldPath}`);
} catch (error) {
// Ignore errors when deleting (file may already be gone)
- logger.warn(
- `[FeatureLoader] Failed to delete image: ${oldPath}`,
- error
- );
+ logger.warn(`[FeatureLoader] Failed to delete image: ${oldPath}`, error);
}
}
}
@@ -83,9 +72,7 @@ export class FeatureLoader {
projectPath: string,
featureId: string,
    imagePaths?: Array<string | { path: string; [key: string]: unknown }>
- ): Promise<
-    Array<string | { path: string; [key: string]: unknown }> | undefined
- > {
+  ): Promise<Array<string | { path: string; [key: string]: unknown }> | undefined> {
if (!imagePaths || imagePaths.length === 0) {
return imagePaths;
}
@@ -93,14 +80,11 @@ export class FeatureLoader {
const featureImagesDir = this.getFeatureImagesDir(projectPath, featureId);
await secureFs.mkdir(featureImagesDir, { recursive: true });
- const updatedPaths: Array<
- string | { path: string; [key: string]: unknown }
- > = [];
+    const updatedPaths: Array<string | { path: string; [key: string]: unknown }> = [];
for (const imagePath of imagePaths) {
try {
- const originalPath =
- typeof imagePath === "string" ? imagePath : imagePath.path;
+ const originalPath = typeof imagePath === 'string' ? imagePath : imagePath.path;
// Skip if already in feature directory (already absolute path in external storage)
if (originalPath.includes(`/features/${featureId}/images/`)) {
@@ -117,9 +101,7 @@ export class FeatureLoader {
try {
await secureFs.access(fullOriginalPath);
} catch {
- logger.warn(
- `[FeatureLoader] Image not found, skipping: ${fullOriginalPath}`
- );
+ logger.warn(`[FeatureLoader] Image not found, skipping: ${fullOriginalPath}`);
continue;
}
@@ -129,9 +111,7 @@ export class FeatureLoader {
// Copy the file
await secureFs.copyFile(fullOriginalPath, newPath);
- console.log(
- `[FeatureLoader] Copied image: ${originalPath} -> ${newPath}`
- );
+ console.log(`[FeatureLoader] Copied image: ${originalPath} -> ${newPath}`);
// Try to delete the original temp file
try {
@@ -141,7 +121,7 @@ export class FeatureLoader {
}
// Update the path in the result (use absolute path)
- if (typeof imagePath === "string") {
+ if (typeof imagePath === 'string') {
updatedPaths.push(newPath);
} else {
updatedPaths.push({ ...imagePath, path: newPath });
@@ -168,20 +148,14 @@ export class FeatureLoader {
* Get the path to a feature's feature.json file
*/
getFeatureJsonPath(projectPath: string, featureId: string): string {
- return path.join(
- this.getFeatureDir(projectPath, featureId),
- "feature.json"
- );
+ return path.join(this.getFeatureDir(projectPath, featureId), 'feature.json');
}
/**
* Get the path to a feature's agent-output.md file
*/
getAgentOutputPath(projectPath: string, featureId: string): string {
- return path.join(
- this.getFeatureDir(projectPath, featureId),
- "agent-output.md"
- );
+ return path.join(this.getFeatureDir(projectPath, featureId), 'agent-output.md');
}
/**
@@ -218,10 +192,7 @@ export class FeatureLoader {
const featureJsonPath = this.getFeatureJsonPath(projectPath, featureId);
try {
- const content = (await secureFs.readFile(
- featureJsonPath,
- "utf-8"
- )) as string;
+ const content = (await secureFs.readFile(featureJsonPath, 'utf-8')) as string;
const feature = JSON.parse(content);
if (!feature.id) {
@@ -233,7 +204,7 @@ export class FeatureLoader {
features.push(feature);
} catch (error) {
- if ((error as NodeJS.ErrnoException).code === "ENOENT") {
+ if ((error as NodeJS.ErrnoException).code === 'ENOENT') {
continue;
} else if (error instanceof SyntaxError) {
logger.warn(
@@ -250,14 +221,14 @@ export class FeatureLoader {
// Sort by creation order (feature IDs contain timestamp)
features.sort((a, b) => {
- const aTime = a.id ? parseInt(a.id.split("-")[1] || "0") : 0;
- const bTime = b.id ? parseInt(b.id.split("-")[1] || "0") : 0;
+ const aTime = a.id ? parseInt(a.id.split('-')[1] || '0') : 0;
+ const bTime = b.id ? parseInt(b.id.split('-')[1] || '0') : 0;
return aTime - bTime;
});
return features;
} catch (error) {
- logger.error("Failed to get all features:", error);
+ logger.error('Failed to get all features:', error);
return [];
}
}
@@ -268,19 +239,13 @@ export class FeatureLoader {
  async get(projectPath: string, featureId: string): Promise<Feature | null> {
try {
const featureJsonPath = this.getFeatureJsonPath(projectPath, featureId);
- const content = (await secureFs.readFile(
- featureJsonPath,
- "utf-8"
- )) as string;
+ const content = (await secureFs.readFile(featureJsonPath, 'utf-8')) as string;
return JSON.parse(content);
} catch (error) {
- if ((error as NodeJS.ErrnoException).code === "ENOENT") {
+ if ((error as NodeJS.ErrnoException).code === 'ENOENT') {
return null;
}
- logger.error(
- `[FeatureLoader] Failed to get feature ${featureId}:`,
- error
- );
+ logger.error(`[FeatureLoader] Failed to get feature ${featureId}:`, error);
throw error;
}
}
@@ -288,10 +253,7 @@ export class FeatureLoader {
/**
* Create a new feature
*/
- async create(
- projectPath: string,
-    featureData: Partial<Feature>
-  ): Promise<Feature> {
+  async create(projectPath: string, featureData: Partial<Feature>): Promise<Feature> {
const featureId = featureData.id || this.generateFeatureId();
const featureDir = this.getFeatureDir(projectPath, featureId);
const featureJsonPath = this.getFeatureJsonPath(projectPath, featureId);
@@ -311,19 +273,15 @@ export class FeatureLoader {
// Ensure feature has required fields
const feature: Feature = {
- category: featureData.category || "Uncategorized",
- description: featureData.description || "",
+ category: featureData.category || 'Uncategorized',
+ description: featureData.description || '',
...featureData,
id: featureId,
imagePaths: migratedImagePaths,
};
// Write feature.json
- await secureFs.writeFile(
- featureJsonPath,
- JSON.stringify(feature, null, 2),
- "utf-8"
- );
+ await secureFs.writeFile(featureJsonPath, JSON.stringify(feature, null, 2), 'utf-8');
logger.info(`Created feature ${featureId}`);
return feature;
@@ -346,36 +304,22 @@ export class FeatureLoader {
let updatedImagePaths = updates.imagePaths;
if (updates.imagePaths !== undefined) {
// Delete orphaned images (images that were removed)
- await this.deleteOrphanedImages(
- projectPath,
- feature.imagePaths,
- updates.imagePaths
- );
+ await this.deleteOrphanedImages(projectPath, feature.imagePaths, updates.imagePaths);
// Migrate any new images
- updatedImagePaths = await this.migrateImages(
- projectPath,
- featureId,
- updates.imagePaths
- );
+ updatedImagePaths = await this.migrateImages(projectPath, featureId, updates.imagePaths);
}
// Merge updates
const updatedFeature: Feature = {
...feature,
...updates,
- ...(updatedImagePaths !== undefined
- ? { imagePaths: updatedImagePaths }
- : {}),
+ ...(updatedImagePaths !== undefined ? { imagePaths: updatedImagePaths } : {}),
};
// Write back to file
const featureJsonPath = this.getFeatureJsonPath(projectPath, featureId);
- await secureFs.writeFile(
- featureJsonPath,
- JSON.stringify(updatedFeature, null, 2),
- "utf-8"
- );
+ await secureFs.writeFile(featureJsonPath, JSON.stringify(updatedFeature, null, 2), 'utf-8');
logger.info(`Updated feature ${featureId}`);
return updatedFeature;
@@ -391,10 +335,7 @@ export class FeatureLoader {
console.log(`[FeatureLoader] Deleted feature ${featureId}`);
return true;
} catch (error) {
- logger.error(
- `[FeatureLoader] Failed to delete feature ${featureId}:`,
- error
- );
+ logger.error(`[FeatureLoader] Failed to delete feature ${featureId}:`, error);
return false;
}
}
@@ -402,25 +343,16 @@ export class FeatureLoader {
/**
* Get agent output for a feature
*/
- async getAgentOutput(
- projectPath: string,
- featureId: string
-  ): Promise<string | null> {
+  async getAgentOutput(projectPath: string, featureId: string): Promise<string | null> {
try {
const agentOutputPath = this.getAgentOutputPath(projectPath, featureId);
- const content = (await secureFs.readFile(
- agentOutputPath,
- "utf-8"
- )) as string;
+ const content = (await secureFs.readFile(agentOutputPath, 'utf-8')) as string;
return content;
} catch (error) {
- if ((error as NodeJS.ErrnoException).code === "ENOENT") {
+ if ((error as NodeJS.ErrnoException).code === 'ENOENT') {
return null;
}
- logger.error(
- `[FeatureLoader] Failed to get agent output for ${featureId}:`,
- error
- );
+ logger.error(`[FeatureLoader] Failed to get agent output for ${featureId}:`, error);
throw error;
}
}
@@ -428,30 +360,23 @@ export class FeatureLoader {
/**
* Save agent output for a feature
*/
- async saveAgentOutput(
- projectPath: string,
- featureId: string,
- content: string
-  ): Promise<void> {
+  async saveAgentOutput(projectPath: string, featureId: string, content: string): Promise<void> {
const featureDir = this.getFeatureDir(projectPath, featureId);
await secureFs.mkdir(featureDir, { recursive: true });
const agentOutputPath = this.getAgentOutputPath(projectPath, featureId);
- await secureFs.writeFile(agentOutputPath, content, "utf-8");
+ await secureFs.writeFile(agentOutputPath, content, 'utf-8');
}
/**
* Delete agent output for a feature
*/
- async deleteAgentOutput(
- projectPath: string,
- featureId: string
-  ): Promise<void> {
+  async deleteAgentOutput(projectPath: string, featureId: string): Promise<void> {
try {
const agentOutputPath = this.getAgentOutputPath(projectPath, featureId);
await secureFs.unlink(agentOutputPath);
} catch (error) {
- if ((error as NodeJS.ErrnoException).code !== "ENOENT") {
+ if ((error as NodeJS.ErrnoException).code !== 'ENOENT') {
throw error;
}
}
diff --git a/apps/server/src/services/settings-service.ts b/apps/server/src/services/settings-service.ts
index a935d93aa..288bde186 100644
--- a/apps/server/src/services/settings-service.ts
+++ b/apps/server/src/services/settings-service.ts
@@ -7,8 +7,8 @@
* - Per-project settings ({projectPath}/.automaker/settings.json)
*/
-import { createLogger } from "@automaker/utils";
-import * as secureFs from "../lib/secure-fs.js";
+import { createLogger } from '@automaker/utils';
+import * as secureFs from '../lib/secure-fs.js';
import {
getGlobalSettingsPath,
@@ -16,7 +16,7 @@ import {
getProjectSettingsPath,
ensureDataDir,
ensureAutomakerDir,
-} from "@automaker/platform";
+} from '@automaker/platform';
import type {
GlobalSettings,
Credentials,
@@ -27,7 +27,7 @@ import type {
TrashedProjectRef,
BoardBackgroundSettings,
WorktreeInfo,
-} from "../types/settings.js";
+} from '../types/settings.js';
import {
DEFAULT_GLOBAL_SETTINGS,
DEFAULT_CREDENTIALS,
@@ -35,9 +35,9 @@ import {
SETTINGS_VERSION,
CREDENTIALS_VERSION,
PROJECT_SETTINGS_VERSION,
-} from "../types/settings.js";
+} from '../types/settings.js';
-const logger = createLogger("SettingsService");
+const logger = createLogger('SettingsService');
/**
* Atomic file write - write to temp file then rename
@@ -47,7 +47,7 @@ async function atomicWriteJson(filePath: string, data: unknown): Promise<void> {
const content = JSON.stringify(data, null, 2);
try {
- await secureFs.writeFile(tempPath, content, "utf-8");
+ await secureFs.writeFile(tempPath, content, 'utf-8');
await secureFs.rename(tempPath, filePath);
} catch (error) {
// Clean up temp file if it exists
@@ -65,10 +65,10 @@ async function atomicWriteJson(filePath: string, data: unknown): Promise<void> {
*/
async function readJsonFile<T>(filePath: string, defaultValue: T): Promise<T> {
try {
- const content = (await secureFs.readFile(filePath, "utf-8")) as string;
+ const content = (await secureFs.readFile(filePath, 'utf-8')) as string;
return JSON.parse(content) as T;
} catch (error) {
- if ((error as NodeJS.ErrnoException).code === "ENOENT") {
+ if ((error as NodeJS.ErrnoException).code === 'ENOENT') {
return defaultValue;
}
logger.error(`Error reading ${filePath}:`, error);
@@ -128,10 +128,7 @@ export class SettingsService {
*/
  async getGlobalSettings(): Promise<GlobalSettings> {
const settingsPath = getGlobalSettingsPath(this.dataDir);
- const settings = await readJsonFile(
- settingsPath,
- DEFAULT_GLOBAL_SETTINGS
- );
+ const settings = await readJsonFile(settingsPath, DEFAULT_GLOBAL_SETTINGS);
// Apply any missing defaults (for backwards compatibility)
return {
@@ -153,9 +150,7 @@ export class SettingsService {
* @param updates - Partial GlobalSettings to merge (only provided fields are updated)
* @returns Promise resolving to complete updated GlobalSettings
*/
- async updateGlobalSettings(
-    updates: Partial<GlobalSettings>
-  ): Promise<GlobalSettings> {
+  async updateGlobalSettings(updates: Partial<GlobalSettings>): Promise<GlobalSettings> {
await ensureDataDir(this.dataDir);
const settingsPath = getGlobalSettingsPath(this.dataDir);
@@ -175,7 +170,7 @@ export class SettingsService {
}
await atomicWriteJson(settingsPath, updated);
- logger.info("Global settings updated");
+ logger.info('Global settings updated');
return updated;
}
@@ -207,10 +202,7 @@ export class SettingsService {
*/
  async getCredentials(): Promise<Credentials> {
const credentialsPath = getCredentialsPath(this.dataDir);
- const credentials = await readJsonFile(
- credentialsPath,
- DEFAULT_CREDENTIALS
- );
+ const credentials = await readJsonFile(credentialsPath, DEFAULT_CREDENTIALS);
return {
...DEFAULT_CREDENTIALS,
@@ -252,7 +244,7 @@ export class SettingsService {
}
await atomicWriteJson(credentialsPath, updated);
- logger.info("Credentials updated");
+ logger.info('Credentials updated');
return updated;
}
@@ -272,7 +264,7 @@ export class SettingsService {
const credentials = await this.getCredentials();
const maskKey = (key: string): string => {
- if (!key || key.length < 8) return "";
+ if (!key || key.length < 8) return '';
return `${key.substring(0, 4)}...${key.substring(key.length - 4)}`;
};
@@ -312,10 +304,7 @@ export class SettingsService {
*/
   async getProjectSettings(projectPath: string): Promise<ProjectSettings> {
const settingsPath = getProjectSettingsPath(projectPath);
- const settings = await readJsonFile(
- settingsPath,
- DEFAULT_PROJECT_SETTINGS
- );
+ const settings = await readJsonFile(settingsPath, DEFAULT_PROJECT_SETTINGS);
return {
...DEFAULT_PROJECT_SETTINGS,
@@ -388,11 +377,11 @@ export class SettingsService {
* @returns Promise resolving to migration result with success status and error list
*/
async migrateFromLocalStorage(localStorageData: {
- "automaker-storage"?: string;
- "automaker-setup"?: string;
- "worktree-panel-collapsed"?: string;
- "file-browser-recent-folders"?: string;
- "automaker:lastProjectDir"?: string;
+ 'automaker-storage'?: string;
+ 'automaker-setup'?: string;
+ 'worktree-panel-collapsed'?: string;
+ 'file-browser-recent-folders'?: string;
+ 'automaker:lastProjectDir'?: string;
}): Promise<{
success: boolean;
migratedGlobalSettings: boolean;
@@ -408,9 +397,9 @@ export class SettingsService {
try {
// Parse the main automaker-storage
       let appState: Record<string, unknown> = {};
- if (localStorageData["automaker-storage"]) {
+ if (localStorageData['automaker-storage']) {
try {
- const parsed = JSON.parse(localStorageData["automaker-storage"]);
+ const parsed = JSON.parse(localStorageData['automaker-storage']);
appState = parsed.state || parsed;
} catch (e) {
errors.push(`Failed to parse automaker-storage: ${e}`);
@@ -419,20 +408,14 @@ export class SettingsService {
// Extract global settings
       const globalSettings: Partial<GlobalSettings> = {
- theme: (appState.theme as GlobalSettings["theme"]) || "dark",
- sidebarOpen:
- appState.sidebarOpen !== undefined
- ? (appState.sidebarOpen as boolean)
- : true,
+ theme: (appState.theme as GlobalSettings['theme']) || 'dark',
+ sidebarOpen: appState.sidebarOpen !== undefined ? (appState.sidebarOpen as boolean) : true,
chatHistoryOpen: (appState.chatHistoryOpen as boolean) || false,
kanbanCardDetailLevel:
- (appState.kanbanCardDetailLevel as GlobalSettings["kanbanCardDetailLevel"]) ||
- "standard",
+ (appState.kanbanCardDetailLevel as GlobalSettings['kanbanCardDetailLevel']) || 'standard',
maxConcurrency: (appState.maxConcurrency as number) || 3,
defaultSkipTests:
- appState.defaultSkipTests !== undefined
- ? (appState.defaultSkipTests as boolean)
- : true,
+ appState.defaultSkipTests !== undefined ? (appState.defaultSkipTests as boolean) : true,
enableDependencyBlocking:
appState.enableDependencyBlocking !== undefined
? (appState.enableDependencyBlocking as boolean)
@@ -440,55 +423,48 @@ export class SettingsService {
useWorktrees: (appState.useWorktrees as boolean) || false,
showProfilesOnly: (appState.showProfilesOnly as boolean) || false,
defaultPlanningMode:
- (appState.defaultPlanningMode as GlobalSettings["defaultPlanningMode"]) ||
- "skip",
- defaultRequirePlanApproval:
- (appState.defaultRequirePlanApproval as boolean) || false,
- defaultAIProfileId:
- (appState.defaultAIProfileId as string | null) || null,
+ (appState.defaultPlanningMode as GlobalSettings['defaultPlanningMode']) || 'skip',
+ defaultRequirePlanApproval: (appState.defaultRequirePlanApproval as boolean) || false,
+ defaultAIProfileId: (appState.defaultAIProfileId as string | null) || null,
muteDoneSound: (appState.muteDoneSound as boolean) || false,
enhancementModel:
- (appState.enhancementModel as GlobalSettings["enhancementModel"]) ||
- "sonnet",
+ (appState.enhancementModel as GlobalSettings['enhancementModel']) || 'sonnet',
keyboardShortcuts:
(appState.keyboardShortcuts as KeyboardShortcuts) ||
DEFAULT_GLOBAL_SETTINGS.keyboardShortcuts,
aiProfiles: (appState.aiProfiles as AIProfile[]) || [],
projects: (appState.projects as ProjectRef[]) || [],
- trashedProjects:
- (appState.trashedProjects as TrashedProjectRef[]) || [],
+ trashedProjects: (appState.trashedProjects as TrashedProjectRef[]) || [],
projectHistory: (appState.projectHistory as string[]) || [],
projectHistoryIndex: (appState.projectHistoryIndex as number) || -1,
lastSelectedSessionByProject:
 -          (appState.lastSelectedSessionByProject as Record<string, string>) ||
 -          {},
 +          (appState.lastSelectedSessionByProject as Record<string, string>) || {},
};
// Add direct localStorage values
- if (localStorageData["automaker:lastProjectDir"]) {
- globalSettings.lastProjectDir =
- localStorageData["automaker:lastProjectDir"];
+ if (localStorageData['automaker:lastProjectDir']) {
+ globalSettings.lastProjectDir = localStorageData['automaker:lastProjectDir'];
}
- if (localStorageData["file-browser-recent-folders"]) {
+ if (localStorageData['file-browser-recent-folders']) {
try {
globalSettings.recentFolders = JSON.parse(
- localStorageData["file-browser-recent-folders"]
+ localStorageData['file-browser-recent-folders']
);
} catch {
globalSettings.recentFolders = [];
}
}
- if (localStorageData["worktree-panel-collapsed"]) {
+ if (localStorageData['worktree-panel-collapsed']) {
globalSettings.worktreePanelCollapsed =
- localStorageData["worktree-panel-collapsed"] === "true";
+ localStorageData['worktree-panel-collapsed'] === 'true';
}
// Save global settings
await this.updateGlobalSettings(globalSettings);
migratedGlobalSettings = true;
- logger.info("Migrated global settings from localStorage");
+ logger.info('Migrated global settings from localStorage');
// Extract and save credentials
if (appState.apiKeys) {
@@ -499,13 +475,13 @@ export class SettingsService {
};
await this.updateCredentials({
apiKeys: {
- anthropic: apiKeys.anthropic || "",
- google: apiKeys.google || "",
- openai: apiKeys.openai || "",
+ anthropic: apiKeys.anthropic || '',
+ google: apiKeys.google || '',
+ openai: apiKeys.openai || '',
},
});
migratedCredentials = true;
- logger.info("Migrated credentials from localStorage");
+ logger.info('Migrated credentials from localStorage');
}
// Migrate per-project settings
@@ -522,14 +498,10 @@ export class SettingsService {
// Get unique project paths that have per-project settings
       const projectPaths = new Set<string>();
if (boardBackgroundByProject) {
- Object.keys(boardBackgroundByProject).forEach((p) =>
- projectPaths.add(p)
- );
+ Object.keys(boardBackgroundByProject).forEach((p) => projectPaths.add(p));
}
if (currentWorktreeByProject) {
- Object.keys(currentWorktreeByProject).forEach((p) =>
- projectPaths.add(p)
- );
+ Object.keys(currentWorktreeByProject).forEach((p) => projectPaths.add(p));
}
if (worktreesByProject) {
Object.keys(worktreesByProject).forEach((p) => projectPaths.add(p));
@@ -551,17 +523,15 @@ export class SettingsService {
// Get theme from project object
const project = projects.find((p) => p.path === projectPath);
if (project?.theme) {
- projectSettings.theme = project.theme as ProjectSettings["theme"];
+ projectSettings.theme = project.theme as ProjectSettings['theme'];
}
if (boardBackgroundByProject?.[projectPath]) {
- projectSettings.boardBackground =
- boardBackgroundByProject[projectPath];
+ projectSettings.boardBackground = boardBackgroundByProject[projectPath];
}
if (currentWorktreeByProject?.[projectPath]) {
- projectSettings.currentWorktree =
- currentWorktreeByProject[projectPath];
+ projectSettings.currentWorktree = currentWorktreeByProject[projectPath];
}
if (worktreesByProject?.[projectPath]) {
@@ -573,15 +543,11 @@ export class SettingsService {
migratedProjectCount++;
}
} catch (e) {
- errors.push(
- `Failed to migrate project settings for ${projectPath}: ${e}`
- );
+ errors.push(`Failed to migrate project settings for ${projectPath}: ${e}`);
}
}
- logger.info(
- `Migration complete: ${migratedProjectCount} projects migrated`
- );
+ logger.info(`Migration complete: ${migratedProjectCount} projects migrated`);
return {
success: errors.length === 0,
@@ -591,7 +557,7 @@ export class SettingsService {
errors,
};
} catch (error) {
- logger.error("Migration failed:", error);
+ logger.error('Migration failed:', error);
errors.push(`Migration failed: ${error}`);
return {
success: false,
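
As context for the settings-service hunks above: atomicWriteJson relies on the standard temp-file-then-rename trick so readers never observe a half-written settings file. A minimal standalone sketch of that pattern follows, with node:fs/promises standing in for the project's secureFs wrapper and a '.tmp' suffix assumed for the temp path (neither is taken from this diff):

// Sketch only: temp-file-then-rename, as used by atomicWriteJson above.
import { writeFile, rename, unlink } from 'node:fs/promises';

async function atomicWriteJsonSketch(filePath: string, data: unknown): Promise<void> {
  const tempPath = `${filePath}.tmp`; // assumed naming scheme
  const content = JSON.stringify(data, null, 2);
  try {
    // Write the full payload to a sibling temp file first...
    await writeFile(tempPath, content, 'utf-8');
    // ...then swap it into place in a single rename.
    await rename(tempPath, filePath);
  } catch (error) {
    // Best-effort cleanup; ignore the case where the temp file was never created.
    await unlink(tempPath).catch(() => {});
    throw error;
  }
}
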
diff --git a/apps/server/src/services/terminal-service.ts b/apps/server/src/services/terminal-service.ts
index 6d8faa7f1..7d59633eb 100644
--- a/apps/server/src/services/terminal-service.ts
+++ b/apps/server/src/services/terminal-service.ts
@@ -5,17 +5,29 @@
* Supports cross-platform shell detection including WSL.
*/
-import * as pty from "node-pty";
-import { EventEmitter } from "events";
-import * as os from "os";
-import * as fs from "fs";
+import * as pty from 'node-pty';
+import { EventEmitter } from 'events';
+import * as os from 'os';
+import * as fs from 'fs';
+import * as path from 'path';
// Maximum scrollback buffer size (characters)
const MAX_SCROLLBACK_SIZE = 50000; // ~50KB per terminal
+// Session limit constants - shared with routes/settings.ts
+export const MIN_MAX_SESSIONS = 1;
+export const MAX_MAX_SESSIONS = 1000;
+
+// Maximum number of concurrent terminal sessions
+// Can be overridden via TERMINAL_MAX_SESSIONS environment variable
+// Default set to 1000 - effectively unlimited for most use cases
+let maxSessions = parseInt(process.env.TERMINAL_MAX_SESSIONS || '1000', 10);
+
// Throttle output to prevent overwhelming WebSocket under heavy load
-const OUTPUT_THROTTLE_MS = 16; // ~60fps max update rate
-const OUTPUT_BATCH_SIZE = 8192; // Max bytes to send per batch
+// Using 4ms for responsive input feedback while still preventing flood
+// Note: 16ms caused perceived input lag, especially with backspace
+const OUTPUT_THROTTLE_MS = 4; // ~250fps max update rate for responsive input
+const OUTPUT_BATCH_SIZE = 4096; // Smaller batches for lower latency
export interface TerminalSession {
id: string;
@@ -53,20 +65,20 @@ export class TerminalService extends EventEmitter {
const platform = os.platform();
// Check if running in WSL
- if (platform === "linux" && this.isWSL()) {
+ if (platform === 'linux' && this.isWSL()) {
// In WSL, prefer the user's configured shell or bash
- const userShell = process.env.SHELL || "/bin/bash";
+ const userShell = process.env.SHELL || '/bin/bash';
if (fs.existsSync(userShell)) {
- return { shell: userShell, args: ["--login"] };
+ return { shell: userShell, args: ['--login'] };
}
- return { shell: "/bin/bash", args: ["--login"] };
+ return { shell: '/bin/bash', args: ['--login'] };
}
switch (platform) {
- case "win32": {
+ case 'win32': {
// Windows: prefer PowerShell, fall back to cmd
- const pwsh = "C:\\Windows\\System32\\WindowsPowerShell\\v1.0\\powershell.exe";
- const pwshCore = "C:\\Program Files\\PowerShell\\7\\pwsh.exe";
+ const pwsh = 'C:\\Windows\\System32\\WindowsPowerShell\\v1.0\\powershell.exe';
+ const pwshCore = 'C:\\Program Files\\PowerShell\\7\\pwsh.exe';
if (fs.existsSync(pwshCore)) {
return { shell: pwshCore, args: [] };
@@ -74,32 +86,32 @@ export class TerminalService extends EventEmitter {
if (fs.existsSync(pwsh)) {
return { shell: pwsh, args: [] };
}
- return { shell: "cmd.exe", args: [] };
+ return { shell: 'cmd.exe', args: [] };
}
- case "darwin": {
+ case 'darwin': {
// macOS: prefer user's shell, then zsh, then bash
const userShell = process.env.SHELL;
if (userShell && fs.existsSync(userShell)) {
- return { shell: userShell, args: ["--login"] };
+ return { shell: userShell, args: ['--login'] };
}
- if (fs.existsSync("/bin/zsh")) {
- return { shell: "/bin/zsh", args: ["--login"] };
+ if (fs.existsSync('/bin/zsh')) {
+ return { shell: '/bin/zsh', args: ['--login'] };
}
- return { shell: "/bin/bash", args: ["--login"] };
+ return { shell: '/bin/bash', args: ['--login'] };
}
- case "linux":
+ case 'linux':
default: {
// Linux: prefer user's shell, then bash, then sh
const userShell = process.env.SHELL;
if (userShell && fs.existsSync(userShell)) {
- return { shell: userShell, args: ["--login"] };
+ return { shell: userShell, args: ['--login'] };
}
- if (fs.existsSync("/bin/bash")) {
- return { shell: "/bin/bash", args: ["--login"] };
+ if (fs.existsSync('/bin/bash')) {
+ return { shell: '/bin/bash', args: ['--login'] };
}
- return { shell: "/bin/sh", args: [] };
+ return { shell: '/bin/sh', args: [] };
}
}
}
@@ -110,9 +122,9 @@ export class TerminalService extends EventEmitter {
isWSL(): boolean {
try {
// Check /proc/version for Microsoft/WSL indicators
- if (fs.existsSync("/proc/version")) {
- const version = fs.readFileSync("/proc/version", "utf-8").toLowerCase();
- return version.includes("microsoft") || version.includes("wsl");
+ if (fs.existsSync('/proc/version')) {
+ const version = fs.readFileSync('/proc/version', 'utf-8').toLowerCase();
+ return version.includes('microsoft') || version.includes('wsl');
}
// Check for WSL environment variable
if (process.env.WSL_DISTRO_NAME || process.env.WSLENV) {
@@ -144,6 +156,7 @@ export class TerminalService extends EventEmitter {
/**
* Validate and resolve a working directory path
+ * Includes basic sanitization against null bytes and path normalization
*/
private resolveWorkingDirectory(requestedCwd?: string): string {
const homeDir = os.homedir();
@@ -156,11 +169,23 @@ export class TerminalService extends EventEmitter {
// Clean up the path
let cwd = requestedCwd.trim();
+ // Reject paths with null bytes (could bypass path checks)
+ if (cwd.includes('\0')) {
+ console.warn(`[Terminal] Rejecting path with null byte: ${cwd.replace(/\0/g, '\\0')}`);
+ return homeDir;
+ }
+
// Fix double slashes at start (but not for Windows UNC paths)
- if (cwd.startsWith("//") && !cwd.startsWith("//wsl")) {
+ if (cwd.startsWith('//') && !cwd.startsWith('//wsl')) {
cwd = cwd.slice(1);
}
+ // Normalize the path to resolve . and .. segments
+ // Skip normalization for WSL UNC paths as path.resolve would break them
+ if (!cwd.startsWith('//wsl')) {
+ cwd = path.resolve(cwd);
+ }
+
// Check if path exists and is a directory
try {
const stat = fs.statSync(cwd);
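
Taken together, the checks added in this hunk behave roughly as follows; the paths below are illustrative examples (and assume the directories exist), not cases taken from the diff:

// Illustrative expectations for resolveWorkingDirectory (example paths only):
//   '/tmp/project/../project'  -> '/tmp/project'   (normalized via path.resolve)
//   '//home/user'              -> '/home/user'     (leading '//' collapsed, then resolved)
//   'has\0null'                -> os.homedir()     (null byte rejected)
//   '//wsl$/Ubuntu/home/user'  -> left un-normalized (WSL UNC paths skip path.resolve)
// Anything that later fails the statSync/isDirectory check still falls back to the home directory.
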
@@ -175,11 +200,42 @@ export class TerminalService extends EventEmitter {
}
}
+ /**
+ * Get current session count
+ */
+ getSessionCount(): number {
+ return this.sessions.size;
+ }
+
+ /**
+ * Get maximum allowed sessions
+ */
+ getMaxSessions(): number {
+ return maxSessions;
+ }
+
+ /**
+ * Set maximum allowed sessions (can be called dynamically)
+ */
+ setMaxSessions(limit: number): void {
+ if (limit >= MIN_MAX_SESSIONS && limit <= MAX_MAX_SESSIONS) {
+ maxSessions = limit;
+ console.log(`[Terminal] Max sessions limit updated to ${limit}`);
+ }
+ }
+
/**
* Create a new terminal session
+ * Returns null if the maximum session limit has been reached
*/
- createSession(options: TerminalOptions = {}): TerminalSession {
- const id = `term-${Date.now()}-${Math.random().toString(36).substr(2, 9)}`;
+ createSession(options: TerminalOptions = {}): TerminalSession | null {
+ // Check session limit
+ if (this.sessions.size >= maxSessions) {
+ console.error(`[Terminal] Max sessions (${maxSessions}) reached, refusing new session`);
+ return null;
+ }
+
+ const id = `term-${Date.now()}-${Math.random().toString(36).slice(2, 11)}`;
const { shell: detectedShell, args: shellArgs } = this.detectShell();
const shell = options.shell || detectedShell;
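
Because createSession can now return null, callers have to branch on the result. A hypothetical caller sketch follows; the function name, response shape, and import path are assumptions for illustration only:

// Hypothetical caller of the now-nullable createSession; shapes are illustrative.
import type { TerminalService } from './terminal-service.js';

function handleCreateTerminal(terminals: TerminalService, cwd?: string) {
  const session = terminals.createSession({ cwd });
  if (session === null) {
    // Limit reached: report a retryable condition instead of throwing.
    return {
      ok: false as const,
      error: `Max terminal sessions (${terminals.getMaxSessions()}) reached`,
    };
  }
  return { ok: true as const, sessionId: session.id };
}
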
@@ -188,18 +244,22 @@ export class TerminalService extends EventEmitter {
const cwd = this.resolveWorkingDirectory(options.cwd);
// Build environment with some useful defaults
+ // These settings ensure consistent terminal behavior across platforms
     const env: Record<string, string> = {
...process.env,
- TERM: "xterm-256color",
- COLORTERM: "truecolor",
- TERM_PROGRAM: "automaker-terminal",
+ TERM: 'xterm-256color',
+ COLORTERM: 'truecolor',
+ TERM_PROGRAM: 'automaker-terminal',
+ // Ensure proper locale for character handling
+ LANG: process.env.LANG || 'en_US.UTF-8',
+ LC_ALL: process.env.LC_ALL || process.env.LANG || 'en_US.UTF-8',
...options.env,
};
console.log(`[Terminal] Creating session ${id} with shell: ${shell} in ${cwd}`);
const ptyProcess = pty.spawn(shell, shellArgs, {
- name: "xterm-256color",
+ name: 'xterm-256color',
cols: options.cols || 80,
rows: options.rows || 24,
cwd,
@@ -212,8 +272,8 @@ export class TerminalService extends EventEmitter {
cwd,
createdAt: new Date(),
shell,
- scrollbackBuffer: "",
- outputBuffer: "",
+ scrollbackBuffer: '',
+ outputBuffer: '',
flushTimeout: null,
resizeInProgress: false,
resizeDebounceTimeout: null,
@@ -233,12 +293,12 @@ export class TerminalService extends EventEmitter {
// Schedule another flush for remaining data
session.flushTimeout = setTimeout(flushOutput, OUTPUT_THROTTLE_MS);
} else {
- session.outputBuffer = "";
+ session.outputBuffer = '';
session.flushTimeout = null;
}
this.dataCallbacks.forEach((cb) => cb(id, dataToSend));
- this.emit("data", id, dataToSend);
+ this.emit('data', id, dataToSend);
};
// Forward data events with throttling
@@ -271,7 +331,7 @@ export class TerminalService extends EventEmitter {
console.log(`[Terminal] Session ${id} exited with code ${exitCode}`);
this.sessions.delete(id);
this.exitCallbacks.forEach((cb) => cb(id, exitCode));
- this.emit("exit", id, exitCode);
+ this.emit('exit', id, exitCode);
});
console.log(`[Terminal] Session ${id} created successfully`);
@@ -333,6 +393,7 @@ export class TerminalService extends EventEmitter {
/**
* Kill a terminal session
+ * Attempts graceful SIGTERM first, then SIGKILL after 1 second if still alive
*/
killSession(sessionId: string): boolean {
const session = this.sessions.get(sessionId);
@@ -350,12 +411,32 @@ export class TerminalService extends EventEmitter {
clearTimeout(session.resizeDebounceTimeout);
session.resizeDebounceTimeout = null;
}
- session.pty.kill();
- this.sessions.delete(sessionId);
- console.log(`[Terminal] Session ${sessionId} killed`);
+
+ // First try graceful SIGTERM to allow process cleanup
+ console.log(`[Terminal] Session ${sessionId} sending SIGTERM`);
+ session.pty.kill('SIGTERM');
+
+ // Schedule SIGKILL fallback if process doesn't exit gracefully
+ // The onExit handler will remove session from map when it actually exits
+ setTimeout(() => {
+ if (this.sessions.has(sessionId)) {
+ console.log(`[Terminal] Session ${sessionId} still alive after SIGTERM, sending SIGKILL`);
+ try {
+ session.pty.kill('SIGKILL');
+ } catch {
+ // Process may have already exited
+ }
+ // Force remove from map if still present
+ this.sessions.delete(sessionId);
+ }
+ }, 1000);
+
+ console.log(`[Terminal] Session ${sessionId} kill initiated`);
return true;
} catch (error) {
console.error(`[Terminal] Error killing session ${sessionId}:`, error);
+ // Still try to remove from map even if kill fails
+ this.sessions.delete(sessionId);
return false;
}
}
@@ -386,7 +467,7 @@ export class TerminalService extends EventEmitter {
// Clear any pending output that hasn't been flushed yet
// This data is already in scrollbackBuffer
- session.outputBuffer = "";
+ session.outputBuffer = '';
if (session.flushTimeout) {
clearTimeout(session.flushTimeout);
session.flushTimeout = null;
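
The kill-session change above is the common SIGTERM-then-SIGKILL escalation. Stripped of the session bookkeeping, the pattern looks roughly like this generic sketch over any killable process handle (not the service's exact code; node-pty's IPty happens to fit this shape):

// Generic sketch of graceful kill with a timed SIGKILL fallback.
function killGracefully(
  proc: { kill: (signal?: string) => void; onExit: (cb: () => void) => void },
  graceMs = 1000
): void {
  let exited = false;
  proc.onExit(() => {
    exited = true;
  });

  // Ask politely first so the shell can run its cleanup handlers.
  proc.kill('SIGTERM');

  // Escalate only if the process is still alive after the grace period.
  setTimeout(() => {
    if (!exited) {
      try {
        proc.kill('SIGKILL');
      } catch {
        // Process may have exited between the check and the kill.
      }
    }
  }, graceMs);
}
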
diff --git a/apps/server/src/types/settings.ts b/apps/server/src/types/settings.ts
index ef9f32d63..4b4fa3ac5 100644
--- a/apps/server/src/types/settings.ts
+++ b/apps/server/src/types/settings.ts
@@ -22,7 +22,7 @@ export type {
BoardBackgroundSettings,
WorktreeInfo,
ProjectSettings,
-} from "@automaker/types";
+} from '@automaker/types';
export {
DEFAULT_KEYBOARD_SHORTCUTS,
@@ -32,4 +32,4 @@ export {
SETTINGS_VERSION,
CREDENTIALS_VERSION,
PROJECT_SETTINGS_VERSION,
-} from "@automaker/types";
+} from '@automaker/types';
diff --git a/apps/server/tests/fixtures/images.ts b/apps/server/tests/fixtures/images.ts
index b14f4adfc..f7e768c63 100644
--- a/apps/server/tests/fixtures/images.ts
+++ b/apps/server/tests/fixtures/images.ts
@@ -4,11 +4,11 @@
// 1x1 transparent PNG base64 data
export const pngBase64Fixture =
- "iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAYAAAAfFcSJAAAADUlEQVR42mNk+M9QDwADhgGAWjR9awAAAABJRU5ErkJggg==";
+ 'iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAYAAAAfFcSJAAAADUlEQVR42mNk+M9QDwADhgGAWjR9awAAAABJRU5ErkJggg==';
export const imageDataFixture = {
base64: pngBase64Fixture,
- mimeType: "image/png",
- filename: "test.png",
- originalPath: "/path/to/test.png",
+ mimeType: 'image/png',
+ filename: 'test.png',
+ originalPath: '/path/to/test.png',
};
diff --git a/apps/server/tests/fixtures/messages.ts b/apps/server/tests/fixtures/messages.ts
index 731131e1f..56eb75f79 100644
--- a/apps/server/tests/fixtures/messages.ts
+++ b/apps/server/tests/fixtures/messages.ts
@@ -2,38 +2,33 @@
* Message fixtures for testing providers and lib utilities
*/
-import type {
- ConversationMessage,
- ProviderMessage,
- ContentBlock,
-} from "@automaker/types";
+import type { ConversationMessage, ProviderMessage, ContentBlock } from '@automaker/types';
export const conversationHistoryFixture: ConversationMessage[] = [
{
- role: "user",
- content: "Hello, can you help me?",
+ role: 'user',
+ content: 'Hello, can you help me?',
},
{
- role: "assistant",
- content: "Of course! How can I assist you today?",
+ role: 'assistant',
+ content: 'Of course! How can I assist you today?',
},
{
- role: "user",
+ role: 'user',
content: [
- { type: "text", text: "What is in this image?" },
+ { type: 'text', text: 'What is in this image?' },
{
- type: "image",
- source: { type: "base64", media_type: "image/png", data: "base64data" },
+ type: 'image',
+ source: { type: 'base64', media_type: 'image/png', data: 'base64data' },
},
],
},
];
export const claudeProviderMessageFixture: ProviderMessage = {
- type: "assistant",
+ type: 'assistant',
message: {
- role: "assistant",
- content: [{ type: "text", text: "This is a test response" }],
+ role: 'assistant',
+ content: [{ type: 'text', text: 'This is a test response' }],
},
};
-
diff --git a/apps/server/tests/integration/helpers/git-test-repo.ts b/apps/server/tests/integration/helpers/git-test-repo.ts
index f307bbb16..4ec959264 100644
--- a/apps/server/tests/integration/helpers/git-test-repo.ts
+++ b/apps/server/tests/integration/helpers/git-test-repo.ts
@@ -1,11 +1,11 @@
/**
* Helper for creating test git repositories for integration tests
*/
-import { exec } from "child_process";
-import { promisify } from "util";
-import * as fs from "fs/promises";
-import * as path from "path";
-import * as os from "os";
+import { exec } from 'child_process';
+import { promisify } from 'util';
+import * as fs from 'fs/promises';
+import * as path from 'path';
+import * as os from 'os';
const execAsync = promisify(exec);
@@ -18,36 +18,36 @@ export interface TestRepo {
* Create a temporary git repository for testing
*/
 export async function createTestGitRepo(): Promise<TestRepo> {
- const tmpDir = await fs.mkdtemp(path.join(os.tmpdir(), "automaker-test-"));
+ const tmpDir = await fs.mkdtemp(path.join(os.tmpdir(), 'automaker-test-'));
// Initialize git repo
- await execAsync("git init", { cwd: tmpDir });
+ await execAsync('git init', { cwd: tmpDir });
await execAsync('git config user.email "test@example.com"', { cwd: tmpDir });
await execAsync('git config user.name "Test User"', { cwd: tmpDir });
// Create initial commit
- await fs.writeFile(path.join(tmpDir, "README.md"), "# Test Project\n");
- await execAsync("git add .", { cwd: tmpDir });
+ await fs.writeFile(path.join(tmpDir, 'README.md'), '# Test Project\n');
+ await execAsync('git add .', { cwd: tmpDir });
await execAsync('git commit -m "Initial commit"', { cwd: tmpDir });
// Create main branch explicitly
- await execAsync("git branch -M main", { cwd: tmpDir });
+ await execAsync('git branch -M main', { cwd: tmpDir });
return {
path: tmpDir,
cleanup: async () => {
try {
// Remove all worktrees first
- const { stdout } = await execAsync("git worktree list --porcelain", {
+ const { stdout } = await execAsync('git worktree list --porcelain', {
cwd: tmpDir,
- }).catch(() => ({ stdout: "" }));
+ }).catch(() => ({ stdout: '' }));
const worktrees = stdout
- .split("\n\n")
+ .split('\n\n')
.slice(1) // Skip main worktree
.map((block) => {
- const pathLine = block.split("\n").find((line) => line.startsWith("worktree "));
- return pathLine ? pathLine.replace("worktree ", "") : null;
+ const pathLine = block.split('\n').find((line) => line.startsWith('worktree '));
+ return pathLine ? pathLine.replace('worktree ', '') : null;
})
.filter(Boolean);
 @@ -64,7 +64,7 @@ export async function createTestGitRepo(): Promise<TestRepo> {
// Remove the repository
await fs.rm(tmpDir, { recursive: true, force: true });
} catch (error) {
- console.error("Failed to cleanup test repo:", error);
+ console.error('Failed to cleanup test repo:', error);
}
},
};
@@ -78,24 +78,21 @@ export async function createTestFeature(
featureId: string,
featureData: any
 ): Promise<void> {
- const featuresDir = path.join(repoPath, ".automaker", "features");
+ const featuresDir = path.join(repoPath, '.automaker', 'features');
const featureDir = path.join(featuresDir, featureId);
await fs.mkdir(featureDir, { recursive: true });
- await fs.writeFile(
- path.join(featureDir, "feature.json"),
- JSON.stringify(featureData, null, 2)
- );
+ await fs.writeFile(path.join(featureDir, 'feature.json'), JSON.stringify(featureData, null, 2));
}
/**
* Get list of git branches
*/
 export async function listBranches(repoPath: string): Promise<string[]> {
- const { stdout } = await execAsync("git branch --list", { cwd: repoPath });
+ const { stdout } = await execAsync('git branch --list', { cwd: repoPath });
return stdout
- .split("\n")
- .map((line) => line.trim().replace(/^[*+]\s*/, ""))
+ .split('\n')
+ .map((line) => line.trim().replace(/^[*+]\s*/, ''))
.filter(Boolean);
}
 @@ -104,16 +101,16 @@ export async function listBranches(repoPath: string): Promise<string[]> {
*/
 export async function listWorktrees(repoPath: string): Promise<string[]> {
try {
- const { stdout } = await execAsync("git worktree list --porcelain", {
+ const { stdout } = await execAsync('git worktree list --porcelain', {
cwd: repoPath,
});
return stdout
- .split("\n\n")
+ .split('\n\n')
.slice(1) // Skip main worktree
.map((block) => {
- const pathLine = block.split("\n").find((line) => line.startsWith("worktree "));
- return pathLine ? pathLine.replace("worktree ", "") : null;
+ const pathLine = block.split('\n').find((line) => line.startsWith('worktree '));
+ return pathLine ? pathLine.replace('worktree ', '') : null;
})
.filter(Boolean) as string[];
} catch {
 @@ -124,10 +121,7 @@ export async function listWorktrees(repoPath: string): Promise<string[]> {
/**
* Check if a branch exists
*/
-export async function branchExists(
- repoPath: string,
- branchName: string
 -): Promise<boolean> {
 +export async function branchExists(repoPath: string, branchName: string): Promise<boolean> {
const branches = await listBranches(repoPath);
return branches.includes(branchName);
}
@@ -135,10 +129,7 @@ export async function branchExists(
/**
* Check if a worktree exists
*/
-export async function worktreeExists(
- repoPath: string,
- worktreePath: string
 -): Promise<boolean> {
 +export async function worktreeExists(repoPath: string, worktreePath: string): Promise<boolean> {
const worktrees = await listWorktrees(repoPath);
return worktrees.some((wt) => wt === worktreePath);
}
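
For orientation before the test diffs that follow, the helper above is typically consumed along these lines. This is a minimal vitest sketch with an illustrative assertion and an assumed relative import path, not a test from the repo:

// Minimal usage sketch of the git-test-repo helper.
import { describe, it, expect, afterEach } from 'vitest';
import { createTestGitRepo, listBranches, type TestRepo } from './git-test-repo.js';

describe('git test repo helper', () => {
  let repo: TestRepo | null = null;

  afterEach(async () => {
    // Always remove the temp repo, even when an assertion fails.
    await repo?.cleanup();
    repo = null;
  });

  it('creates a repo with a main branch', async () => {
    repo = await createTestGitRepo();
    expect(await listBranches(repo.path)).toContain('main');
  });
});
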
diff --git a/apps/server/tests/integration/routes/worktree/create.integration.test.ts b/apps/server/tests/integration/routes/worktree/create.integration.test.ts
index 03b85e7ec..433b610ab 100644
--- a/apps/server/tests/integration/routes/worktree/create.integration.test.ts
+++ b/apps/server/tests/integration/routes/worktree/create.integration.test.ts
@@ -1,22 +1,20 @@
-import { describe, it, expect, vi, afterEach } from "vitest";
-import { createCreateHandler } from "@/routes/worktree/routes/create.js";
-import { AUTOMAKER_INITIAL_COMMIT_MESSAGE } from "@/routes/worktree/common.js";
-import { exec } from "child_process";
-import { promisify } from "util";
-import * as fs from "fs/promises";
-import * as os from "os";
-import * as path from "path";
+import { describe, it, expect, vi, afterEach } from 'vitest';
+import { createCreateHandler } from '@/routes/worktree/routes/create.js';
+import { AUTOMAKER_INITIAL_COMMIT_MESSAGE } from '@/routes/worktree/common.js';
+import { exec } from 'child_process';
+import { promisify } from 'util';
+import * as fs from 'fs/promises';
+import * as os from 'os';
+import * as path from 'path';
const execAsync = promisify(exec);
-describe("worktree create route - repositories without commits", () => {
+describe('worktree create route - repositories without commits', () => {
let repoPath: string | null = null;
async function initRepoWithoutCommit() {
- repoPath = await fs.mkdtemp(
- path.join(os.tmpdir(), "automaker-no-commit-")
- );
- await execAsync("git init", { cwd: repoPath });
+ repoPath = await fs.mkdtemp(path.join(os.tmpdir(), 'automaker-no-commit-'));
+ await execAsync('git init', { cwd: repoPath });
await execAsync('git config user.email "test@example.com"', {
cwd: repoPath,
});
@@ -32,14 +30,14 @@ describe("worktree create route - repositories without commits", () => {
repoPath = null;
});
- it("creates an initial commit before adding a worktree when HEAD is missing", async () => {
+ it('creates an initial commit before adding a worktree when HEAD is missing', async () => {
await initRepoWithoutCommit();
const handler = createCreateHandler();
const json = vi.fn();
const status = vi.fn().mockReturnThis();
const req = {
- body: { projectPath: repoPath, branchName: "feature/no-head" },
+ body: { projectPath: repoPath, branchName: 'feature/no-head' },
} as any;
const res = {
json,
@@ -53,17 +51,12 @@ describe("worktree create route - repositories without commits", () => {
const payload = json.mock.calls[0][0];
expect(payload.success).toBe(true);
- const { stdout: commitCount } = await execAsync(
- "git rev-list --count HEAD",
- { cwd: repoPath! }
- );
+ const { stdout: commitCount } = await execAsync('git rev-list --count HEAD', {
+ cwd: repoPath!,
+ });
expect(Number(commitCount.trim())).toBeGreaterThan(0);
- const { stdout: latestMessage } = await execAsync(
- "git log -1 --pretty=%B",
- { cwd: repoPath! }
- );
+ const { stdout: latestMessage } = await execAsync('git log -1 --pretty=%B', { cwd: repoPath! });
expect(latestMessage.trim()).toBe(AUTOMAKER_INITIAL_COMMIT_MESSAGE);
});
});
-
diff --git a/apps/server/tests/integration/services/auto-mode-service.integration.test.ts b/apps/server/tests/integration/services/auto-mode-service.integration.test.ts
index ebf0857fd..d9d6ee132 100644
--- a/apps/server/tests/integration/services/auto-mode-service.integration.test.ts
+++ b/apps/server/tests/integration/services/auto-mode-service.integration.test.ts
@@ -1,7 +1,7 @@
-import { describe, it, expect, vi, beforeEach, afterEach } from "vitest";
-import { AutoModeService } from "@/services/auto-mode-service.js";
-import { ProviderFactory } from "@/providers/provider-factory.js";
-import { FeatureLoader } from "@/services/feature-loader.js";
+import { describe, it, expect, vi, beforeEach, afterEach } from 'vitest';
+import { AutoModeService } from '@/services/auto-mode-service.js';
+import { ProviderFactory } from '@/providers/provider-factory.js';
+import { FeatureLoader } from '@/services/feature-loader.js';
import {
createTestGitRepo,
createTestFeature,
@@ -10,17 +10,17 @@ import {
branchExists,
worktreeExists,
type TestRepo,
-} from "../helpers/git-test-repo.js";
-import * as fs from "fs/promises";
-import * as path from "path";
-import { exec } from "child_process";
-import { promisify } from "util";
+} from '../helpers/git-test-repo.js';
+import * as fs from 'fs/promises';
+import * as path from 'path';
+import { exec } from 'child_process';
+import { promisify } from 'util';
const execAsync = promisify(exec);
-vi.mock("@/providers/provider-factory.js");
+vi.mock('@/providers/provider-factory.js');
-describe("auto-mode-service.ts (integration)", () => {
+describe('auto-mode-service.ts (integration)', () => {
let service: AutoModeService;
let testRepo: TestRepo;
let featureLoader: FeatureLoader;
@@ -46,22 +46,22 @@ describe("auto-mode-service.ts (integration)", () => {
}
});
- describe("worktree operations", () => {
- it("should use existing git worktree for feature", async () => {
- const branchName = "feature/test-feature-1";
-
+ describe('worktree operations', () => {
+ it('should use existing git worktree for feature', async () => {
+ const branchName = 'feature/test-feature-1';
+
// Create a test feature with branchName set
- await createTestFeature(testRepo.path, "test-feature-1", {
- id: "test-feature-1",
- category: "test",
- description: "Test feature",
- status: "pending",
+ await createTestFeature(testRepo.path, 'test-feature-1', {
+ id: 'test-feature-1',
+ category: 'test',
+ description: 'Test feature',
+ status: 'pending',
branchName: branchName,
});
// Create worktree before executing (worktrees are now created when features are added/edited)
- const worktreesDir = path.join(testRepo.path, ".worktrees");
- const worktreePath = path.join(worktreesDir, "test-feature-1");
+ const worktreesDir = path.join(testRepo.path, '.worktrees');
+ const worktreePath = path.join(worktreesDir, 'test-feature-1');
await fs.mkdir(worktreesDir, { recursive: true });
await execAsync(`git worktree add -b ${branchName} "${worktreePath}" HEAD`, {
cwd: testRepo.path,
@@ -69,30 +69,28 @@ describe("auto-mode-service.ts (integration)", () => {
// Mock provider to complete quickly
const mockProvider = {
- getName: () => "claude",
+ getName: () => 'claude',
executeQuery: async function* () {
yield {
- type: "assistant",
+ type: 'assistant',
message: {
- role: "assistant",
- content: [{ type: "text", text: "Feature implemented" }],
+ role: 'assistant',
+ content: [{ type: 'text', text: 'Feature implemented' }],
},
};
yield {
- type: "result",
- subtype: "success",
+ type: 'result',
+ subtype: 'success',
};
},
};
- vi.mocked(ProviderFactory.getProviderForModel).mockReturnValue(
- mockProvider as any
- );
+ vi.mocked(ProviderFactory.getProviderForModel).mockReturnValue(mockProvider as any);
// Execute feature with worktrees enabled
await service.executeFeature(
testRepo.path,
- "test-feature-1",
+ 'test-feature-1',
true, // useWorktrees
false // isAutoMode
);
@@ -107,8 +105,8 @@ describe("auto-mode-service.ts (integration)", () => {
const worktrees = await listWorktrees(testRepo.path);
expect(worktrees.length).toBeGreaterThan(0);
// Verify that at least one worktree path contains our feature ID
- const worktreePathsMatch = worktrees.some(wt =>
- wt.includes("test-feature-1") || wt.includes(".worktrees")
+ const worktreePathsMatch = worktrees.some(
+ (wt) => wt.includes('test-feature-1') || wt.includes('.worktrees')
);
expect(worktreePathsMatch).toBe(true);
@@ -116,243 +114,200 @@ describe("auto-mode-service.ts (integration)", () => {
// This is expected behavior - manual cleanup is required
}, 30000);
- it("should handle error gracefully", async () => {
- await createTestFeature(testRepo.path, "test-feature-error", {
- id: "test-feature-error",
- category: "test",
- description: "Test feature that errors",
- status: "pending",
+ it('should handle error gracefully', async () => {
+ await createTestFeature(testRepo.path, 'test-feature-error', {
+ id: 'test-feature-error',
+ category: 'test',
+ description: 'Test feature that errors',
+ status: 'pending',
});
// Mock provider that throws error
const mockProvider = {
- getName: () => "claude",
+ getName: () => 'claude',
executeQuery: async function* () {
- throw new Error("Provider error");
+ throw new Error('Provider error');
},
};
- vi.mocked(ProviderFactory.getProviderForModel).mockReturnValue(
- mockProvider as any
- );
+ vi.mocked(ProviderFactory.getProviderForModel).mockReturnValue(mockProvider as any);
// Execute feature (should handle error)
- await service.executeFeature(
- testRepo.path,
- "test-feature-error",
- true,
- false
- );
+ await service.executeFeature(testRepo.path, 'test-feature-error', true, false);
// Verify feature status was updated to backlog (error status)
- const feature = await featureLoader.get(
- testRepo.path,
- "test-feature-error"
- );
- expect(feature?.status).toBe("backlog");
+ const feature = await featureLoader.get(testRepo.path, 'test-feature-error');
+ expect(feature?.status).toBe('backlog');
}, 30000);
- it("should work without worktrees", async () => {
- await createTestFeature(testRepo.path, "test-no-worktree", {
- id: "test-no-worktree",
- category: "test",
- description: "Test without worktree",
- status: "pending",
+ it('should work without worktrees', async () => {
+ await createTestFeature(testRepo.path, 'test-no-worktree', {
+ id: 'test-no-worktree',
+ category: 'test',
+ description: 'Test without worktree',
+ status: 'pending',
});
const mockProvider = {
- getName: () => "claude",
+ getName: () => 'claude',
executeQuery: async function* () {
yield {
- type: "result",
- subtype: "success",
+ type: 'result',
+ subtype: 'success',
};
},
};
- vi.mocked(ProviderFactory.getProviderForModel).mockReturnValue(
- mockProvider as any
- );
+ vi.mocked(ProviderFactory.getProviderForModel).mockReturnValue(mockProvider as any);
// Execute without worktrees
await service.executeFeature(
testRepo.path,
- "test-no-worktree",
+ 'test-no-worktree',
false, // useWorktrees = false
false
);
// Feature should be updated successfully
- const feature = await featureLoader.get(
- testRepo.path,
- "test-no-worktree"
- );
- expect(feature?.status).toBe("waiting_approval");
+ const feature = await featureLoader.get(testRepo.path, 'test-no-worktree');
+ expect(feature?.status).toBe('waiting_approval');
}, 30000);
});
- describe("feature execution", () => {
- it("should execute feature and update status", async () => {
- await createTestFeature(testRepo.path, "feature-exec-1", {
- id: "feature-exec-1",
- category: "ui",
- description: "Execute this feature",
- status: "pending",
+ describe('feature execution', () => {
+ it('should execute feature and update status', async () => {
+ await createTestFeature(testRepo.path, 'feature-exec-1', {
+ id: 'feature-exec-1',
+ category: 'ui',
+ description: 'Execute this feature',
+ status: 'pending',
});
const mockProvider = {
- getName: () => "claude",
+ getName: () => 'claude',
executeQuery: async function* () {
yield {
- type: "assistant",
+ type: 'assistant',
message: {
- role: "assistant",
- content: [{ type: "text", text: "Implemented the feature" }],
+ role: 'assistant',
+ content: [{ type: 'text', text: 'Implemented the feature' }],
},
};
yield {
- type: "result",
- subtype: "success",
+ type: 'result',
+ subtype: 'success',
};
},
};
- vi.mocked(ProviderFactory.getProviderForModel).mockReturnValue(
- mockProvider as any
- );
+ vi.mocked(ProviderFactory.getProviderForModel).mockReturnValue(mockProvider as any);
await service.executeFeature(
testRepo.path,
- "feature-exec-1",
+ 'feature-exec-1',
false, // Don't use worktrees so agent output is saved to main project
false
);
// Check feature status was updated
- const feature = await featureLoader.get(testRepo.path, "feature-exec-1");
- expect(feature?.status).toBe("waiting_approval");
+ const feature = await featureLoader.get(testRepo.path, 'feature-exec-1');
+ expect(feature?.status).toBe('waiting_approval');
// Check agent output was saved
- const agentOutput = await featureLoader.getAgentOutput(
- testRepo.path,
- "feature-exec-1"
- );
+ const agentOutput = await featureLoader.getAgentOutput(testRepo.path, 'feature-exec-1');
expect(agentOutput).toBeTruthy();
- expect(agentOutput).toContain("Implemented the feature");
+ expect(agentOutput).toContain('Implemented the feature');
}, 30000);
- it("should handle feature not found", async () => {
+ it('should handle feature not found', async () => {
const mockProvider = {
- getName: () => "claude",
+ getName: () => 'claude',
executeQuery: async function* () {
yield {
- type: "result",
- subtype: "success",
+ type: 'result',
+ subtype: 'success',
};
},
};
- vi.mocked(ProviderFactory.getProviderForModel).mockReturnValue(
- mockProvider as any
- );
+ vi.mocked(ProviderFactory.getProviderForModel).mockReturnValue(mockProvider as any);
// Try to execute non-existent feature
- await service.executeFeature(
- testRepo.path,
- "nonexistent-feature",
- true,
- false
- );
+ await service.executeFeature(testRepo.path, 'nonexistent-feature', true, false);
// Should emit error event
expect(mockEvents.emit).toHaveBeenCalledWith(
expect.any(String),
expect.objectContaining({
- featureId: "nonexistent-feature",
- error: expect.stringContaining("not found"),
+ featureId: 'nonexistent-feature',
+ error: expect.stringContaining('not found'),
})
);
}, 30000);
- it("should prevent duplicate feature execution", async () => {
- await createTestFeature(testRepo.path, "feature-dup", {
- id: "feature-dup",
- category: "test",
- description: "Duplicate test",
- status: "pending",
+ it('should prevent duplicate feature execution', async () => {
+ await createTestFeature(testRepo.path, 'feature-dup', {
+ id: 'feature-dup',
+ category: 'test',
+ description: 'Duplicate test',
+ status: 'pending',
});
const mockProvider = {
- getName: () => "claude",
+ getName: () => 'claude',
executeQuery: async function* () {
// Simulate slow execution
await new Promise((resolve) => setTimeout(resolve, 500));
yield {
- type: "result",
- subtype: "success",
+ type: 'result',
+ subtype: 'success',
};
},
};
- vi.mocked(ProviderFactory.getProviderForModel).mockReturnValue(
- mockProvider as any
- );
+ vi.mocked(ProviderFactory.getProviderForModel).mockReturnValue(mockProvider as any);
// Start first execution
- const promise1 = service.executeFeature(
- testRepo.path,
- "feature-dup",
- false,
- false
- );
+ const promise1 = service.executeFeature(testRepo.path, 'feature-dup', false, false);
// Try to start second execution (should throw)
await expect(
- service.executeFeature(testRepo.path, "feature-dup", false, false)
- ).rejects.toThrow("already running");
+ service.executeFeature(testRepo.path, 'feature-dup', false, false)
+ ).rejects.toThrow('already running');
await promise1;
}, 30000);
- it("should use feature-specific model", async () => {
- await createTestFeature(testRepo.path, "feature-model", {
- id: "feature-model",
- category: "test",
- description: "Model test",
- status: "pending",
- model: "claude-sonnet-4-20250514",
+ it('should use feature-specific model', async () => {
+ await createTestFeature(testRepo.path, 'feature-model', {
+ id: 'feature-model',
+ category: 'test',
+ description: 'Model test',
+ status: 'pending',
+ model: 'claude-sonnet-4-20250514',
});
const mockProvider = {
- getName: () => "claude",
+ getName: () => 'claude',
executeQuery: async function* () {
yield {
- type: "result",
- subtype: "success",
+ type: 'result',
+ subtype: 'success',
};
},
};
- vi.mocked(ProviderFactory.getProviderForModel).mockReturnValue(
- mockProvider as any
- );
+ vi.mocked(ProviderFactory.getProviderForModel).mockReturnValue(mockProvider as any);
- await service.executeFeature(
- testRepo.path,
- "feature-model",
- false,
- false
- );
+ await service.executeFeature(testRepo.path, 'feature-model', false, false);
// Should have used claude-sonnet-4-20250514
- expect(ProviderFactory.getProviderForModel).toHaveBeenCalledWith(
- "claude-sonnet-4-20250514"
- );
+ expect(ProviderFactory.getProviderForModel).toHaveBeenCalledWith('claude-sonnet-4-20250514');
}, 30000);
});
- describe("auto loop", () => {
- it("should start and stop auto loop", async () => {
+ describe('auto loop', () => {
+ it('should start and stop auto loop', async () => {
const startPromise = service.startAutoLoop(testRepo.path, 2);
// Give it time to start
@@ -365,35 +320,33 @@ describe("auto-mode-service.ts (integration)", () => {
await startPromise.catch(() => {}); // Cleanup
}, 10000);
- it("should process pending features in auto loop", async () => {
+ it('should process pending features in auto loop', async () => {
// Create multiple pending features
- await createTestFeature(testRepo.path, "auto-1", {
- id: "auto-1",
- category: "test",
- description: "Auto feature 1",
- status: "pending",
+ await createTestFeature(testRepo.path, 'auto-1', {
+ id: 'auto-1',
+ category: 'test',
+ description: 'Auto feature 1',
+ status: 'pending',
});
- await createTestFeature(testRepo.path, "auto-2", {
- id: "auto-2",
- category: "test",
- description: "Auto feature 2",
- status: "pending",
+ await createTestFeature(testRepo.path, 'auto-2', {
+ id: 'auto-2',
+ category: 'test',
+ description: 'Auto feature 2',
+ status: 'pending',
});
const mockProvider = {
- getName: () => "claude",
+ getName: () => 'claude',
executeQuery: async function* () {
yield {
- type: "result",
- subtype: "success",
+ type: 'result',
+ subtype: 'success',
};
},
};
- vi.mocked(ProviderFactory.getProviderForModel).mockReturnValue(
- mockProvider as any
- );
+ vi.mocked(ProviderFactory.getProviderForModel).mockReturnValue(mockProvider as any);
// Start auto loop
const startPromise = service.startAutoLoop(testRepo.path, 2);
@@ -406,25 +359,25 @@ describe("auto-mode-service.ts (integration)", () => {
await startPromise.catch(() => {});
// Check that features were updated
- const feature1 = await featureLoader.get(testRepo.path, "auto-1");
- const feature2 = await featureLoader.get(testRepo.path, "auto-2");
+ const feature1 = await featureLoader.get(testRepo.path, 'auto-1');
+ const feature2 = await featureLoader.get(testRepo.path, 'auto-2');
// At least one should have been processed
const processedCount = [feature1, feature2].filter(
- (f) => f?.status === "waiting_approval" || f?.status === "in_progress"
+ (f) => f?.status === 'waiting_approval' || f?.status === 'in_progress'
).length;
expect(processedCount).toBeGreaterThan(0);
}, 15000);
- it("should respect max concurrency", async () => {
+ it('should respect max concurrency', async () => {
// Create 5 features
for (let i = 1; i <= 5; i++) {
await createTestFeature(testRepo.path, `concurrent-${i}`, {
id: `concurrent-${i}`,
- category: "test",
+ category: 'test',
description: `Concurrent feature ${i}`,
- status: "pending",
+ status: 'pending',
});
}
@@ -432,7 +385,7 @@ describe("auto-mode-service.ts (integration)", () => {
let maxConcurrent = 0;
const mockProvider = {
- getName: () => "claude",
+ getName: () => 'claude',
executeQuery: async function* () {
concurrentCount++;
maxConcurrent = Math.max(maxConcurrent, concurrentCount);
@@ -443,15 +396,13 @@ describe("auto-mode-service.ts (integration)", () => {
concurrentCount--;
yield {
- type: "result",
- subtype: "success",
+ type: 'result',
+ subtype: 'success',
};
},
};
- vi.mocked(ProviderFactory.getProviderForModel).mockReturnValue(
- mockProvider as any
- );
+ vi.mocked(ProviderFactory.getProviderForModel).mockReturnValue(mockProvider as any);
// Start with max concurrency of 2
const startPromise = service.startAutoLoop(testRepo.path, 2);
@@ -466,7 +417,7 @@ describe("auto-mode-service.ts (integration)", () => {
expect(maxConcurrent).toBeLessThanOrEqual(2);
}, 15000);
- it("should emit auto mode events", async () => {
+ it('should emit auto mode events', async () => {
const startPromise = service.startAutoLoop(testRepo.path, 1);
// Wait for start event
@@ -474,7 +425,7 @@ describe("auto-mode-service.ts (integration)", () => {
// Check start event was emitted
const startEvent = mockEvents.emit.mock.calls.find((call) =>
- call[1]?.message?.includes("Auto mode started")
+ call[1]?.message?.includes('Auto mode started')
);
expect(startEvent).toBeTruthy();
@@ -484,74 +435,69 @@ describe("auto-mode-service.ts (integration)", () => {
// Check stop event was emitted (emitted immediately by stopAutoLoop)
const stopEvent = mockEvents.emit.mock.calls.find(
(call) =>
- call[1]?.type === "auto_mode_stopped" ||
- call[1]?.message?.includes("Auto mode stopped")
+ call[1]?.type === 'auto_mode_stopped' || call[1]?.message?.includes('Auto mode stopped')
);
expect(stopEvent).toBeTruthy();
}, 10000);
});
- describe("error handling", () => {
- it("should handle provider errors gracefully", async () => {
- await createTestFeature(testRepo.path, "error-feature", {
- id: "error-feature",
- category: "test",
- description: "Error test",
- status: "pending",
+ describe('error handling', () => {
+ it('should handle provider errors gracefully', async () => {
+ await createTestFeature(testRepo.path, 'error-feature', {
+ id: 'error-feature',
+ category: 'test',
+ description: 'Error test',
+ status: 'pending',
});
const mockProvider = {
- getName: () => "claude",
+ getName: () => 'claude',
executeQuery: async function* () {
- throw new Error("Provider execution failed");
+ throw new Error('Provider execution failed');
},
};
- vi.mocked(ProviderFactory.getProviderForModel).mockReturnValue(
- mockProvider as any
- );
+ vi.mocked(ProviderFactory.getProviderForModel).mockReturnValue(mockProvider as any);
// Should not throw
- await service.executeFeature(testRepo.path, "error-feature", true, false);
+ await service.executeFeature(testRepo.path, 'error-feature', true, false);
// Feature should be marked as backlog (error status)
- const feature = await featureLoader.get(testRepo.path, "error-feature");
- expect(feature?.status).toBe("backlog");
+ const feature = await featureLoader.get(testRepo.path, 'error-feature');
+ expect(feature?.status).toBe('backlog');
}, 30000);
- it("should continue auto loop after feature error", async () => {
- await createTestFeature(testRepo.path, "fail-1", {
- id: "fail-1",
- category: "test",
- description: "Will fail",
- status: "pending",
+ it('should continue auto loop after feature error', async () => {
+ await createTestFeature(testRepo.path, 'fail-1', {
+ id: 'fail-1',
+ category: 'test',
+ description: 'Will fail',
+ status: 'pending',
});
- await createTestFeature(testRepo.path, "success-1", {
- id: "success-1",
- category: "test",
- description: "Will succeed",
- status: "pending",
+ await createTestFeature(testRepo.path, 'success-1', {
+ id: 'success-1',
+ category: 'test',
+ description: 'Will succeed',
+ status: 'pending',
});
let callCount = 0;
const mockProvider = {
- getName: () => "claude",
+ getName: () => 'claude',
executeQuery: async function* () {
callCount++;
if (callCount === 1) {
- throw new Error("First feature fails");
+ throw new Error('First feature fails');
}
yield {
- type: "result",
- subtype: "success",
+ type: 'result',
+ subtype: 'success',
};
},
};
- vi.mocked(ProviderFactory.getProviderForModel).mockReturnValue(
- mockProvider as any
- );
+ vi.mocked(ProviderFactory.getProviderForModel).mockReturnValue(mockProvider as any);
const startPromise = service.startAutoLoop(testRepo.path, 1);
@@ -566,200 +512,177 @@ describe("auto-mode-service.ts (integration)", () => {
}, 15000);
});
- describe("planning mode", () => {
- it("should execute feature with skip planning mode", async () => {
- await createTestFeature(testRepo.path, "skip-plan-feature", {
- id: "skip-plan-feature",
- category: "test",
- description: "Feature with skip planning",
- status: "pending",
- planningMode: "skip",
+ describe('planning mode', () => {
+ it('should execute feature with skip planning mode', async () => {
+ await createTestFeature(testRepo.path, 'skip-plan-feature', {
+ id: 'skip-plan-feature',
+ category: 'test',
+ description: 'Feature with skip planning',
+ status: 'pending',
+ planningMode: 'skip',
});
const mockProvider = {
- getName: () => "claude",
+ getName: () => 'claude',
executeQuery: async function* () {
yield {
- type: "assistant",
+ type: 'assistant',
message: {
- role: "assistant",
- content: [{ type: "text", text: "Feature implemented" }],
+ role: 'assistant',
+ content: [{ type: 'text', text: 'Feature implemented' }],
},
};
yield {
- type: "result",
- subtype: "success",
+ type: 'result',
+ subtype: 'success',
};
},
};
- vi.mocked(ProviderFactory.getProviderForModel).mockReturnValue(
- mockProvider as any
- );
+ vi.mocked(ProviderFactory.getProviderForModel).mockReturnValue(mockProvider as any);
- await service.executeFeature(
- testRepo.path,
- "skip-plan-feature",
- false,
- false
- );
+ await service.executeFeature(testRepo.path, 'skip-plan-feature', false, false);
- const feature = await featureLoader.get(testRepo.path, "skip-plan-feature");
- expect(feature?.status).toBe("waiting_approval");
+ const feature = await featureLoader.get(testRepo.path, 'skip-plan-feature');
+ expect(feature?.status).toBe('waiting_approval');
}, 30000);
- it("should execute feature with lite planning mode without approval", async () => {
- await createTestFeature(testRepo.path, "lite-plan-feature", {
- id: "lite-plan-feature",
- category: "test",
- description: "Feature with lite planning",
- status: "pending",
- planningMode: "lite",
+ it('should execute feature with lite planning mode without approval', async () => {
+ await createTestFeature(testRepo.path, 'lite-plan-feature', {
+ id: 'lite-plan-feature',
+ category: 'test',
+ description: 'Feature with lite planning',
+ status: 'pending',
+ planningMode: 'lite',
requirePlanApproval: false,
});
const mockProvider = {
- getName: () => "claude",
+ getName: () => 'claude',
executeQuery: async function* () {
yield {
- type: "assistant",
+ type: 'assistant',
message: {
- role: "assistant",
- content: [{ type: "text", text: "[PLAN_GENERATED] Planning outline complete.\n\nFeature implemented" }],
+ role: 'assistant',
+ content: [
+ {
+ type: 'text',
+ text: '[PLAN_GENERATED] Planning outline complete.\n\nFeature implemented',
+ },
+ ],
},
};
yield {
- type: "result",
- subtype: "success",
+ type: 'result',
+ subtype: 'success',
};
},
};
- vi.mocked(ProviderFactory.getProviderForModel).mockReturnValue(
- mockProvider as any
- );
+ vi.mocked(ProviderFactory.getProviderForModel).mockReturnValue(mockProvider as any);
- await service.executeFeature(
- testRepo.path,
- "lite-plan-feature",
- false,
- false
- );
+ await service.executeFeature(testRepo.path, 'lite-plan-feature', false, false);
- const feature = await featureLoader.get(testRepo.path, "lite-plan-feature");
- expect(feature?.status).toBe("waiting_approval");
+ const feature = await featureLoader.get(testRepo.path, 'lite-plan-feature');
+ expect(feature?.status).toBe('waiting_approval');
}, 30000);
- it("should emit planning_started event for spec mode", async () => {
- await createTestFeature(testRepo.path, "spec-plan-feature", {
- id: "spec-plan-feature",
- category: "test",
- description: "Feature with spec planning",
- status: "pending",
- planningMode: "spec",
+ it('should emit planning_started event for spec mode', async () => {
+ await createTestFeature(testRepo.path, 'spec-plan-feature', {
+ id: 'spec-plan-feature',
+ category: 'test',
+ description: 'Feature with spec planning',
+ status: 'pending',
+ planningMode: 'spec',
requirePlanApproval: false,
});
const mockProvider = {
- getName: () => "claude",
+ getName: () => 'claude',
executeQuery: async function* () {
yield {
- type: "assistant",
+ type: 'assistant',
message: {
- role: "assistant",
- content: [{ type: "text", text: "Spec generated\n\n[SPEC_GENERATED] Review the spec." }],
+ role: 'assistant',
+ content: [
+ { type: 'text', text: 'Spec generated\n\n[SPEC_GENERATED] Review the spec.' },
+ ],
},
};
yield {
- type: "result",
- subtype: "success",
+ type: 'result',
+ subtype: 'success',
};
},
};
- vi.mocked(ProviderFactory.getProviderForModel).mockReturnValue(
- mockProvider as any
- );
+ vi.mocked(ProviderFactory.getProviderForModel).mockReturnValue(mockProvider as any);
- await service.executeFeature(
- testRepo.path,
- "spec-plan-feature",
- false,
- false
- );
+ await service.executeFeature(testRepo.path, 'spec-plan-feature', false, false);
// Check planning_started event was emitted
- const planningEvent = mockEvents.emit.mock.calls.find(
- (call) => call[1]?.mode === "spec"
- );
+ const planningEvent = mockEvents.emit.mock.calls.find((call) => call[1]?.mode === 'spec');
expect(planningEvent).toBeTruthy();
}, 30000);
- it("should handle feature with full planning mode", async () => {
- await createTestFeature(testRepo.path, "full-plan-feature", {
- id: "full-plan-feature",
- category: "test",
- description: "Feature with full planning",
- status: "pending",
- planningMode: "full",
+ it('should handle feature with full planning mode', async () => {
+ await createTestFeature(testRepo.path, 'full-plan-feature', {
+ id: 'full-plan-feature',
+ category: 'test',
+ description: 'Feature with full planning',
+ status: 'pending',
+ planningMode: 'full',
requirePlanApproval: false,
});
const mockProvider = {
- getName: () => "claude",
+ getName: () => 'claude',
executeQuery: async function* () {
yield {
- type: "assistant",
+ type: 'assistant',
message: {
- role: "assistant",
- content: [{ type: "text", text: "Full spec with phases\n\n[SPEC_GENERATED] Review." }],
+ role: 'assistant',
+ content: [
+ { type: 'text', text: 'Full spec with phases\n\n[SPEC_GENERATED] Review.' },
+ ],
},
};
yield {
- type: "result",
- subtype: "success",
+ type: 'result',
+ subtype: 'success',
};
},
};
- vi.mocked(ProviderFactory.getProviderForModel).mockReturnValue(
- mockProvider as any
- );
+ vi.mocked(ProviderFactory.getProviderForModel).mockReturnValue(mockProvider as any);
- await service.executeFeature(
- testRepo.path,
- "full-plan-feature",
- false,
- false
- );
+ await service.executeFeature(testRepo.path, 'full-plan-feature', false, false);
// Check planning_started event was emitted with full mode
- const planningEvent = mockEvents.emit.mock.calls.find(
- (call) => call[1]?.mode === "full"
- );
+ const planningEvent = mockEvents.emit.mock.calls.find((call) => call[1]?.mode === 'full');
expect(planningEvent).toBeTruthy();
}, 30000);
- it("should track pending approval correctly", async () => {
+ it('should track pending approval correctly', async () => {
// Initially no pending approvals
- expect(service.hasPendingApproval("non-existent")).toBe(false);
+ expect(service.hasPendingApproval('non-existent')).toBe(false);
});
- it("should cancel pending approval gracefully", () => {
+ it('should cancel pending approval gracefully', () => {
// Should not throw when cancelling non-existent approval
- expect(() => service.cancelPlanApproval("non-existent")).not.toThrow();
+ expect(() => service.cancelPlanApproval('non-existent')).not.toThrow();
});
- it("should resolve approval with error for non-existent feature", async () => {
+ it('should resolve approval with error for non-existent feature', async () => {
const result = await service.resolvePlanApproval(
- "non-existent",
+ 'non-existent',
true,
undefined,
undefined,
undefined
);
expect(result.success).toBe(false);
- expect(result.error).toContain("No pending approval");
+ expect(result.error).toContain('No pending approval');
});
});
});
diff --git a/apps/server/tests/setup.ts b/apps/server/tests/setup.ts
index 2b00c6144..15ecc9dca 100644
--- a/apps/server/tests/setup.ts
+++ b/apps/server/tests/setup.ts
@@ -3,11 +3,11 @@
* Runs before each test file
*/
-import { vi, beforeEach } from "vitest";
+import { vi, beforeEach } from 'vitest';
// Set test environment variables
-process.env.NODE_ENV = "test";
-process.env.DATA_DIR = "/tmp/test-data";
+process.env.NODE_ENV = 'test';
+process.env.DATA_DIR = '/tmp/test-data';
// Reset all mocks before each test
beforeEach(() => {
diff --git a/apps/server/tests/unit/lib/app-spec-format.test.ts b/apps/server/tests/unit/lib/app-spec-format.test.ts
index 43eb5359f..eef788140 100644
--- a/apps/server/tests/unit/lib/app-spec-format.test.ts
+++ b/apps/server/tests/unit/lib/app-spec-format.test.ts
@@ -1,143 +1,137 @@
-import { describe, it, expect } from "vitest";
+import { describe, it, expect } from 'vitest';
import {
specToXml,
getStructuredSpecPromptInstruction,
getAppSpecFormatInstruction,
APP_SPEC_XML_FORMAT,
type SpecOutput,
-} from "@/lib/app-spec-format.js";
+} from '@/lib/app-spec-format.js';
-describe("app-spec-format.ts", () => {
- describe("specToXml", () => {
- it("should convert minimal spec to XML", () => {
+describe('app-spec-format.ts', () => {
+ describe('specToXml', () => {
+ it('should convert minimal spec to XML', () => {
const spec: SpecOutput = {
- project_name: "Test Project",
- overview: "A test project",
- technology_stack: ["TypeScript", "Node.js"],
- core_capabilities: ["Testing", "Development"],
- implemented_features: [
- { name: "Feature 1", description: "First feature" },
- ],
+ project_name: 'Test Project',
+ overview: 'A test project',
+ technology_stack: ['TypeScript', 'Node.js'],
+ core_capabilities: ['Testing', 'Development'],
+ implemented_features: [{ name: 'Feature 1', description: 'First feature' }],
};
const xml = specToXml(spec);
expect(xml).toContain('');
- expect(xml).toContain("");
- expect(xml).toContain("");
- expect(xml).toContain("Test Project");
- expect(xml).toContain("TypeScript");
- expect(xml).toContain("Testing");
+ expect(xml).toContain('');
+ expect(xml).toContain('');
+ expect(xml).toContain('Test Project');
+ expect(xml).toContain('TypeScript');
+ expect(xml).toContain('Testing');
});
- it("should escape XML special characters", () => {
+ it('should escape XML special characters', () => {
const spec: SpecOutput = {
- project_name: "Test & Project",
- overview: "Description with ",
- technology_stack: ["TypeScript"],
- core_capabilities: ["Cap"],
+ project_name: 'Test & Project',
+ overview: 'Description with <tags>',
+ technology_stack: ['TypeScript'],
+ core_capabilities: ['Cap'],
implemented_features: [],
};
const xml = specToXml(spec);
- expect(xml).toContain("Test & Project");
- expect(xml).toContain("<tags>");
+ expect(xml).toContain('Test & Project');
+ expect(xml).toContain('<tags>');
});
- it("should include file_locations when provided", () => {
+ it('should include file_locations when provided', () => {
const spec: SpecOutput = {
- project_name: "Test",
- overview: "Test",
- technology_stack: ["TS"],
- core_capabilities: ["Cap"],
+ project_name: 'Test',
+ overview: 'Test',
+ technology_stack: ['TS'],
+ core_capabilities: ['Cap'],
implemented_features: [
{
- name: "Feature",
- description: "Desc",
- file_locations: ["src/index.ts"],
+ name: 'Feature',
+ description: 'Desc',
+ file_locations: ['src/index.ts'],
},
],
};
const xml = specToXml(spec);
- expect(xml).toContain("");
- expect(xml).toContain("src/index.ts");
+ expect(xml).toContain('<file_locations>');
+ expect(xml).toContain('src/index.ts');
});
- it("should not include file_locations when empty", () => {
+ it('should not include file_locations when empty', () => {
const spec: SpecOutput = {
- project_name: "Test",
- overview: "Test",
- technology_stack: ["TS"],
- core_capabilities: ["Cap"],
- implemented_features: [
- { name: "Feature", description: "Desc", file_locations: [] },
- ],
+ project_name: 'Test',
+ overview: 'Test',
+ technology_stack: ['TS'],
+ core_capabilities: ['Cap'],
+ implemented_features: [{ name: 'Feature', description: 'Desc', file_locations: [] }],
};
const xml = specToXml(spec);
- expect(xml).not.toContain("");
+ expect(xml).not.toContain('');
});
- it("should include additional_requirements when provided", () => {
+ it('should include additional_requirements when provided', () => {
const spec: SpecOutput = {
- project_name: "Test",
- overview: "Test",
- technology_stack: ["TS"],
- core_capabilities: ["Cap"],
+ project_name: 'Test',
+ overview: 'Test',
+ technology_stack: ['TS'],
+ core_capabilities: ['Cap'],
implemented_features: [],
- additional_requirements: ["Node.js 18+"],
+ additional_requirements: ['Node.js 18+'],
};
const xml = specToXml(spec);
- expect(xml).toContain("");
- expect(xml).toContain("Node.js 18+");
+ expect(xml).toContain('<additional_requirements>');
+ expect(xml).toContain('Node.js 18+');
});
- it("should include development_guidelines when provided", () => {
+ it('should include development_guidelines when provided', () => {
const spec: SpecOutput = {
- project_name: "Test",
- overview: "Test",
- technology_stack: ["TS"],
- core_capabilities: ["Cap"],
+ project_name: 'Test',
+ overview: 'Test',
+ technology_stack: ['TS'],
+ core_capabilities: ['Cap'],
implemented_features: [],
- development_guidelines: ["Use ESLint"],
+ development_guidelines: ['Use ESLint'],
};
const xml = specToXml(spec);
- expect(xml).toContain("");
- expect(xml).toContain("Use ESLint");
+ expect(xml).toContain('<development_guidelines>');
+ expect(xml).toContain('Use ESLint');
});
- it("should include implementation_roadmap when provided", () => {
+ it('should include implementation_roadmap when provided', () => {
const spec: SpecOutput = {
- project_name: "Test",
- overview: "Test",
- technology_stack: ["TS"],
- core_capabilities: ["Cap"],
+ project_name: 'Test',
+ overview: 'Test',
+ technology_stack: ['TS'],
+ core_capabilities: ['Cap'],
implemented_features: [],
- implementation_roadmap: [
- { phase: "Phase 1", status: "completed", description: "Setup" },
- ],
+ implementation_roadmap: [{ phase: 'Phase 1', status: 'completed', description: 'Setup' }],
};
const xml = specToXml(spec);
- expect(xml).toContain("");
- expect(xml).toContain("completed");
+ expect(xml).toContain('<implementation_roadmap>');
+ expect(xml).toContain('completed');
});
- it("should not include optional sections when empty", () => {
+ it('should not include optional sections when empty', () => {
const spec: SpecOutput = {
- project_name: "Test",
- overview: "Test",
- technology_stack: ["TS"],
- core_capabilities: ["Cap"],
+ project_name: 'Test',
+ overview: 'Test',
+ technology_stack: ['TS'],
+ core_capabilities: ['Cap'],
implemented_features: [],
additional_requirements: [],
development_guidelines: [],
@@ -146,44 +140,44 @@ describe("app-spec-format.ts", () => {
const xml = specToXml(spec);
- expect(xml).not.toContain("");
- expect(xml).not.toContain("");
- expect(xml).not.toContain("");
+ expect(xml).not.toContain('');
+ expect(xml).not.toContain('');
+ expect(xml).not.toContain('');
});
});
- describe("getStructuredSpecPromptInstruction", () => {
- it("should return non-empty prompt instruction", () => {
+ describe('getStructuredSpecPromptInstruction', () => {
+ it('should return non-empty prompt instruction', () => {
const instruction = getStructuredSpecPromptInstruction();
expect(instruction).toBeTruthy();
expect(instruction.length).toBeGreaterThan(100);
});
- it("should mention required fields", () => {
+ it('should mention required fields', () => {
const instruction = getStructuredSpecPromptInstruction();
- expect(instruction).toContain("project_name");
- expect(instruction).toContain("overview");
- expect(instruction).toContain("technology_stack");
+ expect(instruction).toContain('project_name');
+ expect(instruction).toContain('overview');
+ expect(instruction).toContain('technology_stack');
});
});
- describe("getAppSpecFormatInstruction", () => {
- it("should return non-empty format instruction", () => {
+ describe('getAppSpecFormatInstruction', () => {
+ it('should return non-empty format instruction', () => {
const instruction = getAppSpecFormatInstruction();
expect(instruction).toBeTruthy();
expect(instruction.length).toBeGreaterThan(100);
});
- it("should include critical formatting requirements", () => {
+ it('should include critical formatting requirements', () => {
const instruction = getAppSpecFormatInstruction();
- expect(instruction).toContain("CRITICAL FORMATTING REQUIREMENTS");
+ expect(instruction).toContain('CRITICAL FORMATTING REQUIREMENTS');
});
});
- describe("APP_SPEC_XML_FORMAT", () => {
- it("should contain valid XML template structure", () => {
- expect(APP_SPEC_XML_FORMAT).toContain("");
- expect(APP_SPEC_XML_FORMAT).toContain("");
+ describe('APP_SPEC_XML_FORMAT', () => {
+ it('should contain valid XML template structure', () => {
+ expect(APP_SPEC_XML_FORMAT).toContain('');
+ expect(APP_SPEC_XML_FORMAT).toContain('');
});
});
});
diff --git a/apps/server/tests/unit/lib/auth.test.ts b/apps/server/tests/unit/lib/auth.test.ts
index 97390bd3c..91c1c461d 100644
--- a/apps/server/tests/unit/lib/auth.test.ts
+++ b/apps/server/tests/unit/lib/auth.test.ts
@@ -1,20 +1,20 @@
-import { describe, it, expect, beforeEach, vi } from "vitest";
-import { createMockExpressContext } from "../../utils/mocks.js";
+import { describe, it, expect, beforeEach, vi } from 'vitest';
+import { createMockExpressContext } from '../../utils/mocks.js';
/**
* Note: auth.ts reads AUTOMAKER_API_KEY at module load time.
* We need to reset modules and reimport for each test to get fresh state.
*/
-describe("auth.ts", () => {
+describe('auth.ts', () => {
beforeEach(() => {
vi.resetModules();
});
- describe("authMiddleware - no API key", () => {
- it("should call next() when no API key is set", async () => {
+ describe('authMiddleware - no API key', () => {
+ it('should call next() when no API key is set', async () => {
delete process.env.AUTOMAKER_API_KEY;
- const { authMiddleware } = await import("@/lib/auth.js");
+ const { authMiddleware } = await import('@/lib/auth.js');
const { req, res, next } = createMockExpressContext();
authMiddleware(req, res, next);
@@ -24,11 +24,11 @@ describe("auth.ts", () => {
});
});
- describe("authMiddleware - with API key", () => {
- it("should reject request without API key header", async () => {
- process.env.AUTOMAKER_API_KEY = "test-secret-key";
+ describe('authMiddleware - with API key', () => {
+ it('should reject request without API key header', async () => {
+ process.env.AUTOMAKER_API_KEY = 'test-secret-key';
- const { authMiddleware } = await import("@/lib/auth.js");
+ const { authMiddleware } = await import('@/lib/auth.js');
const { req, res, next } = createMockExpressContext();
authMiddleware(req, res, next);
@@ -36,34 +36,34 @@ describe("auth.ts", () => {
expect(res.status).toHaveBeenCalledWith(401);
expect(res.json).toHaveBeenCalledWith({
success: false,
- error: "Authentication required. Provide X-API-Key header.",
+ error: 'Authentication required. Provide X-API-Key header.',
});
expect(next).not.toHaveBeenCalled();
});
- it("should reject request with invalid API key", async () => {
- process.env.AUTOMAKER_API_KEY = "test-secret-key";
+ it('should reject request with invalid API key', async () => {
+ process.env.AUTOMAKER_API_KEY = 'test-secret-key';
- const { authMiddleware } = await import("@/lib/auth.js");
+ const { authMiddleware } = await import('@/lib/auth.js');
const { req, res, next } = createMockExpressContext();
- req.headers["x-api-key"] = "wrong-key";
+ req.headers['x-api-key'] = 'wrong-key';
authMiddleware(req, res, next);
expect(res.status).toHaveBeenCalledWith(403);
expect(res.json).toHaveBeenCalledWith({
success: false,
- error: "Invalid API key.",
+ error: 'Invalid API key.',
});
expect(next).not.toHaveBeenCalled();
});
- it("should call next() with valid API key", async () => {
- process.env.AUTOMAKER_API_KEY = "test-secret-key";
+ it('should call next() with valid API key', async () => {
+ process.env.AUTOMAKER_API_KEY = 'test-secret-key';
- const { authMiddleware } = await import("@/lib/auth.js");
- const { req, res, next} = createMockExpressContext();
- req.headers["x-api-key"] = "test-secret-key";
+ const { authMiddleware } = await import('@/lib/auth.js');
+ const { req, res, next } = createMockExpressContext();
+ req.headers['x-api-key'] = 'test-secret-key';
authMiddleware(req, res, next);
@@ -72,44 +72,44 @@ describe("auth.ts", () => {
});
});
- describe("isAuthEnabled", () => {
- it("should return false when no API key is set", async () => {
+ describe('isAuthEnabled', () => {
+ it('should return false when no API key is set', async () => {
delete process.env.AUTOMAKER_API_KEY;
- const { isAuthEnabled } = await import("@/lib/auth.js");
+ const { isAuthEnabled } = await import('@/lib/auth.js');
expect(isAuthEnabled()).toBe(false);
});
- it("should return true when API key is set", async () => {
- process.env.AUTOMAKER_API_KEY = "test-key";
+ it('should return true when API key is set', async () => {
+ process.env.AUTOMAKER_API_KEY = 'test-key';
- const { isAuthEnabled } = await import("@/lib/auth.js");
+ const { isAuthEnabled } = await import('@/lib/auth.js');
expect(isAuthEnabled()).toBe(true);
});
});
- describe("getAuthStatus", () => {
- it("should return disabled status when no API key", async () => {
+ describe('getAuthStatus', () => {
+ it('should return disabled status when no API key', async () => {
delete process.env.AUTOMAKER_API_KEY;
- const { getAuthStatus } = await import("@/lib/auth.js");
+ const { getAuthStatus } = await import('@/lib/auth.js');
const status = getAuthStatus();
expect(status).toEqual({
enabled: false,
- method: "none",
+ method: 'none',
});
});
- it("should return enabled status when API key is set", async () => {
- process.env.AUTOMAKER_API_KEY = "test-key";
+ it('should return enabled status when API key is set', async () => {
+ process.env.AUTOMAKER_API_KEY = 'test-key';
- const { getAuthStatus } = await import("@/lib/auth.js");
+ const { getAuthStatus } = await import('@/lib/auth.js');
const status = getAuthStatus();
expect(status).toEqual({
enabled: true,
- method: "api_key",
+ method: 'api_key',
});
});
});
diff --git a/apps/server/tests/unit/lib/automaker-paths.test.ts b/apps/server/tests/unit/lib/automaker-paths.test.ts
index 15d2945c5..09042ca03 100644
--- a/apps/server/tests/unit/lib/automaker-paths.test.ts
+++ b/apps/server/tests/unit/lib/automaker-paths.test.ts
@@ -1,7 +1,7 @@
-import { describe, it, expect, vi, beforeEach, afterEach } from "vitest";
-import path from "path";
-import fs from "fs/promises";
-import os from "os";
+import { describe, it, expect, vi, beforeEach, afterEach } from 'vitest';
+import path from 'path';
+import fs from 'fs/promises';
+import os from 'os';
import {
getAutomakerDir,
getFeaturesDir,
@@ -17,97 +17,85 @@ import {
getCredentialsPath,
getProjectSettingsPath,
ensureDataDir,
-} from "@automaker/platform";
+} from '@automaker/platform';
-describe("automaker-paths.ts", () => {
- const projectPath = path.join("/test", "project");
+describe('automaker-paths.ts', () => {
+ const projectPath = path.join('/test', 'project');
- describe("getAutomakerDir", () => {
- it("should return path to .automaker directory", () => {
- expect(getAutomakerDir(projectPath)).toBe(
- path.join(projectPath, ".automaker")
- );
+ describe('getAutomakerDir', () => {
+ it('should return path to .automaker directory', () => {
+ expect(getAutomakerDir(projectPath)).toBe(path.join(projectPath, '.automaker'));
});
- it("should handle paths with trailing slashes", () => {
- const pathWithSlash = path.join("/test", "project") + path.sep;
- expect(getAutomakerDir(pathWithSlash)).toBe(
- path.join(pathWithSlash, ".automaker")
- );
+ it('should handle paths with trailing slashes', () => {
+ const pathWithSlash = path.join('/test', 'project') + path.sep;
+ expect(getAutomakerDir(pathWithSlash)).toBe(path.join(pathWithSlash, '.automaker'));
});
});
- describe("getFeaturesDir", () => {
- it("should return path to features directory", () => {
- expect(getFeaturesDir(projectPath)).toBe(
- path.join(projectPath, ".automaker", "features")
- );
+ describe('getFeaturesDir', () => {
+ it('should return path to features directory', () => {
+ expect(getFeaturesDir(projectPath)).toBe(path.join(projectPath, '.automaker', 'features'));
});
});
- describe("getFeatureDir", () => {
- it("should return path to specific feature directory", () => {
- expect(getFeatureDir(projectPath, "feature-123")).toBe(
- path.join(projectPath, ".automaker", "features", "feature-123")
+ describe('getFeatureDir', () => {
+ it('should return path to specific feature directory', () => {
+ expect(getFeatureDir(projectPath, 'feature-123')).toBe(
+ path.join(projectPath, '.automaker', 'features', 'feature-123')
);
});
- it("should handle feature IDs with special characters", () => {
- expect(getFeatureDir(projectPath, "my-feature_v2")).toBe(
- path.join(projectPath, ".automaker", "features", "my-feature_v2")
+ it('should handle feature IDs with special characters', () => {
+ expect(getFeatureDir(projectPath, 'my-feature_v2')).toBe(
+ path.join(projectPath, '.automaker', 'features', 'my-feature_v2')
);
});
});
- describe("getFeatureImagesDir", () => {
- it("should return path to feature images directory", () => {
- expect(getFeatureImagesDir(projectPath, "feature-123")).toBe(
- path.join(projectPath, ".automaker", "features", "feature-123", "images")
+ describe('getFeatureImagesDir', () => {
+ it('should return path to feature images directory', () => {
+ expect(getFeatureImagesDir(projectPath, 'feature-123')).toBe(
+ path.join(projectPath, '.automaker', 'features', 'feature-123', 'images')
);
});
});
- describe("getBoardDir", () => {
- it("should return path to board directory", () => {
- expect(getBoardDir(projectPath)).toBe(
- path.join(projectPath, ".automaker", "board")
- );
+ describe('getBoardDir', () => {
+ it('should return path to board directory', () => {
+ expect(getBoardDir(projectPath)).toBe(path.join(projectPath, '.automaker', 'board'));
});
});
- describe("getImagesDir", () => {
- it("should return path to images directory", () => {
- expect(getImagesDir(projectPath)).toBe(
- path.join(projectPath, ".automaker", "images")
- );
+ describe('getImagesDir', () => {
+ it('should return path to images directory', () => {
+ expect(getImagesDir(projectPath)).toBe(path.join(projectPath, '.automaker', 'images'));
});
});
- describe("getWorktreesDir", () => {
- it("should return path to worktrees directory", () => {
- expect(getWorktreesDir(projectPath)).toBe(
- path.join(projectPath, ".automaker", "worktrees")
- );
+ describe('getWorktreesDir', () => {
+ it('should return path to worktrees directory', () => {
+ expect(getWorktreesDir(projectPath)).toBe(path.join(projectPath, '.automaker', 'worktrees'));
});
});
- describe("getAppSpecPath", () => {
- it("should return path to app_spec.txt file", () => {
+ describe('getAppSpecPath', () => {
+ it('should return path to app_spec.txt file', () => {
expect(getAppSpecPath(projectPath)).toBe(
- path.join(projectPath, ".automaker", "app_spec.txt")
+ path.join(projectPath, '.automaker', 'app_spec.txt')
);
});
});
- describe("getBranchTrackingPath", () => {
- it("should return path to active-branches.json file", () => {
+ describe('getBranchTrackingPath', () => {
+ it('should return path to active-branches.json file', () => {
expect(getBranchTrackingPath(projectPath)).toBe(
- path.join(projectPath, ".automaker", "active-branches.json")
+ path.join(projectPath, '.automaker', 'active-branches.json')
);
});
});
- describe("ensureAutomakerDir", () => {
+ describe('ensureAutomakerDir', () => {
let testDir: string;
beforeEach(async () => {
@@ -123,16 +111,16 @@ describe("automaker-paths.ts", () => {
}
});
- it("should create automaker directory and return path", async () => {
+ it('should create automaker directory and return path', async () => {
const result = await ensureAutomakerDir(testDir);
- expect(result).toBe(path.join(testDir, ".automaker"));
+ expect(result).toBe(path.join(testDir, '.automaker'));
const stats = await fs.stat(result);
expect(stats.isDirectory()).toBe(true);
});
- it("should succeed if directory already exists", async () => {
- const automakerDir = path.join(testDir, ".automaker");
+ it('should succeed if directory already exists', async () => {
+ const automakerDir = path.join(testDir, '.automaker');
await fs.mkdir(automakerDir, { recursive: true });
const result = await ensureAutomakerDir(testDir);
@@ -141,53 +129,49 @@ describe("automaker-paths.ts", () => {
});
});
- describe("getGlobalSettingsPath", () => {
- it("should return path to settings.json in data directory", () => {
- const dataDir = "/test/data";
+ describe('getGlobalSettingsPath', () => {
+ it('should return path to settings.json in data directory', () => {
+ const dataDir = '/test/data';
const result = getGlobalSettingsPath(dataDir);
- expect(result).toBe(path.join(dataDir, "settings.json"));
+ expect(result).toBe(path.join(dataDir, 'settings.json'));
});
- it("should handle paths with trailing slashes", () => {
- const dataDir = "/test/data" + path.sep;
+ it('should handle paths with trailing slashes', () => {
+ const dataDir = '/test/data' + path.sep;
const result = getGlobalSettingsPath(dataDir);
- expect(result).toBe(path.join(dataDir, "settings.json"));
+ expect(result).toBe(path.join(dataDir, 'settings.json'));
});
});
- describe("getCredentialsPath", () => {
- it("should return path to credentials.json in data directory", () => {
- const dataDir = "/test/data";
+ describe('getCredentialsPath', () => {
+ it('should return path to credentials.json in data directory', () => {
+ const dataDir = '/test/data';
const result = getCredentialsPath(dataDir);
- expect(result).toBe(path.join(dataDir, "credentials.json"));
+ expect(result).toBe(path.join(dataDir, 'credentials.json'));
});
- it("should handle paths with trailing slashes", () => {
- const dataDir = "/test/data" + path.sep;
+ it('should handle paths with trailing slashes', () => {
+ const dataDir = '/test/data' + path.sep;
const result = getCredentialsPath(dataDir);
- expect(result).toBe(path.join(dataDir, "credentials.json"));
+ expect(result).toBe(path.join(dataDir, 'credentials.json'));
});
});
- describe("getProjectSettingsPath", () => {
- it("should return path to settings.json in project .automaker directory", () => {
- const projectPath = "/test/project";
+ describe('getProjectSettingsPath', () => {
+ it('should return path to settings.json in project .automaker directory', () => {
+ const projectPath = '/test/project';
const result = getProjectSettingsPath(projectPath);
- expect(result).toBe(
- path.join(projectPath, ".automaker", "settings.json")
- );
+ expect(result).toBe(path.join(projectPath, '.automaker', 'settings.json'));
});
- it("should handle paths with trailing slashes", () => {
- const projectPath = "/test/project" + path.sep;
+ it('should handle paths with trailing slashes', () => {
+ const projectPath = '/test/project' + path.sep;
const result = getProjectSettingsPath(projectPath);
- expect(result).toBe(
- path.join(projectPath, ".automaker", "settings.json")
- );
+ expect(result).toBe(path.join(projectPath, '.automaker', 'settings.json'));
});
});
- describe("ensureDataDir", () => {
+ describe('ensureDataDir', () => {
let testDir: string;
beforeEach(async () => {
@@ -202,7 +186,7 @@ describe("automaker-paths.ts", () => {
}
});
- it("should create data directory and return path", async () => {
+ it('should create data directory and return path', async () => {
const result = await ensureDataDir(testDir);
expect(result).toBe(testDir);
@@ -210,7 +194,7 @@ describe("automaker-paths.ts", () => {
expect(stats.isDirectory()).toBe(true);
});
- it("should succeed if directory already exists", async () => {
+ it('should succeed if directory already exists', async () => {
await fs.mkdir(testDir, { recursive: true });
const result = await ensureDataDir(testDir);
@@ -218,8 +202,8 @@ describe("automaker-paths.ts", () => {
expect(result).toBe(testDir);
});
- it("should create nested directories", async () => {
- const nestedDir = path.join(testDir, "nested", "deep");
+ it('should create nested directories', async () => {
+ const nestedDir = path.join(testDir, 'nested', 'deep');
const result = await ensureDataDir(nestedDir);
expect(result).toBe(nestedDir);
diff --git a/apps/server/tests/unit/lib/conversation-utils.test.ts b/apps/server/tests/unit/lib/conversation-utils.test.ts
index 3fa85bf26..bb7c6684e 100644
--- a/apps/server/tests/unit/lib/conversation-utils.test.ts
+++ b/apps/server/tests/unit/lib/conversation-utils.test.ts
@@ -1,146 +1,146 @@
-import { describe, it, expect } from "vitest";
+import { describe, it, expect } from 'vitest';
import {
extractTextFromContent,
normalizeContentBlocks,
formatHistoryAsText,
convertHistoryToMessages,
-} from "@automaker/utils";
-import { conversationHistoryFixture } from "../../fixtures/messages.js";
+} from '@automaker/utils';
+import { conversationHistoryFixture } from '../../fixtures/messages.js';
-describe("conversation-utils.ts", () => {
- describe("extractTextFromContent", () => {
- it("should return string content as-is", () => {
- const result = extractTextFromContent("Hello world");
- expect(result).toBe("Hello world");
+describe('conversation-utils.ts', () => {
+ describe('extractTextFromContent', () => {
+ it('should return string content as-is', () => {
+ const result = extractTextFromContent('Hello world');
+ expect(result).toBe('Hello world');
});
- it("should extract text from single text block", () => {
- const content = [{ type: "text", text: "Hello" }];
+ it('should extract text from single text block', () => {
+ const content = [{ type: 'text', text: 'Hello' }];
const result = extractTextFromContent(content);
- expect(result).toBe("Hello");
+ expect(result).toBe('Hello');
});
- it("should extract and join multiple text blocks with newlines", () => {
+ it('should extract and join multiple text blocks with newlines', () => {
const content = [
- { type: "text", text: "First block" },
- { type: "text", text: "Second block" },
- { type: "text", text: "Third block" },
+ { type: 'text', text: 'First block' },
+ { type: 'text', text: 'Second block' },
+ { type: 'text', text: 'Third block' },
];
const result = extractTextFromContent(content);
- expect(result).toBe("First block\nSecond block\nThird block");
+ expect(result).toBe('First block\nSecond block\nThird block');
});
- it("should ignore non-text blocks", () => {
+ it('should ignore non-text blocks', () => {
const content = [
- { type: "text", text: "Text content" },
- { type: "image", source: { type: "base64", data: "abc" } },
- { type: "text", text: "More text" },
- { type: "tool_use", name: "bash", input: {} },
+ { type: 'text', text: 'Text content' },
+ { type: 'image', source: { type: 'base64', data: 'abc' } },
+ { type: 'text', text: 'More text' },
+ { type: 'tool_use', name: 'bash', input: {} },
];
const result = extractTextFromContent(content);
- expect(result).toBe("Text content\nMore text");
+ expect(result).toBe('Text content\nMore text');
});
- it("should handle blocks without text property", () => {
+ it('should handle blocks without text property', () => {
const content = [
- { type: "text", text: "Valid" },
- { type: "text" } as any,
- { type: "text", text: "Also valid" },
+ { type: 'text', text: 'Valid' },
+ { type: 'text' } as any,
+ { type: 'text', text: 'Also valid' },
];
const result = extractTextFromContent(content);
- expect(result).toBe("Valid\n\nAlso valid");
+ expect(result).toBe('Valid\n\nAlso valid');
});
- it("should handle empty array", () => {
+ it('should handle empty array', () => {
const result = extractTextFromContent([]);
- expect(result).toBe("");
+ expect(result).toBe('');
});
- it("should handle array with only non-text blocks", () => {
+ it('should handle array with only non-text blocks', () => {
const content = [
- { type: "image", source: {} },
- { type: "tool_use", name: "test" },
+ { type: 'image', source: {} },
+ { type: 'tool_use', name: 'test' },
];
const result = extractTextFromContent(content);
- expect(result).toBe("");
+ expect(result).toBe('');
});
});
- describe("normalizeContentBlocks", () => {
- it("should convert string to content block array", () => {
- const result = normalizeContentBlocks("Hello");
- expect(result).toEqual([{ type: "text", text: "Hello" }]);
+ describe('normalizeContentBlocks', () => {
+ it('should convert string to content block array', () => {
+ const result = normalizeContentBlocks('Hello');
+ expect(result).toEqual([{ type: 'text', text: 'Hello' }]);
});
- it("should return array content as-is", () => {
+ it('should return array content as-is', () => {
const content = [
- { type: "text", text: "Hello" },
- { type: "image", source: {} },
+ { type: 'text', text: 'Hello' },
+ { type: 'image', source: {} },
];
const result = normalizeContentBlocks(content);
expect(result).toBe(content);
expect(result).toHaveLength(2);
});
- it("should handle empty string", () => {
- const result = normalizeContentBlocks("");
- expect(result).toEqual([{ type: "text", text: "" }]);
+ it('should handle empty string', () => {
+ const result = normalizeContentBlocks('');
+ expect(result).toEqual([{ type: 'text', text: '' }]);
});
});
- describe("formatHistoryAsText", () => {
- it("should return empty string for empty history", () => {
+ describe('formatHistoryAsText', () => {
+ it('should return empty string for empty history', () => {
const result = formatHistoryAsText([]);
- expect(result).toBe("");
+ expect(result).toBe('');
});
- it("should format single user message", () => {
- const history = [{ role: "user" as const, content: "Hello" }];
+ it('should format single user message', () => {
+ const history = [{ role: 'user' as const, content: 'Hello' }];
const result = formatHistoryAsText(history);
- expect(result).toContain("Previous conversation:");
- expect(result).toContain("User: Hello");
- expect(result).toContain("---");
+ expect(result).toContain('Previous conversation:');
+ expect(result).toContain('User: Hello');
+ expect(result).toContain('---');
});
- it("should format single assistant message", () => {
- const history = [{ role: "assistant" as const, content: "Hi there" }];
+ it('should format single assistant message', () => {
+ const history = [{ role: 'assistant' as const, content: 'Hi there' }];
const result = formatHistoryAsText(history);
- expect(result).toContain("Assistant: Hi there");
+ expect(result).toContain('Assistant: Hi there');
});
- it("should format multiple messages with correct roles", () => {
+ it('should format multiple messages with correct roles', () => {
const history = conversationHistoryFixture.slice(0, 2);
const result = formatHistoryAsText(history);
- expect(result).toContain("User: Hello, can you help me?");
- expect(result).toContain("Assistant: Of course! How can I assist you today?");
- expect(result).toContain("---");
+ expect(result).toContain('User: Hello, can you help me?');
+ expect(result).toContain('Assistant: Of course! How can I assist you today?');
+ expect(result).toContain('---');
});
- it("should handle messages with array content (multipart)", () => {
+ it('should handle messages with array content (multipart)', () => {
const history = [conversationHistoryFixture[2]]; // Has text + image
const result = formatHistoryAsText(history);
- expect(result).toContain("What is in this image?");
- expect(result).not.toContain("base64"); // Should not include image data
+ expect(result).toContain('What is in this image?');
+ expect(result).not.toContain('base64'); // Should not include image data
});
- it("should format all messages from fixture", () => {
+ it('should format all messages from fixture', () => {
const result = formatHistoryAsText(conversationHistoryFixture);
- expect(result).toContain("Previous conversation:");
- expect(result).toContain("User: Hello, can you help me?");
- expect(result).toContain("Assistant: Of course!");
- expect(result).toContain("User: What is in this image?");
- expect(result).toContain("---");
+ expect(result).toContain('Previous conversation:');
+ expect(result).toContain('User: Hello, can you help me?');
+ expect(result).toContain('Assistant: Of course!');
+ expect(result).toContain('User: What is in this image?');
+ expect(result).toContain('---');
});
- it("should separate messages with double newlines", () => {
+ it('should separate messages with double newlines', () => {
const history = [
- { role: "user" as const, content: "First" },
- { role: "assistant" as const, content: "Second" },
+ { role: 'user' as const, content: 'First' },
+ { role: 'assistant' as const, content: 'Second' },
];
const result = formatHistoryAsText(history);
@@ -148,73 +148,71 @@ describe("conversation-utils.ts", () => {
});
});
- describe("convertHistoryToMessages", () => {
- it("should convert empty history", () => {
+ describe('convertHistoryToMessages', () => {
+ it('should convert empty history', () => {
const result = convertHistoryToMessages([]);
expect(result).toEqual([]);
});
- it("should convert single message to SDK format", () => {
- const history = [{ role: "user" as const, content: "Hello" }];
+ it('should convert single message to SDK format', () => {
+ const history = [{ role: 'user' as const, content: 'Hello' }];
const result = convertHistoryToMessages(history);
expect(result).toHaveLength(1);
expect(result[0]).toMatchObject({
- type: "user",
- session_id: "",
+ type: 'user',
+ session_id: '',
message: {
- role: "user",
- content: [{ type: "text", text: "Hello" }],
+ role: 'user',
+ content: [{ type: 'text', text: 'Hello' }],
},
parent_tool_use_id: null,
});
});
- it("should normalize string content to array", () => {
- const history = [{ role: "assistant" as const, content: "Response" }];
+ it('should normalize string content to array', () => {
+ const history = [{ role: 'assistant' as const, content: 'Response' }];
const result = convertHistoryToMessages(history);
- expect(result[0].message.content).toEqual([
- { type: "text", text: "Response" },
- ]);
+ expect(result[0].message.content).toEqual([{ type: 'text', text: 'Response' }]);
});
- it("should preserve array content", () => {
+ it('should preserve array content', () => {
const history = [
{
- role: "user" as const,
+ role: 'user' as const,
content: [
- { type: "text", text: "Hello" },
- { type: "image", source: {} },
+ { type: 'text', text: 'Hello' },
+ { type: 'image', source: {} },
],
},
];
const result = convertHistoryToMessages(history);
expect(result[0].message.content).toHaveLength(2);
- expect(result[0].message.content[0]).toEqual({ type: "text", text: "Hello" });
+ expect(result[0].message.content[0]).toEqual({ type: 'text', text: 'Hello' });
});
- it("should convert multiple messages", () => {
+ it('should convert multiple messages', () => {
const history = conversationHistoryFixture.slice(0, 2);
const result = convertHistoryToMessages(history);
expect(result).toHaveLength(2);
- expect(result[0].type).toBe("user");
- expect(result[1].type).toBe("assistant");
+ expect(result[0].type).toBe('user');
+ expect(result[1].type).toBe('assistant');
});
- it("should set correct fields for SDK format", () => {
- const history = [{ role: "user" as const, content: "Test" }];
+ it('should set correct fields for SDK format', () => {
+ const history = [{ role: 'user' as const, content: 'Test' }];
const result = convertHistoryToMessages(history);
- expect(result[0].session_id).toBe("");
+ expect(result[0].session_id).toBe('');
expect(result[0].parent_tool_use_id).toBeNull();
- expect(result[0].type).toBe("user");
- expect(result[0].message.role).toBe("user");
+ expect(result[0].type).toBe('user');
+ expect(result[0].message.role).toBe('user');
});
- it("should handle all messages from fixture", () => {
+ it('should handle all messages from fixture', () => {
const result = convertHistoryToMessages(conversationHistoryFixture);
expect(result).toHaveLength(3);
diff --git a/apps/server/tests/unit/lib/dependency-resolver.test.ts b/apps/server/tests/unit/lib/dependency-resolver.test.ts
index 28a461b61..b018dacdd 100644
--- a/apps/server/tests/unit/lib/dependency-resolver.test.ts
+++ b/apps/server/tests/unit/lib/dependency-resolver.test.ts
@@ -1,11 +1,11 @@
-import { describe, it, expect } from "vitest";
+import { describe, it, expect } from 'vitest';
import {
resolveDependencies,
areDependenciesSatisfied,
getBlockingDependencies,
type DependencyResolutionResult,
-} from "@automaker/dependency-resolver";
-import type { Feature } from "@automaker/types";
+} from '@automaker/dependency-resolver';
+import type { Feature } from '@automaker/types';
// Helper to create test features
function createFeature(
@@ -20,17 +20,17 @@ function createFeature(
): Feature {
return {
id,
- category: options.category || "test",
+ category: options.category || 'test',
description: options.description || `Feature ${id}`,
- status: options.status || "backlog",
+ status: options.status || 'backlog',
priority: options.priority,
dependencies: options.dependencies,
};
}
-describe("dependency-resolver.ts", () => {
- describe("resolveDependencies", () => {
- it("should handle empty feature list", () => {
+describe('dependency-resolver.ts', () => {
+ describe('resolveDependencies', () => {
+ it('should handle empty feature list', () => {
const result = resolveDependencies([]);
expect(result.orderedFeatures).toEqual([]);
@@ -39,103 +39,103 @@ describe("dependency-resolver.ts", () => {
expect(result.blockedFeatures.size).toBe(0);
});
- it("should handle features with no dependencies", () => {
+ it('should handle features with no dependencies', () => {
const features = [
- createFeature("f1", { priority: 1 }),
- createFeature("f2", { priority: 2 }),
- createFeature("f3", { priority: 3 }),
+ createFeature('f1', { priority: 1 }),
+ createFeature('f2', { priority: 2 }),
+ createFeature('f3', { priority: 3 }),
];
const result = resolveDependencies(features);
expect(result.orderedFeatures).toHaveLength(3);
- expect(result.orderedFeatures[0].id).toBe("f1"); // Highest priority first
- expect(result.orderedFeatures[1].id).toBe("f2");
- expect(result.orderedFeatures[2].id).toBe("f3");
+ expect(result.orderedFeatures[0].id).toBe('f1'); // Highest priority first
+ expect(result.orderedFeatures[1].id).toBe('f2');
+ expect(result.orderedFeatures[2].id).toBe('f3');
expect(result.circularDependencies).toEqual([]);
expect(result.missingDependencies.size).toBe(0);
expect(result.blockedFeatures.size).toBe(0);
});
- it("should order features by dependencies (simple chain)", () => {
+ it('should order features by dependencies (simple chain)', () => {
const features = [
- createFeature("f3", { dependencies: ["f2"] }),
- createFeature("f1"),
- createFeature("f2", { dependencies: ["f1"] }),
+ createFeature('f3', { dependencies: ['f2'] }),
+ createFeature('f1'),
+ createFeature('f2', { dependencies: ['f1'] }),
];
const result = resolveDependencies(features);
expect(result.orderedFeatures).toHaveLength(3);
- expect(result.orderedFeatures[0].id).toBe("f1");
- expect(result.orderedFeatures[1].id).toBe("f2");
- expect(result.orderedFeatures[2].id).toBe("f3");
+ expect(result.orderedFeatures[0].id).toBe('f1');
+ expect(result.orderedFeatures[1].id).toBe('f2');
+ expect(result.orderedFeatures[2].id).toBe('f3');
expect(result.circularDependencies).toEqual([]);
});
- it("should respect priority within same dependency level", () => {
+ it('should respect priority within same dependency level', () => {
const features = [
- createFeature("f1", { priority: 3, dependencies: ["base"] }),
- createFeature("f2", { priority: 1, dependencies: ["base"] }),
- createFeature("f3", { priority: 2, dependencies: ["base"] }),
- createFeature("base"),
+ createFeature('f1', { priority: 3, dependencies: ['base'] }),
+ createFeature('f2', { priority: 1, dependencies: ['base'] }),
+ createFeature('f3', { priority: 2, dependencies: ['base'] }),
+ createFeature('base'),
];
const result = resolveDependencies(features);
- expect(result.orderedFeatures[0].id).toBe("base");
- expect(result.orderedFeatures[1].id).toBe("f2"); // Priority 1
- expect(result.orderedFeatures[2].id).toBe("f3"); // Priority 2
- expect(result.orderedFeatures[3].id).toBe("f1"); // Priority 3
+ expect(result.orderedFeatures[0].id).toBe('base');
+ expect(result.orderedFeatures[1].id).toBe('f2'); // Priority 1
+ expect(result.orderedFeatures[2].id).toBe('f3'); // Priority 2
+ expect(result.orderedFeatures[3].id).toBe('f1'); // Priority 3
});
- it("should use default priority of 2 when not specified", () => {
+ it('should use default priority of 2 when not specified', () => {
const features = [
- createFeature("f1", { priority: 1 }),
- createFeature("f2"), // No priority = default 2
- createFeature("f3", { priority: 3 }),
+ createFeature('f1', { priority: 1 }),
+ createFeature('f2'), // No priority = default 2
+ createFeature('f3', { priority: 3 }),
];
const result = resolveDependencies(features);
- expect(result.orderedFeatures[0].id).toBe("f1");
- expect(result.orderedFeatures[1].id).toBe("f2");
- expect(result.orderedFeatures[2].id).toBe("f3");
+ expect(result.orderedFeatures[0].id).toBe('f1');
+ expect(result.orderedFeatures[1].id).toBe('f2');
+ expect(result.orderedFeatures[2].id).toBe('f3');
});
- it("should detect missing dependencies", () => {
+ it('should detect missing dependencies', () => {
const features = [
- createFeature("f1", { dependencies: ["missing1", "missing2"] }),
- createFeature("f2", { dependencies: ["f1", "missing3"] }),
+ createFeature('f1', { dependencies: ['missing1', 'missing2'] }),
+ createFeature('f2', { dependencies: ['f1', 'missing3'] }),
];
const result = resolveDependencies(features);
expect(result.missingDependencies.size).toBe(2);
- expect(result.missingDependencies.get("f1")).toEqual(["missing1", "missing2"]);
- expect(result.missingDependencies.get("f2")).toEqual(["missing3"]);
+ expect(result.missingDependencies.get('f1')).toEqual(['missing1', 'missing2']);
+ expect(result.missingDependencies.get('f2')).toEqual(['missing3']);
expect(result.orderedFeatures).toHaveLength(2);
});
- it("should detect blocked features (incomplete dependencies)", () => {
+ it('should detect blocked features (incomplete dependencies)', () => {
const features = [
- createFeature("f1", { status: "in_progress" }),
- createFeature("f2", { status: "backlog", dependencies: ["f1"] }),
- createFeature("f3", { status: "completed" }),
- createFeature("f4", { status: "backlog", dependencies: ["f3"] }),
+ createFeature('f1', { status: 'in_progress' }),
+ createFeature('f2', { status: 'backlog', dependencies: ['f1'] }),
+ createFeature('f3', { status: 'completed' }),
+ createFeature('f4', { status: 'backlog', dependencies: ['f3'] }),
];
const result = resolveDependencies(features);
expect(result.blockedFeatures.size).toBe(1);
- expect(result.blockedFeatures.get("f2")).toEqual(["f1"]);
- expect(result.blockedFeatures.has("f4")).toBe(false); // f3 is completed
+ expect(result.blockedFeatures.get('f2')).toEqual(['f1']);
+ expect(result.blockedFeatures.has('f4')).toBe(false); // f3 is completed
});
- it("should not block features whose dependencies are verified", () => {
+ it('should not block features whose dependencies are verified', () => {
const features = [
- createFeature("f1", { status: "verified" }),
- createFeature("f2", { status: "backlog", dependencies: ["f1"] }),
+ createFeature('f1', { status: 'verified' }),
+ createFeature('f2', { status: 'backlog', dependencies: ['f1'] }),
];
const result = resolveDependencies(features);
@@ -143,25 +143,25 @@ describe("dependency-resolver.ts", () => {
expect(result.blockedFeatures.size).toBe(0);
});
- it("should detect circular dependencies (simple cycle)", () => {
+ it('should detect circular dependencies (simple cycle)', () => {
const features = [
- createFeature("f1", { dependencies: ["f2"] }),
- createFeature("f2", { dependencies: ["f1"] }),
+ createFeature('f1', { dependencies: ['f2'] }),
+ createFeature('f2', { dependencies: ['f1'] }),
];
const result = resolveDependencies(features);
expect(result.circularDependencies).toHaveLength(1);
- expect(result.circularDependencies[0]).toContain("f1");
- expect(result.circularDependencies[0]).toContain("f2");
+ expect(result.circularDependencies[0]).toContain('f1');
+ expect(result.circularDependencies[0]).toContain('f2');
expect(result.orderedFeatures).toHaveLength(2); // Features still included
});
- it("should detect circular dependencies (multi-node cycle)", () => {
+ it('should detect circular dependencies (multi-node cycle)', () => {
const features = [
- createFeature("f1", { dependencies: ["f3"] }),
- createFeature("f2", { dependencies: ["f1"] }),
- createFeature("f3", { dependencies: ["f2"] }),
+ createFeature('f1', { dependencies: ['f3'] }),
+ createFeature('f2', { dependencies: ['f1'] }),
+ createFeature('f3', { dependencies: ['f2'] }),
];
const result = resolveDependencies(features);
@@ -170,47 +170,47 @@ describe("dependency-resolver.ts", () => {
expect(result.orderedFeatures).toHaveLength(3);
});
- it("should handle mixed valid and circular dependencies", () => {
+ it('should handle mixed valid and circular dependencies', () => {
const features = [
- createFeature("base"),
- createFeature("f1", { dependencies: ["base", "f2"] }),
- createFeature("f2", { dependencies: ["f1"] }), // Circular with f1
- createFeature("f3", { dependencies: ["base"] }),
+ createFeature('base'),
+ createFeature('f1', { dependencies: ['base', 'f2'] }),
+ createFeature('f2', { dependencies: ['f1'] }), // Circular with f1
+ createFeature('f3', { dependencies: ['base'] }),
];
const result = resolveDependencies(features);
expect(result.circularDependencies.length).toBeGreaterThan(0);
- expect(result.orderedFeatures[0].id).toBe("base");
+ expect(result.orderedFeatures[0].id).toBe('base');
expect(result.orderedFeatures).toHaveLength(4);
});
- it("should handle complex dependency graph", () => {
+ it('should handle complex dependency graph', () => {
const features = [
- createFeature("ui", { dependencies: ["api", "auth"], priority: 1 }),
- createFeature("api", { dependencies: ["db"], priority: 2 }),
- createFeature("auth", { dependencies: ["db"], priority: 1 }),
- createFeature("db", { priority: 1 }),
- createFeature("tests", { dependencies: ["ui"], priority: 3 }),
+ createFeature('ui', { dependencies: ['api', 'auth'], priority: 1 }),
+ createFeature('api', { dependencies: ['db'], priority: 2 }),
+ createFeature('auth', { dependencies: ['db'], priority: 1 }),
+ createFeature('db', { priority: 1 }),
+ createFeature('tests', { dependencies: ['ui'], priority: 3 }),
];
const result = resolveDependencies(features);
- const order = result.orderedFeatures.map(f => f.id);
+ const order = result.orderedFeatures.map((f) => f.id);
- expect(order[0]).toBe("db");
- expect(order.indexOf("db")).toBeLessThan(order.indexOf("api"));
- expect(order.indexOf("db")).toBeLessThan(order.indexOf("auth"));
- expect(order.indexOf("api")).toBeLessThan(order.indexOf("ui"));
- expect(order.indexOf("auth")).toBeLessThan(order.indexOf("ui"));
- expect(order.indexOf("ui")).toBeLessThan(order.indexOf("tests"));
+ expect(order[0]).toBe('db');
+ expect(order.indexOf('db')).toBeLessThan(order.indexOf('api'));
+ expect(order.indexOf('db')).toBeLessThan(order.indexOf('auth'));
+ expect(order.indexOf('api')).toBeLessThan(order.indexOf('ui'));
+ expect(order.indexOf('auth')).toBeLessThan(order.indexOf('ui'));
+ expect(order.indexOf('ui')).toBeLessThan(order.indexOf('tests'));
expect(result.circularDependencies).toEqual([]);
});
- it("should handle features with empty dependencies array", () => {
+ it('should handle features with empty dependencies array', () => {
const features = [
- createFeature("f1", { dependencies: [] }),
- createFeature("f2", { dependencies: [] }),
+ createFeature('f1', { dependencies: [] }),
+ createFeature('f2', { dependencies: [] }),
];
const result = resolveDependencies(features);
@@ -220,22 +220,20 @@ describe("dependency-resolver.ts", () => {
expect(result.blockedFeatures.size).toBe(0);
});
- it("should track multiple blocking dependencies", () => {
+ it('should track multiple blocking dependencies', () => {
const features = [
- createFeature("f1", { status: "in_progress" }),
- createFeature("f2", { status: "backlog" }),
- createFeature("f3", { status: "backlog", dependencies: ["f1", "f2"] }),
+ createFeature('f1', { status: 'in_progress' }),
+ createFeature('f2', { status: 'backlog' }),
+ createFeature('f3', { status: 'backlog', dependencies: ['f1', 'f2'] }),
];
const result = resolveDependencies(features);
- expect(result.blockedFeatures.get("f3")).toEqual(["f1", "f2"]);
+ expect(result.blockedFeatures.get('f3')).toEqual(['f1', 'f2']);
});
- it("should handle self-referencing dependency", () => {
- const features = [
- createFeature("f1", { dependencies: ["f1"] }),
- ];
+ it('should handle self-referencing dependency', () => {
+ const features = [createFeature('f1', { dependencies: ['f1'] })];
const result = resolveDependencies(features);
@@ -244,195 +242,191 @@ describe("dependency-resolver.ts", () => {
});
});
- describe("areDependenciesSatisfied", () => {
- it("should return true for feature with no dependencies", () => {
- const feature = createFeature("f1");
+ describe('areDependenciesSatisfied', () => {
+ it('should return true for feature with no dependencies', () => {
+ const feature = createFeature('f1');
const allFeatures = [feature];
expect(areDependenciesSatisfied(feature, allFeatures)).toBe(true);
});
- it("should return true for feature with empty dependencies array", () => {
- const feature = createFeature("f1", { dependencies: [] });
+ it('should return true for feature with empty dependencies array', () => {
+ const feature = createFeature('f1', { dependencies: [] });
const allFeatures = [feature];
expect(areDependenciesSatisfied(feature, allFeatures)).toBe(true);
});
- it("should return true when all dependencies are completed", () => {
+ it('should return true when all dependencies are completed', () => {
const allFeatures = [
- createFeature("f1", { status: "completed" }),
- createFeature("f2", { status: "completed" }),
- createFeature("f3", { status: "backlog", dependencies: ["f1", "f2"] }),
+ createFeature('f1', { status: 'completed' }),
+ createFeature('f2', { status: 'completed' }),
+ createFeature('f3', { status: 'backlog', dependencies: ['f1', 'f2'] }),
];
expect(areDependenciesSatisfied(allFeatures[2], allFeatures)).toBe(true);
});
- it("should return true when all dependencies are verified", () => {
+ it('should return true when all dependencies are verified', () => {
const allFeatures = [
- createFeature("f1", { status: "verified" }),
- createFeature("f2", { status: "verified" }),
- createFeature("f3", { status: "backlog", dependencies: ["f1", "f2"] }),
+ createFeature('f1', { status: 'verified' }),
+ createFeature('f2', { status: 'verified' }),
+ createFeature('f3', { status: 'backlog', dependencies: ['f1', 'f2'] }),
];
expect(areDependenciesSatisfied(allFeatures[2], allFeatures)).toBe(true);
});
- it("should return true when dependencies are mix of completed and verified", () => {
+ it('should return true when dependencies are mix of completed and verified', () => {
const allFeatures = [
- createFeature("f1", { status: "completed" }),
- createFeature("f2", { status: "verified" }),
- createFeature("f3", { status: "backlog", dependencies: ["f1", "f2"] }),
+ createFeature('f1', { status: 'completed' }),
+ createFeature('f2', { status: 'verified' }),
+ createFeature('f3', { status: 'backlog', dependencies: ['f1', 'f2'] }),
];
expect(areDependenciesSatisfied(allFeatures[2], allFeatures)).toBe(true);
});
- it("should return false when any dependency is in_progress", () => {
+ it('should return false when any dependency is in_progress', () => {
const allFeatures = [
- createFeature("f1", { status: "completed" }),
- createFeature("f2", { status: "in_progress" }),
- createFeature("f3", { status: "backlog", dependencies: ["f1", "f2"] }),
+ createFeature('f1', { status: 'completed' }),
+ createFeature('f2', { status: 'in_progress' }),
+ createFeature('f3', { status: 'backlog', dependencies: ['f1', 'f2'] }),
];
expect(areDependenciesSatisfied(allFeatures[2], allFeatures)).toBe(false);
});
- it("should return false when any dependency is in backlog", () => {
+ it('should return false when any dependency is in backlog', () => {
const allFeatures = [
- createFeature("f1", { status: "completed" }),
- createFeature("f2", { status: "backlog" }),
- createFeature("f3", { status: "backlog", dependencies: ["f1", "f2"] }),
+ createFeature('f1', { status: 'completed' }),
+ createFeature('f2', { status: 'backlog' }),
+ createFeature('f3', { status: 'backlog', dependencies: ['f1', 'f2'] }),
];
expect(areDependenciesSatisfied(allFeatures[2], allFeatures)).toBe(false);
});
- it("should return false when dependency is missing", () => {
- const allFeatures = [
- createFeature("f1", { status: "backlog", dependencies: ["missing"] }),
- ];
+ it('should return false when dependency is missing', () => {
+ const allFeatures = [createFeature('f1', { status: 'backlog', dependencies: ['missing'] })];
expect(areDependenciesSatisfied(allFeatures[0], allFeatures)).toBe(false);
});
- it("should return false when multiple dependencies are incomplete", () => {
+ it('should return false when multiple dependencies are incomplete', () => {
const allFeatures = [
- createFeature("f1", { status: "backlog" }),
- createFeature("f2", { status: "in_progress" }),
- createFeature("f3", { status: "waiting_approval" }),
- createFeature("f4", { status: "backlog", dependencies: ["f1", "f2", "f3"] }),
+ createFeature('f1', { status: 'backlog' }),
+ createFeature('f2', { status: 'in_progress' }),
+ createFeature('f3', { status: 'waiting_approval' }),
+ createFeature('f4', { status: 'backlog', dependencies: ['f1', 'f2', 'f3'] }),
];
expect(areDependenciesSatisfied(allFeatures[3], allFeatures)).toBe(false);
});
});
- describe("getBlockingDependencies", () => {
- it("should return empty array for feature with no dependencies", () => {
- const feature = createFeature("f1");
+ describe('getBlockingDependencies', () => {
+ it('should return empty array for feature with no dependencies', () => {
+ const feature = createFeature('f1');
const allFeatures = [feature];
expect(getBlockingDependencies(feature, allFeatures)).toEqual([]);
});
- it("should return empty array for feature with empty dependencies array", () => {
- const feature = createFeature("f1", { dependencies: [] });
+ it('should return empty array for feature with empty dependencies array', () => {
+ const feature = createFeature('f1', { dependencies: [] });
const allFeatures = [feature];
expect(getBlockingDependencies(feature, allFeatures)).toEqual([]);
});
- it("should return empty array when all dependencies are completed", () => {
+ it('should return empty array when all dependencies are completed', () => {
const allFeatures = [
- createFeature("f1", { status: "completed" }),
- createFeature("f2", { status: "completed" }),
- createFeature("f3", { status: "backlog", dependencies: ["f1", "f2"] }),
+ createFeature('f1', { status: 'completed' }),
+ createFeature('f2', { status: 'completed' }),
+ createFeature('f3', { status: 'backlog', dependencies: ['f1', 'f2'] }),
];
expect(getBlockingDependencies(allFeatures[2], allFeatures)).toEqual([]);
});
- it("should return empty array when all dependencies are verified", () => {
+ it('should return empty array when all dependencies are verified', () => {
const allFeatures = [
- createFeature("f1", { status: "verified" }),
- createFeature("f2", { status: "verified" }),
- createFeature("f3", { status: "backlog", dependencies: ["f1", "f2"] }),
+ createFeature('f1', { status: 'verified' }),
+ createFeature('f2', { status: 'verified' }),
+ createFeature('f3', { status: 'backlog', dependencies: ['f1', 'f2'] }),
];
expect(getBlockingDependencies(allFeatures[2], allFeatures)).toEqual([]);
});
- it("should return blocking dependencies in backlog status", () => {
+ it('should return blocking dependencies in backlog status', () => {
const allFeatures = [
- createFeature("f1", { status: "backlog" }),
- createFeature("f2", { status: "completed" }),
- createFeature("f3", { status: "backlog", dependencies: ["f1", "f2"] }),
+ createFeature('f1', { status: 'backlog' }),
+ createFeature('f2', { status: 'completed' }),
+ createFeature('f3', { status: 'backlog', dependencies: ['f1', 'f2'] }),
];
- expect(getBlockingDependencies(allFeatures[2], allFeatures)).toEqual(["f1"]);
+ expect(getBlockingDependencies(allFeatures[2], allFeatures)).toEqual(['f1']);
});
- it("should return blocking dependencies in in_progress status", () => {
+ it('should return blocking dependencies in in_progress status', () => {
const allFeatures = [
- createFeature("f1", { status: "in_progress" }),
- createFeature("f2", { status: "verified" }),
- createFeature("f3", { status: "backlog", dependencies: ["f1", "f2"] }),
+ createFeature('f1', { status: 'in_progress' }),
+ createFeature('f2', { status: 'verified' }),
+ createFeature('f3', { status: 'backlog', dependencies: ['f1', 'f2'] }),
];
- expect(getBlockingDependencies(allFeatures[2], allFeatures)).toEqual(["f1"]);
+ expect(getBlockingDependencies(allFeatures[2], allFeatures)).toEqual(['f1']);
});
- it("should return blocking dependencies in waiting_approval status", () => {
+ it('should return blocking dependencies in waiting_approval status', () => {
const allFeatures = [
- createFeature("f1", { status: "waiting_approval" }),
- createFeature("f2", { status: "completed" }),
- createFeature("f3", { status: "backlog", dependencies: ["f1", "f2"] }),
+ createFeature('f1', { status: 'waiting_approval' }),
+ createFeature('f2', { status: 'completed' }),
+ createFeature('f3', { status: 'backlog', dependencies: ['f1', 'f2'] }),
];
- expect(getBlockingDependencies(allFeatures[2], allFeatures)).toEqual(["f1"]);
+ expect(getBlockingDependencies(allFeatures[2], allFeatures)).toEqual(['f1']);
});
- it("should return all blocking dependencies", () => {
+ it('should return all blocking dependencies', () => {
const allFeatures = [
- createFeature("f1", { status: "backlog" }),
- createFeature("f2", { status: "in_progress" }),
- createFeature("f3", { status: "waiting_approval" }),
- createFeature("f4", { status: "completed" }),
- createFeature("f5", { status: "backlog", dependencies: ["f1", "f2", "f3", "f4"] }),
+ createFeature('f1', { status: 'backlog' }),
+ createFeature('f2', { status: 'in_progress' }),
+ createFeature('f3', { status: 'waiting_approval' }),
+ createFeature('f4', { status: 'completed' }),
+ createFeature('f5', { status: 'backlog', dependencies: ['f1', 'f2', 'f3', 'f4'] }),
];
const blocking = getBlockingDependencies(allFeatures[4], allFeatures);
expect(blocking).toHaveLength(3);
- expect(blocking).toContain("f1");
- expect(blocking).toContain("f2");
- expect(blocking).toContain("f3");
- expect(blocking).not.toContain("f4");
+ expect(blocking).toContain('f1');
+ expect(blocking).toContain('f2');
+ expect(blocking).toContain('f3');
+ expect(blocking).not.toContain('f4');
});
- it("should handle missing dependencies", () => {
- const allFeatures = [
- createFeature("f1", { status: "backlog", dependencies: ["missing"] }),
- ];
+ it('should handle missing dependencies', () => {
+ const allFeatures = [createFeature('f1', { status: 'backlog', dependencies: ['missing'] })];
// Missing dependencies won't be in the blocking list since they don't exist
expect(getBlockingDependencies(allFeatures[0], allFeatures)).toEqual([]);
});
- it("should handle mix of completed, verified, and incomplete dependencies", () => {
+ it('should handle mix of completed, verified, and incomplete dependencies', () => {
const allFeatures = [
- createFeature("f1", { status: "completed" }),
- createFeature("f2", { status: "verified" }),
- createFeature("f3", { status: "in_progress" }),
- createFeature("f4", { status: "backlog" }),
- createFeature("f5", { status: "backlog", dependencies: ["f1", "f2", "f3", "f4"] }),
+ createFeature('f1', { status: 'completed' }),
+ createFeature('f2', { status: 'verified' }),
+ createFeature('f3', { status: 'in_progress' }),
+ createFeature('f4', { status: 'backlog' }),
+ createFeature('f5', { status: 'backlog', dependencies: ['f1', 'f2', 'f3', 'f4'] }),
];
const blocking = getBlockingDependencies(allFeatures[4], allFeatures);
expect(blocking).toHaveLength(2);
- expect(blocking).toContain("f3");
- expect(blocking).toContain("f4");
+ expect(blocking).toContain('f3');
+ expect(blocking).toContain('f4');
});
});
});
diff --git a/apps/server/tests/unit/lib/enhancement-prompts.test.ts b/apps/server/tests/unit/lib/enhancement-prompts.test.ts
index d780612d3..ab1398619 100644
--- a/apps/server/tests/unit/lib/enhancement-prompts.test.ts
+++ b/apps/server/tests/unit/lib/enhancement-prompts.test.ts
@@ -1,4 +1,4 @@
-import { describe, it, expect } from "vitest";
+import { describe, it, expect } from 'vitest';
import {
getEnhancementPrompt,
getSystemPrompt,
@@ -15,38 +15,38 @@ import {
SIMPLIFY_EXAMPLES,
ACCEPTANCE_EXAMPLES,
type EnhancementMode,
-} from "@/lib/enhancement-prompts.js";
+} from '@/lib/enhancement-prompts.js';
-describe("enhancement-prompts.ts", () => {
- describe("System Prompt Constants", () => {
- it("should have non-empty improve system prompt", () => {
+describe('enhancement-prompts.ts', () => {
+ describe('System Prompt Constants', () => {
+ it('should have non-empty improve system prompt', () => {
expect(IMPROVE_SYSTEM_PROMPT).toBeDefined();
expect(IMPROVE_SYSTEM_PROMPT.length).toBeGreaterThan(100);
- expect(IMPROVE_SYSTEM_PROMPT).toContain("ANALYZE");
- expect(IMPROVE_SYSTEM_PROMPT).toContain("CLARIFY");
+ expect(IMPROVE_SYSTEM_PROMPT).toContain('ANALYZE');
+ expect(IMPROVE_SYSTEM_PROMPT).toContain('CLARIFY');
});
- it("should have non-empty technical system prompt", () => {
+ it('should have non-empty technical system prompt', () => {
expect(TECHNICAL_SYSTEM_PROMPT).toBeDefined();
expect(TECHNICAL_SYSTEM_PROMPT.length).toBeGreaterThan(100);
- expect(TECHNICAL_SYSTEM_PROMPT).toContain("technical");
+ expect(TECHNICAL_SYSTEM_PROMPT).toContain('technical');
});
- it("should have non-empty simplify system prompt", () => {
+ it('should have non-empty simplify system prompt', () => {
expect(SIMPLIFY_SYSTEM_PROMPT).toBeDefined();
expect(SIMPLIFY_SYSTEM_PROMPT.length).toBeGreaterThan(100);
- expect(SIMPLIFY_SYSTEM_PROMPT).toContain("simplify");
+ expect(SIMPLIFY_SYSTEM_PROMPT).toContain('simplify');
});
- it("should have non-empty acceptance system prompt", () => {
+ it('should have non-empty acceptance system prompt', () => {
expect(ACCEPTANCE_SYSTEM_PROMPT).toBeDefined();
expect(ACCEPTANCE_SYSTEM_PROMPT.length).toBeGreaterThan(100);
- expect(ACCEPTANCE_SYSTEM_PROMPT).toContain("acceptance criteria");
+ expect(ACCEPTANCE_SYSTEM_PROMPT).toContain('acceptance criteria');
});
});
- describe("Example Constants", () => {
- it("should have improve examples with input and output", () => {
+ describe('Example Constants', () => {
+ it('should have improve examples with input and output', () => {
expect(IMPROVE_EXAMPLES).toBeDefined();
expect(IMPROVE_EXAMPLES.length).toBeGreaterThan(0);
IMPROVE_EXAMPLES.forEach((example) => {
@@ -57,7 +57,7 @@ describe("enhancement-prompts.ts", () => {
});
});
- it("should have technical examples with input and output", () => {
+ it('should have technical examples with input and output', () => {
expect(TECHNICAL_EXAMPLES).toBeDefined();
expect(TECHNICAL_EXAMPLES.length).toBeGreaterThan(0);
TECHNICAL_EXAMPLES.forEach((example) => {
@@ -66,7 +66,7 @@ describe("enhancement-prompts.ts", () => {
});
});
- it("should have simplify examples with input and output", () => {
+ it('should have simplify examples with input and output', () => {
expect(SIMPLIFY_EXAMPLES).toBeDefined();
expect(SIMPLIFY_EXAMPLES.length).toBeGreaterThan(0);
SIMPLIFY_EXAMPLES.forEach((example) => {
@@ -75,7 +75,7 @@ describe("enhancement-prompts.ts", () => {
});
});
- it("should have acceptance examples with input and output", () => {
+ it('should have acceptance examples with input and output', () => {
expect(ACCEPTANCE_EXAMPLES).toBeDefined();
expect(ACCEPTANCE_EXAMPLES.length).toBeGreaterThan(0);
ACCEPTANCE_EXAMPLES.forEach((example) => {
@@ -85,66 +85,66 @@ describe("enhancement-prompts.ts", () => {
});
});
- describe("getEnhancementPrompt", () => {
- it("should return config for improve mode", () => {
- const config = getEnhancementPrompt("improve");
+ describe('getEnhancementPrompt', () => {
+ it('should return config for improve mode', () => {
+ const config = getEnhancementPrompt('improve');
expect(config.systemPrompt).toBe(IMPROVE_SYSTEM_PROMPT);
- expect(config.description).toContain("clear");
+ expect(config.description).toContain('clear');
});
- it("should return config for technical mode", () => {
- const config = getEnhancementPrompt("technical");
+ it('should return config for technical mode', () => {
+ const config = getEnhancementPrompt('technical');
expect(config.systemPrompt).toBe(TECHNICAL_SYSTEM_PROMPT);
- expect(config.description).toContain("technical");
+ expect(config.description).toContain('technical');
});
- it("should return config for simplify mode", () => {
- const config = getEnhancementPrompt("simplify");
+ it('should return config for simplify mode', () => {
+ const config = getEnhancementPrompt('simplify');
expect(config.systemPrompt).toBe(SIMPLIFY_SYSTEM_PROMPT);
- expect(config.description).toContain("concise");
+ expect(config.description).toContain('concise');
});
- it("should return config for acceptance mode", () => {
- const config = getEnhancementPrompt("acceptance");
+ it('should return config for acceptance mode', () => {
+ const config = getEnhancementPrompt('acceptance');
expect(config.systemPrompt).toBe(ACCEPTANCE_SYSTEM_PROMPT);
- expect(config.description).toContain("acceptance");
+ expect(config.description).toContain('acceptance');
});
- it("should handle case-insensitive mode", () => {
- const config = getEnhancementPrompt("IMPROVE");
+ it('should handle case-insensitive mode', () => {
+ const config = getEnhancementPrompt('IMPROVE');
expect(config.systemPrompt).toBe(IMPROVE_SYSTEM_PROMPT);
});
- it("should fall back to improve for invalid mode", () => {
- const config = getEnhancementPrompt("invalid-mode");
+ it('should fall back to improve for invalid mode', () => {
+ const config = getEnhancementPrompt('invalid-mode');
expect(config.systemPrompt).toBe(IMPROVE_SYSTEM_PROMPT);
});
- it("should fall back to improve for empty string", () => {
- const config = getEnhancementPrompt("");
+ it('should fall back to improve for empty string', () => {
+ const config = getEnhancementPrompt('');
expect(config.systemPrompt).toBe(IMPROVE_SYSTEM_PROMPT);
});
});
- describe("getSystemPrompt", () => {
- it("should return correct system prompt for each mode", () => {
- expect(getSystemPrompt("improve")).toBe(IMPROVE_SYSTEM_PROMPT);
- expect(getSystemPrompt("technical")).toBe(TECHNICAL_SYSTEM_PROMPT);
- expect(getSystemPrompt("simplify")).toBe(SIMPLIFY_SYSTEM_PROMPT);
- expect(getSystemPrompt("acceptance")).toBe(ACCEPTANCE_SYSTEM_PROMPT);
+ describe('getSystemPrompt', () => {
+ it('should return correct system prompt for each mode', () => {
+ expect(getSystemPrompt('improve')).toBe(IMPROVE_SYSTEM_PROMPT);
+ expect(getSystemPrompt('technical')).toBe(TECHNICAL_SYSTEM_PROMPT);
+ expect(getSystemPrompt('simplify')).toBe(SIMPLIFY_SYSTEM_PROMPT);
+ expect(getSystemPrompt('acceptance')).toBe(ACCEPTANCE_SYSTEM_PROMPT);
});
});
- describe("getExamples", () => {
- it("should return correct examples for each mode", () => {
- expect(getExamples("improve")).toBe(IMPROVE_EXAMPLES);
- expect(getExamples("technical")).toBe(TECHNICAL_EXAMPLES);
- expect(getExamples("simplify")).toBe(SIMPLIFY_EXAMPLES);
- expect(getExamples("acceptance")).toBe(ACCEPTANCE_EXAMPLES);
+ describe('getExamples', () => {
+ it('should return correct examples for each mode', () => {
+ expect(getExamples('improve')).toBe(IMPROVE_EXAMPLES);
+ expect(getExamples('technical')).toBe(TECHNICAL_EXAMPLES);
+ expect(getExamples('simplify')).toBe(SIMPLIFY_EXAMPLES);
+ expect(getExamples('acceptance')).toBe(ACCEPTANCE_EXAMPLES);
});
- it("should return arrays with example objects", () => {
- const modes: EnhancementMode[] = ["improve", "technical", "simplify", "acceptance"];
+ it('should return arrays with example objects', () => {
+ const modes: EnhancementMode[] = ['improve', 'technical', 'simplify', 'acceptance'];
modes.forEach((mode) => {
const examples = getExamples(mode);
expect(Array.isArray(examples)).toBe(true);
@@ -153,38 +153,38 @@ describe("enhancement-prompts.ts", () => {
});
});
- describe("buildUserPrompt", () => {
- const testText = "Add a logout button";
+ describe('buildUserPrompt', () => {
+ const testText = 'Add a logout button';
- it("should build prompt with examples by default", () => {
- const prompt = buildUserPrompt("improve", testText);
- expect(prompt).toContain("Example 1:");
+ it('should build prompt with examples by default', () => {
+ const prompt = buildUserPrompt('improve', testText);
+ expect(prompt).toContain('Example 1:');
expect(prompt).toContain(testText);
- expect(prompt).toContain("Now, please enhance the following task description:");
+ expect(prompt).toContain('Now, please enhance the following task description:');
});
- it("should build prompt without examples when includeExamples is false", () => {
- const prompt = buildUserPrompt("improve", testText, false);
- expect(prompt).not.toContain("Example 1:");
+ it('should build prompt without examples when includeExamples is false', () => {
+ const prompt = buildUserPrompt('improve', testText, false);
+ expect(prompt).not.toContain('Example 1:');
expect(prompt).toContain(testText);
- expect(prompt).toContain("Please enhance the following task description:");
+ expect(prompt).toContain('Please enhance the following task description:');
});
- it("should include all examples for improve mode", () => {
- const prompt = buildUserPrompt("improve", testText);
+ it('should include all examples for improve mode', () => {
+ const prompt = buildUserPrompt('improve', testText);
IMPROVE_EXAMPLES.forEach((example, index) => {
expect(prompt).toContain(`Example ${index + 1}:`);
expect(prompt).toContain(example.input);
});
});
- it("should include separator between examples", () => {
- const prompt = buildUserPrompt("improve", testText);
- expect(prompt).toContain("---");
+ it('should include separator between examples', () => {
+ const prompt = buildUserPrompt('improve', testText);
+ expect(prompt).toContain('---');
});
- it("should work with all enhancement modes", () => {
- const modes: EnhancementMode[] = ["improve", "technical", "simplify", "acceptance"];
+ it('should work with all enhancement modes', () => {
+ const modes: EnhancementMode[] = ['improve', 'technical', 'simplify', 'acceptance'];
modes.forEach((mode) => {
const prompt = buildUserPrompt(mode, testText);
expect(prompt).toContain(testText);
@@ -192,40 +192,40 @@ describe("enhancement-prompts.ts", () => {
});
});
- it("should preserve the original text exactly", () => {
- const specialText = "Add feature with special chars: <>&\"'";
- const prompt = buildUserPrompt("improve", specialText);
+ it('should preserve the original text exactly', () => {
+ const specialText = 'Add feature with special chars: <>&"\'';
+ const prompt = buildUserPrompt('improve', specialText);
expect(prompt).toContain(specialText);
});
});
- describe("isValidEnhancementMode", () => {
- it("should return true for valid modes", () => {
- expect(isValidEnhancementMode("improve")).toBe(true);
- expect(isValidEnhancementMode("technical")).toBe(true);
- expect(isValidEnhancementMode("simplify")).toBe(true);
- expect(isValidEnhancementMode("acceptance")).toBe(true);
+ describe('isValidEnhancementMode', () => {
+ it('should return true for valid modes', () => {
+ expect(isValidEnhancementMode('improve')).toBe(true);
+ expect(isValidEnhancementMode('technical')).toBe(true);
+ expect(isValidEnhancementMode('simplify')).toBe(true);
+ expect(isValidEnhancementMode('acceptance')).toBe(true);
});
- it("should return false for invalid modes", () => {
- expect(isValidEnhancementMode("invalid")).toBe(false);
- expect(isValidEnhancementMode("IMPROVE")).toBe(false); // case-sensitive
- expect(isValidEnhancementMode("")).toBe(false);
- expect(isValidEnhancementMode("random")).toBe(false);
+ it('should return false for invalid modes', () => {
+ expect(isValidEnhancementMode('invalid')).toBe(false);
+ expect(isValidEnhancementMode('IMPROVE')).toBe(false); // case-sensitive
+ expect(isValidEnhancementMode('')).toBe(false);
+ expect(isValidEnhancementMode('random')).toBe(false);
});
});
- describe("getAvailableEnhancementModes", () => {
- it("should return all four enhancement modes", () => {
+ describe('getAvailableEnhancementModes', () => {
+ it('should return all four enhancement modes', () => {
const modes = getAvailableEnhancementModes();
expect(modes).toHaveLength(4);
- expect(modes).toContain("improve");
- expect(modes).toContain("technical");
- expect(modes).toContain("simplify");
- expect(modes).toContain("acceptance");
+ expect(modes).toContain('improve');
+ expect(modes).toContain('technical');
+ expect(modes).toContain('simplify');
+ expect(modes).toContain('acceptance');
});
- it("should return an array", () => {
+ it('should return an array', () => {
const modes = getAvailableEnhancementModes();
expect(Array.isArray(modes)).toBe(true);
});
diff --git a/apps/server/tests/unit/lib/error-handler.test.ts b/apps/server/tests/unit/lib/error-handler.test.ts
index 7e5a1ac43..87a160368 100644
--- a/apps/server/tests/unit/lib/error-handler.test.ts
+++ b/apps/server/tests/unit/lib/error-handler.test.ts
@@ -1,4 +1,4 @@
-import { describe, it, expect } from "vitest";
+import { describe, it, expect } from 'vitest';
import {
isAbortError,
isAuthenticationError,
@@ -6,206 +6,206 @@ import {
classifyError,
getUserFriendlyErrorMessage,
type ErrorType,
-} from "@automaker/utils";
+} from '@automaker/utils';
-describe("error-handler.ts", () => {
- describe("isAbortError", () => {
- it("should detect AbortError by error name", () => {
- const error = new Error("Operation cancelled");
- error.name = "AbortError";
+describe('error-handler.ts', () => {
+ describe('isAbortError', () => {
+ it('should detect AbortError by error name', () => {
+ const error = new Error('Operation cancelled');
+ error.name = 'AbortError';
expect(isAbortError(error)).toBe(true);
});
- it("should detect abort error by message content", () => {
- const error = new Error("Request was aborted");
+ it('should detect abort error by message content', () => {
+ const error = new Error('Request was aborted');
expect(isAbortError(error)).toBe(true);
});
- it("should return false for non-abort errors", () => {
- const error = new Error("Something else went wrong");
+ it('should return false for non-abort errors', () => {
+ const error = new Error('Something else went wrong');
expect(isAbortError(error)).toBe(false);
});
- it("should return false for non-Error objects", () => {
- expect(isAbortError("not an error")).toBe(false);
+ it('should return false for non-Error objects', () => {
+ expect(isAbortError('not an error')).toBe(false);
expect(isAbortError(null)).toBe(false);
expect(isAbortError(undefined)).toBe(false);
});
});
- describe("isCancellationError", () => {
+ describe('isCancellationError', () => {
it("should detect 'cancelled' message", () => {
- expect(isCancellationError("Operation was cancelled")).toBe(true);
+ expect(isCancellationError('Operation was cancelled')).toBe(true);
});
it("should detect 'canceled' message", () => {
- expect(isCancellationError("Request was canceled")).toBe(true);
+ expect(isCancellationError('Request was canceled')).toBe(true);
});
it("should detect 'stopped' message", () => {
- expect(isCancellationError("Process was stopped")).toBe(true);
+ expect(isCancellationError('Process was stopped')).toBe(true);
});
it("should detect 'aborted' message", () => {
- expect(isCancellationError("Task was aborted")).toBe(true);
+ expect(isCancellationError('Task was aborted')).toBe(true);
});
- it("should be case insensitive", () => {
- expect(isCancellationError("CANCELLED")).toBe(true);
- expect(isCancellationError("Canceled")).toBe(true);
+ it('should be case insensitive', () => {
+ expect(isCancellationError('CANCELLED')).toBe(true);
+ expect(isCancellationError('Canceled')).toBe(true);
});
- it("should return false for non-cancellation errors", () => {
- expect(isCancellationError("File not found")).toBe(false);
- expect(isCancellationError("Network error")).toBe(false);
+ it('should return false for non-cancellation errors', () => {
+ expect(isCancellationError('File not found')).toBe(false);
+ expect(isCancellationError('Network error')).toBe(false);
});
});
- describe("isAuthenticationError", () => {
+ describe('isAuthenticationError', () => {
it("should detect 'Authentication failed' message", () => {
- expect(isAuthenticationError("Authentication failed")).toBe(true);
+ expect(isAuthenticationError('Authentication failed')).toBe(true);
});
it("should detect 'Invalid API key' message", () => {
- expect(isAuthenticationError("Invalid API key provided")).toBe(true);
+ expect(isAuthenticationError('Invalid API key provided')).toBe(true);
});
it("should detect 'authentication_failed' message", () => {
- expect(isAuthenticationError("authentication_failed")).toBe(true);
+ expect(isAuthenticationError('authentication_failed')).toBe(true);
});
it("should detect 'Fix external API key' message", () => {
- expect(isAuthenticationError("Fix external API key configuration")).toBe(true);
+ expect(isAuthenticationError('Fix external API key configuration')).toBe(true);
});
- it("should return false for non-authentication errors", () => {
- expect(isAuthenticationError("Network connection error")).toBe(false);
- expect(isAuthenticationError("File not found")).toBe(false);
+ it('should return false for non-authentication errors', () => {
+ expect(isAuthenticationError('Network connection error')).toBe(false);
+ expect(isAuthenticationError('File not found')).toBe(false);
});
- it("should be case sensitive", () => {
- expect(isAuthenticationError("authentication Failed")).toBe(false);
+ it('should be case sensitive', () => {
+ expect(isAuthenticationError('authentication Failed')).toBe(false);
});
});
- describe("classifyError", () => {
- it("should classify authentication errors", () => {
- const error = new Error("Authentication failed");
+ describe('classifyError', () => {
+ it('should classify authentication errors', () => {
+ const error = new Error('Authentication failed');
const result = classifyError(error);
- expect(result.type).toBe("authentication");
+ expect(result.type).toBe('authentication');
expect(result.isAuth).toBe(true);
expect(result.isAbort).toBe(false);
- expect(result.message).toBe("Authentication failed");
+ expect(result.message).toBe('Authentication failed');
expect(result.originalError).toBe(error);
});
- it("should classify abort errors", () => {
- const error = new Error("Operation aborted");
- error.name = "AbortError";
+ it('should classify abort errors', () => {
+ const error = new Error('Operation aborted');
+ error.name = 'AbortError';
const result = classifyError(error);
- expect(result.type).toBe("abort");
+ expect(result.type).toBe('abort');
expect(result.isAbort).toBe(true);
expect(result.isAuth).toBe(false);
- expect(result.message).toBe("Operation aborted");
+ expect(result.message).toBe('Operation aborted');
});
- it("should prioritize auth over abort if both match", () => {
- const error = new Error("Authentication failed and aborted");
+ it('should prioritize auth over abort if both match', () => {
+ const error = new Error('Authentication failed and aborted');
const result = classifyError(error);
- expect(result.type).toBe("authentication");
+ expect(result.type).toBe('authentication');
expect(result.isAuth).toBe(true);
expect(result.isAbort).toBe(true); // Still detected as abort too
});
- it("should classify cancellation errors", () => {
- const error = new Error("Operation was cancelled");
+ it('should classify cancellation errors', () => {
+ const error = new Error('Operation was cancelled');
const result = classifyError(error);
- expect(result.type).toBe("cancellation");
+ expect(result.type).toBe('cancellation');
expect(result.isCancellation).toBe(true);
expect(result.isAbort).toBe(false);
expect(result.isAuth).toBe(false);
});
- it("should prioritize abort over cancellation if both match", () => {
- const error = new Error("Operation aborted");
- error.name = "AbortError";
+ it('should prioritize abort over cancellation if both match', () => {
+ const error = new Error('Operation aborted');
+ error.name = 'AbortError';
const result = classifyError(error);
- expect(result.type).toBe("abort");
+ expect(result.type).toBe('abort');
expect(result.isAbort).toBe(true);
expect(result.isCancellation).toBe(true); // Still detected as cancellation too
});
it("should classify cancellation errors with 'canceled' spelling", () => {
- const error = new Error("Request was canceled");
+ const error = new Error('Request was canceled');
const result = classifyError(error);
- expect(result.type).toBe("cancellation");
+ expect(result.type).toBe('cancellation');
expect(result.isCancellation).toBe(true);
});
it("should classify cancellation errors with 'stopped' message", () => {
- const error = new Error("Process was stopped");
+ const error = new Error('Process was stopped');
const result = classifyError(error);
- expect(result.type).toBe("cancellation");
+ expect(result.type).toBe('cancellation');
expect(result.isCancellation).toBe(true);
});
- it("should classify generic Error as execution error", () => {
- const error = new Error("Something went wrong");
+ it('should classify generic Error as execution error', () => {
+ const error = new Error('Something went wrong');
const result = classifyError(error);
- expect(result.type).toBe("execution");
+ expect(result.type).toBe('execution');
expect(result.isAuth).toBe(false);
expect(result.isAbort).toBe(false);
});
- it("should classify non-Error objects as unknown", () => {
- const error = "string error";
+ it('should classify non-Error objects as unknown', () => {
+ const error = 'string error';
const result = classifyError(error);
- expect(result.type).toBe("unknown");
- expect(result.message).toBe("string error");
+ expect(result.type).toBe('unknown');
+ expect(result.message).toBe('string error');
});
- it("should handle null and undefined", () => {
+ it('should handle null and undefined', () => {
const nullResult = classifyError(null);
- expect(nullResult.type).toBe("unknown");
- expect(nullResult.message).toBe("Unknown error");
+ expect(nullResult.type).toBe('unknown');
+ expect(nullResult.message).toBe('Unknown error');
const undefinedResult = classifyError(undefined);
- expect(undefinedResult.type).toBe("unknown");
- expect(undefinedResult.message).toBe("Unknown error");
+ expect(undefinedResult.type).toBe('unknown');
+ expect(undefinedResult.message).toBe('Unknown error');
});
});
- describe("getUserFriendlyErrorMessage", () => {
- it("should return friendly message for abort errors", () => {
- const error = new Error("abort");
+ describe('getUserFriendlyErrorMessage', () => {
+ it('should return friendly message for abort errors', () => {
+ const error = new Error('abort');
const result = getUserFriendlyErrorMessage(error);
- expect(result).toBe("Operation was cancelled");
+ expect(result).toBe('Operation was cancelled');
});
- it("should return friendly message for authentication errors", () => {
- const error = new Error("Authentication failed");
+ it('should return friendly message for authentication errors', () => {
+ const error = new Error('Authentication failed');
const result = getUserFriendlyErrorMessage(error);
- expect(result).toBe("Authentication failed. Please check your API key.");
+ expect(result).toBe('Authentication failed. Please check your API key.');
});
- it("should return original message for other errors", () => {
- const error = new Error("File not found");
+ it('should return original message for other errors', () => {
+ const error = new Error('File not found');
const result = getUserFriendlyErrorMessage(error);
- expect(result).toBe("File not found");
+ expect(result).toBe('File not found');
});
- it("should handle non-Error objects", () => {
- const result = getUserFriendlyErrorMessage("Custom error");
- expect(result).toBe("Custom error");
+ it('should handle non-Error objects', () => {
+ const result = getUserFriendlyErrorMessage('Custom error');
+ expect(result).toBe('Custom error');
});
});
});
diff --git a/apps/server/tests/unit/lib/events.test.ts b/apps/server/tests/unit/lib/events.test.ts
index 4741a3659..a8a780928 100644
--- a/apps/server/tests/unit/lib/events.test.ts
+++ b/apps/server/tests/unit/lib/events.test.ts
@@ -1,20 +1,20 @@
-import { describe, it, expect, vi } from "vitest";
-import { createEventEmitter, type EventType } from "@/lib/events.js";
+import { describe, it, expect, vi } from 'vitest';
+import { createEventEmitter, type EventType } from '@/lib/events.js';
-describe("events.ts", () => {
- describe("createEventEmitter", () => {
- it("should emit events to single subscriber", () => {
+describe('events.ts', () => {
+ describe('createEventEmitter', () => {
+ it('should emit events to single subscriber', () => {
const emitter = createEventEmitter();
const callback = vi.fn();
emitter.subscribe(callback);
- emitter.emit("agent:stream", { message: "test" });
+ emitter.emit('agent:stream', { message: 'test' });
expect(callback).toHaveBeenCalledOnce();
- expect(callback).toHaveBeenCalledWith("agent:stream", { message: "test" });
+ expect(callback).toHaveBeenCalledWith('agent:stream', { message: 'test' });
});
- it("should emit events to multiple subscribers", () => {
+ it('should emit events to multiple subscribers', () => {
const emitter = createEventEmitter();
const callback1 = vi.fn();
const callback2 = vi.fn();
@@ -23,42 +23,42 @@ describe("events.ts", () => {
emitter.subscribe(callback1);
emitter.subscribe(callback2);
emitter.subscribe(callback3);
- emitter.emit("feature:started", { id: "123" });
+ emitter.emit('feature:started', { id: '123' });
expect(callback1).toHaveBeenCalledOnce();
expect(callback2).toHaveBeenCalledOnce();
expect(callback3).toHaveBeenCalledOnce();
- expect(callback1).toHaveBeenCalledWith("feature:started", { id: "123" });
+ expect(callback1).toHaveBeenCalledWith('feature:started', { id: '123' });
});
- it("should support unsubscribe functionality", () => {
+ it('should support unsubscribe functionality', () => {
const emitter = createEventEmitter();
const callback = vi.fn();
const unsubscribe = emitter.subscribe(callback);
- emitter.emit("agent:stream", { test: 1 });
+ emitter.emit('agent:stream', { test: 1 });
expect(callback).toHaveBeenCalledOnce();
unsubscribe();
- emitter.emit("agent:stream", { test: 2 });
+ emitter.emit('agent:stream', { test: 2 });
expect(callback).toHaveBeenCalledOnce(); // Still called only once
});
- it("should handle errors in subscribers without crashing", () => {
+ it('should handle errors in subscribers without crashing', () => {
const emitter = createEventEmitter();
const errorCallback = vi.fn(() => {
- throw new Error("Subscriber error");
+ throw new Error('Subscriber error');
});
const normalCallback = vi.fn();
- const consoleSpy = vi.spyOn(console, "error").mockImplementation(() => {});
+ const consoleSpy = vi.spyOn(console, 'error').mockImplementation(() => {});
emitter.subscribe(errorCallback);
emitter.subscribe(normalCallback);
expect(() => {
- emitter.emit("feature:error", { error: "test" });
+ emitter.emit('feature:error', { error: 'test' });
}).not.toThrow();
expect(errorCallback).toHaveBeenCalledOnce();
@@ -68,17 +68,17 @@ describe("events.ts", () => {
consoleSpy.mockRestore();
});
- it("should emit different event types", () => {
+ it('should emit different event types', () => {
const emitter = createEventEmitter();
const callback = vi.fn();
emitter.subscribe(callback);
const eventTypes: EventType[] = [
- "agent:stream",
- "auto-mode:started",
- "feature:completed",
- "project:analysis-progress",
+ 'agent:stream',
+ 'auto-mode:started',
+ 'feature:completed',
+ 'project:analysis-progress',
];
eventTypes.forEach((type) => {
@@ -88,15 +88,15 @@ describe("events.ts", () => {
expect(callback).toHaveBeenCalledTimes(4);
});
- it("should handle emitting without subscribers", () => {
+ it('should handle emitting without subscribers', () => {
const emitter = createEventEmitter();
expect(() => {
- emitter.emit("agent:stream", { test: true });
+ emitter.emit('agent:stream', { test: true });
}).not.toThrow();
});
- it("should allow multiple subscriptions and unsubscriptions", () => {
+ it('should allow multiple subscriptions and unsubscriptions', () => {
const emitter = createEventEmitter();
const callback1 = vi.fn();
const callback2 = vi.fn();
@@ -106,14 +106,14 @@ describe("events.ts", () => {
const unsub2 = emitter.subscribe(callback2);
const unsub3 = emitter.subscribe(callback3);
- emitter.emit("feature:started", { test: 1 });
+ emitter.emit('feature:started', { test: 1 });
expect(callback1).toHaveBeenCalledOnce();
expect(callback2).toHaveBeenCalledOnce();
expect(callback3).toHaveBeenCalledOnce();
unsub2();
- emitter.emit("feature:started", { test: 2 });
+ emitter.emit('feature:started', { test: 2 });
expect(callback1).toHaveBeenCalledTimes(2);
expect(callback2).toHaveBeenCalledOnce(); // Still just once
expect(callback3).toHaveBeenCalledTimes(2);
@@ -121,7 +121,7 @@ describe("events.ts", () => {
unsub1();
unsub3();
- emitter.emit("feature:started", { test: 3 });
+ emitter.emit('feature:started', { test: 3 });
expect(callback1).toHaveBeenCalledTimes(2);
expect(callback2).toHaveBeenCalledOnce();
expect(callback3).toHaveBeenCalledTimes(2);
diff --git a/apps/server/tests/unit/lib/fs-utils.test.ts b/apps/server/tests/unit/lib/fs-utils.test.ts
index 2174822ac..6ac49e6d8 100644
--- a/apps/server/tests/unit/lib/fs-utils.test.ts
+++ b/apps/server/tests/unit/lib/fs-utils.test.ts
@@ -1,10 +1,10 @@
-import { describe, it, expect, vi, beforeEach, afterEach } from "vitest";
-import { mkdirSafe, existsSafe } from "@automaker/utils";
-import fs from "fs/promises";
-import path from "path";
-import os from "os";
+import { describe, it, expect, vi, beforeEach, afterEach } from 'vitest';
+import { mkdirSafe, existsSafe } from '@automaker/utils';
+import fs from 'fs/promises';
+import path from 'path';
+import os from 'os';
-describe("fs-utils.ts", () => {
+describe('fs-utils.ts', () => {
let testDir: string;
beforeEach(async () => {
@@ -22,43 +22,41 @@ describe("fs-utils.ts", () => {
}
});
- describe("mkdirSafe", () => {
- it("should create a new directory", async () => {
- const newDir = path.join(testDir, "new-directory");
+ describe('mkdirSafe', () => {
+ it('should create a new directory', async () => {
+ const newDir = path.join(testDir, 'new-directory');
await mkdirSafe(newDir);
const stats = await fs.stat(newDir);
expect(stats.isDirectory()).toBe(true);
});
- it("should succeed if directory already exists", async () => {
- const existingDir = path.join(testDir, "existing");
+ it('should succeed if directory already exists', async () => {
+ const existingDir = path.join(testDir, 'existing');
await fs.mkdir(existingDir);
// Should not throw
await expect(mkdirSafe(existingDir)).resolves.toBeUndefined();
});
- it("should create nested directories", async () => {
- const nestedDir = path.join(testDir, "a", "b", "c");
+ it('should create nested directories', async () => {
+ const nestedDir = path.join(testDir, 'a', 'b', 'c');
await mkdirSafe(nestedDir);
const stats = await fs.stat(nestedDir);
expect(stats.isDirectory()).toBe(true);
});
- it("should throw if path exists as a file", async () => {
- const filePath = path.join(testDir, "file.txt");
- await fs.writeFile(filePath, "content");
+ it('should throw if path exists as a file', async () => {
+ const filePath = path.join(testDir, 'file.txt');
+ await fs.writeFile(filePath, 'content');
- await expect(mkdirSafe(filePath)).rejects.toThrow(
- "Path exists and is not a directory"
- );
+ await expect(mkdirSafe(filePath)).rejects.toThrow('Path exists and is not a directory');
});
- it("should succeed if path is a symlink to a directory", async () => {
- const realDir = path.join(testDir, "real-dir");
- const symlinkPath = path.join(testDir, "link-to-dir");
+ it('should succeed if path is a symlink to a directory', async () => {
+ const realDir = path.join(testDir, 'real-dir');
+ const symlinkPath = path.join(testDir, 'link-to-dir');
await fs.mkdir(realDir);
await fs.symlink(realDir, symlinkPath);
@@ -66,12 +64,12 @@ describe("fs-utils.ts", () => {
await expect(mkdirSafe(symlinkPath)).resolves.toBeUndefined();
});
- it("should handle ELOOP error gracefully when checking path", async () => {
+ it('should handle ELOOP error gracefully when checking path', async () => {
// Mock lstat to throw ELOOP error
const originalLstat = fs.lstat;
- const mkdirSafePath = path.join(testDir, "eloop-path");
-
- vi.spyOn(fs, "lstat").mockRejectedValueOnce({ code: "ELOOP" });
+ const mkdirSafePath = path.join(testDir, 'eloop-path');
+
+ vi.spyOn(fs, 'lstat').mockRejectedValueOnce({ code: 'ELOOP' });
// Should not throw, should return gracefully
await expect(mkdirSafe(mkdirSafePath)).resolves.toBeUndefined();
@@ -79,13 +77,13 @@ describe("fs-utils.ts", () => {
vi.restoreAllMocks();
});
- it("should handle EEXIST error gracefully when creating directory", async () => {
- const newDir = path.join(testDir, "race-condition-dir");
-
+ it('should handle EEXIST error gracefully when creating directory', async () => {
+ const newDir = path.join(testDir, 'race-condition-dir');
+
// Mock lstat to return ENOENT (path doesn't exist)
// Then mock mkdir to throw EEXIST (race condition)
- vi.spyOn(fs, "lstat").mockRejectedValueOnce({ code: "ENOENT" });
- vi.spyOn(fs, "mkdir").mockRejectedValueOnce({ code: "EEXIST" });
+ vi.spyOn(fs, 'lstat').mockRejectedValueOnce({ code: 'ENOENT' });
+ vi.spyOn(fs, 'mkdir').mockRejectedValueOnce({ code: 'EEXIST' });
// Should not throw, should return gracefully
await expect(mkdirSafe(newDir)).resolves.toBeUndefined();
@@ -93,13 +91,13 @@ describe("fs-utils.ts", () => {
vi.restoreAllMocks();
});
- it("should handle ELOOP error gracefully when creating directory", async () => {
- const newDir = path.join(testDir, "eloop-create-dir");
-
+ it('should handle ELOOP error gracefully when creating directory', async () => {
+ const newDir = path.join(testDir, 'eloop-create-dir');
+
// Mock lstat to return ENOENT (path doesn't exist)
// Then mock mkdir to throw ELOOP
- vi.spyOn(fs, "lstat").mockRejectedValueOnce({ code: "ENOENT" });
- vi.spyOn(fs, "mkdir").mockRejectedValueOnce({ code: "ELOOP" });
+ vi.spyOn(fs, 'lstat').mockRejectedValueOnce({ code: 'ENOENT' });
+ vi.spyOn(fs, 'mkdir').mockRejectedValueOnce({ code: 'ELOOP' });
// Should not throw, should return gracefully
await expect(mkdirSafe(newDir)).resolves.toBeUndefined();
@@ -108,34 +106,34 @@ describe("fs-utils.ts", () => {
});
});
- describe("existsSafe", () => {
- it("should return true for existing file", async () => {
- const filePath = path.join(testDir, "test-file.txt");
- await fs.writeFile(filePath, "content");
+ describe('existsSafe', () => {
+ it('should return true for existing file', async () => {
+ const filePath = path.join(testDir, 'test-file.txt');
+ await fs.writeFile(filePath, 'content');
const exists = await existsSafe(filePath);
expect(exists).toBe(true);
});
- it("should return true for existing directory", async () => {
- const dirPath = path.join(testDir, "test-dir");
+ it('should return true for existing directory', async () => {
+ const dirPath = path.join(testDir, 'test-dir');
await fs.mkdir(dirPath);
const exists = await existsSafe(dirPath);
expect(exists).toBe(true);
});
- it("should return false for non-existent path", async () => {
- const nonExistent = path.join(testDir, "does-not-exist");
+ it('should return false for non-existent path', async () => {
+ const nonExistent = path.join(testDir, 'does-not-exist');
const exists = await existsSafe(nonExistent);
expect(exists).toBe(false);
});
- it("should return true for symlink", async () => {
- const realFile = path.join(testDir, "real-file.txt");
- const symlinkPath = path.join(testDir, "link-to-file");
- await fs.writeFile(realFile, "content");
+ it('should return true for symlink', async () => {
+ const realFile = path.join(testDir, 'real-file.txt');
+ const symlinkPath = path.join(testDir, 'link-to-file');
+ await fs.writeFile(realFile, 'content');
await fs.symlink(realFile, symlinkPath);
const exists = await existsSafe(symlinkPath);
@@ -143,29 +141,29 @@ describe("fs-utils.ts", () => {
});
it("should return true for broken symlink (symlink exists even if target doesn't)", async () => {
- const symlinkPath = path.join(testDir, "broken-link");
- const nonExistent = path.join(testDir, "non-existent-target");
+ const symlinkPath = path.join(testDir, 'broken-link');
+ const nonExistent = path.join(testDir, 'non-existent-target');
await fs.symlink(nonExistent, symlinkPath);
const exists = await existsSafe(symlinkPath);
expect(exists).toBe(true);
});
- it("should return true for ELOOP error (symlink loop)", async () => {
+ it('should return true for ELOOP error (symlink loop)', async () => {
// Mock lstat to throw ELOOP error
- vi.spyOn(fs, "lstat").mockRejectedValueOnce({ code: "ELOOP" });
+ vi.spyOn(fs, 'lstat').mockRejectedValueOnce({ code: 'ELOOP' });
- const exists = await existsSafe("/some/path/with/loop");
+ const exists = await existsSafe('/some/path/with/loop');
expect(exists).toBe(true);
vi.restoreAllMocks();
});
- it("should throw for other errors", async () => {
+ it('should throw for other errors', async () => {
// Mock lstat to throw a non-ENOENT, non-ELOOP error
- vi.spyOn(fs, "lstat").mockRejectedValueOnce({ code: "EACCES" });
+ vi.spyOn(fs, 'lstat').mockRejectedValueOnce({ code: 'EACCES' });
- await expect(existsSafe("/some/path")).rejects.toMatchObject({ code: "EACCES" });
+ await expect(existsSafe('/some/path')).rejects.toMatchObject({ code: 'EACCES' });
vi.restoreAllMocks();
});
diff --git a/apps/server/tests/unit/lib/image-handler.test.ts b/apps/server/tests/unit/lib/image-handler.test.ts
index f57ef0e56..18b04155b 100644
--- a/apps/server/tests/unit/lib/image-handler.test.ts
+++ b/apps/server/tests/unit/lib/image-handler.test.ts
@@ -1,174 +1,164 @@
-import { describe, it, expect, vi, beforeEach } from "vitest";
+import { describe, it, expect, vi, beforeEach } from 'vitest';
import {
getMimeTypeForImage,
readImageAsBase64,
convertImagesToContentBlocks,
formatImagePathsForPrompt,
-} from "@automaker/utils";
-import { pngBase64Fixture } from "../../fixtures/images.js";
-import * as fs from "fs/promises";
+} from '@automaker/utils';
+import { pngBase64Fixture } from '../../fixtures/images.js';
+import * as fs from 'fs/promises';
-vi.mock("fs/promises");
+vi.mock('fs/promises');
-describe("image-handler.ts", () => {
+describe('image-handler.ts', () => {
beforeEach(() => {
vi.clearAllMocks();
});
- describe("getMimeTypeForImage", () => {
- it("should return correct MIME type for .jpg", () => {
- expect(getMimeTypeForImage("test.jpg")).toBe("image/jpeg");
- expect(getMimeTypeForImage("/path/to/test.jpg")).toBe("image/jpeg");
+ describe('getMimeTypeForImage', () => {
+ it('should return correct MIME type for .jpg', () => {
+ expect(getMimeTypeForImage('test.jpg')).toBe('image/jpeg');
+ expect(getMimeTypeForImage('/path/to/test.jpg')).toBe('image/jpeg');
});
- it("should return correct MIME type for .jpeg", () => {
- expect(getMimeTypeForImage("test.jpeg")).toBe("image/jpeg");
+ it('should return correct MIME type for .jpeg', () => {
+ expect(getMimeTypeForImage('test.jpeg')).toBe('image/jpeg');
});
- it("should return correct MIME type for .png", () => {
- expect(getMimeTypeForImage("test.png")).toBe("image/png");
+ it('should return correct MIME type for .png', () => {
+ expect(getMimeTypeForImage('test.png')).toBe('image/png');
});
- it("should return correct MIME type for .gif", () => {
- expect(getMimeTypeForImage("test.gif")).toBe("image/gif");
+ it('should return correct MIME type for .gif', () => {
+ expect(getMimeTypeForImage('test.gif')).toBe('image/gif');
});
- it("should return correct MIME type for .webp", () => {
- expect(getMimeTypeForImage("test.webp")).toBe("image/webp");
+ it('should return correct MIME type for .webp', () => {
+ expect(getMimeTypeForImage('test.webp')).toBe('image/webp');
});
- it("should be case-insensitive", () => {
- expect(getMimeTypeForImage("test.PNG")).toBe("image/png");
- expect(getMimeTypeForImage("test.JPG")).toBe("image/jpeg");
- expect(getMimeTypeForImage("test.GIF")).toBe("image/gif");
- expect(getMimeTypeForImage("test.WEBP")).toBe("image/webp");
+ it('should be case-insensitive', () => {
+ expect(getMimeTypeForImage('test.PNG')).toBe('image/png');
+ expect(getMimeTypeForImage('test.JPG')).toBe('image/jpeg');
+ expect(getMimeTypeForImage('test.GIF')).toBe('image/gif');
+ expect(getMimeTypeForImage('test.WEBP')).toBe('image/webp');
});
- it("should default to image/png for unknown extensions", () => {
- expect(getMimeTypeForImage("test.unknown")).toBe("image/png");
- expect(getMimeTypeForImage("test.txt")).toBe("image/png");
- expect(getMimeTypeForImage("test")).toBe("image/png");
+ it('should default to image/png for unknown extensions', () => {
+ expect(getMimeTypeForImage('test.unknown')).toBe('image/png');
+ expect(getMimeTypeForImage('test.txt')).toBe('image/png');
+ expect(getMimeTypeForImage('test')).toBe('image/png');
});
- it("should handle paths with multiple dots", () => {
- expect(getMimeTypeForImage("my.image.file.jpg")).toBe("image/jpeg");
+ it('should handle paths with multiple dots', () => {
+ expect(getMimeTypeForImage('my.image.file.jpg')).toBe('image/jpeg');
});
});
- describe("readImageAsBase64", () => {
- it("should read image and return base64 data", async () => {
- const mockBuffer = Buffer.from(pngBase64Fixture, "base64");
+ describe('readImageAsBase64', () => {
+ it('should read image and return base64 data', async () => {
+ const mockBuffer = Buffer.from(pngBase64Fixture, 'base64');
vi.mocked(fs.readFile).mockResolvedValue(mockBuffer);
- const result = await readImageAsBase64("/path/to/test.png");
+ const result = await readImageAsBase64('/path/to/test.png');
expect(result).toMatchObject({
base64: pngBase64Fixture,
- mimeType: "image/png",
- filename: "test.png",
- originalPath: "/path/to/test.png",
+ mimeType: 'image/png',
+ filename: 'test.png',
+ originalPath: '/path/to/test.png',
});
- expect(fs.readFile).toHaveBeenCalledWith("/path/to/test.png");
+ expect(fs.readFile).toHaveBeenCalledWith('/path/to/test.png');
});
- it("should handle different image formats", async () => {
- const mockBuffer = Buffer.from("jpeg-data");
+ it('should handle different image formats', async () => {
+ const mockBuffer = Buffer.from('jpeg-data');
vi.mocked(fs.readFile).mockResolvedValue(mockBuffer);
- const result = await readImageAsBase64("/path/to/photo.jpg");
+ const result = await readImageAsBase64('/path/to/photo.jpg');
- expect(result.mimeType).toBe("image/jpeg");
- expect(result.filename).toBe("photo.jpg");
- expect(result.base64).toBe(mockBuffer.toString("base64"));
+ expect(result.mimeType).toBe('image/jpeg');
+ expect(result.filename).toBe('photo.jpg');
+ expect(result.base64).toBe(mockBuffer.toString('base64'));
});
- it("should extract filename from path", async () => {
- const mockBuffer = Buffer.from("data");
+ it('should extract filename from path', async () => {
+ const mockBuffer = Buffer.from('data');
vi.mocked(fs.readFile).mockResolvedValue(mockBuffer);
- const result = await readImageAsBase64("/deep/nested/path/image.webp");
+ const result = await readImageAsBase64('/deep/nested/path/image.webp');
- expect(result.filename).toBe("image.webp");
+ expect(result.filename).toBe('image.webp');
});
- it("should throw error if file cannot be read", async () => {
- vi.mocked(fs.readFile).mockRejectedValue(new Error("File not found"));
+ it('should throw error if file cannot be read', async () => {
+ vi.mocked(fs.readFile).mockRejectedValue(new Error('File not found'));
- await expect(readImageAsBase64("/nonexistent.png")).rejects.toThrow(
- "File not found"
- );
+ await expect(readImageAsBase64('/nonexistent.png')).rejects.toThrow('File not found');
});
});
- describe("convertImagesToContentBlocks", () => {
- it("should convert single image to content block", async () => {
- const mockBuffer = Buffer.from(pngBase64Fixture, "base64");
+ describe('convertImagesToContentBlocks', () => {
+ it('should convert single image to content block', async () => {
+ const mockBuffer = Buffer.from(pngBase64Fixture, 'base64');
vi.mocked(fs.readFile).mockResolvedValue(mockBuffer);
- const result = await convertImagesToContentBlocks(["/path/test.png"]);
+ const result = await convertImagesToContentBlocks(['/path/test.png']);
expect(result).toHaveLength(1);
expect(result[0]).toMatchObject({
- type: "image",
+ type: 'image',
source: {
- type: "base64",
- media_type: "image/png",
+ type: 'base64',
+ media_type: 'image/png',
data: pngBase64Fixture,
},
});
});
- it("should convert multiple images to content blocks", async () => {
- const mockBuffer = Buffer.from("test-data");
+ it('should convert multiple images to content blocks', async () => {
+ const mockBuffer = Buffer.from('test-data');
vi.mocked(fs.readFile).mockResolvedValue(mockBuffer);
- const result = await convertImagesToContentBlocks([
- "/a.png",
- "/b.jpg",
- "/c.webp",
- ]);
+ const result = await convertImagesToContentBlocks(['/a.png', '/b.jpg', '/c.webp']);
expect(result).toHaveLength(3);
- expect(result[0].source.media_type).toBe("image/png");
- expect(result[1].source.media_type).toBe("image/jpeg");
- expect(result[2].source.media_type).toBe("image/webp");
+ expect(result[0].source.media_type).toBe('image/png');
+ expect(result[1].source.media_type).toBe('image/jpeg');
+ expect(result[2].source.media_type).toBe('image/webp');
});
- it("should resolve relative paths with workDir", async () => {
- const mockBuffer = Buffer.from("data");
+ it('should resolve relative paths with workDir', async () => {
+ const mockBuffer = Buffer.from('data');
vi.mocked(fs.readFile).mockResolvedValue(mockBuffer);
- await convertImagesToContentBlocks(["relative.png"], "/work/dir");
+ await convertImagesToContentBlocks(['relative.png'], '/work/dir');
// Use path-agnostic check since Windows uses backslashes
const calls = vi.mocked(fs.readFile).mock.calls;
expect(calls[0][0]).toMatch(/relative\.png$/);
- expect(calls[0][0]).toContain("work");
- expect(calls[0][0]).toContain("dir");
+ expect(calls[0][0]).toContain('work');
+ expect(calls[0][0]).toContain('dir');
});
- it("should handle absolute paths without workDir", async () => {
- const mockBuffer = Buffer.from("data");
+ it('should handle absolute paths without workDir', async () => {
+ const mockBuffer = Buffer.from('data');
vi.mocked(fs.readFile).mockResolvedValue(mockBuffer);
- await convertImagesToContentBlocks(["/absolute/path.png"]);
+ await convertImagesToContentBlocks(['/absolute/path.png']);
- expect(fs.readFile).toHaveBeenCalledWith("/absolute/path.png");
+ expect(fs.readFile).toHaveBeenCalledWith('/absolute/path.png');
});
- it("should continue processing on individual image errors", async () => {
+ it('should continue processing on individual image errors', async () => {
vi.mocked(fs.readFile)
- .mockResolvedValueOnce(Buffer.from("ok1"))
- .mockRejectedValueOnce(new Error("Failed"))
- .mockResolvedValueOnce(Buffer.from("ok2"));
+ .mockResolvedValueOnce(Buffer.from('ok1'))
+ .mockRejectedValueOnce(new Error('Failed'))
+ .mockResolvedValueOnce(Buffer.from('ok2'));
- const consoleSpy = vi.spyOn(console, "error").mockImplementation(() => {});
+ const consoleSpy = vi.spyOn(console, 'error').mockImplementation(() => {});
- const result = await convertImagesToContentBlocks([
- "/a.png",
- "/b.png",
- "/c.png",
- ]);
+ const result = await convertImagesToContentBlocks(['/a.png', '/b.png', '/c.png']);
expect(result).toHaveLength(2); // Only successful images
expect(consoleSpy).toHaveBeenCalled();
@@ -176,56 +166,52 @@ describe("image-handler.ts", () => {
consoleSpy.mockRestore();
});
- it("should return empty array for empty input", async () => {
+ it('should return empty array for empty input', async () => {
const result = await convertImagesToContentBlocks([]);
expect(result).toEqual([]);
});
- it("should handle undefined workDir", async () => {
- const mockBuffer = Buffer.from("data");
+ it('should handle undefined workDir', async () => {
+ const mockBuffer = Buffer.from('data');
vi.mocked(fs.readFile).mockResolvedValue(mockBuffer);
- const result = await convertImagesToContentBlocks(["/test.png"], undefined);
+ const result = await convertImagesToContentBlocks(['/test.png'], undefined);
expect(result).toHaveLength(1);
- expect(fs.readFile).toHaveBeenCalledWith("/test.png");
+ expect(fs.readFile).toHaveBeenCalledWith('/test.png');
});
});
- describe("formatImagePathsForPrompt", () => {
- it("should format single image path as bulleted list", () => {
- const result = formatImagePathsForPrompt(["/path/image.png"]);
+ describe('formatImagePathsForPrompt', () => {
+ it('should format single image path as bulleted list', () => {
+ const result = formatImagePathsForPrompt(['/path/image.png']);
- expect(result).toContain("\n\nAttached images:");
- expect(result).toContain("- /path/image.png");
+ expect(result).toContain('\n\nAttached images:');
+ expect(result).toContain('- /path/image.png');
});
- it("should format multiple image paths as bulleted list", () => {
- const result = formatImagePathsForPrompt([
- "/path/a.png",
- "/path/b.jpg",
- "/path/c.webp",
- ]);
+ it('should format multiple image paths as bulleted list', () => {
+ const result = formatImagePathsForPrompt(['/path/a.png', '/path/b.jpg', '/path/c.webp']);
- expect(result).toContain("Attached images:");
- expect(result).toContain("- /path/a.png");
- expect(result).toContain("- /path/b.jpg");
- expect(result).toContain("- /path/c.webp");
+ expect(result).toContain('Attached images:');
+ expect(result).toContain('- /path/a.png');
+ expect(result).toContain('- /path/b.jpg');
+ expect(result).toContain('- /path/c.webp');
});
- it("should return empty string for empty array", () => {
+ it('should return empty string for empty array', () => {
const result = formatImagePathsForPrompt([]);
- expect(result).toBe("");
+ expect(result).toBe('');
});
- it("should start with double newline", () => {
- const result = formatImagePathsForPrompt(["/test.png"]);
- expect(result.startsWith("\n\n")).toBe(true);
+ it('should start with double newline', () => {
+ const result = formatImagePathsForPrompt(['/test.png']);
+ expect(result.startsWith('\n\n')).toBe(true);
});
- it("should handle paths with special characters", () => {
- const result = formatImagePathsForPrompt(["/path/with spaces/image.png"]);
- expect(result).toContain("- /path/with spaces/image.png");
+ it('should handle paths with special characters', () => {
+ const result = formatImagePathsForPrompt(['/path/with spaces/image.png']);
+ expect(result).toContain('- /path/with spaces/image.png');
});
});
});
diff --git a/apps/server/tests/unit/lib/logger.test.ts b/apps/server/tests/unit/lib/logger.test.ts
index fa6034b08..56d98a9f4 100644
--- a/apps/server/tests/unit/lib/logger.test.ts
+++ b/apps/server/tests/unit/lib/logger.test.ts
@@ -1,12 +1,7 @@
-import { describe, it, expect, vi, beforeEach, afterEach } from "vitest";
-import {
- LogLevel,
- createLogger,
- getLogLevel,
- setLogLevel,
-} from "@automaker/utils";
-
-describe("logger.ts", () => {
+import { describe, it, expect, vi, beforeEach, afterEach } from 'vitest';
+import { LogLevel, createLogger, getLogLevel, setLogLevel } from '@automaker/utils';
+
+describe('logger.ts', () => {
let consoleSpy: {
    log: ReturnType<typeof vi.spyOn>;
    warn: ReturnType<typeof vi.spyOn>;
@@ -17,9 +12,9 @@ describe("logger.ts", () => {
beforeEach(() => {
originalLogLevel = getLogLevel();
consoleSpy = {
- log: vi.spyOn(console, "log").mockImplementation(() => {}),
- warn: vi.spyOn(console, "warn").mockImplementation(() => {}),
- error: vi.spyOn(console, "error").mockImplementation(() => {}),
+ log: vi.spyOn(console, 'log').mockImplementation(() => {}),
+ warn: vi.spyOn(console, 'warn').mockImplementation(() => {}),
+ error: vi.spyOn(console, 'error').mockImplementation(() => {}),
};
});
@@ -30,8 +25,8 @@ describe("logger.ts", () => {
consoleSpy.error.mockRestore();
});
- describe("LogLevel enum", () => {
- it("should have correct numeric values", () => {
+ describe('LogLevel enum', () => {
+ it('should have correct numeric values', () => {
expect(LogLevel.ERROR).toBe(0);
expect(LogLevel.WARN).toBe(1);
expect(LogLevel.INFO).toBe(2);
@@ -39,8 +34,8 @@ describe("logger.ts", () => {
});
});
- describe("setLogLevel and getLogLevel", () => {
- it("should set and get log level", () => {
+ describe('setLogLevel and getLogLevel', () => {
+ it('should set and get log level', () => {
setLogLevel(LogLevel.DEBUG);
expect(getLogLevel()).toBe(LogLevel.DEBUG);
@@ -49,71 +44,66 @@ describe("logger.ts", () => {
});
});
- describe("createLogger", () => {
- it("should create a logger with context prefix", () => {
+ describe('createLogger', () => {
+ it('should create a logger with context prefix', () => {
setLogLevel(LogLevel.INFO);
- const logger = createLogger("TestContext");
+ const logger = createLogger('TestContext');
- logger.info("test message");
+ logger.info('test message');
- expect(consoleSpy.log).toHaveBeenCalledWith("[TestContext]", "test message");
+ expect(consoleSpy.log).toHaveBeenCalledWith('[TestContext]', 'test message');
});
- it("should log error at all log levels", () => {
- const logger = createLogger("Test");
+ it('should log error at all log levels', () => {
+ const logger = createLogger('Test');
setLogLevel(LogLevel.ERROR);
- logger.error("error message");
- expect(consoleSpy.error).toHaveBeenCalledWith("[Test]", "error message");
+ logger.error('error message');
+ expect(consoleSpy.error).toHaveBeenCalledWith('[Test]', 'error message');
});
- it("should log warn when level is WARN or higher", () => {
- const logger = createLogger("Test");
+ it('should log warn when level is WARN or higher', () => {
+ const logger = createLogger('Test');
setLogLevel(LogLevel.ERROR);
- logger.warn("warn message 1");
+ logger.warn('warn message 1');
expect(consoleSpy.warn).not.toHaveBeenCalled();
setLogLevel(LogLevel.WARN);
- logger.warn("warn message 2");
- expect(consoleSpy.warn).toHaveBeenCalledWith("[Test]", "warn message 2");
+ logger.warn('warn message 2');
+ expect(consoleSpy.warn).toHaveBeenCalledWith('[Test]', 'warn message 2');
});
- it("should log info when level is INFO or higher", () => {
- const logger = createLogger("Test");
+ it('should log info when level is INFO or higher', () => {
+ const logger = createLogger('Test');
setLogLevel(LogLevel.WARN);
- logger.info("info message 1");
+ logger.info('info message 1');
expect(consoleSpy.log).not.toHaveBeenCalled();
setLogLevel(LogLevel.INFO);
- logger.info("info message 2");
- expect(consoleSpy.log).toHaveBeenCalledWith("[Test]", "info message 2");
+ logger.info('info message 2');
+ expect(consoleSpy.log).toHaveBeenCalledWith('[Test]', 'info message 2');
});
- it("should log debug only when level is DEBUG", () => {
- const logger = createLogger("Test");
+ it('should log debug only when level is DEBUG', () => {
+ const logger = createLogger('Test');
setLogLevel(LogLevel.INFO);
- logger.debug("debug message 1");
+ logger.debug('debug message 1');
expect(consoleSpy.log).not.toHaveBeenCalled();
setLogLevel(LogLevel.DEBUG);
- logger.debug("debug message 2");
- expect(consoleSpy.log).toHaveBeenCalledWith("[Test]", "[DEBUG]", "debug message 2");
+ logger.debug('debug message 2');
+ expect(consoleSpy.log).toHaveBeenCalledWith('[Test]', '[DEBUG]', 'debug message 2');
});
- it("should pass multiple arguments to log functions", () => {
+ it('should pass multiple arguments to log functions', () => {
setLogLevel(LogLevel.DEBUG);
- const logger = createLogger("Multi");
-
- logger.info("message", { data: "value" }, 123);
- expect(consoleSpy.log).toHaveBeenCalledWith(
- "[Multi]",
- "message",
- { data: "value" },
- 123
- );
+ const logger = createLogger('Multi');
+
+ logger.info('message', { data: 'value' }, 123);
+ expect(consoleSpy.log).toHaveBeenCalledWith('[Multi]', 'message', { data: 'value' }, 123);
});
});
});
diff --git a/apps/server/tests/unit/lib/model-resolver.test.ts b/apps/server/tests/unit/lib/model-resolver.test.ts
index bda6b380d..5eb1fa701 100644
--- a/apps/server/tests/unit/lib/model-resolver.test.ts
+++ b/apps/server/tests/unit/lib/model-resolver.test.ts
@@ -1,18 +1,18 @@
-import { describe, it, expect, vi, beforeEach, afterEach } from "vitest";
+import { describe, it, expect, vi, beforeEach, afterEach } from 'vitest';
import {
resolveModelString,
getEffectiveModel,
CLAUDE_MODEL_MAP,
DEFAULT_MODELS,
-} from "@automaker/model-resolver";
+} from '@automaker/model-resolver';
-describe("model-resolver.ts", () => {
+describe('model-resolver.ts', () => {
let consoleSpy: any;
beforeEach(() => {
consoleSpy = {
- log: vi.spyOn(console, "log").mockImplementation(() => {}),
- warn: vi.spyOn(console, "warn").mockImplementation(() => {}),
+ log: vi.spyOn(console, 'log').mockImplementation(() => {}),
+ warn: vi.spyOn(console, 'warn').mockImplementation(() => {}),
};
});
@@ -21,27 +21,27 @@ describe("model-resolver.ts", () => {
consoleSpy.warn.mockRestore();
});
- describe("resolveModelString", () => {
+ describe('resolveModelString', () => {
it("should resolve 'haiku' alias to full model string", () => {
- const result = resolveModelString("haiku");
- expect(result).toBe("claude-haiku-4-5");
+ const result = resolveModelString('haiku');
+ expect(result).toBe('claude-haiku-4-5');
});
it("should resolve 'sonnet' alias to full model string", () => {
- const result = resolveModelString("sonnet");
- expect(result).toBe("claude-sonnet-4-20250514");
+ const result = resolveModelString('sonnet');
+ expect(result).toBe('claude-sonnet-4-20250514');
});
it("should resolve 'opus' alias to full model string", () => {
- const result = resolveModelString("opus");
- expect(result).toBe("claude-opus-4-5-20251101");
+ const result = resolveModelString('opus');
+ expect(result).toBe('claude-opus-4-5-20251101');
expect(consoleSpy.log).toHaveBeenCalledWith(
expect.stringContaining('Resolved model alias: "opus"')
);
});
- it("should treat unknown models as falling back to default", () => {
- const models = ["o1", "o1-mini", "o3", "gpt-5.2", "unknown-model"];
+ it('should treat unknown models as falling back to default', () => {
+ const models = ['o1', 'o1-mini', 'o3', 'gpt-5.2', 'unknown-model'];
models.forEach((model) => {
const result = resolveModelString(model);
// Should fall back to default since these aren't supported
@@ -49,95 +49,91 @@ describe("model-resolver.ts", () => {
});
});
- it("should pass through full Claude model strings", () => {
- const models = [
- "claude-opus-4-5-20251101",
- "claude-sonnet-4-20250514",
- "claude-haiku-4-5",
- ];
+ it('should pass through full Claude model strings', () => {
+ const models = ['claude-opus-4-5-20251101', 'claude-sonnet-4-20250514', 'claude-haiku-4-5'];
models.forEach((model) => {
const result = resolveModelString(model);
expect(result).toBe(model);
});
expect(consoleSpy.log).toHaveBeenCalledWith(
- expect.stringContaining("Using full Claude model string")
+ expect.stringContaining('Using full Claude model string')
);
});
- it("should return default model when modelKey is undefined", () => {
+ it('should return default model when modelKey is undefined', () => {
const result = resolveModelString(undefined);
expect(result).toBe(DEFAULT_MODELS.claude);
});
- it("should return custom default model when provided", () => {
- const customDefault = "custom-model";
+ it('should return custom default model when provided', () => {
+ const customDefault = 'custom-model';
const result = resolveModelString(undefined, customDefault);
expect(result).toBe(customDefault);
});
- it("should return default for unknown model key", () => {
- const result = resolveModelString("unknown-model");
+ it('should return default for unknown model key', () => {
+ const result = resolveModelString('unknown-model');
expect(result).toBe(DEFAULT_MODELS.claude);
expect(consoleSpy.warn).toHaveBeenCalledWith(
expect.stringContaining('Unknown model key "unknown-model"')
);
});
- it("should handle empty string", () => {
- const result = resolveModelString("");
+ it('should handle empty string', () => {
+ const result = resolveModelString('');
expect(result).toBe(DEFAULT_MODELS.claude);
});
});
- describe("getEffectiveModel", () => {
- it("should prioritize explicit model over session and default", () => {
- const result = getEffectiveModel("opus", "haiku", "gpt-5.2");
- expect(result).toBe("claude-opus-4-5-20251101");
+ describe('getEffectiveModel', () => {
+ it('should prioritize explicit model over session and default', () => {
+ const result = getEffectiveModel('opus', 'haiku', 'gpt-5.2');
+ expect(result).toBe('claude-opus-4-5-20251101');
});
- it("should use session model when explicit is not provided", () => {
- const result = getEffectiveModel(undefined, "sonnet", "gpt-5.2");
- expect(result).toBe("claude-sonnet-4-20250514");
+ it('should use session model when explicit is not provided', () => {
+ const result = getEffectiveModel(undefined, 'sonnet', 'gpt-5.2');
+ expect(result).toBe('claude-sonnet-4-20250514');
});
- it("should use default when neither explicit nor session is provided", () => {
- const customDefault = "claude-haiku-4-5";
+ it('should use default when neither explicit nor session is provided', () => {
+ const customDefault = 'claude-haiku-4-5';
const result = getEffectiveModel(undefined, undefined, customDefault);
expect(result).toBe(customDefault);
});
- it("should use Claude default when no arguments provided", () => {
+ it('should use Claude default when no arguments provided', () => {
const result = getEffectiveModel();
expect(result).toBe(DEFAULT_MODELS.claude);
});
- it("should handle explicit empty strings as undefined", () => {
- const result = getEffectiveModel("", "haiku");
- expect(result).toBe("claude-haiku-4-5");
+ it('should handle explicit empty strings as undefined', () => {
+ const result = getEffectiveModel('', 'haiku');
+ expect(result).toBe('claude-haiku-4-5');
});
});
- describe("CLAUDE_MODEL_MAP", () => {
- it("should have haiku, sonnet, opus mappings", () => {
- expect(CLAUDE_MODEL_MAP).toHaveProperty("haiku");
- expect(CLAUDE_MODEL_MAP).toHaveProperty("sonnet");
- expect(CLAUDE_MODEL_MAP).toHaveProperty("opus");
+ describe('CLAUDE_MODEL_MAP', () => {
+ it('should have haiku, sonnet, opus mappings', () => {
+ expect(CLAUDE_MODEL_MAP).toHaveProperty('haiku');
+ expect(CLAUDE_MODEL_MAP).toHaveProperty('sonnet');
+ expect(CLAUDE_MODEL_MAP).toHaveProperty('opus');
});
- it("should have valid Claude model strings", () => {
- expect(CLAUDE_MODEL_MAP.haiku).toContain("haiku");
- expect(CLAUDE_MODEL_MAP.sonnet).toContain("sonnet");
- expect(CLAUDE_MODEL_MAP.opus).toContain("opus");
+ it('should have valid Claude model strings', () => {
+ expect(CLAUDE_MODEL_MAP.haiku).toContain('haiku');
+ expect(CLAUDE_MODEL_MAP.sonnet).toContain('sonnet');
+ expect(CLAUDE_MODEL_MAP.opus).toContain('opus');
});
});
- describe("DEFAULT_MODELS", () => {
- it("should have claude default", () => {
- expect(DEFAULT_MODELS).toHaveProperty("claude");
+ describe('DEFAULT_MODELS', () => {
+ it('should have claude default', () => {
+ expect(DEFAULT_MODELS).toHaveProperty('claude');
});
- it("should have valid default model", () => {
- expect(DEFAULT_MODELS.claude).toContain("claude");
+ it('should have valid default model', () => {
+ expect(DEFAULT_MODELS.claude).toContain('claude');
});
});
});
diff --git a/apps/server/tests/unit/lib/prompt-builder.test.ts b/apps/server/tests/unit/lib/prompt-builder.test.ts
index 6f76b2095..1577c4aa9 100644
--- a/apps/server/tests/unit/lib/prompt-builder.test.ts
+++ b/apps/server/tests/unit/lib/prompt-builder.test.ts
@@ -1,129 +1,120 @@
-import { describe, it, expect, vi, beforeEach, afterEach } from "vitest";
-import * as utils from "@automaker/utils";
-import * as fs from "fs/promises";
+import { describe, it, expect, vi, beforeEach, afterEach } from 'vitest';
+import * as utils from '@automaker/utils';
+import * as fs from 'fs/promises';
// Mock fs module for the image-handler's readFile calls
-vi.mock("fs/promises");
+vi.mock('fs/promises');
-describe("prompt-builder.ts", () => {
+describe('prompt-builder.ts', () => {
beforeEach(() => {
vi.clearAllMocks();
// Setup default mock for fs.readFile to return a valid image buffer
- vi.mocked(fs.readFile).mockResolvedValue(Buffer.from("fake-image-data"));
+ vi.mocked(fs.readFile).mockResolvedValue(Buffer.from('fake-image-data'));
});
afterEach(() => {
vi.restoreAllMocks();
});
- describe("buildPromptWithImages", () => {
- it("should return plain text when no images provided", async () => {
- const result = await utils.buildPromptWithImages("Hello world");
+ describe('buildPromptWithImages', () => {
+ it('should return plain text when no images provided', async () => {
+ const result = await utils.buildPromptWithImages('Hello world');
expect(result).toEqual({
- content: "Hello world",
+ content: 'Hello world',
hasImages: false,
});
});
- it("should return plain text when imagePaths is empty array", async () => {
- const result = await utils.buildPromptWithImages("Hello world", []);
+ it('should return plain text when imagePaths is empty array', async () => {
+ const result = await utils.buildPromptWithImages('Hello world', []);
expect(result).toEqual({
- content: "Hello world",
+ content: 'Hello world',
hasImages: false,
});
});
- it("should build content blocks with single image", async () => {
- const result = await utils.buildPromptWithImages("Describe this image", [
- "/test.png",
- ]);
+ it('should build content blocks with single image', async () => {
+ const result = await utils.buildPromptWithImages('Describe this image', ['/test.png']);
expect(result.hasImages).toBe(true);
expect(Array.isArray(result.content)).toBe(true);
const content = result.content as Array<{ type: string; text?: string }>;
expect(content).toHaveLength(2);
- expect(content[0]).toEqual({ type: "text", text: "Describe this image" });
- expect(content[1].type).toBe("image");
+ expect(content[0]).toEqual({ type: 'text', text: 'Describe this image' });
+ expect(content[1].type).toBe('image');
});
- it("should build content blocks with multiple images", async () => {
- const result = await utils.buildPromptWithImages("Analyze these", [
- "/a.png",
- "/b.jpg",
- ]);
+ it('should build content blocks with multiple images', async () => {
+ const result = await utils.buildPromptWithImages('Analyze these', ['/a.png', '/b.jpg']);
expect(result.hasImages).toBe(true);
const content = result.content as Array<{ type: string }>;
expect(content).toHaveLength(3); // 1 text + 2 images
- expect(content[0].type).toBe("text");
- expect(content[1].type).toBe("image");
- expect(content[2].type).toBe("image");
+ expect(content[0].type).toBe('text');
+ expect(content[1].type).toBe('image');
+ expect(content[2].type).toBe('image');
});
- it("should include image paths in text when requested", async () => {
+ it('should include image paths in text when requested', async () => {
const result = await utils.buildPromptWithImages(
- "Base prompt",
- ["/test.png"],
+ 'Base prompt',
+ ['/test.png'],
undefined,
true
);
const content = result.content as Array<{ type: string; text?: string }>;
- expect(content[0].text).toContain("Base prompt");
- expect(content[0].text).toContain("/test.png");
+ expect(content[0].text).toContain('Base prompt');
+ expect(content[0].text).toContain('/test.png');
});
- it("should not include image paths by default", async () => {
- const result = await utils.buildPromptWithImages("Base prompt", ["/test.png"]);
+ it('should not include image paths by default', async () => {
+ const result = await utils.buildPromptWithImages('Base prompt', ['/test.png']);
const content = result.content as Array<{ type: string; text?: string }>;
- expect(content[0].text).toBe("Base prompt");
- expect(content[0].text).not.toContain("Attached");
+ expect(content[0].text).toBe('Base prompt');
+ expect(content[0].text).not.toContain('Attached');
});
- it("should handle empty text content", async () => {
- const result = await utils.buildPromptWithImages("", ["/test.png"]);
+ it('should handle empty text content', async () => {
+ const result = await utils.buildPromptWithImages('', ['/test.png']);
expect(result.hasImages).toBe(true);
// When text is empty/whitespace, should only have image blocks
const content = result.content as Array<{ type: string }>;
- expect(content.every((block) => block.type === "image")).toBe(true);
+ expect(content.every((block) => block.type === 'image')).toBe(true);
});
- it("should trim text content before checking if empty", async () => {
- const result = await utils.buildPromptWithImages(" ", ["/test.png"]);
+ it('should trim text content before checking if empty', async () => {
+ const result = await utils.buildPromptWithImages(' ', ['/test.png']);
const content = result.content as Array<{ type: string }>;
// Whitespace-only text should be excluded
- expect(content.every((block) => block.type === "image")).toBe(true);
+ expect(content.every((block) => block.type === 'image')).toBe(true);
});
it("should return text when only one block and it's text", async () => {
// Make readFile reject to simulate image load failure
- vi.mocked(fs.readFile).mockRejectedValue(new Error("File not found"));
+ vi.mocked(fs.readFile).mockRejectedValue(new Error('File not found'));
- const result = await utils.buildPromptWithImages("Just text", ["/missing.png"]);
+ const result = await utils.buildPromptWithImages('Just text', ['/missing.png']);
// If no images are successfully loaded, should return just the text
- expect(result.content).toBe("Just text");
+ expect(result.content).toBe('Just text');
expect(result.hasImages).toBe(true); // Still true because images were requested
});
- it("should pass workDir for path resolution", async () => {
+ it('should pass workDir for path resolution', async () => {
// The function should use workDir to resolve relative paths
- const result = await utils.buildPromptWithImages(
- "Test",
- ["relative.png"],
- "/work/dir"
- );
+ const result = await utils.buildPromptWithImages('Test', ['relative.png'], '/work/dir');
// Verify it tried to read the file (with resolved path including workDir)
expect(fs.readFile).toHaveBeenCalled();
// The path should be resolved using workDir
const readCall = vi.mocked(fs.readFile).mock.calls[0][0];
- expect(readCall).toContain("relative.png");
+ expect(readCall).toContain('relative.png');
});
});
});
diff --git a/apps/server/tests/unit/lib/sdk-options.test.ts b/apps/server/tests/unit/lib/sdk-options.test.ts
index 0a95312ed..c7324d6cf 100644
--- a/apps/server/tests/unit/lib/sdk-options.test.ts
+++ b/apps/server/tests/unit/lib/sdk-options.test.ts
@@ -1,6 +1,6 @@
-import { describe, it, expect, vi, beforeEach, afterEach } from "vitest";
+import { describe, it, expect, vi, beforeEach, afterEach } from 'vitest';
-describe("sdk-options.ts", () => {
+describe('sdk-options.ts', () => {
let originalEnv: NodeJS.ProcessEnv;
beforeEach(() => {
@@ -12,34 +12,34 @@ describe("sdk-options.ts", () => {
process.env = originalEnv;
});
- describe("TOOL_PRESETS", () => {
- it("should export readOnly tools", async () => {
- const { TOOL_PRESETS } = await import("@/lib/sdk-options.js");
- expect(TOOL_PRESETS.readOnly).toEqual(["Read", "Glob", "Grep"]);
+ describe('TOOL_PRESETS', () => {
+ it('should export readOnly tools', async () => {
+ const { TOOL_PRESETS } = await import('@/lib/sdk-options.js');
+ expect(TOOL_PRESETS.readOnly).toEqual(['Read', 'Glob', 'Grep']);
});
- it("should export specGeneration tools", async () => {
- const { TOOL_PRESETS } = await import("@/lib/sdk-options.js");
- expect(TOOL_PRESETS.specGeneration).toEqual(["Read", "Glob", "Grep"]);
+ it('should export specGeneration tools', async () => {
+ const { TOOL_PRESETS } = await import('@/lib/sdk-options.js');
+ expect(TOOL_PRESETS.specGeneration).toEqual(['Read', 'Glob', 'Grep']);
});
- it("should export fullAccess tools", async () => {
- const { TOOL_PRESETS } = await import("@/lib/sdk-options.js");
- expect(TOOL_PRESETS.fullAccess).toContain("Read");
- expect(TOOL_PRESETS.fullAccess).toContain("Write");
- expect(TOOL_PRESETS.fullAccess).toContain("Edit");
- expect(TOOL_PRESETS.fullAccess).toContain("Bash");
+ it('should export fullAccess tools', async () => {
+ const { TOOL_PRESETS } = await import('@/lib/sdk-options.js');
+ expect(TOOL_PRESETS.fullAccess).toContain('Read');
+ expect(TOOL_PRESETS.fullAccess).toContain('Write');
+ expect(TOOL_PRESETS.fullAccess).toContain('Edit');
+ expect(TOOL_PRESETS.fullAccess).toContain('Bash');
});
- it("should export chat tools matching fullAccess", async () => {
- const { TOOL_PRESETS } = await import("@/lib/sdk-options.js");
+ it('should export chat tools matching fullAccess', async () => {
+ const { TOOL_PRESETS } = await import('@/lib/sdk-options.js');
expect(TOOL_PRESETS.chat).toEqual(TOOL_PRESETS.fullAccess);
});
});
- describe("MAX_TURNS", () => {
- it("should export turn presets", async () => {
- const { MAX_TURNS } = await import("@/lib/sdk-options.js");
+ describe('MAX_TURNS', () => {
+ it('should export turn presets', async () => {
+ const { MAX_TURNS } = await import('@/lib/sdk-options.js');
expect(MAX_TURNS.quick).toBe(50);
expect(MAX_TURNS.standard).toBe(100);
expect(MAX_TURNS.extended).toBe(250);
@@ -47,71 +47,67 @@ describe("sdk-options.ts", () => {
});
});
- describe("getModelForUseCase", () => {
- it("should return explicit model when provided", async () => {
- const { getModelForUseCase } = await import("@/lib/sdk-options.js");
- const result = getModelForUseCase("spec", "claude-sonnet-4-20250514");
- expect(result).toBe("claude-sonnet-4-20250514");
+ describe('getModelForUseCase', () => {
+ it('should return explicit model when provided', async () => {
+ const { getModelForUseCase } = await import('@/lib/sdk-options.js');
+ const result = getModelForUseCase('spec', 'claude-sonnet-4-20250514');
+ expect(result).toBe('claude-sonnet-4-20250514');
});
- it("should use environment variable for spec model", async () => {
- process.env.AUTOMAKER_MODEL_SPEC = "claude-sonnet-4-20250514";
- const { getModelForUseCase } = await import("@/lib/sdk-options.js");
- const result = getModelForUseCase("spec");
- expect(result).toBe("claude-sonnet-4-20250514");
+ it('should use environment variable for spec model', async () => {
+ process.env.AUTOMAKER_MODEL_SPEC = 'claude-sonnet-4-20250514';
+ const { getModelForUseCase } = await import('@/lib/sdk-options.js');
+ const result = getModelForUseCase('spec');
+ expect(result).toBe('claude-sonnet-4-20250514');
});
- it("should use default model for spec when no override", async () => {
+ it('should use default model for spec when no override', async () => {
delete process.env.AUTOMAKER_MODEL_SPEC;
delete process.env.AUTOMAKER_MODEL_DEFAULT;
- const { getModelForUseCase } = await import("@/lib/sdk-options.js");
- const result = getModelForUseCase("spec");
- expect(result).toContain("claude");
+ const { getModelForUseCase } = await import('@/lib/sdk-options.js');
+ const result = getModelForUseCase('spec');
+ expect(result).toContain('claude');
});
- it("should fall back to AUTOMAKER_MODEL_DEFAULT", async () => {
+ it('should fall back to AUTOMAKER_MODEL_DEFAULT', async () => {
delete process.env.AUTOMAKER_MODEL_SPEC;
- process.env.AUTOMAKER_MODEL_DEFAULT = "claude-sonnet-4-20250514";
- const { getModelForUseCase } = await import("@/lib/sdk-options.js");
- const result = getModelForUseCase("spec");
- expect(result).toBe("claude-sonnet-4-20250514");
+ process.env.AUTOMAKER_MODEL_DEFAULT = 'claude-sonnet-4-20250514';
+ const { getModelForUseCase } = await import('@/lib/sdk-options.js');
+ const result = getModelForUseCase('spec');
+ expect(result).toBe('claude-sonnet-4-20250514');
});
});
- describe("createSpecGenerationOptions", () => {
- it("should create options with spec generation settings", async () => {
+ describe('createSpecGenerationOptions', () => {
+ it('should create options with spec generation settings', async () => {
const { createSpecGenerationOptions, TOOL_PRESETS, MAX_TURNS } =
- await import("@/lib/sdk-options.js");
+ await import('@/lib/sdk-options.js');
- const options = createSpecGenerationOptions({ cwd: "/test/path" });
+ const options = createSpecGenerationOptions({ cwd: '/test/path' });
- expect(options.cwd).toBe("/test/path");
+ expect(options.cwd).toBe('/test/path');
expect(options.maxTurns).toBe(MAX_TURNS.maximum);
expect(options.allowedTools).toEqual([...TOOL_PRESETS.specGeneration]);
- expect(options.permissionMode).toBe("default");
+ expect(options.permissionMode).toBe('default');
});
- it("should include system prompt when provided", async () => {
- const { createSpecGenerationOptions } = await import(
- "@/lib/sdk-options.js"
- );
+ it('should include system prompt when provided', async () => {
+ const { createSpecGenerationOptions } = await import('@/lib/sdk-options.js');
const options = createSpecGenerationOptions({
- cwd: "/test/path",
- systemPrompt: "Custom prompt",
+ cwd: '/test/path',
+ systemPrompt: 'Custom prompt',
});
- expect(options.systemPrompt).toBe("Custom prompt");
+ expect(options.systemPrompt).toBe('Custom prompt');
});
- it("should include abort controller when provided", async () => {
- const { createSpecGenerationOptions } = await import(
- "@/lib/sdk-options.js"
- );
+ it('should include abort controller when provided', async () => {
+ const { createSpecGenerationOptions } = await import('@/lib/sdk-options.js');
const abortController = new AbortController();
const options = createSpecGenerationOptions({
- cwd: "/test/path",
+ cwd: '/test/path',
abortController,
});
@@ -119,76 +115,73 @@ describe("sdk-options.ts", () => {
});
});
- describe("createFeatureGenerationOptions", () => {
- it("should create options with feature generation settings", async () => {
+ describe('createFeatureGenerationOptions', () => {
+ it('should create options with feature generation settings', async () => {
const { createFeatureGenerationOptions, TOOL_PRESETS, MAX_TURNS } =
- await import("@/lib/sdk-options.js");
+ await import('@/lib/sdk-options.js');
- const options = createFeatureGenerationOptions({ cwd: "/test/path" });
+ const options = createFeatureGenerationOptions({ cwd: '/test/path' });
- expect(options.cwd).toBe("/test/path");
+ expect(options.cwd).toBe('/test/path');
expect(options.maxTurns).toBe(MAX_TURNS.quick);
expect(options.allowedTools).toEqual([...TOOL_PRESETS.readOnly]);
});
});
- describe("createSuggestionsOptions", () => {
- it("should create options with suggestions settings", async () => {
- const { createSuggestionsOptions, TOOL_PRESETS, MAX_TURNS } = await import(
- "@/lib/sdk-options.js"
- );
+ describe('createSuggestionsOptions', () => {
+ it('should create options with suggestions settings', async () => {
+ const { createSuggestionsOptions, TOOL_PRESETS, MAX_TURNS } =
+ await import('@/lib/sdk-options.js');
- const options = createSuggestionsOptions({ cwd: "/test/path" });
+ const options = createSuggestionsOptions({ cwd: '/test/path' });
- expect(options.cwd).toBe("/test/path");
+ expect(options.cwd).toBe('/test/path');
expect(options.maxTurns).toBe(MAX_TURNS.extended);
expect(options.allowedTools).toEqual([...TOOL_PRESETS.readOnly]);
});
- it("should include systemPrompt when provided", async () => {
- const { createSuggestionsOptions } = await import("@/lib/sdk-options.js");
+ it('should include systemPrompt when provided', async () => {
+ const { createSuggestionsOptions } = await import('@/lib/sdk-options.js');
const options = createSuggestionsOptions({
- cwd: "/test/path",
- systemPrompt: "Custom prompt",
+ cwd: '/test/path',
+ systemPrompt: 'Custom prompt',
});
- expect(options.systemPrompt).toBe("Custom prompt");
+ expect(options.systemPrompt).toBe('Custom prompt');
});
- it("should include abortController when provided", async () => {
- const { createSuggestionsOptions } = await import("@/lib/sdk-options.js");
+ it('should include abortController when provided', async () => {
+ const { createSuggestionsOptions } = await import('@/lib/sdk-options.js');
const abortController = new AbortController();
const options = createSuggestionsOptions({
- cwd: "/test/path",
+ cwd: '/test/path',
abortController,
});
expect(options.abortController).toBe(abortController);
});
- it("should include outputFormat when provided", async () => {
- const { createSuggestionsOptions } = await import("@/lib/sdk-options.js");
+ it('should include outputFormat when provided', async () => {
+ const { createSuggestionsOptions } = await import('@/lib/sdk-options.js');
const options = createSuggestionsOptions({
- cwd: "/test/path",
- outputFormat: { type: "json" },
+ cwd: '/test/path',
+ outputFormat: { type: 'json' },
});
- expect(options.outputFormat).toEqual({ type: "json" });
+ expect(options.outputFormat).toEqual({ type: 'json' });
});
});
- describe("createChatOptions", () => {
- it("should create options with chat settings", async () => {
- const { createChatOptions, TOOL_PRESETS, MAX_TURNS } = await import(
- "@/lib/sdk-options.js"
- );
+ describe('createChatOptions', () => {
+ it('should create options with chat settings', async () => {
+ const { createChatOptions, TOOL_PRESETS, MAX_TURNS } = await import('@/lib/sdk-options.js');
- const options = createChatOptions({ cwd: "/test/path" });
+ const options = createChatOptions({ cwd: '/test/path' });
- expect(options.cwd).toBe("/test/path");
+ expect(options.cwd).toBe('/test/path');
expect(options.maxTurns).toBe(MAX_TURNS.standard);
expect(options.allowedTools).toEqual([...TOOL_PRESETS.chat]);
expect(options.sandbox).toEqual({
@@ -197,41 +190,38 @@ describe("sdk-options.ts", () => {
});
});
- it("should prefer explicit model over session model", async () => {
- const { createChatOptions, getModelForUseCase } = await import(
- "@/lib/sdk-options.js"
- );
+ it('should prefer explicit model over session model', async () => {
+ const { createChatOptions, getModelForUseCase } = await import('@/lib/sdk-options.js');
const options = createChatOptions({
- cwd: "/test/path",
- model: "claude-opus-4-20250514",
- sessionModel: "claude-haiku-3-5-20241022",
+ cwd: '/test/path',
+ model: 'claude-opus-4-20250514',
+ sessionModel: 'claude-haiku-3-5-20241022',
});
- expect(options.model).toBe("claude-opus-4-20250514");
+ expect(options.model).toBe('claude-opus-4-20250514');
});
- it("should use session model when explicit model not provided", async () => {
- const { createChatOptions } = await import("@/lib/sdk-options.js");
+ it('should use session model when explicit model not provided', async () => {
+ const { createChatOptions } = await import('@/lib/sdk-options.js');
const options = createChatOptions({
- cwd: "/test/path",
- sessionModel: "claude-sonnet-4-20250514",
+ cwd: '/test/path',
+ sessionModel: 'claude-sonnet-4-20250514',
});
- expect(options.model).toBe("claude-sonnet-4-20250514");
+ expect(options.model).toBe('claude-sonnet-4-20250514');
});
});
- describe("createAutoModeOptions", () => {
- it("should create options with auto mode settings", async () => {
- const { createAutoModeOptions, TOOL_PRESETS, MAX_TURNS } = await import(
- "@/lib/sdk-options.js"
- );
+ describe('createAutoModeOptions', () => {
+ it('should create options with auto mode settings', async () => {
+ const { createAutoModeOptions, TOOL_PRESETS, MAX_TURNS } =
+ await import('@/lib/sdk-options.js');
- const options = createAutoModeOptions({ cwd: "/test/path" });
+ const options = createAutoModeOptions({ cwd: '/test/path' });
- expect(options.cwd).toBe("/test/path");
+ expect(options.cwd).toBe('/test/path');
expect(options.maxTurns).toBe(MAX_TURNS.maximum);
expect(options.allowedTools).toEqual([...TOOL_PRESETS.fullAccess]);
expect(options.sandbox).toEqual({
@@ -240,23 +230,23 @@ describe("sdk-options.ts", () => {
});
});
- it("should include systemPrompt when provided", async () => {
- const { createAutoModeOptions } = await import("@/lib/sdk-options.js");
+ it('should include systemPrompt when provided', async () => {
+ const { createAutoModeOptions } = await import('@/lib/sdk-options.js');
const options = createAutoModeOptions({
- cwd: "/test/path",
- systemPrompt: "Custom prompt",
+ cwd: '/test/path',
+ systemPrompt: 'Custom prompt',
});
- expect(options.systemPrompt).toBe("Custom prompt");
+ expect(options.systemPrompt).toBe('Custom prompt');
});
- it("should include abortController when provided", async () => {
- const { createAutoModeOptions } = await import("@/lib/sdk-options.js");
+ it('should include abortController when provided', async () => {
+ const { createAutoModeOptions } = await import('@/lib/sdk-options.js');
const abortController = new AbortController();
const options = createAutoModeOptions({
- cwd: "/test/path",
+ cwd: '/test/path',
abortController,
});
@@ -264,39 +254,37 @@ describe("sdk-options.ts", () => {
});
});
- describe("createCustomOptions", () => {
- it("should create options with custom settings", async () => {
- const { createCustomOptions } = await import("@/lib/sdk-options.js");
+ describe('createCustomOptions', () => {
+ it('should create options with custom settings', async () => {
+ const { createCustomOptions } = await import('@/lib/sdk-options.js');
const options = createCustomOptions({
- cwd: "/test/path",
+ cwd: '/test/path',
maxTurns: 10,
- allowedTools: ["Read", "Write"],
+ allowedTools: ['Read', 'Write'],
sandbox: { enabled: true },
});
- expect(options.cwd).toBe("/test/path");
+ expect(options.cwd).toBe('/test/path');
expect(options.maxTurns).toBe(10);
- expect(options.allowedTools).toEqual(["Read", "Write"]);
+ expect(options.allowedTools).toEqual(['Read', 'Write']);
expect(options.sandbox).toEqual({ enabled: true });
});
- it("should use defaults when optional params not provided", async () => {
- const { createCustomOptions, TOOL_PRESETS, MAX_TURNS } = await import(
- "@/lib/sdk-options.js"
- );
+ it('should use defaults when optional params not provided', async () => {
+ const { createCustomOptions, TOOL_PRESETS, MAX_TURNS } = await import('@/lib/sdk-options.js');
- const options = createCustomOptions({ cwd: "/test/path" });
+ const options = createCustomOptions({ cwd: '/test/path' });
expect(options.maxTurns).toBe(MAX_TURNS.maximum);
expect(options.allowedTools).toEqual([...TOOL_PRESETS.readOnly]);
});
- it("should include sandbox when provided", async () => {
- const { createCustomOptions } = await import("@/lib/sdk-options.js");
+ it('should include sandbox when provided', async () => {
+ const { createCustomOptions } = await import('@/lib/sdk-options.js');
const options = createCustomOptions({
- cwd: "/test/path",
+ cwd: '/test/path',
sandbox: { enabled: true, autoAllowBashIfSandboxed: false },
});
@@ -306,23 +294,23 @@ describe("sdk-options.ts", () => {
});
});
- it("should include systemPrompt when provided", async () => {
- const { createCustomOptions } = await import("@/lib/sdk-options.js");
+ it('should include systemPrompt when provided', async () => {
+ const { createCustomOptions } = await import('@/lib/sdk-options.js');
const options = createCustomOptions({
- cwd: "/test/path",
- systemPrompt: "Custom prompt",
+ cwd: '/test/path',
+ systemPrompt: 'Custom prompt',
});
- expect(options.systemPrompt).toBe("Custom prompt");
+ expect(options.systemPrompt).toBe('Custom prompt');
});
- it("should include abortController when provided", async () => {
- const { createCustomOptions } = await import("@/lib/sdk-options.js");
+ it('should include abortController when provided', async () => {
+ const { createCustomOptions } = await import('@/lib/sdk-options.js');
const abortController = new AbortController();
const options = createCustomOptions({
- cwd: "/test/path",
+ cwd: '/test/path',
abortController,
});
diff --git a/apps/server/tests/unit/lib/security.test.ts b/apps/server/tests/unit/lib/security.test.ts
index 18c378da3..bd90d5983 100644
--- a/apps/server/tests/unit/lib/security.test.ts
+++ b/apps/server/tests/unit/lib/security.test.ts
@@ -1,60 +1,56 @@
-import { describe, it, expect, beforeEach, vi } from "vitest";
-import path from "path";
+import { describe, it, expect, beforeEach, vi } from 'vitest';
+import path from 'path';
/**
* Note: security.ts maintains module-level state (allowed paths Set).
* We need to reset modules and reimport for each test to get fresh state.
*/
-describe("security.ts", () => {
+describe('security.ts', () => {
beforeEach(() => {
vi.resetModules();
});
- describe("initAllowedPaths", () => {
- it("should load ALLOWED_ROOT_DIRECTORY if set", async () => {
- process.env.ALLOWED_ROOT_DIRECTORY = "/projects";
+ describe('initAllowedPaths', () => {
+ it('should load ALLOWED_ROOT_DIRECTORY if set', async () => {
+ process.env.ALLOWED_ROOT_DIRECTORY = '/projects';
delete process.env.DATA_DIR;
- const { initAllowedPaths, getAllowedPaths } =
- await import("@automaker/platform");
+ const { initAllowedPaths, getAllowedPaths } = await import('@automaker/platform');
initAllowedPaths();
const allowed = getAllowedPaths();
- expect(allowed).toContain(path.resolve("/projects"));
+ expect(allowed).toContain(path.resolve('/projects'));
});
- it("should include DATA_DIR if set", async () => {
+ it('should include DATA_DIR if set', async () => {
delete process.env.ALLOWED_ROOT_DIRECTORY;
- process.env.DATA_DIR = "/data/dir";
+ process.env.DATA_DIR = '/data/dir';
- const { initAllowedPaths, getAllowedPaths } =
- await import("@automaker/platform");
+ const { initAllowedPaths, getAllowedPaths } = await import('@automaker/platform');
initAllowedPaths();
const allowed = getAllowedPaths();
- expect(allowed).toContain(path.resolve("/data/dir"));
+ expect(allowed).toContain(path.resolve('/data/dir'));
});
- it("should include both ALLOWED_ROOT_DIRECTORY and DATA_DIR if both set", async () => {
- process.env.ALLOWED_ROOT_DIRECTORY = "/projects";
- process.env.DATA_DIR = "/data";
+ it('should include both ALLOWED_ROOT_DIRECTORY and DATA_DIR if both set', async () => {
+ process.env.ALLOWED_ROOT_DIRECTORY = '/projects';
+ process.env.DATA_DIR = '/data';
- const { initAllowedPaths, getAllowedPaths } =
- await import("@automaker/platform");
+ const { initAllowedPaths, getAllowedPaths } = await import('@automaker/platform');
initAllowedPaths();
const allowed = getAllowedPaths();
- expect(allowed).toContain(path.resolve("/projects"));
- expect(allowed).toContain(path.resolve("/data"));
+ expect(allowed).toContain(path.resolve('/projects'));
+ expect(allowed).toContain(path.resolve('/data'));
expect(allowed).toHaveLength(2);
});
- it("should return empty array when no paths configured", async () => {
+ it('should return empty array when no paths configured', async () => {
delete process.env.ALLOWED_ROOT_DIRECTORY;
delete process.env.DATA_DIR;
- const { initAllowedPaths, getAllowedPaths } =
- await import("@automaker/platform");
+ const { initAllowedPaths, getAllowedPaths } = await import('@automaker/platform');
initAllowedPaths();
const allowed = getAllowedPaths();
@@ -62,140 +58,129 @@ describe("security.ts", () => {
});
});
- describe("isPathAllowed", () => {
- it("should allow paths within ALLOWED_ROOT_DIRECTORY", async () => {
- process.env.ALLOWED_ROOT_DIRECTORY = "/allowed/project";
- process.env.DATA_DIR = "";
+ describe('isPathAllowed', () => {
+ it('should allow paths within ALLOWED_ROOT_DIRECTORY', async () => {
+ process.env.ALLOWED_ROOT_DIRECTORY = '/allowed/project';
+ process.env.DATA_DIR = '';
- const { initAllowedPaths, isPathAllowed } =
- await import("@automaker/platform");
+ const { initAllowedPaths, isPathAllowed } = await import('@automaker/platform');
initAllowedPaths();
// Paths within allowed directory should be allowed
- expect(isPathAllowed("/allowed/project/file.txt")).toBe(true);
- expect(isPathAllowed("/allowed/project/subdir/file.txt")).toBe(true);
+ expect(isPathAllowed('/allowed/project/file.txt')).toBe(true);
+ expect(isPathAllowed('/allowed/project/subdir/file.txt')).toBe(true);
// Paths outside allowed directory should be denied
- expect(isPathAllowed("/not/allowed/file.txt")).toBe(false);
- expect(isPathAllowed("/tmp/file.txt")).toBe(false);
- expect(isPathAllowed("/etc/passwd")).toBe(false);
+ expect(isPathAllowed('/not/allowed/file.txt')).toBe(false);
+ expect(isPathAllowed('/tmp/file.txt')).toBe(false);
+ expect(isPathAllowed('/etc/passwd')).toBe(false);
});
- it("should allow all paths when no restrictions are configured", async () => {
+ it('should allow all paths when no restrictions are configured', async () => {
delete process.env.DATA_DIR;
delete process.env.ALLOWED_ROOT_DIRECTORY;
- const { initAllowedPaths, isPathAllowed } =
- await import("@automaker/platform");
+ const { initAllowedPaths, isPathAllowed } = await import('@automaker/platform');
initAllowedPaths();
// All paths should be allowed when no restrictions are configured
- expect(isPathAllowed("/allowed/project/file.txt")).toBe(true);
- expect(isPathAllowed("/not/allowed/file.txt")).toBe(true);
- expect(isPathAllowed("/tmp/file.txt")).toBe(true);
- expect(isPathAllowed("/etc/passwd")).toBe(true);
- expect(isPathAllowed("/any/path")).toBe(true);
+ expect(isPathAllowed('/allowed/project/file.txt')).toBe(true);
+ expect(isPathAllowed('/not/allowed/file.txt')).toBe(true);
+ expect(isPathAllowed('/tmp/file.txt')).toBe(true);
+ expect(isPathAllowed('/etc/passwd')).toBe(true);
+ expect(isPathAllowed('/any/path')).toBe(true);
});
- it("should allow all paths when DATA_DIR is set but ALLOWED_ROOT_DIRECTORY is not", async () => {
- process.env.DATA_DIR = "/data";
+ it('should allow all paths when DATA_DIR is set but ALLOWED_ROOT_DIRECTORY is not', async () => {
+ process.env.DATA_DIR = '/data';
delete process.env.ALLOWED_ROOT_DIRECTORY;
- const { initAllowedPaths, isPathAllowed } =
- await import("@automaker/platform");
+ const { initAllowedPaths, isPathAllowed } = await import('@automaker/platform');
initAllowedPaths();
// DATA_DIR should be allowed
- expect(isPathAllowed("/data/settings.json")).toBe(true);
+ expect(isPathAllowed('/data/settings.json')).toBe(true);
// But all other paths should also be allowed when ALLOWED_ROOT_DIRECTORY is not set
- expect(isPathAllowed("/allowed/project/file.txt")).toBe(true);
- expect(isPathAllowed("/not/allowed/file.txt")).toBe(true);
- expect(isPathAllowed("/tmp/file.txt")).toBe(true);
- expect(isPathAllowed("/etc/passwd")).toBe(true);
- expect(isPathAllowed("/any/path")).toBe(true);
+ expect(isPathAllowed('/allowed/project/file.txt')).toBe(true);
+ expect(isPathAllowed('/not/allowed/file.txt')).toBe(true);
+ expect(isPathAllowed('/tmp/file.txt')).toBe(true);
+ expect(isPathAllowed('/etc/passwd')).toBe(true);
+ expect(isPathAllowed('/any/path')).toBe(true);
});
});
- describe("validatePath", () => {
- it("should return resolved path for allowed paths", async () => {
- process.env.ALLOWED_ROOT_DIRECTORY = "/allowed";
- process.env.DATA_DIR = "";
+ describe('validatePath', () => {
+ it('should return resolved path for allowed paths', async () => {
+ process.env.ALLOWED_ROOT_DIRECTORY = '/allowed';
+ process.env.DATA_DIR = '';
- const { initAllowedPaths, validatePath } =
- await import("@automaker/platform");
+ const { initAllowedPaths, validatePath } = await import('@automaker/platform');
initAllowedPaths();
- const result = validatePath("/allowed/file.txt");
- expect(result).toBe(path.resolve("/allowed/file.txt"));
+ const result = validatePath('/allowed/file.txt');
+ expect(result).toBe(path.resolve('/allowed/file.txt'));
});
- it("should throw error for paths outside allowed directories", async () => {
- process.env.ALLOWED_ROOT_DIRECTORY = "/allowed";
- process.env.DATA_DIR = "";
+ it('should throw error for paths outside allowed directories', async () => {
+ process.env.ALLOWED_ROOT_DIRECTORY = '/allowed';
+ process.env.DATA_DIR = '';
- const { initAllowedPaths, validatePath } =
- await import("@automaker/platform");
+ const { initAllowedPaths, validatePath } = await import('@automaker/platform');
initAllowedPaths();
// Disallowed paths should throw PathNotAllowedError
- expect(() => validatePath("/disallowed/file.txt")).toThrow();
+ expect(() => validatePath('/disallowed/file.txt')).toThrow();
});
- it("should not throw error for any path when no restrictions are configured", async () => {
+ it('should not throw error for any path when no restrictions are configured', async () => {
delete process.env.DATA_DIR;
delete process.env.ALLOWED_ROOT_DIRECTORY;
- const { initAllowedPaths, validatePath } =
- await import("@automaker/platform");
+ const { initAllowedPaths, validatePath } = await import('@automaker/platform');
initAllowedPaths();
// All paths are allowed when no restrictions configured
- expect(() => validatePath("/disallowed/file.txt")).not.toThrow();
- expect(validatePath("/disallowed/file.txt")).toBe(
- path.resolve("/disallowed/file.txt")
- );
+ expect(() => validatePath('/disallowed/file.txt')).not.toThrow();
+ expect(validatePath('/disallowed/file.txt')).toBe(path.resolve('/disallowed/file.txt'));
});
- it("should resolve relative paths within allowed directory", async () => {
+ it('should resolve relative paths within allowed directory', async () => {
const cwd = process.cwd();
process.env.ALLOWED_ROOT_DIRECTORY = cwd;
- process.env.DATA_DIR = "";
+ process.env.DATA_DIR = '';
- const { initAllowedPaths, validatePath } =
- await import("@automaker/platform");
+ const { initAllowedPaths, validatePath } = await import('@automaker/platform');
initAllowedPaths();
- const result = validatePath("./file.txt");
- expect(result).toBe(path.resolve(cwd, "./file.txt"));
+ const result = validatePath('./file.txt');
+ expect(result).toBe(path.resolve(cwd, './file.txt'));
});
});
- describe("getAllowedPaths", () => {
- it("should return array of allowed paths", async () => {
- process.env.ALLOWED_ROOT_DIRECTORY = "/projects";
- process.env.DATA_DIR = "/data";
+ describe('getAllowedPaths', () => {
+ it('should return array of allowed paths', async () => {
+ process.env.ALLOWED_ROOT_DIRECTORY = '/projects';
+ process.env.DATA_DIR = '/data';
- const { initAllowedPaths, getAllowedPaths } =
- await import("@automaker/platform");
+ const { initAllowedPaths, getAllowedPaths } = await import('@automaker/platform');
initAllowedPaths();
const result = getAllowedPaths();
expect(Array.isArray(result)).toBe(true);
expect(result.length).toBe(2);
- expect(result).toContain(path.resolve("/projects"));
- expect(result).toContain(path.resolve("/data"));
+ expect(result).toContain(path.resolve('/projects'));
+ expect(result).toContain(path.resolve('/data'));
});
- it("should return resolved paths", async () => {
- process.env.ALLOWED_ROOT_DIRECTORY = "/test";
- process.env.DATA_DIR = "";
+ it('should return resolved paths', async () => {
+ process.env.ALLOWED_ROOT_DIRECTORY = '/test';
+ process.env.DATA_DIR = '';
- const { initAllowedPaths, getAllowedPaths } =
- await import("@automaker/platform");
+ const { initAllowedPaths, getAllowedPaths } = await import('@automaker/platform');
initAllowedPaths();
const result = getAllowedPaths();
- expect(result[0]).toBe(path.resolve("/test"));
+ expect(result[0]).toBe(path.resolve('/test'));
});
});
});
diff --git a/apps/server/tests/unit/lib/worktree-metadata.test.ts b/apps/server/tests/unit/lib/worktree-metadata.test.ts
index 82f3242b0..ab7967f3d 100644
--- a/apps/server/tests/unit/lib/worktree-metadata.test.ts
+++ b/apps/server/tests/unit/lib/worktree-metadata.test.ts
@@ -1,4 +1,4 @@
-import { describe, it, expect, beforeEach, afterEach } from "vitest";
+import { describe, it, expect, beforeEach, afterEach } from 'vitest';
import {
readWorktreeMetadata,
writeWorktreeMetadata,
@@ -8,12 +8,12 @@ import {
deleteWorktreeMetadata,
type WorktreeMetadata,
type WorktreePRInfo,
-} from "@/lib/worktree-metadata.js";
-import fs from "fs/promises";
-import path from "path";
-import os from "os";
+} from '@/lib/worktree-metadata.js';
+import fs from 'fs/promises';
+import path from 'path';
+import os from 'os';
-describe("worktree-metadata.ts", () => {
+describe('worktree-metadata.ts', () => {
let testProjectPath: string;
beforeEach(async () => {
@@ -29,10 +29,10 @@ describe("worktree-metadata.ts", () => {
}
});
- describe("sanitizeBranchName", () => {
+ describe('sanitizeBranchName', () => {
// Test through readWorktreeMetadata and writeWorktreeMetadata
- it("should sanitize branch names with invalid characters", async () => {
- const branch = "feature/test-branch";
+ it('should sanitize branch names with invalid characters', async () => {
+ const branch = 'feature/test-branch';
const metadata: WorktreeMetadata = {
branch,
createdAt: new Date().toISOString(),
@@ -43,8 +43,8 @@ describe("worktree-metadata.ts", () => {
expect(result).toEqual(metadata);
});
- it("should sanitize branch names with Windows invalid characters", async () => {
- const branch = "feature:test*branch?";
+ it('should sanitize branch names with Windows invalid characters', async () => {
+ const branch = 'feature:test*branch?';
const metadata: WorktreeMetadata = {
branch,
createdAt: new Date().toISOString(),
@@ -55,8 +55,8 @@ describe("worktree-metadata.ts", () => {
expect(result).toEqual(metadata);
});
- it("should sanitize Windows reserved names", async () => {
- const branch = "CON";
+ it('should sanitize Windows reserved names', async () => {
+ const branch = 'CON';
const metadata: WorktreeMetadata = {
branch,
createdAt: new Date().toISOString(),
@@ -67,10 +67,10 @@ describe("worktree-metadata.ts", () => {
expect(result).toEqual(metadata);
});
- it("should handle empty branch name", async () => {
- const branch = "";
+ it('should handle empty branch name', async () => {
+ const branch = '';
const metadata: WorktreeMetadata = {
- branch: "branch",
+ branch: 'branch',
createdAt: new Date().toISOString(),
};
@@ -80,11 +80,11 @@ describe("worktree-metadata.ts", () => {
expect(result).toEqual(metadata);
});
- it("should handle branch name that becomes empty after sanitization", async () => {
+ it('should handle branch name that becomes empty after sanitization', async () => {
// Test branch that would become empty after removing invalid chars
- const branch = "///";
+ const branch = '///';
const metadata: WorktreeMetadata = {
- branch: "branch",
+ branch: 'branch',
createdAt: new Date().toISOString(),
};
@@ -94,14 +94,14 @@ describe("worktree-metadata.ts", () => {
});
});
- describe("readWorktreeMetadata", () => {
+ describe('readWorktreeMetadata', () => {
it("should return null when metadata file doesn't exist", async () => {
- const result = await readWorktreeMetadata(testProjectPath, "nonexistent-branch");
+ const result = await readWorktreeMetadata(testProjectPath, 'nonexistent-branch');
expect(result).toBeNull();
});
- it("should read existing metadata", async () => {
- const branch = "test-branch";
+ it('should read existing metadata', async () => {
+ const branch = 'test-branch';
const metadata: WorktreeMetadata = {
branch,
createdAt: new Date().toISOString(),
@@ -112,16 +112,16 @@ describe("worktree-metadata.ts", () => {
expect(result).toEqual(metadata);
});
- it("should read metadata with PR info", async () => {
- const branch = "pr-branch";
+ it('should read metadata with PR info', async () => {
+ const branch = 'pr-branch';
const metadata: WorktreeMetadata = {
branch,
createdAt: new Date().toISOString(),
pr: {
number: 123,
- url: "https://github.com/owner/repo/pull/123",
- title: "Test PR",
- state: "open",
+ url: 'https://github.com/owner/repo/pull/123',
+ title: 'Test PR',
+ state: 'open',
createdAt: new Date().toISOString(),
},
};
@@ -132,9 +132,9 @@ describe("worktree-metadata.ts", () => {
});
});
- describe("writeWorktreeMetadata", () => {
+ describe('writeWorktreeMetadata', () => {
it("should create metadata directory if it doesn't exist", async () => {
- const branch = "new-branch";
+ const branch = 'new-branch';
const metadata: WorktreeMetadata = {
branch,
createdAt: new Date().toISOString(),
@@ -145,8 +145,8 @@ describe("worktree-metadata.ts", () => {
expect(result).toEqual(metadata);
});
- it("should overwrite existing metadata", async () => {
- const branch = "existing-branch";
+ it('should overwrite existing metadata', async () => {
+ const branch = 'existing-branch';
const metadata1: WorktreeMetadata = {
branch,
createdAt: new Date().toISOString(),
@@ -156,9 +156,9 @@ describe("worktree-metadata.ts", () => {
createdAt: new Date().toISOString(),
pr: {
number: 456,
- url: "https://github.com/owner/repo/pull/456",
- title: "Updated PR",
- state: "closed",
+ url: 'https://github.com/owner/repo/pull/456',
+ title: 'Updated PR',
+ state: 'closed',
createdAt: new Date().toISOString(),
},
};
@@ -170,14 +170,14 @@ describe("worktree-metadata.ts", () => {
});
});
- describe("updateWorktreePRInfo", () => {
+ describe('updateWorktreePRInfo', () => {
it("should create new metadata if it doesn't exist", async () => {
- const branch = "new-pr-branch";
+ const branch = 'new-pr-branch';
const prInfo: WorktreePRInfo = {
number: 789,
- url: "https://github.com/owner/repo/pull/789",
- title: "New PR",
- state: "open",
+ url: 'https://github.com/owner/repo/pull/789',
+ title: 'New PR',
+ state: 'open',
createdAt: new Date().toISOString(),
};
@@ -188,8 +188,8 @@ describe("worktree-metadata.ts", () => {
expect(result?.pr).toEqual(prInfo);
});
- it("should update existing metadata with PR info", async () => {
- const branch = "existing-pr-branch";
+ it('should update existing metadata with PR info', async () => {
+ const branch = 'existing-pr-branch';
const metadata: WorktreeMetadata = {
branch,
createdAt: new Date().toISOString(),
@@ -199,9 +199,9 @@ describe("worktree-metadata.ts", () => {
const prInfo: WorktreePRInfo = {
number: 999,
- url: "https://github.com/owner/repo/pull/999",
- title: "Updated PR",
- state: "merged",
+ url: 'https://github.com/owner/repo/pull/999',
+ title: 'Updated PR',
+ state: 'merged',
createdAt: new Date().toISOString(),
};
@@ -210,8 +210,8 @@ describe("worktree-metadata.ts", () => {
expect(result?.pr).toEqual(prInfo);
});
- it("should preserve existing metadata when updating PR info", async () => {
- const branch = "preserve-branch";
+ it('should preserve existing metadata when updating PR info', async () => {
+ const branch = 'preserve-branch';
const originalCreatedAt = new Date().toISOString();
const metadata: WorktreeMetadata = {
branch,
@@ -222,9 +222,9 @@ describe("worktree-metadata.ts", () => {
const prInfo: WorktreePRInfo = {
number: 111,
- url: "https://github.com/owner/repo/pull/111",
- title: "PR",
- state: "open",
+ url: 'https://github.com/owner/repo/pull/111',
+ title: 'PR',
+ state: 'open',
createdAt: new Date().toISOString(),
};
@@ -235,14 +235,14 @@ describe("worktree-metadata.ts", () => {
});
});
- describe("getWorktreePRInfo", () => {
+ describe('getWorktreePRInfo', () => {
it("should return null when metadata doesn't exist", async () => {
- const result = await getWorktreePRInfo(testProjectPath, "nonexistent");
+ const result = await getWorktreePRInfo(testProjectPath, 'nonexistent');
expect(result).toBeNull();
});
- it("should return null when metadata exists but has no PR info", async () => {
- const branch = "no-pr-branch";
+ it('should return null when metadata exists but has no PR info', async () => {
+ const branch = 'no-pr-branch';
const metadata: WorktreeMetadata = {
branch,
createdAt: new Date().toISOString(),
@@ -253,13 +253,13 @@ describe("worktree-metadata.ts", () => {
expect(result).toBeNull();
});
- it("should return PR info when it exists", async () => {
- const branch = "has-pr-branch";
+ it('should return PR info when it exists', async () => {
+ const branch = 'has-pr-branch';
const prInfo: WorktreePRInfo = {
number: 222,
- url: "https://github.com/owner/repo/pull/222",
- title: "Has PR",
- state: "open",
+ url: 'https://github.com/owner/repo/pull/222',
+ title: 'Has PR',
+ state: 'open',
createdAt: new Date().toISOString(),
};
@@ -269,23 +269,23 @@ describe("worktree-metadata.ts", () => {
});
});
- describe("readAllWorktreeMetadata", () => {
+ describe('readAllWorktreeMetadata', () => {
it("should return empty map when worktrees directory doesn't exist", async () => {
const result = await readAllWorktreeMetadata(testProjectPath);
expect(result.size).toBe(0);
});
- it("should return empty map when worktrees directory is empty", async () => {
- const worktreesDir = path.join(testProjectPath, ".automaker", "worktrees");
+ it('should return empty map when worktrees directory is empty', async () => {
+ const worktreesDir = path.join(testProjectPath, '.automaker', 'worktrees');
await fs.mkdir(worktreesDir, { recursive: true });
const result = await readAllWorktreeMetadata(testProjectPath);
expect(result.size).toBe(0);
});
- it("should read all worktree metadata", async () => {
- const branch1 = "branch-1";
- const branch2 = "branch-2";
+ it('should read all worktree metadata', async () => {
+ const branch1 = 'branch-1';
+ const branch2 = 'branch-2';
const metadata1: WorktreeMetadata = {
branch: branch1,
createdAt: new Date().toISOString(),
@@ -295,9 +295,9 @@ describe("worktree-metadata.ts", () => {
createdAt: new Date().toISOString(),
pr: {
number: 333,
- url: "https://github.com/owner/repo/pull/333",
- title: "PR 3",
- state: "open",
+ url: 'https://github.com/owner/repo/pull/333',
+ title: 'PR 3',
+ state: 'open',
createdAt: new Date().toISOString(),
},
};
@@ -311,12 +311,12 @@ describe("worktree-metadata.ts", () => {
expect(result.get(branch2)).toEqual(metadata2);
});
- it("should skip directories without worktree.json", async () => {
- const worktreesDir = path.join(testProjectPath, ".automaker", "worktrees");
- const emptyDir = path.join(worktreesDir, "empty-dir");
+ it('should skip directories without worktree.json', async () => {
+ const worktreesDir = path.join(testProjectPath, '.automaker', 'worktrees');
+ const emptyDir = path.join(worktreesDir, 'empty-dir');
await fs.mkdir(emptyDir, { recursive: true });
- const branch = "valid-branch";
+ const branch = 'valid-branch';
const metadata: WorktreeMetadata = {
branch,
createdAt: new Date().toISOString(),
@@ -328,13 +328,13 @@ describe("worktree-metadata.ts", () => {
expect(result.get(branch)).toEqual(metadata);
});
- it("should skip files in worktrees directory", async () => {
- const worktreesDir = path.join(testProjectPath, ".automaker", "worktrees");
+ it('should skip files in worktrees directory', async () => {
+ const worktreesDir = path.join(testProjectPath, '.automaker', 'worktrees');
await fs.mkdir(worktreesDir, { recursive: true });
- const filePath = path.join(worktreesDir, "not-a-dir.txt");
- await fs.writeFile(filePath, "content");
+ const filePath = path.join(worktreesDir, 'not-a-dir.txt');
+ await fs.writeFile(filePath, 'content');
- const branch = "valid-branch";
+ const branch = 'valid-branch';
const metadata: WorktreeMetadata = {
branch,
createdAt: new Date().toISOString(),
@@ -346,14 +346,14 @@ describe("worktree-metadata.ts", () => {
expect(result.get(branch)).toEqual(metadata);
});
- it("should skip directories with malformed JSON", async () => {
- const worktreesDir = path.join(testProjectPath, ".automaker", "worktrees");
- const badDir = path.join(worktreesDir, "bad-dir");
+ it('should skip directories with malformed JSON', async () => {
+ const worktreesDir = path.join(testProjectPath, '.automaker', 'worktrees');
+ const badDir = path.join(worktreesDir, 'bad-dir');
await fs.mkdir(badDir, { recursive: true });
- const badJsonPath = path.join(badDir, "worktree.json");
- await fs.writeFile(badJsonPath, "not valid json");
+ const badJsonPath = path.join(badDir, 'worktree.json');
+ await fs.writeFile(badJsonPath, 'not valid json');
- const branch = "valid-branch";
+ const branch = 'valid-branch';
const metadata: WorktreeMetadata = {
branch,
createdAt: new Date().toISOString(),
@@ -366,9 +366,9 @@ describe("worktree-metadata.ts", () => {
});
});
- describe("deleteWorktreeMetadata", () => {
- it("should delete worktree metadata directory", async () => {
- const branch = "to-delete";
+ describe('deleteWorktreeMetadata', () => {
+ it('should delete worktree metadata directory', async () => {
+ const branch = 'to-delete';
const metadata: WorktreeMetadata = {
branch,
createdAt: new Date().toISOString(),
@@ -385,10 +385,7 @@ describe("worktree-metadata.ts", () => {
it("should handle deletion when metadata doesn't exist", async () => {
// Should not throw
- await expect(
- deleteWorktreeMetadata(testProjectPath, "nonexistent")
- ).resolves.toBeUndefined();
+ await expect(deleteWorktreeMetadata(testProjectPath, 'nonexistent')).resolves.toBeUndefined();
});
});
});
-
diff --git a/apps/server/tests/unit/providers/base-provider.test.ts b/apps/server/tests/unit/providers/base-provider.test.ts
index ad0cd41ba..f355fec99 100644
--- a/apps/server/tests/unit/providers/base-provider.test.ts
+++ b/apps/server/tests/unit/providers/base-provider.test.ts
@@ -1,23 +1,21 @@
-import { describe, it, expect } from "vitest";
-import { BaseProvider } from "@/providers/base-provider.js";
+import { describe, it, expect } from 'vitest';
+import { BaseProvider } from '@/providers/base-provider.js';
import type {
ProviderConfig,
ExecuteOptions,
ProviderMessage,
InstallationStatus,
ModelDefinition,
-} from "@automaker/types";
+} from '@automaker/types';
// Concrete implementation for testing the abstract class
class TestProvider extends BaseProvider {
getName(): string {
- return "test-provider";
+ return 'test-provider';
}
- async *executeQuery(
- _options: ExecuteOptions
- ): AsyncGenerator<ProviderMessage> {
- yield { type: "text", text: "test response" };
+ async *executeQuery(_options: ExecuteOptions): AsyncGenerator<ProviderMessage> {
+ yield { type: 'text', text: 'test response' };
}
async detectInstallation(): Promise<InstallationStatus> {
@@ -25,37 +23,35 @@ class TestProvider extends BaseProvider {
}
getAvailableModels(): ModelDefinition[] {
- return [
- { id: "test-model-1", name: "Test Model 1", description: "A test model" },
- ];
+ return [{ id: 'test-model-1', name: 'Test Model 1', description: 'A test model' }];
}
}
-describe("base-provider.ts", () => {
- describe("constructor", () => {
- it("should initialize with empty config when none provided", () => {
+describe('base-provider.ts', () => {
+ describe('constructor', () => {
+ it('should initialize with empty config when none provided', () => {
const provider = new TestProvider();
expect(provider.getConfig()).toEqual({});
});
- it("should initialize with provided config", () => {
+ it('should initialize with provided config', () => {
const config: ProviderConfig = {
- apiKey: "test-key",
- baseUrl: "https://test.com",
+ apiKey: 'test-key',
+ baseUrl: 'https://test.com',
};
const provider = new TestProvider(config);
expect(provider.getConfig()).toEqual(config);
});
- it("should call getName() during initialization", () => {
+ it('should call getName() during initialization', () => {
const provider = new TestProvider();
- expect(provider.getName()).toBe("test-provider");
+ expect(provider.getName()).toBe('test-provider');
});
});
- describe("validateConfig", () => {
- it("should return valid when config exists", () => {
- const provider = new TestProvider({ apiKey: "test" });
+ describe('validateConfig', () => {
+ it('should return valid when config exists', () => {
+ const provider = new TestProvider({ apiKey: 'test' });
const result = provider.validateConfig();
expect(result.valid).toBe(true);
@@ -63,7 +59,7 @@ describe("base-provider.ts", () => {
expect(result.warnings).toHaveLength(0);
});
- it("should return invalid when config is undefined", () => {
+ it('should return invalid when config is undefined', () => {
// Create provider without config
const provider = new TestProvider();
// Manually set config to undefined to test edge case
@@ -72,10 +68,10 @@ describe("base-provider.ts", () => {
const result = provider.validateConfig();
expect(result.valid).toBe(false);
- expect(result.errors).toContain("Provider config is missing");
+ expect(result.errors).toContain('Provider config is missing');
});
- it("should return valid for empty config object", () => {
+ it('should return valid for empty config object', () => {
const provider = new TestProvider({});
const result = provider.validateConfig();
@@ -83,53 +79,53 @@ describe("base-provider.ts", () => {
expect(result.errors).toHaveLength(0);
});
- it("should include warnings array in result", () => {
+ it('should include warnings array in result', () => {
const provider = new TestProvider();
const result = provider.validateConfig();
- expect(result).toHaveProperty("warnings");
+ expect(result).toHaveProperty('warnings');
expect(Array.isArray(result.warnings)).toBe(true);
});
});
- describe("supportsFeature", () => {
+ describe('supportsFeature', () => {
it("should support 'tools' feature", () => {
const provider = new TestProvider();
- expect(provider.supportsFeature("tools")).toBe(true);
+ expect(provider.supportsFeature('tools')).toBe(true);
});
it("should support 'text' feature", () => {
const provider = new TestProvider();
- expect(provider.supportsFeature("text")).toBe(true);
+ expect(provider.supportsFeature('text')).toBe(true);
});
- it("should not support unknown features", () => {
+ it('should not support unknown features', () => {
const provider = new TestProvider();
- expect(provider.supportsFeature("vision")).toBe(false);
- expect(provider.supportsFeature("mcp")).toBe(false);
- expect(provider.supportsFeature("unknown")).toBe(false);
+ expect(provider.supportsFeature('vision')).toBe(false);
+ expect(provider.supportsFeature('mcp')).toBe(false);
+ expect(provider.supportsFeature('unknown')).toBe(false);
});
- it("should be case-sensitive", () => {
+ it('should be case-sensitive', () => {
const provider = new TestProvider();
- expect(provider.supportsFeature("TOOLS")).toBe(false);
- expect(provider.supportsFeature("Text")).toBe(false);
+ expect(provider.supportsFeature('TOOLS')).toBe(false);
+ expect(provider.supportsFeature('Text')).toBe(false);
});
});
- describe("getConfig", () => {
- it("should return current config", () => {
+ describe('getConfig', () => {
+ it('should return current config', () => {
const config: ProviderConfig = {
- apiKey: "test-key",
- model: "test-model",
+ apiKey: 'test-key',
+ model: 'test-model',
};
const provider = new TestProvider(config);
expect(provider.getConfig()).toEqual(config);
});
- it("should return same reference", () => {
- const config: ProviderConfig = { apiKey: "test" };
+ it('should return same reference', () => {
+ const config: ProviderConfig = { apiKey: 'test' };
const provider = new TestProvider(config);
const retrieved1 = provider.getConfig();
@@ -139,31 +135,31 @@ describe("base-provider.ts", () => {
});
});
- describe("setConfig", () => {
- it("should merge partial config with existing config", () => {
- const provider = new TestProvider({ apiKey: "original-key" });
+ describe('setConfig', () => {
+ it('should merge partial config with existing config', () => {
+ const provider = new TestProvider({ apiKey: 'original-key' });
- provider.setConfig({ model: "new-model" });
+ provider.setConfig({ model: 'new-model' });
expect(provider.getConfig()).toEqual({
- apiKey: "original-key",
- model: "new-model",
+ apiKey: 'original-key',
+ model: 'new-model',
});
});
- it("should override existing fields", () => {
- const provider = new TestProvider({ apiKey: "old-key", model: "old-model" });
+ it('should override existing fields', () => {
+ const provider = new TestProvider({ apiKey: 'old-key', model: 'old-model' });
- provider.setConfig({ apiKey: "new-key" });
+ provider.setConfig({ apiKey: 'new-key' });
expect(provider.getConfig()).toEqual({
- apiKey: "new-key",
- model: "old-model",
+ apiKey: 'new-key',
+ model: 'old-model',
});
});
- it("should accept empty object", () => {
- const provider = new TestProvider({ apiKey: "test" });
+ it('should accept empty object', () => {
+ const provider = new TestProvider({ apiKey: 'test' });
const originalConfig = provider.getConfig();
provider.setConfig({});
@@ -171,68 +167,68 @@ describe("base-provider.ts", () => {
expect(provider.getConfig()).toEqual(originalConfig);
});
- it("should handle multiple updates", () => {
+ it('should handle multiple updates', () => {
const provider = new TestProvider();
- provider.setConfig({ apiKey: "key1" });
- provider.setConfig({ model: "model1" });
- provider.setConfig({ baseUrl: "https://test.com" });
+ provider.setConfig({ apiKey: 'key1' });
+ provider.setConfig({ model: 'model1' });
+ provider.setConfig({ baseUrl: 'https://test.com' });
expect(provider.getConfig()).toEqual({
- apiKey: "key1",
- model: "model1",
- baseUrl: "https://test.com",
+ apiKey: 'key1',
+ model: 'model1',
+ baseUrl: 'https://test.com',
});
});
- it("should preserve other fields when updating one field", () => {
+ it('should preserve other fields when updating one field', () => {
const provider = new TestProvider({
- apiKey: "key",
- model: "model",
- baseUrl: "https://test.com",
+ apiKey: 'key',
+ model: 'model',
+ baseUrl: 'https://test.com',
});
- provider.setConfig({ model: "new-model" });
+ provider.setConfig({ model: 'new-model' });
expect(provider.getConfig()).toEqual({
- apiKey: "key",
- model: "new-model",
- baseUrl: "https://test.com",
+ apiKey: 'key',
+ model: 'new-model',
+ baseUrl: 'https://test.com',
});
});
});
- describe("abstract methods", () => {
- it("should require getName implementation", () => {
+ describe('abstract methods', () => {
+ it('should require getName implementation', () => {
const provider = new TestProvider();
- expect(typeof provider.getName).toBe("function");
- expect(provider.getName()).toBe("test-provider");
+ expect(typeof provider.getName).toBe('function');
+ expect(provider.getName()).toBe('test-provider');
});
- it("should require executeQuery implementation", async () => {
+ it('should require executeQuery implementation', async () => {
const provider = new TestProvider();
- expect(typeof provider.executeQuery).toBe("function");
+ expect(typeof provider.executeQuery).toBe('function');
const generator = provider.executeQuery({
- prompt: "test",
- projectDirectory: "/test",
+ prompt: 'test',
+ projectDirectory: '/test',
});
const result = await generator.next();
- expect(result.value).toEqual({ type: "text", text: "test response" });
+ expect(result.value).toEqual({ type: 'text', text: 'test response' });
});
- it("should require detectInstallation implementation", async () => {
+ it('should require detectInstallation implementation', async () => {
const provider = new TestProvider();
- expect(typeof provider.detectInstallation).toBe("function");
+ expect(typeof provider.detectInstallation).toBe('function');
const status = await provider.detectInstallation();
- expect(status).toHaveProperty("installed");
+ expect(status).toHaveProperty('installed');
});
- it("should require getAvailableModels implementation", () => {
+ it('should require getAvailableModels implementation', () => {
const provider = new TestProvider();
- expect(typeof provider.getAvailableModels).toBe("function");
+ expect(typeof provider.getAvailableModels).toBe('function');
const models = provider.getAvailableModels();
expect(Array.isArray(models)).toBe(true);
diff --git a/apps/server/tests/unit/providers/claude-provider.test.ts b/apps/server/tests/unit/providers/claude-provider.test.ts
index 41c5bf719..888cf091a 100644
--- a/apps/server/tests/unit/providers/claude-provider.test.ts
+++ b/apps/server/tests/unit/providers/claude-provider.test.ts
@@ -1,11 +1,11 @@
-import { describe, it, expect, vi, beforeEach, afterEach } from "vitest";
-import { ClaudeProvider } from "@/providers/claude-provider.js";
-import * as sdk from "@anthropic-ai/claude-agent-sdk";
-import { collectAsyncGenerator } from "../../utils/helpers.js";
+import { describe, it, expect, vi, beforeEach, afterEach } from 'vitest';
+import { ClaudeProvider } from '@/providers/claude-provider.js';
+import * as sdk from '@anthropic-ai/claude-agent-sdk';
+import { collectAsyncGenerator } from '../../utils/helpers.js';
-vi.mock("@anthropic-ai/claude-agent-sdk");
+vi.mock('@anthropic-ai/claude-agent-sdk');
-describe("claude-provider.ts", () => {
+describe('claude-provider.ts', () => {
let provider: ClaudeProvider;
beforeEach(() => {
@@ -14,17 +14,17 @@ describe("claude-provider.ts", () => {
delete process.env.ANTHROPIC_API_KEY;
});
- describe("getName", () => {
+ describe('getName', () => {
it("should return 'claude' as provider name", () => {
- expect(provider.getName()).toBe("claude");
+ expect(provider.getName()).toBe('claude');
});
});
- describe("executeQuery", () => {
- it("should execute simple text query", async () => {
+ describe('executeQuery', () => {
+ it('should execute simple text query', async () => {
const mockMessages = [
- { type: "text", text: "Response 1" },
- { type: "text", text: "Response 2" },
+ { type: 'text', text: 'Response 1' },
+ { type: 'text', text: 'Response 2' },
];
vi.mocked(sdk.query).mockReturnValue(
@@ -36,95 +36,86 @@ describe("claude-provider.ts", () => {
);
const generator = provider.executeQuery({
- prompt: "Hello",
- cwd: "/test",
+ prompt: 'Hello',
+ cwd: '/test',
});
const results = await collectAsyncGenerator(generator);
expect(results).toHaveLength(2);
- expect(results[0]).toEqual({ type: "text", text: "Response 1" });
- expect(results[1]).toEqual({ type: "text", text: "Response 2" });
+ expect(results[0]).toEqual({ type: 'text', text: 'Response 1' });
+ expect(results[1]).toEqual({ type: 'text', text: 'Response 2' });
});
- it("should pass correct options to SDK", async () => {
+ it('should pass correct options to SDK', async () => {
vi.mocked(sdk.query).mockReturnValue(
(async function* () {
- yield { type: "text", text: "test" };
+ yield { type: 'text', text: 'test' };
})()
);
const generator = provider.executeQuery({
- prompt: "Test prompt",
- model: "claude-opus-4-5-20251101",
- cwd: "/test/dir",
- systemPrompt: "You are helpful",
+ prompt: 'Test prompt',
+ model: 'claude-opus-4-5-20251101',
+ cwd: '/test/dir',
+ systemPrompt: 'You are helpful',
maxTurns: 10,
- allowedTools: ["Read", "Write"],
+ allowedTools: ['Read', 'Write'],
});
await collectAsyncGenerator(generator);
expect(sdk.query).toHaveBeenCalledWith({
- prompt: "Test prompt",
+ prompt: 'Test prompt',
options: expect.objectContaining({
- model: "claude-opus-4-5-20251101",
- systemPrompt: "You are helpful",
+ model: 'claude-opus-4-5-20251101',
+ systemPrompt: 'You are helpful',
maxTurns: 10,
- cwd: "/test/dir",
- allowedTools: ["Read", "Write"],
- permissionMode: "acceptEdits",
+ cwd: '/test/dir',
+ allowedTools: ['Read', 'Write'],
+ permissionMode: 'acceptEdits',
}),
});
});
- it("should use default allowed tools when not specified", async () => {
+ it('should use default allowed tools when not specified', async () => {
vi.mocked(sdk.query).mockReturnValue(
(async function* () {
- yield { type: "text", text: "test" };
+ yield { type: 'text', text: 'test' };
})()
);
const generator = provider.executeQuery({
- prompt: "Test",
- cwd: "/test",
+ prompt: 'Test',
+ cwd: '/test',
});
await collectAsyncGenerator(generator);
expect(sdk.query).toHaveBeenCalledWith({
- prompt: "Test",
+ prompt: 'Test',
options: expect.objectContaining({
- allowedTools: [
- "Read",
- "Write",
- "Edit",
- "Glob",
- "Grep",
- "Bash",
- "WebSearch",
- "WebFetch",
- ],
+ allowedTools: ['Read', 'Write', 'Edit', 'Glob', 'Grep', 'Bash', 'WebSearch', 'WebFetch'],
}),
});
});
- it("should enable sandbox by default", async () => {
+ it('should enable sandbox by default', async () => {
vi.mocked(sdk.query).mockReturnValue(
(async function* () {
- yield { type: "text", text: "test" };
+ yield { type: 'text', text: 'test' };
})()
);
const generator = provider.executeQuery({
- prompt: "Test",
- cwd: "/test",
+ prompt: 'Test',
+ cwd: '/test',
});
await collectAsyncGenerator(generator);
expect(sdk.query).toHaveBeenCalledWith({
- prompt: "Test",
+ prompt: 'Test',
options: expect.objectContaining({
sandbox: {
enabled: true,
@@ -134,110 +125,110 @@ describe("claude-provider.ts", () => {
});
});
- it("should pass abortController if provided", async () => {
+ it('should pass abortController if provided', async () => {
vi.mocked(sdk.query).mockReturnValue(
(async function* () {
- yield { type: "text", text: "test" };
+ yield { type: 'text', text: 'test' };
})()
);
const abortController = new AbortController();
const generator = provider.executeQuery({
- prompt: "Test",
- cwd: "/test",
+ prompt: 'Test',
+ cwd: '/test',
abortController,
});
await collectAsyncGenerator(generator);
expect(sdk.query).toHaveBeenCalledWith({
- prompt: "Test",
+ prompt: 'Test',
options: expect.objectContaining({
abortController,
}),
});
});
- it("should handle conversation history with sdkSessionId using resume option", async () => {
+ it('should handle conversation history with sdkSessionId using resume option', async () => {
vi.mocked(sdk.query).mockReturnValue(
(async function* () {
- yield { type: "text", text: "test" };
+ yield { type: 'text', text: 'test' };
})()
);
const conversationHistory = [
- { role: "user" as const, content: "Previous message" },
- { role: "assistant" as const, content: "Previous response" },
+ { role: 'user' as const, content: 'Previous message' },
+ { role: 'assistant' as const, content: 'Previous response' },
];
const generator = provider.executeQuery({
- prompt: "Current message",
- cwd: "/test",
+ prompt: 'Current message',
+ cwd: '/test',
conversationHistory,
- sdkSessionId: "test-session-id",
+ sdkSessionId: 'test-session-id',
});
await collectAsyncGenerator(generator);
// Should use resume option when sdkSessionId is provided with history
expect(sdk.query).toHaveBeenCalledWith({
- prompt: "Current message",
+ prompt: 'Current message',
options: expect.objectContaining({
- resume: "test-session-id",
+ resume: 'test-session-id',
}),
});
});
- it("should handle array prompt (with images)", async () => {
+ it('should handle array prompt (with images)', async () => {
vi.mocked(sdk.query).mockReturnValue(
(async function* () {
- yield { type: "text", text: "test" };
+ yield { type: 'text', text: 'test' };
})()
);
const arrayPrompt = [
- { type: "text", text: "Describe this" },
- { type: "image", source: { type: "base64", data: "..." } },
+ { type: 'text', text: 'Describe this' },
+ { type: 'image', source: { type: 'base64', data: '...' } },
];
const generator = provider.executeQuery({
prompt: arrayPrompt as any,
- cwd: "/test",
+ cwd: '/test',
});
await collectAsyncGenerator(generator);
// Should pass an async generator as prompt for array inputs
const callArgs = vi.mocked(sdk.query).mock.calls[0][0];
- expect(typeof callArgs.prompt).not.toBe("string");
+ expect(typeof callArgs.prompt).not.toBe('string');
});
- it("should use maxTurns default of 20", async () => {
+ it('should use maxTurns default of 20', async () => {
vi.mocked(sdk.query).mockReturnValue(
(async function* () {
- yield { type: "text", text: "test" };
+ yield { type: 'text', text: 'test' };
})()
);
const generator = provider.executeQuery({
- prompt: "Test",
- cwd: "/test",
+ prompt: 'Test',
+ cwd: '/test',
});
await collectAsyncGenerator(generator);
expect(sdk.query).toHaveBeenCalledWith({
- prompt: "Test",
+ prompt: 'Test',
options: expect.objectContaining({
maxTurns: 20,
}),
});
});
- it("should handle errors during execution and rethrow", async () => {
- const consoleErrorSpy = vi.spyOn(console, "error").mockImplementation(() => {});
- const testError = new Error("SDK execution failed");
+ it('should handle errors during execution and rethrow', async () => {
+ const consoleErrorSpy = vi.spyOn(console, 'error').mockImplementation(() => {});
+ const testError = new Error('SDK execution failed');
vi.mocked(sdk.query).mockReturnValue(
(async function* () {
@@ -246,13 +237,13 @@ describe("claude-provider.ts", () => {
);
const generator = provider.executeQuery({
- prompt: "Test",
- cwd: "/test",
+ prompt: 'Test',
+ cwd: '/test',
});
- await expect(collectAsyncGenerator(generator)).rejects.toThrow("SDK execution failed");
+ await expect(collectAsyncGenerator(generator)).rejects.toThrow('SDK execution failed');
expect(consoleErrorSpy).toHaveBeenCalledWith(
- "[ClaudeProvider] executeQuery() error during execution:",
+ '[ClaudeProvider] executeQuery() error during execution:',
testError
);
@@ -260,16 +251,16 @@ describe("claude-provider.ts", () => {
});
});
- describe("detectInstallation", () => {
- it("should return installed with SDK method", async () => {
+ describe('detectInstallation', () => {
+ it('should return installed with SDK method', async () => {
const result = await provider.detectInstallation();
expect(result.installed).toBe(true);
- expect(result.method).toBe("sdk");
+ expect(result.method).toBe('sdk');
});
- it("should detect ANTHROPIC_API_KEY", async () => {
- process.env.ANTHROPIC_API_KEY = "test-key";
+ it('should detect ANTHROPIC_API_KEY', async () => {
+ process.env.ANTHROPIC_API_KEY = 'test-key';
const result = await provider.detectInstallation();
@@ -277,7 +268,7 @@ describe("claude-provider.ts", () => {
expect(result.authenticated).toBe(true);
});
- it("should return hasApiKey false when no keys present", async () => {
+ it('should return hasApiKey false when no keys present', async () => {
const result = await provider.detectInstallation();
expect(result.hasApiKey).toBe(false);
@@ -285,54 +276,52 @@ describe("claude-provider.ts", () => {
});
});
- describe("getAvailableModels", () => {
- it("should return 4 Claude models", () => {
+ describe('getAvailableModels', () => {
+ it('should return 4 Claude models', () => {
const models = provider.getAvailableModels();
expect(models).toHaveLength(4);
});
- it("should include Claude Opus 4.5", () => {
+ it('should include Claude Opus 4.5', () => {
const models = provider.getAvailableModels();
- const opus = models.find((m) => m.id === "claude-opus-4-5-20251101");
+ const opus = models.find((m) => m.id === 'claude-opus-4-5-20251101');
expect(opus).toBeDefined();
- expect(opus?.name).toBe("Claude Opus 4.5");
- expect(opus?.provider).toBe("anthropic");
+ expect(opus?.name).toBe('Claude Opus 4.5');
+ expect(opus?.provider).toBe('anthropic');
});
- it("should include Claude Sonnet 4", () => {
+ it('should include Claude Sonnet 4', () => {
const models = provider.getAvailableModels();
- const sonnet = models.find((m) => m.id === "claude-sonnet-4-20250514");
+ const sonnet = models.find((m) => m.id === 'claude-sonnet-4-20250514');
expect(sonnet).toBeDefined();
- expect(sonnet?.name).toBe("Claude Sonnet 4");
+ expect(sonnet?.name).toBe('Claude Sonnet 4');
});
- it("should include Claude 3.5 Sonnet", () => {
+ it('should include Claude 3.5 Sonnet', () => {
const models = provider.getAvailableModels();
- const sonnet35 = models.find(
- (m) => m.id === "claude-3-5-sonnet-20241022"
- );
+ const sonnet35 = models.find((m) => m.id === 'claude-3-5-sonnet-20241022');
expect(sonnet35).toBeDefined();
});
- it("should include Claude 3.5 Haiku", () => {
+ it('should include Claude 3.5 Haiku', () => {
const models = provider.getAvailableModels();
- const haiku = models.find((m) => m.id === "claude-3-5-haiku-20241022");
+ const haiku = models.find((m) => m.id === 'claude-3-5-haiku-20241022');
expect(haiku).toBeDefined();
});
- it("should mark Opus as default", () => {
+ it('should mark Opus as default', () => {
const models = provider.getAvailableModels();
- const opus = models.find((m) => m.id === "claude-opus-4-5-20251101");
+ const opus = models.find((m) => m.id === 'claude-opus-4-5-20251101');
expect(opus?.default).toBe(true);
});
- it("should all support vision and tools", () => {
+ it('should all support vision and tools', () => {
const models = provider.getAvailableModels();
models.forEach((model) => {
@@ -341,7 +330,7 @@ describe("claude-provider.ts", () => {
});
});
- it("should have correct context windows", () => {
+ it('should have correct context windows', () => {
const models = provider.getAvailableModels();
models.forEach((model) => {
@@ -349,7 +338,7 @@ describe("claude-provider.ts", () => {
});
});
- it("should have modelString field matching id", () => {
+ it('should have modelString field matching id', () => {
const models = provider.getAvailableModels();
models.forEach((model) => {
@@ -358,38 +347,38 @@ describe("claude-provider.ts", () => {
});
});
- describe("supportsFeature", () => {
+ describe('supportsFeature', () => {
it("should support 'tools' feature", () => {
- expect(provider.supportsFeature("tools")).toBe(true);
+ expect(provider.supportsFeature('tools')).toBe(true);
});
it("should support 'text' feature", () => {
- expect(provider.supportsFeature("text")).toBe(true);
+ expect(provider.supportsFeature('text')).toBe(true);
});
it("should support 'vision' feature", () => {
- expect(provider.supportsFeature("vision")).toBe(true);
+ expect(provider.supportsFeature('vision')).toBe(true);
});
it("should support 'thinking' feature", () => {
- expect(provider.supportsFeature("thinking")).toBe(true);
+ expect(provider.supportsFeature('thinking')).toBe(true);
});
it("should not support 'mcp' feature", () => {
- expect(provider.supportsFeature("mcp")).toBe(false);
+ expect(provider.supportsFeature('mcp')).toBe(false);
});
it("should not support 'cli' feature", () => {
- expect(provider.supportsFeature("cli")).toBe(false);
+ expect(provider.supportsFeature('cli')).toBe(false);
});
- it("should not support unknown features", () => {
- expect(provider.supportsFeature("unknown")).toBe(false);
+ it('should not support unknown features', () => {
+ expect(provider.supportsFeature('unknown')).toBe(false);
});
});
- describe("validateConfig", () => {
- it("should validate config from base class", () => {
+ describe('validateConfig', () => {
+ it('should validate config from base class', () => {
const result = provider.validateConfig();
expect(result.valid).toBe(true);
@@ -397,21 +386,21 @@ describe("claude-provider.ts", () => {
});
});
- describe("config management", () => {
- it("should get and set config", () => {
- provider.setConfig({ apiKey: "test-key" });
+ describe('config management', () => {
+ it('should get and set config', () => {
+ provider.setConfig({ apiKey: 'test-key' });
const config = provider.getConfig();
- expect(config.apiKey).toBe("test-key");
+ expect(config.apiKey).toBe('test-key');
});
- it("should merge config updates", () => {
- provider.setConfig({ apiKey: "key1" });
- provider.setConfig({ model: "model1" });
+ it('should merge config updates', () => {
+ provider.setConfig({ apiKey: 'key1' });
+ provider.setConfig({ model: 'model1' });
const config = provider.getConfig();
- expect(config.apiKey).toBe("key1");
- expect(config.model).toBe("model1");
+ expect(config.apiKey).toBe('key1');
+ expect(config.model).toBe('model1');
});
});
});
diff --git a/apps/server/tests/unit/providers/provider-factory.test.ts b/apps/server/tests/unit/providers/provider-factory.test.ts
index cd34af158..069fbf860 100644
--- a/apps/server/tests/unit/providers/provider-factory.test.ts
+++ b/apps/server/tests/unit/providers/provider-factory.test.ts
@@ -1,13 +1,13 @@
-import { describe, it, expect, vi, beforeEach, afterEach } from "vitest";
-import { ProviderFactory } from "@/providers/provider-factory.js";
-import { ClaudeProvider } from "@/providers/claude-provider.js";
+import { describe, it, expect, vi, beforeEach, afterEach } from 'vitest';
+import { ProviderFactory } from '@/providers/provider-factory.js';
+import { ClaudeProvider } from '@/providers/claude-provider.js';
-describe("provider-factory.ts", () => {
+describe('provider-factory.ts', () => {
let consoleSpy: any;
beforeEach(() => {
consoleSpy = {
- warn: vi.spyOn(console, "warn").mockImplementation(() => {}),
+ warn: vi.spyOn(console, 'warn').mockImplementation(() => {}),
};
});
@@ -15,55 +15,49 @@ describe("provider-factory.ts", () => {
consoleSpy.warn.mockRestore();
});
- describe("getProviderForModel", () => {
- describe("Claude models (claude-* prefix)", () => {
- it("should return ClaudeProvider for claude-opus-4-5-20251101", () => {
- const provider = ProviderFactory.getProviderForModel(
- "claude-opus-4-5-20251101"
- );
+ describe('getProviderForModel', () => {
+ describe('Claude models (claude-* prefix)', () => {
+ it('should return ClaudeProvider for claude-opus-4-5-20251101', () => {
+ const provider = ProviderFactory.getProviderForModel('claude-opus-4-5-20251101');
expect(provider).toBeInstanceOf(ClaudeProvider);
});
- it("should return ClaudeProvider for claude-sonnet-4-20250514", () => {
- const provider = ProviderFactory.getProviderForModel(
- "claude-sonnet-4-20250514"
- );
+ it('should return ClaudeProvider for claude-sonnet-4-20250514', () => {
+ const provider = ProviderFactory.getProviderForModel('claude-sonnet-4-20250514');
expect(provider).toBeInstanceOf(ClaudeProvider);
});
- it("should return ClaudeProvider for claude-haiku-4-5", () => {
- const provider = ProviderFactory.getProviderForModel("claude-haiku-4-5");
+ it('should return ClaudeProvider for claude-haiku-4-5', () => {
+ const provider = ProviderFactory.getProviderForModel('claude-haiku-4-5');
expect(provider).toBeInstanceOf(ClaudeProvider);
});
- it("should be case-insensitive for claude models", () => {
- const provider = ProviderFactory.getProviderForModel(
- "CLAUDE-OPUS-4-5-20251101"
- );
+ it('should be case-insensitive for claude models', () => {
+ const provider = ProviderFactory.getProviderForModel('CLAUDE-OPUS-4-5-20251101');
expect(provider).toBeInstanceOf(ClaudeProvider);
});
});
- describe("Claude aliases", () => {
+ describe('Claude aliases', () => {
it("should return ClaudeProvider for 'haiku'", () => {
- const provider = ProviderFactory.getProviderForModel("haiku");
+ const provider = ProviderFactory.getProviderForModel('haiku');
expect(provider).toBeInstanceOf(ClaudeProvider);
});
it("should return ClaudeProvider for 'sonnet'", () => {
- const provider = ProviderFactory.getProviderForModel("sonnet");
+ const provider = ProviderFactory.getProviderForModel('sonnet');
expect(provider).toBeInstanceOf(ClaudeProvider);
});
it("should return ClaudeProvider for 'opus'", () => {
- const provider = ProviderFactory.getProviderForModel("opus");
+ const provider = ProviderFactory.getProviderForModel('opus');
expect(provider).toBeInstanceOf(ClaudeProvider);
});
- it("should be case-insensitive for aliases", () => {
- const provider1 = ProviderFactory.getProviderForModel("HAIKU");
- const provider2 = ProviderFactory.getProviderForModel("Sonnet");
- const provider3 = ProviderFactory.getProviderForModel("Opus");
+ it('should be case-insensitive for aliases', () => {
+ const provider1 = ProviderFactory.getProviderForModel('HAIKU');
+ const provider2 = ProviderFactory.getProviderForModel('Sonnet');
+ const provider3 = ProviderFactory.getProviderForModel('Opus');
expect(provider1).toBeInstanceOf(ClaudeProvider);
expect(provider2).toBeInstanceOf(ClaudeProvider);
@@ -71,65 +65,61 @@ describe("provider-factory.ts", () => {
});
});
- describe("Unknown models", () => {
- it("should default to ClaudeProvider for unknown model", () => {
- const provider = ProviderFactory.getProviderForModel("unknown-model-123");
+ describe('Unknown models', () => {
+ it('should default to ClaudeProvider for unknown model', () => {
+ const provider = ProviderFactory.getProviderForModel('unknown-model-123');
expect(provider).toBeInstanceOf(ClaudeProvider);
});
- it("should warn when defaulting to Claude", () => {
- ProviderFactory.getProviderForModel("random-model");
- expect(consoleSpy.warn).toHaveBeenCalledWith(
- expect.stringContaining("Unknown model prefix")
- );
+ it('should warn when defaulting to Claude', () => {
+ ProviderFactory.getProviderForModel('random-model');
expect(consoleSpy.warn).toHaveBeenCalledWith(
- expect.stringContaining("random-model")
+ expect.stringContaining('Unknown model prefix')
);
+ expect(consoleSpy.warn).toHaveBeenCalledWith(expect.stringContaining('random-model'));
expect(consoleSpy.warn).toHaveBeenCalledWith(
- expect.stringContaining("defaulting to Claude")
+ expect.stringContaining('defaulting to Claude')
);
});
- it("should handle empty string", () => {
- const provider = ProviderFactory.getProviderForModel("");
+ it('should handle empty string', () => {
+ const provider = ProviderFactory.getProviderForModel('');
expect(provider).toBeInstanceOf(ClaudeProvider);
expect(consoleSpy.warn).toHaveBeenCalled();
});
- it("should default to ClaudeProvider for gpt models (not supported)", () => {
- const provider = ProviderFactory.getProviderForModel("gpt-5.2");
+ it('should default to ClaudeProvider for gpt models (not supported)', () => {
+ const provider = ProviderFactory.getProviderForModel('gpt-5.2');
expect(provider).toBeInstanceOf(ClaudeProvider);
expect(consoleSpy.warn).toHaveBeenCalled();
});
- it("should default to ClaudeProvider for o-series models (not supported)", () => {
- const provider = ProviderFactory.getProviderForModel("o1");
+ it('should default to ClaudeProvider for o-series models (not supported)', () => {
+ const provider = ProviderFactory.getProviderForModel('o1');
expect(provider).toBeInstanceOf(ClaudeProvider);
expect(consoleSpy.warn).toHaveBeenCalled();
});
});
});
- describe("getAllProviders", () => {
- it("should return array of all providers", () => {
+ describe('getAllProviders', () => {
+ it('should return array of all providers', () => {
const providers = ProviderFactory.getAllProviders();
expect(Array.isArray(providers)).toBe(true);
});
- it("should include ClaudeProvider", () => {
+ it('should include ClaudeProvider', () => {
const providers = ProviderFactory.getAllProviders();
- const hasClaudeProvider = providers.some(
- (p) => p instanceof ClaudeProvider
- );
+ const hasClaudeProvider = providers.some((p) => p instanceof ClaudeProvider);
expect(hasClaudeProvider).toBe(true);
});
- it("should return exactly 1 provider", () => {
+ it('should return exactly 1 provider', () => {
const providers = ProviderFactory.getAllProviders();
expect(providers).toHaveLength(1);
});
- it("should create new instances each time", () => {
+ it('should create new instances each time', () => {
const providers1 = ProviderFactory.getAllProviders();
const providers2 = ProviderFactory.getAllProviders();
@@ -137,60 +127,60 @@ describe("provider-factory.ts", () => {
});
});
- describe("checkAllProviders", () => {
- it("should return installation status for all providers", async () => {
+ describe('checkAllProviders', () => {
+ it('should return installation status for all providers', async () => {
const statuses = await ProviderFactory.checkAllProviders();
- expect(statuses).toHaveProperty("claude");
+ expect(statuses).toHaveProperty('claude');
});
- it("should call detectInstallation on each provider", async () => {
+ it('should call detectInstallation on each provider', async () => {
const statuses = await ProviderFactory.checkAllProviders();
- expect(statuses.claude).toHaveProperty("installed");
+ expect(statuses.claude).toHaveProperty('installed');
});
- it("should return correct provider names as keys", async () => {
+ it('should return correct provider names as keys', async () => {
const statuses = await ProviderFactory.checkAllProviders();
const keys = Object.keys(statuses);
- expect(keys).toContain("claude");
+ expect(keys).toContain('claude');
expect(keys).toHaveLength(1);
});
});
- describe("getProviderByName", () => {
+ describe('getProviderByName', () => {
it("should return ClaudeProvider for 'claude'", () => {
- const provider = ProviderFactory.getProviderByName("claude");
+ const provider = ProviderFactory.getProviderByName('claude');
expect(provider).toBeInstanceOf(ClaudeProvider);
});
it("should return ClaudeProvider for 'anthropic'", () => {
- const provider = ProviderFactory.getProviderByName("anthropic");
+ const provider = ProviderFactory.getProviderByName('anthropic');
expect(provider).toBeInstanceOf(ClaudeProvider);
});
- it("should be case-insensitive", () => {
- const provider1 = ProviderFactory.getProviderByName("CLAUDE");
- const provider2 = ProviderFactory.getProviderByName("ANTHROPIC");
+ it('should be case-insensitive', () => {
+ const provider1 = ProviderFactory.getProviderByName('CLAUDE');
+ const provider2 = ProviderFactory.getProviderByName('ANTHROPIC');
expect(provider1).toBeInstanceOf(ClaudeProvider);
expect(provider2).toBeInstanceOf(ClaudeProvider);
});
- it("should return null for unknown provider", () => {
- const provider = ProviderFactory.getProviderByName("unknown");
+ it('should return null for unknown provider', () => {
+ const provider = ProviderFactory.getProviderByName('unknown');
expect(provider).toBeNull();
});
- it("should return null for empty string", () => {
- const provider = ProviderFactory.getProviderByName("");
+ it('should return null for empty string', () => {
+ const provider = ProviderFactory.getProviderByName('');
expect(provider).toBeNull();
});
- it("should create new instance each time", () => {
- const provider1 = ProviderFactory.getProviderByName("claude");
- const provider2 = ProviderFactory.getProviderByName("claude");
+ it('should create new instance each time', () => {
+ const provider1 = ProviderFactory.getProviderByName('claude');
+ const provider2 = ProviderFactory.getProviderByName('claude');
expect(provider1).not.toBe(provider2);
expect(provider1).toBeInstanceOf(ClaudeProvider);
@@ -198,35 +188,33 @@ describe("provider-factory.ts", () => {
});
});
- describe("getAllAvailableModels", () => {
- it("should return array of models", () => {
+ describe('getAllAvailableModels', () => {
+ it('should return array of models', () => {
const models = ProviderFactory.getAllAvailableModels();
expect(Array.isArray(models)).toBe(true);
});
- it("should include models from all providers", () => {
+ it('should include models from all providers', () => {
const models = ProviderFactory.getAllAvailableModels();
expect(models.length).toBeGreaterThan(0);
});
- it("should return models with required fields", () => {
+ it('should return models with required fields', () => {
const models = ProviderFactory.getAllAvailableModels();
models.forEach((model) => {
- expect(model).toHaveProperty("id");
- expect(model).toHaveProperty("name");
- expect(typeof model.id).toBe("string");
- expect(typeof model.name).toBe("string");
+ expect(model).toHaveProperty('id');
+ expect(model).toHaveProperty('name');
+ expect(typeof model.id).toBe('string');
+ expect(typeof model.name).toBe('string');
});
});
- it("should include Claude models", () => {
+ it('should include Claude models', () => {
const models = ProviderFactory.getAllAvailableModels();
// Claude models should include claude-* in their IDs
- const hasClaudeModels = models.some((m) =>
- m.id.toLowerCase().includes("claude")
- );
+ const hasClaudeModels = models.some((m) => m.id.toLowerCase().includes('claude'));
expect(hasClaudeModels).toBe(true);
});
diff --git a/apps/server/tests/unit/routes/app-spec/common.test.ts b/apps/server/tests/unit/routes/app-spec/common.test.ts
index 14ec98d1d..aeaf8ea5c 100644
--- a/apps/server/tests/unit/routes/app-spec/common.test.ts
+++ b/apps/server/tests/unit/routes/app-spec/common.test.ts
@@ -1,65 +1,59 @@
-import { describe, it, expect, beforeEach } from "vitest";
+import { describe, it, expect, beforeEach } from 'vitest';
import {
setRunningState,
getErrorMessage,
getSpecRegenerationStatus,
-} from "@/routes/app-spec/common.js";
+} from '@/routes/app-spec/common.js';
-describe("app-spec/common.ts", () => {
+describe('app-spec/common.ts', () => {
beforeEach(() => {
// Reset state before each test
setRunningState(false, null);
});
- describe("setRunningState", () => {
- it("should set isRunning to true when running is true", () => {
+ describe('setRunningState', () => {
+ it('should set isRunning to true when running is true', () => {
setRunningState(true);
expect(getSpecRegenerationStatus().isRunning).toBe(true);
});
- it("should set isRunning to false when running is false", () => {
+ it('should set isRunning to false when running is false', () => {
setRunningState(true);
setRunningState(false);
expect(getSpecRegenerationStatus().isRunning).toBe(false);
});
- it("should set currentAbortController when provided", () => {
+ it('should set currentAbortController when provided', () => {
const controller = new AbortController();
setRunningState(true, controller);
- expect(getSpecRegenerationStatus().currentAbortController).toBe(
- controller
- );
+ expect(getSpecRegenerationStatus().currentAbortController).toBe(controller);
});
- it("should set currentAbortController to null when not provided", () => {
+ it('should set currentAbortController to null when not provided', () => {
const controller = new AbortController();
setRunningState(true, controller);
setRunningState(false);
expect(getSpecRegenerationStatus().currentAbortController).toBe(null);
});
- it("should set currentAbortController to null when explicitly passed null", () => {
+ it('should set currentAbortController to null when explicitly passed null', () => {
const controller = new AbortController();
setRunningState(true, controller);
setRunningState(true, null);
expect(getSpecRegenerationStatus().currentAbortController).toBe(null);
});
- it("should update state multiple times correctly", () => {
+ it('should update state multiple times correctly', () => {
const controller1 = new AbortController();
const controller2 = new AbortController();
setRunningState(true, controller1);
expect(getSpecRegenerationStatus().isRunning).toBe(true);
- expect(getSpecRegenerationStatus().currentAbortController).toBe(
- controller1
- );
+ expect(getSpecRegenerationStatus().currentAbortController).toBe(controller1);
setRunningState(true, controller2);
expect(getSpecRegenerationStatus().isRunning).toBe(true);
- expect(getSpecRegenerationStatus().currentAbortController).toBe(
- controller2
- );
+ expect(getSpecRegenerationStatus().currentAbortController).toBe(controller2);
setRunningState(false, null);
expect(getSpecRegenerationStatus().isRunning).toBe(false);
@@ -67,42 +61,42 @@ describe("app-spec/common.ts", () => {
});
});
- describe("getErrorMessage", () => {
- it("should return message from Error instance", () => {
- const error = new Error("Test error message");
- expect(getErrorMessage(error)).toBe("Test error message");
+ describe('getErrorMessage', () => {
+ it('should return message from Error instance', () => {
+ const error = new Error('Test error message');
+ expect(getErrorMessage(error)).toBe('Test error message');
});
it("should return 'Unknown error' for non-Error objects", () => {
- expect(getErrorMessage("string error")).toBe("Unknown error");
- expect(getErrorMessage(123)).toBe("Unknown error");
- expect(getErrorMessage(null)).toBe("Unknown error");
- expect(getErrorMessage(undefined)).toBe("Unknown error");
- expect(getErrorMessage({})).toBe("Unknown error");
- expect(getErrorMessage([])).toBe("Unknown error");
+ expect(getErrorMessage('string error')).toBe('Unknown error');
+ expect(getErrorMessage(123)).toBe('Unknown error');
+ expect(getErrorMessage(null)).toBe('Unknown error');
+ expect(getErrorMessage(undefined)).toBe('Unknown error');
+ expect(getErrorMessage({})).toBe('Unknown error');
+ expect(getErrorMessage([])).toBe('Unknown error');
});
- it("should return message from Error with empty message", () => {
- const error = new Error("");
- expect(getErrorMessage(error)).toBe("");
+ it('should return message from Error with empty message', () => {
+ const error = new Error('');
+ expect(getErrorMessage(error)).toBe('');
});
- it("should handle Error objects with custom properties", () => {
- const error = new Error("Base message");
- (error as any).customProp = "custom value";
- expect(getErrorMessage(error)).toBe("Base message");
+ it('should handle Error objects with custom properties', () => {
+ const error = new Error('Base message');
+ (error as any).customProp = 'custom value';
+ expect(getErrorMessage(error)).toBe('Base message');
});
- it("should handle Error objects created with different constructors", () => {
+ it('should handle Error objects created with different constructors', () => {
class CustomError extends Error {
constructor(message: string) {
super(message);
- this.name = "CustomError";
+ this.name = 'CustomError';
}
}
- const customError = new CustomError("Custom error message");
- expect(getErrorMessage(customError)).toBe("Custom error message");
+ const customError = new CustomError('Custom error message');
+ expect(getErrorMessage(customError)).toBe('Custom error message');
});
});
});
diff --git a/apps/server/tests/unit/routes/app-spec/parse-and-create-features.test.ts b/apps/server/tests/unit/routes/app-spec/parse-and-create-features.test.ts
index 7b3d05689..9bb5c1200 100644
--- a/apps/server/tests/unit/routes/app-spec/parse-and-create-features.test.ts
+++ b/apps/server/tests/unit/routes/app-spec/parse-and-create-features.test.ts
@@ -1,11 +1,11 @@
-import { describe, it, expect } from "vitest";
+import { describe, it, expect } from 'vitest';
-describe("app-spec/parse-and-create-features.ts - JSON extraction", () => {
+describe('app-spec/parse-and-create-features.ts - JSON extraction', () => {
// Test the JSON extraction regex pattern used in parseAndCreateFeatures
const jsonExtractionPattern = /\{[\s\S]*"features"[\s\S]*\}/;
- describe("JSON extraction regex", () => {
- it("should extract JSON with features array", () => {
+ describe('JSON extraction regex', () => {
+ it('should extract JSON with features array', () => {
const content = `Here is the response:
{
"features": [
@@ -26,7 +26,7 @@ describe("app-spec/parse-and-create-features.ts - JSON extraction", () => {
expect(match![0]).toContain('"id": "feature-1"');
});
- it("should extract JSON with multiple features", () => {
+ it('should extract JSON with multiple features', () => {
const content = `Some text before
{
"features": [
@@ -49,7 +49,7 @@ Some text after`;
expect(match![0]).toContain('"feature-2"');
});
- it("should extract JSON with nested objects and arrays", () => {
+ it('should extract JSON with nested objects and arrays', () => {
const content = `Response:
{
"features": [
@@ -69,7 +69,7 @@ Some text after`;
expect(match![0]).toContain('"dep-1"');
});
- it("should handle JSON with whitespace and newlines", () => {
+ it('should handle JSON with whitespace and newlines', () => {
const content = `Text before
{
"features": [
@@ -87,7 +87,7 @@ Text after`;
expect(match![0]).toContain('"features"');
});
- it("should extract JSON when features array is empty", () => {
+ it('should extract JSON when features array is empty', () => {
const content = `Response:
{
"features": []
@@ -96,10 +96,10 @@ Text after`;
const match = content.match(jsonExtractionPattern);
expect(match).not.toBeNull();
expect(match![0]).toContain('"features"');
- expect(match![0]).toContain("[]");
+ expect(match![0]).toContain('[]');
});
- it("should not match content without features key", () => {
+ it('should not match content without features key', () => {
const content = `{
"otherKey": "value"
}`;
@@ -108,13 +108,13 @@ Text after`;
expect(match).toBeNull();
});
- it("should not match content without JSON structure", () => {
- const content = "Just plain text with features mentioned";
+ it('should not match content without JSON structure', () => {
+ const content = 'Just plain text with features mentioned';
const match = content.match(jsonExtractionPattern);
expect(match).toBeNull();
});
- it("should extract JSON when features key appears multiple times", () => {
+ it('should extract JSON when features key appears multiple times', () => {
const content = `Before:
{
"features": [
@@ -132,7 +132,7 @@ After: The word "features" appears again`;
expect(match![0]).toContain('"features"');
});
- it("should handle JSON with escaped quotes", () => {
+ it('should handle JSON with escaped quotes', () => {
const content = `{
"features": [
{
@@ -147,7 +147,7 @@ After: The word "features" appears again`;
expect(match![0]).toContain('"features"');
});
- it("should extract JSON with complex nested structure", () => {
+ it('should extract JSON with complex nested structure', () => {
const content = `Response:
{
"features": [
@@ -177,8 +177,8 @@ After: The word "features" appears again`;
});
});
- describe("JSON parsing validation", () => {
- it("should parse valid feature JSON structure", () => {
+ describe('JSON parsing validation', () => {
+ it('should parse valid feature JSON structure', () => {
const validJson = `{
"features": [
{
@@ -196,11 +196,11 @@ After: The word "features" appears again`;
expect(parsed.features).toBeDefined();
expect(Array.isArray(parsed.features)).toBe(true);
expect(parsed.features.length).toBe(1);
- expect(parsed.features[0].id).toBe("feature-1");
- expect(parsed.features[0].title).toBe("Test Feature");
+ expect(parsed.features[0].id).toBe('feature-1');
+ expect(parsed.features[0].title).toBe('Test Feature');
});
- it("should handle features with optional fields", () => {
+ it('should handle features with optional fields', () => {
const jsonWithOptionalFields = `{
"features": [
{
@@ -213,14 +213,14 @@ After: The word "features" appears again`;
}`;
const parsed = JSON.parse(jsonWithOptionalFields);
- expect(parsed.features[0].id).toBe("feature-1");
+ expect(parsed.features[0].id).toBe('feature-1');
expect(parsed.features[0].priority).toBe(2);
// description and dependencies are optional
expect(parsed.features[0].description).toBeUndefined();
expect(parsed.features[0].dependencies).toBeUndefined();
});
- it("should handle features with dependencies", () => {
+ it('should handle features with dependencies', () => {
const jsonWithDeps = `{
"features": [
{
@@ -238,7 +238,7 @@ After: The word "features" appears again`;
const parsed = JSON.parse(jsonWithDeps);
expect(parsed.features[0].dependencies).toEqual([]);
- expect(parsed.features[1].dependencies).toEqual(["feature-1"]);
+ expect(parsed.features[1].dependencies).toEqual(['feature-1']);
});
});
});
diff --git a/apps/server/tests/unit/services/agent-service.test.ts b/apps/server/tests/unit/services/agent-service.test.ts
index 1661522c8..8b125f8cd 100644
--- a/apps/server/tests/unit/services/agent-service.test.ts
+++ b/apps/server/tests/unit/services/agent-service.test.ts
@@ -1,17 +1,17 @@
-import { describe, it, expect, vi, beforeEach } from "vitest";
-import { AgentService } from "@/services/agent-service.js";
-import { ProviderFactory } from "@/providers/provider-factory.js";
-import * as fs from "fs/promises";
-import * as imageHandler from "@automaker/utils";
-import * as promptBuilder from "@automaker/utils";
-import { collectAsyncGenerator } from "../../utils/helpers.js";
-
-vi.mock("fs/promises");
-vi.mock("@/providers/provider-factory.js");
-vi.mock("@automaker/utils");
-vi.mock("@automaker/utils");
-
-describe("agent-service.ts", () => {
+import { describe, it, expect, vi, beforeEach } from 'vitest';
+import { AgentService } from '@/services/agent-service.js';
+import { ProviderFactory } from '@/providers/provider-factory.js';
+import * as fs from 'fs/promises';
+import * as imageHandler from '@automaker/utils';
+import * as promptBuilder from '@automaker/utils';
+import { collectAsyncGenerator } from '../../utils/helpers.js';
+
+vi.mock('fs/promises');
+vi.mock('@/providers/provider-factory.js');
+vi.mock('@automaker/utils');
+vi.mock('@automaker/utils');
+
+describe('agent-service.ts', () => {
let service: AgentService;
const mockEvents = {
subscribe: vi.fn(),
@@ -20,86 +20,83 @@ describe("agent-service.ts", () => {
beforeEach(() => {
vi.clearAllMocks();
- service = new AgentService("/test/data", mockEvents as any);
+ service = new AgentService('/test/data', mockEvents as any);
});
- describe("initialize", () => {
- it("should create state directory", async () => {
+ describe('initialize', () => {
+ it('should create state directory', async () => {
vi.mocked(fs.mkdir).mockResolvedValue(undefined);
await service.initialize();
- expect(fs.mkdir).toHaveBeenCalledWith(
- expect.stringContaining("agent-sessions"),
- { recursive: true }
- );
+ expect(fs.mkdir).toHaveBeenCalledWith(expect.stringContaining('agent-sessions'), {
+ recursive: true,
+ });
});
});
- describe("startConversation", () => {
- it("should create new session with empty messages", async () => {
- const error: any = new Error("ENOENT");
- error.code = "ENOENT";
+ describe('startConversation', () => {
+ it('should create new session with empty messages', async () => {
+ const error: any = new Error('ENOENT');
+ error.code = 'ENOENT';
vi.mocked(fs.readFile).mockRejectedValue(error);
const result = await service.startConversation({
- sessionId: "session-1",
- workingDirectory: "/test/dir",
+ sessionId: 'session-1',
+ workingDirectory: '/test/dir',
});
expect(result.success).toBe(true);
expect(result.messages).toEqual([]);
- expect(result.sessionId).toBe("session-1");
+ expect(result.sessionId).toBe('session-1');
});
- it("should load existing session", async () => {
+ it('should load existing session', async () => {
const existingMessages = [
{
- id: "msg-1",
- role: "user",
- content: "Hello",
- timestamp: "2024-01-01T00:00:00Z",
+ id: 'msg-1',
+ role: 'user',
+ content: 'Hello',
+ timestamp: '2024-01-01T00:00:00Z',
},
];
- vi.mocked(fs.readFile).mockResolvedValue(
- JSON.stringify(existingMessages)
- );
+ vi.mocked(fs.readFile).mockResolvedValue(JSON.stringify(existingMessages));
const result = await service.startConversation({
- sessionId: "session-1",
- workingDirectory: "/test/dir",
+ sessionId: 'session-1',
+ workingDirectory: '/test/dir',
});
expect(result.success).toBe(true);
expect(result.messages).toEqual(existingMessages);
});
- it("should use process.cwd() if no working directory provided", async () => {
- const error: any = new Error("ENOENT");
- error.code = "ENOENT";
+ it('should use process.cwd() if no working directory provided', async () => {
+ const error: any = new Error('ENOENT');
+ error.code = 'ENOENT';
vi.mocked(fs.readFile).mockRejectedValue(error);
const result = await service.startConversation({
- sessionId: "session-1",
+ sessionId: 'session-1',
});
expect(result.success).toBe(true);
});
- it("should reuse existing session if already started", async () => {
- const error: any = new Error("ENOENT");
- error.code = "ENOENT";
+ it('should reuse existing session if already started', async () => {
+ const error: any = new Error('ENOENT');
+ error.code = 'ENOENT';
vi.mocked(fs.readFile).mockRejectedValue(error);
// Start session first time
await service.startConversation({
- sessionId: "session-1",
+ sessionId: 'session-1',
});
// Start again with same ID
const result = await service.startConversation({
- sessionId: "session-1",
+ sessionId: 'session-1',
});
expect(result.success).toBe(true);
@@ -109,252 +106,237 @@ describe("agent-service.ts", () => {
});
});
- describe("sendMessage", () => {
+ describe('sendMessage', () => {
beforeEach(async () => {
- const error: any = new Error("ENOENT");
- error.code = "ENOENT";
+ const error: any = new Error('ENOENT');
+ error.code = 'ENOENT';
vi.mocked(fs.readFile).mockRejectedValue(error);
vi.mocked(fs.writeFile).mockResolvedValue(undefined);
vi.mocked(fs.mkdir).mockResolvedValue(undefined);
await service.startConversation({
- sessionId: "session-1",
- workingDirectory: "/test/dir",
+ sessionId: 'session-1',
+ workingDirectory: '/test/dir',
});
});
- it("should throw if session not found", async () => {
+ it('should throw if session not found', async () => {
await expect(
service.sendMessage({
- sessionId: "nonexistent",
- message: "Hello",
+ sessionId: 'nonexistent',
+ message: 'Hello',
})
- ).rejects.toThrow("Session nonexistent not found");
+ ).rejects.toThrow('Session nonexistent not found');
});
-
- it("should process message and stream responses", async () => {
+ it('should process message and stream responses', async () => {
const mockProvider = {
- getName: () => "claude",
+ getName: () => 'claude',
executeQuery: async function* () {
yield {
- type: "assistant",
+ type: 'assistant',
message: {
- role: "assistant",
- content: [{ type: "text", text: "Response" }],
+ role: 'assistant',
+ content: [{ type: 'text', text: 'Response' }],
},
};
yield {
- type: "result",
- subtype: "success",
+ type: 'result',
+ subtype: 'success',
};
},
};
- vi.mocked(ProviderFactory.getProviderForModel).mockReturnValue(
- mockProvider as any
- );
+ vi.mocked(ProviderFactory.getProviderForModel).mockReturnValue(mockProvider as any);
vi.mocked(promptBuilder.buildPromptWithImages).mockResolvedValue({
- content: "Hello",
+ content: 'Hello',
hasImages: false,
});
const result = await service.sendMessage({
- sessionId: "session-1",
- message: "Hello",
- workingDirectory: "/custom/dir",
+ sessionId: 'session-1',
+ message: 'Hello',
+ workingDirectory: '/custom/dir',
});
expect(result.success).toBe(true);
expect(mockEvents.emit).toHaveBeenCalled();
});
- it("should handle images in message", async () => {
+ it('should handle images in message', async () => {
const mockProvider = {
- getName: () => "claude",
+ getName: () => 'claude',
executeQuery: async function* () {
yield {
- type: "result",
- subtype: "success",
+ type: 'result',
+ subtype: 'success',
};
},
};
- vi.mocked(ProviderFactory.getProviderForModel).mockReturnValue(
- mockProvider as any
- );
+ vi.mocked(ProviderFactory.getProviderForModel).mockReturnValue(mockProvider as any);
vi.mocked(imageHandler.readImageAsBase64).mockResolvedValue({
- base64: "base64data",
- mimeType: "image/png",
- filename: "test.png",
- originalPath: "/path/test.png",
+ base64: 'base64data',
+ mimeType: 'image/png',
+ filename: 'test.png',
+ originalPath: '/path/test.png',
});
vi.mocked(promptBuilder.buildPromptWithImages).mockResolvedValue({
- content: "Check image",
+ content: 'Check image',
hasImages: true,
});
await service.sendMessage({
- sessionId: "session-1",
- message: "Check this",
- imagePaths: ["/path/test.png"],
+ sessionId: 'session-1',
+ message: 'Check this',
+ imagePaths: ['/path/test.png'],
});
- expect(imageHandler.readImageAsBase64).toHaveBeenCalledWith(
- "/path/test.png"
- );
+ expect(imageHandler.readImageAsBase64).toHaveBeenCalledWith('/path/test.png');
});
- it("should handle failed image loading gracefully", async () => {
+ it('should handle failed image loading gracefully', async () => {
const mockProvider = {
- getName: () => "claude",
+ getName: () => 'claude',
executeQuery: async function* () {
yield {
- type: "result",
- subtype: "success",
+ type: 'result',
+ subtype: 'success',
};
},
};
- vi.mocked(ProviderFactory.getProviderForModel).mockReturnValue(
- mockProvider as any
- );
+ vi.mocked(ProviderFactory.getProviderForModel).mockReturnValue(mockProvider as any);
- vi.mocked(imageHandler.readImageAsBase64).mockRejectedValue(
- new Error("Image not found")
- );
+ vi.mocked(imageHandler.readImageAsBase64).mockRejectedValue(new Error('Image not found'));
vi.mocked(promptBuilder.buildPromptWithImages).mockResolvedValue({
- content: "Check image",
+ content: 'Check image',
hasImages: false,
});
- const consoleSpy = vi.spyOn(console, "error").mockImplementation(() => {});
+ const consoleSpy = vi.spyOn(console, 'error').mockImplementation(() => {});
await service.sendMessage({
- sessionId: "session-1",
- message: "Check this",
- imagePaths: ["/path/test.png"],
+ sessionId: 'session-1',
+ message: 'Check this',
+ imagePaths: ['/path/test.png'],
});
expect(consoleSpy).toHaveBeenCalled();
consoleSpy.mockRestore();
});
- it("should use custom model if provided", async () => {
+ it('should use custom model if provided', async () => {
const mockProvider = {
- getName: () => "claude",
+ getName: () => 'claude',
executeQuery: async function* () {
yield {
- type: "result",
- subtype: "success",
+ type: 'result',
+ subtype: 'success',
};
},
};
- vi.mocked(ProviderFactory.getProviderForModel).mockReturnValue(
- mockProvider as any
- );
+ vi.mocked(ProviderFactory.getProviderForModel).mockReturnValue(mockProvider as any);
vi.mocked(promptBuilder.buildPromptWithImages).mockResolvedValue({
- content: "Hello",
+ content: 'Hello',
hasImages: false,
});
await service.sendMessage({
- sessionId: "session-1",
- message: "Hello",
- model: "claude-sonnet-4-20250514",
+ sessionId: 'session-1',
+ message: 'Hello',
+ model: 'claude-sonnet-4-20250514',
});
- expect(ProviderFactory.getProviderForModel).toHaveBeenCalledWith("claude-sonnet-4-20250514");
+ expect(ProviderFactory.getProviderForModel).toHaveBeenCalledWith('claude-sonnet-4-20250514');
});
- it("should save session messages", async () => {
+ it('should save session messages', async () => {
const mockProvider = {
- getName: () => "claude",
+ getName: () => 'claude',
executeQuery: async function* () {
yield {
- type: "result",
- subtype: "success",
+ type: 'result',
+ subtype: 'success',
};
},
};
- vi.mocked(ProviderFactory.getProviderForModel).mockReturnValue(
- mockProvider as any
- );
+ vi.mocked(ProviderFactory.getProviderForModel).mockReturnValue(mockProvider as any);
vi.mocked(promptBuilder.buildPromptWithImages).mockResolvedValue({
- content: "Hello",
+ content: 'Hello',
hasImages: false,
});
await service.sendMessage({
- sessionId: "session-1",
- message: "Hello",
+ sessionId: 'session-1',
+ message: 'Hello',
});
expect(fs.writeFile).toHaveBeenCalled();
});
});
- describe("stopExecution", () => {
- it("should stop execution for a session", async () => {
- const error: any = new Error("ENOENT");
- error.code = "ENOENT";
+ describe('stopExecution', () => {
+ it('should stop execution for a session', async () => {
+ const error: any = new Error('ENOENT');
+ error.code = 'ENOENT';
vi.mocked(fs.readFile).mockRejectedValue(error);
await service.startConversation({
- sessionId: "session-1",
+ sessionId: 'session-1',
});
// Should return success
- const result = await service.stopExecution("session-1");
+ const result = await service.stopExecution('session-1');
expect(result.success).toBeDefined();
});
});
- describe("getHistory", () => {
- it("should return message history", async () => {
- const error: any = new Error("ENOENT");
- error.code = "ENOENT";
+ describe('getHistory', () => {
+ it('should return message history', async () => {
+ const error: any = new Error('ENOENT');
+ error.code = 'ENOENT';
vi.mocked(fs.readFile).mockRejectedValue(error);
await service.startConversation({
- sessionId: "session-1",
+ sessionId: 'session-1',
});
- const history = service.getHistory("session-1");
+ const history = service.getHistory('session-1');
expect(history).toBeDefined();
expect(history?.messages).toEqual([]);
});
- it("should handle non-existent session", () => {
- const history = service.getHistory("nonexistent");
+ it('should handle non-existent session', () => {
+ const history = service.getHistory('nonexistent');
expect(history).toBeDefined(); // Returns error object
});
});
- describe("clearSession", () => {
- it("should clear session messages", async () => {
- const error: any = new Error("ENOENT");
- error.code = "ENOENT";
+ describe('clearSession', () => {
+ it('should clear session messages', async () => {
+ const error: any = new Error('ENOENT');
+ error.code = 'ENOENT';
vi.mocked(fs.readFile).mockRejectedValue(error);
vi.mocked(fs.writeFile).mockResolvedValue(undefined);
vi.mocked(fs.mkdir).mockResolvedValue(undefined);
await service.startConversation({
- sessionId: "session-1",
+ sessionId: 'session-1',
});
- await service.clearSession("session-1");
+ await service.clearSession('session-1');
- const history = service.getHistory("session-1");
+ const history = service.getHistory('session-1');
expect(history?.messages).toEqual([]);
expect(fs.writeFile).toHaveBeenCalled();
});
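
The agent-service tests above repeat the same provider stub: an object exposing getName() and an executeQuery async generator that streams events and ends with a result. A small helper capturing that shape is sketched below; the event types are inferred from the literals yielded in the tests and are assumptions about the provider interface, not the production types.

// Sketch of the provider stub shape repeated throughout agent-service.test.ts.
type StubEvent =
  | { type: 'assistant'; message: { role: 'assistant'; content: { type: 'text'; text: string }[] } }
  | { type: 'result'; subtype: 'success' | 'error' };

function makeProviderStub(texts: string[] = []) {
  return {
    getName: () => 'claude',
    executeQuery: async function* (): AsyncGenerator<StubEvent> {
      // Stream one assistant event per text chunk, then a terminal result.
      for (const text of texts) {
        yield {
          type: 'assistant',
          message: { role: 'assistant', content: [{ type: 'text', text }] },
        };
      }
      yield { type: 'result', subtype: 'success' };
    },
  };
}

// Usage inside a test, with the same vi.mocked setup as above:
// vi.mocked(ProviderFactory.getProviderForModel).mockReturnValue(makeProviderStub(['Response']) as any);
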
diff --git a/apps/server/tests/unit/services/auto-mode-service-planning.test.ts b/apps/server/tests/unit/services/auto-mode-service-planning.test.ts
index 09483e782..7b52fe38d 100644
--- a/apps/server/tests/unit/services/auto-mode-service-planning.test.ts
+++ b/apps/server/tests/unit/services/auto-mode-service-planning.test.ts
@@ -1,7 +1,7 @@
-import { describe, it, expect, vi, beforeEach, afterEach } from "vitest";
-import { AutoModeService } from "@/services/auto-mode-service.js";
+import { describe, it, expect, vi, beforeEach, afterEach } from 'vitest';
+import { AutoModeService } from '@/services/auto-mode-service.js';
-describe("auto-mode-service.ts - Planning Mode", () => {
+describe('auto-mode-service.ts - Planning Mode', () => {
let service: AutoModeService;
const mockEvents = {
subscribe: vi.fn(),
@@ -18,98 +18,98 @@ describe("auto-mode-service.ts - Planning Mode", () => {
await service.stopAutoLoop().catch(() => {});
});
- describe("getPlanningPromptPrefix", () => {
+ describe('getPlanningPromptPrefix', () => {
// Access private method through any cast for testing
const getPlanningPromptPrefix = (svc: any, feature: any) => {
return svc.getPlanningPromptPrefix(feature);
};
- it("should return empty string for skip mode", () => {
- const feature = { id: "test", planningMode: "skip" as const };
+ it('should return empty string for skip mode', () => {
+ const feature = { id: 'test', planningMode: 'skip' as const };
const result = getPlanningPromptPrefix(service, feature);
- expect(result).toBe("");
+ expect(result).toBe('');
});
- it("should return empty string when planningMode is undefined", () => {
- const feature = { id: "test" };
+ it('should return empty string when planningMode is undefined', () => {
+ const feature = { id: 'test' };
const result = getPlanningPromptPrefix(service, feature);
- expect(result).toBe("");
+ expect(result).toBe('');
});
- it("should return lite prompt for lite mode without approval", () => {
+ it('should return lite prompt for lite mode without approval', () => {
const feature = {
- id: "test",
- planningMode: "lite" as const,
- requirePlanApproval: false
+ id: 'test',
+ planningMode: 'lite' as const,
+ requirePlanApproval: false,
};
const result = getPlanningPromptPrefix(service, feature);
- expect(result).toContain("Planning Phase (Lite Mode)");
- expect(result).toContain("[PLAN_GENERATED]");
- expect(result).toContain("Feature Request");
+ expect(result).toContain('Planning Phase (Lite Mode)');
+ expect(result).toContain('[PLAN_GENERATED]');
+ expect(result).toContain('Feature Request');
});
- it("should return lite_with_approval prompt for lite mode with approval", () => {
+ it('should return lite_with_approval prompt for lite mode with approval', () => {
const feature = {
- id: "test",
- planningMode: "lite" as const,
- requirePlanApproval: true
+ id: 'test',
+ planningMode: 'lite' as const,
+ requirePlanApproval: true,
};
const result = getPlanningPromptPrefix(service, feature);
- expect(result).toContain("Planning Phase (Lite Mode)");
- expect(result).toContain("[SPEC_GENERATED]");
- expect(result).toContain("DO NOT proceed with implementation");
+ expect(result).toContain('Planning Phase (Lite Mode)');
+ expect(result).toContain('[SPEC_GENERATED]');
+ expect(result).toContain('DO NOT proceed with implementation');
});
- it("should return spec prompt for spec mode", () => {
+ it('should return spec prompt for spec mode', () => {
const feature = {
- id: "test",
- planningMode: "spec" as const
+ id: 'test',
+ planningMode: 'spec' as const,
};
const result = getPlanningPromptPrefix(service, feature);
- expect(result).toContain("Specification Phase (Spec Mode)");
- expect(result).toContain("```tasks");
- expect(result).toContain("T001");
- expect(result).toContain("[TASK_START]");
- expect(result).toContain("[TASK_COMPLETE]");
+ expect(result).toContain('Specification Phase (Spec Mode)');
+ expect(result).toContain('```tasks');
+ expect(result).toContain('T001');
+ expect(result).toContain('[TASK_START]');
+ expect(result).toContain('[TASK_COMPLETE]');
});
- it("should return full prompt for full mode", () => {
+ it('should return full prompt for full mode', () => {
const feature = {
- id: "test",
- planningMode: "full" as const
+ id: 'test',
+ planningMode: 'full' as const,
};
const result = getPlanningPromptPrefix(service, feature);
- expect(result).toContain("Full Specification Phase (Full SDD Mode)");
- expect(result).toContain("Phase 1: Foundation");
- expect(result).toContain("Phase 2: Core Implementation");
- expect(result).toContain("Phase 3: Integration & Testing");
+ expect(result).toContain('Full Specification Phase (Full SDD Mode)');
+ expect(result).toContain('Phase 1: Foundation');
+ expect(result).toContain('Phase 2: Core Implementation');
+ expect(result).toContain('Phase 3: Integration & Testing');
});
- it("should include the separator and Feature Request header", () => {
+ it('should include the separator and Feature Request header', () => {
const feature = {
- id: "test",
- planningMode: "spec" as const
+ id: 'test',
+ planningMode: 'spec' as const,
};
const result = getPlanningPromptPrefix(service, feature);
- expect(result).toContain("---");
- expect(result).toContain("## Feature Request");
+ expect(result).toContain('---');
+ expect(result).toContain('## Feature Request');
});
- it("should instruct agent to NOT output exploration text", () => {
- const modes = ["lite", "spec", "full"] as const;
+ it('should instruct agent to NOT output exploration text', () => {
+ const modes = ['lite', 'spec', 'full'] as const;
for (const mode of modes) {
- const feature = { id: "test", planningMode: mode };
+ const feature = { id: 'test', planningMode: mode };
const result = getPlanningPromptPrefix(service, feature);
- expect(result).toContain("Do NOT output exploration text");
- expect(result).toContain("Start DIRECTLY");
+ expect(result).toContain('Do NOT output exploration text');
+ expect(result).toContain('Start DIRECTLY');
}
});
});
- describe("parseTasksFromSpec (via module)", () => {
+ describe('parseTasksFromSpec (via module)', () => {
// We need to test the module-level function
// Import it directly for testing
- it("should parse tasks from a valid tasks block", async () => {
+ it('should parse tasks from a valid tasks block', async () => {
// This tests the internal logic through integration
// The function is module-level, so we verify behavior through the service
const specContent = `
@@ -123,12 +123,12 @@ describe("auto-mode-service.ts - Planning Mode", () => {
`;
// Since parseTasksFromSpec is a module-level function,
// we verify its behavior indirectly through plan parsing
- expect(specContent).toContain("T001");
- expect(specContent).toContain("T002");
- expect(specContent).toContain("T003");
+ expect(specContent).toContain('T001');
+ expect(specContent).toContain('T002');
+ expect(specContent).toContain('T003');
});
- it("should handle tasks block with phases", () => {
+ it('should handle tasks block with phases', () => {
const specContent = `
\`\`\`tasks
## Phase 1: Setup
@@ -139,190 +139,191 @@ describe("auto-mode-service.ts - Planning Mode", () => {
- [ ] T003: Create main module | File: src/index.ts
\`\`\`
`;
- expect(specContent).toContain("Phase 1");
- expect(specContent).toContain("Phase 2");
- expect(specContent).toContain("T001");
- expect(specContent).toContain("T003");
+ expect(specContent).toContain('Phase 1');
+ expect(specContent).toContain('Phase 2');
+ expect(specContent).toContain('T001');
+ expect(specContent).toContain('T003');
});
});
- describe("plan approval flow", () => {
- it("should track pending approvals correctly", () => {
- expect(service.hasPendingApproval("test-feature")).toBe(false);
+ describe('plan approval flow', () => {
+ it('should track pending approvals correctly', () => {
+ expect(service.hasPendingApproval('test-feature')).toBe(false);
});
- it("should allow cancelling non-existent approval without error", () => {
- expect(() => service.cancelPlanApproval("non-existent")).not.toThrow();
+ it('should allow cancelling non-existent approval without error', () => {
+ expect(() => service.cancelPlanApproval('non-existent')).not.toThrow();
});
- it("should return running features count after stop", async () => {
+ it('should return running features count after stop', async () => {
const count = await service.stopAutoLoop();
expect(count).toBe(0);
});
});
- describe("resolvePlanApproval", () => {
- it("should return error when no pending approval exists", async () => {
+ describe('resolvePlanApproval', () => {
+ it('should return error when no pending approval exists', async () => {
const result = await service.resolvePlanApproval(
- "non-existent-feature",
+ 'non-existent-feature',
true,
undefined,
undefined,
undefined
);
expect(result.success).toBe(false);
- expect(result.error).toContain("No pending approval");
+ expect(result.error).toContain('No pending approval');
});
- it("should handle approval with edited plan", async () => {
+ it('should handle approval with edited plan', async () => {
// Without a pending approval, this should fail gracefully
const result = await service.resolvePlanApproval(
- "test-feature",
+ 'test-feature',
true,
- "Edited plan content",
+ 'Edited plan content',
undefined,
undefined
);
expect(result.success).toBe(false);
});
- it("should handle rejection with feedback", async () => {
+ it('should handle rejection with feedback', async () => {
const result = await service.resolvePlanApproval(
- "test-feature",
+ 'test-feature',
false,
undefined,
- "Please add more details",
+ 'Please add more details',
undefined
);
expect(result.success).toBe(false);
});
});
- describe("buildFeaturePrompt", () => {
+ describe('buildFeaturePrompt', () => {
const buildFeaturePrompt = (svc: any, feature: any) => {
return svc.buildFeaturePrompt(feature);
};
- it("should include feature ID and description", () => {
+ it('should include feature ID and description', () => {
const feature = {
- id: "feat-123",
- description: "Add user authentication",
+ id: 'feat-123',
+ description: 'Add user authentication',
};
const result = buildFeaturePrompt(service, feature);
- expect(result).toContain("feat-123");
- expect(result).toContain("Add user authentication");
+ expect(result).toContain('feat-123');
+ expect(result).toContain('Add user authentication');
});
- it("should include specification when present", () => {
+ it('should include specification when present', () => {
const feature = {
- id: "feat-123",
- description: "Test feature",
- spec: "Detailed specification here",
+ id: 'feat-123',
+ description: 'Test feature',
+ spec: 'Detailed specification here',
};
const result = buildFeaturePrompt(service, feature);
- expect(result).toContain("Specification:");
- expect(result).toContain("Detailed specification here");
+ expect(result).toContain('Specification:');
+ expect(result).toContain('Detailed specification here');
});
- it("should include image paths when present", () => {
+ it('should include image paths when present', () => {
const feature = {
- id: "feat-123",
- description: "Test feature",
+ id: 'feat-123',
+ description: 'Test feature',
imagePaths: [
- { path: "/tmp/image1.png", filename: "image1.png", mimeType: "image/png" },
- "/tmp/image2.jpg",
+ { path: '/tmp/image1.png', filename: 'image1.png', mimeType: 'image/png' },
+ '/tmp/image2.jpg',
],
};
const result = buildFeaturePrompt(service, feature);
- expect(result).toContain("Context Images Attached");
- expect(result).toContain("image1.png");
- expect(result).toContain("/tmp/image2.jpg");
+ expect(result).toContain('Context Images Attached');
+ expect(result).toContain('image1.png');
+ expect(result).toContain('/tmp/image2.jpg');
});
- it("should include summary tags instruction", () => {
+ it('should include summary tags instruction', () => {
const feature = {
- id: "feat-123",
- description: "Test feature",
+ id: 'feat-123',
+ description: 'Test feature',
};
const result = buildFeaturePrompt(service, feature);
-      expect(result).toContain("<summary>");
-      expect(result).toContain("</summary>");
+      expect(result).toContain('<summary>');
+      expect(result).toContain('</summary>');
});
});
- describe("extractTitleFromDescription", () => {
+ describe('extractTitleFromDescription', () => {
const extractTitle = (svc: any, description: string) => {
return svc.extractTitleFromDescription(description);
};
it("should return 'Untitled Feature' for empty description", () => {
- expect(extractTitle(service, "")).toBe("Untitled Feature");
- expect(extractTitle(service, " ")).toBe("Untitled Feature");
+ expect(extractTitle(service, '')).toBe('Untitled Feature');
+ expect(extractTitle(service, ' ')).toBe('Untitled Feature');
});
- it("should return first line if under 60 characters", () => {
- const description = "Add user login\nWith email validation";
- expect(extractTitle(service, description)).toBe("Add user login");
+ it('should return first line if under 60 characters', () => {
+ const description = 'Add user login\nWith email validation';
+ expect(extractTitle(service, description)).toBe('Add user login');
});
- it("should truncate long first lines to 60 characters", () => {
- const description = "This is a very long feature description that exceeds the sixty character limit significantly";
+ it('should truncate long first lines to 60 characters', () => {
+ const description =
+ 'This is a very long feature description that exceeds the sixty character limit significantly';
const result = extractTitle(service, description);
expect(result.length).toBe(60);
- expect(result).toContain("...");
+ expect(result).toContain('...');
});
});
- describe("PLANNING_PROMPTS structure", () => {
+ describe('PLANNING_PROMPTS structure', () => {
const getPlanningPromptPrefix = (svc: any, feature: any) => {
return svc.getPlanningPromptPrefix(feature);
};
- it("should have all required planning modes", () => {
- const modes = ["lite", "spec", "full"] as const;
+ it('should have all required planning modes', () => {
+ const modes = ['lite', 'spec', 'full'] as const;
for (const mode of modes) {
- const feature = { id: "test", planningMode: mode };
+ const feature = { id: 'test', planningMode: mode };
const result = getPlanningPromptPrefix(service, feature);
expect(result.length).toBeGreaterThan(100);
}
});
- it("lite prompt should include correct structure", () => {
- const feature = { id: "test", planningMode: "lite" as const };
+ it('lite prompt should include correct structure', () => {
+ const feature = { id: 'test', planningMode: 'lite' as const };
const result = getPlanningPromptPrefix(service, feature);
- expect(result).toContain("Goal");
- expect(result).toContain("Approach");
- expect(result).toContain("Files to Touch");
- expect(result).toContain("Tasks");
- expect(result).toContain("Risks");
+ expect(result).toContain('Goal');
+ expect(result).toContain('Approach');
+ expect(result).toContain('Files to Touch');
+ expect(result).toContain('Tasks');
+ expect(result).toContain('Risks');
});
- it("spec prompt should include task format instructions", () => {
- const feature = { id: "test", planningMode: "spec" as const };
+ it('spec prompt should include task format instructions', () => {
+ const feature = { id: 'test', planningMode: 'spec' as const };
const result = getPlanningPromptPrefix(service, feature);
- expect(result).toContain("Problem");
- expect(result).toContain("Solution");
- expect(result).toContain("Acceptance Criteria");
- expect(result).toContain("GIVEN-WHEN-THEN");
- expect(result).toContain("Implementation Tasks");
- expect(result).toContain("Verification");
+ expect(result).toContain('Problem');
+ expect(result).toContain('Solution');
+ expect(result).toContain('Acceptance Criteria');
+ expect(result).toContain('GIVEN-WHEN-THEN');
+ expect(result).toContain('Implementation Tasks');
+ expect(result).toContain('Verification');
});
- it("full prompt should include phases", () => {
- const feature = { id: "test", planningMode: "full" as const };
+ it('full prompt should include phases', () => {
+ const feature = { id: 'test', planningMode: 'full' as const };
const result = getPlanningPromptPrefix(service, feature);
- expect(result).toContain("Problem Statement");
- expect(result).toContain("User Story");
- expect(result).toContain("Technical Context");
- expect(result).toContain("Non-Goals");
- expect(result).toContain("Phase 1");
- expect(result).toContain("Phase 2");
- expect(result).toContain("Phase 3");
+ expect(result).toContain('Problem Statement');
+ expect(result).toContain('User Story');
+ expect(result).toContain('Technical Context');
+ expect(result).toContain('Non-Goals');
+ expect(result).toContain('Phase 1');
+ expect(result).toContain('Phase 2');
+ expect(result).toContain('Phase 3');
});
});
- describe("status management", () => {
- it("should report correct status", () => {
+ describe('status management', () => {
+ it('should report correct status', () => {
const status = service.getStatus();
expect(status.runningFeatures).toEqual([]);
expect(status.isRunning).toBe(false);
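
Of the private helpers exercised above, extractTitleFromDescription is the one fully pinned down by its assertions: a fallback title for empty input, the first line passed through when it is under 60 characters, and truncation to exactly 60 characters ending in an ellipsis. A sketch consistent with those expectations follows; the 57-character cut plus '...' is an assumption chosen to satisfy the length check, not necessarily the production constant.

// Sketch matching the extractTitleFromDescription assertions above.
function extractTitleFromDescription(description: string): string {
  const firstLine = (description ?? '').split('\n')[0].trim();
  if (firstLine.length === 0) return 'Untitled Feature'; // empty or whitespace-only input
  if (firstLine.length < 60) return firstLine;           // short first line used as-is
  return firstLine.slice(0, 57) + '...';                 // 57 + 3 = 60 characters total
}
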
diff --git a/apps/server/tests/unit/services/auto-mode-service.test.ts b/apps/server/tests/unit/services/auto-mode-service.test.ts
index f108a638e..ec0959d73 100644
--- a/apps/server/tests/unit/services/auto-mode-service.test.ts
+++ b/apps/server/tests/unit/services/auto-mode-service.test.ts
@@ -1,7 +1,7 @@
-import { describe, it, expect, vi, beforeEach } from "vitest";
-import { AutoModeService } from "@/services/auto-mode-service.js";
+import { describe, it, expect, vi, beforeEach } from 'vitest';
+import { AutoModeService } from '@/services/auto-mode-service.js';
-describe("auto-mode-service.ts", () => {
+describe('auto-mode-service.ts', () => {
let service: AutoModeService;
const mockEvents = {
subscribe: vi.fn(),
@@ -13,29 +13,27 @@ describe("auto-mode-service.ts", () => {
service = new AutoModeService(mockEvents as any);
});
- describe("constructor", () => {
- it("should initialize with event emitter", () => {
+ describe('constructor', () => {
+ it('should initialize with event emitter', () => {
expect(service).toBeDefined();
});
});
- describe("startAutoLoop", () => {
- it("should throw if auto mode is already running", async () => {
+ describe('startAutoLoop', () => {
+ it('should throw if auto mode is already running', async () => {
// Start first loop
- const promise1 = service.startAutoLoop("/test/project", 3);
+ const promise1 = service.startAutoLoop('/test/project', 3);
// Try to start second loop
- await expect(
- service.startAutoLoop("/test/project", 3)
- ).rejects.toThrow("already running");
+ await expect(service.startAutoLoop('/test/project', 3)).rejects.toThrow('already running');
// Cleanup
await service.stopAutoLoop();
await promise1.catch(() => {});
});
- it("should emit auto mode start event", async () => {
- const promise = service.startAutoLoop("/test/project", 3);
+ it('should emit auto mode start event', async () => {
+ const promise = service.startAutoLoop('/test/project', 3);
// Give it time to emit the event
await new Promise((resolve) => setTimeout(resolve, 10));
@@ -43,7 +41,7 @@ describe("auto-mode-service.ts", () => {
expect(mockEvents.emit).toHaveBeenCalledWith(
expect.any(String),
expect.objectContaining({
- message: expect.stringContaining("Auto mode started"),
+ message: expect.stringContaining('Auto mode started'),
})
);
@@ -53,9 +51,9 @@ describe("auto-mode-service.ts", () => {
});
});
- describe("stopAutoLoop", () => {
- it("should stop the auto loop", async () => {
- const promise = service.startAutoLoop("/test/project", 3);
+ describe('stopAutoLoop', () => {
+ it('should stop the auto loop', async () => {
+ const promise = service.startAutoLoop('/test/project', 3);
const runningCount = await service.stopAutoLoop();
@@ -63,7 +61,7 @@ describe("auto-mode-service.ts", () => {
await promise.catch(() => {});
});
- it("should return 0 when not running", async () => {
+ it('should return 0 when not running', async () => {
const runningCount = await service.stopAutoLoop();
expect(runningCount).toBe(0);
});
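
The start/stop contract these tests pin down is small: a second start rejects with 'already running', a start emits an event whose message contains 'Auto mode started', and stop resolves to the number of running features (0 when idle). A compact sketch of that guard is below; only the asserted strings and the numeric return are taken from the tests, while field names and the event channel are assumed for illustration.

// Sketch of the start/stop guard behaviour exercised above.
class AutoLoopGuard {
  private running = false;
  private runningFeatureIds: string[] = [];

  constructor(private events: { emit: (channel: string, payload: unknown) => void }) {}

  async start(projectPath: string, maxConcurrent: number): Promise<void> {
    if (this.running) {
      // Rejects the returned promise, matching rejects.toThrow('already running').
      throw new Error('Auto mode is already running');
    }
    this.running = true;
    this.events.emit('auto-mode:event', {
      message: `Auto mode started for ${projectPath} (max ${maxConcurrent} concurrent)`,
    });
  }

  async stop(): Promise<number> {
    const count = this.runningFeatureIds.length;
    this.running = false;
    this.runningFeatureIds = [];
    return count; // 0 when nothing was running, matching the test
  }
}
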
diff --git a/apps/server/tests/unit/services/auto-mode-task-parsing.test.ts b/apps/server/tests/unit/services/auto-mode-task-parsing.test.ts
index becdd3090..984e38c59 100644
--- a/apps/server/tests/unit/services/auto-mode-task-parsing.test.ts
+++ b/apps/server/tests/unit/services/auto-mode-task-parsing.test.ts
@@ -1,4 +1,4 @@
-import { describe, it, expect } from "vitest";
+import { describe, it, expect } from 'vitest';
/**
* Test the task parsing logic by reimplementing the parsing functions
@@ -88,59 +88,59 @@ function parseTasksFromSpec(specContent: string): ParsedTask[] {
return tasks;
}
-describe("Task Parsing", () => {
- describe("parseTaskLine", () => {
- it("should parse task with file path", () => {
- const line = "- [ ] T001: Create user model | File: src/models/user.ts";
+describe('Task Parsing', () => {
+ describe('parseTaskLine', () => {
+ it('should parse task with file path', () => {
+ const line = '- [ ] T001: Create user model | File: src/models/user.ts';
const result = parseTaskLine(line);
expect(result).toEqual({
- id: "T001",
- description: "Create user model",
- filePath: "src/models/user.ts",
+ id: 'T001',
+ description: 'Create user model',
+ filePath: 'src/models/user.ts',
phase: undefined,
- status: "pending",
+ status: 'pending',
});
});
- it("should parse task without file path", () => {
- const line = "- [ ] T002: Setup database connection";
+ it('should parse task without file path', () => {
+ const line = '- [ ] T002: Setup database connection';
const result = parseTaskLine(line);
expect(result).toEqual({
- id: "T002",
- description: "Setup database connection",
+ id: 'T002',
+ description: 'Setup database connection',
phase: undefined,
- status: "pending",
+ status: 'pending',
});
});
- it("should include phase when provided", () => {
- const line = "- [ ] T003: Write tests | File: tests/user.test.ts";
- const result = parseTaskLine(line, "Phase 1: Foundation");
- expect(result?.phase).toBe("Phase 1: Foundation");
+ it('should include phase when provided', () => {
+ const line = '- [ ] T003: Write tests | File: tests/user.test.ts';
+ const result = parseTaskLine(line, 'Phase 1: Foundation');
+ expect(result?.phase).toBe('Phase 1: Foundation');
});
- it("should return null for invalid line", () => {
- expect(parseTaskLine("- [ ] Invalid format")).toBeNull();
- expect(parseTaskLine("Not a task line")).toBeNull();
- expect(parseTaskLine("")).toBeNull();
+ it('should return null for invalid line', () => {
+ expect(parseTaskLine('- [ ] Invalid format')).toBeNull();
+ expect(parseTaskLine('Not a task line')).toBeNull();
+ expect(parseTaskLine('')).toBeNull();
});
- it("should handle multi-word descriptions", () => {
- const line = "- [ ] T004: Implement user authentication with JWT tokens | File: src/auth.ts";
+ it('should handle multi-word descriptions', () => {
+ const line = '- [ ] T004: Implement user authentication with JWT tokens | File: src/auth.ts';
const result = parseTaskLine(line);
- expect(result?.description).toBe("Implement user authentication with JWT tokens");
+ expect(result?.description).toBe('Implement user authentication with JWT tokens');
});
- it("should trim whitespace from description and file path", () => {
- const line = "- [ ] T005: Create API endpoint | File: src/routes/api.ts ";
+ it('should trim whitespace from description and file path', () => {
+ const line = '- [ ] T005: Create API endpoint | File: src/routes/api.ts ';
const result = parseTaskLine(line);
- expect(result?.description).toBe("Create API endpoint");
- expect(result?.filePath).toBe("src/routes/api.ts");
+ expect(result?.description).toBe('Create API endpoint');
+ expect(result?.filePath).toBe('src/routes/api.ts');
});
});
- describe("parseTasksFromSpec", () => {
- it("should parse tasks from a tasks code block", () => {
+ describe('parseTasksFromSpec', () => {
+ it('should parse tasks from a tasks code block', () => {
const specContent = `
## Specification
@@ -157,12 +157,12 @@ Some notes here.
`;
const tasks = parseTasksFromSpec(specContent);
expect(tasks).toHaveLength(3);
- expect(tasks[0].id).toBe("T001");
- expect(tasks[1].id).toBe("T002");
- expect(tasks[2].id).toBe("T003");
+ expect(tasks[0].id).toBe('T001');
+ expect(tasks[1].id).toBe('T002');
+ expect(tasks[2].id).toBe('T003');
});
- it("should parse tasks with phases", () => {
+ it('should parse tasks with phases', () => {
const specContent = `
\`\`\`tasks
## Phase 1: Foundation
@@ -179,20 +179,20 @@ Some notes here.
`;
const tasks = parseTasksFromSpec(specContent);
expect(tasks).toHaveLength(5);
- expect(tasks[0].phase).toBe("Phase 1: Foundation");
- expect(tasks[1].phase).toBe("Phase 1: Foundation");
- expect(tasks[2].phase).toBe("Phase 2: Implementation");
- expect(tasks[3].phase).toBe("Phase 2: Implementation");
- expect(tasks[4].phase).toBe("Phase 3: Testing");
+ expect(tasks[0].phase).toBe('Phase 1: Foundation');
+ expect(tasks[1].phase).toBe('Phase 1: Foundation');
+ expect(tasks[2].phase).toBe('Phase 2: Implementation');
+ expect(tasks[3].phase).toBe('Phase 2: Implementation');
+ expect(tasks[4].phase).toBe('Phase 3: Testing');
});
- it("should return empty array for content without tasks", () => {
- const specContent = "Just some text without any tasks";
+ it('should return empty array for content without tasks', () => {
+ const specContent = 'Just some text without any tasks';
const tasks = parseTasksFromSpec(specContent);
expect(tasks).toEqual([]);
});
- it("should fallback to finding task lines outside code block", () => {
+ it('should fallback to finding task lines outside code block', () => {
const specContent = `
## Implementation Plan
@@ -201,11 +201,11 @@ Some notes here.
`;
const tasks = parseTasksFromSpec(specContent);
expect(tasks).toHaveLength(2);
- expect(tasks[0].id).toBe("T001");
- expect(tasks[1].id).toBe("T002");
+ expect(tasks[0].id).toBe('T001');
+ expect(tasks[1].id).toBe('T002');
});
- it("should handle empty tasks block", () => {
+ it('should handle empty tasks block', () => {
const specContent = `
\`\`\`tasks
\`\`\`
@@ -214,7 +214,7 @@ Some notes here.
expect(tasks).toEqual([]);
});
- it("should handle mixed valid and invalid lines", () => {
+ it('should handle mixed valid and invalid lines', () => {
const specContent = `
\`\`\`tasks
- [ ] T001: Valid task | File: src/valid.ts
@@ -227,7 +227,7 @@ Some other text
expect(tasks).toHaveLength(2);
});
- it("should preserve task order", () => {
+ it('should preserve task order', () => {
const specContent = `
\`\`\`tasks
- [ ] T003: Third
@@ -236,12 +236,12 @@ Some other text
\`\`\`
`;
const tasks = parseTasksFromSpec(specContent);
- expect(tasks[0].id).toBe("T003");
- expect(tasks[1].id).toBe("T001");
- expect(tasks[2].id).toBe("T002");
+ expect(tasks[0].id).toBe('T003');
+ expect(tasks[1].id).toBe('T001');
+ expect(tasks[2].id).toBe('T002');
});
- it("should handle task IDs with different numbers", () => {
+ it('should handle task IDs with different numbers', () => {
const specContent = `
\`\`\`tasks
- [ ] T001: First
@@ -251,14 +251,14 @@ Some other text
`;
const tasks = parseTasksFromSpec(specContent);
expect(tasks).toHaveLength(3);
- expect(tasks[0].id).toBe("T001");
- expect(tasks[1].id).toBe("T010");
- expect(tasks[2].id).toBe("T100");
+ expect(tasks[0].id).toBe('T001');
+ expect(tasks[1].id).toBe('T010');
+ expect(tasks[2].id).toBe('T100');
});
});
- describe("spec content generation patterns", () => {
- it("should match the expected lite mode output format", () => {
+ describe('spec content generation patterns', () => {
+ it('should match the expected lite mode output format', () => {
const liteModeOutput = `
1. **Goal**: Implement user registration
2. **Approach**: Create form component, add validation, connect to API
@@ -271,12 +271,12 @@ Some other text
[PLAN_GENERATED] Planning outline complete.
`;
- expect(liteModeOutput).toContain("[PLAN_GENERATED]");
- expect(liteModeOutput).toContain("Goal");
- expect(liteModeOutput).toContain("Approach");
+ expect(liteModeOutput).toContain('[PLAN_GENERATED]');
+ expect(liteModeOutput).toContain('Goal');
+ expect(liteModeOutput).toContain('Approach');
});
- it("should match the expected spec mode output format", () => {
+ it('should match the expected spec mode output format', () => {
const specModeOutput = `
1. **Problem**: Users cannot register for accounts
@@ -300,12 +300,12 @@ Some other text
[SPEC_GENERATED] Please review the specification above.
`;
- expect(specModeOutput).toContain("[SPEC_GENERATED]");
- expect(specModeOutput).toContain("```tasks");
- expect(specModeOutput).toContain("T001");
+ expect(specModeOutput).toContain('[SPEC_GENERATED]');
+ expect(specModeOutput).toContain('```tasks');
+ expect(specModeOutput).toContain('T001');
});
- it("should match the expected full mode output format", () => {
+ it('should match the expected full mode output format', () => {
const fullModeOutput = `
1. **Problem Statement**: Users need ability to create accounts
@@ -336,10 +336,10 @@ Some other text
[SPEC_GENERATED] Please review the comprehensive specification above.
`;
- expect(fullModeOutput).toContain("Phase 1");
- expect(fullModeOutput).toContain("Phase 2");
- expect(fullModeOutput).toContain("Phase 3");
- expect(fullModeOutput).toContain("[SPEC_GENERATED]");
+ expect(fullModeOutput).toContain('Phase 1');
+ expect(fullModeOutput).toContain('Phase 2');
+ expect(fullModeOutput).toContain('Phase 3');
+ expect(fullModeOutput).toContain('[SPEC_GENERATED]');
});
});
});
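
The test file reimplements parseTaskLine and parseTasksFromSpec at module level (their bodies are elided by the hunk boundaries above). For reference, a regex-based parseTaskLine consistent with the assertions, covering the grammar `- [ ] T001: <description> | File: <path>`, is sketched below; the exact regex and the non-'pending' status value are assumptions, not the reimplementation from the file.

interface ParsedTask {
  id: string;
  description: string;
  filePath?: string;
  phase?: string;
  status: 'pending' | 'done'; // only 'pending' is asserted; 'done' is an assumed counterpart
}

// Matches: "- [ ] T001: Create user model | File: src/models/user.ts"
const TASK_LINE = /^- \[( |x)\] (T\d+):\s*(.+?)(?:\s*\|\s*File:\s*(.+?))?$/;

function parseTaskLine(line: string, phase?: string): ParsedTask | null {
  const match = line.trim().match(TASK_LINE);
  if (!match) return null; // invalid lines (no "T###:" id) return null
  const [, check, id, description, filePath] = match;
  return {
    id,
    description: description.trim(),
    filePath: filePath?.trim(),
    phase,
    status: check === 'x' ? 'done' : 'pending',
  };
}
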
diff --git a/apps/server/tests/unit/services/feature-loader.test.ts b/apps/server/tests/unit/services/feature-loader.test.ts
index 2a10ddf1e..f5f54e81a 100644
--- a/apps/server/tests/unit/services/feature-loader.test.ts
+++ b/apps/server/tests/unit/services/feature-loader.test.ts
@@ -1,66 +1,66 @@
-import { describe, it, expect, vi, beforeEach } from "vitest";
-import { FeatureLoader } from "@/services/feature-loader.js";
-import * as fs from "fs/promises";
-import path from "path";
+import { describe, it, expect, vi, beforeEach } from 'vitest';
+import { FeatureLoader } from '@/services/feature-loader.js';
+import * as fs from 'fs/promises';
+import path from 'path';
-vi.mock("fs/promises");
+vi.mock('fs/promises');
-describe("feature-loader.ts", () => {
+describe('feature-loader.ts', () => {
let loader: FeatureLoader;
- const testProjectPath = "/test/project";
+ const testProjectPath = '/test/project';
beforeEach(() => {
vi.clearAllMocks();
loader = new FeatureLoader();
});
- describe("getFeaturesDir", () => {
- it("should return features directory path", () => {
+ describe('getFeaturesDir', () => {
+ it('should return features directory path', () => {
const result = loader.getFeaturesDir(testProjectPath);
- expect(result).toContain("test");
- expect(result).toContain("project");
- expect(result).toContain(".automaker");
- expect(result).toContain("features");
+ expect(result).toContain('test');
+ expect(result).toContain('project');
+ expect(result).toContain('.automaker');
+ expect(result).toContain('features');
});
});
- describe("getFeatureImagesDir", () => {
- it("should return feature images directory path", () => {
- const result = loader.getFeatureImagesDir(testProjectPath, "feature-123");
- expect(result).toContain("features");
- expect(result).toContain("feature-123");
- expect(result).toContain("images");
+ describe('getFeatureImagesDir', () => {
+ it('should return feature images directory path', () => {
+ const result = loader.getFeatureImagesDir(testProjectPath, 'feature-123');
+ expect(result).toContain('features');
+ expect(result).toContain('feature-123');
+ expect(result).toContain('images');
});
});
- describe("getFeatureDir", () => {
- it("should return feature directory path", () => {
- const result = loader.getFeatureDir(testProjectPath, "feature-123");
- expect(result).toContain("features");
- expect(result).toContain("feature-123");
+ describe('getFeatureDir', () => {
+ it('should return feature directory path', () => {
+ const result = loader.getFeatureDir(testProjectPath, 'feature-123');
+ expect(result).toContain('features');
+ expect(result).toContain('feature-123');
});
});
- describe("getFeatureJsonPath", () => {
- it("should return feature.json path", () => {
- const result = loader.getFeatureJsonPath(testProjectPath, "feature-123");
- expect(result).toContain("features");
- expect(result).toContain("feature-123");
- expect(result).toContain("feature.json");
+ describe('getFeatureJsonPath', () => {
+ it('should return feature.json path', () => {
+ const result = loader.getFeatureJsonPath(testProjectPath, 'feature-123');
+ expect(result).toContain('features');
+ expect(result).toContain('feature-123');
+ expect(result).toContain('feature.json');
});
});
- describe("getAgentOutputPath", () => {
- it("should return agent-output.md path", () => {
- const result = loader.getAgentOutputPath(testProjectPath, "feature-123");
- expect(result).toContain("features");
- expect(result).toContain("feature-123");
- expect(result).toContain("agent-output.md");
+ describe('getAgentOutputPath', () => {
+ it('should return agent-output.md path', () => {
+ const result = loader.getAgentOutputPath(testProjectPath, 'feature-123');
+ expect(result).toContain('features');
+ expect(result).toContain('feature-123');
+ expect(result).toContain('agent-output.md');
});
});
- describe("generateFeatureId", () => {
- it("should generate unique feature ID with timestamp", () => {
+ describe('generateFeatureId', () => {
+ it('should generate unique feature ID with timestamp', () => {
const id1 = loader.generateFeatureId();
const id2 = loader.generateFeatureId();
@@ -75,381 +75,371 @@ describe("feature-loader.ts", () => {
});
});
- describe("getAll", () => {
+ describe('getAll', () => {
it("should return empty array when features directory doesn't exist", async () => {
- vi.mocked(fs.access).mockRejectedValue(new Error("ENOENT"));
+ vi.mocked(fs.access).mockRejectedValue(new Error('ENOENT'));
const result = await loader.getAll(testProjectPath);
expect(result).toEqual([]);
});
- it("should load all features from feature directories", async () => {
+ it('should load all features from feature directories', async () => {
vi.mocked(fs.access).mockResolvedValue(undefined);
vi.mocked(fs.readdir).mockResolvedValue([
- { name: "feature-1", isDirectory: () => true } as any,
- { name: "feature-2", isDirectory: () => true } as any,
- { name: "file.txt", isDirectory: () => false } as any,
+ { name: 'feature-1', isDirectory: () => true } as any,
+ { name: 'feature-2', isDirectory: () => true } as any,
+ { name: 'file.txt', isDirectory: () => false } as any,
]);
vi.mocked(fs.readFile)
.mockResolvedValueOnce(
JSON.stringify({
- id: "feature-1",
- category: "ui",
- description: "Feature 1",
+ id: 'feature-1',
+ category: 'ui',
+ description: 'Feature 1',
})
)
.mockResolvedValueOnce(
JSON.stringify({
- id: "feature-2",
- category: "backend",
- description: "Feature 2",
+ id: 'feature-2',
+ category: 'backend',
+ description: 'Feature 2',
})
);
const result = await loader.getAll(testProjectPath);
expect(result).toHaveLength(2);
- expect(result[0].id).toBe("feature-1");
- expect(result[1].id).toBe("feature-2");
+ expect(result[0].id).toBe('feature-1');
+ expect(result[1].id).toBe('feature-2');
});
- it("should skip features without id field", async () => {
+ it('should skip features without id field', async () => {
vi.mocked(fs.access).mockResolvedValue(undefined);
vi.mocked(fs.readdir).mockResolvedValue([
- { name: "feature-1", isDirectory: () => true } as any,
- { name: "feature-2", isDirectory: () => true } as any,
+ { name: 'feature-1', isDirectory: () => true } as any,
+ { name: 'feature-2', isDirectory: () => true } as any,
]);
- const consoleSpy = vi.spyOn(console, "warn").mockImplementation(() => {});
+ const consoleSpy = vi.spyOn(console, 'warn').mockImplementation(() => {});
vi.mocked(fs.readFile)
.mockResolvedValueOnce(
JSON.stringify({
- category: "ui",
- description: "Missing ID",
+ category: 'ui',
+ description: 'Missing ID',
})
)
.mockResolvedValueOnce(
JSON.stringify({
- id: "feature-2",
- category: "backend",
- description: "Feature 2",
+ id: 'feature-2',
+ category: 'backend',
+ description: 'Feature 2',
})
);
const result = await loader.getAll(testProjectPath);
expect(result).toHaveLength(1);
- expect(result[0].id).toBe("feature-2");
+ expect(result[0].id).toBe('feature-2');
expect(consoleSpy).toHaveBeenCalledWith(
- "[FeatureLoader]",
+ '[FeatureLoader]',
expect.stringContaining("missing required 'id' field")
);
consoleSpy.mockRestore();
});
- it("should skip features with missing feature.json", async () => {
+ it('should skip features with missing feature.json', async () => {
vi.mocked(fs.access).mockResolvedValue(undefined);
vi.mocked(fs.readdir).mockResolvedValue([
- { name: "feature-1", isDirectory: () => true } as any,
- { name: "feature-2", isDirectory: () => true } as any,
+ { name: 'feature-1', isDirectory: () => true } as any,
+ { name: 'feature-2', isDirectory: () => true } as any,
]);
- const error: any = new Error("File not found");
- error.code = "ENOENT";
+ const error: any = new Error('File not found');
+ error.code = 'ENOENT';
vi.mocked(fs.readFile)
.mockRejectedValueOnce(error)
.mockResolvedValueOnce(
JSON.stringify({
- id: "feature-2",
- category: "backend",
- description: "Feature 2",
+ id: 'feature-2',
+ category: 'backend',
+ description: 'Feature 2',
})
);
const result = await loader.getAll(testProjectPath);
expect(result).toHaveLength(1);
- expect(result[0].id).toBe("feature-2");
+ expect(result[0].id).toBe('feature-2');
});
- it("should handle malformed JSON gracefully", async () => {
+ it('should handle malformed JSON gracefully', async () => {
vi.mocked(fs.access).mockResolvedValue(undefined);
vi.mocked(fs.readdir).mockResolvedValue([
- { name: "feature-1", isDirectory: () => true } as any,
+ { name: 'feature-1', isDirectory: () => true } as any,
]);
- const consoleSpy = vi.spyOn(console, "warn").mockImplementation(() => {});
+ const consoleSpy = vi.spyOn(console, 'warn').mockImplementation(() => {});
- vi.mocked(fs.readFile).mockResolvedValue("invalid json{");
+ vi.mocked(fs.readFile).mockResolvedValue('invalid json{');
const result = await loader.getAll(testProjectPath);
expect(result).toEqual([]);
expect(consoleSpy).toHaveBeenCalledWith(
- "[FeatureLoader]",
- expect.stringContaining("Failed to parse feature.json")
+ '[FeatureLoader]',
+ expect.stringContaining('Failed to parse feature.json')
);
consoleSpy.mockRestore();
});
- it("should sort features by creation order (timestamp)", async () => {
+ it('should sort features by creation order (timestamp)', async () => {
vi.mocked(fs.access).mockResolvedValue(undefined);
vi.mocked(fs.readdir).mockResolvedValue([
- { name: "feature-3", isDirectory: () => true } as any,
- { name: "feature-1", isDirectory: () => true } as any,
- { name: "feature-2", isDirectory: () => true } as any,
+ { name: 'feature-3', isDirectory: () => true } as any,
+ { name: 'feature-1', isDirectory: () => true } as any,
+ { name: 'feature-2', isDirectory: () => true } as any,
]);
vi.mocked(fs.readFile)
.mockResolvedValueOnce(
JSON.stringify({
- id: "feature-3000-xyz",
- category: "ui",
+ id: 'feature-3000-xyz',
+ category: 'ui',
})
)
.mockResolvedValueOnce(
JSON.stringify({
- id: "feature-1000-abc",
- category: "ui",
+ id: 'feature-1000-abc',
+ category: 'ui',
})
)
.mockResolvedValueOnce(
JSON.stringify({
- id: "feature-2000-def",
- category: "ui",
+ id: 'feature-2000-def',
+ category: 'ui',
})
);
const result = await loader.getAll(testProjectPath);
expect(result).toHaveLength(3);
- expect(result[0].id).toBe("feature-1000-abc");
- expect(result[1].id).toBe("feature-2000-def");
- expect(result[2].id).toBe("feature-3000-xyz");
+ expect(result[0].id).toBe('feature-1000-abc');
+ expect(result[1].id).toBe('feature-2000-def');
+ expect(result[2].id).toBe('feature-3000-xyz');
});
});
- describe("get", () => {
- it("should return feature by ID", async () => {
+ describe('get', () => {
+ it('should return feature by ID', async () => {
const featureData = {
- id: "feature-123",
- category: "ui",
- description: "Test feature",
+ id: 'feature-123',
+ category: 'ui',
+ description: 'Test feature',
};
vi.mocked(fs.readFile).mockResolvedValue(JSON.stringify(featureData));
- const result = await loader.get(testProjectPath, "feature-123");
+ const result = await loader.get(testProjectPath, 'feature-123');
expect(result).toEqual(featureData);
});
it("should return null when feature doesn't exist", async () => {
- const error: any = new Error("File not found");
- error.code = "ENOENT";
+ const error: any = new Error('File not found');
+ error.code = 'ENOENT';
vi.mocked(fs.readFile).mockRejectedValue(error);
- const result = await loader.get(testProjectPath, "feature-123");
+ const result = await loader.get(testProjectPath, 'feature-123');
expect(result).toBeNull();
});
- it("should throw on other errors", async () => {
- vi.mocked(fs.readFile).mockRejectedValue(new Error("Permission denied"));
+ it('should throw on other errors', async () => {
+ vi.mocked(fs.readFile).mockRejectedValue(new Error('Permission denied'));
- await expect(
- loader.get(testProjectPath, "feature-123")
- ).rejects.toThrow("Permission denied");
+ await expect(loader.get(testProjectPath, 'feature-123')).rejects.toThrow('Permission denied');
});
});
- describe("create", () => {
- it("should create new feature", async () => {
+ describe('create', () => {
+ it('should create new feature', async () => {
vi.mocked(fs.mkdir).mockResolvedValue(undefined);
vi.mocked(fs.writeFile).mockResolvedValue(undefined);
const featureData = {
- category: "ui",
- description: "New feature",
+ category: 'ui',
+ description: 'New feature',
};
const result = await loader.create(testProjectPath, featureData);
expect(result).toMatchObject({
- category: "ui",
- description: "New feature",
+ category: 'ui',
+ description: 'New feature',
id: expect.stringMatching(/^feature-/),
});
expect(fs.writeFile).toHaveBeenCalled();
});
- it("should use provided ID if given", async () => {
+ it('should use provided ID if given', async () => {
vi.mocked(fs.mkdir).mockResolvedValue(undefined);
vi.mocked(fs.writeFile).mockResolvedValue(undefined);
const result = await loader.create(testProjectPath, {
- id: "custom-id",
- category: "ui",
- description: "Test",
+ id: 'custom-id',
+ category: 'ui',
+ description: 'Test',
});
- expect(result.id).toBe("custom-id");
+ expect(result.id).toBe('custom-id');
});
- it("should set default category if not provided", async () => {
+ it('should set default category if not provided', async () => {
vi.mocked(fs.mkdir).mockResolvedValue(undefined);
vi.mocked(fs.writeFile).mockResolvedValue(undefined);
const result = await loader.create(testProjectPath, {
- description: "Test",
+ description: 'Test',
});
- expect(result.category).toBe("Uncategorized");
+ expect(result.category).toBe('Uncategorized');
});
});
- describe("update", () => {
- it("should update existing feature", async () => {
+ describe('update', () => {
+ it('should update existing feature', async () => {
vi.mocked(fs.readFile).mockResolvedValue(
JSON.stringify({
- id: "feature-123",
- category: "ui",
- description: "Old description",
+ id: 'feature-123',
+ category: 'ui',
+ description: 'Old description',
})
);
vi.mocked(fs.writeFile).mockResolvedValue(undefined);
- const result = await loader.update(testProjectPath, "feature-123", {
- description: "New description",
+ const result = await loader.update(testProjectPath, 'feature-123', {
+ description: 'New description',
});
- expect(result.description).toBe("New description");
- expect(result.category).toBe("ui");
+ expect(result.description).toBe('New description');
+ expect(result.category).toBe('ui');
expect(fs.writeFile).toHaveBeenCalled();
});
it("should throw if feature doesn't exist", async () => {
- const error: any = new Error("File not found");
- error.code = "ENOENT";
+ const error: any = new Error('File not found');
+ error.code = 'ENOENT';
vi.mocked(fs.readFile).mockRejectedValue(error);
- await expect(
- loader.update(testProjectPath, "feature-123", {})
- ).rejects.toThrow("not found");
+ await expect(loader.update(testProjectPath, 'feature-123', {})).rejects.toThrow('not found');
});
});
- describe("delete", () => {
- it("should delete feature directory", async () => {
+ describe('delete', () => {
+ it('should delete feature directory', async () => {
vi.mocked(fs.rm).mockResolvedValue(undefined);
- const result = await loader.delete(testProjectPath, "feature-123");
+ const result = await loader.delete(testProjectPath, 'feature-123');
expect(result).toBe(true);
- expect(fs.rm).toHaveBeenCalledWith(
- expect.stringContaining("feature-123"),
- { recursive: true, force: true }
- );
+ expect(fs.rm).toHaveBeenCalledWith(expect.stringContaining('feature-123'), {
+ recursive: true,
+ force: true,
+ });
});
- it("should return false on error", async () => {
- vi.mocked(fs.rm).mockRejectedValue(new Error("Permission denied"));
+ it('should return false on error', async () => {
+ vi.mocked(fs.rm).mockRejectedValue(new Error('Permission denied'));
- const consoleSpy = vi.spyOn(console, "error").mockImplementation(() => {});
+ const consoleSpy = vi.spyOn(console, 'error').mockImplementation(() => {});
- const result = await loader.delete(testProjectPath, "feature-123");
+ const result = await loader.delete(testProjectPath, 'feature-123');
expect(result).toBe(false);
expect(consoleSpy).toHaveBeenCalledWith(
- "[FeatureLoader]",
- expect.stringContaining("Failed to delete feature"),
- expect.objectContaining({ message: "Permission denied" })
+ '[FeatureLoader]',
+ expect.stringContaining('Failed to delete feature'),
+ expect.objectContaining({ message: 'Permission denied' })
);
consoleSpy.mockRestore();
});
});
- describe("getAgentOutput", () => {
- it("should return agent output content", async () => {
- vi.mocked(fs.readFile).mockResolvedValue("Agent output content");
+ describe('getAgentOutput', () => {
+ it('should return agent output content', async () => {
+ vi.mocked(fs.readFile).mockResolvedValue('Agent output content');
- const result = await loader.getAgentOutput(testProjectPath, "feature-123");
+ const result = await loader.getAgentOutput(testProjectPath, 'feature-123');
- expect(result).toBe("Agent output content");
+ expect(result).toBe('Agent output content');
});
it("should return null when file doesn't exist", async () => {
- const error: any = new Error("File not found");
- error.code = "ENOENT";
+ const error: any = new Error('File not found');
+ error.code = 'ENOENT';
vi.mocked(fs.readFile).mockRejectedValue(error);
- const result = await loader.getAgentOutput(testProjectPath, "feature-123");
+ const result = await loader.getAgentOutput(testProjectPath, 'feature-123');
expect(result).toBeNull();
});
- it("should throw on other errors", async () => {
- vi.mocked(fs.readFile).mockRejectedValue(new Error("Permission denied"));
+ it('should throw on other errors', async () => {
+ vi.mocked(fs.readFile).mockRejectedValue(new Error('Permission denied'));
- await expect(
- loader.getAgentOutput(testProjectPath, "feature-123")
- ).rejects.toThrow("Permission denied");
+ await expect(loader.getAgentOutput(testProjectPath, 'feature-123')).rejects.toThrow(
+ 'Permission denied'
+ );
});
});
- describe("saveAgentOutput", () => {
- it("should save agent output to file", async () => {
+ describe('saveAgentOutput', () => {
+ it('should save agent output to file', async () => {
vi.mocked(fs.mkdir).mockResolvedValue(undefined);
vi.mocked(fs.writeFile).mockResolvedValue(undefined);
- await loader.saveAgentOutput(
- testProjectPath,
- "feature-123",
- "Output content"
- );
+ await loader.saveAgentOutput(testProjectPath, 'feature-123', 'Output content');
expect(fs.writeFile).toHaveBeenCalledWith(
- expect.stringContaining("agent-output.md"),
- "Output content",
- "utf-8"
+ expect.stringContaining('agent-output.md'),
+ 'Output content',
+ 'utf-8'
);
});
});
- describe("deleteAgentOutput", () => {
- it("should delete agent output file", async () => {
+ describe('deleteAgentOutput', () => {
+ it('should delete agent output file', async () => {
vi.mocked(fs.unlink).mockResolvedValue(undefined);
- await loader.deleteAgentOutput(testProjectPath, "feature-123");
+ await loader.deleteAgentOutput(testProjectPath, 'feature-123');
- expect(fs.unlink).toHaveBeenCalledWith(
- expect.stringContaining("agent-output.md")
- );
+ expect(fs.unlink).toHaveBeenCalledWith(expect.stringContaining('agent-output.md'));
});
- it("should handle missing file gracefully", async () => {
- const error: any = new Error("File not found");
- error.code = "ENOENT";
+ it('should handle missing file gracefully', async () => {
+ const error: any = new Error('File not found');
+ error.code = 'ENOENT';
vi.mocked(fs.unlink).mockRejectedValue(error);
// Should not throw
await expect(
- loader.deleteAgentOutput(testProjectPath, "feature-123")
+ loader.deleteAgentOutput(testProjectPath, 'feature-123')
).resolves.toBeUndefined();
});
- it("should throw on other errors", async () => {
- vi.mocked(fs.unlink).mockRejectedValue(new Error("Permission denied"));
+ it('should throw on other errors', async () => {
+ vi.mocked(fs.unlink).mockRejectedValue(new Error('Permission denied'));
- await expect(
- loader.deleteAgentOutput(testProjectPath, "feature-123")
- ).rejects.toThrow("Permission denied");
+ await expect(loader.deleteAgentOutput(testProjectPath, 'feature-123')).rejects.toThrow(
+ 'Permission denied'
+ );
});
});
});
diff --git a/apps/server/tests/unit/services/settings-service.test.ts b/apps/server/tests/unit/services/settings-service.test.ts
index ecde0fb96..235387bfa 100644
--- a/apps/server/tests/unit/services/settings-service.test.ts
+++ b/apps/server/tests/unit/services/settings-service.test.ts
@@ -1,8 +1,8 @@
-import { describe, it, expect, vi, beforeEach, afterEach } from "vitest";
-import fs from "fs/promises";
-import path from "path";
-import os from "os";
-import { SettingsService } from "@/services/settings-service.js";
+import { describe, it, expect, vi, beforeEach, afterEach } from 'vitest';
+import fs from 'fs/promises';
+import path from 'path';
+import os from 'os';
+import { SettingsService } from '@/services/settings-service.js';
import {
DEFAULT_GLOBAL_SETTINGS,
DEFAULT_CREDENTIALS,
@@ -13,9 +13,9 @@ import {
type GlobalSettings,
type Credentials,
type ProjectSettings,
-} from "@/types/settings.js";
+} from '@/types/settings.js';
-describe("settings-service.ts", () => {
+describe('settings-service.ts', () => {
let testDataDir: string;
let testProjectDir: string;
let settingsService: SettingsService;
@@ -37,120 +37,118 @@ describe("settings-service.ts", () => {
}
});
- describe("getGlobalSettings", () => {
- it("should return default settings when file does not exist", async () => {
+ describe('getGlobalSettings', () => {
+ it('should return default settings when file does not exist', async () => {
const settings = await settingsService.getGlobalSettings();
expect(settings).toEqual(DEFAULT_GLOBAL_SETTINGS);
});
- it("should read and return existing settings", async () => {
+ it('should read and return existing settings', async () => {
const customSettings: GlobalSettings = {
...DEFAULT_GLOBAL_SETTINGS,
- theme: "light",
+ theme: 'light',
sidebarOpen: false,
maxConcurrency: 5,
};
- const settingsPath = path.join(testDataDir, "settings.json");
+ const settingsPath = path.join(testDataDir, 'settings.json');
await fs.writeFile(settingsPath, JSON.stringify(customSettings, null, 2));
const settings = await settingsService.getGlobalSettings();
- expect(settings.theme).toBe("light");
+ expect(settings.theme).toBe('light');
expect(settings.sidebarOpen).toBe(false);
expect(settings.maxConcurrency).toBe(5);
});
- it("should merge with defaults for missing properties", async () => {
+ it('should merge with defaults for missing properties', async () => {
const partialSettings = {
version: SETTINGS_VERSION,
- theme: "dark",
+ theme: 'dark',
};
- const settingsPath = path.join(testDataDir, "settings.json");
+ const settingsPath = path.join(testDataDir, 'settings.json');
await fs.writeFile(settingsPath, JSON.stringify(partialSettings, null, 2));
const settings = await settingsService.getGlobalSettings();
- expect(settings.theme).toBe("dark");
+ expect(settings.theme).toBe('dark');
expect(settings.sidebarOpen).toBe(DEFAULT_GLOBAL_SETTINGS.sidebarOpen);
expect(settings.maxConcurrency).toBe(DEFAULT_GLOBAL_SETTINGS.maxConcurrency);
});
- it("should merge keyboard shortcuts deeply", async () => {
+ it('should merge keyboard shortcuts deeply', async () => {
const customSettings: GlobalSettings = {
...DEFAULT_GLOBAL_SETTINGS,
keyboardShortcuts: {
...DEFAULT_GLOBAL_SETTINGS.keyboardShortcuts,
- board: "B",
+ board: 'B',
},
};
- const settingsPath = path.join(testDataDir, "settings.json");
+ const settingsPath = path.join(testDataDir, 'settings.json');
await fs.writeFile(settingsPath, JSON.stringify(customSettings, null, 2));
const settings = await settingsService.getGlobalSettings();
- expect(settings.keyboardShortcuts.board).toBe("B");
+ expect(settings.keyboardShortcuts.board).toBe('B');
expect(settings.keyboardShortcuts.agent).toBe(
DEFAULT_GLOBAL_SETTINGS.keyboardShortcuts.agent
);
});
});
- describe("updateGlobalSettings", () => {
- it("should create settings file with updates", async () => {
+ describe('updateGlobalSettings', () => {
+ it('should create settings file with updates', async () => {
const updates: Partial<GlobalSettings> = {
- theme: "light",
+ theme: 'light',
sidebarOpen: false,
};
const updated = await settingsService.updateGlobalSettings(updates);
- expect(updated.theme).toBe("light");
+ expect(updated.theme).toBe('light');
expect(updated.sidebarOpen).toBe(false);
expect(updated.version).toBe(SETTINGS_VERSION);
- const settingsPath = path.join(testDataDir, "settings.json");
- const fileContent = await fs.readFile(settingsPath, "utf-8");
+ const settingsPath = path.join(testDataDir, 'settings.json');
+ const fileContent = await fs.readFile(settingsPath, 'utf-8');
const saved = JSON.parse(fileContent);
- expect(saved.theme).toBe("light");
+ expect(saved.theme).toBe('light');
expect(saved.sidebarOpen).toBe(false);
});
- it("should merge updates with existing settings", async () => {
+ it('should merge updates with existing settings', async () => {
const initial: GlobalSettings = {
...DEFAULT_GLOBAL_SETTINGS,
- theme: "dark",
+ theme: 'dark',
maxConcurrency: 3,
};
- const settingsPath = path.join(testDataDir, "settings.json");
+ const settingsPath = path.join(testDataDir, 'settings.json');
await fs.writeFile(settingsPath, JSON.stringify(initial, null, 2));
const updates: Partial<GlobalSettings> = {
- theme: "light",
+ theme: 'light',
};
const updated = await settingsService.updateGlobalSettings(updates);
- expect(updated.theme).toBe("light");
+ expect(updated.theme).toBe('light');
expect(updated.maxConcurrency).toBe(3); // Preserved from initial
});
- it("should deep merge keyboard shortcuts", async () => {
+ it('should deep merge keyboard shortcuts', async () => {
const updates: Partial<GlobalSettings> = {
keyboardShortcuts: {
- board: "B",
+ board: 'B',
},
};
const updated = await settingsService.updateGlobalSettings(updates);
- expect(updated.keyboardShortcuts.board).toBe("B");
- expect(updated.keyboardShortcuts.agent).toBe(
- DEFAULT_GLOBAL_SETTINGS.keyboardShortcuts.agent
- );
+ expect(updated.keyboardShortcuts.board).toBe('B');
+ expect(updated.keyboardShortcuts.agent).toBe(DEFAULT_GLOBAL_SETTINGS.keyboardShortcuts.agent);
});
- it("should create data directory if it does not exist", async () => {
+ it('should create data directory if it does not exist', async () => {
const newDataDir = path.join(os.tmpdir(), `new-data-dir-${Date.now()}`);
const newService = new SettingsService(newDataDir);
- await newService.updateGlobalSettings({ theme: "light" });
+ await newService.updateGlobalSettings({ theme: 'light' });
const stats = await fs.stat(newDataDir);
expect(stats.isDirectory()).toBe(true);
@@ -159,248 +157,248 @@ describe("settings-service.ts", () => {
});
});
- describe("hasGlobalSettings", () => {
- it("should return false when settings file does not exist", async () => {
+ describe('hasGlobalSettings', () => {
+ it('should return false when settings file does not exist', async () => {
const exists = await settingsService.hasGlobalSettings();
expect(exists).toBe(false);
});
- it("should return true when settings file exists", async () => {
- await settingsService.updateGlobalSettings({ theme: "light" });
+ it('should return true when settings file exists', async () => {
+ await settingsService.updateGlobalSettings({ theme: 'light' });
const exists = await settingsService.hasGlobalSettings();
expect(exists).toBe(true);
});
});
- describe("getCredentials", () => {
- it("should return default credentials when file does not exist", async () => {
+ describe('getCredentials', () => {
+ it('should return default credentials when file does not exist', async () => {
const credentials = await settingsService.getCredentials();
expect(credentials).toEqual(DEFAULT_CREDENTIALS);
});
- it("should read and return existing credentials", async () => {
+ it('should read and return existing credentials', async () => {
const customCredentials: Credentials = {
...DEFAULT_CREDENTIALS,
apiKeys: {
- anthropic: "sk-test-key",
+ anthropic: 'sk-test-key',
},
};
- const credentialsPath = path.join(testDataDir, "credentials.json");
+ const credentialsPath = path.join(testDataDir, 'credentials.json');
await fs.writeFile(credentialsPath, JSON.stringify(customCredentials, null, 2));
const credentials = await settingsService.getCredentials();
- expect(credentials.apiKeys.anthropic).toBe("sk-test-key");
+ expect(credentials.apiKeys.anthropic).toBe('sk-test-key');
});
- it("should merge with defaults for missing api keys", async () => {
+ it('should merge with defaults for missing api keys', async () => {
const partialCredentials = {
version: CREDENTIALS_VERSION,
apiKeys: {
- anthropic: "sk-test",
+ anthropic: 'sk-test',
},
};
- const credentialsPath = path.join(testDataDir, "credentials.json");
+ const credentialsPath = path.join(testDataDir, 'credentials.json');
await fs.writeFile(credentialsPath, JSON.stringify(partialCredentials, null, 2));
const credentials = await settingsService.getCredentials();
- expect(credentials.apiKeys.anthropic).toBe("sk-test");
+ expect(credentials.apiKeys.anthropic).toBe('sk-test');
});
});
- describe("updateCredentials", () => {
- it("should create credentials file with updates", async () => {
+ describe('updateCredentials', () => {
+ it('should create credentials file with updates', async () => {
const updates: Partial<Credentials> = {
apiKeys: {
- anthropic: "sk-test-key",
+ anthropic: 'sk-test-key',
},
};
const updated = await settingsService.updateCredentials(updates);
- expect(updated.apiKeys.anthropic).toBe("sk-test-key");
+ expect(updated.apiKeys.anthropic).toBe('sk-test-key');
expect(updated.version).toBe(CREDENTIALS_VERSION);
- const credentialsPath = path.join(testDataDir, "credentials.json");
- const fileContent = await fs.readFile(credentialsPath, "utf-8");
+ const credentialsPath = path.join(testDataDir, 'credentials.json');
+ const fileContent = await fs.readFile(credentialsPath, 'utf-8');
const saved = JSON.parse(fileContent);
- expect(saved.apiKeys.anthropic).toBe("sk-test-key");
+ expect(saved.apiKeys.anthropic).toBe('sk-test-key');
});
- it("should merge updates with existing credentials", async () => {
+ it('should merge updates with existing credentials', async () => {
const initial: Credentials = {
...DEFAULT_CREDENTIALS,
apiKeys: {
- anthropic: "sk-initial",
+ anthropic: 'sk-initial',
},
};
- const credentialsPath = path.join(testDataDir, "credentials.json");
+ const credentialsPath = path.join(testDataDir, 'credentials.json');
await fs.writeFile(credentialsPath, JSON.stringify(initial, null, 2));
const updates: Partial<Credentials> = {
apiKeys: {
- anthropic: "sk-updated",
+ anthropic: 'sk-updated',
},
};
const updated = await settingsService.updateCredentials(updates);
- expect(updated.apiKeys.anthropic).toBe("sk-updated");
+ expect(updated.apiKeys.anthropic).toBe('sk-updated');
});
- it("should deep merge api keys", async () => {
+ it('should deep merge api keys', async () => {
const initial: Credentials = {
...DEFAULT_CREDENTIALS,
apiKeys: {
- anthropic: "sk-anthropic",
+ anthropic: 'sk-anthropic',
},
};
- const credentialsPath = path.join(testDataDir, "credentials.json");
+ const credentialsPath = path.join(testDataDir, 'credentials.json');
await fs.writeFile(credentialsPath, JSON.stringify(initial, null, 2));
const updates: Partial<Credentials> = {
apiKeys: {
- anthropic: "sk-updated-anthropic",
+ anthropic: 'sk-updated-anthropic',
},
};
const updated = await settingsService.updateCredentials(updates);
- expect(updated.apiKeys.anthropic).toBe("sk-updated-anthropic");
+ expect(updated.apiKeys.anthropic).toBe('sk-updated-anthropic');
});
});
- describe("getMaskedCredentials", () => {
- it("should return masked credentials for empty keys", async () => {
+ describe('getMaskedCredentials', () => {
+ it('should return masked credentials for empty keys', async () => {
const masked = await settingsService.getMaskedCredentials();
expect(masked.anthropic.configured).toBe(false);
- expect(masked.anthropic.masked).toBe("");
+ expect(masked.anthropic.masked).toBe('');
});
- it("should mask keys correctly", async () => {
+ it('should mask keys correctly', async () => {
await settingsService.updateCredentials({
apiKeys: {
- anthropic: "sk-ant-api03-1234567890abcdef",
+ anthropic: 'sk-ant-api03-1234567890abcdef',
},
});
const masked = await settingsService.getMaskedCredentials();
expect(masked.anthropic.configured).toBe(true);
- expect(masked.anthropic.masked).toBe("sk-a...cdef");
+ expect(masked.anthropic.masked).toBe('sk-a...cdef');
});
- it("should handle short keys", async () => {
+ it('should handle short keys', async () => {
await settingsService.updateCredentials({
apiKeys: {
- anthropic: "short",
+ anthropic: 'short',
},
});
const masked = await settingsService.getMaskedCredentials();
expect(masked.anthropic.configured).toBe(true);
- expect(masked.anthropic.masked).toBe("");
+ expect(masked.anthropic.masked).toBe('');
});
});
- describe("hasCredentials", () => {
- it("should return false when credentials file does not exist", async () => {
+ describe('hasCredentials', () => {
+ it('should return false when credentials file does not exist', async () => {
const exists = await settingsService.hasCredentials();
expect(exists).toBe(false);
});
- it("should return true when credentials file exists", async () => {
+ it('should return true when credentials file exists', async () => {
await settingsService.updateCredentials({
- apiKeys: { anthropic: "test" },
+ apiKeys: { anthropic: 'test' },
});
const exists = await settingsService.hasCredentials();
expect(exists).toBe(true);
});
});
- describe("getProjectSettings", () => {
- it("should return default settings when file does not exist", async () => {
+ describe('getProjectSettings', () => {
+ it('should return default settings when file does not exist', async () => {
const settings = await settingsService.getProjectSettings(testProjectDir);
expect(settings).toEqual(DEFAULT_PROJECT_SETTINGS);
});
- it("should read and return existing project settings", async () => {
+ it('should read and return existing project settings', async () => {
const customSettings: ProjectSettings = {
...DEFAULT_PROJECT_SETTINGS,
- theme: "light",
+ theme: 'light',
useWorktrees: true,
};
- const automakerDir = path.join(testProjectDir, ".automaker");
+ const automakerDir = path.join(testProjectDir, '.automaker');
await fs.mkdir(automakerDir, { recursive: true });
- const settingsPath = path.join(automakerDir, "settings.json");
+ const settingsPath = path.join(automakerDir, 'settings.json');
await fs.writeFile(settingsPath, JSON.stringify(customSettings, null, 2));
const settings = await settingsService.getProjectSettings(testProjectDir);
- expect(settings.theme).toBe("light");
+ expect(settings.theme).toBe('light');
expect(settings.useWorktrees).toBe(true);
});
- it("should merge with defaults for missing properties", async () => {
+ it('should merge with defaults for missing properties', async () => {
const partialSettings = {
version: PROJECT_SETTINGS_VERSION,
- theme: "dark",
+ theme: 'dark',
};
- const automakerDir = path.join(testProjectDir, ".automaker");
+ const automakerDir = path.join(testProjectDir, '.automaker');
await fs.mkdir(automakerDir, { recursive: true });
- const settingsPath = path.join(automakerDir, "settings.json");
+ const settingsPath = path.join(automakerDir, 'settings.json');
await fs.writeFile(settingsPath, JSON.stringify(partialSettings, null, 2));
const settings = await settingsService.getProjectSettings(testProjectDir);
- expect(settings.theme).toBe("dark");
+ expect(settings.theme).toBe('dark');
expect(settings.version).toBe(PROJECT_SETTINGS_VERSION);
});
});
- describe("updateProjectSettings", () => {
- it("should create project settings file with updates", async () => {
+ describe('updateProjectSettings', () => {
+ it('should create project settings file with updates', async () => {
const updates: Partial<ProjectSettings> = {
- theme: "light",
+ theme: 'light',
useWorktrees: true,
};
const updated = await settingsService.updateProjectSettings(testProjectDir, updates);
- expect(updated.theme).toBe("light");
+ expect(updated.theme).toBe('light');
expect(updated.useWorktrees).toBe(true);
expect(updated.version).toBe(PROJECT_SETTINGS_VERSION);
- const automakerDir = path.join(testProjectDir, ".automaker");
- const settingsPath = path.join(automakerDir, "settings.json");
- const fileContent = await fs.readFile(settingsPath, "utf-8");
+ const automakerDir = path.join(testProjectDir, '.automaker');
+ const settingsPath = path.join(automakerDir, 'settings.json');
+ const fileContent = await fs.readFile(settingsPath, 'utf-8');
const saved = JSON.parse(fileContent);
- expect(saved.theme).toBe("light");
+ expect(saved.theme).toBe('light');
expect(saved.useWorktrees).toBe(true);
});
- it("should merge updates with existing project settings", async () => {
+ it('should merge updates with existing project settings', async () => {
const initial: ProjectSettings = {
...DEFAULT_PROJECT_SETTINGS,
- theme: "dark",
+ theme: 'dark',
useWorktrees: false,
};
- const automakerDir = path.join(testProjectDir, ".automaker");
+ const automakerDir = path.join(testProjectDir, '.automaker');
await fs.mkdir(automakerDir, { recursive: true });
- const settingsPath = path.join(automakerDir, "settings.json");
+ const settingsPath = path.join(automakerDir, 'settings.json');
await fs.writeFile(settingsPath, JSON.stringify(initial, null, 2));
const updates: Partial<ProjectSettings> = {
- theme: "light",
+ theme: 'light',
};
const updated = await settingsService.updateProjectSettings(testProjectDir, updates);
- expect(updated.theme).toBe("light");
+ expect(updated.theme).toBe('light');
expect(updated.useWorktrees).toBe(false); // Preserved
});
- it("should deep merge board background", async () => {
+ it('should deep merge board background', async () => {
const initial: ProjectSettings = {
...DEFAULT_PROJECT_SETTINGS,
boardBackground: {
- imagePath: "/path/to/image.jpg",
+ imagePath: '/path/to/image.jpg',
cardOpacity: 0.8,
columnOpacity: 0.9,
columnBorderEnabled: true,
@@ -410,9 +408,9 @@ describe("settings-service.ts", () => {
hideScrollbar: false,
},
};
- const automakerDir = path.join(testProjectDir, ".automaker");
+ const automakerDir = path.join(testProjectDir, '.automaker');
await fs.mkdir(automakerDir, { recursive: true });
- const settingsPath = path.join(automakerDir, "settings.json");
+ const settingsPath = path.join(automakerDir, 'settings.json');
await fs.writeFile(settingsPath, JSON.stringify(initial, null, 2));
const updates: Partial<ProjectSettings> = {
@@ -423,17 +421,17 @@ describe("settings-service.ts", () => {
const updated = await settingsService.updateProjectSettings(testProjectDir, updates);
- expect(updated.boardBackground?.imagePath).toBe("/path/to/image.jpg");
+ expect(updated.boardBackground?.imagePath).toBe('/path/to/image.jpg');
expect(updated.boardBackground?.cardOpacity).toBe(0.9);
expect(updated.boardBackground?.columnOpacity).toBe(0.9);
});
- it("should create .automaker directory if it does not exist", async () => {
+ it('should create .automaker directory if it does not exist', async () => {
const newProjectDir = path.join(os.tmpdir(), `new-project-${Date.now()}`);
- await settingsService.updateProjectSettings(newProjectDir, { theme: "light" });
+ await settingsService.updateProjectSettings(newProjectDir, { theme: 'light' });
- const automakerDir = path.join(newProjectDir, ".automaker");
+ const automakerDir = path.join(newProjectDir, '.automaker');
const stats = await fs.stat(automakerDir);
expect(stats.isDirectory()).toBe(true);
@@ -441,25 +439,25 @@ describe("settings-service.ts", () => {
});
});
- describe("hasProjectSettings", () => {
- it("should return false when project settings file does not exist", async () => {
+ describe('hasProjectSettings', () => {
+ it('should return false when project settings file does not exist', async () => {
const exists = await settingsService.hasProjectSettings(testProjectDir);
expect(exists).toBe(false);
});
- it("should return true when project settings file exists", async () => {
- await settingsService.updateProjectSettings(testProjectDir, { theme: "light" });
+ it('should return true when project settings file exists', async () => {
+ await settingsService.updateProjectSettings(testProjectDir, { theme: 'light' });
const exists = await settingsService.hasProjectSettings(testProjectDir);
expect(exists).toBe(true);
});
});
- describe("migrateFromLocalStorage", () => {
- it("should migrate global settings from localStorage data", async () => {
+ describe('migrateFromLocalStorage', () => {
+ it('should migrate global settings from localStorage data', async () => {
const localStorageData = {
- "automaker-storage": JSON.stringify({
+ 'automaker-storage': JSON.stringify({
state: {
- theme: "light",
+ theme: 'light',
sidebarOpen: false,
maxConcurrency: 5,
},
@@ -474,17 +472,17 @@ describe("settings-service.ts", () => {
expect(result.migratedProjectCount).toBe(0);
const settings = await settingsService.getGlobalSettings();
- expect(settings.theme).toBe("light");
+ expect(settings.theme).toBe('light');
expect(settings.sidebarOpen).toBe(false);
expect(settings.maxConcurrency).toBe(5);
});
- it("should migrate credentials from localStorage data", async () => {
+ it('should migrate credentials from localStorage data', async () => {
const localStorageData = {
- "automaker-storage": JSON.stringify({
+ 'automaker-storage': JSON.stringify({
state: {
apiKeys: {
- anthropic: "sk-test-key",
+ anthropic: 'sk-test-key',
},
},
}),
@@ -496,24 +494,24 @@ describe("settings-service.ts", () => {
expect(result.migratedCredentials).toBe(true);
const credentials = await settingsService.getCredentials();
- expect(credentials.apiKeys.anthropic).toBe("sk-test-key");
+ expect(credentials.apiKeys.anthropic).toBe('sk-test-key');
});
- it("should migrate project settings from localStorage data", async () => {
+ it('should migrate project settings from localStorage data', async () => {
const localStorageData = {
- "automaker-storage": JSON.stringify({
+ 'automaker-storage': JSON.stringify({
state: {
projects: [
{
- id: "proj1",
- name: "Project 1",
+ id: 'proj1',
+ name: 'Project 1',
path: testProjectDir,
- theme: "light",
+ theme: 'light',
},
],
boardBackgroundByProject: {
[testProjectDir]: {
- imagePath: "/path/to/image.jpg",
+ imagePath: '/path/to/image.jpg',
cardOpacity: 0.8,
columnOpacity: 0.9,
columnBorderEnabled: true,
@@ -533,30 +531,30 @@ describe("settings-service.ts", () => {
expect(result.migratedProjectCount).toBe(1);
const projectSettings = await settingsService.getProjectSettings(testProjectDir);
- expect(projectSettings.theme).toBe("light");
- expect(projectSettings.boardBackground?.imagePath).toBe("/path/to/image.jpg");
+ expect(projectSettings.theme).toBe('light');
+ expect(projectSettings.boardBackground?.imagePath).toBe('/path/to/image.jpg');
});
- it("should handle direct localStorage values", async () => {
+ it('should handle direct localStorage values', async () => {
const localStorageData = {
- "automaker:lastProjectDir": "/path/to/project",
- "file-browser-recent-folders": JSON.stringify(["/path1", "/path2"]),
- "worktree-panel-collapsed": "true",
+ 'automaker:lastProjectDir': '/path/to/project',
+ 'file-browser-recent-folders': JSON.stringify(['/path1', '/path2']),
+ 'worktree-panel-collapsed': 'true',
};
const result = await settingsService.migrateFromLocalStorage(localStorageData);
expect(result.success).toBe(true);
const settings = await settingsService.getGlobalSettings();
- expect(settings.lastProjectDir).toBe("/path/to/project");
- expect(settings.recentFolders).toEqual(["/path1", "/path2"]);
+ expect(settings.lastProjectDir).toBe('/path/to/project');
+ expect(settings.recentFolders).toEqual(['/path1', '/path2']);
expect(settings.worktreePanelCollapsed).toBe(true);
});
- it("should handle invalid JSON gracefully", async () => {
+ it('should handle invalid JSON gracefully', async () => {
const localStorageData = {
- "automaker-storage": "invalid json",
- "file-browser-recent-folders": "invalid json",
+ 'automaker-storage': 'invalid json',
+ 'file-browser-recent-folders': 'invalid json',
};
const result = await settingsService.migrateFromLocalStorage(localStorageData);
@@ -565,7 +563,7 @@ describe("settings-service.ts", () => {
expect(result.errors.length).toBeGreaterThan(0);
});
- it("should handle migration errors gracefully", async () => {
+ it('should handle migration errors gracefully', async () => {
// Create a read-only directory to cause write errors
const readOnlyDir = path.join(os.tmpdir(), `readonly-${Date.now()}`);
await fs.mkdir(readOnlyDir, { recursive: true });
@@ -573,8 +571,8 @@ describe("settings-service.ts", () => {
const readOnlyService = new SettingsService(readOnlyDir);
const localStorageData = {
- "automaker-storage": JSON.stringify({
- state: { theme: "light" },
+ 'automaker-storage': JSON.stringify({
+ state: { theme: 'light' },
}),
};
@@ -588,15 +586,15 @@ describe("settings-service.ts", () => {
});
});
- describe("getDataDir", () => {
- it("should return the data directory path", () => {
+ describe('getDataDir', () => {
+ it('should return the data directory path', () => {
const dataDir = settingsService.getDataDir();
expect(dataDir).toBe(testDataDir);
});
});
- describe("atomicWriteJson", () => {
- it("should handle write errors and clean up temp file", async () => {
+ describe('atomicWriteJson', () => {
+ it('should handle write errors and clean up temp file', async () => {
// Create a read-only directory to cause write errors
const readOnlyDir = path.join(os.tmpdir(), `readonly-${Date.now()}`);
await fs.mkdir(readOnlyDir, { recursive: true });
@@ -604,13 +602,10 @@ describe("settings-service.ts", () => {
const readOnlyService = new SettingsService(readOnlyDir);
- await expect(
- readOnlyService.updateGlobalSettings({ theme: "light" })
- ).rejects.toThrow();
+ await expect(readOnlyService.updateGlobalSettings({ theme: 'light' })).rejects.toThrow();
await fs.chmod(readOnlyDir, 0o755);
await fs.rm(readOnlyDir, { recursive: true, force: true });
});
});
});
-
diff --git a/apps/server/tests/unit/services/terminal-service.test.ts b/apps/server/tests/unit/services/terminal-service.test.ts
index d273061a8..44e823b01 100644
--- a/apps/server/tests/unit/services/terminal-service.test.ts
+++ b/apps/server/tests/unit/services/terminal-service.test.ts
@@ -1,14 +1,14 @@
-import { describe, it, expect, vi, beforeEach, afterEach } from "vitest";
-import { TerminalService, getTerminalService } from "@/services/terminal-service.js";
-import * as pty from "node-pty";
-import * as os from "os";
-import * as fs from "fs";
+import { describe, it, expect, vi, beforeEach, afterEach } from 'vitest';
+import { TerminalService, getTerminalService } from '@/services/terminal-service.js';
+import * as pty from 'node-pty';
+import * as os from 'os';
+import * as fs from 'fs';
-vi.mock("node-pty");
-vi.mock("fs");
-vi.mock("os");
+vi.mock('node-pty');
+vi.mock('fs');
+vi.mock('os');
-describe("terminal-service.ts", () => {
+describe('terminal-service.ts', () => {
let service: TerminalService;
let mockPtyProcess: any;
@@ -26,225 +26,225 @@ describe("terminal-service.ts", () => {
};
vi.mocked(pty.spawn).mockReturnValue(mockPtyProcess);
- vi.mocked(os.homedir).mockReturnValue("/home/user");
- vi.mocked(os.platform).mockReturnValue("linux");
- vi.mocked(os.arch).mockReturnValue("x64");
+ vi.mocked(os.homedir).mockReturnValue('/home/user');
+ vi.mocked(os.platform).mockReturnValue('linux');
+ vi.mocked(os.arch).mockReturnValue('x64');
});
afterEach(() => {
service.cleanup();
});
- describe("detectShell", () => {
- it("should detect PowerShell Core on Windows when available", () => {
- vi.mocked(os.platform).mockReturnValue("win32");
+ describe('detectShell', () => {
+ it('should detect PowerShell Core on Windows when available', () => {
+ vi.mocked(os.platform).mockReturnValue('win32');
vi.mocked(fs.existsSync).mockImplementation((path: any) => {
- return path === "C:\\Program Files\\PowerShell\\7\\pwsh.exe";
+ return path === 'C:\\Program Files\\PowerShell\\7\\pwsh.exe';
});
const result = service.detectShell();
- expect(result.shell).toBe("C:\\Program Files\\PowerShell\\7\\pwsh.exe");
+ expect(result.shell).toBe('C:\\Program Files\\PowerShell\\7\\pwsh.exe');
expect(result.args).toEqual([]);
});
- it("should fall back to PowerShell on Windows if Core not available", () => {
- vi.mocked(os.platform).mockReturnValue("win32");
+ it('should fall back to PowerShell on Windows if Core not available', () => {
+ vi.mocked(os.platform).mockReturnValue('win32');
vi.mocked(fs.existsSync).mockImplementation((path: any) => {
- return path === "C:\\Windows\\System32\\WindowsPowerShell\\v1.0\\powershell.exe";
+ return path === 'C:\\Windows\\System32\\WindowsPowerShell\\v1.0\\powershell.exe';
});
const result = service.detectShell();
- expect(result.shell).toBe("C:\\Windows\\System32\\WindowsPowerShell\\v1.0\\powershell.exe");
+ expect(result.shell).toBe('C:\\Windows\\System32\\WindowsPowerShell\\v1.0\\powershell.exe');
expect(result.args).toEqual([]);
});
- it("should fall back to cmd.exe on Windows if no PowerShell", () => {
- vi.mocked(os.platform).mockReturnValue("win32");
+ it('should fall back to cmd.exe on Windows if no PowerShell', () => {
+ vi.mocked(os.platform).mockReturnValue('win32');
vi.mocked(fs.existsSync).mockReturnValue(false);
const result = service.detectShell();
- expect(result.shell).toBe("cmd.exe");
+ expect(result.shell).toBe('cmd.exe');
expect(result.args).toEqual([]);
});
- it("should detect user shell on macOS", () => {
- vi.mocked(os.platform).mockReturnValue("darwin");
- vi.spyOn(process, "env", "get").mockReturnValue({ SHELL: "/bin/zsh" });
+ it('should detect user shell on macOS', () => {
+ vi.mocked(os.platform).mockReturnValue('darwin');
+ vi.spyOn(process, 'env', 'get').mockReturnValue({ SHELL: '/bin/zsh' });
vi.mocked(fs.existsSync).mockReturnValue(true);
const result = service.detectShell();
- expect(result.shell).toBe("/bin/zsh");
- expect(result.args).toEqual(["--login"]);
+ expect(result.shell).toBe('/bin/zsh');
+ expect(result.args).toEqual(['--login']);
});
- it("should fall back to zsh on macOS if user shell not available", () => {
- vi.mocked(os.platform).mockReturnValue("darwin");
- vi.spyOn(process, "env", "get").mockReturnValue({});
+ it('should fall back to zsh on macOS if user shell not available', () => {
+ vi.mocked(os.platform).mockReturnValue('darwin');
+ vi.spyOn(process, 'env', 'get').mockReturnValue({});
vi.mocked(fs.existsSync).mockImplementation((path: any) => {
- return path === "/bin/zsh";
+ return path === '/bin/zsh';
});
const result = service.detectShell();
- expect(result.shell).toBe("/bin/zsh");
- expect(result.args).toEqual(["--login"]);
+ expect(result.shell).toBe('/bin/zsh');
+ expect(result.args).toEqual(['--login']);
});
- it("should fall back to bash on macOS if zsh not available", () => {
- vi.mocked(os.platform).mockReturnValue("darwin");
- vi.spyOn(process, "env", "get").mockReturnValue({});
+ it('should fall back to bash on macOS if zsh not available', () => {
+ vi.mocked(os.platform).mockReturnValue('darwin');
+ vi.spyOn(process, 'env', 'get').mockReturnValue({});
vi.mocked(fs.existsSync).mockReturnValue(false);
const result = service.detectShell();
- expect(result.shell).toBe("/bin/bash");
- expect(result.args).toEqual(["--login"]);
+ expect(result.shell).toBe('/bin/bash');
+ expect(result.args).toEqual(['--login']);
});
- it("should detect user shell on Linux", () => {
- vi.mocked(os.platform).mockReturnValue("linux");
- vi.spyOn(process, "env", "get").mockReturnValue({ SHELL: "/bin/bash" });
+ it('should detect user shell on Linux', () => {
+ vi.mocked(os.platform).mockReturnValue('linux');
+ vi.spyOn(process, 'env', 'get').mockReturnValue({ SHELL: '/bin/bash' });
vi.mocked(fs.existsSync).mockReturnValue(true);
const result = service.detectShell();
- expect(result.shell).toBe("/bin/bash");
- expect(result.args).toEqual(["--login"]);
+ expect(result.shell).toBe('/bin/bash');
+ expect(result.args).toEqual(['--login']);
});
- it("should fall back to bash on Linux if user shell not available", () => {
- vi.mocked(os.platform).mockReturnValue("linux");
- vi.spyOn(process, "env", "get").mockReturnValue({});
+ it('should fall back to bash on Linux if user shell not available', () => {
+ vi.mocked(os.platform).mockReturnValue('linux');
+ vi.spyOn(process, 'env', 'get').mockReturnValue({});
vi.mocked(fs.existsSync).mockImplementation((path: any) => {
- return path === "/bin/bash";
+ return path === '/bin/bash';
});
const result = service.detectShell();
- expect(result.shell).toBe("/bin/bash");
- expect(result.args).toEqual(["--login"]);
+ expect(result.shell).toBe('/bin/bash');
+ expect(result.args).toEqual(['--login']);
});
- it("should fall back to sh on Linux if bash not available", () => {
- vi.mocked(os.platform).mockReturnValue("linux");
- vi.spyOn(process, "env", "get").mockReturnValue({});
+ it('should fall back to sh on Linux if bash not available', () => {
+ vi.mocked(os.platform).mockReturnValue('linux');
+ vi.spyOn(process, 'env', 'get').mockReturnValue({});
vi.mocked(fs.existsSync).mockReturnValue(false);
const result = service.detectShell();
- expect(result.shell).toBe("/bin/sh");
+ expect(result.shell).toBe('/bin/sh');
expect(result.args).toEqual([]);
});
- it("should detect WSL and use appropriate shell", () => {
- vi.mocked(os.platform).mockReturnValue("linux");
- vi.spyOn(process, "env", "get").mockReturnValue({ SHELL: "/bin/bash" });
+ it('should detect WSL and use appropriate shell', () => {
+ vi.mocked(os.platform).mockReturnValue('linux');
+ vi.spyOn(process, 'env', 'get').mockReturnValue({ SHELL: '/bin/bash' });
vi.mocked(fs.existsSync).mockReturnValue(true);
- vi.mocked(fs.readFileSync).mockReturnValue("Linux version 5.10.0-microsoft-standard-WSL2");
+ vi.mocked(fs.readFileSync).mockReturnValue('Linux version 5.10.0-microsoft-standard-WSL2');
const result = service.detectShell();
- expect(result.shell).toBe("/bin/bash");
- expect(result.args).toEqual(["--login"]);
+ expect(result.shell).toBe('/bin/bash');
+ expect(result.args).toEqual(['--login']);
});
});
- describe("isWSL", () => {
- it("should return true if /proc/version contains microsoft", () => {
+ describe('isWSL', () => {
+ it('should return true if /proc/version contains microsoft', () => {
vi.mocked(fs.existsSync).mockReturnValue(true);
- vi.mocked(fs.readFileSync).mockReturnValue("Linux version 5.10.0-microsoft-standard-WSL2");
+ vi.mocked(fs.readFileSync).mockReturnValue('Linux version 5.10.0-microsoft-standard-WSL2');
expect(service.isWSL()).toBe(true);
});
- it("should return true if /proc/version contains wsl", () => {
+ it('should return true if /proc/version contains wsl', () => {
vi.mocked(fs.existsSync).mockReturnValue(true);
- vi.mocked(fs.readFileSync).mockReturnValue("Linux version 5.10.0-wsl2");
+ vi.mocked(fs.readFileSync).mockReturnValue('Linux version 5.10.0-wsl2');
expect(service.isWSL()).toBe(true);
});
- it("should return true if WSL_DISTRO_NAME is set", () => {
+ it('should return true if WSL_DISTRO_NAME is set', () => {
vi.mocked(fs.existsSync).mockReturnValue(false);
- vi.spyOn(process, "env", "get").mockReturnValue({ WSL_DISTRO_NAME: "Ubuntu" });
+ vi.spyOn(process, 'env', 'get').mockReturnValue({ WSL_DISTRO_NAME: 'Ubuntu' });
expect(service.isWSL()).toBe(true);
});
- it("should return true if WSLENV is set", () => {
+ it('should return true if WSLENV is set', () => {
vi.mocked(fs.existsSync).mockReturnValue(false);
- vi.spyOn(process, "env", "get").mockReturnValue({ WSLENV: "PATH/l" });
+ vi.spyOn(process, 'env', 'get').mockReturnValue({ WSLENV: 'PATH/l' });
expect(service.isWSL()).toBe(true);
});
- it("should return false if not in WSL", () => {
+ it('should return false if not in WSL', () => {
vi.mocked(fs.existsSync).mockReturnValue(false);
- vi.spyOn(process, "env", "get").mockReturnValue({});
+ vi.spyOn(process, 'env', 'get').mockReturnValue({});
expect(service.isWSL()).toBe(false);
});
- it("should return false if error reading /proc/version", () => {
+ it('should return false if error reading /proc/version', () => {
vi.mocked(fs.existsSync).mockReturnValue(true);
vi.mocked(fs.readFileSync).mockImplementation(() => {
- throw new Error("Permission denied");
+ throw new Error('Permission denied');
});
expect(service.isWSL()).toBe(false);
});
});
- describe("getPlatformInfo", () => {
- it("should return platform information", () => {
- vi.mocked(os.platform).mockReturnValue("linux");
- vi.mocked(os.arch).mockReturnValue("x64");
+ describe('getPlatformInfo', () => {
+ it('should return platform information', () => {
+ vi.mocked(os.platform).mockReturnValue('linux');
+ vi.mocked(os.arch).mockReturnValue('x64');
vi.mocked(fs.existsSync).mockReturnValue(true);
- vi.spyOn(process, "env", "get").mockReturnValue({ SHELL: "/bin/bash" });
+ vi.spyOn(process, 'env', 'get').mockReturnValue({ SHELL: '/bin/bash' });
const info = service.getPlatformInfo();
- expect(info.platform).toBe("linux");
- expect(info.arch).toBe("x64");
- expect(info.defaultShell).toBe("/bin/bash");
- expect(typeof info.isWSL).toBe("boolean");
+ expect(info.platform).toBe('linux');
+ expect(info.arch).toBe('x64');
+ expect(info.defaultShell).toBe('/bin/bash');
+ expect(typeof info.isWSL).toBe('boolean');
});
});
- describe("createSession", () => {
- it("should create a new terminal session", () => {
+ describe('createSession', () => {
+ it('should create a new terminal session', () => {
vi.mocked(fs.existsSync).mockReturnValue(true);
vi.mocked(fs.statSync).mockReturnValue({ isDirectory: () => true } as any);
- vi.spyOn(process, "env", "get").mockReturnValue({ SHELL: "/bin/bash" });
+ vi.spyOn(process, 'env', 'get').mockReturnValue({ SHELL: '/bin/bash' });
const session = service.createSession({
- cwd: "/test/dir",
+ cwd: '/test/dir',
cols: 100,
rows: 30,
});
expect(session.id).toMatch(/^term-/);
- expect(session.cwd).toBe("/test/dir");
- expect(session.shell).toBe("/bin/bash");
+ expect(session.cwd).toBe('/test/dir');
+ expect(session.shell).toBe('/bin/bash');
expect(pty.spawn).toHaveBeenCalledWith(
- "/bin/bash",
- ["--login"],
+ '/bin/bash',
+ ['--login'],
expect.objectContaining({
- cwd: "/test/dir",
+ cwd: '/test/dir',
cols: 100,
rows: 30,
})
);
});
- it("should use default cols and rows if not provided", () => {
+ it('should use default cols and rows if not provided', () => {
vi.mocked(fs.existsSync).mockReturnValue(true);
vi.mocked(fs.statSync).mockReturnValue({ isDirectory: () => true } as any);
- vi.spyOn(process, "env", "get").mockReturnValue({ SHELL: "/bin/bash" });
+ vi.spyOn(process, 'env', 'get').mockReturnValue({ SHELL: '/bin/bash' });
service.createSession();
@@ -258,61 +258,61 @@ describe("terminal-service.ts", () => {
);
});
- it("should fall back to home directory if cwd does not exist", () => {
+ it('should fall back to home directory if cwd does not exist', () => {
vi.mocked(fs.existsSync).mockReturnValue(true);
vi.mocked(fs.statSync).mockImplementation(() => {
- throw new Error("ENOENT");
+ throw new Error('ENOENT');
});
- vi.spyOn(process, "env", "get").mockReturnValue({ SHELL: "/bin/bash" });
+ vi.spyOn(process, 'env', 'get').mockReturnValue({ SHELL: '/bin/bash' });
const session = service.createSession({
- cwd: "/nonexistent",
+ cwd: '/nonexistent',
});
- expect(session.cwd).toBe("/home/user");
+ expect(session.cwd).toBe('/home/user');
});
- it("should fall back to home directory if cwd is not a directory", () => {
+ it('should fall back to home directory if cwd is not a directory', () => {
vi.mocked(fs.existsSync).mockReturnValue(true);
vi.mocked(fs.statSync).mockReturnValue({ isDirectory: () => false } as any);
- vi.spyOn(process, "env", "get").mockReturnValue({ SHELL: "/bin/bash" });
+ vi.spyOn(process, 'env', 'get').mockReturnValue({ SHELL: '/bin/bash' });
const session = service.createSession({
- cwd: "/file.txt",
+ cwd: '/file.txt',
});
- expect(session.cwd).toBe("/home/user");
+ expect(session.cwd).toBe('/home/user');
});
- it("should fix double slashes in path", () => {
+ it('should fix double slashes in path', () => {
vi.mocked(fs.existsSync).mockReturnValue(true);
vi.mocked(fs.statSync).mockReturnValue({ isDirectory: () => true } as any);
- vi.spyOn(process, "env", "get").mockReturnValue({ SHELL: "/bin/bash" });
+ vi.spyOn(process, 'env', 'get').mockReturnValue({ SHELL: '/bin/bash' });
const session = service.createSession({
- cwd: "//test/dir",
+ cwd: '//test/dir',
});
- expect(session.cwd).toBe("/test/dir");
+ expect(session.cwd).toBe('/test/dir');
});
- it("should preserve WSL UNC paths", () => {
+ it('should preserve WSL UNC paths', () => {
vi.mocked(fs.existsSync).mockReturnValue(true);
vi.mocked(fs.statSync).mockReturnValue({ isDirectory: () => true } as any);
- vi.spyOn(process, "env", "get").mockReturnValue({ SHELL: "/bin/bash" });
+ vi.spyOn(process, 'env', 'get').mockReturnValue({ SHELL: '/bin/bash' });
const session = service.createSession({
- cwd: "//wsl$/Ubuntu/home",
+ cwd: '//wsl$/Ubuntu/home',
});
- expect(session.cwd).toBe("//wsl$/Ubuntu/home");
+ expect(session.cwd).toBe('//wsl$/Ubuntu/home');
});
- it("should handle data events from PTY", () => {
+ it('should handle data events from PTY', () => {
vi.useFakeTimers();
vi.mocked(fs.existsSync).mockReturnValue(true);
vi.mocked(fs.statSync).mockReturnValue({ isDirectory: () => true } as any);
- vi.spyOn(process, "env", "get").mockReturnValue({ SHELL: "/bin/bash" });
+ vi.spyOn(process, 'env', 'get').mockReturnValue({ SHELL: '/bin/bash' });
const dataCallback = vi.fn();
service.onData(dataCallback);
@@ -321,7 +321,7 @@ describe("terminal-service.ts", () => {
// Simulate data event
const onDataHandler = mockPtyProcess.onData.mock.calls[0][0];
- onDataHandler("test data");
+ onDataHandler('test data');
// Wait for throttled output
vi.advanceTimersByTime(20);
@@ -331,10 +331,10 @@ describe("terminal-service.ts", () => {
vi.useRealTimers();
});
- it("should handle exit events from PTY", () => {
+ it('should handle exit events from PTY', () => {
vi.mocked(fs.existsSync).mockReturnValue(true);
vi.mocked(fs.statSync).mockReturnValue({ isDirectory: () => true } as any);
- vi.spyOn(process, "env", "get").mockReturnValue({ SHELL: "/bin/bash" });
+ vi.spyOn(process, 'env', 'get').mockReturnValue({ SHELL: '/bin/bash' });
const exitCallback = vi.fn();
service.onExit(exitCallback);
@@ -350,32 +350,32 @@ describe("terminal-service.ts", () => {
});
});
- describe("write", () => {
- it("should write data to existing session", () => {
+ describe('write', () => {
+ it('should write data to existing session', () => {
vi.mocked(fs.existsSync).mockReturnValue(true);
vi.mocked(fs.statSync).mockReturnValue({ isDirectory: () => true } as any);
- vi.spyOn(process, "env", "get").mockReturnValue({ SHELL: "/bin/bash" });
+ vi.spyOn(process, 'env', 'get').mockReturnValue({ SHELL: '/bin/bash' });
const session = service.createSession();
- const result = service.write(session.id, "ls\n");
+ const result = service.write(session.id, 'ls\n');
expect(result).toBe(true);
- expect(mockPtyProcess.write).toHaveBeenCalledWith("ls\n");
+ expect(mockPtyProcess.write).toHaveBeenCalledWith('ls\n');
});
- it("should return false for non-existent session", () => {
- const result = service.write("nonexistent", "data");
+ it('should return false for non-existent session', () => {
+ const result = service.write('nonexistent', 'data');
expect(result).toBe(false);
expect(mockPtyProcess.write).not.toHaveBeenCalled();
});
});
- describe("resize", () => {
- it("should resize existing session", () => {
+ describe('resize', () => {
+ it('should resize existing session', () => {
vi.mocked(fs.existsSync).mockReturnValue(true);
vi.mocked(fs.statSync).mockReturnValue({ isDirectory: () => true } as any);
- vi.spyOn(process, "env", "get").mockReturnValue({ SHELL: "/bin/bash" });
+ vi.spyOn(process, 'env', 'get').mockReturnValue({ SHELL: '/bin/bash' });
const session = service.createSession();
const result = service.resize(session.id, 120, 40);
@@ -384,19 +384,19 @@ describe("terminal-service.ts", () => {
expect(mockPtyProcess.resize).toHaveBeenCalledWith(120, 40);
});
- it("should return false for non-existent session", () => {
- const result = service.resize("nonexistent", 120, 40);
+ it('should return false for non-existent session', () => {
+ const result = service.resize('nonexistent', 120, 40);
expect(result).toBe(false);
expect(mockPtyProcess.resize).not.toHaveBeenCalled();
});
- it("should handle resize errors", () => {
+ it('should handle resize errors', () => {
vi.mocked(fs.existsSync).mockReturnValue(true);
vi.mocked(fs.statSync).mockReturnValue({ isDirectory: () => true } as any);
- vi.spyOn(process, "env", "get").mockReturnValue({ SHELL: "/bin/bash" });
+ vi.spyOn(process, 'env', 'get').mockReturnValue({ SHELL: '/bin/bash' });
mockPtyProcess.resize.mockImplementation(() => {
- throw new Error("Resize failed");
+ throw new Error('Resize failed');
});
const session = service.createSession();
@@ -406,32 +406,40 @@ describe("terminal-service.ts", () => {
});
});
- describe("killSession", () => {
- it("should kill existing session", () => {
+ describe('killSession', () => {
+ it('should kill existing session', () => {
+ vi.useFakeTimers();
vi.mocked(fs.existsSync).mockReturnValue(true);
vi.mocked(fs.statSync).mockReturnValue({ isDirectory: () => true } as any);
- vi.spyOn(process, "env", "get").mockReturnValue({ SHELL: "/bin/bash" });
+ vi.spyOn(process, 'env', 'get').mockReturnValue({ SHELL: '/bin/bash' });
const session = service.createSession();
const result = service.killSession(session.id);
expect(result).toBe(true);
- expect(mockPtyProcess.kill).toHaveBeenCalled();
+ expect(mockPtyProcess.kill).toHaveBeenCalledWith('SIGTERM');
+
+ // Session is removed after SIGKILL timeout (1 second)
+ vi.advanceTimersByTime(1000);
+
+ expect(mockPtyProcess.kill).toHaveBeenCalledWith('SIGKILL');
expect(service.getSession(session.id)).toBeUndefined();
+
+ vi.useRealTimers();
});
- it("should return false for non-existent session", () => {
- const result = service.killSession("nonexistent");
+ it('should return false for non-existent session', () => {
+ const result = service.killSession('nonexistent');
expect(result).toBe(false);
});
- it("should handle kill errors", () => {
+ it('should handle kill errors', () => {
vi.mocked(fs.existsSync).mockReturnValue(true);
vi.mocked(fs.statSync).mockReturnValue({ isDirectory: () => true } as any);
- vi.spyOn(process, "env", "get").mockReturnValue({ SHELL: "/bin/bash" });
+ vi.spyOn(process, 'env', 'get').mockReturnValue({ SHELL: '/bin/bash' });
mockPtyProcess.kill.mockImplementation(() => {
- throw new Error("Kill failed");
+ throw new Error('Kill failed');
});
const session = service.createSession();
@@ -441,11 +449,11 @@ describe("terminal-service.ts", () => {
});
});
- describe("getSession", () => {
- it("should return existing session", () => {
+ describe('getSession', () => {
+ it('should return existing session', () => {
vi.mocked(fs.existsSync).mockReturnValue(true);
vi.mocked(fs.statSync).mockReturnValue({ isDirectory: () => true } as any);
- vi.spyOn(process, "env", "get").mockReturnValue({ SHELL: "/bin/bash" });
+ vi.spyOn(process, 'env', 'get').mockReturnValue({ SHELL: '/bin/bash' });
const session = service.createSession();
const retrieved = service.getSession(session.id);
@@ -453,84 +461,84 @@ describe("terminal-service.ts", () => {
expect(retrieved).toBe(session);
});
- it("should return undefined for non-existent session", () => {
- const retrieved = service.getSession("nonexistent");
+ it('should return undefined for non-existent session', () => {
+ const retrieved = service.getSession('nonexistent');
expect(retrieved).toBeUndefined();
});
});
- describe("getScrollback", () => {
- it("should return scrollback buffer for existing session", () => {
+ describe('getScrollback', () => {
+ it('should return scrollback buffer for existing session', () => {
vi.mocked(fs.existsSync).mockReturnValue(true);
vi.mocked(fs.statSync).mockReturnValue({ isDirectory: () => true } as any);
- vi.spyOn(process, "env", "get").mockReturnValue({ SHELL: "/bin/bash" });
+ vi.spyOn(process, 'env', 'get').mockReturnValue({ SHELL: '/bin/bash' });
const session = service.createSession();
- session.scrollbackBuffer = "test scrollback";
+ session.scrollbackBuffer = 'test scrollback';
const scrollback = service.getScrollback(session.id);
- expect(scrollback).toBe("test scrollback");
+ expect(scrollback).toBe('test scrollback');
});
- it("should return null for non-existent session", () => {
- const scrollback = service.getScrollback("nonexistent");
+ it('should return null for non-existent session', () => {
+ const scrollback = service.getScrollback('nonexistent');
expect(scrollback).toBeNull();
});
});
- describe("getAllSessions", () => {
- it("should return all active sessions", () => {
+ describe('getAllSessions', () => {
+ it('should return all active sessions', () => {
vi.mocked(fs.existsSync).mockReturnValue(true);
vi.mocked(fs.statSync).mockReturnValue({ isDirectory: () => true } as any);
- vi.spyOn(process, "env", "get").mockReturnValue({ SHELL: "/bin/bash" });
+ vi.spyOn(process, 'env', 'get').mockReturnValue({ SHELL: '/bin/bash' });
- const session1 = service.createSession({ cwd: "/dir1" });
- const session2 = service.createSession({ cwd: "/dir2" });
+ const session1 = service.createSession({ cwd: '/dir1' });
+ const session2 = service.createSession({ cwd: '/dir2' });
const sessions = service.getAllSessions();
expect(sessions).toHaveLength(2);
expect(sessions[0].id).toBe(session1.id);
expect(sessions[1].id).toBe(session2.id);
- expect(sessions[0].cwd).toBe("/dir1");
- expect(sessions[1].cwd).toBe("/dir2");
+ expect(sessions[0].cwd).toBe('/dir1');
+ expect(sessions[1].cwd).toBe('/dir2');
});
- it("should return empty array if no sessions", () => {
+ it('should return empty array if no sessions', () => {
const sessions = service.getAllSessions();
expect(sessions).toEqual([]);
});
});
- describe("onData and onExit", () => {
- it("should allow subscribing and unsubscribing from data events", () => {
+ describe('onData and onExit', () => {
+ it('should allow subscribing and unsubscribing from data events', () => {
const callback = vi.fn();
const unsubscribe = service.onData(callback);
- expect(typeof unsubscribe).toBe("function");
+ expect(typeof unsubscribe).toBe('function');
unsubscribe();
});
- it("should allow subscribing and unsubscribing from exit events", () => {
+ it('should allow subscribing and unsubscribing from exit events', () => {
const callback = vi.fn();
const unsubscribe = service.onExit(callback);
- expect(typeof unsubscribe).toBe("function");
+ expect(typeof unsubscribe).toBe('function');
unsubscribe();
});
});
- describe("cleanup", () => {
- it("should clean up all sessions", () => {
+ describe('cleanup', () => {
+ it('should clean up all sessions', () => {
vi.mocked(fs.existsSync).mockReturnValue(true);
vi.mocked(fs.statSync).mockReturnValue({ isDirectory: () => true } as any);
- vi.spyOn(process, "env", "get").mockReturnValue({ SHELL: "/bin/bash" });
+ vi.spyOn(process, 'env', 'get').mockReturnValue({ SHELL: '/bin/bash' });
const session1 = service.createSession();
const session2 = service.createSession();
@@ -542,12 +550,12 @@ describe("terminal-service.ts", () => {
expect(service.getAllSessions()).toHaveLength(0);
});
- it("should handle cleanup errors gracefully", () => {
+ it('should handle cleanup errors gracefully', () => {
vi.mocked(fs.existsSync).mockReturnValue(true);
vi.mocked(fs.statSync).mockReturnValue({ isDirectory: () => true } as any);
- vi.spyOn(process, "env", "get").mockReturnValue({ SHELL: "/bin/bash" });
+ vi.spyOn(process, 'env', 'get').mockReturnValue({ SHELL: '/bin/bash' });
mockPtyProcess.kill.mockImplementation(() => {
- throw new Error("Kill failed");
+ throw new Error('Kill failed');
});
service.createSession();
@@ -556,8 +564,8 @@ describe("terminal-service.ts", () => {
});
});
- describe("getTerminalService", () => {
- it("should return singleton instance", () => {
+ describe('getTerminalService', () => {
+ it('should return singleton instance', () => {
const instance1 = getTerminalService();
const instance2 = getTerminalService();
diff --git a/apps/server/tests/utils/helpers.ts b/apps/server/tests/utils/helpers.ts
index 9daa99ecf..cf928f07a 100644
--- a/apps/server/tests/utils/helpers.ts
+++ b/apps/server/tests/utils/helpers.ts
@@ -24,7 +24,7 @@ export async function waitFor(
const start = Date.now();
while (!condition()) {
if (Date.now() - start > timeout) {
- throw new Error("Timeout waiting for condition");
+ throw new Error('Timeout waiting for condition');
}
await new Promise((resolve) => setTimeout(resolve, interval));
}
diff --git a/apps/server/tests/utils/mocks.ts b/apps/server/tests/utils/mocks.ts
index ce5b14575..380ac9fd4 100644
--- a/apps/server/tests/utils/mocks.ts
+++ b/apps/server/tests/utils/mocks.ts
@@ -3,10 +3,10 @@
* Provides reusable mocks for common dependencies
*/
-import { vi } from "vitest";
-import type { ChildProcess } from "child_process";
-import { EventEmitter } from "events";
-import type { Readable } from "stream";
+import { vi } from 'vitest';
+import type { ChildProcess } from 'child_process';
+import { EventEmitter } from 'events';
+import type { Readable } from 'stream';
/**
* Mock child_process.spawn for subprocess tests
@@ -31,19 +31,19 @@ export function createMockChildProcess(options: {
process.nextTick(() => {
// Emit stdout lines
for (const line of stdout) {
- mockProcess.stdout.emit("data", Buffer.from(line + "\n"));
+ mockProcess.stdout.emit('data', Buffer.from(line + '\n'));
}
// Emit stderr lines
for (const line of stderr) {
- mockProcess.stderr.emit("data", Buffer.from(line + "\n"));
+ mockProcess.stderr.emit('data', Buffer.from(line + '\n'));
}
// Emit exit or error
if (shouldError) {
- mockProcess.emit("error", new Error("Process error"));
+ mockProcess.emit('error', new Error('Process error'));
} else {
- mockProcess.emit("exit", exitCode);
+ mockProcess.emit('exit', exitCode);
}
});
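
For context on the mock above: `createMockChildProcess` emits the configured stdout/stderr lines and an exit (or error) event on `process.nextTick`. The sketch below shows how a test might consume it; the option names (`stdout`, `exitCode`) are inferred from the hunk body and should be treated as assumptions.

```typescript
// Hedged usage sketch for createMockChildProcess from tests/utils/mocks.ts.
import { it, expect } from 'vitest';
import { createMockChildProcess } from './utils/mocks';

it('collects stdout lines from a fake subprocess', async () => {
  const child = createMockChildProcess({
    stdout: ['line one', 'line two'], // emitted as Buffer chunks with trailing '\n'
    exitCode: 0,
  });

  const lines: string[] = [];
  child.stdout!.on('data', (chunk: Buffer) => lines.push(chunk.toString().trim()));

  // Emission is scheduled on process.nextTick, so yield once before asserting.
  await new Promise<void>((resolve) => process.nextTick(resolve));
  expect(lines).toEqual(['line one', 'line two']);
});
```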
diff --git a/apps/server/vitest.config.ts b/apps/server/vitest.config.ts
index aae12c787..b879365a9 100644
--- a/apps/server/vitest.config.ts
+++ b/apps/server/vitest.config.ts
@@ -1,20 +1,20 @@
-import { defineConfig } from "vitest/config";
-import path from "path";
+import { defineConfig } from 'vitest/config';
+import path from 'path';
export default defineConfig({
test: {
reporters: ['verbose'],
globals: true,
- environment: "node",
- setupFiles: ["./tests/setup.ts"],
+ environment: 'node',
+ setupFiles: ['./tests/setup.ts'],
coverage: {
- provider: "v8",
- reporter: ["text", "json", "html", "lcov"],
- include: ["src/**/*.ts"],
+ provider: 'v8',
+ reporter: ['text', 'json', 'html', 'lcov'],
+ include: ['src/**/*.ts'],
exclude: [
- "src/**/*.d.ts",
- "src/index.ts",
- "src/routes/**", // Routes are better tested with integration tests
+ 'src/**/*.d.ts',
+ 'src/index.ts',
+ 'src/routes/**', // Routes are better tested with integration tests
],
thresholds: {
// Increased thresholds to ensure better code quality
@@ -25,22 +25,28 @@ export default defineConfig({
statements: 60,
},
},
- include: ["tests/**/*.test.ts", "tests/**/*.spec.ts"],
- exclude: ["**/node_modules/**", "**/dist/**"],
+ include: ['tests/**/*.test.ts', 'tests/**/*.spec.ts'],
+ exclude: ['**/node_modules/**', '**/dist/**'],
mockReset: true,
restoreMocks: true,
clearMocks: true,
},
resolve: {
alias: {
- "@": path.resolve(__dirname, "./src"),
+ '@': path.resolve(__dirname, './src'),
// Resolve shared packages to source files for proper mocking in tests
- "@automaker/utils": path.resolve(__dirname, "../../libs/utils/src/index.ts"),
- "@automaker/platform": path.resolve(__dirname, "../../libs/platform/src/index.ts"),
- "@automaker/types": path.resolve(__dirname, "../../libs/types/src/index.ts"),
- "@automaker/model-resolver": path.resolve(__dirname, "../../libs/model-resolver/src/index.ts"),
- "@automaker/dependency-resolver": path.resolve(__dirname, "../../libs/dependency-resolver/src/index.ts"),
- "@automaker/git-utils": path.resolve(__dirname, "../../libs/git-utils/src/index.ts"),
+ '@automaker/utils': path.resolve(__dirname, '../../libs/utils/src/index.ts'),
+ '@automaker/platform': path.resolve(__dirname, '../../libs/platform/src/index.ts'),
+ '@automaker/types': path.resolve(__dirname, '../../libs/types/src/index.ts'),
+ '@automaker/model-resolver': path.resolve(
+ __dirname,
+ '../../libs/model-resolver/src/index.ts'
+ ),
+ '@automaker/dependency-resolver': path.resolve(
+ __dirname,
+ '../../libs/dependency-resolver/src/index.ts'
+ ),
+ '@automaker/git-utils': path.resolve(__dirname, '../../libs/git-utils/src/index.ts'),
},
},
});
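
The alias block in the vitest config above resolves the `@automaker/*` packages to their source entry points, which is what lets `vi.mock` intercept them in server tests (per the inline comment "for proper mocking in tests"). A minimal sketch of that effect; the `logger` export used here is a hypothetical example, not a documented export of `@automaker/utils`.

```typescript
// Sketch: with '@automaker/utils' aliased to ../../libs/utils/src/index.ts,
// vi.mock replaces the module before the import below is evaluated.
import { describe, it, expect, vi } from 'vitest';

vi.mock('@automaker/utils', () => ({
  logger: { info: vi.fn(), error: vi.fn() }, // hypothetical export
}));

import { logger } from '@automaker/utils';

describe('aliased package mocking', () => {
  it('uses the mocked implementation', () => {
    logger.info('hello');
    expect(logger.info).toHaveBeenCalledWith('hello');
  });
});
```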
diff --git a/apps/ui/docs/AGENT_ARCHITECTURE.md b/apps/ui/docs/AGENT_ARCHITECTURE.md
index ca5bee5e0..4c9f0d111 100644
--- a/apps/ui/docs/AGENT_ARCHITECTURE.md
+++ b/apps/ui/docs/AGENT_ARCHITECTURE.md
@@ -90,9 +90,9 @@ const {
clearHistory, // Clear conversation
error, // Error state
} = useElectronAgent({
- sessionId: "project_xyz",
- workingDirectory: "/path/to/project",
- onToolUse: (tool) => console.log("Using:", tool),
+ sessionId: 'project_xyz',
+ workingDirectory: '/path/to/project',
+ onToolUse: (tool) => console.log('Using:', tool),
});
```
@@ -160,7 +160,7 @@ Each session file contains:
Session IDs are generated from project paths:
```typescript
-const sessionId = `project_${projectPath.replace(/[^a-zA-Z0-9]/g, "_")}`;
+const sessionId = `project_${projectPath.replace(/[^a-zA-Z0-9]/g, '_')}`;
```
This ensures:
diff --git a/apps/ui/docs/SESSION_MANAGEMENT.md b/apps/ui/docs/SESSION_MANAGEMENT.md
index 9ca27867e..b4c5eac77 100644
--- a/apps/ui/docs/SESSION_MANAGEMENT.md
+++ b/apps/ui/docs/SESSION_MANAGEMENT.md
@@ -7,24 +7,28 @@ The Automaker Agent Chat now supports multiple concurrent sessions, allowing you
## Features
### ✨ Multiple Sessions
+
- Create unlimited agent sessions per project
- Each session has its own conversation history
- Switch between sessions instantly
- Sessions persist across app restarts
### 📋 Session Organization
+
- Custom names for easy identification
- Last message preview
- Message count tracking
- Sort by most recently updated
### 🗄️ Archive & Delete
+
- Archive old sessions to declutter
- Unarchive when needed
- Permanently delete sessions
- Confirm before destructive actions
### 💾 Automatic Persistence
+
- All sessions auto-save to disk
- Survive Next.js restarts
- Survive Electron app restarts
@@ -67,6 +71,7 @@ Click the panel icon in the header to show/hide the session manager.
4. The new session is immediately active
**Example session names:**
+
- "Feature: Dark Mode"
- "Bug: Login redirect"
- "Refactor: API layer"
@@ -93,6 +98,7 @@ Click the **"Clear"** button in the chat header to delete all messages from the
3. Toggle **"Show Archived"** to view archived sessions
**When to archive:**
+
- Completed features
- Resolved bugs
- Old experiments
@@ -117,16 +123,19 @@ Click the **"Clear"** button in the chat header to delete all messages from the
Sessions are stored in your user data directory:
**macOS:**
+
```
~/Library/Application Support/automaker/agent-sessions/
```
**Windows:**
+
```
%APPDATA%/automaker/agent-sessions/
```
**Linux:**
+
```
~/.config/automaker/agent-sessions/
```
@@ -215,12 +224,14 @@ Use prefixes to organize sessions by type:
### When to Create Multiple Sessions
**Do create separate sessions for:**
+
- ✅ Different features
- ✅ Unrelated bugs
- ✅ Experimental work
- ✅ Different contexts or approaches
**Don't create separate sessions for:**
+
- ❌ Same feature, different iterations
- ❌ Related bug fixes
- ❌ Continuation of previous work
@@ -272,7 +283,7 @@ Use prefixes to organize sessions by type:
## Keyboard Shortcuts
-*(Coming soon)*
+_(Coming soon)_
- `Cmd/Ctrl + K` - Create new session
- `Cmd/Ctrl + [` - Previous session
@@ -284,11 +295,13 @@ Use prefixes to organize sessions by type:
### Session Not Saving
**Check:**
+
- Electron has write permissions
- Disk space available
- Check Electron console for errors
**Solution:**
+
```bash
# macOS - Check permissions
ls -la ~/Library/Application\ Support/automaker/
@@ -300,11 +313,13 @@ chmod -R u+w ~/Library/Application\ Support/automaker/
### Can't Switch Sessions
**Check:**
+
- Session is not archived
- No errors in console
- Agent is not currently processing
**Solution:**
+
- Wait for current message to complete
- Check for error messages
- Try clearing and reloading
@@ -312,11 +327,13 @@ chmod -R u+w ~/Library/Application\ Support/automaker/
### Session Disappeared
**Check:**
+
- Not filtered by archive status
- Not accidentally deleted
- Check backup files
**Recovery:**
+
- Toggle "Show Archived"
- Check filesystem for `.json` files
- Restore from backup if available
@@ -326,15 +343,17 @@ chmod -R u+w ~/Library/Application\ Support/automaker/
For developers integrating session management:
### Create Session
+
```typescript
const result = await window.electronAPI.sessions.create(
- "Session Name",
- "/project/path",
- "/working/directory"
+ 'Session Name',
+ '/project/path',
+ '/working/directory'
);
```
### List Sessions
+
```typescript
const { sessions } = await window.electronAPI.sessions.list(
false // includeArchived
@@ -342,21 +361,20 @@ const { sessions } = await window.electronAPI.sessions.list(
```
### Update Session
+
```typescript
-await window.electronAPI.sessions.update(
- sessionId,
- "New Name",
- ["tag1", "tag2"]
-);
+await window.electronAPI.sessions.update(sessionId, 'New Name', ['tag1', 'tag2']);
```
### Archive/Unarchive
+
```typescript
await window.electronAPI.sessions.archive(sessionId);
await window.electronAPI.sessions.unarchive(sessionId);
```
### Delete Session
+
```typescript
await window.electronAPI.sessions.delete(sessionId);
```
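
The SESSION_MANAGEMENT.md hunks above document each `window.electronAPI.sessions` call separately. A combined sketch is shown below; the call signatures mirror the doc, but the session field names (`id`, `updatedAt`) and the shape returned by `create()` are assumptions, and the `electronAPI` typing is assumed to come from the app's preload declarations.

```typescript
// Hedged end-to-end sketch of the sessions API documented above.
async function archiveStaleSessions(): Promise<void> {
  await window.electronAPI.sessions.create(
    'Feature: Dark Mode',
    '/project/path',
    '/working/directory'
  );

  const { sessions } = await window.electronAPI.sessions.list(false /* includeArchived */);

  const thirtyDaysMs = 30 * 24 * 60 * 60 * 1000;
  for (const session of sessions) {
    // `session.id` and `session.updatedAt` are assumed field names.
    if (Date.now() - new Date(session.updatedAt).getTime() > thirtyDaysMs) {
      await window.electronAPI.sessions.archive(session.id);
    }
  }
}
```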
diff --git a/apps/ui/eslint.config.mjs b/apps/ui/eslint.config.mjs
index 0b7d6f0e0..d7bc54d48 100644
--- a/apps/ui/eslint.config.mjs
+++ b/apps/ui/eslint.config.mjs
@@ -1,111 +1,111 @@
-import { defineConfig, globalIgnores } from "eslint/config";
-import js from "@eslint/js";
-import ts from "@typescript-eslint/eslint-plugin";
-import tsParser from "@typescript-eslint/parser";
+import { defineConfig, globalIgnores } from 'eslint/config';
+import js from '@eslint/js';
+import ts from '@typescript-eslint/eslint-plugin';
+import tsParser from '@typescript-eslint/parser';
const eslintConfig = defineConfig([
js.configs.recommended,
{
- files: ["**/*.mjs", "**/*.cjs"],
+ files: ['**/*.mjs', '**/*.cjs'],
languageOptions: {
globals: {
- console: "readonly",
- process: "readonly",
- require: "readonly",
- __dirname: "readonly",
- __filename: "readonly",
+ console: 'readonly',
+ process: 'readonly',
+ require: 'readonly',
+ __dirname: 'readonly',
+ __filename: 'readonly',
},
},
},
{
- files: ["**/*.ts", "**/*.tsx"],
+ files: ['**/*.ts', '**/*.tsx'],
languageOptions: {
parser: tsParser,
parserOptions: {
- ecmaVersion: "latest",
- sourceType: "module",
+ ecmaVersion: 'latest',
+ sourceType: 'module',
},
globals: {
// Browser/DOM APIs
- window: "readonly",
- document: "readonly",
- navigator: "readonly",
- Navigator: "readonly",
- localStorage: "readonly",
- sessionStorage: "readonly",
- fetch: "readonly",
- WebSocket: "readonly",
- File: "readonly",
- FileList: "readonly",
- FileReader: "readonly",
- Blob: "readonly",
- atob: "readonly",
- crypto: "readonly",
- prompt: "readonly",
- confirm: "readonly",
- getComputedStyle: "readonly",
- requestAnimationFrame: "readonly",
+ window: 'readonly',
+ document: 'readonly',
+ navigator: 'readonly',
+ Navigator: 'readonly',
+ localStorage: 'readonly',
+ sessionStorage: 'readonly',
+ fetch: 'readonly',
+ WebSocket: 'readonly',
+ File: 'readonly',
+ FileList: 'readonly',
+ FileReader: 'readonly',
+ Blob: 'readonly',
+ atob: 'readonly',
+ crypto: 'readonly',
+ prompt: 'readonly',
+ confirm: 'readonly',
+ getComputedStyle: 'readonly',
+ requestAnimationFrame: 'readonly',
// DOM Element Types
- HTMLElement: "readonly",
- HTMLInputElement: "readonly",
- HTMLDivElement: "readonly",
- HTMLButtonElement: "readonly",
- HTMLSpanElement: "readonly",
- HTMLTextAreaElement: "readonly",
- HTMLHeadingElement: "readonly",
- HTMLParagraphElement: "readonly",
- HTMLImageElement: "readonly",
- Element: "readonly",
+ HTMLElement: 'readonly',
+ HTMLInputElement: 'readonly',
+ HTMLDivElement: 'readonly',
+ HTMLButtonElement: 'readonly',
+ HTMLSpanElement: 'readonly',
+ HTMLTextAreaElement: 'readonly',
+ HTMLHeadingElement: 'readonly',
+ HTMLParagraphElement: 'readonly',
+ HTMLImageElement: 'readonly',
+ Element: 'readonly',
// Event Types
- Event: "readonly",
- KeyboardEvent: "readonly",
- DragEvent: "readonly",
- PointerEvent: "readonly",
- CustomEvent: "readonly",
- ClipboardEvent: "readonly",
- WheelEvent: "readonly",
- DataTransfer: "readonly",
+ Event: 'readonly',
+ KeyboardEvent: 'readonly',
+ DragEvent: 'readonly',
+ PointerEvent: 'readonly',
+ CustomEvent: 'readonly',
+ ClipboardEvent: 'readonly',
+ WheelEvent: 'readonly',
+ DataTransfer: 'readonly',
// Web APIs
- ResizeObserver: "readonly",
- AbortSignal: "readonly",
- Audio: "readonly",
- ScrollBehavior: "readonly",
+ ResizeObserver: 'readonly',
+ AbortSignal: 'readonly',
+ Audio: 'readonly',
+ ScrollBehavior: 'readonly',
// Timers
- setTimeout: "readonly",
- setInterval: "readonly",
- clearTimeout: "readonly",
- clearInterval: "readonly",
+ setTimeout: 'readonly',
+ setInterval: 'readonly',
+ clearTimeout: 'readonly',
+ clearInterval: 'readonly',
// Node.js (for scripts and Electron)
- process: "readonly",
- require: "readonly",
- __dirname: "readonly",
- __filename: "readonly",
- NodeJS: "readonly",
+ process: 'readonly',
+ require: 'readonly',
+ __dirname: 'readonly',
+ __filename: 'readonly',
+ NodeJS: 'readonly',
// React
- React: "readonly",
- JSX: "readonly",
+ React: 'readonly',
+ JSX: 'readonly',
// Electron
- Electron: "readonly",
+ Electron: 'readonly',
// Console
- console: "readonly",
+ console: 'readonly',
},
},
plugins: {
- "@typescript-eslint": ts,
+ '@typescript-eslint': ts,
},
rules: {
...ts.configs.recommended.rules,
- "@typescript-eslint/no-unused-vars": ["warn", { argsIgnorePattern: "^_" }],
- "@typescript-eslint/no-explicit-any": "warn",
+ '@typescript-eslint/no-unused-vars': ['warn', { argsIgnorePattern: '^_' }],
+ '@typescript-eslint/no-explicit-any': 'warn',
},
},
globalIgnores([
- "dist/**",
- "dist-electron/**",
- "node_modules/**",
- "server-bundle/**",
- "release/**",
- "src/routeTree.gen.ts",
+ 'dist/**',
+ 'dist-electron/**',
+ 'node_modules/**',
+ 'server-bundle/**',
+ 'release/**',
+ 'src/routeTree.gen.ts',
]),
]);
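
As a small illustration of the `@typescript-eslint/no-unused-vars` setting above: with `argsIgnorePattern: '^_'`, underscore-prefixed parameters are exempt, so only the non-prefixed unused argument below would be expected to produce a warning. The function names are made up for the example.

```typescript
// Expected under the config above: no warning for `_event`,
// a "defined but never used" warning for `unusedArg`.
export function handler(_event: unknown, payload: string): string {
  return payload.toUpperCase();
}

export function reducer(state: number, unusedArg: string): number {
  return state + 1;
}
```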
diff --git a/apps/ui/index.html b/apps/ui/index.html
index 02087b013..49a7aa1ee 100644
--- a/apps/ui/index.html
+++ b/apps/ui/index.html
@@ -8,7 +8,7 @@