Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
176 changes: 176 additions & 0 deletions src/action/main.ts
Original file line number Diff line number Diff line change
Expand Up @@ -2,9 +2,12 @@ import { readFileSync, appendFileSync } from 'node:fs';
import { dirname, join } from 'node:path';
import { Octokit } from '@octokit/rest';
import { loadWardenConfig, resolveTrigger, type ResolvedTrigger } from '../config/loader.js';
import type { ScheduleConfig } from '../config/schema.js';
import { buildEventContext } from '../event/context.js';
import { buildScheduleEventContext } from '../event/schedule-context.js';
import { runSkill } from '../sdk/runner.js';
import { renderSkillReport } from '../output/renderer.js';
import { createOrUpdateIssue, createFixPR } from '../output/github-issues.js';
import { matchTrigger, shouldFail, countFindingsAtOrAbove, countSeverity } from '../triggers/matcher.js';
import { resolveSkillAsync } from '../skills/loader.js';
import type { EventContext, SkillReport } from '../types/index.js';
Expand Down Expand Up @@ -132,6 +135,174 @@ async function postReviewToGitHub(
}
}

/**
 * Look up a repository's default branch via the GitHub REST API.
 *
 * @param octokit - Authenticated Octokit client.
 * @param owner - Repository owner login.
 * @param repo - Repository name.
 * @returns The default branch name (e.g. "main").
 */
async function getDefaultBranchFromAPI(
  octokit: Octokit,
  owner: string,
  repo: string
): Promise<string> {
  const response = await octokit.repos.get({ owner, repo });
  return response.data.default_branch;
}

/**
 * Handle scheduled analysis events.
 *
 * Runs every configured `schedule` trigger against the checked-out repo,
 * posts findings to a tracking issue (optionally opening a fix PR), and
 * sets the standard action outputs. A trigger that throws is logged as a
 * warning and does not stop the remaining triggers; the action only fails
 * at the end, based on each trigger's `failOn` threshold.
 *
 * @param octokit - Authenticated Octokit client.
 * @param inputs - Parsed action inputs (config path, API keys, failOn).
 * @param repoPath - Checkout root (GITHUB_WORKSPACE).
 */
async function runScheduledAnalysis(
  octokit: Octokit,
  inputs: ActionInputs,
  repoPath: string
): Promise<void> {
  logGroup('Loading configuration');
  console.log(`Config path: ${inputs.configPath}`);
  logGroupEnd();

  const configFullPath = join(repoPath, inputs.configPath);
  const config = loadWardenConfig(dirname(configFullPath));

  // No schedule triggers is not an error: emit zeroed outputs and exit cleanly.
  const scheduleTriggers = config.triggers.filter((t) => t.event === 'schedule');
  if (scheduleTriggers.length === 0) {
    console.log('No schedule triggers configured');
    setOutput('findings-count', 0);
    setOutput('critical-count', 0);
    setOutput('high-count', 0);
    setOutput('summary', 'No schedule triggers configured');
    return;
  }

  // Get repo info from environment.
  // FIX: each validation failure must `return` — setFailed() does not halt
  // execution, and falling through would dereference an undefined value
  // (e.g. `githubRepository.split('/')`) and crash with a TypeError.
  const githubRepository = process.env['GITHUB_REPOSITORY'];
  if (!githubRepository) {
    setFailed('GITHUB_REPOSITORY environment variable not set');
    return;
  }
  const [owner, repo] = githubRepository.split('/');
  if (!owner || !repo) {
    setFailed('Invalid GITHUB_REPOSITORY format');
    return;
  }

  const headSha = process.env['GITHUB_SHA'] ?? '';
  if (!headSha) {
    setFailed('GITHUB_SHA environment variable not set');
    return;
  }

  const defaultBranch = await getDefaultBranchFromAPI(octokit, owner, repo);

  logGroup('Processing schedule triggers');
  for (const trigger of scheduleTriggers) {
    console.log(`- ${trigger.name}: ${trigger.skill}`);
  }
  logGroupEnd();

  const allReports: SkillReport[] = [];
  let totalFindings = 0;
  const failureReasons: string[] = [];
  let shouldFailAction = false;

  // Process each schedule trigger independently.
  for (const trigger of scheduleTriggers) {
    const resolved = resolveTrigger(trigger, config);
    logGroup(`Running trigger: ${trigger.name} (skill: ${resolved.skill})`);

    try {
      // Build a synthetic event context from the trigger's paths filter.
      const patterns = resolved.filters?.paths ?? ['**/*'];
      const ignorePatterns = resolved.filters?.ignorePaths;

      const context = await buildScheduleEventContext({
        patterns,
        ignorePatterns,
        repoPath,
        owner,
        name: repo,
        defaultBranch,
        headSha,
      });

      // Nothing matched — skip this trigger without failing.
      if (!context.pullRequest?.files.length) {
        console.log(`No files match trigger ${trigger.name}`);
        logGroupEnd();
        continue;
      }

      console.log(`Found ${context.pullRequest.files.length} files matching patterns`);

      // Run the trigger's skill over the matched files.
      const skill = await resolveSkillAsync(resolved.skill, repoPath, config.skills);
      const report = await runSkill(skill, context, {
        apiKey: inputs.anthropicApiKey,
        model: resolved.model,
      });
      console.log(`Found ${report.findings.length} findings`);

      allReports.push(report);
      totalFindings += report.findings.length;

      // Create/update the tracking issue with this trigger's findings.
      const scheduleConfig: Partial<ScheduleConfig> = trigger.schedule ?? {};
      const issueTitle = scheduleConfig.issueTitle ?? `Warden: ${trigger.name}`;

      const issueResult = await createOrUpdateIssue(octokit, owner, repo, [report], {
        title: issueTitle,
        labels: resolved.output?.labels,
        commitSha: headSha,
      });

      if (issueResult) {
        console.log(`${issueResult.created ? 'Created' : 'Updated'} issue #${issueResult.issueNumber}`);
        console.log(`Issue URL: ${issueResult.issueUrl}`);
      }

      // Optionally open a PR applying any suggested fixes.
      if (scheduleConfig.createFixPR) {
        const fixResult = await createFixPR(octokit, owner, repo, report.findings, {
          branchPrefix: scheduleConfig.fixBranchPrefix ?? 'warden-fix',
          baseBranch: defaultBranch,
          baseSha: headSha,
          repoPath,
          triggerName: trigger.name,
        });

        if (fixResult) {
          console.log(`Created fix PR #${fixResult.prNumber} with ${fixResult.fixCount} fixes`);
          console.log(`PR URL: ${fixResult.prUrl}`);
        }
      }

      // Record (but defer) failure so remaining triggers still run.
      const failOn = resolved.output?.failOn ?? inputs.failOn;
      if (failOn && shouldFail(report, failOn)) {
        shouldFailAction = true;
        const count = countFindingsAtOrAbove(report, failOn);
        failureReasons.push(`${trigger.name}: Found ${count} ${failOn}+ severity issues`);
      }

      logGroupEnd();
    } catch (error) {
      // A single broken trigger is a warning, not a hard failure.
      console.error(`::warning::Trigger ${trigger.name} failed: ${error}`);
      logGroupEnd();
    }
  }

  // Set aggregate outputs across all triggers.
  const criticalCount = countSeverity(allReports, 'critical');
  const highCount = countSeverity(allReports, 'high');

  setOutput('findings-count', totalFindings);
  setOutput('critical-count', criticalCount);
  setOutput('high-count', highCount);
  setOutput('summary', allReports.map((r) => r.summary).join('\n') || 'Scheduled analysis complete');

  if (shouldFailAction) {
    setFailed(failureReasons.join('; '));
  }

  console.log(`\nScheduled analysis complete: ${totalFindings} total findings`);
}

async function run(): Promise<void> {
const inputs = getInputs();

Expand All @@ -151,6 +322,11 @@ async function run(): Promise<void> {

const octokit = new Octokit({ auth: inputs.githubToken });

// Route schedule events to dedicated handler
if (eventName === 'schedule' || eventName === 'workflow_dispatch') {
return runScheduledAnalysis(octokit, inputs, repoPath);
}

let eventPayload: unknown;
try {
eventPayload = JSON.parse(readFileSync(eventPath, 'utf-8'));
Expand Down
44 changes: 41 additions & 3 deletions src/config/schema.ts
Original file line number Diff line number Diff line change
Expand Up @@ -47,17 +47,55 @@ export const OutputConfigSchema = z.object({
});
export type OutputConfig = z.infer<typeof OutputConfigSchema>;

// Schedule-specific configuration.
// Only consumed when a trigger's `event` is 'schedule' (see TriggerSchema).
export const ScheduleConfigSchema = z.object({
  /** Title for the tracking issue (default: "Warden: {triggerName}") */
  issueTitle: z.string().optional(),
  /** Create PR with fixes when suggestedFix is available (default: false) */
  createFixPR: z.boolean().default(false),
  /** Branch prefix for fix PRs (default: "warden-fix") */
  fixBranchPrefix: z.string().default('warden-fix'),
});
export type ScheduleConfig = z.infer<typeof ScheduleConfigSchema>;

// Trigger definition
export const TriggerSchema = z.object({
name: z.string().min(1),
event: z.enum(['pull_request', 'issues', 'issue_comment']),
actions: z.array(z.string()).min(1),
event: z.enum(['pull_request', 'issues', 'issue_comment', 'schedule']),
/** Actions to trigger on. Required for all events except 'schedule'. */
actions: z.array(z.string()).min(1).optional(),
skill: z.string().min(1),
filters: PathFilterSchema.optional(),
output: OutputConfigSchema.optional(),
/** Model to use for this trigger (e.g., 'claude-sonnet-4-20250514'). Uses SDK default if not specified. */
model: z.string().optional(),
});
/** Schedule-specific configuration. Only used when event is 'schedule'. */
schedule: ScheduleConfigSchema.optional(),
}).refine(
(data) => {
// actions is required unless event is 'schedule'
if (data.event !== 'schedule') {
return data.actions !== undefined && data.actions.length > 0;
}
return true;
},
{
message: "actions is required for non-schedule events",
path: ["actions"],
}
).refine(
(data) => {
// paths filter is required for schedule events
if (data.event === 'schedule') {
return data.filters?.paths !== undefined && data.filters.paths.length > 0;
}
return true;
},
{
message: "filters.paths is required for schedule events",
path: ["filters", "paths"],
}
);
export type Trigger = z.infer<typeof TriggerSchema>;

// Runner configuration
Expand Down
1 change: 1 addition & 0 deletions src/event/index.ts
Original file line number Diff line number Diff line change
@@ -1 +1,2 @@
export * from './context.js';
export * from './schedule-context.js';
101 changes: 101 additions & 0 deletions src/event/schedule-context.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,101 @@
import type { EventContext, FileChange } from '../types/index.js';
import { expandAndCreateFileChanges } from '../cli/files.js';
import { matchGlob } from '../triggers/matcher.js';

/** Options for building a synthetic EventContext for scheduled runs. */
export interface ScheduleContextOptions {
  /** Glob patterns from trigger's filters.paths */
  patterns: string[];
  /** Glob patterns from trigger's filters.ignorePaths */
  ignorePatterns?: string[];
  /** Repository root path (GITHUB_WORKSPACE) */
  repoPath: string;
  /** Repository owner (from GITHUB_REPOSITORY) */
  owner: string;
  /** Repository name */
  name: string;
  /** Default branch name */
  defaultBranch: string;
  /** Current commit SHA */
  headSha: string;
}

/**
 * Build an EventContext for scheduled runs.
 *
 * Scheduled runs have no real pull request, so this synthesizes one from
 * the trigger's glob patterns: every matching file becomes a FileChange
 * carrying full file content as its patch, which lets the runner process
 * the context exactly like a PR diff.
 *
 * @param options - Repo identity, commit info, and glob patterns.
 * @returns An EventContext with a synthetic `pullRequest` (number 0).
 */
export async function buildScheduleEventContext(
  options: ScheduleContextOptions
): Promise<EventContext> {
  const {
    patterns,
    ignorePatterns,
    repoPath,
    owner,
    name,
    defaultBranch,
    headSha,
  } = options;

  // Expand the globs into FileChange objects (full content as patch).
  const expanded = await expandAndCreateFileChanges(patterns, repoPath);

  // Drop anything hit by an ignore pattern.
  const ignores = ignorePatterns ?? [];
  const files =
    ignores.length === 0
      ? expanded
      : expanded.filter(
          (file) => !ignores.some((pattern) => matchGlob(pattern, file.filename))
        );

  return {
    eventType: 'schedule',
    action: 'scheduled',
    repository: {
      owner,
      name,
      fullName: `${owner}/${name}`,
      defaultBranch,
    },
    // Synthetic PR so downstream skill runners need no special-casing.
    pullRequest: {
      number: 0, // no real PR exists for a scheduled run
      title: 'Scheduled Analysis',
      body: null,
      author: 'warden',
      baseBranch: defaultBranch,
      headBranch: defaultBranch,
      headSha,
      files,
    },
    repoPath,
  };
}

/**
 * Filter file changes to only those matching the given glob patterns,
 * minus any matching the ignore patterns.
 *
 * Used when a schedule trigger has specific path filters.
 *
 * @param files - Candidate file changes.
 * @param patterns - Globs a file must match at least one of.
 * @param ignorePatterns - Globs that exclude a file even if it matched.
 * @returns The filtered list (input array is not mutated).
 */
export function filterFilesByPatterns(
  files: FileChange[],
  patterns: string[],
  ignorePatterns?: string[]
): FileChange[] {
  const ignores = ignorePatterns ?? [];
  return files.filter((file) => {
    const included = patterns.some((pattern) => matchGlob(pattern, file.filename));
    if (!included) {
      return false;
    }
    return !ignores.some((pattern) => matchGlob(pattern, file.filename));
  });
}
Loading