37 changes: 36 additions & 1 deletion pkg/cli/logs_command.go
@@ -38,6 +38,39 @@ Downloaded artifacts include:
- agent-stdio.log: Agent standard output/error logs
- aw.patch: Git patch of changes made during execution
- workflow-logs/: GitHub Actions workflow run logs (job logs organized in subdirectory)
- summary.json: Complete metrics and run data for all downloaded runs

Campaign Orchestrator Usage:
In a campaign orchestrator workflow, use this command in a pre-step to download logs,
then access the data in subsequent steps without needing GitHub CLI access:

steps:
- name: Download logs from last 30 days
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: |
mkdir -p /tmp/portfolio-logs
gh aw logs <worker> --start-date -1mo -o /tmp/portfolio-logs

In your analysis step, reference the pre-downloaded data:

**All workflow execution data has been pre-downloaded for you in the previous workflow step.**

- **JSON Summary**: /tmp/portfolio-logs/summary.json - Contains all metrics and run data you need
- **Run Logs**: /tmp/portfolio-logs/run-{database-id}/ - Individual run logs (if needed for detailed analysis)

**DO NOT call 'gh aw logs' or any GitHub CLI commands** - they will not work in your environment.
All data you need is in the summary.json file.

Live Tracking with Project Boards:
Use the summary.json data to update your campaign project board, treating issues/PRs (workers)
on the board as the real-time view of progress, ownership, and status. The orchestrator workflow
can use the 'update-project' safe output to sync status fields without modifying worker workflow
files. Workers remain unchanged while the campaign board reflects current execution state.

For incremental updates, pull data for each worker based on the last pull time using --start-date
(e.g., --start-date -1d for daily updates) and align with existing board items. Compare run data
from summary.json with board status to update only changed workers, preserving board state.

` + WorkflowIDExplanation + `

@@ -123,6 +156,7 @@ Examples:
timeout, _ := cmd.Flags().GetInt("timeout")
repoOverride, _ := cmd.Flags().GetString("repo")
campaignOnly, _ := cmd.Flags().GetBool("campaign")
summaryFile, _ := cmd.Flags().GetString("summary-file")

// Resolve relative dates to absolute dates for GitHub CLI
now := time.Now()
@@ -150,7 +184,7 @@
}
}

return DownloadWorkflowLogs(workflowName, count, startDate, endDate, outputDir, engine, ref, beforeRunID, afterRunID, repoOverride, verbose, toolGraph, noStaged, firewallOnly, noFirewall, parse, jsonOutput, timeout, campaignOnly)
return DownloadWorkflowLogs(workflowName, count, startDate, endDate, outputDir, engine, ref, beforeRunID, afterRunID, repoOverride, verbose, toolGraph, noStaged, firewallOnly, noFirewall, parse, jsonOutput, timeout, campaignOnly, summaryFile)
},
}

@@ -172,6 +206,7 @@ Examples:
logsCmd.Flags().Bool("parse", false, "Run JavaScript parsers on agent logs and firewall logs, writing Markdown to log.md and firewall.md")
addJSONFlag(logsCmd)
logsCmd.Flags().Int("timeout", 0, "Download timeout in seconds (0 = no timeout)")
logsCmd.Flags().String("summary-file", "summary.json", "Path to write the summary JSON file relative to output directory (use empty string to disable)")
logsCmd.MarkFlagsMutuallyExclusive("firewall", "no-firewall")

// Register completions for logs command
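A minimal usage sketch of the new flag: --summary-file and its empty-string disable behavior come from this diff, --start-date and -o appear in the existing help text, and my-worker is a placeholder workflow name.

# Write metrics for the last week to ./logs/metrics.json
gh aw logs my-worker --start-date -7d -o ./logs --summary-file metrics.json

# Disable the summary file entirely
gh aw logs my-worker -o ./logs --summary-file ""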
12 changes: 10 additions & 2 deletions pkg/cli/logs_orchestrator.go
@@ -29,8 +29,8 @@ import (
var logsOrchestratorLog = logger.New("cli:logs_orchestrator")

// DownloadWorkflowLogs downloads and analyzes workflow logs with metrics
func DownloadWorkflowLogs(workflowName string, count int, startDate, endDate, outputDir, engine, ref string, beforeRunID, afterRunID int64, repoOverride string, verbose bool, toolGraph bool, noStaged bool, firewallOnly bool, noFirewall bool, parse bool, jsonOutput bool, timeout int, campaignOnly bool) error {
logsOrchestratorLog.Printf("Starting workflow log download: workflow=%s, count=%d, startDate=%s, endDate=%s, outputDir=%s, campaignOnly=%v", workflowName, count, startDate, endDate, outputDir, campaignOnly)
func DownloadWorkflowLogs(workflowName string, count int, startDate, endDate, outputDir, engine, ref string, beforeRunID, afterRunID int64, repoOverride string, verbose bool, toolGraph bool, noStaged bool, firewallOnly bool, noFirewall bool, parse bool, jsonOutput bool, timeout int, campaignOnly bool, summaryFile string) error {
logsOrchestratorLog.Printf("Starting workflow log download: workflow=%s, count=%d, startDate=%s, endDate=%s, outputDir=%s, campaignOnly=%v, summaryFile=%s", workflowName, count, startDate, endDate, outputDir, campaignOnly, summaryFile)
if verbose {
fmt.Fprintln(os.Stderr, console.FormatInfoMessage("Fetching workflow runs from GitHub Actions..."))
}
@@ -418,6 +418,14 @@ func DownloadWorkflowLogs(workflowName string, count int, startDate, endDate, ou
// Build structured logs data
logsData := buildLogsData(processedRuns, outputDir, continuation)

// Write summary file if requested (default behavior unless disabled with empty string)
if summaryFile != "" {
summaryPath := filepath.Join(outputDir, summaryFile)
if err := writeSummaryFile(summaryPath, logsData, verbose); err != nil {
return fmt.Errorf("failed to write summary file: %w", err)
}
}

// Render output based on format preference
if jsonOutput {
if err := renderLogsJSON(logsData); err != nil {
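The summary path is built with filepath.Join(outputDir, summaryFile), so the flag is always resolved beneath the output directory. A standalone sketch of that join behavior (standard library semantics, not part of this change):

package main

import (
	"fmt"
	"path/filepath"
)

func main() {
	// Relative values land inside the output directory...
	fmt.Println(filepath.Join("/tmp/portfolio-logs", "summary.json"))     // /tmp/portfolio-logs/summary.json
	fmt.Println(filepath.Join("/tmp/portfolio-logs", "reports/sum.json")) // /tmp/portfolio-logs/reports/sum.json
	// ...and Join cleans a leading slash, so even an absolute-looking value stays there.
	fmt.Println(filepath.Join("/tmp/portfolio-logs", "/abs/summary.json")) // /tmp/portfolio-logs/abs/summary.json
}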
40 changes: 40 additions & 0 deletions pkg/cli/logs_report.go
@@ -730,6 +730,46 @@ func renderLogsJSON(data LogsData) error {
return encoder.Encode(data)
}

// writeSummaryFile writes the logs data to a JSON file
// This file contains complete metrics and run data for all downloaded workflow runs.
// It's primarily designed for campaign orchestrators to access workflow execution data
// in subsequent steps without needing GitHub CLI access.
//
// The summary file includes:
// - Aggregate metrics (total runs, tokens, costs, errors, warnings)
// - Individual run details with metrics and metadata
// - Tool usage statistics
// - Error and warning summaries
// - Network access logs (if available)
// - Firewall logs (if available)
func writeSummaryFile(path string, data LogsData, verbose bool) error {
reportLog.Printf("Writing summary file: path=%s, runs=%d", path, data.Summary.TotalRuns)

// Create parent directory if it doesn't exist
dir := filepath.Dir(path)
if err := os.MkdirAll(dir, 0755); err != nil {
return fmt.Errorf("failed to create directory for summary file: %w", err)
}

// Marshal to JSON with indentation for readability
jsonData, err := json.MarshalIndent(data, "", " ")
if err != nil {
return fmt.Errorf("failed to marshal logs data to JSON: %w", err)
}

// Write to file
if err := os.WriteFile(path, jsonData, 0644); err != nil {
return fmt.Errorf("failed to write summary file: %w", err)
}

if verbose {
fmt.Fprintln(os.Stderr, console.FormatSuccessMessage(fmt.Sprintf("Wrote summary to %s", path)))
}

reportLog.Printf("Successfully wrote summary file: %s", path)
return nil
}

// renderLogsConsole outputs the logs data as formatted console output
func renderLogsConsole(data LogsData) {
reportLog.Printf("Rendering logs data to console: %d runs, %d errors, %d warnings",
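Since the summary file exists so an orchestrator step can work without the GitHub CLI, a downstream consumer only needs standard JSON decoding. A minimal sketch, assuming the file sits at the path used in the help text above; the JSON key names are not pinned down by this diff, so the data is decoded generically rather than into LogsData:

package main

import (
	"encoding/json"
	"fmt"
	"os"
)

func main() {
	raw, err := os.ReadFile("/tmp/portfolio-logs/summary.json")
	if err != nil {
		fmt.Fprintln(os.Stderr, "read summary:", err)
		os.Exit(1)
	}

	// Decode into a generic map so no assumptions are made about field tags.
	var summary map[string]any
	if err := json.Unmarshal(raw, &summary); err != nil {
		fmt.Fprintln(os.Stderr, "parse summary:", err)
		os.Exit(1)
	}

	for key := range summary {
		fmt.Println("top-level key:", key)
	}
}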
137 changes: 137 additions & 0 deletions pkg/cli/logs_summary_file_test.go
@@ -0,0 +1,137 @@
package cli

import (
"encoding/json"
"os"
"path/filepath"
"testing"
"time"
)

// TestWriteSummaryFile tests the writeSummaryFile function
func TestWriteSummaryFile(t *testing.T) {
// Create a temporary directory for testing
tmpDir := t.TempDir()
summaryPath := filepath.Join(tmpDir, "test-summary.json")

// Create sample logs data
logsData := LogsData{
Summary: LogsSummary{
TotalRuns: 3,
TotalDuration: "1h30m",
TotalTokens: 15000,
TotalCost: 2.50,
TotalTurns: 25,
TotalErrors: 2,
TotalWarnings: 5,
TotalMissingTools: 1,
},
Runs: []RunData{
{
DatabaseID: 12345,
Number: 1,
WorkflowName: "Test Workflow",
Agent: "copilot",
Status: "completed",
Conclusion: "success",
Duration: "30m",
TokenUsage: 5000,
EstimatedCost: 0.75,
Turns: 10,
ErrorCount: 0,
WarningCount: 2,
MissingToolCount: 0,
CreatedAt: time.Now(),
URL: "https://github.com/owner/repo/actions/runs/12345",
LogsPath: "/tmp/logs/run-12345",
},
},
LogsLocation: tmpDir,
}

// Test writing summary file
err := writeSummaryFile(summaryPath, logsData, false)
if err != nil {
t.Fatalf("Failed to write summary file: %v", err)
}

// Verify file was created
if _, err := os.Stat(summaryPath); os.IsNotExist(err) {
t.Fatal("Summary file was not created")
}

// Read and verify the content
data, err := os.ReadFile(summaryPath)
if err != nil {
t.Fatalf("Failed to read summary file: %v", err)
}

// Parse the JSON to verify it's valid
var parsedData LogsData
if err := json.Unmarshal(data, &parsedData); err != nil {
t.Fatalf("Failed to parse summary JSON: %v", err)
}

// Verify key fields
if parsedData.Summary.TotalRuns != logsData.Summary.TotalRuns {
t.Errorf("Expected TotalRuns %d, got %d", logsData.Summary.TotalRuns, parsedData.Summary.TotalRuns)
}
if parsedData.Summary.TotalTokens != logsData.Summary.TotalTokens {
t.Errorf("Expected TotalTokens %d, got %d", logsData.Summary.TotalTokens, parsedData.Summary.TotalTokens)
}
if len(parsedData.Runs) != len(logsData.Runs) {
t.Errorf("Expected %d runs, got %d", len(logsData.Runs), len(parsedData.Runs))
}
if len(parsedData.Runs) > 0 {
if parsedData.Runs[0].DatabaseID != logsData.Runs[0].DatabaseID {
t.Errorf("Expected DatabaseID %d, got %d", logsData.Runs[0].DatabaseID, parsedData.Runs[0].DatabaseID)
}
}
}

// TestWriteSummaryFileCreatesDirectory tests that parent directory is created
func TestWriteSummaryFileCreatesDirectory(t *testing.T) {
// Create a temporary directory for testing
tmpDir := t.TempDir()
summaryPath := filepath.Join(tmpDir, "subdir", "nested", "summary.json")

// Create minimal logs data
logsData := LogsData{
Summary: LogsSummary{
TotalRuns: 1,
},
Runs: []RunData{},
LogsLocation: tmpDir,
}

// Test writing summary file (should create nested directories)
err := writeSummaryFile(summaryPath, logsData, false)
if err != nil {
t.Fatalf("Failed to write summary file: %v", err)
}

// Verify file was created
if _, err := os.Stat(summaryPath); os.IsNotExist(err) {
t.Fatal("Summary file was not created")
}

// Verify nested directories were created
dir := filepath.Dir(summaryPath)
if _, err := os.Stat(dir); os.IsNotExist(err) {
t.Fatal("Parent directories were not created")
}
}

// TestSummaryFileDisabling documents that an empty summary-file path skips writing
func TestSummaryFileDisabling(t *testing.T) {
// This test verifies the behavior when summaryFile is empty string
// The actual skip logic is in the orchestrator, but we document the behavior here

// Empty string path should be handled by the caller (orchestrator)
// to skip calling writeSummaryFile entirely
summaryFile := ""
if summaryFile == "" {
// This is the expected behavior - skip writing
t.Log("Empty summary file path correctly skips writing")
}
}
4 changes: 2 additions & 2 deletions pkg/cli/logs_test.go
@@ -21,7 +21,7 @@ func TestDownloadWorkflowLogs(t *testing.T) {
// Test the DownloadWorkflowLogs function
// This should either fail with auth error (if not authenticated)
// or succeed with no results (if authenticated but no workflows match)
err := DownloadWorkflowLogs("", 1, "", "", "./test-logs", "", "", 0, 0, "", false, false, false, false, false, false, false, 0, false)
err := DownloadWorkflowLogs("", 1, "", "", "./test-logs", "", "", 0, 0, "", false, false, false, false, false, false, false, 0, false, "summary.json")

// If GitHub CLI is authenticated, the function may succeed but find no results
// If not authenticated, it should return an auth error
@@ -917,7 +917,7 @@ func TestDownloadWorkflowLogsWithEngineFilter(t *testing.T) {
if !tt.expectError {
// For valid engines, test that the function can be called without panic
// It may still fail with auth errors, which is expected
err := DownloadWorkflowLogs("", 1, "", "", "./test-logs", tt.engine, "", 0, 0, "", false, false, false, false, false, false, false, 0, false)
err := DownloadWorkflowLogs("", 1, "", "", "./test-logs", tt.engine, "", 0, 0, "", false, false, false, false, false, false, false, 0, false, "summary.json")

// Clean up any created directories
os.RemoveAll("./test-logs")