diff --git a/pkg/cli/audit_report_render.go b/pkg/cli/audit_report_render.go
index c0ca595dbe..2d07975897 100644
--- a/pkg/cli/audit_report_render.go
+++ b/pkg/cli/audit_report_render.go
@@ -31,6 +31,7 @@ func renderConsole(data AuditData, logsPath string) {
 
 	// Key Findings Section - NEW
 	if len(data.KeyFindings) > 0 {
+		auditReportLog.Printf("Rendering %d key findings", len(data.KeyFindings))
 		fmt.Fprintln(os.Stderr, console.FormatSectionHeader("Key Findings"))
 		fmt.Fprintln(os.Stderr)
 		renderKeyFindings(data.KeyFindings)
@@ -38,6 +39,7 @@ func renderConsole(data AuditData, logsPath string) {
 
 	// Recommendations Section - NEW
 	if len(data.Recommendations) > 0 {
+		auditReportLog.Printf("Rendering %d recommendations", len(data.Recommendations))
 		fmt.Fprintln(os.Stderr, console.FormatSectionHeader("Recommendations"))
 		fmt.Fprintln(os.Stderr)
 		renderRecommendations(data.Recommendations)
@@ -45,6 +47,7 @@ func renderConsole(data AuditData, logsPath string) {
 
 	// Failure Analysis Section - NEW
 	if data.FailureAnalysis != nil {
+		auditReportLog.Print("Rendering failure analysis")
 		fmt.Fprintln(os.Stderr, console.FormatSectionHeader("Failure Analysis"))
 		fmt.Fprintln(os.Stderr)
 		renderFailureAnalysis(data.FailureAnalysis)
@@ -64,6 +67,7 @@ func renderConsole(data AuditData, logsPath string) {
 
 	// Jobs Section - use new table rendering
 	if len(data.Jobs) > 0 {
+		auditReportLog.Printf("Rendering jobs table with %d jobs", len(data.Jobs))
 		fmt.Fprintln(os.Stderr, console.FormatSectionHeader("Jobs"))
 		fmt.Fprintln(os.Stderr)
 		renderJobsTable(data.Jobs)
diff --git a/pkg/cli/update_workflows.go b/pkg/cli/update_workflows.go
index 26bafbde08..cde48d5be7 100644
--- a/pkg/cli/update_workflows.go
+++ b/pkg/cli/update_workflows.go
@@ -43,13 +43,16 @@ func UpdateWorkflows(workflowNames []string, allowMajor, force, verbose bool, en
 
 	// Update each workflow
 	for _, wf := range workflows {
+		updateLog.Printf("Updating workflow: %s (source: %s)", wf.Name, wf.SourceSpec)
 		if err := updateWorkflow(wf, allowMajor, force, verbose, engineOverride, noStopAfter, stopAfter, merge); err != nil {
+			updateLog.Printf("Failed to update workflow %s: %v", wf.Name, err)
 			failedUpdates = append(failedUpdates, updateFailure{
 				Name:  wf.Name,
 				Error: err.Error(),
 			})
 			continue
 		}
+		updateLog.Printf("Successfully updated workflow: %s", wf.Name)
 		successfulUpdates = append(successfulUpdates, wf.Name)
 	}
 
@@ -65,6 +68,7 @@ func UpdateWorkflows(workflowNames []string, allowMajor, force, verbose bool, en
 
 // findWorkflowsWithSource finds all workflows that have a source field
 func findWorkflowsWithSource(workflowsDir string, filterNames []string, verbose bool) ([]*workflowWithSource, error) {
+	updateLog.Printf("Finding workflows with source field in %s", workflowsDir)
 	var workflows []*workflowWithSource
 
 	// Read all .md files in workflows directory
@@ -72,6 +76,7 @@ func findWorkflowsWithSource(workflowsDir string, filterNames []string, verbose
 	if err != nil {
 		return nil, fmt.Errorf("failed to read workflows directory: %w", err)
 	}
+	updateLog.Printf("Found %d entries in workflows directory", len(entries))
 
 	for _, entry := range entries {
 		if entry.IsDir() || !strings.HasSuffix(entry.Name(), ".md") {
diff --git a/pkg/parser/include_processor.go b/pkg/parser/include_processor.go
index 23797be1a1..ac39e106df 100644
--- a/pkg/parser/include_processor.go
+++ b/pkg/parser/include_processor.go
@@ -10,12 +10,15 @@ import (
 	"strings"
 
 	"github.com/github/gh-aw/pkg/console"
+	"github.com/github/gh-aw/pkg/logger"
 )
 
+var includeLog = logger.New("parser:include_processor")
+
 // ProcessIncludes processes @include, @import (deprecated), and {{#import: directives in markdown content
 // This matches the bash process_includes function behavior
 func ProcessIncludes(content, baseDir string, extractTools bool) (string, error) {
-	log.Printf("Processing includes: baseDir=%s, extractTools=%t, content_size=%d", baseDir, extractTools, len(content))
+	includeLog.Printf("Processing includes: baseDir=%s, extractTools=%t, content_size=%d", baseDir, extractTools, len(content))
 	visited := make(map[string]bool)
 	return processIncludesWithVisited(content, baseDir, extractTools, visited)
 }
@@ -61,7 +64,7 @@ func processIncludesWithVisited(content, baseDir string, extractTools bool, visi
 	// Resolve file path first to get the canonical path
 	fullPath, err := ResolveIncludePath(filePath, baseDir, nil)
 	if err != nil {
-		log.Printf("Failed to resolve include path '%s': %v", filePath, err)
+		includeLog.Printf("Failed to resolve include path '%s': %v", filePath, err)
 		if isOptional {
 			// For optional includes, show a friendly informational message to stdout
 			if !extractTools {
@@ -75,7 +78,7 @@ func processIncludesWithVisited(content, baseDir string, extractTools bool, visi
 
 	// Check for repeated imports using the resolved full path
 	if visited[fullPath] {
-		log.Printf("Skipping already included file: %s", fullPath)
+		includeLog.Printf("Skipping already included file: %s", fullPath)
 		if !extractTools {
 			fmt.Fprintln(os.Stderr, console.FormatInfoMessage(fmt.Sprintf("Already included: %s, skipping", filePath)))
 		}
@@ -83,7 +86,7 @@ func processIncludesWithVisited(content, baseDir string, extractTools bool, visi
 	}
 
 	// Mark as visited using the resolved full path
-	log.Printf("Processing include file: %s", fullPath)
+	includeLog.Printf("Processing include file: %s", fullPath)
 	visited[fullPath] = true
 
 	// Process the included file
@@ -113,10 +116,12 @@ func processIncludesWithVisited(content, baseDir string, extractTools bool, visi
 // processIncludedFile processes a single included file, optionally extracting a section
 // processIncludedFileWithVisited processes a single included file with cycle detection for nested includes
 func processIncludedFileWithVisited(filePath, sectionName string, extractTools bool, visited map[string]bool) (string, error) {
+	includeLog.Printf("Reading included file: %s (extractTools=%t, section=%s)", filePath, extractTools, sectionName)
 	content, err := os.ReadFile(filePath)
 	if err != nil {
 		return "", fmt.Errorf("failed to read included file %s: %w", filePath, err)
 	}
+	includeLog.Printf("Read %d bytes from included file: %s", len(content), filePath)
 
 	// Validate included file frontmatter based on file location
 	result, err := ExtractFrontmatterFromContent(string(content))
@@ -140,8 +145,10 @@ func processIncludedFileWithVisited(filePath, sectionName string, extractTools b
 	if validationErr != nil {
 		if isWorkflowFile {
 			// For workflow files, strict validation must pass
+			includeLog.Printf("Validation failed for workflow file %s: %v", filePath, validationErr)
 			return "", fmt.Errorf("invalid frontmatter in included file %s: %w", filePath, validationErr)
 		} else {
+			includeLog.Printf("Validation failed for non-workflow file %s, applying relaxed validation", filePath)
 			// For non-workflow files, fall back to relaxed validation with warnings
 			if len(result.Frontmatter) > 0 {
 				// Valid fields for non-workflow frontmatter (fields that are allowed in shared workflows)
diff --git a/pkg/workflow/compiler_yaml_main_job.go b/pkg/workflow/compiler_yaml_main_job.go
index a35f230872..c47673c56b 100644
--- a/pkg/workflow/compiler_yaml_main_job.go
+++ b/pkg/workflow/compiler_yaml_main_job.go
@@ -167,12 +167,15 @@ func (c *Compiler) generateMainJobSteps(yaml *strings.Builder, data *WorkflowDat
 	}
 
 	// Add cache steps if cache configuration is present
+	compilerYamlLog.Print("Generating cache steps for workflow")
 	generateCacheSteps(yaml, data, c.verbose)
 
 	// Add cache-memory steps if cache-memory configuration is present
+	compilerYamlLog.Print("Generating cache-memory steps for workflow")
 	generateCacheMemorySteps(yaml, data)
 
 	// Add repo-memory clone steps if repo-memory configuration is present
+	compilerYamlLog.Print("Generating repo-memory steps for workflow")
 	generateRepoMemorySteps(yaml, data)
 
 	// Configure git credentials for agentic workflows
@@ -240,9 +243,11 @@ func (c *Compiler) generateMainJobSteps(yaml *strings.Builder, data *WorkflowDat
 	}
 
 	// Add AI execution step using the agentic engine
+	compilerYamlLog.Printf("Generating engine execution steps for %s", engine.GetID())
 	c.generateEngineExecutionSteps(yaml, data, engine, logFileFull)
 
 	// Mark that we've completed agent execution - step order validation starts from here
+	compilerYamlLog.Print("Marking agent execution as complete for step order tracking")
 	c.stepOrderTracker.MarkAgentExecutionComplete()
 
 	// Regenerate git credentials after agent execution
diff --git a/pkg/workflow/safe_outputs_config_generation.go b/pkg/workflow/safe_outputs_config_generation.go
index 289b1dfcf7..2d4a306c5d 100644
--- a/pkg/workflow/safe_outputs_config_generation.go
+++ b/pkg/workflow/safe_outputs_config_generation.go
@@ -58,6 +58,7 @@ func populateDispatchWorkflowFiles(data *WorkflowData, markdownPath string) {
 func generateSafeOutputsConfig(data *WorkflowData) string {
 	// Pass the safe-outputs configuration for validation
 	if data.SafeOutputs == nil {
+		safeOutputsConfigLog.Print("No safe outputs configuration found, returning empty config")
 		return ""
 	}
 	safeOutputsConfigLog.Print("Generating safe outputs configuration for workflow")
@@ -341,7 +342,9 @@ func generateSafeOutputsConfig(data *WorkflowData) string {
 
 	// Add safe-jobs configuration from SafeOutputs.Jobs
 	if len(data.SafeOutputs.Jobs) > 0 {
+		safeOutputsConfigLog.Printf("Processing %d safe job configurations", len(data.SafeOutputs.Jobs))
 		for jobName, jobConfig := range data.SafeOutputs.Jobs {
+			safeOutputsConfigLog.Printf("Generating config for safe job: %s", jobName)
 			safeJobConfig := map[string]any{}
 
 			// Add description if present
@@ -441,6 +444,7 @@ func generateSafeOutputsConfig(data *WorkflowData) string {
 	}
 
 	configJSON, _ := json.Marshal(safeOutputsConfig)
+	safeOutputsConfigLog.Printf("Safe outputs config generation complete: %d tool types configured", len(safeOutputsConfig))
 	return string(configJSON)
 }
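
Note: every new call site in this diff relies on the namespaced logger constructor from github.com/github/gh-aw/pkg/logger, whose implementation is not part of the change. Below is a minimal sketch of the shape these call sites assume: a namespace-prefixed wrapper over the standard library log package, gated by a hypothetical DEBUG environment variable (the gating mechanism is an assumption, not confirmed by this diff).

package logger

import (
	"log"
	"os"
	"strings"
)

// Logger wraps a standard library logger with a fixed namespace prefix.
type Logger struct {
	inner   *log.Logger
	enabled bool
}

// New returns a logger tagged with the given namespace, such as
// "parser:include_processor". In this sketch, output is emitted only when
// the assumed DEBUG environment variable is "*" or contains the namespace.
func New(namespace string) *Logger {
	debug := os.Getenv("DEBUG")
	return &Logger{
		inner:   log.New(os.Stderr, namespace+": ", log.LstdFlags),
		enabled: debug == "*" || strings.Contains(debug, namespace),
	}
}

// Printf logs a formatted message if the namespace is enabled.
func (l *Logger) Printf(format string, args ...any) {
	if l.enabled {
		l.inner.Printf(format, args...)
	}
}

// Print logs its arguments if the namespace is enabled.
func (l *Logger) Print(args ...any) {
	if l.enabled {
		l.inner.Print(args...)
	}
}

Under that assumption, running with DEBUG=parser:include_processor would surface only the include-processing messages added above, while the new log lines in the other packages stay silent.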