diff --git a/pkg/cli/compile_stats.go b/pkg/cli/compile_stats.go
index 5db2fe4d7d..afa67e9359 100644
--- a/pkg/cli/compile_stats.go
+++ b/pkg/cli/compile_stats.go
@@ -7,11 +7,14 @@ import (
 	"sort"
 
 	"github.com/githubnext/gh-aw/pkg/console"
+	"github.com/githubnext/gh-aw/pkg/logger"
 	"github.com/githubnext/gh-aw/pkg/styles"
 	"github.com/githubnext/gh-aw/pkg/tty"
 	"gopkg.in/yaml.v3"
 )
 
+var compileStatsLog = logger.New("cli:compile_stats")
+
 // WorkflowStats holds statistics about a compiled workflow
 type WorkflowStats struct {
 	Workflow string
@@ -26,9 +29,11 @@ type WorkflowStats struct {
 
 // collectWorkflowStats parses a lock file and collects statistics
 func collectWorkflowStats(lockFilePath string) (*WorkflowStats, error) {
+	compileStatsLog.Printf("Collecting workflow stats: file=%s", lockFilePath)
 	// Get file size
 	fileInfo, err := os.Stat(lockFilePath)
 	if err != nil {
+		compileStatsLog.Printf("Failed to stat file: %v", err)
 		return nil, fmt.Errorf("failed to stat file: %w", err)
 	}
 
@@ -51,6 +56,7 @@ func collectWorkflowStats(lockFilePath string) (*WorkflowStats, error) {
 	// Count jobs and steps
 	if jobs, ok := workflowYAML["jobs"].(map[string]any); ok {
 		stats.Jobs = len(jobs)
+		compileStatsLog.Printf("Workflow has %d jobs", stats.Jobs)
 
 		// Iterate through jobs to count steps and scripts
 		for _, jobData := range jobs {
@@ -79,11 +85,14 @@ func collectWorkflowStats(lockFilePath string) (*WorkflowStats, error) {
 		}
 	}
 
+	compileStatsLog.Printf("Stats collected: jobs=%d, steps=%d, scripts=%d, size=%d bytes",
+		stats.Jobs, stats.Steps, stats.ScriptCount, stats.FileSize)
 	return stats, nil
 }
 
 // displayStatsTable displays workflow statistics in a sorted table
 func displayStatsTable(statsList []*WorkflowStats) {
+	compileStatsLog.Printf("Displaying stats table: workflow_count=%d", len(statsList))
 	if len(statsList) == 0 {
 		fmt.Fprintln(os.Stderr, console.FormatWarningMessage("No workflow statistics to display"))
 		return
diff --git a/pkg/parser/content_extractor.go b/pkg/parser/content_extractor.go
index 27ea563acf..48b58395e3 100644
--- a/pkg/parser/content_extractor.go
+++ b/pkg/parser/content_extractor.go
@@ -9,8 +9,10 @@ import (
 
 // extractToolsFromContent extracts tools and mcp-servers sections from frontmatter as JSON string
 func extractToolsFromContent(content string) (string, error) {
+	log.Printf("Extracting tools from content: size=%d bytes", len(content))
 	result, err := ExtractFrontmatterFromContent(content)
 	if err != nil {
+		log.Printf("Failed to extract frontmatter: %v", err)
 		return "{}", nil // Return empty object on error to match bash behavior
 	}
 
@@ -36,9 +38,11 @@ func extractToolsFromContent(content string) (string, error) {
 
 	// If nothing was extracted, return empty object
 	if len(extracted) == 0 {
+		log.Print("No tools or mcp-servers found in content")
 		return "{}", nil
 	}
 
+	log.Printf("Extracted %d tool/server configurations", len(extracted))
 	// Convert to JSON string
 	extractedJSON, err := json.Marshal(extracted)
 	if err != nil {
diff --git a/pkg/parser/include_expander.go b/pkg/parser/include_expander.go
index 9850aba6d3..c90751224e 100644
--- a/pkg/parser/include_expander.go
+++ b/pkg/parser/include_expander.go
@@ -18,11 +18,13 @@ func ExpandIncludes(content, baseDir string, extractTools bool) (string, error)
 
 // ExpandIncludesWithManifest recursively expands @include and @import directives and returns list of included files
 func ExpandIncludesWithManifest(content, baseDir string, extractTools bool) (string, []string, error) {
+	log.Printf("Expanding includes: baseDir=%s, extractTools=%t, content_size=%d", baseDir, extractTools, len(content))
 	const maxDepth = 10
 	currentContent := content
 	visited := make(map[string]bool)
 
 	for depth := 0; depth < maxDepth; depth++ {
+		log.Printf("Include expansion depth: %d", depth)
 		// Process includes in current content
 		processedContent, err := processIncludesWithVisited(currentContent, baseDir, extractTools, visited)
 		if err != nil {
@@ -59,6 +61,7 @@ func ExpandIncludesWithManifest(content, baseDir string, extractTools bool) (str
 		}
 	}
+	log.Printf("Include expansion complete: visited_files=%d", len(includedFiles))
 	if extractTools {
 		// For tools mode, merge all extracted JSON objects
 		mergedTools, err := mergeToolsFromJSON(currentContent)
 		if err != nil {
diff --git a/pkg/parser/include_processor.go b/pkg/parser/include_processor.go
index 971234bb31..e69113edad 100644
--- a/pkg/parser/include_processor.go
+++ b/pkg/parser/include_processor.go
@@ -15,6 +15,7 @@ import (
 // ProcessIncludes processes @include, @import (deprecated), and {{#import: directives in markdown content
 // This matches the bash process_includes function behavior
 func ProcessIncludes(content, baseDir string, extractTools bool) (string, error) {
+	log.Printf("Processing includes: baseDir=%s, extractTools=%t, content_size=%d", baseDir, extractTools, len(content))
 	visited := make(map[string]bool)
 	return processIncludesWithVisited(content, baseDir, extractTools, visited)
 }
@@ -60,6 +61,7 @@ func processIncludesWithVisited(content, baseDir string, extractTools bool, visi
 		// Resolve file path first to get the canonical path
 		fullPath, err := ResolveIncludePath(filePath, baseDir, nil)
 		if err != nil {
+			log.Printf("Failed to resolve include path '%s': %v", filePath, err)
 			if isOptional {
 				// For optional includes, show a friendly informational message to stdout
 				if !extractTools {
@@ -73,6 +75,7 @@ func processIncludesWithVisited(content, baseDir string, extractTools bool, visi
 
 		// Check for repeated imports using the resolved full path
 		if visited[fullPath] {
+			log.Printf("Skipping already included file: %s", fullPath)
 			if !extractTools {
 				fmt.Fprintln(os.Stderr, console.FormatInfoMessage(fmt.Sprintf("Already included: %s, skipping", filePath)))
 			}
@@ -80,6 +83,7 @@ func processIncludesWithVisited(content, baseDir string, extractTools bool, visi
 		}
 
 		// Mark as visited using the resolved full path
+		log.Printf("Processing include file: %s", fullPath)
 		visited[fullPath] = true
 
 		// Process the included file
diff --git a/pkg/parser/tools_merger.go b/pkg/parser/tools_merger.go
index 21aa6f5b16..6b8bb7b586 100644
--- a/pkg/parser/tools_merger.go
+++ b/pkg/parser/tools_merger.go
@@ -9,6 +9,7 @@ import (
 
 // mergeToolsFromJSON merges multiple JSON tool objects from content
 func mergeToolsFromJSON(content string) (string, error) {
+	log.Printf("Merging tools from JSON: content_size=%d bytes", len(content))
 	// Clean up the content first
 	content = strings.TrimSpace(content)
 
@@ -44,9 +45,11 @@ func mergeToolsFromJSON(content string) (string, error) {
 
 	// If no valid objects found, return empty
 	if len(jsonObjects) == 0 {
+		log.Print("No valid JSON objects found in content, returning empty object")
 		return "{}", nil
 	}
 
+	log.Printf("Found %d JSON objects to merge", len(jsonObjects))
 	// Merge all objects
 	merged := make(map[string]any)
 	for _, obj := range jsonObjects {
@@ -70,6 +73,7 @@ func mergeToolsFromJSON(content string) (string, error) {
 // Only supports merging arrays and maps for neutral tools (bash, web-fetch, web-search, edit, mcp-*).
 // Removes all legacy Claude tool merging logic.
 func MergeTools(base, additional map[string]any) (map[string]any, error) {
+	log.Printf("Merging tools: base_keys=%d, additional_keys=%d", len(base), len(additional))
 	result := make(map[string]any)
 
 	// Copy base
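
Note: the pkg/parser hunks above call a package-level log that is not declared anywhere in this diff. A minimal sketch of what that declaration presumably looks like, mirroring the compileStatsLog pattern added to pkg/cli/compile_stats.go; the "parser" namespace string below is an assumption, not taken from this change:

	package parser

	import "github.com/githubnext/gh-aw/pkg/logger"

	// Assumed package-level logger for pkg/parser; the actual declaration
	// and namespace string are not part of this diff.
	var log = logger.New("parser")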