9 changes: 9 additions & 0 deletions pkg/cli/compile_stats.go
@@ -7,11 +7,14 @@ import (
"sort"

"github.com/githubnext/gh-aw/pkg/console"
"github.com/githubnext/gh-aw/pkg/logger"
"github.com/githubnext/gh-aw/pkg/styles"
"github.com/githubnext/gh-aw/pkg/tty"
"gopkg.in/yaml.v3"
)

var compileStatsLog = logger.New("cli:compile_stats")

// WorkflowStats holds statistics about a compiled workflow
type WorkflowStats struct {
Workflow string
@@ -26,9 +29,11 @@ type WorkflowStats struct {

// collectWorkflowStats parses a lock file and collects statistics
func collectWorkflowStats(lockFilePath string) (*WorkflowStats, error) {
compileStatsLog.Printf("Collecting workflow stats: file=%s", lockFilePath)
// Get file size
fileInfo, err := os.Stat(lockFilePath)
if err != nil {
compileStatsLog.Printf("Failed to stat file: %v", err)
return nil, fmt.Errorf("failed to stat file: %w", err)
}

@@ -51,6 +56,7 @@ func collectWorkflowStats(lockFilePath string) (*WorkflowStats, error) {
// Count jobs and steps
if jobs, ok := workflowYAML["jobs"].(map[string]any); ok {
stats.Jobs = len(jobs)
compileStatsLog.Printf("Workflow has %d jobs", stats.Jobs)

// Iterate through jobs to count steps and scripts
for _, jobData := range jobs {
@@ -79,11 +85,14 @@ func collectWorkflowStats(lockFilePath string) (*WorkflowStats, error) {
}
}

compileStatsLog.Printf("Stats collected: jobs=%d, steps=%d, scripts=%d, size=%d bytes",
stats.Jobs, stats.Steps, stats.ScriptCount, stats.FileSize)
return stats, nil
}

// displayStatsTable displays workflow statistics in a sorted table
func displayStatsTable(statsList []*WorkflowStats) {
compileStatsLog.Printf("Displaying stats table: workflow_count=%d", len(statsList))
if len(statsList) == 0 {
fmt.Fprintln(os.Stderr, console.FormatWarningMessage("No workflow statistics to display"))
return
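This file introduces the package-level named logger that the other diffs reuse: one `logger.New("cli:compile_stats")` call, then `Printf` calls on it. The `pkg/logger` internals are not part of this diff, so the following is only a minimal sketch of such a named logger, assuming `New(name)` returns a value with the `Printf` and `Print` methods used above.

```go
// Minimal sketch of a named-logger package (an assumption, not the actual
// github.com/githubnext/gh-aw/pkg/logger implementation). It only mirrors
// what the diff uses: New(name) plus Printf and Print methods.
package logger

import (
	"log"
	"os"
)

// Logger prefixes every message with its component name, e.g. "cli:compile_stats".
type Logger struct {
	inner *log.Logger
}

// New creates a logger for the named component, writing to stderr.
func New(name string) *Logger {
	return &Logger{inner: log.New(os.Stderr, name+": ", log.LstdFlags)}
}

// Printf logs a formatted message through the named logger.
func (l *Logger) Printf(format string, args ...any) {
	l.inner.Printf(format, args...)
}

// Print logs its arguments through the named logger.
func (l *Logger) Print(args ...any) {
	l.inner.Print(args...)
}
```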
4 changes: 4 additions & 0 deletions pkg/parser/content_extractor.go
@@ -9,8 +9,10 @@ import (

// extractToolsFromContent extracts tools and mcp-servers sections from frontmatter as JSON string
func extractToolsFromContent(content string) (string, error) {
log.Printf("Extracting tools from content: size=%d bytes", len(content))
result, err := ExtractFrontmatterFromContent(content)
if err != nil {
log.Printf("Failed to extract frontmatter: %v", err)
return "{}", nil // Return empty object on error to match bash behavior
}

@@ -36,9 +38,11 @@ func extractToolsFromContent(content string) (string, error) {

// If nothing was extracted, return empty object
if len(extracted) == 0 {
log.Print("No tools or mcp-servers found in content")
return "{}", nil
}

log.Printf("Extracted %d tool/server configurations", len(extracted))
// Convert to JSON string
extractedJSON, err := json.Marshal(extracted)
if err != nil {
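`extractToolsFromContent` deliberately never fails: a parse error or a missing section collapses to `"{}"` so downstream merging always receives a JSON object. Below is a self-contained sketch of that lenient extraction; the helper name `extractTools` and the sample frontmatter layout are assumptions for illustration, not the package's actual code.

```go
// Hedged sketch of lenient tools extraction from YAML frontmatter: any
// failure falls back to "{}", mirroring the behavior described above.
// The helper name and frontmatter layout are assumptions for this example.
package main

import (
	"encoding/json"
	"fmt"

	"gopkg.in/yaml.v3"
)

func extractTools(frontmatter string) string {
	var parsed map[string]any
	if err := yaml.Unmarshal([]byte(frontmatter), &parsed); err != nil {
		return "{}" // never fail: empty object on parse errors
	}
	extracted := make(map[string]any)
	for _, key := range []string{"tools", "mcp-servers"} {
		if v, ok := parsed[key]; ok {
			extracted[key] = v
		}
	}
	if len(extracted) == 0 {
		return "{}" // nothing relevant in the frontmatter
	}
	out, err := json.Marshal(extracted)
	if err != nil {
		return "{}"
	}
	return string(out)
}

func main() {
	fm := "name: demo\ntools:\n  bash:\n    allowed: [ls]\n"
	fmt.Println(extractTools(fm)) // {"tools":{"bash":{"allowed":["ls"]}}}
}
```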
3 changes: 3 additions & 0 deletions pkg/parser/include_expander.go
@@ -18,11 +18,13 @@ func ExpandIncludes(content, baseDir string, extractTools bool) (string, error)

// ExpandIncludesWithManifest recursively expands @include and @import directives and returns list of included files
func ExpandIncludesWithManifest(content, baseDir string, extractTools bool) (string, []string, error) {
log.Printf("Expanding includes: baseDir=%s, extractTools=%t, content_size=%d", baseDir, extractTools, len(content))
const maxDepth = 10
currentContent := content
visited := make(map[string]bool)

for depth := 0; depth < maxDepth; depth++ {
log.Printf("Include expansion depth: %d", depth)
// Process includes in current content
processedContent, err := processIncludesWithVisited(currentContent, baseDir, extractTools, visited)
if err != nil {
@@ -59,6 +61,7 @@ func ExpandIncludesWithManifest(content, baseDir string, extractTools bool) (str
}
}

log.Printf("Include expansion complete: visited_files=%d", len(includedFiles))
if extractTools {
// For tools mode, merge all extracted JSON objects
mergedTools, err := mergeToolsFromJSON(currentContent)
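The new depth log line is worth reading alongside how the loop terminates: expansion runs at most `maxDepth` passes, and the visited map (keyed by resolved path) keeps repeated or mutually-including files from being processed twice. The snippet below only illustrates that guard structure with a hypothetical stand-in for the real include processing; it is not the package's resolver.

```go
// Illustration of the depth-bounded, visited-map expansion loop; expandOnce
// is a hypothetical stand-in for the real include processing.
package main

import "fmt"

const maxDepth = 10

// expandOnce "expands" one layer of includes: files already in visited are
// skipped, so cycles and repeated imports cannot grow the content forever.
func expandOnce(content string, visited map[string]bool) string {
	for _, inc := range []string{"shared/tools.md", "shared/footer.md"} {
		if visited[inc] {
			continue // already included, skipping
		}
		visited[inc] = true
		content += "\n<contents of " + inc + ">"
	}
	return content
}

func main() {
	visited := make(map[string]bool)
	content := "# workflow"
	for depth := 0; depth < maxDepth; depth++ {
		next := expandOnce(content, visited)
		if next == content {
			break // nothing new expanded; stop before exhausting maxDepth
		}
		content = next
	}
	fmt.Printf("include expansion complete: visited_files=%d\n", len(visited))
}
```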
4 changes: 4 additions & 0 deletions pkg/parser/include_processor.go
@@ -15,6 +15,7 @@ import (
// ProcessIncludes processes @include, @import (deprecated), and {{#import: directives in markdown content
// This matches the bash process_includes function behavior
func ProcessIncludes(content, baseDir string, extractTools bool) (string, error) {
log.Printf("Processing includes: baseDir=%s, extractTools=%t, content_size=%d", baseDir, extractTools, len(content))
visited := make(map[string]bool)
return processIncludesWithVisited(content, baseDir, extractTools, visited)
}
@@ -60,6 +61,7 @@ func processIncludesWithVisited(content, baseDir string, extractTools bool, visi
// Resolve file path first to get the canonical path
fullPath, err := ResolveIncludePath(filePath, baseDir, nil)
if err != nil {
log.Printf("Failed to resolve include path '%s': %v", filePath, err)
if isOptional {
// For optional includes, show a friendly informational message to stdout
if !extractTools {
@@ -73,13 +75,15 @@ func processIncludesWithVisited(content, baseDir string, extractTools bool, visi

// Check for repeated imports using the resolved full path
if visited[fullPath] {
log.Printf("Skipping already included file: %s", fullPath)
if !extractTools {
fmt.Fprintln(os.Stderr, console.FormatInfoMessage(fmt.Sprintf("Already included: %s, skipping", filePath)))
}
continue
}

// Mark as visited using the resolved full path
log.Printf("Processing include file: %s", fullPath)
visited[fullPath] = true

// Process the included file
4 changes: 4 additions & 0 deletions pkg/parser/tools_merger.go
@@ -9,6 +9,7 @@ import (

// mergeToolsFromJSON merges multiple JSON tool objects from content
func mergeToolsFromJSON(content string) (string, error) {
log.Printf("Merging tools from JSON: content_size=%d bytes", len(content))
// Clean up the content first
content = strings.TrimSpace(content)

@@ -44,9 +45,11 @@ func mergeToolsFromJSON(content string) (string, error) {

// If no valid objects found, return empty
if len(jsonObjects) == 0 {
log.Print("No valid JSON objects found in content, returning empty object")
return "{}", nil
}

log.Printf("Found %d JSON objects to merge", len(jsonObjects))
// Merge all objects
merged := make(map[string]any)
for _, obj := range jsonObjects {
@@ -70,6 +73,7 @@ func mergeToolsFromJSON(content string) (string, error) {
// Only supports merging arrays and maps for neutral tools (bash, web-fetch, web-search, edit, mcp-*).
// Removes all legacy Claude tool merging logic.
func MergeTools(base, additional map[string]any) (map[string]any, error) {
log.Printf("Merging tools: base_keys=%d, additional_keys=%d", len(base), len(additional))
result := make(map[string]any)

// Copy base
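Both helpers in this file reduce several JSON tool objects to a single map, skipping fragments that do not parse. The sketch below shows that shallow-merge shape in isolation; the sample keys and the later-object-wins conflict rule are assumptions made for illustration, not the documented behavior of `MergeTools`.

```go
// Self-contained sketch of merging JSON tool fragments into one object.
// Key names and the last-one-wins conflict rule are assumptions made for
// this example only.
package main

import (
	"encoding/json"
	"fmt"
)

func main() {
	fragments := []string{
		`{"bash": {"allowed": ["ls", "cat"]}}`,
		`{"web-fetch": {}}`,
		`not json`, // tolerated: invalid fragments are simply skipped
	}

	merged := make(map[string]any)
	for _, raw := range fragments {
		var obj map[string]any
		if err := json.Unmarshal([]byte(raw), &obj); err != nil {
			continue
		}
		for k, v := range obj {
			merged[k] = v
		}
	}

	out, _ := json.Marshal(merged)
	fmt.Println(string(out)) // {"bash":{"allowed":["ls","cat"]},"web-fetch":{}}
}
```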