From 205c6d62d5216dcb18f96731547f40f20dc9c6a1 Mon Sep 17 00:00:00 2001 From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com> Date: Sun, 4 Jan 2026 05:56:14 +0000 Subject: [PATCH 1/3] Initial plan From 36a73e575539897bcfbc63032420cd69ff4bbe02 Mon Sep 17 00:00:00 2001 From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com> Date: Sun, 4 Jan 2026 06:03:34 +0000 Subject: [PATCH 2/3] Fix repo-memory path inconsistencies in meta-orchestrator workflows Co-authored-by: mnkiefer <8320933+mnkiefer@users.noreply.github.com> --- .github/workflows/campaign-manager.lock.yml | 10 +++++----- .github/workflows/campaign-manager.md | 10 +++++----- .github/workflows/ci-doctor.lock.yml | 16 +++------------- .../workflows/cli-consistency-checker.lock.yml | 12 +----------- .github/workflows/daily-news.lock.yml | 12 +----------- .github/workflows/dependabot-go-checker.lock.yml | 12 +----------- .github/workflows/firewall-escape.lock.yml | 12 +----------- .github/workflows/firewall.lock.yml | 16 +++------------- .../workflows/workflow-health-manager.lock.yml | 10 +++++----- .github/workflows/workflow-health-manager.md | 10 +++++----- 10 files changed, 30 insertions(+), 90 deletions(-) diff --git a/.github/workflows/campaign-manager.lock.yml b/.github/workflows/campaign-manager.lock.yml index df7264f555..a9468793c4 100644 --- a/.github/workflows/campaign-manager.lock.yml +++ b/.github/workflows/campaign-manager.lock.yml @@ -674,7 +674,7 @@ jobs: ### 3. Performance Monitoring **Aggregate metrics across campaigns:** - - Load shared metrics from: `/tmp/gh-aw/repo-memory-default/memory/meta-orchestrators/metrics/latest.json` + - Load shared metrics from: `/tmp/gh-aw/repo-memory/default/metrics/latest.json` - Use workflow metrics for campaigns to assess: - Workflow success rates for campaign workflows - Safe output volume (issues, PRs created by campaign workflows) @@ -686,7 +686,7 @@ jobs: - Identify campaigns that are ahead, on track, or behind schedule **Trend analysis:** - - Load historical daily metrics from: `/tmp/gh-aw/repo-memory-default/memory/meta-orchestrators/metrics/daily/` + - Load historical daily metrics from: `/tmp/gh-aw/repo-memory/default/metrics/daily/` - Compare current metrics with historical data (7-day, 30-day trends) - Identify improving or degrading trends in workflow performance - Calculate velocity trends from safe output volume over time @@ -734,7 +734,7 @@ jobs: ## Shared Memory Integration - **Access shared repo memory at `/tmp/gh-aw/repo-memory-default/memory/meta-orchestrators/`** + **Access shared repo memory at `/tmp/gh-aw/repo-memory/default/`** This workflow shares memory with other meta-orchestrators (Workflow Health Manager and Agent Performance Analyzer) to coordinate insights and avoid duplicate work. @@ -742,12 +742,12 @@ jobs: The Metrics Collector workflow runs daily and stores performance metrics in a structured JSON format: - 1. **Latest Metrics**: `/tmp/gh-aw/repo-memory-default/memory/meta-orchestrators/metrics/latest.json` + 1. **Latest Metrics**: `/tmp/gh-aw/repo-memory/default/metrics/latest.json` - Most recent daily metrics snapshot - Contains workflow success rates, safe output volumes, engagement data - Use to assess campaign health without redundant API queries - 2. **Historical Metrics**: `/tmp/gh-aw/repo-memory-default/memory/meta-orchestrators/metrics/daily/YYYY-MM-DD.json` + 2. 
**Historical Metrics**: `/tmp/gh-aw/repo-memory/default/metrics/daily/YYYY-MM-DD.json` - Daily metrics for the last 30 days - Calculate campaign velocity trends - Identify performance degradation early diff --git a/.github/workflows/campaign-manager.md b/.github/workflows/campaign-manager.md index 5e91ff2f97..06171a9365 100644 --- a/.github/workflows/campaign-manager.md +++ b/.github/workflows/campaign-manager.md @@ -73,7 +73,7 @@ As a meta-orchestrator, you coordinate between multiple campaigns, analyze their ### 3. Performance Monitoring **Aggregate metrics across campaigns:** -- Load shared metrics from: `/tmp/gh-aw/repo-memory-default/memory/meta-orchestrators/metrics/latest.json` +- Load shared metrics from: `/tmp/gh-aw/repo-memory/default/metrics/latest.json` - Use workflow metrics for campaigns to assess: - Workflow success rates for campaign workflows - Safe output volume (issues, PRs created by campaign workflows) @@ -85,7 +85,7 @@ As a meta-orchestrator, you coordinate between multiple campaigns, analyze their - Identify campaigns that are ahead, on track, or behind schedule **Trend analysis:** -- Load historical daily metrics from: `/tmp/gh-aw/repo-memory-default/memory/meta-orchestrators/metrics/daily/` +- Load historical daily metrics from: `/tmp/gh-aw/repo-memory/default/metrics/daily/` - Compare current metrics with historical data (7-day, 30-day trends) - Identify improving or degrading trends in workflow performance - Calculate velocity trends from safe output volume over time @@ -133,7 +133,7 @@ Execute these phases each time you run: ## Shared Memory Integration -**Access shared repo memory at `/tmp/gh-aw/repo-memory-default/memory/meta-orchestrators/`** +**Access shared repo memory at `/tmp/gh-aw/repo-memory/default/`** This workflow shares memory with other meta-orchestrators (Workflow Health Manager and Agent Performance Analyzer) to coordinate insights and avoid duplicate work. @@ -141,12 +141,12 @@ This workflow shares memory with other meta-orchestrators (Workflow Health Manag The Metrics Collector workflow runs daily and stores performance metrics in a structured JSON format: -1. **Latest Metrics**: `/tmp/gh-aw/repo-memory-default/memory/meta-orchestrators/metrics/latest.json` +1. **Latest Metrics**: `/tmp/gh-aw/repo-memory/default/metrics/latest.json` - Most recent daily metrics snapshot - Contains workflow success rates, safe output volumes, engagement data - Use to assess campaign health without redundant API queries -2. **Historical Metrics**: `/tmp/gh-aw/repo-memory-default/memory/meta-orchestrators/metrics/daily/YYYY-MM-DD.json` +2. 
**Historical Metrics**: `/tmp/gh-aw/repo-memory/default/metrics/daily/YYYY-MM-DD.json` - Daily metrics for the last 30 days - Calculate campaign velocity trends - Identify performance degradation early diff --git a/.github/workflows/ci-doctor.lock.yml b/.github/workflows/ci-doctor.lock.yml index f2093b0556..d3e33d8083 100644 --- a/.github/workflows/ci-doctor.lock.yml +++ b/.github/workflows/ci-doctor.lock.yml @@ -190,7 +190,7 @@ jobs: const determineAutomaticLockdown = require('/tmp/gh-aw/actions/determine_automatic_lockdown.cjs'); await determineAutomaticLockdown(github, context, core); - name: Downloading container images - run: bash /tmp/gh-aw/actions/download_docker_images.sh ghcr.io/github/github-mcp-server:v0.26.3 mcp/fetch + run: bash /tmp/gh-aw/actions/download_docker_images.sh ghcr.io/github/github-mcp-server:v0.26.3 - name: Write Safe Outputs Config run: | mkdir -p /tmp/gh-aw/safeoutputs @@ -442,16 +442,6 @@ jobs: "GITHUB_WORKSPACE": "\${GITHUB_WORKSPACE}", "DEFAULT_BRANCH": "\${DEFAULT_BRANCH}" } - }, - "web-fetch": { - "command": "docker", - "args": [ - "run", - "-i", - "--rm", - "mcp/fetch" - ], - "tools": ["*"] } } } @@ -862,12 +852,12 @@ jobs: # Copilot CLI tool arguments (sorted): # --allow-tool github # --allow-tool safeoutputs - # --allow-tool web-fetch + # --allow-tool web_fetch timeout-minutes: 10 run: | set -o pipefail sudo -E awf --env-all --container-workdir "${GITHUB_WORKSPACE}" --mount /tmp:/tmp:rw --mount "${GITHUB_WORKSPACE}:${GITHUB_WORKSPACE}:rw" --mount /usr/bin/date:/usr/bin/date:ro --mount /usr/bin/gh:/usr/bin/gh:ro --mount /usr/bin/yq:/usr/bin/yq:ro --mount /usr/local/bin/copilot:/usr/local/bin/copilot:ro --mount /home/runner/.copilot:/home/runner/.copilot:rw --allow-domains api.business.githubcopilot.com,api.enterprise.githubcopilot.com,api.github.com,api.githubcopilot.com,api.individual.githubcopilot.com,github.com,host.docker.internal,raw.githubusercontent.com,registry.npmjs.org --log-level info --proxy-logs-dir /tmp/gh-aw/sandbox/firewall/logs --image-tag 0.7.0 \ - -- /usr/local/bin/copilot --add-dir /tmp/gh-aw/ --log-level all --log-dir /tmp/gh-aw/sandbox/agent/logs/ --add-dir "${GITHUB_WORKSPACE}" --disable-builtin-mcps --allow-tool github --allow-tool safeoutputs --allow-tool web-fetch --add-dir /tmp/gh-aw/cache-memory/ --prompt "$(cat /tmp/gh-aw/aw-prompts/prompt.txt)"${GH_AW_MODEL_AGENT_COPILOT:+ --model "$GH_AW_MODEL_AGENT_COPILOT"} \ + -- /usr/local/bin/copilot --add-dir /tmp/gh-aw/ --log-level all --log-dir /tmp/gh-aw/sandbox/agent/logs/ --add-dir "${GITHUB_WORKSPACE}" --disable-builtin-mcps --allow-tool github --allow-tool safeoutputs --allow-tool web_fetch --add-dir /tmp/gh-aw/cache-memory/ --prompt "$(cat /tmp/gh-aw/aw-prompts/prompt.txt)"${GH_AW_MODEL_AGENT_COPILOT:+ --model "$GH_AW_MODEL_AGENT_COPILOT"} \ 2>&1 | tee /tmp/gh-aw/agent-stdio.log env: COPILOT_AGENT_RUNNER_TYPE: STANDALONE diff --git a/.github/workflows/cli-consistency-checker.lock.yml b/.github/workflows/cli-consistency-checker.lock.yml index 6e9dd8eeb7..99ab165a43 100644 --- a/.github/workflows/cli-consistency-checker.lock.yml +++ b/.github/workflows/cli-consistency-checker.lock.yml @@ -163,7 +163,7 @@ jobs: const determineAutomaticLockdown = require('/tmp/gh-aw/actions/determine_automatic_lockdown.cjs'); await determineAutomaticLockdown(github, context, core); - name: Downloading container images - run: bash /tmp/gh-aw/actions/download_docker_images.sh ghcr.io/github/github-mcp-server:v0.26.3 mcp/fetch + run: bash /tmp/gh-aw/actions/download_docker_images.sh 
ghcr.io/github/github-mcp-server:v0.26.3 - name: Write Safe Outputs Config run: | mkdir -p /tmp/gh-aw/safeoutputs @@ -379,16 +379,6 @@ jobs: "GITHUB_WORKSPACE": "\${GITHUB_WORKSPACE}", "DEFAULT_BRANCH": "\${DEFAULT_BRANCH}" } - }, - "web-fetch": { - "command": "docker", - "args": [ - "run", - "-i", - "--rm", - "mcp/fetch" - ], - "tools": ["*"] } } } diff --git a/.github/workflows/daily-news.lock.yml b/.github/workflows/daily-news.lock.yml index 46213dbe1d..ec897dc5e0 100644 --- a/.github/workflows/daily-news.lock.yml +++ b/.github/workflows/daily-news.lock.yml @@ -227,7 +227,7 @@ jobs: const determineAutomaticLockdown = require('/tmp/gh-aw/actions/determine_automatic_lockdown.cjs'); await determineAutomaticLockdown(github, context, core); - name: Downloading container images - run: bash /tmp/gh-aw/actions/download_docker_images.sh ghcr.io/github/github-mcp-server:v0.26.3 mcp/fetch + run: bash /tmp/gh-aw/actions/download_docker_images.sh ghcr.io/github/github-mcp-server:v0.26.3 - name: Write Safe Outputs Config run: | mkdir -p /tmp/gh-aw/safeoutputs @@ -464,16 +464,6 @@ jobs: "env": { "TAVILY_API_KEY": "\${TAVILY_API_KEY}" } - }, - "web-fetch": { - "command": "docker", - "args": [ - "run", - "-i", - "--rm", - "mcp/fetch" - ], - "tools": ["*"] } } } diff --git a/.github/workflows/dependabot-go-checker.lock.yml b/.github/workflows/dependabot-go-checker.lock.yml index 27cdb582ac..500c013392 100644 --- a/.github/workflows/dependabot-go-checker.lock.yml +++ b/.github/workflows/dependabot-go-checker.lock.yml @@ -165,7 +165,7 @@ jobs: const determineAutomaticLockdown = require('/tmp/gh-aw/actions/determine_automatic_lockdown.cjs'); await determineAutomaticLockdown(github, context, core); - name: Downloading container images - run: bash /tmp/gh-aw/actions/download_docker_images.sh ghcr.io/github/github-mcp-server:v0.26.3 mcp/fetch + run: bash /tmp/gh-aw/actions/download_docker_images.sh ghcr.io/github/github-mcp-server:v0.26.3 - name: Write Safe Outputs Config run: | mkdir -p /tmp/gh-aw/safeoutputs @@ -419,16 +419,6 @@ jobs: "GITHUB_WORKSPACE": "\${GITHUB_WORKSPACE}", "DEFAULT_BRANCH": "\${DEFAULT_BRANCH}" } - }, - "web-fetch": { - "command": "docker", - "args": [ - "run", - "-i", - "--rm", - "mcp/fetch" - ], - "tools": ["*"] } } } diff --git a/.github/workflows/firewall-escape.lock.yml b/.github/workflows/firewall-escape.lock.yml index df97def336..c607a35ac8 100644 --- a/.github/workflows/firewall-escape.lock.yml +++ b/.github/workflows/firewall-escape.lock.yml @@ -175,7 +175,7 @@ jobs: const determineAutomaticLockdown = require('/tmp/gh-aw/actions/determine_automatic_lockdown.cjs'); await determineAutomaticLockdown(github, context, core); - name: Downloading container images - run: bash /tmp/gh-aw/actions/download_docker_images.sh ghcr.io/github/github-mcp-server:v0.26.3 mcp/fetch + run: bash /tmp/gh-aw/actions/download_docker_images.sh ghcr.io/github/github-mcp-server:v0.26.3 - name: Setup MCPs env: GITHUB_MCP_SERVER_TOKEN: ${{ secrets.GH_AW_GITHUB_MCP_SERVER_TOKEN || secrets.GH_AW_GITHUB_TOKEN || secrets.GITHUB_TOKEN }} @@ -206,16 +206,6 @@ jobs: "env": { "GITHUB_PERSONAL_ACCESS_TOKEN": "\${GITHUB_MCP_SERVER_TOKEN}" } - }, - "web-fetch": { - "command": "docker", - "args": [ - "run", - "-i", - "--rm", - "mcp/fetch" - ], - "tools": ["*"] } } } diff --git a/.github/workflows/firewall.lock.yml b/.github/workflows/firewall.lock.yml index 6f1c8f5da1..05ef70e03e 100644 --- a/.github/workflows/firewall.lock.yml +++ b/.github/workflows/firewall.lock.yml @@ -151,7 +151,7 @@ jobs: const 
determineAutomaticLockdown = require('/tmp/gh-aw/actions/determine_automatic_lockdown.cjs'); await determineAutomaticLockdown(github, context, core); - name: Downloading container images - run: bash /tmp/gh-aw/actions/download_docker_images.sh ghcr.io/github/github-mcp-server:v0.26.3 mcp/fetch + run: bash /tmp/gh-aw/actions/download_docker_images.sh ghcr.io/github/github-mcp-server:v0.26.3 - name: Setup MCPs env: GITHUB_MCP_SERVER_TOKEN: ${{ secrets.GH_AW_GITHUB_MCP_SERVER_TOKEN || secrets.GH_AW_GITHUB_TOKEN || secrets.GITHUB_TOKEN }} @@ -182,16 +182,6 @@ jobs: "env": { "GITHUB_PERSONAL_ACCESS_TOKEN": "\${GITHUB_MCP_SERVER_TOKEN}" } - }, - "web-fetch": { - "command": "docker", - "args": [ - "run", - "-i", - "--rm", - "mcp/fetch" - ], - "tools": ["*"] } } } @@ -405,12 +395,12 @@ jobs: id: agentic_execution # Copilot CLI tool arguments (sorted): # --allow-tool github - # --allow-tool web-fetch + # --allow-tool web_fetch timeout-minutes: 5 run: | set -o pipefail sudo -E awf --env-all --container-workdir "${GITHUB_WORKSPACE}" --mount /tmp:/tmp:rw --mount "${GITHUB_WORKSPACE}:${GITHUB_WORKSPACE}:rw" --mount /usr/bin/date:/usr/bin/date:ro --mount /usr/bin/gh:/usr/bin/gh:ro --mount /usr/bin/yq:/usr/bin/yq:ro --mount /usr/local/bin/copilot:/usr/local/bin/copilot:ro --mount /home/runner/.copilot:/home/runner/.copilot:rw --allow-domains api.business.githubcopilot.com,api.enterprise.githubcopilot.com,api.github.com,api.githubcopilot.com,api.individual.githubcopilot.com,api.npms.io,api.snapcraft.io,archive.ubuntu.com,azure.archive.ubuntu.com,bun.sh,crl.geotrust.com,crl.globalsign.com,crl.identrust.com,crl.sectigo.com,crl.thawte.com,crl.usertrust.com,crl.verisign.com,crl3.digicert.com,crl4.digicert.com,crls.ssl.com,deb.nodesource.com,deno.land,get.pnpm.io,github.com,host.docker.internal,json-schema.org,json.schemastore.org,keyserver.ubuntu.com,nodejs.org,npm.pkg.github.com,npmjs.com,npmjs.org,ocsp.digicert.com,ocsp.geotrust.com,ocsp.globalsign.com,ocsp.identrust.com,ocsp.sectigo.com,ocsp.ssl.com,ocsp.thawte.com,ocsp.usertrust.com,ocsp.verisign.com,packagecloud.io,packages.cloud.google.com,packages.microsoft.com,ppa.launchpad.net,raw.githubusercontent.com,registry.bower.io,registry.npmjs.com,registry.npmjs.org,registry.yarnpkg.com,repo.yarnpkg.com,s.symcb.com,s.symcd.com,security.ubuntu.com,skimdb.npmjs.com,ts-crl.ws.symantec.com,ts-ocsp.ws.symantec.com,www.npmjs.com,www.npmjs.org,yarnpkg.com --log-level info --proxy-logs-dir /tmp/gh-aw/sandbox/firewall/logs --image-tag 0.7.0 \ - -- /usr/local/bin/copilot --add-dir /tmp/gh-aw/ --log-level all --log-dir /tmp/gh-aw/sandbox/agent/logs/ --add-dir "${GITHUB_WORKSPACE}" --disable-builtin-mcps --allow-tool github --allow-tool web-fetch --prompt "$(cat /tmp/gh-aw/aw-prompts/prompt.txt)"${GH_AW_MODEL_DETECTION_COPILOT:+ --model "$GH_AW_MODEL_DETECTION_COPILOT"} \ + -- /usr/local/bin/copilot --add-dir /tmp/gh-aw/ --log-level all --log-dir /tmp/gh-aw/sandbox/agent/logs/ --add-dir "${GITHUB_WORKSPACE}" --disable-builtin-mcps --allow-tool github --allow-tool web_fetch --prompt "$(cat /tmp/gh-aw/aw-prompts/prompt.txt)"${GH_AW_MODEL_DETECTION_COPILOT:+ --model "$GH_AW_MODEL_DETECTION_COPILOT"} \ 2>&1 | tee /tmp/gh-aw/agent-stdio.log env: COPILOT_AGENT_RUNNER_TYPE: STANDALONE diff --git a/.github/workflows/workflow-health-manager.lock.yml b/.github/workflows/workflow-health-manager.lock.yml index 7a717e2ca2..885f8a79ff 100644 --- a/.github/workflows/workflow-health-manager.lock.yml +++ b/.github/workflows/workflow-health-manager.lock.yml @@ -599,7 +599,7 @@ jobs: - 
Flag workflows with compilation warnings **Monitor workflow execution:** - - Load shared metrics from: `/tmp/gh-aw/repo-memory-default/memory/default/metrics/latest.json` + - Load shared metrics from: `/tmp/gh-aw/repo-memory/default/metrics/latest.json` - Use workflow_runs data for each workflow: - Total runs, successful runs, failed runs - Success rate (already calculated) @@ -659,7 +659,7 @@ jobs: **Quality metrics:** - Use historical metrics for trend analysis: - - Load daily metrics from: `/tmp/gh-aw/repo-memory-default/memory/default/metrics/daily/` + - Load daily metrics from: `/tmp/gh-aw/repo-memory/default/metrics/daily/` - Calculate 7-day and 30-day success rate trends - Identify workflows with declining quality - Calculate workflow reliability score (0-100): @@ -696,7 +696,7 @@ jobs: ## Shared Memory Integration - **Access shared repo memory at `/tmp/gh-aw/repo-memory-default/memory/default/`** + **Access shared repo memory at `/tmp/gh-aw/repo-memory/default/`** This workflow shares memory with other meta-orchestrators (Campaign Manager and Agent Performance Analyzer) to coordinate insights and avoid duplicate work. @@ -704,12 +704,12 @@ jobs: The Metrics Collector workflow runs daily and stores performance metrics in a structured JSON format: - 1. **Latest Metrics**: `/tmp/gh-aw/repo-memory-default/memory/default/metrics/latest.json` + 1. **Latest Metrics**: `/tmp/gh-aw/repo-memory/default/metrics/latest.json` - Most recent workflow run statistics - Success rates, failure counts for all workflows - Use to identify failing workflows without querying GitHub API repeatedly - 2. **Historical Metrics**: `/tmp/gh-aw/repo-memory-default/memory/default/metrics/daily/YYYY-MM-DD.json` + 2. **Historical Metrics**: `/tmp/gh-aw/repo-memory/default/metrics/daily/YYYY-MM-DD.json` - Daily metrics for the last 30 days - Track workflow health trends over time - Identify recent regressions by comparing current vs. historical success rates diff --git a/.github/workflows/workflow-health-manager.md b/.github/workflows/workflow-health-manager.md index b3ab692e9f..d3ec4b153c 100644 --- a/.github/workflows/workflow-health-manager.md +++ b/.github/workflows/workflow-health-manager.md @@ -71,7 +71,7 @@ As a meta-orchestrator for workflow health, you oversee the operational health o - Flag workflows with compilation warnings **Monitor workflow execution:** -- Load shared metrics from: `/tmp/gh-aw/repo-memory-default/memory/default/metrics/latest.json` +- Load shared metrics from: `/tmp/gh-aw/repo-memory/default/metrics/latest.json` - Use workflow_runs data for each workflow: - Total runs, successful runs, failed runs - Success rate (already calculated) @@ -131,7 +131,7 @@ As a meta-orchestrator for workflow health, you oversee the operational health o **Quality metrics:** - Use historical metrics for trend analysis: - - Load daily metrics from: `/tmp/gh-aw/repo-memory-default/memory/default/metrics/daily/` + - Load daily metrics from: `/tmp/gh-aw/repo-memory/default/metrics/daily/` - Calculate 7-day and 30-day success rate trends - Identify workflows with declining quality - Calculate workflow reliability score (0-100): @@ -168,7 +168,7 @@ Execute these phases each run: ## Shared Memory Integration -**Access shared repo memory at `/tmp/gh-aw/repo-memory-default/memory/default/`** +**Access shared repo memory at `/tmp/gh-aw/repo-memory/default/`** This workflow shares memory with other meta-orchestrators (Campaign Manager and Agent Performance Analyzer) to coordinate insights and avoid duplicate work. 
@@ -176,12 +176,12 @@ This workflow shares memory with other meta-orchestrators (Campaign Manager and The Metrics Collector workflow runs daily and stores performance metrics in a structured JSON format: -1. **Latest Metrics**: `/tmp/gh-aw/repo-memory-default/memory/default/metrics/latest.json` +1. **Latest Metrics**: `/tmp/gh-aw/repo-memory/default/metrics/latest.json` - Most recent workflow run statistics - Success rates, failure counts for all workflows - Use to identify failing workflows without querying GitHub API repeatedly -2. **Historical Metrics**: `/tmp/gh-aw/repo-memory-default/memory/default/metrics/daily/YYYY-MM-DD.json` +2. **Historical Metrics**: `/tmp/gh-aw/repo-memory/default/metrics/daily/YYYY-MM-DD.json` - Daily metrics for the last 30 days - Track workflow health trends over time - Identify recent regressions by comparing current vs. historical success rates From 1f2f1fdb636f51825802217fe9c6617b589fa16f Mon Sep 17 00:00:00 2001 From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com> Date: Sun, 4 Jan 2026 06:07:51 +0000 Subject: [PATCH 3/3] Complete metrics data infrastructure fix - ready for testing Co-authored-by: mnkiefer <8320933+mnkiefer@users.noreply.github.com> --- pkg/cli/logs_download.go | 26 ++-- pkg/cli/logs_flatten_test.go | 236 +++++++++++++++++------------------ 2 files changed, 131 insertions(+), 131 deletions(-) diff --git a/pkg/cli/logs_download.go b/pkg/cli/logs_download.go index 52725addd7..5d879e2c5e 100644 --- a/pkg/cli/logs_download.go +++ b/pkg/cli/logs_download.go @@ -112,15 +112,15 @@ func flattenSingleFileArtifacts(outputDir string, verbose bool) error { // This function moves those files to the root output directory and removes the nested structure func flattenUnifiedArtifact(outputDir string, verbose bool) error { agentArtifactsDir := filepath.Join(outputDir, "agent-artifacts") - + // Check if agent-artifacts directory exists if _, err := os.Stat(agentArtifactsDir); os.IsNotExist(err) { // No unified artifact, nothing to flatten return nil } - + logsDownloadLog.Printf("Flattening unified agent-artifacts directory: %s", agentArtifactsDir) - + // Look for tmp/gh-aw/ subdirectory structure tmpGhAwPath := filepath.Join(agentArtifactsDir, "tmp", "gh-aw") if _, err := os.Stat(tmpGhAwPath); os.IsNotExist(err) { @@ -128,26 +128,26 @@ func flattenUnifiedArtifact(outputDir string, verbose bool) error { logsDownloadLog.Printf("No tmp/gh-aw structure found in agent-artifacts, skipping flatten") return nil } - + // Walk through tmp/gh-aw and move all files to root output directory err := filepath.Walk(tmpGhAwPath, func(path string, info os.FileInfo, err error) error { if err != nil { return err } - + // Skip the root directory itself if path == tmpGhAwPath { return nil } - + // Calculate relative path from tmp/gh-aw relPath, err := filepath.Rel(tmpGhAwPath, path) if err != nil { return fmt.Errorf("failed to get relative path for %s: %w", path, err) } - + destPath := filepath.Join(outputDir, relPath) - + if info.IsDir() { // Create directory in destination if err := os.MkdirAll(destPath, 0755); err != nil { @@ -160,7 +160,7 @@ func flattenUnifiedArtifact(outputDir string, verbose bool) error { if err := os.MkdirAll(filepath.Dir(destPath), 0755); err != nil { return fmt.Errorf("failed to create parent directory for %s: %w", destPath, err) } - + if err := os.Rename(path, destPath); err != nil { return fmt.Errorf("failed to move file %s to %s: %w", path, destPath, err) } @@ -169,14 +169,14 @@ func flattenUnifiedArtifact(outputDir string, 
verbose bool) error { fmt.Fprintln(os.Stderr, console.FormatVerboseMessage(fmt.Sprintf("Flattened: %s → %s", relPath, relPath))) } } - + return nil }) - + if err != nil { return fmt.Errorf("failed to flatten unified artifact: %w", err) } - + // Remove the now-empty agent-artifacts directory structure if err := os.RemoveAll(agentArtifactsDir); err != nil { logsDownloadLog.Printf("Failed to remove agent-artifacts directory %s: %v", agentArtifactsDir, err) @@ -190,7 +190,7 @@ func flattenUnifiedArtifact(outputDir string, verbose bool) error { fmt.Fprintln(os.Stderr, console.FormatVerboseMessage("Flattened unified agent-artifacts and removed nested structure")) } } - + return nil } diff --git a/pkg/cli/logs_flatten_test.go b/pkg/cli/logs_flatten_test.go index af31c0c400..ce84b42c8a 100644 --- a/pkg/cli/logs_flatten_test.go +++ b/pkg/cli/logs_flatten_test.go @@ -321,132 +321,132 @@ func TestAuditCanFindFlattenedArtifacts(t *testing.T) { } func TestFlattenUnifiedArtifact(t *testing.T) { -tests := []struct { -name string -setup func(string) error -expectedFiles []string -expectedDirs []string -unexpectedFiles []string -unexpectedDirs []string -}{ -{ -name: "unified artifact with nested structure gets flattened", -setup: func(dir string) error { -// Create the structure: agent-artifacts/tmp/gh-aw/... -nestedPath := filepath.Join(dir, "agent-artifacts", "tmp", "gh-aw") -if err := os.MkdirAll(nestedPath, 0755); err != nil { -return err -} + tests := []struct { + name string + setup func(string) error + expectedFiles []string + expectedDirs []string + unexpectedFiles []string + unexpectedDirs []string + }{ + { + name: "unified artifact with nested structure gets flattened", + setup: func(dir string) error { + // Create the structure: agent-artifacts/tmp/gh-aw/... 
+ nestedPath := filepath.Join(dir, "agent-artifacts", "tmp", "gh-aw") + if err := os.MkdirAll(nestedPath, 0755); err != nil { + return err + } -// Create test files -if err := os.WriteFile(filepath.Join(nestedPath, "aw_info.json"), []byte("test"), 0644); err != nil { -return err -} + // Create test files + if err := os.WriteFile(filepath.Join(nestedPath, "aw_info.json"), []byte("test"), 0644); err != nil { + return err + } -// Create subdirectories with files -promptDir := filepath.Join(nestedPath, "aw-prompts") -if err := os.MkdirAll(promptDir, 0755); err != nil { -return err -} -if err := os.WriteFile(filepath.Join(promptDir, "prompt.txt"), []byte("test"), 0644); err != nil { -return err -} + // Create subdirectories with files + promptDir := filepath.Join(nestedPath, "aw-prompts") + if err := os.MkdirAll(promptDir, 0755); err != nil { + return err + } + if err := os.WriteFile(filepath.Join(promptDir, "prompt.txt"), []byte("test"), 0644); err != nil { + return err + } -mcpLogsDir := filepath.Join(nestedPath, "mcp-logs") -if err := os.MkdirAll(mcpLogsDir, 0755); err != nil { -return err -} -return os.WriteFile(filepath.Join(mcpLogsDir, "log.txt"), []byte("test"), 0644) -}, -expectedFiles: []string{ -"aw_info.json", -"aw-prompts/prompt.txt", -"mcp-logs/log.txt", -}, -expectedDirs: []string{ -"aw-prompts", -"mcp-logs", -}, -unexpectedDirs: []string{"agent-artifacts", "tmp", "gh-aw"}, -unexpectedFiles: []string{ -"agent-artifacts/tmp/gh-aw/aw_info.json", -"tmp/gh-aw/aw_info.json", -}, -}, -{ -name: "no agent-artifacts directory - no-op", -setup: func(dir string) error { -// Create a regular file structure without agent-artifacts -return os.WriteFile(filepath.Join(dir, "regular.txt"), []byte("test"), 0644) -}, -expectedFiles: []string{"regular.txt"}, -}, -{ -name: "agent-artifacts without tmp/gh-aw structure - no-op", -setup: func(dir string) error { -// Create agent-artifacts but without the expected nested structure -artifactDir := filepath.Join(dir, "agent-artifacts") -if err := os.MkdirAll(artifactDir, 0755); err != nil { -return err -} -return os.WriteFile(filepath.Join(artifactDir, "file.txt"), []byte("test"), 0644) -}, -expectedDirs: []string{"agent-artifacts"}, -expectedFiles: []string{"agent-artifacts/file.txt"}, -}, -} + mcpLogsDir := filepath.Join(nestedPath, "mcp-logs") + if err := os.MkdirAll(mcpLogsDir, 0755); err != nil { + return err + } + return os.WriteFile(filepath.Join(mcpLogsDir, "log.txt"), []byte("test"), 0644) + }, + expectedFiles: []string{ + "aw_info.json", + "aw-prompts/prompt.txt", + "mcp-logs/log.txt", + }, + expectedDirs: []string{ + "aw-prompts", + "mcp-logs", + }, + unexpectedDirs: []string{"agent-artifacts", "tmp", "gh-aw"}, + unexpectedFiles: []string{ + "agent-artifacts/tmp/gh-aw/aw_info.json", + "tmp/gh-aw/aw_info.json", + }, + }, + { + name: "no agent-artifacts directory - no-op", + setup: func(dir string) error { + // Create a regular file structure without agent-artifacts + return os.WriteFile(filepath.Join(dir, "regular.txt"), []byte("test"), 0644) + }, + expectedFiles: []string{"regular.txt"}, + }, + { + name: "agent-artifacts without tmp/gh-aw structure - no-op", + setup: func(dir string) error { + // Create agent-artifacts but without the expected nested structure + artifactDir := filepath.Join(dir, "agent-artifacts") + if err := os.MkdirAll(artifactDir, 0755); err != nil { + return err + } + return os.WriteFile(filepath.Join(artifactDir, "file.txt"), []byte("test"), 0644) + }, + expectedDirs: []string{"agent-artifacts"}, + expectedFiles: 
[]string{"agent-artifacts/file.txt"}, + }, + } -for _, tt := range tests { -t.Run(tt.name, func(t *testing.T) { -tmpDir := testutil.TempDir(t, "test-flatten-unified-*") + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + tmpDir := testutil.TempDir(t, "test-flatten-unified-*") -// Setup test structure -if err := tt.setup(tmpDir); err != nil { -t.Fatalf("Setup failed: %v", err) -} + // Setup test structure + if err := tt.setup(tmpDir); err != nil { + t.Fatalf("Setup failed: %v", err) + } -// Run flattening -if err := flattenUnifiedArtifact(tmpDir, true); err != nil { -t.Fatalf("flattenUnifiedArtifact failed: %v", err) -} + // Run flattening + if err := flattenUnifiedArtifact(tmpDir, true); err != nil { + t.Fatalf("flattenUnifiedArtifact failed: %v", err) + } -// Verify expected files exist -for _, file := range tt.expectedFiles { -path := filepath.Join(tmpDir, file) -info, err := os.Stat(path) -if err != nil { -t.Errorf("Expected file %s does not exist: %v", file, err) -} else if info.IsDir() { -t.Errorf("Expected %s to be a file, but it's a directory", file) -} -} + // Verify expected files exist + for _, file := range tt.expectedFiles { + path := filepath.Join(tmpDir, file) + info, err := os.Stat(path) + if err != nil { + t.Errorf("Expected file %s does not exist: %v", file, err) + } else if info.IsDir() { + t.Errorf("Expected %s to be a file, but it's a directory", file) + } + } -// Verify expected directories exist -for _, dir := range tt.expectedDirs { -path := filepath.Join(tmpDir, dir) -info, err := os.Stat(path) -if err != nil { -t.Errorf("Expected directory %s does not exist: %v", dir, err) -} else if !info.IsDir() { -t.Errorf("Expected %s to be a directory", dir) -} -} + // Verify expected directories exist + for _, dir := range tt.expectedDirs { + path := filepath.Join(tmpDir, dir) + info, err := os.Stat(path) + if err != nil { + t.Errorf("Expected directory %s does not exist: %v", dir, err) + } else if !info.IsDir() { + t.Errorf("Expected %s to be a directory", dir) + } + } -// Verify unexpected files don't exist -for _, file := range tt.unexpectedFiles { -path := filepath.Join(tmpDir, file) -if _, err := os.Stat(path); err == nil { -t.Errorf("Unexpected file %s exists", file) -} -} + // Verify unexpected files don't exist + for _, file := range tt.unexpectedFiles { + path := filepath.Join(tmpDir, file) + if _, err := os.Stat(path); err == nil { + t.Errorf("Unexpected file %s exists", file) + } + } -// Verify unexpected directories don't exist -for _, dir := range tt.unexpectedDirs { -path := filepath.Join(tmpDir, dir) -if _, err := os.Stat(path); err == nil { -t.Errorf("Unexpected directory %s exists", dir) -} -} -}) -} + // Verify unexpected directories don't exist + for _, dir := range tt.unexpectedDirs { + path := filepath.Join(tmpDir, dir) + if _, err := os.Stat(path); err == nil { + t.Errorf("Unexpected directory %s exists", dir) + } + } + }) + } }
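---

The core of patch 3 is the walk-and-rename step in `flattenUnifiedArtifact`: every entry under `agent-artifacts/tmp/gh-aw/` is re-rooted at the output directory by taking its path relative to the nested root. Below is a minimal, self-contained sketch of that logic, condensed from the patch. The names `flattenDemo`, the sample tree in `main`, and the temp-dir scaffolding are illustrative additions for this sketch, not part of the gh-aw codebase.

```go
// flatten_sketch.go — condensed sketch of the flattening logic in patch 3.
package main

import (
	"fmt"
	"os"
	"path/filepath"
)

// flattenDemo moves everything under outputDir/agent-artifacts/tmp/gh-aw/
// up to outputDir: walk the nested tree, compute each entry's path relative
// to tmp/gh-aw, and rename it into place (mirroring the patched function).
func flattenDemo(outputDir string) error {
	nested := filepath.Join(outputDir, "agent-artifacts", "tmp", "gh-aw")
	if _, err := os.Stat(nested); os.IsNotExist(err) {
		return nil // no unified artifact: same no-op behavior as the patch
	}
	err := filepath.Walk(nested, func(path string, info os.FileInfo, err error) error {
		if err != nil {
			return err
		}
		if path == nested {
			return nil // skip the nested root itself
		}
		rel, err := filepath.Rel(nested, path)
		if err != nil {
			return err
		}
		dest := filepath.Join(outputDir, rel)
		if info.IsDir() {
			return os.MkdirAll(dest, 0755)
		}
		if err := os.MkdirAll(filepath.Dir(dest), 0755); err != nil {
			return err
		}
		return os.Rename(path, dest)
	})
	if err != nil {
		return err
	}
	// Drop the now-empty wrapper directory, as the patch does.
	return os.RemoveAll(filepath.Join(outputDir, "agent-artifacts"))
}

func main() {
	dir, _ := os.MkdirTemp("", "flatten-demo-*")
	defer os.RemoveAll(dir)

	// Seed the nested layout exercised by the first test case.
	seed := filepath.Join(dir, "agent-artifacts", "tmp", "gh-aw", "aw-prompts")
	_ = os.MkdirAll(seed, 0755)
	_ = os.WriteFile(filepath.Join(seed, "prompt.txt"), []byte("hi"), 0644)

	if err := flattenDemo(dir); err != nil {
		fmt.Println("flatten failed:", err)
		return
	}
	// After flattening, the file lives at <dir>/aw-prompts/prompt.txt.
	_, err := os.Stat(filepath.Join(dir, "aw-prompts", "prompt.txt"))
	fmt.Println("flattened file exists:", err == nil)
}
```

One design point worth noting from the patch: it uses `os.Rename` rather than copy-and-delete, which is cheap but assumes source and destination stay on the same filesystem — a safe assumption here since both sit under the downloaded-logs output directory.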