diff --git a/.github/workflows/ai-moderator.lock.yml b/.github/workflows/ai-moderator.lock.yml
index 40ad9d1ed4..90f2c61d79 100644
--- a/.github/workflows/ai-moderator.lock.yml
+++ b/.github/workflows/ai-moderator.lock.yml
@@ -304,8 +304,8 @@ jobs:
copilot --version
- name: Install awf binary
run: |
- echo "Installing awf from release: v0.6.0"
- curl -L https://github.com/githubnext/gh-aw-firewall/releases/download/v0.6.0/awf-linux-x64 -o awf
+ echo "Installing awf from release: v0.7.0"
+ curl -L https://github.com/githubnext/gh-aw-firewall/releases/download/v0.7.0/awf-linux-x64 -o awf
chmod +x awf
sudo mv awf /usr/local/bin/
which awf
diff --git a/.github/workflows/archie.lock.yml b/.github/workflows/archie.lock.yml
index 1ddb434408..be001a1929 100644
--- a/.github/workflows/archie.lock.yml
+++ b/.github/workflows/archie.lock.yml
@@ -1006,8 +1006,8 @@ jobs:
copilot --version
- name: Install awf binary
run: |
- echo "Installing awf from release: v0.6.0"
- curl -L https://github.com/githubnext/gh-aw-firewall/releases/download/v0.6.0/awf-linux-x64 -o awf
+ echo "Installing awf from release: v0.7.0"
+ curl -L https://github.com/githubnext/gh-aw-firewall/releases/download/v0.7.0/awf-linux-x64 -o awf
chmod +x awf
sudo mv awf /usr/local/bin/
which awf
diff --git a/.github/workflows/artifacts-summary.lock.yml b/.github/workflows/artifacts-summary.lock.yml
index 126d0bc313..7755426028 100644
--- a/.github/workflows/artifacts-summary.lock.yml
+++ b/.github/workflows/artifacts-summary.lock.yml
@@ -253,8 +253,8 @@ jobs:
copilot --version
- name: Install awf binary
run: |
- echo "Installing awf from release: v0.6.0"
- curl -L https://github.com/githubnext/gh-aw-firewall/releases/download/v0.6.0/awf-linux-x64 -o awf
+ echo "Installing awf from release: v0.7.0"
+ curl -L https://github.com/githubnext/gh-aw-firewall/releases/download/v0.7.0/awf-linux-x64 -o awf
chmod +x awf
sudo mv awf /usr/local/bin/
which awf
diff --git a/.github/workflows/brave.lock.yml b/.github/workflows/brave.lock.yml
index f4994833d8..d05acd1b89 100644
--- a/.github/workflows/brave.lock.yml
+++ b/.github/workflows/brave.lock.yml
@@ -985,8 +985,8 @@ jobs:
copilot --version
- name: Install awf binary
run: |
- echo "Installing awf from release: v0.6.0"
- curl -L https://github.com/githubnext/gh-aw-firewall/releases/download/v0.6.0/awf-linux-x64 -o awf
+ echo "Installing awf from release: v0.7.0"
+ curl -L https://github.com/githubnext/gh-aw-firewall/releases/download/v0.7.0/awf-linux-x64 -o awf
chmod +x awf
sudo mv awf /usr/local/bin/
which awf
diff --git a/.github/workflows/breaking-change-checker.lock.yml b/.github/workflows/breaking-change-checker.lock.yml
index 69006329ae..f236b75f9b 100644
--- a/.github/workflows/breaking-change-checker.lock.yml
+++ b/.github/workflows/breaking-change-checker.lock.yml
@@ -250,8 +250,8 @@ jobs:
copilot --version
- name: Install awf binary
run: |
- echo "Installing awf from release: v0.6.0"
- curl -L https://github.com/githubnext/gh-aw-firewall/releases/download/v0.6.0/awf-linux-x64 -o awf
+ echo "Installing awf from release: v0.7.0"
+ curl -L https://github.com/githubnext/gh-aw-firewall/releases/download/v0.7.0/awf-linux-x64 -o awf
chmod +x awf
sudo mv awf /usr/local/bin/
which awf
diff --git a/.github/workflows/campaign-generator.lock.yml b/.github/workflows/campaign-generator.lock.yml
index 1d87e63c3d..d2f3a37f40 100644
--- a/.github/workflows/campaign-generator.lock.yml
+++ b/.github/workflows/campaign-generator.lock.yml
@@ -296,8 +296,8 @@ jobs:
copilot --version
- name: Install awf binary
run: |
- echo "Installing awf from release: v0.6.0"
- curl -L https://github.com/githubnext/gh-aw-firewall/releases/download/v0.6.0/awf-linux-x64 -o awf
+ echo "Installing awf from release: v0.7.0"
+ curl -L https://github.com/githubnext/gh-aw-firewall/releases/download/v0.7.0/awf-linux-x64 -o awf
chmod +x awf
sudo mv awf /usr/local/bin/
which awf
diff --git a/.github/workflows/ci-coach.lock.yml b/.github/workflows/ci-coach.lock.yml
index 728a410cd4..a9fddef9d1 100644
--- a/.github/workflows/ci-coach.lock.yml
+++ b/.github/workflows/ci-coach.lock.yml
@@ -308,8 +308,8 @@ jobs:
copilot --version
- name: Install awf binary
run: |
- echo "Installing awf from release: v0.6.0"
- curl -L https://github.com/githubnext/gh-aw-firewall/releases/download/v0.6.0/awf-linux-x64 -o awf
+ echo "Installing awf from release: v0.7.0"
+ curl -L https://github.com/githubnext/gh-aw-firewall/releases/download/v0.7.0/awf-linux-x64 -o awf
chmod +x awf
sudo mv awf /usr/local/bin/
which awf
diff --git a/.github/workflows/ci-doctor.lock.yml b/.github/workflows/ci-doctor.lock.yml
index 12593a505c..0db97fc1ec 100644
--- a/.github/workflows/ci-doctor.lock.yml
+++ b/.github/workflows/ci-doctor.lock.yml
@@ -280,8 +280,8 @@ jobs:
copilot --version
- name: Install awf binary
run: |
- echo "Installing awf from release: v0.6.0"
- curl -L https://github.com/githubnext/gh-aw-firewall/releases/download/v0.6.0/awf-linux-x64 -o awf
+ echo "Installing awf from release: v0.7.0"
+ curl -L https://github.com/githubnext/gh-aw-firewall/releases/download/v0.7.0/awf-linux-x64 -o awf
chmod +x awf
sudo mv awf /usr/local/bin/
which awf
diff --git a/.github/workflows/cli-consistency-checker.lock.yml b/.github/workflows/cli-consistency-checker.lock.yml
index c6ae8a9737..c7fbce6b5f 100644
--- a/.github/workflows/cli-consistency-checker.lock.yml
+++ b/.github/workflows/cli-consistency-checker.lock.yml
@@ -249,8 +249,8 @@ jobs:
copilot --version
- name: Install awf binary
run: |
- echo "Installing awf from release: v0.6.0"
- curl -L https://github.com/githubnext/gh-aw-firewall/releases/download/v0.6.0/awf-linux-x64 -o awf
+ echo "Installing awf from release: v0.7.0"
+ curl -L https://github.com/githubnext/gh-aw-firewall/releases/download/v0.7.0/awf-linux-x64 -o awf
chmod +x awf
sudo mv awf /usr/local/bin/
which awf
diff --git a/.github/workflows/copilot-pr-merged-report.lock.yml b/.github/workflows/copilot-pr-merged-report.lock.yml
index 4750f51cf0..aea87d50db 100644
--- a/.github/workflows/copilot-pr-merged-report.lock.yml
+++ b/.github/workflows/copilot-pr-merged-report.lock.yml
@@ -254,8 +254,8 @@ jobs:
copilot --version
- name: Install awf binary
run: |
- echo "Installing awf from release: v0.6.0"
- curl -L https://github.com/githubnext/gh-aw-firewall/releases/download/v0.6.0/awf-linux-x64 -o awf
+ echo "Installing awf from release: v0.7.0"
+ curl -L https://github.com/githubnext/gh-aw-firewall/releases/download/v0.7.0/awf-linux-x64 -o awf
chmod +x awf
sudo mv awf /usr/local/bin/
which awf
@@ -3008,17 +3008,18 @@ jobs:
EOF_TOOLS_JSON
cat > /tmp/gh-aw/safe-inputs/mcp-server.cjs << 'EOFSI'
const path = require("path");
- const { startSafeInputsServer } = require("./safe_inputs_mcp_server.cjs");
+ const { startHttpServer } = require("./safe_inputs_mcp_server_http.cjs");
const configPath = path.join(__dirname, "tools.json");
- try {
- startSafeInputsServer(configPath, {
- logDir: "/tmp/gh-aw/safe-inputs/logs",
- skipCleanup: true
- });
- } catch (error) {
- console.error("Failed to start safe-inputs stdio server:", error);
+ const port = parseInt(process.env.GH_AW_SAFE_INPUTS_PORT || "3000", 10);
+ const apiKey = process.env.GH_AW_SAFE_INPUTS_API_KEY || "";
+ startHttpServer(configPath, {
+ port: port,
+ stateless: false,
+ logDir: "/tmp/gh-aw/safe-inputs/logs"
+ }).catch(error => {
+ console.error("Failed to start safe-inputs HTTP server:", error);
process.exit(1);
- }
+ });
EOFSI
chmod +x /tmp/gh-aw/safe-inputs/mcp-server.cjs
@@ -3038,9 +3039,104 @@ jobs:
EOFSH_gh
chmod +x /tmp/gh-aw/safe-inputs/gh.sh
+ - name: Generate Safe Inputs MCP Server Config
+ id: safe-inputs-config
+ uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7.0.1
+ with:
+ script: |
+ function generateSafeInputsConfig({ core, crypto }) {
+ const apiKeyBuffer = crypto.randomBytes(45);
+ const apiKey = apiKeyBuffer.toString("base64").replace(/[/+=]/g, "");
+ const port = 3000;
+ core.setOutput("safe_inputs_api_key", apiKey);
+ core.setOutput("safe_inputs_port", port.toString());
+ core.info(`Safe Inputs MCP server will run on port ${port}`);
+ return { apiKey, port };
+ }
+
+ // Execute the function
+ const crypto = require('crypto');
+ generateSafeInputsConfig({ core, crypto });
+
+ - name: Start Safe Inputs MCP HTTP Server
+ id: safe-inputs-start
+ run: |
+ # Set environment variables for the server
+ export GH_AW_SAFE_INPUTS_PORT=${{ steps.safe-inputs-config.outputs.safe_inputs_port }}
+ export GH_AW_SAFE_INPUTS_API_KEY=${{ steps.safe-inputs-config.outputs.safe_inputs_api_key }}
+
+ export GH_AW_GH_TOKEN="${GH_AW_GH_TOKEN}"
+ export GH_DEBUG="${GH_DEBUG}"
+
+ cd /tmp/gh-aw/safe-inputs
+ # Verify required files exist
+ echo "Verifying safe-inputs setup..."
+ if [ ! -f mcp-server.cjs ]; then
+ echo "ERROR: mcp-server.cjs not found in /tmp/gh-aw/safe-inputs"
+ ls -la /tmp/gh-aw/safe-inputs/
+ exit 1
+ fi
+ if [ ! -f tools.json ]; then
+ echo "ERROR: tools.json not found in /tmp/gh-aw/safe-inputs"
+ ls -la /tmp/gh-aw/safe-inputs/
+ exit 1
+ fi
+ echo "Configuration files verified"
+ # Log environment configuration
+ echo "Server configuration:"
+ echo " Port: $GH_AW_SAFE_INPUTS_PORT"
+ echo " API Key: ${GH_AW_SAFE_INPUTS_API_KEY:0:8}..."
+ echo " Working directory: $(pwd)"
+ # Ensure logs directory exists
+ mkdir -p /tmp/gh-aw/safe-inputs/logs
+ # Create initial server.log file for artifact upload
+ {
+ echo "Safe Inputs MCP Server Log"
+ echo "Start time: $(date)"
+ echo "==========================================="
+ echo ""
+ } > /tmp/gh-aw/safe-inputs/logs/server.log
+ # Start the HTTP server in the background
+ echo "Starting safe-inputs MCP HTTP server..."
+ node mcp-server.cjs >> /tmp/gh-aw/safe-inputs/logs/server.log 2>&1 &
+ SERVER_PID=$!
+ echo "Started safe-inputs MCP server with PID $SERVER_PID"
+ # Wait for server to be ready (max 10 seconds)
+ echo "Waiting for server to become ready..."
+ for i in {1..10}; do
+ # Check if process is still running
+ if ! kill -0 $SERVER_PID 2>/dev/null; then
+ echo "ERROR: Server process $SERVER_PID has died"
+ echo "Server log contents:"
+ cat /tmp/gh-aw/safe-inputs/logs/server.log
+ exit 1
+ fi
+ # Check if server is responding
+ if curl -s -f "http://localhost:$GH_AW_SAFE_INPUTS_PORT/health" > /dev/null 2>&1; then
+ echo "Safe Inputs MCP server is ready (attempt $i/10)"
+ break
+ fi
+ if [ "$i" -eq 10 ]; then
+ echo "ERROR: Safe Inputs MCP server failed to start after 10 seconds"
+ echo "Process status: $(pgrep -f 'mcp-server.cjs' || echo 'not running')"
+ echo "Server log contents:"
+ cat /tmp/gh-aw/safe-inputs/logs/server.log
+ echo "Checking port availability:"
+ netstat -tuln | grep "$GH_AW_SAFE_INPUTS_PORT" || echo "Port $GH_AW_SAFE_INPUTS_PORT not listening"
+ exit 1
+ fi
+ echo "Waiting for server... (attempt $i/10)"
+ sleep 1
+ done
+ # Output the configuration for the MCP client
+ echo "port=$GH_AW_SAFE_INPUTS_PORT" >> "$GITHUB_OUTPUT"
+ echo "api_key=$GH_AW_SAFE_INPUTS_API_KEY" >> "$GITHUB_OUTPUT"
+
- name: Setup MCPs
env:
GH_AW_SAFE_OUTPUTS: ${{ env.GH_AW_SAFE_OUTPUTS }}
+ GH_AW_SAFE_INPUTS_PORT: ${{ steps.safe-inputs-start.outputs.port }}
+ GH_AW_SAFE_INPUTS_API_KEY: ${{ steps.safe-inputs-start.outputs.api_key }}
GH_AW_GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
GH_DEBUG: 1
run: |
@@ -3050,11 +3146,15 @@ jobs:
{
"mcpServers": {
"safeinputs": {
- "type": "local",
- "command": "node",
- "args": ["/tmp/gh-aw/safe-inputs/mcp-server.cjs"],
+ "type": "http",
+ "url": "http://host.docker.internal:\${GH_AW_SAFE_INPUTS_PORT}",
+ "headers": {
+ "Authorization": "Bearer \${GH_AW_SAFE_INPUTS_API_KEY}"
+ },
"tools": ["*"],
"env": {
+ "GH_AW_SAFE_INPUTS_PORT": "\${GH_AW_SAFE_INPUTS_PORT}",
+ "GH_AW_SAFE_INPUTS_API_KEY": "\${GH_AW_SAFE_INPUTS_API_KEY}",
"GH_AW_GH_TOKEN": "\${GH_AW_GH_TOKEN}",
"GH_DEBUG": "\${GH_DEBUG}"
}
diff --git a/.github/workflows/copilot-pr-nlp-analysis.lock.yml b/.github/workflows/copilot-pr-nlp-analysis.lock.yml
index 2c85d2530d..67d9a909d6 100644
--- a/.github/workflows/copilot-pr-nlp-analysis.lock.yml
+++ b/.github/workflows/copilot-pr-nlp-analysis.lock.yml
@@ -312,8 +312,8 @@ jobs:
copilot --version
- name: Install awf binary
run: |
- echo "Installing awf from release: v0.6.0"
- curl -L https://github.com/githubnext/gh-aw-firewall/releases/download/v0.6.0/awf-linux-x64 -o awf
+ echo "Installing awf from release: v0.7.0"
+ curl -L https://github.com/githubnext/gh-aw-firewall/releases/download/v0.7.0/awf-linux-x64 -o awf
chmod +x awf
sudo mv awf /usr/local/bin/
which awf
diff --git a/.github/workflows/copilot-pr-prompt-analysis.lock.yml b/.github/workflows/copilot-pr-prompt-analysis.lock.yml
index e220f3341f..99dac6e5b0 100644
--- a/.github/workflows/copilot-pr-prompt-analysis.lock.yml
+++ b/.github/workflows/copilot-pr-prompt-analysis.lock.yml
@@ -280,8 +280,8 @@ jobs:
copilot --version
- name: Install awf binary
run: |
- echo "Installing awf from release: v0.6.0"
- curl -L https://github.com/githubnext/gh-aw-firewall/releases/download/v0.6.0/awf-linux-x64 -o awf
+ echo "Installing awf from release: v0.7.0"
+ curl -L https://github.com/githubnext/gh-aw-firewall/releases/download/v0.7.0/awf-linux-x64 -o awf
chmod +x awf
sudo mv awf /usr/local/bin/
which awf
diff --git a/.github/workflows/craft.lock.yml b/.github/workflows/craft.lock.yml
index d1932eb617..f7d54764b2 100644
--- a/.github/workflows/craft.lock.yml
+++ b/.github/workflows/craft.lock.yml
@@ -986,8 +986,8 @@ jobs:
copilot --version
- name: Install awf binary
run: |
- echo "Installing awf from release: v0.6.0"
- curl -L https://github.com/githubnext/gh-aw-firewall/releases/download/v0.6.0/awf-linux-x64 -o awf
+ echo "Installing awf from release: v0.7.0"
+ curl -L https://github.com/githubnext/gh-aw-firewall/releases/download/v0.7.0/awf-linux-x64 -o awf
chmod +x awf
sudo mv awf /usr/local/bin/
which awf
diff --git a/.github/workflows/daily-assign-issue-to-user.lock.yml b/.github/workflows/daily-assign-issue-to-user.lock.yml
index d37136c5c9..a612f409e7 100644
--- a/.github/workflows/daily-assign-issue-to-user.lock.yml
+++ b/.github/workflows/daily-assign-issue-to-user.lock.yml
@@ -248,8 +248,8 @@ jobs:
copilot --version
- name: Install awf binary
run: |
- echo "Installing awf from release: v0.6.0"
- curl -L https://github.com/githubnext/gh-aw-firewall/releases/download/v0.6.0/awf-linux-x64 -o awf
+ echo "Installing awf from release: v0.7.0"
+ curl -L https://github.com/githubnext/gh-aw-firewall/releases/download/v0.7.0/awf-linux-x64 -o awf
chmod +x awf
sudo mv awf /usr/local/bin/
which awf
diff --git a/.github/workflows/daily-copilot-token-report.lock.yml b/.github/workflows/daily-copilot-token-report.lock.yml
index 795c46b876..2e5b87adfd 100644
--- a/.github/workflows/daily-copilot-token-report.lock.yml
+++ b/.github/workflows/daily-copilot-token-report.lock.yml
@@ -299,8 +299,8 @@ jobs:
copilot --version
- name: Install awf binary
run: |
- echo "Installing awf from release: v0.6.0"
- curl -L https://github.com/githubnext/gh-aw-firewall/releases/download/v0.6.0/awf-linux-x64 -o awf
+ echo "Installing awf from release: v0.7.0"
+ curl -L https://github.com/githubnext/gh-aw-firewall/releases/download/v0.7.0/awf-linux-x64 -o awf
chmod +x awf
sudo mv awf /usr/local/bin/
which awf
diff --git a/.github/workflows/daily-file-diet.lock.yml b/.github/workflows/daily-file-diet.lock.yml
index c7af2d5e26..bd2e5e937f 100644
--- a/.github/workflows/daily-file-diet.lock.yml
+++ b/.github/workflows/daily-file-diet.lock.yml
@@ -340,8 +340,8 @@ jobs:
copilot --version
- name: Install awf binary
run: |
- echo "Installing awf from release: v0.6.0"
- curl -L https://github.com/githubnext/gh-aw-firewall/releases/download/v0.6.0/awf-linux-x64 -o awf
+ echo "Installing awf from release: v0.7.0"
+ curl -L https://github.com/githubnext/gh-aw-firewall/releases/download/v0.7.0/awf-linux-x64 -o awf
chmod +x awf
sudo mv awf /usr/local/bin/
which awf
diff --git a/.github/workflows/daily-firewall-report.lock.yml b/.github/workflows/daily-firewall-report.lock.yml
index 5670248043..09c0232730 100644
--- a/.github/workflows/daily-firewall-report.lock.yml
+++ b/.github/workflows/daily-firewall-report.lock.yml
@@ -342,8 +342,8 @@ jobs:
copilot --version
- name: Install awf binary
run: |
- echo "Installing awf from release: v0.6.0"
- curl -L https://github.com/githubnext/gh-aw-firewall/releases/download/v0.6.0/awf-linux-x64 -o awf
+ echo "Installing awf from release: v0.7.0"
+ curl -L https://github.com/githubnext/gh-aw-firewall/releases/download/v0.7.0/awf-linux-x64 -o awf
chmod +x awf
sudo mv awf /usr/local/bin/
which awf
diff --git a/.github/workflows/daily-malicious-code-scan.lock.yml b/.github/workflows/daily-malicious-code-scan.lock.yml
index c00a28a133..6f8d1b12a0 100644
--- a/.github/workflows/daily-malicious-code-scan.lock.yml
+++ b/.github/workflows/daily-malicious-code-scan.lock.yml
@@ -249,8 +249,8 @@ jobs:
copilot --version
- name: Install awf binary
run: |
- echo "Installing awf from release: v0.6.0"
- curl -L https://github.com/githubnext/gh-aw-firewall/releases/download/v0.6.0/awf-linux-x64 -o awf
+ echo "Installing awf from release: v0.7.0"
+ curl -L https://github.com/githubnext/gh-aw-firewall/releases/download/v0.7.0/awf-linux-x64 -o awf
chmod +x awf
sudo mv awf /usr/local/bin/
which awf
diff --git a/.github/workflows/daily-news.lock.yml b/.github/workflows/daily-news.lock.yml
index f87c8d26b1..1615cd253a 100644
--- a/.github/workflows/daily-news.lock.yml
+++ b/.github/workflows/daily-news.lock.yml
@@ -307,8 +307,8 @@ jobs:
copilot --version
- name: Install awf binary
run: |
- echo "Installing awf from release: v0.6.0"
- curl -L https://github.com/githubnext/gh-aw-firewall/releases/download/v0.6.0/awf-linux-x64 -o awf
+ echo "Installing awf from release: v0.7.0"
+ curl -L https://github.com/githubnext/gh-aw-firewall/releases/download/v0.7.0/awf-linux-x64 -o awf
chmod +x awf
sudo mv awf /usr/local/bin/
which awf
diff --git a/.github/workflows/daily-repo-chronicle.lock.yml b/.github/workflows/daily-repo-chronicle.lock.yml
index 37297aac5c..49b0f08ceb 100644
--- a/.github/workflows/daily-repo-chronicle.lock.yml
+++ b/.github/workflows/daily-repo-chronicle.lock.yml
@@ -296,8 +296,8 @@ jobs:
copilot --version
- name: Install awf binary
run: |
- echo "Installing awf from release: v0.6.0"
- curl -L https://github.com/githubnext/gh-aw-firewall/releases/download/v0.6.0/awf-linux-x64 -o awf
+ echo "Installing awf from release: v0.7.0"
+ curl -L https://github.com/githubnext/gh-aw-firewall/releases/download/v0.7.0/awf-linux-x64 -o awf
chmod +x awf
sudo mv awf /usr/local/bin/
which awf
diff --git a/.github/workflows/daily-team-status.lock.yml b/.github/workflows/daily-team-status.lock.yml
index 43d703d315..0725d7a8d4 100644
--- a/.github/workflows/daily-team-status.lock.yml
+++ b/.github/workflows/daily-team-status.lock.yml
@@ -262,8 +262,8 @@ jobs:
copilot --version
- name: Install awf binary
run: |
- echo "Installing awf from release: v0.6.0"
- curl -L https://github.com/githubnext/gh-aw-firewall/releases/download/v0.6.0/awf-linux-x64 -o awf
+ echo "Installing awf from release: v0.7.0"
+ curl -L https://github.com/githubnext/gh-aw-firewall/releases/download/v0.7.0/awf-linux-x64 -o awf
chmod +x awf
sudo mv awf /usr/local/bin/
which awf
diff --git a/.github/workflows/daily-workflow-updater.lock.yml b/.github/workflows/daily-workflow-updater.lock.yml
index 07c3b92f01..fa01eb9255 100644
--- a/.github/workflows/daily-workflow-updater.lock.yml
+++ b/.github/workflows/daily-workflow-updater.lock.yml
@@ -249,8 +249,8 @@ jobs:
copilot --version
- name: Install awf binary
run: |
- echo "Installing awf from release: v0.6.0"
- curl -L https://github.com/githubnext/gh-aw-firewall/releases/download/v0.6.0/awf-linux-x64 -o awf
+ echo "Installing awf from release: v0.7.0"
+ curl -L https://github.com/githubnext/gh-aw-firewall/releases/download/v0.7.0/awf-linux-x64 -o awf
chmod +x awf
sudo mv awf /usr/local/bin/
which awf
diff --git a/.github/workflows/dependabot-go-checker.lock.yml b/.github/workflows/dependabot-go-checker.lock.yml
index b333438bb0..7d399c36be 100644
--- a/.github/workflows/dependabot-go-checker.lock.yml
+++ b/.github/workflows/dependabot-go-checker.lock.yml
@@ -250,8 +250,8 @@ jobs:
copilot --version
- name: Install awf binary
run: |
- echo "Installing awf from release: v0.6.0"
- curl -L https://github.com/githubnext/gh-aw-firewall/releases/download/v0.6.0/awf-linux-x64 -o awf
+ echo "Installing awf from release: v0.7.0"
+ curl -L https://github.com/githubnext/gh-aw-firewall/releases/download/v0.7.0/awf-linux-x64 -o awf
chmod +x awf
sudo mv awf /usr/local/bin/
which awf
diff --git a/.github/workflows/dev-hawk.lock.yml b/.github/workflows/dev-hawk.lock.yml
index 229ee5323c..02f87e81d8 100644
--- a/.github/workflows/dev-hawk.lock.yml
+++ b/.github/workflows/dev-hawk.lock.yml
@@ -279,8 +279,8 @@ jobs:
copilot --version
- name: Install awf binary
run: |
- echo "Installing awf from release: v0.6.0"
- curl -L https://github.com/githubnext/gh-aw-firewall/releases/download/v0.6.0/awf-linux-x64 -o awf
+ echo "Installing awf from release: v0.7.0"
+ curl -L https://github.com/githubnext/gh-aw-firewall/releases/download/v0.7.0/awf-linux-x64 -o awf
chmod +x awf
sudo mv awf /usr/local/bin/
which awf
diff --git a/.github/workflows/dev.lock.yml b/.github/workflows/dev.lock.yml
index 0e6ddb0bd7..185eb90cd9 100644
--- a/.github/workflows/dev.lock.yml
+++ b/.github/workflows/dev.lock.yml
@@ -254,8 +254,8 @@ jobs:
copilot --version
- name: Install awf binary
run: |
- echo "Installing awf from release: v0.6.0"
- curl -L https://github.com/githubnext/gh-aw-firewall/releases/download/v0.6.0/awf-linux-x64 -o awf
+ echo "Installing awf from release: v0.7.0"
+ curl -L https://github.com/githubnext/gh-aw-firewall/releases/download/v0.7.0/awf-linux-x64 -o awf
chmod +x awf
sudo mv awf /usr/local/bin/
which awf
@@ -3018,17 +3018,18 @@ jobs:
EOF_TOOLS_JSON
cat > /tmp/gh-aw/safe-inputs/mcp-server.cjs << 'EOFSI'
const path = require("path");
- const { startSafeInputsServer } = require("./safe_inputs_mcp_server.cjs");
+ const { startHttpServer } = require("./safe_inputs_mcp_server_http.cjs");
const configPath = path.join(__dirname, "tools.json");
- try {
- startSafeInputsServer(configPath, {
- logDir: "/tmp/gh-aw/safe-inputs/logs",
- skipCleanup: true
- });
- } catch (error) {
- console.error("Failed to start safe-inputs stdio server:", error);
+ const port = parseInt(process.env.GH_AW_SAFE_INPUTS_PORT || "3000", 10);
+ const apiKey = process.env.GH_AW_SAFE_INPUTS_API_KEY || "";
+ startHttpServer(configPath, {
+ port: port,
+ stateless: false,
+ logDir: "/tmp/gh-aw/safe-inputs/logs"
+ }).catch(error => {
+ console.error("Failed to start safe-inputs HTTP server:", error);
process.exit(1);
- }
+ });
EOFSI
chmod +x /tmp/gh-aw/safe-inputs/mcp-server.cjs
@@ -3048,9 +3049,104 @@ jobs:
EOFSH_gh
chmod +x /tmp/gh-aw/safe-inputs/gh.sh
+ - name: Generate Safe Inputs MCP Server Config
+ id: safe-inputs-config
+ uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7.0.1
+ with:
+ script: |
+ function generateSafeInputsConfig({ core, crypto }) {
+ const apiKeyBuffer = crypto.randomBytes(45);
+ const apiKey = apiKeyBuffer.toString("base64").replace(/[/+=]/g, "");
+ const port = 3000;
+ core.setOutput("safe_inputs_api_key", apiKey);
+ core.setOutput("safe_inputs_port", port.toString());
+ core.info(`Safe Inputs MCP server will run on port ${port}`);
+ return { apiKey, port };
+ }
+
+ // Execute the function
+ const crypto = require('crypto');
+ generateSafeInputsConfig({ core, crypto });
+
+ - name: Start Safe Inputs MCP HTTP Server
+ id: safe-inputs-start
+ run: |
+ # Set environment variables for the server
+ export GH_AW_SAFE_INPUTS_PORT=${{ steps.safe-inputs-config.outputs.safe_inputs_port }}
+ export GH_AW_SAFE_INPUTS_API_KEY=${{ steps.safe-inputs-config.outputs.safe_inputs_api_key }}
+
+ export GH_AW_GH_TOKEN="${GH_AW_GH_TOKEN}"
+ export GH_DEBUG="${GH_DEBUG}"
+
+ cd /tmp/gh-aw/safe-inputs
+ # Verify required files exist
+ echo "Verifying safe-inputs setup..."
+ if [ ! -f mcp-server.cjs ]; then
+ echo "ERROR: mcp-server.cjs not found in /tmp/gh-aw/safe-inputs"
+ ls -la /tmp/gh-aw/safe-inputs/
+ exit 1
+ fi
+ if [ ! -f tools.json ]; then
+ echo "ERROR: tools.json not found in /tmp/gh-aw/safe-inputs"
+ ls -la /tmp/gh-aw/safe-inputs/
+ exit 1
+ fi
+ echo "Configuration files verified"
+ # Log environment configuration
+ echo "Server configuration:"
+ echo " Port: $GH_AW_SAFE_INPUTS_PORT"
+ echo " API Key: ${GH_AW_SAFE_INPUTS_API_KEY:0:8}..."
+ echo " Working directory: $(pwd)"
+ # Ensure logs directory exists
+ mkdir -p /tmp/gh-aw/safe-inputs/logs
+ # Create initial server.log file for artifact upload
+ {
+ echo "Safe Inputs MCP Server Log"
+ echo "Start time: $(date)"
+ echo "==========================================="
+ echo ""
+ } > /tmp/gh-aw/safe-inputs/logs/server.log
+ # Start the HTTP server in the background
+ echo "Starting safe-inputs MCP HTTP server..."
+ node mcp-server.cjs >> /tmp/gh-aw/safe-inputs/logs/server.log 2>&1 &
+ SERVER_PID=$!
+ echo "Started safe-inputs MCP server with PID $SERVER_PID"
+ # Wait for server to be ready (max 10 seconds)
+ echo "Waiting for server to become ready..."
+ for i in {1..10}; do
+ # Check if process is still running
+ if ! kill -0 $SERVER_PID 2>/dev/null; then
+ echo "ERROR: Server process $SERVER_PID has died"
+ echo "Server log contents:"
+ cat /tmp/gh-aw/safe-inputs/logs/server.log
+ exit 1
+ fi
+ # Check if server is responding
+ if curl -s -f "http://localhost:$GH_AW_SAFE_INPUTS_PORT/health" > /dev/null 2>&1; then
+ echo "Safe Inputs MCP server is ready (attempt $i/10)"
+ break
+ fi
+ if [ "$i" -eq 10 ]; then
+ echo "ERROR: Safe Inputs MCP server failed to start after 10 seconds"
+ echo "Process status: $(pgrep -f 'mcp-server.cjs' || echo 'not running')"
+ echo "Server log contents:"
+ cat /tmp/gh-aw/safe-inputs/logs/server.log
+ echo "Checking port availability:"
+ netstat -tuln | grep "$GH_AW_SAFE_INPUTS_PORT" || echo "Port $GH_AW_SAFE_INPUTS_PORT not listening"
+ exit 1
+ fi
+ echo "Waiting for server... (attempt $i/10)"
+ sleep 1
+ done
+ # Output the configuration for the MCP client
+ echo "port=$GH_AW_SAFE_INPUTS_PORT" >> "$GITHUB_OUTPUT"
+ echo "api_key=$GH_AW_SAFE_INPUTS_API_KEY" >> "$GITHUB_OUTPUT"
+
- name: Setup MCPs
env:
GH_AW_SAFE_OUTPUTS: ${{ env.GH_AW_SAFE_OUTPUTS }}
+ GH_AW_SAFE_INPUTS_PORT: ${{ steps.safe-inputs-start.outputs.port }}
+ GH_AW_SAFE_INPUTS_API_KEY: ${{ steps.safe-inputs-start.outputs.api_key }}
GH_AW_GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
GH_DEBUG: 1
run: |
@@ -3060,11 +3156,15 @@ jobs:
{
"mcpServers": {
"safeinputs": {
- "type": "local",
- "command": "node",
- "args": ["/tmp/gh-aw/safe-inputs/mcp-server.cjs"],
+ "type": "http",
+ "url": "http://host.docker.internal:\${GH_AW_SAFE_INPUTS_PORT}",
+ "headers": {
+ "Authorization": "Bearer \${GH_AW_SAFE_INPUTS_API_KEY}"
+ },
"tools": ["*"],
"env": {
+ "GH_AW_SAFE_INPUTS_PORT": "\${GH_AW_SAFE_INPUTS_PORT}",
+ "GH_AW_SAFE_INPUTS_API_KEY": "\${GH_AW_SAFE_INPUTS_API_KEY}",
"GH_AW_GH_TOKEN": "\${GH_AW_GH_TOKEN}",
"GH_DEBUG": "\${GH_DEBUG}"
}
diff --git a/.github/workflows/dictation-prompt.lock.yml b/.github/workflows/dictation-prompt.lock.yml
index ecbefd5471..0c4b368940 100644
--- a/.github/workflows/dictation-prompt.lock.yml
+++ b/.github/workflows/dictation-prompt.lock.yml
@@ -252,8 +252,8 @@ jobs:
copilot --version
- name: Install awf binary
run: |
- echo "Installing awf from release: v0.6.0"
- curl -L https://github.com/githubnext/gh-aw-firewall/releases/download/v0.6.0/awf-linux-x64 -o awf
+ echo "Installing awf from release: v0.7.0"
+ curl -L https://github.com/githubnext/gh-aw-firewall/releases/download/v0.7.0/awf-linux-x64 -o awf
chmod +x awf
sudo mv awf /usr/local/bin/
which awf
diff --git a/.github/workflows/docs-noob-tester.lock.yml b/.github/workflows/docs-noob-tester.lock.yml
index 340fe0dae1..a104f35d62 100644
--- a/.github/workflows/docs-noob-tester.lock.yml
+++ b/.github/workflows/docs-noob-tester.lock.yml
@@ -252,8 +252,8 @@ jobs:
copilot --version
- name: Install awf binary
run: |
- echo "Installing awf from release: v0.6.0"
- curl -L https://github.com/githubnext/gh-aw-firewall/releases/download/v0.6.0/awf-linux-x64 -o awf
+ echo "Installing awf from release: v0.7.0"
+ curl -L https://github.com/githubnext/gh-aw-firewall/releases/download/v0.7.0/awf-linux-x64 -o awf
chmod +x awf
sudo mv awf /usr/local/bin/
which awf
diff --git a/.github/workflows/example-permissions-warning.lock.yml b/.github/workflows/example-permissions-warning.lock.yml
index 6be98b5ae8..589148d618 100644
--- a/.github/workflows/example-permissions-warning.lock.yml
+++ b/.github/workflows/example-permissions-warning.lock.yml
@@ -238,8 +238,8 @@ jobs:
copilot --version
- name: Install awf binary
run: |
- echo "Installing awf from release: v0.6.0"
- curl -L https://github.com/githubnext/gh-aw-firewall/releases/download/v0.6.0/awf-linux-x64 -o awf
+ echo "Installing awf from release: v0.7.0"
+ curl -L https://github.com/githubnext/gh-aw-firewall/releases/download/v0.7.0/awf-linux-x64 -o awf
chmod +x awf
sudo mv awf /usr/local/bin/
which awf
diff --git a/.github/workflows/firewall-escape.lock.yml b/.github/workflows/firewall-escape.lock.yml
index 24780143f6..2a203bbb2b 100644
--- a/.github/workflows/firewall-escape.lock.yml
+++ b/.github/workflows/firewall-escape.lock.yml
@@ -265,8 +265,8 @@ jobs:
copilot --version
- name: Install awf binary
run: |
- echo "Installing awf from release: v0.6.0"
- curl -L https://github.com/githubnext/gh-aw-firewall/releases/download/v0.6.0/awf-linux-x64 -o awf
+ echo "Installing awf from release: v0.7.0"
+ curl -L https://github.com/githubnext/gh-aw-firewall/releases/download/v0.7.0/awf-linux-x64 -o awf
chmod +x awf
sudo mv awf /usr/local/bin/
which awf
diff --git a/.github/workflows/firewall.lock.yml b/.github/workflows/firewall.lock.yml
index 3478b16560..c3f5f344db 100644
--- a/.github/workflows/firewall.lock.yml
+++ b/.github/workflows/firewall.lock.yml
@@ -238,8 +238,8 @@ jobs:
copilot --version
- name: Install awf binary
run: |
- echo "Installing awf from release: v0.6.0"
- curl -L https://github.com/githubnext/gh-aw-firewall/releases/download/v0.6.0/awf-linux-x64 -o awf
+ echo "Installing awf from release: v0.7.0"
+ curl -L https://github.com/githubnext/gh-aw-firewall/releases/download/v0.7.0/awf-linux-x64 -o awf
chmod +x awf
sudo mv awf /usr/local/bin/
which awf
diff --git a/.github/workflows/glossary-maintainer.lock.yml b/.github/workflows/glossary-maintainer.lock.yml
index 363c9a232b..9adcbb2a37 100644
--- a/.github/workflows/glossary-maintainer.lock.yml
+++ b/.github/workflows/glossary-maintainer.lock.yml
@@ -281,8 +281,8 @@ jobs:
copilot --version
- name: Install awf binary
run: |
- echo "Installing awf from release: v0.6.0"
- curl -L https://github.com/githubnext/gh-aw-firewall/releases/download/v0.6.0/awf-linux-x64 -o awf
+ echo "Installing awf from release: v0.7.0"
+ curl -L https://github.com/githubnext/gh-aw-firewall/releases/download/v0.7.0/awf-linux-x64 -o awf
chmod +x awf
sudo mv awf /usr/local/bin/
which awf
diff --git a/.github/workflows/go-file-size-reduction-project64.campaign.g.lock.yml b/.github/workflows/go-file-size-reduction-project64.campaign.g.lock.yml
index 569d59bf5c..43469cc4d3 100644
--- a/.github/workflows/go-file-size-reduction-project64.campaign.g.lock.yml
+++ b/.github/workflows/go-file-size-reduction-project64.campaign.g.lock.yml
@@ -248,8 +248,8 @@ jobs:
copilot --version
- name: Install awf binary
run: |
- echo "Installing awf from release: v0.6.0"
- curl -L https://github.com/githubnext/gh-aw-firewall/releases/download/v0.6.0/awf-linux-x64 -o awf
+ echo "Installing awf from release: v0.7.0"
+ curl -L https://github.com/githubnext/gh-aw-firewall/releases/download/v0.7.0/awf-linux-x64 -o awf
chmod +x awf
sudo mv awf /usr/local/bin/
which awf
diff --git a/.github/workflows/go-file-size-reduction.campaign.g.lock.yml b/.github/workflows/go-file-size-reduction.campaign.g.lock.yml
index a4bae2032c..f780b048eb 100644
--- a/.github/workflows/go-file-size-reduction.campaign.g.lock.yml
+++ b/.github/workflows/go-file-size-reduction.campaign.g.lock.yml
@@ -248,8 +248,8 @@ jobs:
copilot --version
- name: Install awf binary
run: |
- echo "Installing awf from release: v0.6.0"
- curl -L https://github.com/githubnext/gh-aw-firewall/releases/download/v0.6.0/awf-linux-x64 -o awf
+ echo "Installing awf from release: v0.7.0"
+ curl -L https://github.com/githubnext/gh-aw-firewall/releases/download/v0.7.0/awf-linux-x64 -o awf
chmod +x awf
sudo mv awf /usr/local/bin/
which awf
diff --git a/.github/workflows/grumpy-reviewer.lock.yml b/.github/workflows/grumpy-reviewer.lock.yml
index 13ae70638f..ff3725b430 100644
--- a/.github/workflows/grumpy-reviewer.lock.yml
+++ b/.github/workflows/grumpy-reviewer.lock.yml
@@ -1000,8 +1000,8 @@ jobs:
copilot --version
- name: Install awf binary
run: |
- echo "Installing awf from release: v0.6.0"
- curl -L https://github.com/githubnext/gh-aw-firewall/releases/download/v0.6.0/awf-linux-x64 -o awf
+ echo "Installing awf from release: v0.7.0"
+ curl -L https://github.com/githubnext/gh-aw-firewall/releases/download/v0.7.0/awf-linux-x64 -o awf
chmod +x awf
sudo mv awf /usr/local/bin/
which awf
diff --git a/.github/workflows/hourly-ci-cleaner.lock.yml b/.github/workflows/hourly-ci-cleaner.lock.yml
index 19b49ca868..0f33974ed7 100644
--- a/.github/workflows/hourly-ci-cleaner.lock.yml
+++ b/.github/workflows/hourly-ci-cleaner.lock.yml
@@ -280,8 +280,8 @@ jobs:
copilot --version
- name: Install awf binary
run: |
- echo "Installing awf from release: v0.6.0"
- curl -L https://github.com/githubnext/gh-aw-firewall/releases/download/v0.6.0/awf-linux-x64 -o awf
+ echo "Installing awf from release: v0.7.0"
+ curl -L https://github.com/githubnext/gh-aw-firewall/releases/download/v0.7.0/awf-linux-x64 -o awf
chmod +x awf
sudo mv awf /usr/local/bin/
which awf
diff --git a/.github/workflows/human-ai-collaboration.lock.yml b/.github/workflows/human-ai-collaboration.lock.yml
index 11d3201695..b8b6f425f0 100644
--- a/.github/workflows/human-ai-collaboration.lock.yml
+++ b/.github/workflows/human-ai-collaboration.lock.yml
@@ -279,8 +279,8 @@ jobs:
copilot --version
- name: Install awf binary
run: |
- echo "Installing awf from release: v0.6.0"
- curl -L https://github.com/githubnext/gh-aw-firewall/releases/download/v0.6.0/awf-linux-x64 -o awf
+ echo "Installing awf from release: v0.7.0"
+ curl -L https://github.com/githubnext/gh-aw-firewall/releases/download/v0.7.0/awf-linux-x64 -o awf
chmod +x awf
sudo mv awf /usr/local/bin/
which awf
diff --git a/.github/workflows/incident-response.lock.yml b/.github/workflows/incident-response.lock.yml
index 547fe32413..ac4edf59ae 100644
--- a/.github/workflows/incident-response.lock.yml
+++ b/.github/workflows/incident-response.lock.yml
@@ -293,8 +293,8 @@ jobs:
copilot --version
- name: Install awf binary
run: |
- echo "Installing awf from release: v0.6.0"
- curl -L https://github.com/githubnext/gh-aw-firewall/releases/download/v0.6.0/awf-linux-x64 -o awf
+ echo "Installing awf from release: v0.7.0"
+ curl -L https://github.com/githubnext/gh-aw-firewall/releases/download/v0.7.0/awf-linux-x64 -o awf
chmod +x awf
sudo mv awf /usr/local/bin/
which awf
diff --git a/.github/workflows/intelligence.lock.yml b/.github/workflows/intelligence.lock.yml
index 1e2d89c5bb..64d9382016 100644
--- a/.github/workflows/intelligence.lock.yml
+++ b/.github/workflows/intelligence.lock.yml
@@ -333,8 +333,8 @@ jobs:
copilot --version
- name: Install awf binary
run: |
- echo "Installing awf from release: v0.6.0"
- curl -L https://github.com/githubnext/gh-aw-firewall/releases/download/v0.6.0/awf-linux-x64 -o awf
+ echo "Installing awf from release: v0.7.0"
+ curl -L https://github.com/githubnext/gh-aw-firewall/releases/download/v0.7.0/awf-linux-x64 -o awf
chmod +x awf
sudo mv awf /usr/local/bin/
which awf
diff --git a/.github/workflows/issue-monster.lock.yml b/.github/workflows/issue-monster.lock.yml
index 403fcc8ae1..160cea3cba 100644
--- a/.github/workflows/issue-monster.lock.yml
+++ b/.github/workflows/issue-monster.lock.yml
@@ -258,8 +258,8 @@ jobs:
copilot --version
- name: Install awf binary
run: |
- echo "Installing awf from release: v0.6.0"
- curl -L https://github.com/githubnext/gh-aw-firewall/releases/download/v0.6.0/awf-linux-x64 -o awf
+ echo "Installing awf from release: v0.7.0"
+ curl -L https://github.com/githubnext/gh-aw-firewall/releases/download/v0.7.0/awf-linux-x64 -o awf
chmod +x awf
sudo mv awf /usr/local/bin/
which awf
diff --git a/.github/workflows/issue-triage-agent.lock.yml b/.github/workflows/issue-triage-agent.lock.yml
index faa91b19a1..3702c294a3 100644
--- a/.github/workflows/issue-triage-agent.lock.yml
+++ b/.github/workflows/issue-triage-agent.lock.yml
@@ -203,8 +203,8 @@ jobs:
copilot --version
- name: Install awf binary
run: |
- echo "Installing awf from release: v0.6.0"
- curl -L https://github.com/githubnext/gh-aw-firewall/releases/download/v0.6.0/awf-linux-x64 -o awf
+ echo "Installing awf from release: v0.7.0"
+ curl -L https://github.com/githubnext/gh-aw-firewall/releases/download/v0.7.0/awf-linux-x64 -o awf
chmod +x awf
sudo mv awf /usr/local/bin/
which awf
diff --git a/.github/workflows/jsweep.lock.yml b/.github/workflows/jsweep.lock.yml
index ae946aa45f..1d5139901e 100644
--- a/.github/workflows/jsweep.lock.yml
+++ b/.github/workflows/jsweep.lock.yml
@@ -278,8 +278,8 @@ jobs:
copilot --version
- name: Install awf binary
run: |
- echo "Installing awf from release: v0.6.0"
- curl -L https://github.com/githubnext/gh-aw-firewall/releases/download/v0.6.0/awf-linux-x64 -o awf
+ echo "Installing awf from release: v0.7.0"
+ curl -L https://github.com/githubnext/gh-aw-firewall/releases/download/v0.7.0/awf-linux-x64 -o awf
chmod +x awf
sudo mv awf /usr/local/bin/
which awf
diff --git a/.github/workflows/layout-spec-maintainer.lock.yml b/.github/workflows/layout-spec-maintainer.lock.yml
index 94f39dfac6..528eeed2b6 100644
--- a/.github/workflows/layout-spec-maintainer.lock.yml
+++ b/.github/workflows/layout-spec-maintainer.lock.yml
@@ -254,8 +254,8 @@ jobs:
copilot --version
- name: Install awf binary
run: |
- echo "Installing awf from release: v0.6.0"
- curl -L https://github.com/githubnext/gh-aw-firewall/releases/download/v0.6.0/awf-linux-x64 -o awf
+ echo "Installing awf from release: v0.7.0"
+ curl -L https://github.com/githubnext/gh-aw-firewall/releases/download/v0.7.0/awf-linux-x64 -o awf
chmod +x awf
sudo mv awf /usr/local/bin/
which awf
diff --git a/.github/workflows/mcp-inspector.lock.yml b/.github/workflows/mcp-inspector.lock.yml
index 93cb2fa9b7..a56f424b78 100644
--- a/.github/workflows/mcp-inspector.lock.yml
+++ b/.github/workflows/mcp-inspector.lock.yml
@@ -314,8 +314,8 @@ jobs:
copilot --version
- name: Install awf binary
run: |
- echo "Installing awf from release: v0.6.0"
- curl -L https://github.com/githubnext/gh-aw-firewall/releases/download/v0.6.0/awf-linux-x64 -o awf
+ echo "Installing awf from release: v0.7.0"
+ curl -L https://github.com/githubnext/gh-aw-firewall/releases/download/v0.7.0/awf-linux-x64 -o awf
chmod +x awf
sudo mv awf /usr/local/bin/
which awf
diff --git a/.github/workflows/mergefest.lock.yml b/.github/workflows/mergefest.lock.yml
index 763427f32c..60cd035afd 100644
--- a/.github/workflows/mergefest.lock.yml
+++ b/.github/workflows/mergefest.lock.yml
@@ -659,8 +659,8 @@ jobs:
copilot --version
- name: Install awf binary
run: |
- echo "Installing awf from release: v0.6.0"
- curl -L https://github.com/githubnext/gh-aw-firewall/releases/download/v0.6.0/awf-linux-x64 -o awf
+ echo "Installing awf from release: v0.7.0"
+ curl -L https://github.com/githubnext/gh-aw-firewall/releases/download/v0.7.0/awf-linux-x64 -o awf
chmod +x awf
sudo mv awf /usr/local/bin/
which awf
diff --git a/.github/workflows/notion-issue-summary.lock.yml b/.github/workflows/notion-issue-summary.lock.yml
index 576b3c5567..a59a84df28 100644
--- a/.github/workflows/notion-issue-summary.lock.yml
+++ b/.github/workflows/notion-issue-summary.lock.yml
@@ -255,8 +255,8 @@ jobs:
copilot --version
- name: Install awf binary
run: |
- echo "Installing awf from release: v0.6.0"
- curl -L https://github.com/githubnext/gh-aw-firewall/releases/download/v0.6.0/awf-linux-x64 -o awf
+ echo "Installing awf from release: v0.7.0"
+ curl -L https://github.com/githubnext/gh-aw-firewall/releases/download/v0.7.0/awf-linux-x64 -o awf
chmod +x awf
sudo mv awf /usr/local/bin/
which awf
diff --git a/.github/workflows/org-health-report.lock.yml b/.github/workflows/org-health-report.lock.yml
index 600c234ab8..ecdff07d65 100644
--- a/.github/workflows/org-health-report.lock.yml
+++ b/.github/workflows/org-health-report.lock.yml
@@ -300,8 +300,8 @@ jobs:
copilot --version
- name: Install awf binary
run: |
- echo "Installing awf from release: v0.6.0"
- curl -L https://github.com/githubnext/gh-aw-firewall/releases/download/v0.6.0/awf-linux-x64 -o awf
+ echo "Installing awf from release: v0.7.0"
+ curl -L https://github.com/githubnext/gh-aw-firewall/releases/download/v0.7.0/awf-linux-x64 -o awf
chmod +x awf
sudo mv awf /usr/local/bin/
which awf
diff --git a/.github/workflows/org-wide-rollout.lock.yml b/.github/workflows/org-wide-rollout.lock.yml
index 8989f48813..ff4746cd24 100644
--- a/.github/workflows/org-wide-rollout.lock.yml
+++ b/.github/workflows/org-wide-rollout.lock.yml
@@ -300,8 +300,8 @@ jobs:
copilot --version
- name: Install awf binary
run: |
- echo "Installing awf from release: v0.6.0"
- curl -L https://github.com/githubnext/gh-aw-firewall/releases/download/v0.6.0/awf-linux-x64 -o awf
+ echo "Installing awf from release: v0.7.0"
+ curl -L https://github.com/githubnext/gh-aw-firewall/releases/download/v0.7.0/awf-linux-x64 -o awf
chmod +x awf
sudo mv awf /usr/local/bin/
which awf
diff --git a/.github/workflows/pdf-summary.lock.yml b/.github/workflows/pdf-summary.lock.yml
index a7472a4ce9..7dfc7666c8 100644
--- a/.github/workflows/pdf-summary.lock.yml
+++ b/.github/workflows/pdf-summary.lock.yml
@@ -1025,8 +1025,8 @@ jobs:
copilot --version
- name: Install awf binary
run: |
- echo "Installing awf from release: v0.6.0"
- curl -L https://github.com/githubnext/gh-aw-firewall/releases/download/v0.6.0/awf-linux-x64 -o awf
+ echo "Installing awf from release: v0.7.0"
+ curl -L https://github.com/githubnext/gh-aw-firewall/releases/download/v0.7.0/awf-linux-x64 -o awf
chmod +x awf
sudo mv awf /usr/local/bin/
which awf
diff --git a/.github/workflows/plan.lock.yml b/.github/workflows/plan.lock.yml
index bc1560b3d0..ba77b65e3e 100644
--- a/.github/workflows/plan.lock.yml
+++ b/.github/workflows/plan.lock.yml
@@ -985,8 +985,8 @@ jobs:
copilot --version
- name: Install awf binary
run: |
- echo "Installing awf from release: v0.6.0"
- curl -L https://github.com/githubnext/gh-aw-firewall/releases/download/v0.6.0/awf-linux-x64 -o awf
+ echo "Installing awf from release: v0.7.0"
+ curl -L https://github.com/githubnext/gh-aw-firewall/releases/download/v0.7.0/awf-linux-x64 -o awf
chmod +x awf
sudo mv awf /usr/local/bin/
which awf
diff --git a/.github/workflows/poem-bot.lock.yml b/.github/workflows/poem-bot.lock.yml
index 7c155b60f7..3bb41bf802 100644
--- a/.github/workflows/poem-bot.lock.yml
+++ b/.github/workflows/poem-bot.lock.yml
@@ -1007,8 +1007,8 @@ jobs:
copilot --version
- name: Install awf binary
run: |
- echo "Installing awf from release: v0.6.0"
- curl -L https://github.com/githubnext/gh-aw-firewall/releases/download/v0.6.0/awf-linux-x64 -o awf
+ echo "Installing awf from release: v0.7.0"
+ curl -L https://github.com/githubnext/gh-aw-firewall/releases/download/v0.7.0/awf-linux-x64 -o awf
chmod +x awf
sudo mv awf /usr/local/bin/
which awf
diff --git a/.github/workflows/portfolio-analyst.lock.yml b/.github/workflows/portfolio-analyst.lock.yml
index 50f3c60d4f..a34802ff7a 100644
--- a/.github/workflows/portfolio-analyst.lock.yml
+++ b/.github/workflows/portfolio-analyst.lock.yml
@@ -322,8 +322,8 @@ jobs:
copilot --version
- name: Install awf binary
run: |
- echo "Installing awf from release: v0.6.0"
- curl -L https://github.com/githubnext/gh-aw-firewall/releases/download/v0.6.0/awf-linux-x64 -o awf
+ echo "Installing awf from release: v0.7.0"
+ curl -L https://github.com/githubnext/gh-aw-firewall/releases/download/v0.7.0/awf-linux-x64 -o awf
chmod +x awf
sudo mv awf /usr/local/bin/
which awf
diff --git a/.github/workflows/pr-nitpick-reviewer.lock.yml b/.github/workflows/pr-nitpick-reviewer.lock.yml
index ff6c6bae0e..0344133135 100644
--- a/.github/workflows/pr-nitpick-reviewer.lock.yml
+++ b/.github/workflows/pr-nitpick-reviewer.lock.yml
@@ -704,8 +704,8 @@ jobs:
copilot --version
- name: Install awf binary
run: |
- echo "Installing awf from release: v0.6.0"
- curl -L https://github.com/githubnext/gh-aw-firewall/releases/download/v0.6.0/awf-linux-x64 -o awf
+ echo "Installing awf from release: v0.7.0"
+ curl -L https://github.com/githubnext/gh-aw-firewall/releases/download/v0.7.0/awf-linux-x64 -o awf
chmod +x awf
sudo mv awf /usr/local/bin/
which awf
diff --git a/.github/workflows/python-data-charts.lock.yml b/.github/workflows/python-data-charts.lock.yml
index b7ace5b4ca..e16e780e00 100644
--- a/.github/workflows/python-data-charts.lock.yml
+++ b/.github/workflows/python-data-charts.lock.yml
@@ -294,8 +294,8 @@ jobs:
copilot --version
- name: Install awf binary
run: |
- echo "Installing awf from release: v0.6.0"
- curl -L https://github.com/githubnext/gh-aw-firewall/releases/download/v0.6.0/awf-linux-x64 -o awf
+ echo "Installing awf from release: v0.7.0"
+ curl -L https://github.com/githubnext/gh-aw-firewall/releases/download/v0.7.0/awf-linux-x64 -o awf
chmod +x awf
sudo mv awf /usr/local/bin/
which awf
diff --git a/.github/workflows/q.lock.yml b/.github/workflows/q.lock.yml
index 5b00eb74ea..5fd4c21dc0 100644
--- a/.github/workflows/q.lock.yml
+++ b/.github/workflows/q.lock.yml
@@ -1053,8 +1053,8 @@ jobs:
copilot --version
- name: Install awf binary
run: |
- echo "Installing awf from release: v0.6.0"
- curl -L https://github.com/githubnext/gh-aw-firewall/releases/download/v0.6.0/awf-linux-x64 -o awf
+ echo "Installing awf from release: v0.7.0"
+ curl -L https://github.com/githubnext/gh-aw-firewall/releases/download/v0.7.0/awf-linux-x64 -o awf
chmod +x awf
sudo mv awf /usr/local/bin/
which awf
diff --git a/.github/workflows/release.lock.yml b/.github/workflows/release.lock.yml
index 409e0dceee..f2b0bc8803 100644
--- a/.github/workflows/release.lock.yml
+++ b/.github/workflows/release.lock.yml
@@ -258,8 +258,8 @@ jobs:
copilot --version
- name: Install awf binary
run: |
- echo "Installing awf from release: v0.6.0"
- curl -L https://github.com/githubnext/gh-aw-firewall/releases/download/v0.6.0/awf-linux-x64 -o awf
+ echo "Installing awf from release: v0.7.0"
+ curl -L https://github.com/githubnext/gh-aw-firewall/releases/download/v0.7.0/awf-linux-x64 -o awf
chmod +x awf
sudo mv awf /usr/local/bin/
which awf
diff --git a/.github/workflows/repo-tree-map.lock.yml b/.github/workflows/repo-tree-map.lock.yml
index 14ec279840..49bc091547 100644
--- a/.github/workflows/repo-tree-map.lock.yml
+++ b/.github/workflows/repo-tree-map.lock.yml
@@ -253,8 +253,8 @@ jobs:
copilot --version
- name: Install awf binary
run: |
- echo "Installing awf from release: v0.6.0"
- curl -L https://github.com/githubnext/gh-aw-firewall/releases/download/v0.6.0/awf-linux-x64 -o awf
+ echo "Installing awf from release: v0.7.0"
+ curl -L https://github.com/githubnext/gh-aw-firewall/releases/download/v0.7.0/awf-linux-x64 -o awf
chmod +x awf
sudo mv awf /usr/local/bin/
which awf
diff --git a/.github/workflows/repository-quality-improver.lock.yml b/.github/workflows/repository-quality-improver.lock.yml
index 1a11d45281..365ad1c514 100644
--- a/.github/workflows/repository-quality-improver.lock.yml
+++ b/.github/workflows/repository-quality-improver.lock.yml
@@ -278,8 +278,8 @@ jobs:
copilot --version
- name: Install awf binary
run: |
- echo "Installing awf from release: v0.6.0"
- curl -L https://github.com/githubnext/gh-aw-firewall/releases/download/v0.6.0/awf-linux-x64 -o awf
+ echo "Installing awf from release: v0.7.0"
+ curl -L https://github.com/githubnext/gh-aw-firewall/releases/download/v0.7.0/awf-linux-x64 -o awf
chmod +x awf
sudo mv awf /usr/local/bin/
which awf
diff --git a/.github/workflows/research.lock.yml b/.github/workflows/research.lock.yml
index 19f7dfe3e0..99e2ca5e63 100644
--- a/.github/workflows/research.lock.yml
+++ b/.github/workflows/research.lock.yml
@@ -256,8 +256,8 @@ jobs:
copilot --version
- name: Install awf binary
run: |
- echo "Installing awf from release: v0.6.0"
- curl -L https://github.com/githubnext/gh-aw-firewall/releases/download/v0.6.0/awf-linux-x64 -o awf
+ echo "Installing awf from release: v0.7.0"
+ curl -L https://github.com/githubnext/gh-aw-firewall/releases/download/v0.7.0/awf-linux-x64 -o awf
chmod +x awf
sudo mv awf /usr/local/bin/
which awf
diff --git a/.github/workflows/security-compliance.lock.yml b/.github/workflows/security-compliance.lock.yml
index 8e0f47e9bd..de344e3068 100644
--- a/.github/workflows/security-compliance.lock.yml
+++ b/.github/workflows/security-compliance.lock.yml
@@ -284,8 +284,8 @@ jobs:
copilot --version
- name: Install awf binary
run: |
- echo "Installing awf from release: v0.6.0"
- curl -L https://github.com/githubnext/gh-aw-firewall/releases/download/v0.6.0/awf-linux-x64 -o awf
+ echo "Installing awf from release: v0.7.0"
+ curl -L https://github.com/githubnext/gh-aw-firewall/releases/download/v0.7.0/awf-linux-x64 -o awf
chmod +x awf
sudo mv awf /usr/local/bin/
which awf
diff --git a/.github/workflows/slide-deck-maintainer.lock.yml b/.github/workflows/slide-deck-maintainer.lock.yml
index 905b5d5bc2..ad29cb921a 100644
--- a/.github/workflows/slide-deck-maintainer.lock.yml
+++ b/.github/workflows/slide-deck-maintainer.lock.yml
@@ -282,8 +282,8 @@ jobs:
copilot --version
- name: Install awf binary
run: |
- echo "Installing awf from release: v0.6.0"
- curl -L https://github.com/githubnext/gh-aw-firewall/releases/download/v0.6.0/awf-linux-x64 -o awf
+ echo "Installing awf from release: v0.7.0"
+ curl -L https://github.com/githubnext/gh-aw-firewall/releases/download/v0.7.0/awf-linux-x64 -o awf
chmod +x awf
sudo mv awf /usr/local/bin/
which awf
diff --git a/.github/workflows/smoke-copilot-no-firewall.lock.yml b/.github/workflows/smoke-copilot-no-firewall.lock.yml
index db78e27369..c2cdfcbecb 100644
--- a/.github/workflows/smoke-copilot-no-firewall.lock.yml
+++ b/.github/workflows/smoke-copilot-no-firewall.lock.yml
@@ -3477,17 +3477,18 @@ jobs:
EOF_TOOLS_JSON
cat > /tmp/gh-aw/safe-inputs/mcp-server.cjs << 'EOFSI'
const path = require("path");
- const { startSafeInputsServer } = require("./safe_inputs_mcp_server.cjs");
+ const { startHttpServer } = require("./safe_inputs_mcp_server_http.cjs");
const configPath = path.join(__dirname, "tools.json");
- try {
- startSafeInputsServer(configPath, {
- logDir: "/tmp/gh-aw/safe-inputs/logs",
- skipCleanup: true
- });
- } catch (error) {
- console.error("Failed to start safe-inputs stdio server:", error);
+ const port = parseInt(process.env.GH_AW_SAFE_INPUTS_PORT || "3000", 10);
+ const apiKey = process.env.GH_AW_SAFE_INPUTS_API_KEY || "";
+ startHttpServer(configPath, {
+ port: port,
+ stateless: false,
+ logDir: "/tmp/gh-aw/safe-inputs/logs"
+ }).catch(error => {
+ console.error("Failed to start safe-inputs HTTP server:", error);
process.exit(1);
- }
+ });
EOFSI
chmod +x /tmp/gh-aw/safe-inputs/mcp-server.cjs
@@ -3507,10 +3508,105 @@ jobs:
EOFSH_gh
chmod +x /tmp/gh-aw/safe-inputs/gh.sh
+ - name: Generate Safe Inputs MCP Server Config
+ id: safe-inputs-config
+ uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7.0.1
+ with:
+ script: |
+ function generateSafeInputsConfig({ core, crypto }) {
+ const apiKeyBuffer = crypto.randomBytes(45);
+ const apiKey = apiKeyBuffer.toString("base64").replace(/[/+=]/g, "");
+ const port = 3000;
+ core.setOutput("safe_inputs_api_key", apiKey);
+ core.setOutput("safe_inputs_port", port.toString());
+ core.info(`Safe Inputs MCP server will run on port ${port}`);
+ return { apiKey, port };
+ }
+
+ // Execute the function
+ const crypto = require('crypto');
+ generateSafeInputsConfig({ core, crypto });
+
+ - name: Start Safe Inputs MCP HTTP Server
+ id: safe-inputs-start
+ run: |
+ # Set environment variables for the server
+ export GH_AW_SAFE_INPUTS_PORT=${{ steps.safe-inputs-config.outputs.safe_inputs_port }}
+ export GH_AW_SAFE_INPUTS_API_KEY=${{ steps.safe-inputs-config.outputs.safe_inputs_api_key }}
+
+ export GH_AW_GH_TOKEN="${GH_AW_GH_TOKEN}"
+ export GH_DEBUG="${GH_DEBUG}"
+
+ cd /tmp/gh-aw/safe-inputs
+ # Verify required files exist
+ echo "Verifying safe-inputs setup..."
+ if [ ! -f mcp-server.cjs ]; then
+ echo "ERROR: mcp-server.cjs not found in /tmp/gh-aw/safe-inputs"
+ ls -la /tmp/gh-aw/safe-inputs/
+ exit 1
+ fi
+ if [ ! -f tools.json ]; then
+ echo "ERROR: tools.json not found in /tmp/gh-aw/safe-inputs"
+ ls -la /tmp/gh-aw/safe-inputs/
+ exit 1
+ fi
+ echo "Configuration files verified"
+ # Log environment configuration
+ echo "Server configuration:"
+ echo " Port: $GH_AW_SAFE_INPUTS_PORT"
+ echo " API Key: ${GH_AW_SAFE_INPUTS_API_KEY:0:8}..."
+ echo " Working directory: $(pwd)"
+ # Ensure logs directory exists
+ mkdir -p /tmp/gh-aw/safe-inputs/logs
+ # Create initial server.log file for artifact upload
+ {
+ echo "Safe Inputs MCP Server Log"
+ echo "Start time: $(date)"
+ echo "==========================================="
+ echo ""
+ } > /tmp/gh-aw/safe-inputs/logs/server.log
+ # Start the HTTP server in the background
+ echo "Starting safe-inputs MCP HTTP server..."
+ node mcp-server.cjs >> /tmp/gh-aw/safe-inputs/logs/server.log 2>&1 &
+ SERVER_PID=$!
+ echo "Started safe-inputs MCP server with PID $SERVER_PID"
+ # Wait for server to be ready (max 10 seconds)
+ echo "Waiting for server to become ready..."
+ for i in {1..10}; do
+ # Check if process is still running
+ if ! kill -0 $SERVER_PID 2>/dev/null; then
+ echo "ERROR: Server process $SERVER_PID has died"
+ echo "Server log contents:"
+ cat /tmp/gh-aw/safe-inputs/logs/server.log
+ exit 1
+ fi
+ # Check if server is responding
+ if curl -s -f "http://localhost:$GH_AW_SAFE_INPUTS_PORT/health" > /dev/null 2>&1; then
+ echo "Safe Inputs MCP server is ready (attempt $i/10)"
+ break
+ fi
+ if [ "$i" -eq 10 ]; then
+ echo "ERROR: Safe Inputs MCP server failed to start after 10 seconds"
+ echo "Process status: $(pgrep -f 'mcp-server.cjs' || echo 'not running')"
+ echo "Server log contents:"
+ cat /tmp/gh-aw/safe-inputs/logs/server.log
+ echo "Checking port availability:"
+ netstat -tuln | grep "$GH_AW_SAFE_INPUTS_PORT" || echo "Port $GH_AW_SAFE_INPUTS_PORT not listening"
+ exit 1
+ fi
+ echo "Waiting for server... (attempt $i/10)"
+ sleep 1
+ done
+ # Output the configuration for the MCP client
+ echo "port=$GH_AW_SAFE_INPUTS_PORT" >> "$GITHUB_OUTPUT"
+ echo "api_key=$GH_AW_SAFE_INPUTS_API_KEY" >> "$GITHUB_OUTPUT"
+
- name: Setup MCPs
env:
GITHUB_MCP_SERVER_TOKEN: ${{ secrets.GH_AW_GITHUB_MCP_SERVER_TOKEN || secrets.GH_AW_GITHUB_TOKEN || secrets.GITHUB_TOKEN }}
GH_AW_SAFE_OUTPUTS: ${{ env.GH_AW_SAFE_OUTPUTS }}
+ GH_AW_SAFE_INPUTS_PORT: ${{ steps.safe-inputs-start.outputs.port }}
+ GH_AW_SAFE_INPUTS_API_KEY: ${{ steps.safe-inputs-start.outputs.api_key }}
GH_AW_GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
GH_DEBUG: 1
run: |
@@ -3546,11 +3642,15 @@ jobs:
"tools": ["*"]
},
"safeinputs": {
- "type": "local",
- "command": "node",
- "args": ["/tmp/gh-aw/safe-inputs/mcp-server.cjs"],
+ "type": "http",
+ "url": "http://host.docker.internal:\${GH_AW_SAFE_INPUTS_PORT}",
+ "headers": {
+ "Authorization": "Bearer \${GH_AW_SAFE_INPUTS_API_KEY}"
+ },
"tools": ["*"],
"env": {
+ "GH_AW_SAFE_INPUTS_PORT": "\${GH_AW_SAFE_INPUTS_PORT}",
+ "GH_AW_SAFE_INPUTS_API_KEY": "\${GH_AW_SAFE_INPUTS_API_KEY}",
"GH_AW_GH_TOKEN": "\${GH_AW_GH_TOKEN}",
"GH_DEBUG": "\${GH_DEBUG}"
}
diff --git a/.github/workflows/smoke-copilot-playwright.lock.yml b/.github/workflows/smoke-copilot-playwright.lock.yml
index 36ba1db97a..36a2773ad3 100644
--- a/.github/workflows/smoke-copilot-playwright.lock.yml
+++ b/.github/workflows/smoke-copilot-playwright.lock.yml
@@ -693,6 +693,14 @@ jobs:
# Verify installation
copilot --version
+ - name: Install awf binary
+ run: |
+ echo "Installing awf from release: v0.7.0"
+ curl -L https://github.com/githubnext/gh-aw-firewall/releases/download/v0.7.0/awf-linux-x64 -o awf
+ chmod +x awf
+ sudo mv awf /usr/local/bin/
+ which awf
+ awf --version
- name: Downloading container images
run: |
set -e
@@ -3568,17 +3576,18 @@ jobs:
EOF_TOOLS_JSON
cat > /tmp/gh-aw/safe-inputs/mcp-server.cjs << 'EOFSI'
const path = require("path");
- const { startSafeInputsServer } = require("./safe_inputs_mcp_server.cjs");
+ const { startHttpServer } = require("./safe_inputs_mcp_server_http.cjs");
const configPath = path.join(__dirname, "tools.json");
- try {
- startSafeInputsServer(configPath, {
- logDir: "/tmp/gh-aw/safe-inputs/logs",
- skipCleanup: true
- });
- } catch (error) {
- console.error("Failed to start safe-inputs stdio server:", error);
+ const port = parseInt(process.env.GH_AW_SAFE_INPUTS_PORT || "3000", 10);
+ const apiKey = process.env.GH_AW_SAFE_INPUTS_API_KEY || "";
+ startHttpServer(configPath, {
+ port: port,
+ stateless: false,
+ logDir: "/tmp/gh-aw/safe-inputs/logs"
+ }).catch(error => {
+ console.error("Failed to start safe-inputs HTTP server:", error);
process.exit(1);
- }
+ });
EOFSI
chmod +x /tmp/gh-aw/safe-inputs/mcp-server.cjs
@@ -3598,10 +3607,105 @@ jobs:
EOFSH_gh
chmod +x /tmp/gh-aw/safe-inputs/gh.sh
+ - name: Generate Safe Inputs MCP Server Config
+ id: safe-inputs-config
+ uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7.0.1
+ with:
+ script: |
+ function generateSafeInputsConfig({ core, crypto }) {
+ const apiKeyBuffer = crypto.randomBytes(45);
+ const apiKey = apiKeyBuffer.toString("base64").replace(/[/+=]/g, "");
+ const port = 3000;
+ core.setOutput("safe_inputs_api_key", apiKey);
+ core.setOutput("safe_inputs_port", port.toString());
+ core.info(`Safe Inputs MCP server will run on port ${port}`);
+ return { apiKey, port };
+ }
+
+ // Execute the function
+ const crypto = require('crypto');
+ generateSafeInputsConfig({ core, crypto });
+
+ - name: Start Safe Inputs MCP HTTP Server
+ id: safe-inputs-start
+ run: |
+ # Set environment variables for the server
+ export GH_AW_SAFE_INPUTS_PORT=${{ steps.safe-inputs-config.outputs.safe_inputs_port }}
+ export GH_AW_SAFE_INPUTS_API_KEY=${{ steps.safe-inputs-config.outputs.safe_inputs_api_key }}
+
+ export GH_AW_GH_TOKEN="${GH_AW_GH_TOKEN}"
+ export GH_DEBUG="${GH_DEBUG}"
+
+ cd /tmp/gh-aw/safe-inputs
+ # Verify required files exist
+ echo "Verifying safe-inputs setup..."
+ if [ ! -f mcp-server.cjs ]; then
+ echo "ERROR: mcp-server.cjs not found in /tmp/gh-aw/safe-inputs"
+ ls -la /tmp/gh-aw/safe-inputs/
+ exit 1
+ fi
+ if [ ! -f tools.json ]; then
+ echo "ERROR: tools.json not found in /tmp/gh-aw/safe-inputs"
+ ls -la /tmp/gh-aw/safe-inputs/
+ exit 1
+ fi
+ echo "Configuration files verified"
+ # Log environment configuration
+ echo "Server configuration:"
+ echo " Port: $GH_AW_SAFE_INPUTS_PORT"
+ echo " API Key: ${GH_AW_SAFE_INPUTS_API_KEY:0:8}..."
+ echo " Working directory: $(pwd)"
+ # Ensure logs directory exists
+ mkdir -p /tmp/gh-aw/safe-inputs/logs
+ # Create initial server.log file for artifact upload
+ {
+ echo "Safe Inputs MCP Server Log"
+ echo "Start time: $(date)"
+ echo "==========================================="
+ echo ""
+ } > /tmp/gh-aw/safe-inputs/logs/server.log
+ # Start the HTTP server in the background
+ echo "Starting safe-inputs MCP HTTP server..."
+ node mcp-server.cjs >> /tmp/gh-aw/safe-inputs/logs/server.log 2>&1 &
+ SERVER_PID=$!
+ echo "Started safe-inputs MCP server with PID $SERVER_PID"
+ # Wait for server to be ready (max 10 seconds)
+ echo "Waiting for server to become ready..."
+ for i in {1..10}; do
+ # Check if process is still running
+ if ! kill -0 $SERVER_PID 2>/dev/null; then
+ echo "ERROR: Server process $SERVER_PID has died"
+ echo "Server log contents:"
+ cat /tmp/gh-aw/safe-inputs/logs/server.log
+ exit 1
+ fi
+ # Check if server is responding
+ if curl -s -f "http://localhost:$GH_AW_SAFE_INPUTS_PORT/health" > /dev/null 2>&1; then
+ echo "Safe Inputs MCP server is ready (attempt $i/10)"
+ break
+ fi
+ if [ "$i" -eq 10 ]; then
+ echo "ERROR: Safe Inputs MCP server failed to start after 10 seconds"
+ echo "Process status: $(pgrep -f 'mcp-server.cjs' || echo 'not running')"
+ echo "Server log contents:"
+ cat /tmp/gh-aw/safe-inputs/logs/server.log
+ echo "Checking port availability:"
+ netstat -tuln | grep "$GH_AW_SAFE_INPUTS_PORT" || echo "Port $GH_AW_SAFE_INPUTS_PORT not listening"
+ exit 1
+ fi
+ echo "Waiting for server... (attempt $i/10)"
+ sleep 1
+ done
+ # Output the configuration for the MCP client
+ echo "port=$GH_AW_SAFE_INPUTS_PORT" >> "$GITHUB_OUTPUT"
+ echo "api_key=$GH_AW_SAFE_INPUTS_API_KEY" >> "$GITHUB_OUTPUT"
+
- name: Setup MCPs
env:
GITHUB_MCP_SERVER_TOKEN: ${{ secrets.GH_AW_GITHUB_MCP_SERVER_TOKEN || secrets.GH_AW_GITHUB_TOKEN || secrets.GITHUB_TOKEN }}
GH_AW_SAFE_OUTPUTS: ${{ env.GH_AW_SAFE_OUTPUTS }}
+ GH_AW_SAFE_INPUTS_PORT: ${{ steps.safe-inputs-start.outputs.port }}
+ GH_AW_SAFE_INPUTS_API_KEY: ${{ steps.safe-inputs-start.outputs.api_key }}
GH_AW_GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
GH_DEBUG: 1
run: |
@@ -3637,11 +3741,15 @@ jobs:
"tools": ["*"]
},
"safeinputs": {
- "type": "local",
- "command": "node",
- "args": ["/tmp/gh-aw/safe-inputs/mcp-server.cjs"],
+ "type": "http",
+ "url": "http://host.docker.internal:\${GH_AW_SAFE_INPUTS_PORT}",
+ "headers": {
+ "Authorization": "Bearer \${GH_AW_SAFE_INPUTS_API_KEY}"
+ },
"tools": ["*"],
"env": {
+ "GH_AW_SAFE_INPUTS_PORT": "\${GH_AW_SAFE_INPUTS_PORT}",
+ "GH_AW_SAFE_INPUTS_API_KEY": "\${GH_AW_SAFE_INPUTS_API_KEY}",
"GH_AW_GH_TOKEN": "\${GH_AW_GH_TOKEN}",
"GH_DEBUG": "\${GH_DEBUG}"
}
@@ -3710,10 +3818,10 @@ jobs:
staged: false,
network_mode: "defaults",
allowed_domains: ["accounts.google.com","android.clients.google.com","api.github.com","clients2.google.com","defaults","github","node","playwright","www.google.com"],
- firewall_enabled: false,
+ firewall_enabled: true,
firewall_version: "",
steps: {
- firewall: ""
+ firewall: "squid"
},
created_at: new Date().toISOString()
};
@@ -4235,13 +4343,9 @@ jobs:
timeout-minutes: 5
run: |
set -o pipefail
- COPILOT_CLI_INSTRUCTION="$(cat /tmp/gh-aw/aw-prompts/prompt.txt)"
- mkdir -p /tmp/
- mkdir -p /tmp/gh-aw/
- mkdir -p /tmp/gh-aw/agent/
- mkdir -p /tmp/gh-aw/cache-memory/
- mkdir -p /tmp/gh-aw/sandbox/agent/logs/
- copilot --add-dir /tmp/ --add-dir /tmp/gh-aw/ --add-dir /tmp/gh-aw/agent/ --log-level all --log-dir /tmp/gh-aw/sandbox/agent/logs/ --disable-builtin-mcps --allow-all-tools --add-dir /tmp/gh-aw/cache-memory/ --allow-all-paths --prompt "$COPILOT_CLI_INSTRUCTION"${GH_AW_MODEL_AGENT_COPILOT:+ --model "$GH_AW_MODEL_AGENT_COPILOT"} 2>&1 | tee /tmp/gh-aw/agent-stdio.log
+ sudo -E awf --env-all --container-workdir "${GITHUB_WORKSPACE}" --mount /tmp:/tmp:rw --mount "${GITHUB_WORKSPACE}:${GITHUB_WORKSPACE}:rw" --mount /usr/bin/date:/usr/bin/date:ro --mount /usr/bin/gh:/usr/bin/gh:ro --mount /usr/bin/yq:/usr/bin/yq:ro --mount /usr/local/bin/copilot:/usr/local/bin/copilot:ro --allow-domains '*.githubusercontent.com,accounts.google.com,android.clients.google.com,api.business.githubcopilot.com,api.enterprise.githubcopilot.com,api.github.com,api.githubcopilot.com,api.individual.githubcopilot.com,api.npms.io,api.snapcraft.io,archive.ubuntu.com,azure.archive.ubuntu.com,bun.sh,cdn.playwright.dev,clients2.google.com,codeload.github.com,crl.geotrust.com,crl.globalsign.com,crl.identrust.com,crl.sectigo.com,crl.thawte.com,crl.usertrust.com,crl.verisign.com,crl3.digicert.com,crl4.digicert.com,crls.ssl.com,deb.nodesource.com,deno.land,get.pnpm.io,github-cloud.githubusercontent.com,github-cloud.s3.amazonaws.com,github.com,github.githubassets.com,host.docker.internal,json-schema.org,json.schemastore.org,keyserver.ubuntu.com,lfs.github.com,nodejs.org,npm.pkg.github.com,npmjs.com,npmjs.org,objects.githubusercontent.com,ocsp.digicert.com,ocsp.geotrust.com,ocsp.globalsign.com,ocsp.identrust.com,ocsp.sectigo.com,ocsp.ssl.com,ocsp.thawte.com,ocsp.usertrust.com,ocsp.verisign.com,packagecloud.io,packages.cloud.google.com,packages.microsoft.com,playwright.download.prss.microsoft.com,ppa.launchpad.net,raw.githubusercontent.com,registry.bower.io,registry.npmjs.com,registry.npmjs.org,registry.yarnpkg.com,repo.yarnpkg.com,s.symcb.com,s.symcd.com,security.ubuntu.com,skimdb.npmjs.com,ts-crl.ws.symantec.com,ts-ocsp.ws.symantec.com,www.google.com,www.npmjs.com,www.npmjs.org,yarnpkg.com' --log-level info --proxy-logs-dir /tmp/gh-aw/sandbox/firewall/logs \
+ -- /usr/local/bin/copilot --add-dir /tmp/gh-aw/ --log-level all --log-dir /tmp/gh-aw/sandbox/agent/logs/ --add-dir "${GITHUB_WORKSPACE}" --disable-builtin-mcps --allow-all-tools --add-dir /tmp/gh-aw/cache-memory/ --allow-all-paths --prompt "$(cat /tmp/gh-aw/aw-prompts/prompt.txt)"${GH_AW_MODEL_AGENT_COPILOT:+ --model "$GH_AW_MODEL_AGENT_COPILOT"} \
+ 2>&1 | tee /tmp/gh-aw/agent-stdio.log
env:
COPILOT_AGENT_RUNNER_TYPE: STANDALONE
COPILOT_GITHUB_TOKEN: ${{ secrets.COPILOT_GITHUB_TOKEN }}
@@ -7236,6 +7340,165 @@ jobs:
return entries;
}
main();
+ - name: Upload Firewall Logs
+ if: always()
+ continue-on-error: true
+ uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5
+ with:
+ name: firewall-logs-smoke-copilot-playwright
+ path: /tmp/gh-aw/sandbox/firewall/logs/
+ if-no-files-found: ignore
+ - name: Parse firewall logs for step summary
+ if: always()
+ uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
+ with:
+ script: |
+ function sanitizeWorkflowName(name) {
+ return name
+ .toLowerCase()
+ .replace(/[:\\/\s]/g, "-")
+ .replace(/[^a-z0-9._-]/g, "-");
+ }
+ function main() {
+ const fs = require("fs");
+ const path = require("path");
+ try {
+ const squidLogsDir = `/tmp/gh-aw/sandbox/firewall/logs/`;
+ if (!fs.existsSync(squidLogsDir)) {
+ core.info(`No firewall logs directory found at: ${squidLogsDir}`);
+ return;
+ }
+ const files = fs.readdirSync(squidLogsDir).filter(file => file.endsWith(".log"));
+ if (files.length === 0) {
+ core.info(`No firewall log files found in: ${squidLogsDir}`);
+ return;
+ }
+ core.info(`Found ${files.length} firewall log file(s)`);
+ let totalRequests = 0;
+ let allowedRequests = 0;
+ let deniedRequests = 0;
+ const allowedDomains = new Set();
+ const deniedDomains = new Set();
+ const requestsByDomain = new Map();
+ for (const file of files) {
+ const filePath = path.join(squidLogsDir, file);
+ core.info(`Parsing firewall log: ${file}`);
+ const content = fs.readFileSync(filePath, "utf8");
+ const lines = content.split("\n").filter(line => line.trim());
+ for (const line of lines) {
+ const entry = parseFirewallLogLine(line);
+ if (!entry) {
+ continue;
+ }
+ totalRequests++;
+ const isAllowed = isRequestAllowed(entry.decision, entry.status);
+ if (isAllowed) {
+ allowedRequests++;
+ allowedDomains.add(entry.domain);
+ } else {
+ deniedRequests++;
+ deniedDomains.add(entry.domain);
+ }
+ if (!requestsByDomain.has(entry.domain)) {
+ requestsByDomain.set(entry.domain, { allowed: 0, denied: 0 });
+ }
+ const domainStats = requestsByDomain.get(entry.domain);
+ if (isAllowed) {
+ domainStats.allowed++;
+ } else {
+ domainStats.denied++;
+ }
+ }
+ }
+ const summary = generateFirewallSummary({
+ totalRequests,
+ allowedRequests,
+ deniedRequests,
+ allowedDomains: Array.from(allowedDomains).sort(),
+ deniedDomains: Array.from(deniedDomains).sort(),
+ requestsByDomain,
+ });
+ core.summary.addRaw(summary).write();
+ core.info("Firewall log summary generated successfully");
+ } catch (error) {
+ core.setFailed(error instanceof Error ? error : String(error));
+ }
+ }
+ function parseFirewallLogLine(line) {
+ const trimmed = line.trim();
+ if (!trimmed || trimmed.startsWith("#")) {
+ return null;
+ }
+ const fields = trimmed.match(/(?:[^\s"]+|"[^"]*")+/g);
+ if (!fields || fields.length < 10) {
+ return null;
+ }
+ const timestamp = fields[0];
+ if (!/^\d+(\.\d+)?$/.test(timestamp)) {
+ return null;
+ }
+ return {
+ timestamp,
+ clientIpPort: fields[1],
+ domain: fields[2],
+ destIpPort: fields[3],
+ proto: fields[4],
+ method: fields[5],
+ status: fields[6],
+ decision: fields[7],
+ url: fields[8],
+ userAgent: fields[9]?.replace(/^"|"$/g, "") || "-",
+ };
+ }
+ function isRequestAllowed(decision, status) {
+ const statusCode = parseInt(status, 10);
+ if (statusCode === 200 || statusCode === 206 || statusCode === 304) {
+ return true;
+ }
+ if (decision.includes("TCP_TUNNEL") || decision.includes("TCP_HIT") || decision.includes("TCP_MISS")) {
+ return true;
+ }
+ if (decision.includes("NONE_NONE") || decision.includes("TCP_DENIED") || statusCode === 403 || statusCode === 407) {
+ return false;
+ }
+ return false;
+ }
+ function generateFirewallSummary(analysis) {
+ const { totalRequests, requestsByDomain } = analysis;
+ const validDomains = Array.from(requestsByDomain.keys())
+ .filter(domain => domain !== "-")
+ .sort();
+ const uniqueDomainCount = validDomains.length;
+ let validAllowedRequests = 0;
+ let validDeniedRequests = 0;
+ for (const domain of validDomains) {
+ const stats = requestsByDomain.get(domain);
+ validAllowedRequests += stats.allowed;
+ validDeniedRequests += stats.denied;
+ }
+ let summary = "### 🔥 Firewall Activity\n\n";
+ summary += "\n";
+ summary += `📊 ${totalRequests} request${totalRequests !== 1 ? "s" : ""} | `;
+ summary += `${validAllowedRequests} allowed | `;
+ summary += `${validDeniedRequests} blocked | `;
+ summary += `${uniqueDomainCount} unique domain${uniqueDomainCount !== 1 ? "s" : ""}\n\n`;
+ if (uniqueDomainCount > 0) {
+ summary += "| Domain | Allowed | Denied |\n";
+ summary += "|--------|---------|--------|\n";
+ for (const domain of validDomains) {
+ const stats = requestsByDomain.get(domain);
+ summary += `| ${domain} | ${stats.allowed} | ${stats.denied} |\n`;
+ }
+ } else {
+ summary += "No firewall activity detected.\n";
+ }
+ summary += "\n \n\n";
+ return summary;
+ }
+ const isDirectExecution = typeof module === "undefined" || (typeof require !== "undefined" && typeof require.main !== "undefined" && require.main === module);
+ if (isDirectExecution) {
+ main();
+ }
- name: Upload Agent Stdio
if: always()
uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5
diff --git a/.github/workflows/smoke-copilot-playwright.md b/.github/workflows/smoke-copilot-playwright.md
index 041f8e5045..7b0a1855e5 100644
--- a/.github/workflows/smoke-copilot-playwright.md
+++ b/.github/workflows/smoke-copilot-playwright.md
@@ -30,7 +30,7 @@ network:
- accounts.google.com # Chrome account checks
- android.clients.google.com # Chrome internal
sandbox:
- agent: false # Firewall disabled (migrated from network.firewall)
+ agent: awf # Firewall enabled
tools:
cache-memory: true
edit:
@@ -56,7 +56,7 @@ safe-outputs:
run-success: "📰 VERDICT: [{workflow_name}]({run_url}) has concluded. All systems operational. This is a developing story. 🎤"
run-failure: "📰 DEVELOPING STORY: [{workflow_name}]({run_url}) reports {status}. Our correspondents are investigating the incident..."
timeout-minutes: 5
-strict: true
+strict: false
steps:
# Pre-flight Docker container test for Playwright MCP
- name: Pre-flight Playwright MCP Test
diff --git a/.github/workflows/smoke-copilot-safe-inputs.lock.yml b/.github/workflows/smoke-copilot-safe-inputs.lock.yml
index f12b29db9a..577db5f6e6 100644
--- a/.github/workflows/smoke-copilot-safe-inputs.lock.yml
+++ b/.github/workflows/smoke-copilot-safe-inputs.lock.yml
@@ -676,8 +676,8 @@ jobs:
copilot --version
- name: Install awf binary
run: |
- echo "Installing awf from release: v0.6.0"
- curl -L https://github.com/githubnext/gh-aw-firewall/releases/download/v0.6.0/awf-linux-x64 -o awf
+ echo "Installing awf from release: v0.7.0"
+ curl -L https://github.com/githubnext/gh-aw-firewall/releases/download/v0.7.0/awf-linux-x64 -o awf
chmod +x awf
sudo mv awf /usr/local/bin/
which awf
@@ -3453,17 +3453,18 @@ jobs:
EOF_TOOLS_JSON
cat > /tmp/gh-aw/safe-inputs/mcp-server.cjs << 'EOFSI'
const path = require("path");
- const { startSafeInputsServer } = require("./safe_inputs_mcp_server.cjs");
+ const { startHttpServer } = require("./safe_inputs_mcp_server_http.cjs");
const configPath = path.join(__dirname, "tools.json");
- try {
- startSafeInputsServer(configPath, {
- logDir: "/tmp/gh-aw/safe-inputs/logs",
- skipCleanup: true
- });
- } catch (error) {
- console.error("Failed to start safe-inputs stdio server:", error);
+ const port = parseInt(process.env.GH_AW_SAFE_INPUTS_PORT || "3000", 10);
+ const apiKey = process.env.GH_AW_SAFE_INPUTS_API_KEY || "";
+ startHttpServer(configPath, {
+ port: port,
+ stateless: false,
+ logDir: "/tmp/gh-aw/safe-inputs/logs"
+ }).catch(error => {
+ console.error("Failed to start safe-inputs HTTP server:", error);
process.exit(1);
- }
+ });
EOFSI
chmod +x /tmp/gh-aw/safe-inputs/mcp-server.cjs
@@ -3483,9 +3484,104 @@ jobs:
EOFSH_gh
chmod +x /tmp/gh-aw/safe-inputs/gh.sh
+ - name: Generate Safe Inputs MCP Server Config
+ id: safe-inputs-config
+ uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7.0.1
+ with:
+ script: |
+ function generateSafeInputsConfig({ core, crypto }) {
+ const apiKeyBuffer = crypto.randomBytes(45);
+ const apiKey = apiKeyBuffer.toString("base64").replace(/[/+=]/g, "");
+ const port = 3000;
+ core.setOutput("safe_inputs_api_key", apiKey);
+ core.setOutput("safe_inputs_port", port.toString());
+ core.info(`Safe Inputs MCP server will run on port ${port}`);
+ return { apiKey, port };
+ }
+
+ // Execute the function
+ const crypto = require('crypto');
+ generateSafeInputsConfig({ core, crypto });
+
+ - name: Start Safe Inputs MCP HTTP Server
+ id: safe-inputs-start
+ run: |
+ # Set environment variables for the server
+ export GH_AW_SAFE_INPUTS_PORT=${{ steps.safe-inputs-config.outputs.safe_inputs_port }}
+ export GH_AW_SAFE_INPUTS_API_KEY=${{ steps.safe-inputs-config.outputs.safe_inputs_api_key }}
+
+ export GH_AW_GH_TOKEN="${GH_AW_GH_TOKEN}"
+ export GH_DEBUG="${GH_DEBUG}"
+
+ cd /tmp/gh-aw/safe-inputs
+ # Verify required files exist
+ echo "Verifying safe-inputs setup..."
+ if [ ! -f mcp-server.cjs ]; then
+ echo "ERROR: mcp-server.cjs not found in /tmp/gh-aw/safe-inputs"
+ ls -la /tmp/gh-aw/safe-inputs/
+ exit 1
+ fi
+ if [ ! -f tools.json ]; then
+ echo "ERROR: tools.json not found in /tmp/gh-aw/safe-inputs"
+ ls -la /tmp/gh-aw/safe-inputs/
+ exit 1
+ fi
+ echo "Configuration files verified"
+ # Log environment configuration
+ echo "Server configuration:"
+ echo " Port: $GH_AW_SAFE_INPUTS_PORT"
+ echo " API Key: ${GH_AW_SAFE_INPUTS_API_KEY:0:8}..."
+ echo " Working directory: $(pwd)"
+ # Ensure logs directory exists
+ mkdir -p /tmp/gh-aw/safe-inputs/logs
+ # Create initial server.log file for artifact upload
+ {
+ echo "Safe Inputs MCP Server Log"
+ echo "Start time: $(date)"
+ echo "==========================================="
+ echo ""
+ } > /tmp/gh-aw/safe-inputs/logs/server.log
+ # Start the HTTP server in the background
+ echo "Starting safe-inputs MCP HTTP server..."
+ node mcp-server.cjs >> /tmp/gh-aw/safe-inputs/logs/server.log 2>&1 &
+ SERVER_PID=$!
+ echo "Started safe-inputs MCP server with PID $SERVER_PID"
+ # Wait for server to be ready (max 10 seconds)
+ echo "Waiting for server to become ready..."
+ for i in {1..10}; do
+ # Check if process is still running
+ if ! kill -0 $SERVER_PID 2>/dev/null; then
+ echo "ERROR: Server process $SERVER_PID has died"
+ echo "Server log contents:"
+ cat /tmp/gh-aw/safe-inputs/logs/server.log
+ exit 1
+ fi
+ # Check if server is responding
+ if curl -s -f "http://localhost:$GH_AW_SAFE_INPUTS_PORT/health" > /dev/null 2>&1; then
+ echo "Safe Inputs MCP server is ready (attempt $i/10)"
+ break
+ fi
+ if [ "$i" -eq 10 ]; then
+ echo "ERROR: Safe Inputs MCP server failed to start after 10 seconds"
+ echo "Process status: $(pgrep -f 'mcp-server.cjs' || echo 'not running')"
+ echo "Server log contents:"
+ cat /tmp/gh-aw/safe-inputs/logs/server.log
+ echo "Checking port availability:"
+ netstat -tuln | grep "$GH_AW_SAFE_INPUTS_PORT" || echo "Port $GH_AW_SAFE_INPUTS_PORT not listening"
+ exit 1
+ fi
+ echo "Waiting for server... (attempt $i/10)"
+ sleep 1
+ done
+ # Output the configuration for the MCP client
+ echo "port=$GH_AW_SAFE_INPUTS_PORT" >> "$GITHUB_OUTPUT"
+ echo "api_key=$GH_AW_SAFE_INPUTS_API_KEY" >> "$GITHUB_OUTPUT"
+
- name: Setup MCPs
env:
GH_AW_SAFE_OUTPUTS: ${{ env.GH_AW_SAFE_OUTPUTS }}
+ GH_AW_SAFE_INPUTS_PORT: ${{ steps.safe-inputs-start.outputs.port }}
+ GH_AW_SAFE_INPUTS_API_KEY: ${{ steps.safe-inputs-start.outputs.api_key }}
GH_AW_GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
GH_DEBUG: 1
run: |
@@ -3495,11 +3591,15 @@ jobs:
{
"mcpServers": {
"safeinputs": {
- "type": "local",
- "command": "node",
- "args": ["/tmp/gh-aw/safe-inputs/mcp-server.cjs"],
+ "type": "http",
+ "url": "http://host.docker.internal:\${GH_AW_SAFE_INPUTS_PORT}",
+ "headers": {
+ "Authorization": "Bearer \${GH_AW_SAFE_INPUTS_API_KEY}"
+ },
"tools": ["*"],
"env": {
+ "GH_AW_SAFE_INPUTS_PORT": "\${GH_AW_SAFE_INPUTS_PORT}",
+ "GH_AW_SAFE_INPUTS_API_KEY": "\${GH_AW_SAFE_INPUTS_API_KEY}",
"GH_AW_GH_TOKEN": "\${GH_AW_GH_TOKEN}",
"GH_DEBUG": "\${GH_DEBUG}"
}
diff --git a/.github/workflows/smoke-copilot.lock.yml b/.github/workflows/smoke-copilot.lock.yml
index e0c05c6db0..696d5329cf 100644
--- a/.github/workflows/smoke-copilot.lock.yml
+++ b/.github/workflows/smoke-copilot.lock.yml
@@ -674,6 +674,14 @@ jobs:
# Verify installation
copilot --version
+ - name: Install awf binary
+ run: |
+ echo "Installing awf from release: v0.7.0"
+ curl -L https://github.com/githubnext/gh-aw-firewall/releases/download/v0.7.0/awf-linux-x64 -o awf
+ chmod +x awf
+ sudo mv awf /usr/local/bin/
+ which awf
+ awf --version
- name: Downloading container images
run: |
set -e
@@ -2375,10 +2383,10 @@ jobs:
staged: false,
network_mode: "defaults",
allowed_domains: ["defaults","node","github"],
- firewall_enabled: false,
+ firewall_enabled: true,
firewall_version: "",
steps: {
- firewall: ""
+ firewall: "squid"
},
created_at: new Date().toISOString()
};
@@ -2880,13 +2888,9 @@ jobs:
timeout-minutes: 5
run: |
set -o pipefail
- COPILOT_CLI_INSTRUCTION="$(cat /tmp/gh-aw/aw-prompts/prompt.txt)"
- mkdir -p /tmp/
- mkdir -p /tmp/gh-aw/
- mkdir -p /tmp/gh-aw/agent/
- mkdir -p /tmp/gh-aw/cache-memory/
- mkdir -p /tmp/gh-aw/sandbox/agent/logs/
- copilot --add-dir /tmp/ --add-dir /tmp/gh-aw/ --add-dir /tmp/gh-aw/agent/ --log-level all --log-dir /tmp/gh-aw/sandbox/agent/logs/ --disable-builtin-mcps --allow-all-tools --add-dir /tmp/gh-aw/cache-memory/ --allow-all-paths --prompt "$COPILOT_CLI_INSTRUCTION"${GH_AW_MODEL_AGENT_COPILOT:+ --model "$GH_AW_MODEL_AGENT_COPILOT"} 2>&1 | tee /tmp/gh-aw/agent-stdio.log
+ sudo -E awf --env-all --container-workdir "${GITHUB_WORKSPACE}" --mount /tmp:/tmp:rw --mount "${GITHUB_WORKSPACE}:${GITHUB_WORKSPACE}:rw" --mount /usr/bin/date:/usr/bin/date:ro --mount /usr/bin/gh:/usr/bin/gh:ro --mount /usr/bin/yq:/usr/bin/yq:ro --mount /usr/local/bin/copilot:/usr/local/bin/copilot:ro --allow-domains '*.githubusercontent.com,api.business.githubcopilot.com,api.enterprise.githubcopilot.com,api.github.com,api.githubcopilot.com,api.individual.githubcopilot.com,api.npms.io,api.snapcraft.io,archive.ubuntu.com,azure.archive.ubuntu.com,bun.sh,codeload.github.com,crl.geotrust.com,crl.globalsign.com,crl.identrust.com,crl.sectigo.com,crl.thawte.com,crl.usertrust.com,crl.verisign.com,crl3.digicert.com,crl4.digicert.com,crls.ssl.com,deb.nodesource.com,deno.land,get.pnpm.io,github-cloud.githubusercontent.com,github-cloud.s3.amazonaws.com,github.com,github.githubassets.com,host.docker.internal,json-schema.org,json.schemastore.org,keyserver.ubuntu.com,lfs.github.com,nodejs.org,npm.pkg.github.com,npmjs.com,npmjs.org,objects.githubusercontent.com,ocsp.digicert.com,ocsp.geotrust.com,ocsp.globalsign.com,ocsp.identrust.com,ocsp.sectigo.com,ocsp.ssl.com,ocsp.thawte.com,ocsp.usertrust.com,ocsp.verisign.com,packagecloud.io,packages.cloud.google.com,packages.microsoft.com,ppa.launchpad.net,raw.githubusercontent.com,registry.bower.io,registry.npmjs.com,registry.npmjs.org,registry.yarnpkg.com,repo.yarnpkg.com,s.symcb.com,s.symcd.com,security.ubuntu.com,skimdb.npmjs.com,ts-crl.ws.symantec.com,ts-ocsp.ws.symantec.com,www.npmjs.com,www.npmjs.org,yarnpkg.com' --log-level info --proxy-logs-dir /tmp/gh-aw/sandbox/firewall/logs \
+ -- /usr/local/bin/copilot --add-dir /tmp/gh-aw/ --log-level all --log-dir /tmp/gh-aw/sandbox/agent/logs/ --add-dir "${GITHUB_WORKSPACE}" --disable-builtin-mcps --allow-all-tools --add-dir /tmp/gh-aw/cache-memory/ --allow-all-paths --prompt "$(cat /tmp/gh-aw/aw-prompts/prompt.txt)"${GH_AW_MODEL_AGENT_COPILOT:+ --model "$GH_AW_MODEL_AGENT_COPILOT"} \
+ 2>&1 | tee /tmp/gh-aw/agent-stdio.log
env:
COPILOT_AGENT_RUNNER_TYPE: STANDALONE
COPILOT_GITHUB_TOKEN: ${{ secrets.COPILOT_GITHUB_TOKEN }}
@@ -5871,6 +5875,165 @@ jobs:
return entries;
}
main();
+ - name: Upload Firewall Logs
+ if: always()
+ continue-on-error: true
+ uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5
+ with:
+ name: firewall-logs-smoke-copilot
+ path: /tmp/gh-aw/sandbox/firewall/logs/
+ if-no-files-found: ignore
+ - name: Parse firewall logs for step summary
+ if: always()
+ uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
+ with:
+ script: |
+ function sanitizeWorkflowName(name) {
+ return name
+ .toLowerCase()
+ .replace(/[:\\/\s]/g, "-")
+ .replace(/[^a-z0-9._-]/g, "-");
+ }
+ function main() {
+ const fs = require("fs");
+ const path = require("path");
+ try {
+ const squidLogsDir = `/tmp/gh-aw/sandbox/firewall/logs/`;
+ if (!fs.existsSync(squidLogsDir)) {
+ core.info(`No firewall logs directory found at: ${squidLogsDir}`);
+ return;
+ }
+ const files = fs.readdirSync(squidLogsDir).filter(file => file.endsWith(".log"));
+ if (files.length === 0) {
+ core.info(`No firewall log files found in: ${squidLogsDir}`);
+ return;
+ }
+ core.info(`Found ${files.length} firewall log file(s)`);
+ let totalRequests = 0;
+ let allowedRequests = 0;
+ let deniedRequests = 0;
+ const allowedDomains = new Set();
+ const deniedDomains = new Set();
+ const requestsByDomain = new Map();
+ for (const file of files) {
+ const filePath = path.join(squidLogsDir, file);
+ core.info(`Parsing firewall log: ${file}`);
+ const content = fs.readFileSync(filePath, "utf8");
+ const lines = content.split("\n").filter(line => line.trim());
+ for (const line of lines) {
+ const entry = parseFirewallLogLine(line);
+ if (!entry) {
+ continue;
+ }
+ totalRequests++;
+ const isAllowed = isRequestAllowed(entry.decision, entry.status);
+ if (isAllowed) {
+ allowedRequests++;
+ allowedDomains.add(entry.domain);
+ } else {
+ deniedRequests++;
+ deniedDomains.add(entry.domain);
+ }
+ if (!requestsByDomain.has(entry.domain)) {
+ requestsByDomain.set(entry.domain, { allowed: 0, denied: 0 });
+ }
+ const domainStats = requestsByDomain.get(entry.domain);
+ if (isAllowed) {
+ domainStats.allowed++;
+ } else {
+ domainStats.denied++;
+ }
+ }
+ }
+ const summary = generateFirewallSummary({
+ totalRequests,
+ allowedRequests,
+ deniedRequests,
+ allowedDomains: Array.from(allowedDomains).sort(),
+ deniedDomains: Array.from(deniedDomains).sort(),
+ requestsByDomain,
+ });
+ core.summary.addRaw(summary).write();
+ core.info("Firewall log summary generated successfully");
+ } catch (error) {
+ core.setFailed(error instanceof Error ? error : String(error));
+ }
+ }
+ function parseFirewallLogLine(line) {
+ const trimmed = line.trim();
+ if (!trimmed || trimmed.startsWith("#")) {
+ return null;
+ }
+ const fields = trimmed.match(/(?:[^\s"]+|"[^"]*")+/g);
+ if (!fields || fields.length < 10) {
+ return null;
+ }
+ const timestamp = fields[0];
+ if (!/^\d+(\.\d+)?$/.test(timestamp)) {
+ return null;
+ }
+ return {
+ timestamp,
+ clientIpPort: fields[1],
+ domain: fields[2],
+ destIpPort: fields[3],
+ proto: fields[4],
+ method: fields[5],
+ status: fields[6],
+ decision: fields[7],
+ url: fields[8],
+ userAgent: fields[9]?.replace(/^"|"$/g, "") || "-",
+ };
+ }
+ function isRequestAllowed(decision, status) {
+ const statusCode = parseInt(status, 10);
+ if (statusCode === 200 || statusCode === 206 || statusCode === 304) {
+ return true;
+ }
+ if (decision.includes("TCP_TUNNEL") || decision.includes("TCP_HIT") || decision.includes("TCP_MISS")) {
+ return true;
+ }
+ if (decision.includes("NONE_NONE") || decision.includes("TCP_DENIED") || statusCode === 403 || statusCode === 407) {
+ return false;
+ }
+ return false;
+ }
+ function generateFirewallSummary(analysis) {
+ const { totalRequests, requestsByDomain } = analysis;
+ const validDomains = Array.from(requestsByDomain.keys())
+ .filter(domain => domain !== "-")
+ .sort();
+ const uniqueDomainCount = validDomains.length;
+ let validAllowedRequests = 0;
+ let validDeniedRequests = 0;
+ for (const domain of validDomains) {
+ const stats = requestsByDomain.get(domain);
+ validAllowedRequests += stats.allowed;
+ validDeniedRequests += stats.denied;
+ }
+ let summary = "### 🔥 Firewall Activity\n\n";
+ summary += "\n";
+ summary += `📊 ${totalRequests} request${totalRequests !== 1 ? "s" : ""} | `;
+ summary += `${validAllowedRequests} allowed | `;
+ summary += `${validDeniedRequests} blocked | `;
+ summary += `${uniqueDomainCount} unique domain${uniqueDomainCount !== 1 ? "s" : ""}\n\n`;
+ if (uniqueDomainCount > 0) {
+ summary += "| Domain | Allowed | Denied |\n";
+ summary += "|--------|---------|--------|\n";
+ for (const domain of validDomains) {
+ const stats = requestsByDomain.get(domain);
+ summary += `| ${domain} | ${stats.allowed} | ${stats.denied} |\n`;
+ }
+ } else {
+ summary += "No firewall activity detected.\n";
+ }
+ summary += "\n \n\n";
+ return summary;
+ }
+ const isDirectExecution = typeof module === "undefined" || (typeof require !== "undefined" && typeof require.main !== "undefined" && require.main === module);
+ if (isDirectExecution) {
+ main();
+ }
- name: Upload Agent Stdio
if: always()
uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5
diff --git a/.github/workflows/smoke-copilot.md b/.github/workflows/smoke-copilot.md
index 73cdee8d4c..a4bca74b7b 100644
--- a/.github/workflows/smoke-copilot.md
+++ b/.github/workflows/smoke-copilot.md
@@ -20,7 +20,7 @@ network:
- node
- github
sandbox:
- agent: false # Firewall disabled (migrated from network.firewall)
+ agent: awf # Firewall enabled
tools:
cache-memory: true
edit:
@@ -40,7 +40,7 @@ safe-outputs:
run-success: "📰 VERDICT: [{workflow_name}]({run_url}) has concluded. All systems operational. This is a developing story. 🎤"
run-failure: "📰 DEVELOPING STORY: [{workflow_name}]({run_url}) reports {status}. Our correspondents are investigating the incident..."
timeout-minutes: 5
-strict: true
+strict: false
---
# Smoke Test: Copilot Engine Validation
diff --git a/.github/workflows/spec-kit-execute.lock.yml b/.github/workflows/spec-kit-execute.lock.yml
index df22af31b6..5bef88aaca 100644
--- a/.github/workflows/spec-kit-execute.lock.yml
+++ b/.github/workflows/spec-kit-execute.lock.yml
@@ -292,8 +292,8 @@ jobs:
copilot --version
- name: Install awf binary
run: |
- echo "Installing awf from release: v0.6.0"
- curl -L https://github.com/githubnext/gh-aw-firewall/releases/download/v0.6.0/awf-linux-x64 -o awf
+ echo "Installing awf from release: v0.7.0"
+ curl -L https://github.com/githubnext/gh-aw-firewall/releases/download/v0.7.0/awf-linux-x64 -o awf
chmod +x awf
sudo mv awf /usr/local/bin/
which awf
diff --git a/.github/workflows/spec-kit-executor.lock.yml b/.github/workflows/spec-kit-executor.lock.yml
index d40beaca36..a3e6041b2d 100644
--- a/.github/workflows/spec-kit-executor.lock.yml
+++ b/.github/workflows/spec-kit-executor.lock.yml
@@ -293,8 +293,8 @@ jobs:
copilot --version
- name: Install awf binary
run: |
- echo "Installing awf from release: v0.6.0"
- curl -L https://github.com/githubnext/gh-aw-firewall/releases/download/v0.6.0/awf-linux-x64 -o awf
+ echo "Installing awf from release: v0.7.0"
+ curl -L https://github.com/githubnext/gh-aw-firewall/releases/download/v0.7.0/awf-linux-x64 -o awf
chmod +x awf
sudo mv awf /usr/local/bin/
which awf
diff --git a/.github/workflows/speckit-dispatcher.lock.yml b/.github/workflows/speckit-dispatcher.lock.yml
index b2c756f181..4b51a391bc 100644
--- a/.github/workflows/speckit-dispatcher.lock.yml
+++ b/.github/workflows/speckit-dispatcher.lock.yml
@@ -1008,8 +1008,8 @@ jobs:
copilot --version
- name: Install awf binary
run: |
- echo "Installing awf from release: v0.6.0"
- curl -L https://github.com/githubnext/gh-aw-firewall/releases/download/v0.6.0/awf-linux-x64 -o awf
+ echo "Installing awf from release: v0.7.0"
+ curl -L https://github.com/githubnext/gh-aw-firewall/releases/download/v0.7.0/awf-linux-x64 -o awf
chmod +x awf
sudo mv awf /usr/local/bin/
which awf
diff --git a/.github/workflows/stale-repo-identifier.lock.yml b/.github/workflows/stale-repo-identifier.lock.yml
index 51b68d2e5a..c3a93a29db 100644
--- a/.github/workflows/stale-repo-identifier.lock.yml
+++ b/.github/workflows/stale-repo-identifier.lock.yml
@@ -343,8 +343,8 @@ jobs:
copilot --version
- name: Install awf binary
run: |
- echo "Installing awf from release: v0.6.0"
- curl -L https://github.com/githubnext/gh-aw-firewall/releases/download/v0.6.0/awf-linux-x64 -o awf
+ echo "Installing awf from release: v0.7.0"
+ curl -L https://github.com/githubnext/gh-aw-firewall/releases/download/v0.7.0/awf-linux-x64 -o awf
chmod +x awf
sudo mv awf /usr/local/bin/
which awf
diff --git a/.github/workflows/sub-issue-closer.lock.yml b/.github/workflows/sub-issue-closer.lock.yml
index 102048a4fb..b0427b96be 100644
--- a/.github/workflows/sub-issue-closer.lock.yml
+++ b/.github/workflows/sub-issue-closer.lock.yml
@@ -248,8 +248,8 @@ jobs:
copilot --version
- name: Install awf binary
run: |
- echo "Installing awf from release: v0.6.0"
- curl -L https://github.com/githubnext/gh-aw-firewall/releases/download/v0.6.0/awf-linux-x64 -o awf
+ echo "Installing awf from release: v0.7.0"
+ curl -L https://github.com/githubnext/gh-aw-firewall/releases/download/v0.7.0/awf-linux-x64 -o awf
chmod +x awf
sudo mv awf /usr/local/bin/
which awf
diff --git a/.github/workflows/super-linter.lock.yml b/.github/workflows/super-linter.lock.yml
index ea9ee4fd2d..749fa7a53e 100644
--- a/.github/workflows/super-linter.lock.yml
+++ b/.github/workflows/super-linter.lock.yml
@@ -276,8 +276,8 @@ jobs:
copilot --version
- name: Install awf binary
run: |
- echo "Installing awf from release: v0.6.0"
- curl -L https://github.com/githubnext/gh-aw-firewall/releases/download/v0.6.0/awf-linux-x64 -o awf
+ echo "Installing awf from release: v0.7.0"
+ curl -L https://github.com/githubnext/gh-aw-firewall/releases/download/v0.7.0/awf-linux-x64 -o awf
chmod +x awf
sudo mv awf /usr/local/bin/
which awf
diff --git a/.github/workflows/technical-doc-writer.lock.yml b/.github/workflows/technical-doc-writer.lock.yml
index 8884ea7747..ba2b2ccfa2 100644
--- a/.github/workflows/technical-doc-writer.lock.yml
+++ b/.github/workflows/technical-doc-writer.lock.yml
@@ -291,8 +291,8 @@ jobs:
copilot --version
- name: Install awf binary
run: |
- echo "Installing awf from release: v0.6.0"
- curl -L https://github.com/githubnext/gh-aw-firewall/releases/download/v0.6.0/awf-linux-x64 -o awf
+ echo "Installing awf from release: v0.7.0"
+ curl -L https://github.com/githubnext/gh-aw-firewall/releases/download/v0.7.0/awf-linux-x64 -o awf
chmod +x awf
sudo mv awf /usr/local/bin/
which awf
diff --git a/.github/workflows/tidy.lock.yml b/.github/workflows/tidy.lock.yml
index fa1ffb3102..34e00eac9a 100644
--- a/.github/workflows/tidy.lock.yml
+++ b/.github/workflows/tidy.lock.yml
@@ -680,8 +680,8 @@ jobs:
copilot --version
- name: Install awf binary
run: |
- echo "Installing awf from release: v0.6.0"
- curl -L https://github.com/githubnext/gh-aw-firewall/releases/download/v0.6.0/awf-linux-x64 -o awf
+ echo "Installing awf from release: v0.7.0"
+ curl -L https://github.com/githubnext/gh-aw-firewall/releases/download/v0.7.0/awf-linux-x64 -o awf
chmod +x awf
sudo mv awf /usr/local/bin/
which awf
diff --git a/.github/workflows/video-analyzer.lock.yml b/.github/workflows/video-analyzer.lock.yml
index 241d38c460..7efe8cff22 100644
--- a/.github/workflows/video-analyzer.lock.yml
+++ b/.github/workflows/video-analyzer.lock.yml
@@ -263,8 +263,8 @@ jobs:
copilot --version
- name: Install awf binary
run: |
- echo "Installing awf from release: v0.6.0"
- curl -L https://github.com/githubnext/gh-aw-firewall/releases/download/v0.6.0/awf-linux-x64 -o awf
+ echo "Installing awf from release: v0.7.0"
+ curl -L https://github.com/githubnext/gh-aw-firewall/releases/download/v0.7.0/awf-linux-x64 -o awf
chmod +x awf
sudo mv awf /usr/local/bin/
which awf
diff --git a/.github/workflows/weekly-issue-summary.lock.yml b/.github/workflows/weekly-issue-summary.lock.yml
index 5178f5ecfd..03acd3233d 100644
--- a/.github/workflows/weekly-issue-summary.lock.yml
+++ b/.github/workflows/weekly-issue-summary.lock.yml
@@ -251,8 +251,8 @@ jobs:
copilot --version
- name: Install awf binary
run: |
- echo "Installing awf from release: v0.6.0"
- curl -L https://github.com/githubnext/gh-aw-firewall/releases/download/v0.6.0/awf-linux-x64 -o awf
+ echo "Installing awf from release: v0.7.0"
+ curl -L https://github.com/githubnext/gh-aw-firewall/releases/download/v0.7.0/awf-linux-x64 -o awf
chmod +x awf
sudo mv awf /usr/local/bin/
which awf
diff --git a/docs/src/content/docs/reference/frontmatter-full.md b/docs/src/content/docs/reference/frontmatter-full.md
index c5b822b323..b5aade72c4 100644
--- a/docs/src/content/docs/reference/frontmatter-full.md
+++ b/docs/src/content/docs/reference/frontmatter-full.md
@@ -2863,9 +2863,8 @@ strict: true
# (JavaScript), 'run' (shell), or 'py' (Python) must be specified per tool.
# (optional)
safe-inputs:
- # Transport mode for the safe-inputs MCP server. 'http' starts the server as a
- # separate step (default), 'stdio' starts the server directly by the agent within
- # the firewall.
+ # Transport mode for the safe-inputs MCP server. Only 'http' mode is supported,
+ # which starts the server as a separate step.
# (optional)
mode: "http"
diff --git a/pkg/cli/download_workflow.go b/pkg/cli/download_workflow.go
index 76c308c613..f42c26f915 100644
--- a/pkg/cli/download_workflow.go
+++ b/pkg/cli/download_workflow.go
@@ -16,6 +16,8 @@ import (
var downloadLog = logger.New("cli:download_workflow")
// resolveLatestReleaseViaGit finds the latest release using git ls-remote
+//
+//nolint:unused // Fallback implementation for when GitHub API is unavailable
func resolveLatestReleaseViaGit(repo, currentRef string, allowMajor, verbose bool) (string, error) {
if verbose {
fmt.Fprintln(os.Stderr, console.FormatVerboseMessage(fmt.Sprintf("Fetching latest release for %s via git ls-remote (current: %s, allow major: %v)", repo, currentRef, allowMajor)))
@@ -95,6 +97,8 @@ func resolveLatestReleaseViaGit(repo, currentRef string, allowMajor, verbose boo
}
// isBranchRefViaGit checks if a ref is a branch using git ls-remote
+//
+//nolint:unused // Fallback implementation for when GitHub API is unavailable
func isBranchRefViaGit(repo, ref string) (bool, error) {
downloadLog.Printf("Attempting git ls-remote to check if ref is branch: %s@%s", repo, ref)
@@ -125,6 +129,8 @@ func isBranchRefViaGit(repo, ref string) (bool, error) {
}
// isBranchRef checks if a ref is a branch in the repository
+//
+//nolint:unused // Reserved for future use
func isBranchRef(repo, ref string) (bool, error) {
// Use gh CLI to list branches
cmd := workflow.ExecGH("api", fmt.Sprintf("/repos/%s/branches", repo), "--jq", ".[].name")
@@ -155,6 +161,8 @@ func isBranchRef(repo, ref string) (bool, error) {
}
// resolveBranchHeadViaGit gets the latest commit SHA for a branch using git ls-remote
+//
+//nolint:unused // Fallback implementation for when GitHub API is unavailable
func resolveBranchHeadViaGit(repo, branch string, verbose bool) (string, error) {
if verbose {
fmt.Fprintln(os.Stderr, console.FormatVerboseMessage(fmt.Sprintf("Fetching latest commit for branch %s in %s via git ls-remote", branch, repo)))
@@ -189,6 +197,8 @@ func resolveBranchHeadViaGit(repo, branch string, verbose bool) (string, error)
}
// resolveBranchHead gets the latest commit SHA for a branch
+//
+//nolint:unused // Reserved for future use
func resolveBranchHead(repo, branch string, verbose bool) (string, error) {
if verbose {
fmt.Fprintln(os.Stderr, console.FormatVerboseMessage(fmt.Sprintf("Fetching latest commit for branch %s in %s", branch, repo)))
@@ -221,6 +231,8 @@ func resolveBranchHead(repo, branch string, verbose bool) (string, error) {
}
// resolveDefaultBranchHeadViaGit gets the latest commit SHA for the default branch using git ls-remote
+//
+//nolint:unused // Fallback implementation for when GitHub API is unavailable
func resolveDefaultBranchHeadViaGit(repo string, verbose bool) (string, error) {
if verbose {
fmt.Fprintln(os.Stderr, console.FormatVerboseMessage(fmt.Sprintf("Fetching default branch for %s via git ls-remote", repo)))
@@ -275,6 +287,8 @@ func resolveDefaultBranchHeadViaGit(repo string, verbose bool) (string, error) {
}
// resolveDefaultBranchHead gets the latest commit SHA for the default branch
+//
+//nolint:unused // Reserved for future use
func resolveDefaultBranchHead(repo string, verbose bool) (string, error) {
if verbose {
fmt.Fprintln(os.Stderr, console.FormatVerboseMessage(fmt.Sprintf("Fetching default branch for %s", repo)))
diff --git a/pkg/cli/fix_codemods.go b/pkg/cli/fix_codemods.go
index 2e2126f78f..eb334adc17 100644
--- a/pkg/cli/fix_codemods.go
+++ b/pkg/cli/fix_codemods.go
@@ -44,6 +44,7 @@ func GetAllCodemods() []Codemod {
getTimeoutMinutesCodemod(),
getNetworkFirewallCodemod(),
getCommandToSlashCommandCodemod(),
+ getSafeInputsModeCodemod(),
}
}
@@ -399,3 +400,91 @@ func getCommandToSlashCommandCodemod() Codemod {
},
}
}
+
+// getSafeInputsModeCodemod creates a codemod for removing the deprecated safe-inputs.mode field
+func getSafeInputsModeCodemod() Codemod {
+ return Codemod{
+ ID: "safe-inputs-mode-removal",
+ Name: "Remove deprecated safe-inputs.mode field",
+ Description: "Removes the deprecated 'safe-inputs.mode' field (HTTP is now the only supported mode)",
+ IntroducedIn: "0.2.0",
+ Apply: func(content string, frontmatter map[string]any) (string, bool, error) {
+ // Check if safe-inputs.mode exists
+ safeInputsValue, hasSafeInputs := frontmatter["safe-inputs"]
+ if !hasSafeInputs {
+ return content, false, nil
+ }
+
+ safeInputsMap, ok := safeInputsValue.(map[string]any)
+ if !ok {
+ return content, false, nil
+ }
+
+ // Check if mode field exists in safe-inputs
+ _, hasMode := safeInputsMap["mode"]
+ if !hasMode {
+ return content, false, nil
+ }
+
+ // Parse frontmatter to get raw lines
+ result, err := parser.ExtractFrontmatterFromContent(content)
+ if err != nil {
+ return content, false, fmt.Errorf("failed to parse frontmatter: %w", err)
+ }
+
+ // Find and remove the mode line within the safe-inputs block
+ var modified bool
+ var inSafeInputsBlock bool
+ var safeInputsIndent string
+
+ frontmatterLines := make([]string, 0, len(result.FrontmatterLines))
+
+ for i, line := range result.FrontmatterLines {
+ trimmedLine := strings.TrimSpace(line)
+
+ // Track if we're in the safe-inputs block
+ if strings.HasPrefix(trimmedLine, "safe-inputs:") {
+ inSafeInputsBlock = true
+ safeInputsIndent = line[:len(line)-len(strings.TrimLeft(line, " \t"))]
+ frontmatterLines = append(frontmatterLines, line)
+ continue
+ }
+
+ // Check if we've left the safe-inputs block (new top-level key with same or less indentation)
+ if inSafeInputsBlock && len(trimmedLine) > 0 && !strings.HasPrefix(trimmedLine, "#") {
+ currentIndent := line[:len(line)-len(strings.TrimLeft(line, " \t"))]
+ if len(currentIndent) <= len(safeInputsIndent) && strings.Contains(line, ":") {
+ inSafeInputsBlock = false
+ }
+ }
+
+ // Remove mode line if in safe-inputs block
+ if inSafeInputsBlock && strings.HasPrefix(trimmedLine, "mode:") {
+ modified = true
+ codemodsLog.Printf("Removed safe-inputs.mode on line %d", i+1)
+ continue
+ }
+
+ frontmatterLines = append(frontmatterLines, line)
+ }
+
+ if !modified {
+ return content, false, nil
+ }
+
+ // Reconstruct the content
+ var lines []string
+ lines = append(lines, "---")
+ lines = append(lines, frontmatterLines...)
+ lines = append(lines, "---")
+ if result.Markdown != "" {
+ lines = append(lines, "")
+ lines = append(lines, result.Markdown)
+ }
+
+ newContent := strings.Join(lines, "\n")
+ codemodsLog.Print("Applied safe-inputs.mode removal")
+ return newContent, true, nil
+ },
+ }
+}
diff --git a/pkg/cli/fix_command.go b/pkg/cli/fix_command.go
index 12ec50c366..ebb8e4b727 100644
--- a/pkg/cli/fix_command.go
+++ b/pkg/cli/fix_command.go
@@ -40,6 +40,7 @@ and migrate to new syntax. Codemods preserve formatting and comments as much as
Available codemods:
• timeout-minutes-migration: Replaces 'timeout_minutes' with 'timeout-minutes'
• network-firewall-migration: Replaces 'network.firewall' with 'sandbox.agent: false'
+ • safe-inputs-mode-removal: Removes deprecated 'safe-inputs.mode' field
If no workflows are specified, all Markdown files in .github/workflows will be processed.
diff --git a/pkg/cli/fix_command_test.go b/pkg/cli/fix_command_test.go
index dd20c0f5c7..7e75b112b0 100644
--- a/pkg/cli/fix_command_test.go
+++ b/pkg/cli/fix_command_test.go
@@ -443,6 +443,7 @@ func TestGetAllCodemods(t *testing.T) {
"timeout-minutes-migration",
"network-firewall-migration",
"command-to-slash-command-migration",
+ "safe-inputs-mode-removal",
}
foundIDs := make(map[string]bool)
@@ -539,3 +540,74 @@ This is a test workflow with slash command.
t.Errorf("Expected on.slash_command: my-bot in updated content, got:\n%s", updatedStr)
}
}
+
+func TestFixCommand_SafeInputsModeRemoval(t *testing.T) {
+// Create a temporary directory for test files
+tmpDir := t.TempDir()
+workflowFile := filepath.Join(tmpDir, "test-workflow.md")
+
+// Create a workflow with deprecated safe-inputs.mode field
+content := `---
+on: workflow_dispatch
+engine: copilot
+safe-inputs:
+ mode: http
+ test-tool:
+ description: Test tool
+ script: |
+ return { result: "test" };
+---
+
+# Test Workflow
+
+This is a test workflow with safe-inputs mode field.
+`
+
+if err := os.WriteFile(workflowFile, []byte(content), 0644); err != nil {
+t.Fatalf("Failed to create test file: %v", err)
+}
+
+// Get the safe-inputs mode removal codemod
+modeCodemod := getCodemodByID("safe-inputs-mode-removal")
+if modeCodemod == nil {
+t.Fatal("safe-inputs-mode-removal codemod not found")
+}
+
+// Process the file
+fixed, err := processWorkflowFile(workflowFile, []Codemod{*modeCodemod}, true, false)
+if err != nil {
+t.Fatalf("Failed to process workflow file: %v", err)
+}
+
+if !fixed {
+t.Error("Expected file to be fixed, but no changes were made")
+}
+
+// Read the updated content
+updatedContent, err := os.ReadFile(workflowFile)
+if err != nil {
+t.Fatalf("Failed to read updated file: %v", err)
+}
+
+updatedStr := string(updatedContent)
+
+t.Logf("Updated content:\n%s", updatedStr)
+
+// Verify the change - mode field should be removed
+if strings.Contains(updatedStr, "mode:") {
+t.Errorf("Expected mode field to be removed, but it still exists:\n%s", updatedStr)
+}
+
+// Verify safe-inputs block and test-tool are preserved
+if !strings.Contains(updatedStr, "safe-inputs:") {
+t.Error("Expected safe-inputs block to be preserved")
+}
+
+if !strings.Contains(updatedStr, "test-tool:") {
+t.Error("Expected test-tool to be preserved")
+}
+
+if !strings.Contains(updatedStr, "description: Test tool") {
+t.Error("Expected test-tool description to be preserved")
+}
+}
diff --git a/pkg/cli/update_actions.go b/pkg/cli/update_actions.go
new file mode 100644
index 0000000000..483c18d1af
--- /dev/null
+++ b/pkg/cli/update_actions.go
@@ -0,0 +1,374 @@
+package cli
+
+import (
+ "bytes"
+ "encoding/json"
+ "fmt"
+ "os"
+ "os/exec"
+ "path/filepath"
+ "sort"
+ "strings"
+
+ "github.com/githubnext/gh-aw/pkg/console"
+ "github.com/githubnext/gh-aw/pkg/gitutil"
+ "github.com/githubnext/gh-aw/pkg/workflow"
+)
+
+// UpdateActions updates GitHub Actions versions in .github/aw/actions-lock.json
+// It checks each action for newer releases and updates the SHA if a newer version is found
+func UpdateActions(allowMajor, verbose bool) error {
+ updateLog.Print("Starting action updates")
+
+ if verbose {
+ fmt.Fprintln(os.Stderr, console.FormatInfoMessage("Checking for GitHub Actions updates..."))
+ }
+
+ // Get the path to actions-lock.json
+ actionsLockPath := filepath.Join(".github", "aw", "actions-lock.json")
+
+ // Check if the file exists
+ if _, err := os.Stat(actionsLockPath); os.IsNotExist(err) {
+ if verbose {
+ fmt.Fprintln(os.Stderr, console.FormatVerboseMessage(fmt.Sprintf("Actions lock file not found: %s", actionsLockPath)))
+ }
+ return nil // Not an error, just skip
+ }
+
+ // Load the current actions lock file
+ data, err := os.ReadFile(actionsLockPath)
+ if err != nil {
+ return fmt.Errorf("failed to read actions lock file: %w", err)
+ }
+
+ var actionsLock actionsLockFile
+ if err := json.Unmarshal(data, &actionsLock); err != nil {
+ return fmt.Errorf("failed to parse actions lock file: %w", err)
+ }
+
+ updateLog.Printf("Loaded %d action entries from actions-lock.json", len(actionsLock.Entries))
+
+ // Track updates
+ var updatedActions []string
+ var failedActions []string
+ var skippedActions []string
+
+ // Update each action
+ for key, entry := range actionsLock.Entries {
+ updateLog.Printf("Checking action: %s@%s", entry.Repo, entry.Version)
+
+ // Check for latest release
+ latestVersion, latestSHA, err := getLatestActionRelease(entry.Repo, entry.Version, allowMajor, verbose)
+ if err != nil {
+ if verbose {
+ fmt.Fprintln(os.Stderr, console.FormatWarningMessage(fmt.Sprintf("Failed to check %s: %v", entry.Repo, err)))
+ }
+ failedActions = append(failedActions, entry.Repo)
+ continue
+ }
+
+ // Check if update is available
+ if latestVersion == entry.Version && latestSHA == entry.SHA {
+ if verbose {
+ fmt.Fprintln(os.Stderr, console.FormatVerboseMessage(fmt.Sprintf("%s@%s is up to date", entry.Repo, entry.Version)))
+ }
+ skippedActions = append(skippedActions, entry.Repo)
+ continue
+ }
+
+ // Update the entry
+ updateLog.Printf("Updating %s from %s (%s) to %s (%s)", entry.Repo, entry.Version, entry.SHA[:7], latestVersion, latestSHA[:7])
+ fmt.Fprintln(os.Stderr, console.FormatSuccessMessage(fmt.Sprintf("Updated %s from %s to %s", entry.Repo, entry.Version, latestVersion)))
+
+ // Update the map entry
+ actionsLock.Entries[key] = actionsLockEntry{
+ Repo: entry.Repo,
+ Version: latestVersion,
+ SHA: latestSHA,
+ }
+
+ updatedActions = append(updatedActions, entry.Repo)
+ }
+
+ // Show summary
+ fmt.Fprintln(os.Stderr, "")
+ fmt.Fprintln(os.Stderr, console.FormatInfoMessage("=== Actions Update Summary ==="))
+ fmt.Fprintln(os.Stderr, "")
+
+ if len(updatedActions) > 0 {
+ fmt.Fprintln(os.Stderr, console.FormatSuccessMessage(fmt.Sprintf("Updated %d action(s):", len(updatedActions))))
+ for _, action := range updatedActions {
+ fmt.Fprintln(os.Stderr, console.FormatListItem(action))
+ }
+ fmt.Fprintln(os.Stderr, "")
+ }
+
+ if len(skippedActions) > 0 && verbose {
+ fmt.Fprintln(os.Stderr, console.FormatInfoMessage(fmt.Sprintf("%d action(s) already up to date", len(skippedActions))))
+ fmt.Fprintln(os.Stderr, "")
+ }
+
+ if len(failedActions) > 0 {
+ fmt.Fprintln(os.Stderr, console.FormatWarningMessage(fmt.Sprintf("Failed to check %d action(s):", len(failedActions))))
+ for _, action := range failedActions {
+ fmt.Fprintf(os.Stderr, " %s\n", action)
+ }
+ fmt.Fprintln(os.Stderr, "")
+ }
+
+ // Save the updated actions lock file if there were any updates
+ if len(updatedActions) > 0 {
+ // Marshal with sorted keys and pretty printing
+ updatedData, err := marshalActionsLockSorted(&actionsLock)
+ if err != nil {
+ return fmt.Errorf("failed to marshal updated actions lock: %w", err)
+ }
+
+ // Add trailing newline for prettier compliance
+ updatedData = append(updatedData, '\n')
+
+ if err := os.WriteFile(actionsLockPath, updatedData, 0644); err != nil {
+ return fmt.Errorf("failed to write updated actions lock file: %w", err)
+ }
+
+ updateLog.Printf("Successfully wrote updated actions-lock.json with %d updates", len(updatedActions))
+ fmt.Fprintln(os.Stderr, console.FormatInfoMessage("Updated actions-lock.json file"))
+ }
+
+ return nil
+}
+
+// getLatestActionRelease gets the latest release for an action repository
+// It respects semantic versioning and the allowMajor flag
+func getLatestActionRelease(repo, currentVersion string, allowMajor, verbose bool) (string, string, error) {
+ updateLog.Printf("Getting latest release for %s@%s (allowMajor=%v)", repo, currentVersion, allowMajor)
+
+ // Use gh CLI to get releases
+ cmd := workflow.ExecGH("api", fmt.Sprintf("/repos/%s/releases", repo), "--jq", ".[].tag_name")
+ output, err := cmd.CombinedOutput()
+ if err != nil {
+ // Check if this is an authentication error
+ outputStr := string(output)
+ if gitutil.IsAuthError(outputStr) || gitutil.IsAuthError(err.Error()) {
+ updateLog.Printf("GitHub API authentication failed, attempting git ls-remote fallback for %s", repo)
+ // Try fallback using git ls-remote
+ latestRelease, latestSHA, gitErr := getLatestActionReleaseViaGit(repo, currentVersion, allowMajor, verbose)
+ if gitErr != nil {
+ return "", "", fmt.Errorf("failed to fetch releases via GitHub API and git: API error: %w, Git error: %v", err, gitErr)
+ }
+ return latestRelease, latestSHA, nil
+ }
+ return "", "", fmt.Errorf("failed to fetch releases: %w", err)
+ }
+
+ releases := strings.Split(strings.TrimSpace(string(output)), "\n")
+ if len(releases) == 0 || releases[0] == "" {
+ return "", "", fmt.Errorf("no releases found")
+ }
+
+ // Parse current version
+ currentVer := parseVersion(currentVersion)
+ if currentVer == nil {
+ // If current version is not a valid semantic version, just return the latest release
+ latestRelease := releases[0]
+ sha, err := getActionSHAForTag(repo, latestRelease)
+ if err != nil {
+ return "", "", fmt.Errorf("failed to get SHA for %s: %w", latestRelease, err)
+ }
+ return latestRelease, sha, nil
+ }
+
+ // Find the latest compatible release
+ var latestCompatible string
+ var latestCompatibleVersion *semanticVersion
+
+ for _, release := range releases {
+ releaseVer := parseVersion(release)
+ if releaseVer == nil {
+ continue
+ }
+
+ // Check if compatible based on major version
+ if !allowMajor && releaseVer.major != currentVer.major {
+ continue
+ }
+
+ // Check if this is newer than what we have
+ if latestCompatibleVersion == nil || releaseVer.isNewer(latestCompatibleVersion) {
+ latestCompatible = release
+ latestCompatibleVersion = releaseVer
+ }
+ }
+
+ if latestCompatible == "" {
+ return "", "", fmt.Errorf("no compatible release found")
+ }
+
+ // Get the SHA for the latest compatible release
+ sha, err := getActionSHAForTag(repo, latestCompatible)
+ if err != nil {
+ return "", "", fmt.Errorf("failed to get SHA for %s: %w", latestCompatible, err)
+ }
+
+ return latestCompatible, sha, nil
+}
+
+// getLatestActionReleaseViaGit gets the latest release using git ls-remote (fallback)
+func getLatestActionReleaseViaGit(repo, currentVersion string, allowMajor, verbose bool) (string, string, error) {
+ if verbose {
+ fmt.Fprintln(os.Stderr, console.FormatVerboseMessage(fmt.Sprintf("Fetching latest release for %s via git ls-remote (current: %s, allow major: %v)", repo, currentVersion, allowMajor)))
+ }
+
+ repoURL := fmt.Sprintf("https://github.com/%s.git", repo)
+
+ // List all tags
+ cmd := exec.Command("git", "ls-remote", "--tags", repoURL)
+ output, err := cmd.Output()
+ if err != nil {
+ return "", "", fmt.Errorf("failed to fetch releases via git ls-remote: %w", err)
+ }
+
+ lines := strings.Split(strings.TrimSpace(string(output)), "\n")
+ var releases []string
+ tagToSHA := make(map[string]string)
+
+ for _, line := range lines {
+ // Parse: " refs/tags/"
+ parts := strings.Fields(line)
+ if len(parts) >= 2 {
+ sha := parts[0]
+ tagRef := parts[1]
+ // Skip ^{} annotations (they point to the commit object)
+ if strings.HasSuffix(tagRef, "^{}") {
+ continue
+ }
+ tag := strings.TrimPrefix(tagRef, "refs/tags/")
+ releases = append(releases, tag)
+ tagToSHA[tag] = sha
+ }
+ }
+
+ if len(releases) == 0 {
+ return "", "", fmt.Errorf("no releases found")
+ }
+
+ // Parse current version
+ currentVer := parseVersion(currentVersion)
+ if currentVer == nil {
+ // If current version is not a valid semantic version, just return the first release
+ latestRelease := releases[0]
+ sha := tagToSHA[latestRelease]
+ if verbose {
+ fmt.Fprintln(os.Stderr, console.FormatVerboseMessage(fmt.Sprintf("Current version is not valid, using first release: %s (via git)", latestRelease)))
+ }
+ return latestRelease, sha, nil
+ }
+
+ // Find the latest compatible release
+ var latestCompatible string
+ var latestCompatibleVersion *semanticVersion
+
+ for _, release := range releases {
+ releaseVer := parseVersion(release)
+ if releaseVer == nil {
+ continue
+ }
+
+ // Check if compatible based on major version
+ if !allowMajor && releaseVer.major != currentVer.major {
+ continue
+ }
+
+ // Check if this is newer than what we have
+ if latestCompatibleVersion == nil || releaseVer.isNewer(latestCompatibleVersion) {
+ latestCompatible = release
+ latestCompatibleVersion = releaseVer
+ }
+ }
+
+ if latestCompatible == "" {
+ return "", "", fmt.Errorf("no compatible release found")
+ }
+
+ sha := tagToSHA[latestCompatible]
+ if verbose {
+ fmt.Fprintln(os.Stderr, console.FormatVerboseMessage(fmt.Sprintf("Latest compatible release: %s (via git)", latestCompatible)))
+ }
+
+ return latestCompatible, sha, nil
+}
+
+// getActionSHAForTag gets the commit SHA for a given tag in an action repository
+func getActionSHAForTag(repo, tag string) (string, error) {
+ updateLog.Printf("Getting SHA for %s@%s", repo, tag)
+
+ // Use gh CLI to get the git ref for the tag
+ cmd := workflow.ExecGH("api", fmt.Sprintf("/repos/%s/git/ref/tags/%s", repo, tag), "--jq", ".object.sha")
+ output, err := cmd.Output()
+ if err != nil {
+ return "", fmt.Errorf("failed to resolve tag: %w", err)
+ }
+
+ sha := strings.TrimSpace(string(output))
+ if sha == "" {
+ return "", fmt.Errorf("empty SHA returned for tag")
+ }
+
+ // Validate SHA format (should be 40 hex characters)
+ if len(sha) != 40 {
+ return "", fmt.Errorf("invalid SHA format: %s", sha)
+ }
+
+ return sha, nil
+}
+
+// marshalActionsLockSorted marshals the actions lock with entries sorted by key
+func marshalActionsLockSorted(actionsLock *actionsLockFile) ([]byte, error) {
+ // Extract and sort the keys
+ keys := make([]string, 0, len(actionsLock.Entries))
+ for key := range actionsLock.Entries {
+ keys = append(keys, key)
+ }
+ sort.Strings(keys)
+
+ // Build JSON using json.Marshal for proper encoding
+ var buf strings.Builder
+ buf.WriteString("{\n \"entries\": {\n")
+
+ for i, key := range keys {
+ entry := actionsLock.Entries[key]
+
+ // Marshal the entry to JSON to ensure proper escaping
+ entryJSON, err := json.Marshal(entry)
+ if err != nil {
+ return nil, err
+ }
+
+ // Marshal the key to ensure proper escaping
+ keyJSON, err := json.Marshal(key)
+ if err != nil {
+ return nil, err
+ }
+
+ // Write the key-value pair with proper indentation
+ buf.WriteString(" ")
+ buf.WriteString(string(keyJSON))
+ buf.WriteString(": ")
+
+ // Pretty-print the entry JSON with proper indentation
+ var prettyEntry bytes.Buffer
+ if err := json.Indent(&prettyEntry, entryJSON, " ", " "); err != nil {
+ return nil, err
+ }
+ buf.WriteString(prettyEntry.String())
+
+ // Add comma if not the last entry
+ if i < len(keys)-1 {
+ buf.WriteString(",")
+ }
+ buf.WriteString("\n")
+ }
+
+ buf.WriteString(" }\n}")
+ return []byte(buf.String()), nil
+}
diff --git a/pkg/cli/update_command.go b/pkg/cli/update_command.go
index 637051c35b..d36e7f5a18 100644
--- a/pkg/cli/update_command.go
+++ b/pkg/cli/update_command.go
@@ -1,22 +1,10 @@
package cli
import (
- "bytes"
- "encoding/json"
"fmt"
- "math/rand"
- "os"
- "os/exec"
- "path/filepath"
- "sort"
- "strings"
- "github.com/githubnext/gh-aw/pkg/console"
"github.com/githubnext/gh-aw/pkg/constants"
- "github.com/githubnext/gh-aw/pkg/gitutil"
"github.com/githubnext/gh-aw/pkg/logger"
- "github.com/githubnext/gh-aw/pkg/parser"
- "github.com/githubnext/gh-aw/pkg/workflow"
"github.com/spf13/cobra"
)
@@ -97,47 +85,6 @@ Examples:
return cmd
}
-// checkExtensionUpdate checks if a newer version of gh-aw is available
-func checkExtensionUpdate(verbose bool) error {
- if verbose {
- fmt.Fprintln(os.Stderr, console.FormatVerboseMessage("Checking for gh-aw extension updates..."))
- }
-
- // Run gh extension upgrade --dry-run to check for updates
- cmd := workflow.ExecGH("extension", "upgrade", "githubnext/gh-aw", "--dry-run")
- output, err := cmd.CombinedOutput()
- if err != nil {
- if verbose {
- fmt.Fprintln(os.Stderr, console.FormatWarningMessage(fmt.Sprintf("Failed to check for extension updates: %v", err)))
- }
- return nil // Don't fail the whole command if update check fails
- }
-
- outputStr := strings.TrimSpace(string(output))
- if verbose {
- fmt.Fprintln(os.Stderr, console.FormatVerboseMessage(fmt.Sprintf("Extension update check output: %s", outputStr)))
- }
-
- // Parse the output to see if an update is available
- // Expected format: "[aw]: would have upgraded from v0.14.0 to v0.18.1"
- lines := strings.Split(outputStr, "\n")
- for _, line := range lines {
- if strings.Contains(line, "[aw]: would have upgraded from") {
- fmt.Fprintln(os.Stderr, console.FormatInfoMessage(line))
- fmt.Fprintln(os.Stderr, console.FormatInfoMessage("Run 'gh extension upgrade githubnext/gh-aw' to update"))
- return nil
- }
- }
-
- if strings.Contains(outputStr, "✓ Successfully checked extension upgrades") {
- if verbose {
- fmt.Fprintln(os.Stderr, console.FormatSuccessMessage("gh-aw extension is up to date"))
- }
- }
-
- return nil
-}
-
// UpdateWorkflowsWithExtensionCheck performs the complete update process:
// 1. Check for gh-aw extension updates
// 2. Update GitHub Actions versions (unless --no-actions flag is set)
@@ -173,1159 +120,3 @@ func UpdateWorkflowsWithExtensionCheck(workflowNames []string, allowMajor, force
return nil
}
-
-// hasGitChanges checks if there are any uncommitted changes
-func hasGitChanges() (bool, error) {
- cmd := exec.Command("git", "status", "--porcelain")
- output, err := cmd.Output()
- if err != nil {
- return false, fmt.Errorf("failed to check git status: %w", err)
- }
-
- return len(strings.TrimSpace(string(output))) > 0, nil
-}
-
-// runGitCommand runs a git command with the specified arguments
-func runGitCommand(args ...string) error {
- cmd := exec.Command("git", args...)
- if err := cmd.Run(); err != nil {
- return fmt.Errorf("git %s failed: %w", strings.Join(args, " "), err)
- }
- return nil
-}
-
-// createUpdatePR creates a pull request with the workflow changes
-func createUpdatePR(verbose bool) error {
- // Check if GitHub CLI is available
- if !isGHCLIAvailable() {
- return fmt.Errorf("GitHub CLI (gh) is required for PR creation but not found in PATH")
- }
-
- // Check if there are any changes to commit
- hasChanges, err := hasGitChanges()
- if err != nil {
- return fmt.Errorf("failed to check git status: %w", err)
- }
-
- if !hasChanges {
- fmt.Fprintln(os.Stderr, console.FormatInfoMessage("No changes to create PR for"))
- return nil
- }
-
- if verbose {
- fmt.Fprintln(os.Stderr, console.FormatVerboseMessage("Creating pull request with workflow updates..."))
- }
-
- // Create a branch name with timestamp
- randomNum := rand.Intn(9000) + 1000 // Generate number between 1000-9999
- branchName := fmt.Sprintf("update-workflows-%d", randomNum)
-
- // Create and checkout new branch
- if err := runGitCommand("checkout", "-b", branchName); err != nil {
- return fmt.Errorf("failed to create branch: %w", err)
- }
-
- // Add all changes
- if err := runGitCommand("add", "."); err != nil {
- return fmt.Errorf("failed to add changes: %w", err)
- }
-
- // Commit changes
- commitMsg := "Update workflows and recompile"
- if err := runGitCommand("commit", "-m", commitMsg); err != nil {
- return fmt.Errorf("failed to commit changes: %w", err)
- }
-
- // Push branch
- if err := runGitCommand("push", "-u", "origin", branchName); err != nil {
- return fmt.Errorf("failed to push branch: %w", err)
- }
-
- // Create PR
- cmd := workflow.ExecGH("pr", "create",
- "--title", "Update workflows and recompile",
- "--body", "This PR updates workflows from their source repositories and recompiles them.\n\nGenerated by `gh aw update --pr`")
-
- output, err := cmd.CombinedOutput()
- if err != nil {
- return fmt.Errorf("failed to create PR: %w\nOutput: %s", err, string(output))
- }
-
- fmt.Fprintln(os.Stderr, console.FormatSuccessMessage("Successfully created pull request"))
- fmt.Fprintln(os.Stderr, console.FormatInfoMessage(strings.TrimSpace(string(output))))
-
- return nil
-}
-
-// UpdateWorkflows updates workflows from their source repositories
-func UpdateWorkflows(workflowNames []string, allowMajor, force, verbose bool, engineOverride string, workflowsDir string, noStopAfter bool, stopAfter string, merge bool) error {
- updateLog.Printf("Scanning for workflows with source field: dir=%s, filter=%v, merge=%v", workflowsDir, workflowNames, merge)
-
- // Use provided workflows directory or default
- if workflowsDir == "" {
- workflowsDir = getWorkflowsDir()
- }
-
- // Find all workflows with source field
- workflows, err := findWorkflowsWithSource(workflowsDir, workflowNames, verbose)
- if err != nil {
- return err
- }
-
- updateLog.Printf("Found %d workflows with source field", len(workflows))
-
- if len(workflows) == 0 {
- if len(workflowNames) > 0 {
- return fmt.Errorf("no workflows found matching the specified names with source field")
- }
- return fmt.Errorf("no workflows found with source field")
- }
-
- fmt.Fprintln(os.Stderr, console.FormatInfoMessage(fmt.Sprintf("Found %d workflow(s) to update", len(workflows))))
-
- // Track update results
- var successfulUpdates []string
- var failedUpdates []updateFailure
-
- // Update each workflow
- for _, wf := range workflows {
- if err := updateWorkflow(wf, allowMajor, force, verbose, engineOverride, noStopAfter, stopAfter, merge); err != nil {
- failedUpdates = append(failedUpdates, updateFailure{
- Name: wf.Name,
- Error: err.Error(),
- })
- continue
- }
- successfulUpdates = append(successfulUpdates, wf.Name)
- }
-
- // Show summary
- showUpdateSummary(successfulUpdates, failedUpdates)
-
- if len(successfulUpdates) == 0 {
- return fmt.Errorf("no workflows were successfully updated")
- }
-
- return nil
-}
-
-// workflowWithSource represents a workflow with its source information
-type workflowWithSource struct {
- Name string
- Path string
- SourceSpec string // e.g., "owner/repo/path@ref"
-}
-
-// updateFailure represents a failed workflow update
-type updateFailure struct {
- Name string
- Error string
-}
-
-// actionsLockEntry represents a single action pin entry
-type actionsLockEntry struct {
- Repo string `json:"repo"`
- Version string `json:"version"`
- SHA string `json:"sha"`
-}
-
-// actionsLockFile represents the structure of actions-lock.json
-type actionsLockFile struct {
- Entries map[string]actionsLockEntry `json:"entries"`
-}
-
-// showUpdateSummary displays a summary of workflow updates using console helpers
-func showUpdateSummary(successfulUpdates []string, failedUpdates []updateFailure) {
- fmt.Fprintln(os.Stderr, "")
- fmt.Fprintln(os.Stderr, console.FormatInfoMessage("=== Update Summary ==="))
- fmt.Fprintln(os.Stderr, "")
-
- // Show successful updates
- if len(successfulUpdates) > 0 {
- fmt.Fprintln(os.Stderr, console.FormatSuccessMessage(fmt.Sprintf("Successfully updated and compiled %d workflow(s):", len(successfulUpdates))))
- for _, name := range successfulUpdates {
- fmt.Fprintln(os.Stderr, console.FormatListItem(name))
- }
- fmt.Fprintln(os.Stderr, "")
- }
-
- // Show failed updates
- if len(failedUpdates) > 0 {
- fmt.Fprintln(os.Stderr, console.FormatErrorMessage(fmt.Sprintf("Failed to update %d workflow(s):", len(failedUpdates))))
- for _, failure := range failedUpdates {
- fmt.Fprintf(os.Stderr, " %s %s: %s\n", console.FormatErrorMessage("✗"), failure.Name, failure.Error)
- }
- fmt.Fprintln(os.Stderr, "")
- }
-}
-
-// findWorkflowsWithSource finds all workflows that have a source field
-func findWorkflowsWithSource(workflowsDir string, filterNames []string, verbose bool) ([]*workflowWithSource, error) {
- var workflows []*workflowWithSource
-
- // Read all .md files in workflows directory
- entries, err := os.ReadDir(workflowsDir)
- if err != nil {
- return nil, fmt.Errorf("failed to read workflows directory: %w", err)
- }
-
- for _, entry := range entries {
- if entry.IsDir() || !strings.HasSuffix(entry.Name(), ".md") {
- continue
- }
-
- // Skip .lock.yml files
- if strings.HasSuffix(entry.Name(), ".lock.yml") {
- continue
- }
-
- workflowPath := filepath.Join(workflowsDir, entry.Name())
- workflowName := strings.TrimSuffix(entry.Name(), ".md")
-
- // Filter by name if specified
- if len(filterNames) > 0 {
- matched := false
- for _, filterName := range filterNames {
- // Remove .md extension if present
- filterName = strings.TrimSuffix(filterName, ".md")
- if workflowName == filterName {
- matched = true
- break
- }
- }
- if !matched {
- continue
- }
- }
-
- // Read the workflow file and extract source field
- content, err := os.ReadFile(workflowPath)
- if err != nil {
- if verbose {
- fmt.Fprintln(os.Stderr, console.FormatWarningMessage(fmt.Sprintf("Failed to read %s: %v", workflowPath, err)))
- }
- continue
- }
-
- // Parse frontmatter
- result, err := parser.ExtractFrontmatterFromContent(string(content))
- if err != nil {
- if verbose {
- fmt.Fprintln(os.Stderr, console.FormatWarningMessage(fmt.Sprintf("Failed to parse frontmatter in %s: %v", workflowPath, err)))
- }
- continue
- }
-
- // Check for source field
- sourceRaw, ok := result.Frontmatter["source"]
- if !ok {
- if verbose {
- fmt.Fprintln(os.Stderr, console.FormatVerboseMessage(fmt.Sprintf("Skipping %s: no source field", workflowName)))
- }
- continue
- }
-
- source, ok := sourceRaw.(string)
- if !ok || source == "" {
- if verbose {
- fmt.Fprintln(os.Stderr, console.FormatWarningMessage(fmt.Sprintf("Skipping %s: invalid source field", workflowName)))
- }
- continue
- }
-
- workflows = append(workflows, &workflowWithSource{
- Name: workflowName,
- Path: workflowPath,
- SourceSpec: strings.TrimSpace(source),
- })
- }
-
- return workflows, nil
-}
-
-// resolveLatestRef resolves the latest ref for a workflow source
-func resolveLatestRef(repo, currentRef string, allowMajor, verbose bool) (string, error) {
- updateLog.Printf("Resolving latest ref: repo=%s, currentRef=%s, allowMajor=%v", repo, currentRef, allowMajor)
-
- if verbose {
- fmt.Fprintln(os.Stderr, console.FormatVerboseMessage(fmt.Sprintf("Resolving latest ref for %s (current: %s)", repo, currentRef)))
- }
-
- // Check if current ref is a tag (looks like a semantic version)
- if isSemanticVersionTag(currentRef) {
- updateLog.Print("Current ref is semantic version tag, resolving latest release")
- return resolveLatestRelease(repo, currentRef, allowMajor, verbose)
- }
-
- // Check if current ref is a branch by checking if it exists as a branch
- isBranch, err := isBranchRef(repo, currentRef)
- if err != nil {
- if verbose {
- fmt.Fprintln(os.Stderr, console.FormatWarningMessage(fmt.Sprintf("Failed to check if ref is branch: %v", err)))
- }
- // If we can't determine, treat as default branch case
- return resolveDefaultBranchHead(repo, verbose)
- }
-
- if isBranch {
- updateLog.Printf("Current ref is branch: %s", currentRef)
- return resolveBranchHead(repo, currentRef, verbose)
- }
-
- // Otherwise, use default branch
- updateLog.Print("Using default branch for ref resolution")
- return resolveDefaultBranchHead(repo, verbose)
-}
-
-// isSemanticVersionTag checks if a ref looks like a semantic version tag
-// resolveLatestRelease finds the latest release, respecting semantic versioning
-func resolveLatestRelease(repo, currentRef string, allowMajor, verbose bool) (string, error) {
- if verbose {
- fmt.Fprintln(os.Stderr, console.FormatVerboseMessage(fmt.Sprintf("Fetching latest release for %s (current: %s, allow major: %v)", repo, currentRef, allowMajor)))
- }
-
- // Use gh CLI to get releases
- cmd := workflow.ExecGH("api", fmt.Sprintf("/repos/%s/releases", repo), "--jq", ".[].tag_name")
- output, err := cmd.CombinedOutput()
- if err != nil {
- // Check if this is an authentication error
- outputStr := string(output)
- if gitutil.IsAuthError(outputStr) || gitutil.IsAuthError(err.Error()) {
- updateLog.Printf("GitHub API authentication failed, attempting git ls-remote fallback")
- // Try fallback using git ls-remote
- release, gitErr := resolveLatestReleaseViaGit(repo, currentRef, allowMajor, verbose)
- if gitErr != nil {
- return "", fmt.Errorf("failed to fetch releases via GitHub API and git: API error: %w, Git error: %v", err, gitErr)
- }
- return release, nil
- }
- return "", fmt.Errorf("failed to fetch releases: %w", err)
- }
-
- releases := strings.Split(strings.TrimSpace(string(output)), "\n")
- if len(releases) == 0 || releases[0] == "" {
- return "", fmt.Errorf("no releases found")
- }
-
- // Parse current version
- currentVersion := parseVersion(currentRef)
- if currentVersion == nil {
- // If current ref is not a valid version, just return the latest release
- if verbose {
- fmt.Fprintln(os.Stderr, console.FormatVerboseMessage(fmt.Sprintf("Current ref is not a valid version, using latest release: %s", releases[0])))
- }
- return releases[0], nil
- }
-
- // Find the latest compatible release
- var latestCompatible string
- var latestCompatibleVersion *semanticVersion
-
- for _, release := range releases {
- releaseVersion := parseVersion(release)
- if releaseVersion == nil {
- continue
- }
-
- // Check if compatible based on major version
- if !allowMajor && releaseVersion.major != currentVersion.major {
- continue
- }
-
- // Check if this is newer than what we have
- if latestCompatibleVersion == nil || releaseVersion.isNewer(latestCompatibleVersion) {
- latestCompatible = release
- latestCompatibleVersion = releaseVersion
- }
- }
-
- if latestCompatible == "" {
- return "", fmt.Errorf("no compatible release found")
- }
-
- if verbose {
- fmt.Fprintln(os.Stderr, console.FormatVerboseMessage(fmt.Sprintf("Latest compatible release: %s", latestCompatible)))
- }
-
- return latestCompatible, nil
-}
-
-// updateWorkflow updates a single workflow from its source
-func updateWorkflow(wf *workflowWithSource, allowMajor, force, verbose bool, engineOverride string, noStopAfter bool, stopAfter string, merge bool) error {
- updateLog.Printf("Updating workflow: name=%s, source=%s, force=%v, merge=%v", wf.Name, wf.SourceSpec, force, merge)
-
- if verbose {
- fmt.Fprintln(os.Stderr, console.FormatInfoMessage(fmt.Sprintf("\nUpdating workflow: %s", wf.Name)))
- fmt.Fprintln(os.Stderr, console.FormatVerboseMessage(fmt.Sprintf("Source: %s", wf.SourceSpec)))
- }
-
- // Parse source spec
- sourceSpec, err := parseSourceSpec(wf.SourceSpec)
- if err != nil {
- updateLog.Printf("Failed to parse source spec: %v", err)
- return fmt.Errorf("failed to parse source spec: %w", err)
- }
-
- // If no ref specified, use default branch
- currentRef := sourceSpec.Ref
- if currentRef == "" {
- currentRef = "main"
- }
-
- // Resolve latest ref
- latestRef, err := resolveLatestRef(sourceSpec.Repo, currentRef, allowMajor, verbose)
- if err != nil {
- return fmt.Errorf("failed to resolve latest ref: %w", err)
- }
-
- if verbose {
- fmt.Fprintln(os.Stderr, console.FormatVerboseMessage(fmt.Sprintf("Current ref: %s", currentRef)))
- fmt.Fprintln(os.Stderr, console.FormatVerboseMessage(fmt.Sprintf("Latest ref: %s", latestRef)))
- }
-
- // Check if update is needed
- if !force && currentRef == latestRef {
- updateLog.Printf("Workflow already at latest ref: %s, checking for local modifications", currentRef)
-
- // Download the source content to check if local file has been modified
- sourceContent, err := downloadWorkflowContent(sourceSpec.Repo, sourceSpec.Path, currentRef, verbose)
- if err != nil {
- // If we can't download for comparison, just show the up-to-date message
- if verbose {
- fmt.Fprintln(os.Stderr, console.FormatWarningMessage(fmt.Sprintf("Failed to download source for comparison: %v", err)))
- }
- fmt.Fprintln(os.Stderr, console.FormatInfoMessage(fmt.Sprintf("Workflow %s is already up to date (%s)", wf.Name, currentRef)))
- return nil
- }
-
- // Read current workflow content
- currentContent, err := os.ReadFile(wf.Path)
- if err != nil {
- return fmt.Errorf("failed to read current workflow: %w", err)
- }
-
- // Check if local file differs from source
- if hasLocalModifications(string(sourceContent), string(currentContent), wf.SourceSpec, verbose) {
- updateLog.Printf("Local modifications detected in workflow: %s", wf.Name)
- fmt.Fprintln(os.Stderr, console.FormatInfoMessage(fmt.Sprintf("Workflow %s is already up to date (%s)", wf.Name, currentRef)))
- fmt.Fprintln(os.Stderr, console.FormatWarningMessage(fmt.Sprintf("⚠️ Local copy of %s has been modified from source", wf.Name)))
- return nil
- }
-
- updateLog.Printf("Workflow %s is up to date with no local modifications", wf.Name)
- fmt.Fprintln(os.Stderr, console.FormatInfoMessage(fmt.Sprintf("Workflow %s is already up to date (%s)", wf.Name, currentRef)))
- return nil
- }
-
- // Download the latest version
- if verbose {
- fmt.Fprintln(os.Stderr, console.FormatVerboseMessage(fmt.Sprintf("Downloading latest version from %s/%s@%s", sourceSpec.Repo, sourceSpec.Path, latestRef)))
- }
-
- newContent, err := downloadWorkflowContent(sourceSpec.Repo, sourceSpec.Path, latestRef, verbose)
- if err != nil {
- return fmt.Errorf("failed to download workflow: %w", err)
- }
-
- var finalContent string
- var hasConflicts bool
-
- // Decide whether to merge or override
- if merge {
- // Merge mode: perform 3-way merge to preserve local changes
- if verbose {
- fmt.Fprintln(os.Stderr, console.FormatVerboseMessage("Using merge mode to preserve local changes"))
- }
-
- // Download the base version (current ref from source)
- if verbose {
- fmt.Fprintln(os.Stderr, console.FormatVerboseMessage(fmt.Sprintf("Downloading base version from %s/%s@%s", sourceSpec.Repo, sourceSpec.Path, currentRef)))
- }
-
- baseContent, err := downloadWorkflowContent(sourceSpec.Repo, sourceSpec.Path, currentRef, verbose)
- if err != nil {
- return fmt.Errorf("failed to download base workflow: %w", err)
- }
-
- // Read current workflow content
- currentContent, err := os.ReadFile(wf.Path)
- if err != nil {
- return fmt.Errorf("failed to read current workflow: %w", err)
- }
-
- // Perform 3-way merge using git merge-file
- updateLog.Printf("Performing 3-way merge for workflow: %s", wf.Name)
- mergedContent, conflicts, err := MergeWorkflowContent(string(baseContent), string(currentContent), string(newContent), wf.SourceSpec, latestRef, verbose)
- if err != nil {
- updateLog.Printf("Merge failed for workflow %s: %v", wf.Name, err)
- return fmt.Errorf("failed to merge workflow content: %w", err)
- }
-
- finalContent = mergedContent
- hasConflicts = conflicts
-
- if hasConflicts {
- updateLog.Printf("Merge conflicts detected in workflow: %s", wf.Name)
- }
- } else {
- // Override mode (default): replace local file with new content from source
- if verbose {
- fmt.Fprintln(os.Stderr, console.FormatVerboseMessage("Using override mode - local changes will be replaced"))
- }
-
- // Update the source field in the new content with the new ref
- newWithUpdatedSource, err := UpdateFieldInFrontmatter(string(newContent), "source", fmt.Sprintf("%s/%s@%s", sourceSpec.Repo, sourceSpec.Path, latestRef))
- if err != nil {
- if verbose {
- fmt.Fprintln(os.Stderr, console.FormatWarningMessage(fmt.Sprintf("Failed to update source in new content: %v", err)))
- }
- // Continue with original new content
- finalContent = string(newContent)
- } else {
- finalContent = newWithUpdatedSource
- }
-
- // Process @include directives if present
- workflow := &WorkflowSpec{
- RepoSpec: RepoSpec{
- RepoSlug: sourceSpec.Repo,
- Version: latestRef,
- },
- WorkflowPath: sourceSpec.Path,
- }
-
- processedContent, err := processIncludesInContent(finalContent, workflow, latestRef, verbose)
- if err != nil {
- if verbose {
- fmt.Fprintln(os.Stderr, console.FormatWarningMessage(fmt.Sprintf("Failed to process includes: %v", err)))
- }
- // Continue with unprocessed content
- } else {
- finalContent = processedContent
- }
- }
-
- // Handle stop-after field modifications
- if noStopAfter {
- // Remove stop-after field if requested
- cleanedContent, err := RemoveFieldFromOnTrigger(finalContent, "stop-after")
- if err != nil {
- if verbose {
- fmt.Fprintln(os.Stderr, console.FormatWarningMessage(fmt.Sprintf("Failed to remove stop-after field: %v", err)))
- }
- } else {
- finalContent = cleanedContent
- if verbose {
- fmt.Fprintln(os.Stderr, console.FormatInfoMessage("Removed stop-after field from workflow"))
- }
- }
- } else if stopAfter != "" {
- // Set custom stop-after value if provided
- updatedContent, err := SetFieldInOnTrigger(finalContent, "stop-after", stopAfter)
- if err != nil {
- if verbose {
- fmt.Fprintln(os.Stderr, console.FormatWarningMessage(fmt.Sprintf("Failed to set stop-after field: %v", err)))
- }
- } else {
- finalContent = updatedContent
- if verbose {
- fmt.Fprintln(os.Stderr, console.FormatInfoMessage(fmt.Sprintf("Set stop-after field to: %s", stopAfter)))
- }
- }
- }
-
- // Write updated content
- if err := os.WriteFile(wf.Path, []byte(finalContent), 0644); err != nil {
- return fmt.Errorf("failed to write updated workflow: %w", err)
- }
-
- if hasConflicts {
- fmt.Fprintln(os.Stderr, console.FormatWarningMessage(fmt.Sprintf("Updated %s from %s to %s with CONFLICTS - please review and resolve manually", wf.Name, currentRef, latestRef)))
- return nil // Not an error, but user needs to resolve conflicts
- }
-
- updateLog.Printf("Successfully updated workflow %s from %s to %s", wf.Name, currentRef, latestRef)
- fmt.Fprintln(os.Stderr, console.FormatSuccessMessage(fmt.Sprintf("Updated %s from %s to %s", wf.Name, currentRef, latestRef)))
-
- // Compile the updated workflow with refreshStopTime enabled
- updateLog.Printf("Compiling updated workflow: %s", wf.Name)
- if err := compileWorkflowWithRefresh(wf.Path, verbose, engineOverride, true); err != nil {
- updateLog.Printf("Compilation failed for workflow %s: %v", wf.Name, err)
- return fmt.Errorf("failed to compile updated workflow: %w", err)
- }
-
- return nil
-}
-
-// normalizeWhitespace normalizes trailing whitespace and newlines to reduce spurious conflicts
-func normalizeWhitespace(content string) string {
- // Split into lines and trim trailing whitespace from each line
- lines := strings.Split(content, "\n")
- for i, line := range lines {
- lines[i] = strings.TrimRight(line, " \t")
- }
-
- // Join back and ensure exactly one trailing newline if content is not empty
- normalized := strings.Join(lines, "\n")
- normalized = strings.TrimRight(normalized, "\n")
- if len(normalized) > 0 {
- normalized += "\n"
- }
-
- return normalized
-}
-
-// hasLocalModifications checks if the local workflow file has been modified from its source
-// It resolves the source field and imports on the remote content, then compares with local
-// Note: stop-after field is ignored during comparison as it's a deployment-specific setting
-func hasLocalModifications(sourceContent, localContent, sourceSpec string, verbose bool) bool {
- // Normalize both contents
- sourceNormalized := normalizeWhitespace(sourceContent)
- localNormalized := normalizeWhitespace(localContent)
-
- // Remove stop-after field from both contents for comparison
- // This field is deployment-specific and should not trigger "local modifications" warnings
- sourceNormalized, _ = RemoveFieldFromOnTrigger(sourceNormalized, "stop-after")
- localNormalized, _ = RemoveFieldFromOnTrigger(localNormalized, "stop-after")
-
- // Parse the source spec to get repo and ref information
- parsedSourceSpec, err := parseSourceSpec(sourceSpec)
- if err != nil {
- if verbose {
- fmt.Fprintln(os.Stderr, console.FormatVerboseMessage(fmt.Sprintf("Failed to parse source spec: %v", err)))
- }
- // Fall back to simple comparison
- return sourceNormalized != localNormalized
- }
-
- // Add the source field to the remote content
- sourceWithSource, err := UpdateFieldInFrontmatter(sourceNormalized, "source", sourceSpec)
- if err != nil {
- if verbose {
- fmt.Fprintln(os.Stderr, console.FormatVerboseMessage(fmt.Sprintf("Failed to add source field to remote content: %v", err)))
- }
- // Fall back to simple comparison
- return sourceNormalized != localNormalized
- }
-
- // Resolve imports on the remote content
- workflow := &WorkflowSpec{
- RepoSpec: RepoSpec{
- RepoSlug: parsedSourceSpec.Repo,
- Version: parsedSourceSpec.Ref,
- },
- WorkflowPath: parsedSourceSpec.Path,
- }
-
- sourceResolved, err := processIncludesInContent(sourceWithSource, workflow, parsedSourceSpec.Ref, verbose)
- if err != nil {
- if verbose {
- fmt.Fprintln(os.Stderr, console.FormatVerboseMessage(fmt.Sprintf("Failed to process imports on remote content: %v", err)))
- }
- // Use the version with source field but without resolved imports
- sourceResolved = sourceWithSource
- }
-
- // Normalize again after processing
- sourceResolvedNormalized := normalizeWhitespace(sourceResolved)
-
- // Compare the normalized contents
- hasModifications := sourceResolvedNormalized != localNormalized
-
- if verbose && hasModifications {
- fmt.Fprintln(os.Stderr, console.FormatVerboseMessage("Local modifications detected"))
- }
-
- return hasModifications
-}
-
-// MergeWorkflowContent performs a 3-way merge of workflow content using git merge-file
-// It returns the merged content, whether conflicts exist, and any error
-func MergeWorkflowContent(base, current, new, oldSourceSpec, newRef string, verbose bool) (string, bool, error) {
- if verbose {
- fmt.Fprintln(os.Stderr, console.FormatVerboseMessage("Performing 3-way merge using git merge-file"))
- }
-
- // Parse the old source spec to get the current ref
- sourceSpec, err := parseSourceSpec(oldSourceSpec)
- if err != nil {
- return "", false, fmt.Errorf("failed to parse source spec: %w", err)
- }
- currentSourceSpec := fmt.Sprintf("%s/%s@%s", sourceSpec.Repo, sourceSpec.Path, sourceSpec.Ref)
-
- // Fix the base version by adding the source field to match what both current and new have
- // This prevents unnecessary conflicts over the source field
- baseWithSource, err := UpdateFieldInFrontmatter(base, "source", currentSourceSpec)
- if err != nil {
- if verbose {
- fmt.Fprintln(os.Stderr, console.FormatWarningMessage(fmt.Sprintf("Failed to add source to base content: %v", err)))
- }
- // Continue with original base content
- baseWithSource = base
- }
-
- // Update the source field in the new content with the new ref
- newWithUpdatedSource, err := UpdateFieldInFrontmatter(new, "source", fmt.Sprintf("%s/%s@%s", sourceSpec.Repo, sourceSpec.Path, newRef))
- if err != nil {
- if verbose {
- fmt.Fprintln(os.Stderr, console.FormatWarningMessage(fmt.Sprintf("Failed to update source in new content: %v", err)))
- }
- // Continue with original new content
- newWithUpdatedSource = new
- }
-
- // Normalize whitespace in all three versions to reduce spurious conflicts
- baseNormalized := normalizeWhitespace(baseWithSource)
- currentNormalized := normalizeWhitespace(current)
- newNormalized := normalizeWhitespace(newWithUpdatedSource)
-
- // Create temporary directory for merge files
- tmpDir, err := os.MkdirTemp("", "gh-aw-merge-*")
- if err != nil {
- return "", false, fmt.Errorf("failed to create temp directory: %w", err)
- }
- defer os.RemoveAll(tmpDir)
-
- // Write base, current, and new versions to temporary files
- baseFile := filepath.Join(tmpDir, "base.md")
- currentFile := filepath.Join(tmpDir, "current.md")
- newFile := filepath.Join(tmpDir, "new.md")
-
- if err := os.WriteFile(baseFile, []byte(baseNormalized), 0644); err != nil {
- return "", false, fmt.Errorf("failed to write base file: %w", err)
- }
- if err := os.WriteFile(currentFile, []byte(currentNormalized), 0644); err != nil {
- return "", false, fmt.Errorf("failed to write current file: %w", err)
- }
- if err := os.WriteFile(newFile, []byte(newNormalized), 0644); err != nil {
- return "", false, fmt.Errorf("failed to write new file: %w", err)
- }
-
- // Execute git merge-file
- // Format: git merge-file
- cmd := exec.Command("git", "merge-file",
- "-L", "current (local changes)",
- "-L", "base (original)",
- "-L", "new (upstream)",
- "--diff3", // Use diff3 style conflict markers for better context
- currentFile, baseFile, newFile)
-
- output, err := cmd.CombinedOutput()
-
- // git merge-file returns:
- // - 0 if merge was successful without conflicts
- // - >0 if conflicts were found (appears to return number of conflicts, but file is still updated)
- // The exit code can be >1 for multiple conflicts, not just errors
- hasConflicts := false
- if err != nil {
- if exitErr, ok := err.(*exec.ExitError); ok {
- exitCode := exitErr.ExitCode()
- if exitCode > 0 && exitCode < 128 {
- // Conflicts found (exit codes 1-127 indicate conflicts)
- // Exit codes >= 128 typically indicate system errors
- hasConflicts = true
- if verbose {
- fmt.Fprintln(os.Stderr, console.FormatWarningMessage(fmt.Sprintf("Merge conflicts detected (exit code: %d)", exitCode)))
- }
- } else {
- // Real error (exit code >= 128)
- return "", false, fmt.Errorf("git merge-file failed: %w\nOutput: %s", err, output)
- }
- } else {
- return "", false, fmt.Errorf("failed to execute git merge-file: %w", err)
- }
- }
-
- // Read the merged content from the current file (git merge-file updates it in-place)
- mergedContent, err := os.ReadFile(currentFile)
- if err != nil {
- return "", false, fmt.Errorf("failed to read merged content: %w", err)
- }
-
- mergedStr := string(mergedContent)
-
- // Process @include directives if present and no conflicts
- // Skip include processing if there are conflicts to avoid errors
- if !hasConflicts {
- sourceSpec, err := parseSourceSpec(oldSourceSpec)
- if err == nil {
- workflow := &WorkflowSpec{
- RepoSpec: RepoSpec{
- RepoSlug: sourceSpec.Repo,
- Version: newRef,
- },
- WorkflowPath: sourceSpec.Path,
- }
-
- processedContent, err := processIncludesInContent(mergedStr, workflow, newRef, verbose)
- if err != nil {
- if verbose {
- fmt.Fprintln(os.Stderr, console.FormatWarningMessage(fmt.Sprintf("Failed to process includes: %v", err)))
- }
- // Return unprocessed content on error
- } else {
- mergedStr = processedContent
- }
- }
- }
-
- return mergedStr, hasConflicts, nil
-}
-
-// UpdateActions updates GitHub Actions versions in .github/aw/actions-lock.json
-// It checks each action for newer releases and updates the SHA if a newer version is found
-func UpdateActions(allowMajor, verbose bool) error {
- updateLog.Print("Starting action updates")
-
- if verbose {
- fmt.Fprintln(os.Stderr, console.FormatInfoMessage("Checking for GitHub Actions updates..."))
- }
-
- // Get the path to actions-lock.json
- actionsLockPath := filepath.Join(".github", "aw", "actions-lock.json")
-
- // Check if the file exists
- if _, err := os.Stat(actionsLockPath); os.IsNotExist(err) {
- if verbose {
- fmt.Fprintln(os.Stderr, console.FormatVerboseMessage(fmt.Sprintf("Actions lock file not found: %s", actionsLockPath)))
- }
- return nil // Not an error, just skip
- }
-
- // Load the current actions lock file
- data, err := os.ReadFile(actionsLockPath)
- if err != nil {
- return fmt.Errorf("failed to read actions lock file: %w", err)
- }
-
- var actionsLock actionsLockFile
- if err := json.Unmarshal(data, &actionsLock); err != nil {
- return fmt.Errorf("failed to parse actions lock file: %w", err)
- }
-
- updateLog.Printf("Loaded %d action entries from actions-lock.json", len(actionsLock.Entries))
-
- // Track updates
- var updatedActions []string
- var failedActions []string
- var skippedActions []string
-
- // Update each action
- for key, entry := range actionsLock.Entries {
- updateLog.Printf("Checking action: %s@%s", entry.Repo, entry.Version)
-
- // Check for latest release
- latestVersion, latestSHA, err := getLatestActionRelease(entry.Repo, entry.Version, allowMajor, verbose)
- if err != nil {
- if verbose {
- fmt.Fprintln(os.Stderr, console.FormatWarningMessage(fmt.Sprintf("Failed to check %s: %v", entry.Repo, err)))
- }
- failedActions = append(failedActions, entry.Repo)
- continue
- }
-
- // Check if update is available
- if latestVersion == entry.Version && latestSHA == entry.SHA {
- if verbose {
- fmt.Fprintln(os.Stderr, console.FormatVerboseMessage(fmt.Sprintf("%s@%s is up to date", entry.Repo, entry.Version)))
- }
- skippedActions = append(skippedActions, entry.Repo)
- continue
- }
-
- // Update the entry
- updateLog.Printf("Updating %s from %s (%s) to %s (%s)", entry.Repo, entry.Version, entry.SHA[:7], latestVersion, latestSHA[:7])
- fmt.Fprintln(os.Stderr, console.FormatSuccessMessage(fmt.Sprintf("Updated %s from %s to %s", entry.Repo, entry.Version, latestVersion)))
-
- // Update the map entry
- actionsLock.Entries[key] = actionsLockEntry{
- Repo: entry.Repo,
- Version: latestVersion,
- SHA: latestSHA,
- }
-
- updatedActions = append(updatedActions, entry.Repo)
- }
-
- // Show summary
- fmt.Fprintln(os.Stderr, "")
- fmt.Fprintln(os.Stderr, console.FormatInfoMessage("=== Actions Update Summary ==="))
- fmt.Fprintln(os.Stderr, "")
-
- if len(updatedActions) > 0 {
- fmt.Fprintln(os.Stderr, console.FormatSuccessMessage(fmt.Sprintf("Updated %d action(s):", len(updatedActions))))
- for _, action := range updatedActions {
- fmt.Fprintln(os.Stderr, console.FormatListItem(action))
- }
- fmt.Fprintln(os.Stderr, "")
- }
-
- if len(skippedActions) > 0 && verbose {
- fmt.Fprintln(os.Stderr, console.FormatInfoMessage(fmt.Sprintf("%d action(s) already up to date", len(skippedActions))))
- fmt.Fprintln(os.Stderr, "")
- }
-
- if len(failedActions) > 0 {
- fmt.Fprintln(os.Stderr, console.FormatWarningMessage(fmt.Sprintf("Failed to check %d action(s):", len(failedActions))))
- for _, action := range failedActions {
- fmt.Fprintf(os.Stderr, " %s\n", action)
- }
- fmt.Fprintln(os.Stderr, "")
- }
-
- // Save the updated actions lock file if there were any updates
- if len(updatedActions) > 0 {
- // Marshal with sorted keys and pretty printing
- updatedData, err := marshalActionsLockSorted(&actionsLock)
- if err != nil {
- return fmt.Errorf("failed to marshal updated actions lock: %w", err)
- }
-
- // Add trailing newline for prettier compliance
- updatedData = append(updatedData, '\n')
-
- if err := os.WriteFile(actionsLockPath, updatedData, 0644); err != nil {
- return fmt.Errorf("failed to write updated actions lock file: %w", err)
- }
-
- updateLog.Printf("Successfully wrote updated actions-lock.json with %d updates", len(updatedActions))
- fmt.Fprintln(os.Stderr, console.FormatInfoMessage("Updated actions-lock.json file"))
- }
-
- return nil
-}
-
-// getLatestActionRelease gets the latest release for an action repository
-// It respects semantic versioning and the allowMajor flag
-func getLatestActionRelease(repo, currentVersion string, allowMajor, verbose bool) (string, string, error) {
- updateLog.Printf("Getting latest release for %s@%s (allowMajor=%v)", repo, currentVersion, allowMajor)
-
- // Use gh CLI to get releases
- cmd := workflow.ExecGH("api", fmt.Sprintf("/repos/%s/releases", repo), "--jq", ".[].tag_name")
- output, err := cmd.CombinedOutput()
- if err != nil {
- // Check if this is an authentication error
- outputStr := string(output)
- if gitutil.IsAuthError(outputStr) || gitutil.IsAuthError(err.Error()) {
- updateLog.Printf("GitHub API authentication failed, attempting git ls-remote fallback for %s", repo)
- // Try fallback using git ls-remote
- latestRelease, latestSHA, gitErr := getLatestActionReleaseViaGit(repo, currentVersion, allowMajor, verbose)
- if gitErr != nil {
- return "", "", fmt.Errorf("failed to fetch releases via GitHub API and git: API error: %w, Git error: %v", err, gitErr)
- }
- return latestRelease, latestSHA, nil
- }
- return "", "", fmt.Errorf("failed to fetch releases: %w", err)
- }
-
- releases := strings.Split(strings.TrimSpace(string(output)), "\n")
- if len(releases) == 0 || releases[0] == "" {
- return "", "", fmt.Errorf("no releases found")
- }
-
- // Parse current version
- currentVer := parseVersion(currentVersion)
- if currentVer == nil {
- // If current version is not a valid semantic version, just return the latest release
- latestRelease := releases[0]
- sha, err := getActionSHAForTag(repo, latestRelease)
- if err != nil {
- return "", "", fmt.Errorf("failed to get SHA for %s: %w", latestRelease, err)
- }
- return latestRelease, sha, nil
- }
-
- // Find the latest compatible release
- var latestCompatible string
- var latestCompatibleVersion *semanticVersion
-
- for _, release := range releases {
- releaseVer := parseVersion(release)
- if releaseVer == nil {
- continue
- }
-
- // Check if compatible based on major version
- if !allowMajor && releaseVer.major != currentVer.major {
- continue
- }
-
- // Check if this is newer than what we have
- if latestCompatibleVersion == nil || releaseVer.isNewer(latestCompatibleVersion) {
- latestCompatible = release
- latestCompatibleVersion = releaseVer
- }
- }
-
- if latestCompatible == "" {
- return "", "", fmt.Errorf("no compatible release found")
- }
-
- // Get the SHA for the latest compatible release
- sha, err := getActionSHAForTag(repo, latestCompatible)
- if err != nil {
- return "", "", fmt.Errorf("failed to get SHA for %s: %w", latestCompatible, err)
- }
-
- return latestCompatible, sha, nil
-}
-
-// getLatestActionReleaseViaGit gets the latest release using git ls-remote (fallback)
-func getLatestActionReleaseViaGit(repo, currentVersion string, allowMajor, verbose bool) (string, string, error) {
- if verbose {
- fmt.Fprintln(os.Stderr, console.FormatVerboseMessage(fmt.Sprintf("Fetching latest release for %s via git ls-remote (current: %s, allow major: %v)", repo, currentVersion, allowMajor)))
- }
-
- repoURL := fmt.Sprintf("https://github.com/%s.git", repo)
-
- // List all tags
- cmd := exec.Command("git", "ls-remote", "--tags", repoURL)
- output, err := cmd.Output()
- if err != nil {
- return "", "", fmt.Errorf("failed to fetch releases via git ls-remote: %w", err)
- }
-
- lines := strings.Split(strings.TrimSpace(string(output)), "\n")
- var releases []string
- tagToSHA := make(map[string]string)
-
- for _, line := range lines {
- // Parse: " refs/tags/"
- parts := strings.Fields(line)
- if len(parts) >= 2 {
- sha := parts[0]
- tagRef := parts[1]
- // Skip ^{} annotations (they point to the commit object)
- if strings.HasSuffix(tagRef, "^{}") {
- continue
- }
- tag := strings.TrimPrefix(tagRef, "refs/tags/")
- releases = append(releases, tag)
- tagToSHA[tag] = sha
- }
- }
-
- if len(releases) == 0 {
- return "", "", fmt.Errorf("no releases found")
- }
-
- // Parse current version
- currentVer := parseVersion(currentVersion)
- if currentVer == nil {
- // If current version is not a valid semantic version, just return the first release
- latestRelease := releases[0]
- sha := tagToSHA[latestRelease]
- if verbose {
- fmt.Fprintln(os.Stderr, console.FormatVerboseMessage(fmt.Sprintf("Current version is not valid, using first release: %s (via git)", latestRelease)))
- }
- return latestRelease, sha, nil
- }
-
- // Find the latest compatible release
- var latestCompatible string
- var latestCompatibleVersion *semanticVersion
-
- for _, release := range releases {
- releaseVer := parseVersion(release)
- if releaseVer == nil {
- continue
- }
-
- // Check if compatible based on major version
- if !allowMajor && releaseVer.major != currentVer.major {
- continue
- }
-
- // Check if this is newer than what we have
- if latestCompatibleVersion == nil || releaseVer.isNewer(latestCompatibleVersion) {
- latestCompatible = release
- latestCompatibleVersion = releaseVer
- }
- }
-
- if latestCompatible == "" {
- return "", "", fmt.Errorf("no compatible release found")
- }
-
- sha := tagToSHA[latestCompatible]
- if verbose {
- fmt.Fprintln(os.Stderr, console.FormatVerboseMessage(fmt.Sprintf("Latest compatible release: %s (via git)", latestCompatible)))
- }
-
- return latestCompatible, sha, nil
-}
-
-// getActionSHAForTag gets the commit SHA for a given tag in an action repository
-func getActionSHAForTag(repo, tag string) (string, error) {
- updateLog.Printf("Getting SHA for %s@%s", repo, tag)
-
- // Use gh CLI to get the git ref for the tag
- cmd := workflow.ExecGH("api", fmt.Sprintf("/repos/%s/git/ref/tags/%s", repo, tag), "--jq", ".object.sha")
- output, err := cmd.Output()
- if err != nil {
- return "", fmt.Errorf("failed to resolve tag: %w", err)
- }
-
- sha := strings.TrimSpace(string(output))
- if sha == "" {
- return "", fmt.Errorf("empty SHA returned for tag")
- }
-
- // Validate SHA format (should be 40 hex characters)
- if len(sha) != 40 {
- return "", fmt.Errorf("invalid SHA format: %s", sha)
- }
-
- return sha, nil
-}
-
-// marshalActionsLockSorted marshals the actions lock with entries sorted by key
-func marshalActionsLockSorted(actionsLock *actionsLockFile) ([]byte, error) {
- // Extract and sort the keys
- keys := make([]string, 0, len(actionsLock.Entries))
- for key := range actionsLock.Entries {
- keys = append(keys, key)
- }
- sort.Strings(keys)
-
- // Build JSON using json.Marshal for proper encoding
- var buf strings.Builder
- buf.WriteString("{\n \"entries\": {\n")
-
- for i, key := range keys {
- entry := actionsLock.Entries[key]
-
- // Marshal the entry to JSON to ensure proper escaping
- entryJSON, err := json.Marshal(entry)
- if err != nil {
- return nil, err
- }
-
- // Marshal the key to ensure proper escaping
- keyJSON, err := json.Marshal(key)
- if err != nil {
- return nil, err
- }
-
- // Write the key-value pair with proper indentation
- buf.WriteString(" ")
- buf.WriteString(string(keyJSON))
- buf.WriteString(": ")
-
- // Pretty-print the entry JSON with proper indentation
- var prettyEntry bytes.Buffer
- if err := json.Indent(&prettyEntry, entryJSON, " ", " "); err != nil {
- return nil, err
- }
- buf.WriteString(prettyEntry.String())
-
- // Add comma if not the last entry
- if i < len(keys)-1 {
- buf.WriteString(",")
- }
- buf.WriteString("\n")
- }
-
- buf.WriteString(" }\n}")
- return []byte(buf.String()), nil
-}
diff --git a/pkg/cli/update_display.go b/pkg/cli/update_display.go
new file mode 100644
index 0000000000..2338667c19
--- /dev/null
+++ b/pkg/cli/update_display.go
@@ -0,0 +1,33 @@
+package cli
+
+import (
+ "fmt"
+ "os"
+
+ "github.com/githubnext/gh-aw/pkg/console"
+)
+
+// showUpdateSummary displays a summary of workflow updates using console helpers
+func showUpdateSummary(successfulUpdates []string, failedUpdates []updateFailure) {
+ fmt.Fprintln(os.Stderr, "")
+ fmt.Fprintln(os.Stderr, console.FormatInfoMessage("=== Update Summary ==="))
+ fmt.Fprintln(os.Stderr, "")
+
+ // Show successful updates
+ if len(successfulUpdates) > 0 {
+ fmt.Fprintln(os.Stderr, console.FormatSuccessMessage(fmt.Sprintf("Successfully updated and compiled %d workflow(s):", len(successfulUpdates))))
+ for _, name := range successfulUpdates {
+ fmt.Fprintln(os.Stderr, console.FormatListItem(name))
+ }
+ fmt.Fprintln(os.Stderr, "")
+ }
+
+ // Show failed updates
+ if len(failedUpdates) > 0 {
+ fmt.Fprintln(os.Stderr, console.FormatErrorMessage(fmt.Sprintf("Failed to update %d workflow(s):", len(failedUpdates))))
+ for _, failure := range failedUpdates {
+ fmt.Fprintf(os.Stderr, " %s %s: %s\n", console.FormatErrorMessage("✗"), failure.Name, failure.Error)
+ }
+ fmt.Fprintln(os.Stderr, "")
+ }
+}
diff --git a/pkg/cli/update_extension_check.go b/pkg/cli/update_extension_check.go
new file mode 100644
index 0000000000..b69fd0803f
--- /dev/null
+++ b/pkg/cli/update_extension_check.go
@@ -0,0 +1,51 @@
+package cli
+
+import (
+ "fmt"
+ "os"
+ "strings"
+
+ "github.com/githubnext/gh-aw/pkg/console"
+ "github.com/githubnext/gh-aw/pkg/workflow"
+)
+
+// checkExtensionUpdate checks if a newer version of gh-aw is available
+func checkExtensionUpdate(verbose bool) error {
+ if verbose {
+ fmt.Fprintln(os.Stderr, console.FormatVerboseMessage("Checking for gh-aw extension updates..."))
+ }
+
+ // Run gh extension upgrade --dry-run to check for updates
+ cmd := workflow.ExecGH("extension", "upgrade", "githubnext/gh-aw", "--dry-run")
+ output, err := cmd.CombinedOutput()
+ if err != nil {
+ if verbose {
+ fmt.Fprintln(os.Stderr, console.FormatWarningMessage(fmt.Sprintf("Failed to check for extension updates: %v", err)))
+ }
+ return nil // Don't fail the whole command if update check fails
+ }
+
+ outputStr := strings.TrimSpace(string(output))
+ if verbose {
+ fmt.Fprintln(os.Stderr, console.FormatVerboseMessage(fmt.Sprintf("Extension update check output: %s", outputStr)))
+ }
+
+ // Parse the output to see if an update is available
+ // Expected format: "[aw]: would have upgraded from v0.14.0 to v0.18.1"
+ lines := strings.Split(outputStr, "\n")
+ for _, line := range lines {
+ if strings.Contains(line, "[aw]: would have upgraded from") {
+ fmt.Fprintln(os.Stderr, console.FormatInfoMessage(line))
+ fmt.Fprintln(os.Stderr, console.FormatInfoMessage("Run 'gh extension upgrade githubnext/gh-aw' to update"))
+ return nil
+ }
+ }
+
+ if strings.Contains(outputStr, "✓ Successfully checked extension upgrades") {
+ if verbose {
+ fmt.Fprintln(os.Stderr, console.FormatSuccessMessage("gh-aw extension is up to date"))
+ }
+ }
+
+ return nil
+}
diff --git a/pkg/cli/update_git.go b/pkg/cli/update_git.go
new file mode 100644
index 0000000000..8dda59d920
--- /dev/null
+++ b/pkg/cli/update_git.go
@@ -0,0 +1,95 @@
+package cli
+
+import (
+ "fmt"
+ "math/rand"
+ "os"
+ "os/exec"
+ "strings"
+
+ "github.com/githubnext/gh-aw/pkg/console"
+ "github.com/githubnext/gh-aw/pkg/workflow"
+)
+
+// hasGitChanges reports whether the working tree is dirty: `git status --porcelain` prints one line per staged, modified, or untracked file, so any non-blank output means there are changes
+func hasGitChanges() (bool, error) {
+	cmd := exec.Command("git", "status", "--porcelain") // stable, script-friendly output format
+	output, err := cmd.Output()
+	if err != nil {
+		return false, fmt.Errorf("failed to check git status: %w", err)
+	}
+
+	return len(strings.TrimSpace(string(output))) > 0, nil
+}
+
+// runGitCommand runs a git command with the specified arguments; on failure the returned error includes git's own output so the actual reason is diagnosable
+func runGitCommand(args ...string) error {
+	out, err := exec.Command("git", args...).CombinedOutput() // capture stdout+stderr: Run() would silently discard git's error message
+	if err != nil {
+		return fmt.Errorf("git %s failed: %w\nOutput: %s", strings.Join(args, " "), err, strings.TrimSpace(string(out)))
+	}
+	return nil
+}
+
+// createUpdatePR commits all pending changes on a new branch, pushes it to origin, and opens a pull request via the GitHub CLI
+func createUpdatePR(verbose bool) error {
+	// Check if GitHub CLI is available
+	if !isGHCLIAvailable() {
+		return fmt.Errorf("GitHub CLI (gh) is required for PR creation but not found in PATH")
+	}
+
+	// Check if there are any changes to commit
+	hasChanges, err := hasGitChanges()
+	if err != nil {
+		return err // hasGitChanges already wraps with "failed to check git status" context; don't double-wrap
+	}
+
+	if !hasChanges {
+		fmt.Fprintln(os.Stderr, console.FormatInfoMessage("No changes to create PR for"))
+		return nil
+	}
+
+	if verbose {
+		fmt.Fprintln(os.Stderr, console.FormatVerboseMessage("Creating pull request with workflow updates..."))
+	}
+
+	// Create a branch name with a random numeric suffix to avoid collisions
+	randomNum := rand.Intn(9000) + 1000 // Generate number between 1000-9999
+	branchName := fmt.Sprintf("update-workflows-%d", randomNum)
+
+	// Create and checkout new branch
+	if err := runGitCommand("checkout", "-b", branchName); err != nil {
+		return fmt.Errorf("failed to create branch: %w", err)
+	}
+
+	// Add all changes
+	if err := runGitCommand("add", "."); err != nil {
+		return fmt.Errorf("failed to add changes: %w", err)
+	}
+
+	// Commit changes
+	commitMsg := "Update workflows and recompile"
+	if err := runGitCommand("commit", "-m", commitMsg); err != nil {
+		return fmt.Errorf("failed to commit changes: %w", err)
+	}
+
+	// Push branch
+	if err := runGitCommand("push", "-u", "origin", branchName); err != nil {
+		return fmt.Errorf("failed to push branch: %w", err)
+	}
+
+	// Create PR (gh infers the head from the branch just pushed)
+	cmd := workflow.ExecGH("pr", "create",
+		"--title", "Update workflows and recompile",
+		"--body", "This PR updates workflows from their source repositories and recompiles them.\n\nGenerated by `gh aw update --pr`")
+
+	output, err := cmd.CombinedOutput()
+	if err != nil {
+		return fmt.Errorf("failed to create PR: %w\nOutput: %s", err, string(output))
+	}
+
+	fmt.Fprintln(os.Stderr, console.FormatSuccessMessage("Successfully created pull request"))
+	fmt.Fprintln(os.Stderr, console.FormatInfoMessage(strings.TrimSpace(string(output))))
+
+	return nil
+}
diff --git a/pkg/cli/update_merge.go b/pkg/cli/update_merge.go
new file mode 100644
index 0000000000..ecd30f02b0
--- /dev/null
+++ b/pkg/cli/update_merge.go
@@ -0,0 +1,226 @@
+package cli
+
+import (
+ "fmt"
+ "os"
+ "os/exec"
+ "path/filepath"
+ "strings"
+
+ "github.com/githubnext/gh-aw/pkg/console"
+)
+
+// normalizeWhitespace normalizes trailing whitespace and newlines to reduce spurious merge conflicts
+func normalizeWhitespace(content string) string {
+	// Split into lines and trim trailing spaces/tabs from each line
+	lines := strings.Split(content, "\n")
+	for i, line := range lines {
+		lines[i] = strings.TrimRight(line, " \t")
+	}
+
+	// Join back and ensure exactly one trailing newline if content is not empty
+	normalized := strings.Join(lines, "\n")
+	normalized = strings.TrimRight(normalized, "\n")
+	if len(normalized) > 0 {
+		normalized += "\n"
+	}
+
+	return normalized
+}
+
+// hasLocalModifications checks if the local workflow file has been modified from its source.
+// It resolves the source field and imports on the remote content, then compares with local.
+// Note: stop-after field is ignored during comparison as it's a deployment-specific setting.
+func hasLocalModifications(sourceContent, localContent, sourceSpec string, verbose bool) bool {
+	// Normalize both contents (trailing whitespace/newlines) so formatting differences don't count
+	sourceNormalized := normalizeWhitespace(sourceContent)
+	localNormalized := normalizeWhitespace(localContent)
+
+	// Remove stop-after field from both contents for comparison
+	// This field is deployment-specific and should not trigger "local modifications" warnings
+	sourceNormalized, _ = RemoveFieldFromOnTrigger(sourceNormalized, "stop-after")
+	localNormalized, _ = RemoveFieldFromOnTrigger(localNormalized, "stop-after")
+
+	// Parse the source spec to get repo and ref information
+	parsedSourceSpec, err := parseSourceSpec(sourceSpec)
+	if err != nil {
+		if verbose {
+			fmt.Fprintln(os.Stderr, console.FormatVerboseMessage(fmt.Sprintf("Failed to parse source spec: %v", err)))
+		}
+		// Fall back to simple comparison (no source-field/import resolution possible)
+		return sourceNormalized != localNormalized
+	}
+
+	// Add the source field to the remote content, since the local copy carries one
+	sourceWithSource, err := UpdateFieldInFrontmatter(sourceNormalized, "source", sourceSpec)
+	if err != nil {
+		if verbose {
+			fmt.Fprintln(os.Stderr, console.FormatVerboseMessage(fmt.Sprintf("Failed to add source field to remote content: %v", err)))
+		}
+		// Fall back to simple comparison
+		return sourceNormalized != localNormalized
+	}
+
+	// Resolve imports on the remote content so it matches the fully-expanded local file
+	workflow := &WorkflowSpec{
+		RepoSpec: RepoSpec{
+			RepoSlug: parsedSourceSpec.Repo,
+			Version:  parsedSourceSpec.Ref,
+		},
+		WorkflowPath: parsedSourceSpec.Path,
+	}
+
+	sourceResolved, err := processIncludesInContent(sourceWithSource, workflow, parsedSourceSpec.Ref, verbose)
+	if err != nil {
+		if verbose {
+			fmt.Fprintln(os.Stderr, console.FormatVerboseMessage(fmt.Sprintf("Failed to process imports on remote content: %v", err)))
+		}
+		// Use the version with source field but without resolved imports
+		sourceResolved = sourceWithSource
+	}
+
+	// Normalize again after processing (include expansion may introduce whitespace)
+	sourceResolvedNormalized := normalizeWhitespace(sourceResolved)
+
+	// Compare the normalized contents
+	hasModifications := sourceResolvedNormalized != localNormalized
+
+	if verbose && hasModifications {
+		fmt.Fprintln(os.Stderr, console.FormatVerboseMessage("Local modifications detected"))
+	}
+
+	return hasModifications
+}
+
+// MergeWorkflowContent performs a 3-way merge of workflow content using git merge-file.
+// It returns the merged content, whether conflicts exist, and any error.
+func MergeWorkflowContent(base, current, incoming, oldSourceSpec, newRef string, verbose bool) (string, bool, error) {
+	if verbose {
+		fmt.Fprintln(os.Stderr, console.FormatVerboseMessage("Performing 3-way merge using git merge-file"))
+	}
+
+	// Parse the old source spec to get the current ref
+	sourceSpec, err := parseSourceSpec(oldSourceSpec)
+	if err != nil {
+		return "", false, fmt.Errorf("failed to parse source spec: %w", err)
+	}
+	currentSourceSpec := fmt.Sprintf("%s/%s@%s", sourceSpec.Repo, sourceSpec.Path, sourceSpec.Ref)
+
+	// Fix the base version by adding the source field to match what both current and new have
+	// This prevents unnecessary conflicts over the source field
+	baseWithSource, err := UpdateFieldInFrontmatter(base, "source", currentSourceSpec)
+	if err != nil {
+		if verbose {
+			fmt.Fprintln(os.Stderr, console.FormatWarningMessage(fmt.Sprintf("Failed to add source to base content: %v", err)))
+		}
+		// Continue with original base content
+		baseWithSource = base
+	}
+
+	// Update the source field in the incoming content with the new ref
+	newWithUpdatedSource, err := UpdateFieldInFrontmatter(incoming, "source", fmt.Sprintf("%s/%s@%s", sourceSpec.Repo, sourceSpec.Path, newRef))
+	if err != nil {
+		if verbose {
+			fmt.Fprintln(os.Stderr, console.FormatWarningMessage(fmt.Sprintf("Failed to update source in new content: %v", err)))
+		}
+		// Continue with original incoming content
+		newWithUpdatedSource = incoming
+	}
+
+	// Normalize whitespace in all three versions to reduce spurious conflicts
+	baseNormalized := normalizeWhitespace(baseWithSource)
+	currentNormalized := normalizeWhitespace(current)
+	newNormalized := normalizeWhitespace(newWithUpdatedSource)
+
+	// Create temporary directory for merge files
+	tmpDir, err := os.MkdirTemp("", "gh-aw-merge-*")
+	if err != nil {
+		return "", false, fmt.Errorf("failed to create temp directory: %w", err)
+	}
+	defer os.RemoveAll(tmpDir)
+
+	// Write base, current, and new versions to temporary files
+	baseFile := filepath.Join(tmpDir, "base.md")
+	currentFile := filepath.Join(tmpDir, "current.md")
+	newFile := filepath.Join(tmpDir, "new.md")
+
+	if err := os.WriteFile(baseFile, []byte(baseNormalized), 0644); err != nil {
+		return "", false, fmt.Errorf("failed to write base file: %w", err)
+	}
+	if err := os.WriteFile(currentFile, []byte(currentNormalized), 0644); err != nil {
+		return "", false, fmt.Errorf("failed to write current file: %w", err)
+	}
+	if err := os.WriteFile(newFile, []byte(newNormalized), 0644); err != nil {
+		return "", false, fmt.Errorf("failed to write new file: %w", err)
+	}
+
+	// Execute git merge-file
+	// Usage: git merge-file [-L label]... <current> <base> <other> — the result is written back into <current>
+	cmd := exec.Command("git", "merge-file",
+		"-L", "current (local changes)",
+		"-L", "base (original)",
+		"-L", "new (upstream)",
+		"--diff3", // Use diff3 style conflict markers for better context
+		currentFile, baseFile, newFile)
+
+	output, err := cmd.CombinedOutput()
+
+	// git merge-file returns:
+	// - 0 if merge was successful without conflicts
+	// - >0 if conflicts were found (appears to return number of conflicts, but file is still updated)
+	// The exit code can be >1 for multiple conflicts, not just errors
+	hasConflicts := false
+	if err != nil {
+		if exitErr, ok := err.(*exec.ExitError); ok {
+			exitCode := exitErr.ExitCode()
+			if exitCode > 0 && exitCode < 128 {
+				// Conflicts found (exit codes 1-127 indicate conflicts)
+				// Exit codes >= 128 typically indicate system errors
+				hasConflicts = true
+				if verbose {
+					fmt.Fprintln(os.Stderr, console.FormatWarningMessage(fmt.Sprintf("Merge conflicts detected (exit code: %d)", exitCode)))
+				}
+			} else {
+				// Real error (exit code >= 128)
+				return "", false, fmt.Errorf("git merge-file failed: %w\nOutput: %s", err, output)
+			}
+		} else {
+			return "", false, fmt.Errorf("failed to execute git merge-file: %w", err)
+		}
+	}
+
+	// Read the merged content from the current file (git merge-file updates it in-place)
+	mergedContent, err := os.ReadFile(currentFile)
+	if err != nil {
+		return "", false, fmt.Errorf("failed to read merged content: %w", err)
+	}
+
+	mergedStr := string(mergedContent)
+
+	// Process @include directives when the merge is clean; skipped on conflicts to avoid errors.
+	// The re-parse below mirrors the parse at the top of the function and succeeds here.
+	if !hasConflicts {
+		sourceSpec, err := parseSourceSpec(oldSourceSpec)
+		if err == nil {
+			workflow := &WorkflowSpec{
+				RepoSpec: RepoSpec{
+					RepoSlug: sourceSpec.Repo,
+					Version:  newRef,
+				},
+				WorkflowPath: sourceSpec.Path,
+			}
+
+			processedContent, err := processIncludesInContent(mergedStr, workflow, newRef, verbose)
+			if err != nil {
+				if verbose {
+					fmt.Fprintln(os.Stderr, console.FormatWarningMessage(fmt.Sprintf("Failed to process includes: %v", err)))
+				}
+				// Return unprocessed content on error
+			} else {
+				mergedStr = processedContent
+			}
+		}
+	}
+
+	return mergedStr, hasConflicts, nil
+}
diff --git a/pkg/cli/update_types.go b/pkg/cli/update_types.go
new file mode 100644
index 0000000000..0142857a73
--- /dev/null
+++ b/pkg/cli/update_types.go
@@ -0,0 +1,26 @@
+package cli
+
+// workflowWithSource represents a workflow with its source information
+type workflowWithSource struct {
+	Name string // workflow name (the .md file name without its extension)
+	Path string // path to the workflow .md file within the workflows directory
+	SourceSpec string // e.g., "owner/repo/path@ref"
+}
+
+// updateFailure represents a failed workflow update
+type updateFailure struct {
+	Name string // name of the workflow that failed to update
+	Error string // human-readable description of the failure
+}
+
+// actionsLockEntry represents a single action pin entry
+type actionsLockEntry struct {
+	Repo string `json:"repo"` // action repository slug, presumably "owner/name" — verify against the lock-file writer
+	Version string `json:"version"` // version the pin was resolved from (e.g. a tag) — assumption, confirm
+	SHA string `json:"sha"` // commit SHA the action is pinned to
+}
+
+// actionsLockFile represents the structure of actions-lock.json
+type actionsLockFile struct {
+	Entries map[string]actionsLockEntry `json:"entries"` // pin entries; key format is determined by the writer
+}
diff --git a/pkg/cli/update_workflows.go b/pkg/cli/update_workflows.go
new file mode 100644
index 0000000000..dfc05907bf
--- /dev/null
+++ b/pkg/cli/update_workflows.go
@@ -0,0 +1,453 @@
+package cli
+
+import (
+ "fmt"
+ "os"
+ "path/filepath"
+ "strings"
+
+ "github.com/githubnext/gh-aw/pkg/console"
+ "github.com/githubnext/gh-aw/pkg/parser"
+ "github.com/githubnext/gh-aw/pkg/workflow"
+)
+
+// UpdateWorkflows updates workflows from their source repositories. Only workflows whose frontmatter has a "source" field are considered; per-workflow failures are collected and summarized, and an error is returned only when no workflow updated successfully.
+func UpdateWorkflows(workflowNames []string, allowMajor, force, verbose bool, engineOverride string, workflowsDir string, noStopAfter bool, stopAfter string, merge bool) error {
+	updateLog.Printf("Scanning for workflows with source field: dir=%s, filter=%v, merge=%v", workflowsDir, workflowNames, merge)
+
+	// Use provided workflows directory or default
+	if workflowsDir == "" {
+		workflowsDir = getWorkflowsDir()
+	}
+
+	// Find all workflows with source field
+	workflows, err := findWorkflowsWithSource(workflowsDir, workflowNames, verbose)
+	if err != nil {
+		return err
+	}
+
+	updateLog.Printf("Found %d workflows with source field", len(workflows))
+
+	if len(workflows) == 0 {
+		if len(workflowNames) > 0 {
+			return fmt.Errorf("no workflows found matching the specified names with source field")
+		}
+		return fmt.Errorf("no workflows found with source field")
+	}
+
+	fmt.Fprintln(os.Stderr, console.FormatInfoMessage(fmt.Sprintf("Found %d workflow(s) to update", len(workflows))))
+
+	// Track update results
+	var successfulUpdates []string
+	var failedUpdates []updateFailure
+
+	// Update each workflow independently; one failure does not abort the rest
+	for _, wf := range workflows {
+		if err := updateWorkflow(wf, allowMajor, force, verbose, engineOverride, noStopAfter, stopAfter, merge); err != nil {
+			failedUpdates = append(failedUpdates, updateFailure{
+				Name: wf.Name,
+				Error: err.Error(),
+			})
+			continue
+		}
+		successfulUpdates = append(successfulUpdates, wf.Name)
+	}
+
+	// Show summary
+	showUpdateSummary(successfulUpdates, failedUpdates)
+
+	if len(successfulUpdates) == 0 { // note: already-up-to-date workflows count as successful
+		return fmt.Errorf("no workflows were successfully updated")
+	}
+
+	return nil
+}
+
+// findWorkflowsWithSource finds all workflows that have a source field, optionally filtered by name.
+func findWorkflowsWithSource(workflowsDir string, filterNames []string, verbose bool) ([]*workflowWithSource, error) {
+	var workflows []*workflowWithSource
+
+	// Read all .md files in workflows directory
+	entries, err := os.ReadDir(workflowsDir)
+	if err != nil {
+		return nil, fmt.Errorf("failed to read workflows directory: %w", err)
+	}
+
+	for _, entry := range entries {
+		if entry.IsDir() || !strings.HasSuffix(entry.Name(), ".md") {
+			continue
+		}
+
+		// No separate .lock.yml filter is needed here: the .md suffix check
+		// above already excludes compiled lock files, since a file name
+		// cannot end in both ".md" and ".lock.yml". (A previous explicit
+		// check for that suffix was unreachable and has been removed.)
+
+		workflowPath := filepath.Join(workflowsDir, entry.Name())
+		workflowName := strings.TrimSuffix(entry.Name(), ".md")
+
+		// Filter by name if specified
+		if len(filterNames) > 0 {
+			matched := false
+			for _, filterName := range filterNames {
+				// Remove .md extension if present
+				filterName = strings.TrimSuffix(filterName, ".md")
+				if workflowName == filterName {
+					matched = true
+					break
+				}
+			}
+			if !matched {
+				continue
+			}
+		}
+
+		// Read the workflow file and extract source field
+		content, err := os.ReadFile(workflowPath)
+		if err != nil {
+			if verbose {
+				fmt.Fprintln(os.Stderr, console.FormatWarningMessage(fmt.Sprintf("Failed to read %s: %v", workflowPath, err)))
+			}
+			continue
+		}
+
+		// Parse frontmatter
+		result, err := parser.ExtractFrontmatterFromContent(string(content))
+		if err != nil {
+			if verbose {
+				fmt.Fprintln(os.Stderr, console.FormatWarningMessage(fmt.Sprintf("Failed to parse frontmatter in %s: %v", workflowPath, err)))
+			}
+			continue
+		}
+
+		// Check for source field
+		sourceRaw, ok := result.Frontmatter["source"]
+		if !ok {
+			if verbose {
+				fmt.Fprintln(os.Stderr, console.FormatVerboseMessage(fmt.Sprintf("Skipping %s: no source field", workflowName)))
+			}
+			continue
+		}
+
+		source, ok := sourceRaw.(string)
+		if !ok || source == "" {
+			if verbose {
+				fmt.Fprintln(os.Stderr, console.FormatWarningMessage(fmt.Sprintf("Skipping %s: invalid source field", workflowName)))
+			}
+			continue
+		}
+
+		workflows = append(workflows, &workflowWithSource{
+			Name: workflowName,
+			Path: workflowPath,
+			SourceSpec: strings.TrimSpace(source),
+		})
+	}
+
+	return workflows, nil
+}
+
+// resolveLatestRef resolves the latest ref for a workflow source. Semver tags are resolved to the newest compatible release; any other ref is treated as a branch name and returned unchanged after verifying the branch exists.
+func resolveLatestRef(repo, currentRef string, allowMajor, verbose bool) (string, error) {
+	updateLog.Printf("Resolving latest ref: repo=%s, currentRef=%s, allowMajor=%v", repo, currentRef, allowMajor)
+
+	if verbose {
+		fmt.Fprintln(os.Stderr, console.FormatVerboseMessage(fmt.Sprintf("Resolving latest ref for %s (current: %s)", repo, currentRef)))
+	}
+
+	// Check if current ref is a tag (looks like a semantic version)
+	if isSemanticVersionTag(currentRef) {
+		updateLog.Print("Current ref is semantic version tag, resolving latest release")
+		return resolveLatestRelease(repo, currentRef, allowMajor, verbose)
+	}
+
+	// Otherwise, treat as branch and get latest commit
+	if verbose {
+		fmt.Fprintln(os.Stderr, console.FormatVerboseMessage(fmt.Sprintf("Treating %s as branch, getting latest commit", currentRef)))
+	}
+
+	// Query the branch's head commit. This primarily validates that the branch exists; the SHA itself is only logged below.
+	cmd := workflow.ExecGH("api", fmt.Sprintf("/repos/%s/branches/%s", repo, currentRef), "--jq", ".commit.sha")
+	output, err := cmd.Output()
+	if err != nil {
+		return "", fmt.Errorf("failed to get latest commit for branch %s: %w", currentRef, err)
+	}
+
+	latestSHA := strings.TrimSpace(string(output))
+	updateLog.Printf("Latest commit for branch %s: %s", currentRef, latestSHA)
+
+	// For branches, we return the branch name, not the SHA
+	// The source spec will remain as branch@branchname
+	return currentRef, nil
+}
+
+// resolveLatestRelease resolves the latest compatible release for a workflow source. "Compatible" means same major version unless allowMajor is true.
+func resolveLatestRelease(repo, currentRef string, allowMajor, verbose bool) (string, error) {
+	updateLog.Printf("Resolving latest release for repo %s (current: %s, allowMajor=%v)", repo, currentRef, allowMajor)
+
+	if verbose {
+		fmt.Fprintln(os.Stderr, console.FormatVerboseMessage(fmt.Sprintf("Checking for latest release (current: %s, allow major: %v)", currentRef, allowMajor)))
+	}
+
+	// Get all releases using gh CLI
+	cmd := workflow.ExecGH("api", fmt.Sprintf("/repos/%s/releases", repo), "--jq", ".[].tag_name")
+	output, err := cmd.Output()
+	if err != nil {
+		return "", fmt.Errorf("failed to fetch releases: %w", err)
+	}
+
+	releases := strings.Split(strings.TrimSpace(string(output)), "\n")
+	if len(releases) == 0 || releases[0] == "" {
+		return "", fmt.Errorf("no releases found")
+	}
+
+	// Parse current version
+	currentVer := parseVersion(currentRef)
+	if currentVer == nil {
+		// Current ref is not valid semver; fall back to the first listed release (assumes the API lists newest first — TODO confirm; prereleases are not filtered here)
+		latestRelease := releases[0]
+		if verbose {
+			fmt.Fprintln(os.Stderr, console.FormatInfoMessage(fmt.Sprintf("Current version is not valid, using latest release: %s", latestRelease)))
+		}
+		return latestRelease, nil
+	}
+
+	// Find the latest compatible release by scanning every parsable tag
+	var latestCompatible string
+	var latestCompatibleVersion *semanticVersion
+
+	for _, release := range releases {
+		releaseVer := parseVersion(release)
+		if releaseVer == nil {
+			continue
+		}
+
+		// Check if compatible based on major version
+		if !allowMajor && releaseVer.major != currentVer.major {
+			continue
+		}
+
+		// Check if this is newer than what we have
+		if latestCompatibleVersion == nil || releaseVer.isNewer(latestCompatibleVersion) {
+			latestCompatible = release
+			latestCompatibleVersion = releaseVer
+		}
+	}
+
+	if latestCompatible == "" {
+		return "", fmt.Errorf("no compatible release found")
+	}
+
+	if verbose && latestCompatible != currentRef {
+		fmt.Fprintln(os.Stderr, console.FormatInfoMessage(fmt.Sprintf("Found newer release: %s", latestCompatible)))
+	}
+
+	return latestCompatible, nil
+}
+
+// updateWorkflow updates a single workflow from its source: it resolves the latest ref, skips (with a warning) when already up to date, applies either a 3-way merge or a plain override, adjusts the stop-after field, writes the file, and recompiles it.
+func updateWorkflow(wf *workflowWithSource, allowMajor, force, verbose bool, engineOverride string, noStopAfter bool, stopAfter string, merge bool) error {
+	updateLog.Printf("Updating workflow: name=%s, source=%s, force=%v, merge=%v", wf.Name, wf.SourceSpec, force, merge)
+
+	if verbose {
+		fmt.Fprintln(os.Stderr, console.FormatInfoMessage(fmt.Sprintf("\nUpdating workflow: %s", wf.Name)))
+		fmt.Fprintln(os.Stderr, console.FormatVerboseMessage(fmt.Sprintf("Source: %s", wf.SourceSpec)))
+	}
+
+	// Parse source spec
+	sourceSpec, err := parseSourceSpec(wf.SourceSpec)
+	if err != nil {
+		updateLog.Printf("Failed to parse source spec: %v", err)
+		return fmt.Errorf("failed to parse source spec: %w", err)
+	}
+
+	// If no ref specified, use default branch
+	currentRef := sourceSpec.Ref
+	if currentRef == "" {
+		currentRef = "main" // assumes the default branch is "main" — the repo's actual default is not queried
+	}
+
+	// Resolve latest ref
+	latestRef, err := resolveLatestRef(sourceSpec.Repo, currentRef, allowMajor, verbose)
+	if err != nil {
+		return fmt.Errorf("failed to resolve latest ref: %w", err)
+	}
+
+	if verbose {
+		fmt.Fprintln(os.Stderr, console.FormatVerboseMessage(fmt.Sprintf("Current ref: %s", currentRef)))
+		fmt.Fprintln(os.Stderr, console.FormatVerboseMessage(fmt.Sprintf("Latest ref: %s", latestRef)))
+	}
+
+	// Check if update is needed
+	if !force && currentRef == latestRef {
+		updateLog.Printf("Workflow already at latest ref: %s, checking for local modifications", currentRef)
+
+		// Download the source content to check if local file has been modified
+		sourceContent, err := downloadWorkflowContent(sourceSpec.Repo, sourceSpec.Path, currentRef, verbose)
+		if err != nil {
+			// If we can't download for comparison, just show the up-to-date message
+			if verbose {
+				fmt.Fprintln(os.Stderr, console.FormatWarningMessage(fmt.Sprintf("Failed to download source for comparison: %v", err)))
+			}
+			fmt.Fprintln(os.Stderr, console.FormatInfoMessage(fmt.Sprintf("Workflow %s is already up to date (%s)", wf.Name, currentRef)))
+			return nil
+		}
+
+		// Read current workflow content
+		currentContent, err := os.ReadFile(wf.Path)
+		if err != nil {
+			return fmt.Errorf("failed to read current workflow: %w", err)
+		}
+
+		// Check if local file differs from source
+		if hasLocalModifications(string(sourceContent), string(currentContent), wf.SourceSpec, verbose) {
+			updateLog.Printf("Local modifications detected in workflow: %s", wf.Name)
+			fmt.Fprintln(os.Stderr, console.FormatInfoMessage(fmt.Sprintf("Workflow %s is already up to date (%s)", wf.Name, currentRef)))
+			fmt.Fprintln(os.Stderr, console.FormatWarningMessage(fmt.Sprintf("⚠️ Local copy of %s has been modified from source", wf.Name)))
+			return nil
+		}
+
+		updateLog.Printf("Workflow %s is up to date with no local modifications", wf.Name)
+		fmt.Fprintln(os.Stderr, console.FormatInfoMessage(fmt.Sprintf("Workflow %s is already up to date (%s)", wf.Name, currentRef)))
+		return nil
+	}
+
+	// Download the latest version
+	if verbose {
+		fmt.Fprintln(os.Stderr, console.FormatVerboseMessage(fmt.Sprintf("Downloading latest version from %s/%s@%s", sourceSpec.Repo, sourceSpec.Path, latestRef)))
+	}
+
+	newContent, err := downloadWorkflowContent(sourceSpec.Repo, sourceSpec.Path, latestRef, verbose)
+	if err != nil {
+		return fmt.Errorf("failed to download workflow: %w", err)
+	}
+
+	var finalContent string
+	var hasConflicts bool
+
+	// Decide whether to merge or override
+	if merge {
+		// Merge mode: perform 3-way merge to preserve local changes
+		if verbose {
+			fmt.Fprintln(os.Stderr, console.FormatVerboseMessage("Using merge mode to preserve local changes"))
+		}
+
+		// Download the base version (current ref from source)
+		if verbose {
+			fmt.Fprintln(os.Stderr, console.FormatVerboseMessage(fmt.Sprintf("Downloading base version from %s/%s@%s", sourceSpec.Repo, sourceSpec.Path, currentRef)))
+		}
+
+		baseContent, err := downloadWorkflowContent(sourceSpec.Repo, sourceSpec.Path, currentRef, verbose)
+		if err != nil {
+			return fmt.Errorf("failed to download base workflow: %w", err)
+		}
+
+		// Read current workflow content
+		currentContent, err := os.ReadFile(wf.Path)
+		if err != nil {
+			return fmt.Errorf("failed to read current workflow: %w", err)
+		}
+
+		// Perform 3-way merge using git merge-file
+		updateLog.Printf("Performing 3-way merge for workflow: %s", wf.Name)
+		mergedContent, conflicts, err := MergeWorkflowContent(string(baseContent), string(currentContent), string(newContent), wf.SourceSpec, latestRef, verbose)
+		if err != nil {
+			updateLog.Printf("Merge failed for workflow %s: %v", wf.Name, err)
+			return fmt.Errorf("failed to merge workflow content: %w", err)
+		}
+
+		finalContent = mergedContent
+		hasConflicts = conflicts
+
+		if hasConflicts {
+			updateLog.Printf("Merge conflicts detected in workflow: %s", wf.Name)
+		}
+	} else {
+		// Override mode (default): replace local file with new content from source
+		if verbose {
+			fmt.Fprintln(os.Stderr, console.FormatVerboseMessage("Using override mode - local changes will be replaced"))
+		}
+
+		// Update the source field in the new content with the new ref
+		newWithUpdatedSource, err := UpdateFieldInFrontmatter(string(newContent), "source", fmt.Sprintf("%s/%s@%s", sourceSpec.Repo, sourceSpec.Path, latestRef))
+		if err != nil {
+			if verbose {
+				fmt.Fprintln(os.Stderr, console.FormatWarningMessage(fmt.Sprintf("Failed to update source in new content: %v", err)))
+			}
+			// Continue with original new content
+			finalContent = string(newContent)
+		} else {
+			finalContent = newWithUpdatedSource
+		}
+
+		// Process @include directives if present ("workflowSpec": avoids shadowing the imported workflow package)
+		workflowSpec := &WorkflowSpec{
+			RepoSpec: RepoSpec{
+				RepoSlug: sourceSpec.Repo,
+				Version:  latestRef,
+			},
+			WorkflowPath: sourceSpec.Path,
+		}
+
+		processedContent, err := processIncludesInContent(finalContent, workflowSpec, latestRef, verbose)
+		if err != nil {
+			if verbose {
+				fmt.Fprintln(os.Stderr, console.FormatWarningMessage(fmt.Sprintf("Failed to process includes: %v", err)))
+			}
+			// Continue with unprocessed content
+		} else {
+			finalContent = processedContent
+		}
+	}
+
+	// Handle stop-after field modifications
+	if noStopAfter {
+		// Remove stop-after field if requested
+		cleanedContent, err := RemoveFieldFromOnTrigger(finalContent, "stop-after")
+		if err != nil {
+			if verbose {
+				fmt.Fprintln(os.Stderr, console.FormatWarningMessage(fmt.Sprintf("Failed to remove stop-after field: %v", err)))
+			}
+		} else {
+			finalContent = cleanedContent
+			if verbose {
+				fmt.Fprintln(os.Stderr, console.FormatInfoMessage("Removed stop-after field from workflow"))
+			}
+		}
+	} else if stopAfter != "" {
+		// Set custom stop-after value if provided
+		updatedContent, err := SetFieldInOnTrigger(finalContent, "stop-after", stopAfter)
+		if err != nil {
+			if verbose {
+				fmt.Fprintln(os.Stderr, console.FormatWarningMessage(fmt.Sprintf("Failed to set stop-after field: %v", err)))
+			}
+		} else {
+			finalContent = updatedContent
+			if verbose {
+				fmt.Fprintln(os.Stderr, console.FormatInfoMessage(fmt.Sprintf("Set stop-after field to: %s", stopAfter)))
+			}
+		}
+	}
+
+	// Write updated content
+	if err := os.WriteFile(wf.Path, []byte(finalContent), 0644); err != nil {
+		return fmt.Errorf("failed to write updated workflow: %w", err)
+	}
+
+	if hasConflicts {
+		fmt.Fprintln(os.Stderr, console.FormatWarningMessage(fmt.Sprintf("Updated %s from %s to %s with CONFLICTS - please review and resolve manually", wf.Name, currentRef, latestRef)))
+		return nil // Not an error, but user needs to resolve conflicts
+	}
+
+	updateLog.Printf("Successfully updated workflow %s from %s to %s", wf.Name, currentRef, latestRef)
+	fmt.Fprintln(os.Stderr, console.FormatSuccessMessage(fmt.Sprintf("Updated %s from %s to %s", wf.Name, currentRef, latestRef)))
+
+	// Compile the updated workflow with refreshStopTime enabled
+	updateLog.Printf("Compiling updated workflow: %s", wf.Name)
+	if err := compileWorkflowWithRefresh(wf.Path, verbose, engineOverride, true); err != nil {
+		updateLog.Printf("Compilation failed for workflow %s: %v", wf.Name, err)
+		return fmt.Errorf("failed to compile updated workflow: %w", err)
+	}
+
+	return nil
+}
diff --git a/pkg/constants/constants.go b/pkg/constants/constants.go
index 89bb9238d6..d1ea7cad2f 100644
--- a/pkg/constants/constants.go
+++ b/pkg/constants/constants.go
@@ -89,7 +89,7 @@ const DefaultCodexVersion Version = "0.73.0"
const DefaultGitHubMCPServerVersion Version = "v0.25.0"
// DefaultFirewallVersion is the default version of the gh-aw-firewall (AWF) binary
-const DefaultFirewallVersion Version = "v0.6.0"
+const DefaultFirewallVersion Version = "v0.7.0"
// DefaultPlaywrightMCPVersion is the default version of the @playwright/mcp package
const DefaultPlaywrightMCPVersion Version = "0.0.52"
diff --git a/pkg/constants/constants_test.go b/pkg/constants/constants_test.go
index 7790b1aaba..7b88fbe5eb 100644
--- a/pkg/constants/constants_test.go
+++ b/pkg/constants/constants_test.go
@@ -249,7 +249,7 @@ func TestVersionConstants(t *testing.T) {
{"DefaultCopilotVersion", DefaultCopilotVersion, "0.0.369"},
{"DefaultCodexVersion", DefaultCodexVersion, "0.73.0"},
{"DefaultGitHubMCPServerVersion", DefaultGitHubMCPServerVersion, "v0.25.0"},
- {"DefaultFirewallVersion", DefaultFirewallVersion, "v0.6.0"},
+ {"DefaultFirewallVersion", DefaultFirewallVersion, "v0.7.0"},
{"DefaultPlaywrightMCPVersion", DefaultPlaywrightMCPVersion, "0.0.52"},
{"DefaultPlaywrightBrowserVersion", DefaultPlaywrightBrowserVersion, "v1.57.0"},
{"DefaultBunVersion", DefaultBunVersion, "1.1"},
diff --git a/pkg/parser/schemas/included_file_schema.json b/pkg/parser/schemas/included_file_schema.json
index 8c77641ce5..ca317a40c3 100644
--- a/pkg/parser/schemas/included_file_schema.json
+++ b/pkg/parser/schemas/included_file_schema.json
@@ -592,9 +592,11 @@
"properties": {
"mode": {
"type": "string",
- "enum": ["http", "stdio"],
+ "enum": ["http"],
"default": "http",
- "description": "Transport mode for the safe-inputs MCP server. 'http' starts the server as a separate step (default), 'stdio' starts the server directly by the agent within the firewall."
+ "description": "Deprecated: Transport mode for the safe-inputs MCP server. This field is ignored as only 'http' mode is supported. The server always starts as a separate step.",
+ "deprecated": true,
+ "x-deprecation-message": "The mode field is no longer used. Safe-inputs always uses HTTP transport."
}
}
},
diff --git a/pkg/parser/schemas/main_workflow_schema.json b/pkg/parser/schemas/main_workflow_schema.json
index ec1e21f25b..31612322f5 100644
--- a/pkg/parser/schemas/main_workflow_schema.json
+++ b/pkg/parser/schemas/main_workflow_schema.json
@@ -4828,9 +4828,11 @@
"properties": {
"mode": {
"type": "string",
- "enum": ["http", "stdio"],
+ "enum": ["http"],
"default": "http",
- "description": "Transport mode for the safe-inputs MCP server. 'http' starts the server as a separate step (default), 'stdio' starts the server directly by the agent within the firewall."
+ "description": "Deprecated: Transport mode for the safe-inputs MCP server. This field is ignored as only 'http' mode is supported. The server always starts as a separate step.",
+ "deprecated": true,
+ "x-deprecation-message": "The mode field is no longer used. Safe-inputs always uses HTTP transport."
}
}
},
diff --git a/pkg/workflow/aw_info_steps_test.go b/pkg/workflow/aw_info_steps_test.go
index bee2ddb53c..72bfcdd063 100644
--- a/pkg/workflow/aw_info_steps_test.go
+++ b/pkg/workflow/aw_info_steps_test.go
@@ -47,6 +47,7 @@ permissions:
engine: copilot
network:
firewall: false
+strict: false
---
# Test firewall disabled
diff --git a/pkg/workflow/compiler_parse.go b/pkg/workflow/compiler_parse.go
index 54896af019..476430b21c 100644
--- a/pkg/workflow/compiler_parse.go
+++ b/pkg/workflow/compiler_parse.go
@@ -211,14 +211,9 @@ func (c *Compiler) ParseWorkflowFile(markdownPath string) (*WorkflowData, error)
// (unless SRT sandbox is configured, since AWF and SRT are mutually exclusive)
enableFirewallByDefaultForCopilot(engineSetting, networkPermissions, sandboxConfig)
- // Validate firewall is enabled in strict mode for copilot with network restrictions
- if err := c.validateStrictFirewall(engineSetting, networkPermissions, sandboxConfig); err != nil {
- return nil, err
- }
-
- // Save the initial strict mode state again for network support check
+ // Re-evaluate strict mode for firewall and network validation
// (it was restored after validateStrictMode but we need it again)
- initialStrictModeForNetwork := c.strictMode
+ initialStrictModeForFirewall := c.strictMode
if !c.strictMode {
// CLI flag not set, check frontmatter
if strictValue, exists := result.Frontmatter["strict"]; exists {
@@ -232,15 +227,21 @@ func (c *Compiler) ParseWorkflowFile(markdownPath string) (*WorkflowData, error)
}
}
+ // Validate firewall is enabled in strict mode for copilot with network restrictions
+ if err := c.validateStrictFirewall(engineSetting, networkPermissions, sandboxConfig); err != nil {
+ c.strictMode = initialStrictModeForFirewall
+ return nil, err
+ }
+
// Check if the engine supports network restrictions when they are defined
if err := c.checkNetworkSupport(agenticEngine, networkPermissions); err != nil {
// Restore strict mode before returning error
- c.strictMode = initialStrictModeForNetwork
+ c.strictMode = initialStrictModeForFirewall
return nil, err
}
// Restore the strict mode state after network check
- c.strictMode = initialStrictModeForNetwork
+ c.strictMode = initialStrictModeForFirewall
log.Print("Processing tools and includes...")
diff --git a/pkg/workflow/firewall_default_enablement_test.go b/pkg/workflow/firewall_default_enablement_test.go
index 1d32765a28..842d65c8ec 100644
--- a/pkg/workflow/firewall_default_enablement_test.go
+++ b/pkg/workflow/firewall_default_enablement_test.go
@@ -402,7 +402,7 @@ func TestStrictModeFirewallValidation(t *testing.T) {
}
})
- t.Run("strict mode skips validation when sandbox.agent is false", func(t *testing.T) {
+ t.Run("strict mode refuses sandbox.agent: false for copilot", func(t *testing.T) {
compiler := NewCompiler(false, "", "test")
compiler.SetStrictMode(true)
@@ -419,8 +419,39 @@ func TestStrictModeFirewallValidation(t *testing.T) {
}
err := compiler.validateStrictFirewall("copilot", networkPerms, sandboxConfig)
- if err != nil {
- t.Errorf("Expected no error when sandbox.agent is false, got: %v", err)
+ if err == nil {
+ t.Fatal("Expected error when sandbox.agent is false in strict mode for copilot")
+ }
+ expectedMsg := "sandbox.agent: false"
+ if !strings.Contains(err.Error(), expectedMsg) {
+ t.Errorf("Expected error message to contain '%s', got: %v", expectedMsg, err)
+ }
+ })
+
+ t.Run("strict mode refuses sandbox.agent: false for all engines", func(t *testing.T) {
+ compiler := NewCompiler(false, "", "test")
+ compiler.SetStrictMode(true)
+
+ networkPerms := &NetworkPermissions{
+ Allowed: []string{"example.com"},
+ ExplicitlyDefined: true,
+ Firewall: nil,
+ }
+
+ sandboxConfig := &SandboxConfig{
+ Agent: &AgentSandboxConfig{
+ Disabled: true,
+ },
+ }
+
+ // All engines should refuse sandbox.agent: false in strict mode
+ err := compiler.validateStrictFirewall("claude", networkPerms, sandboxConfig)
+ if err == nil {
+ t.Fatal("Expected error for non-copilot engine with sandbox.agent: false in strict mode")
+ }
+ expectedMsg := "sandbox.agent: false"
+ if !strings.Contains(err.Error(), expectedMsg) {
+ t.Errorf("Expected error message to contain '%s', got: %v", expectedMsg, err)
}
})
@@ -459,4 +490,26 @@ func TestStrictModeFirewallValidation(t *testing.T) {
t.Errorf("Expected no error in non-strict mode, got: %v", err)
}
})
+
+ t.Run("non-strict mode allows sandbox.agent: false for copilot", func(t *testing.T) {
+ compiler := NewCompiler(false, "", "test")
+ compiler.SetStrictMode(false)
+
+ networkPerms := &NetworkPermissions{
+ Allowed: []string{"example.com"},
+ ExplicitlyDefined: true,
+ Firewall: nil,
+ }
+
+ sandboxConfig := &SandboxConfig{
+ Agent: &AgentSandboxConfig{
+ Disabled: true,
+ },
+ }
+
+ err := compiler.validateStrictFirewall("copilot", networkPerms, sandboxConfig)
+ if err != nil {
+ t.Errorf("Expected no error in non-strict mode with sandbox.agent: false, got: %v", err)
+ }
+ })
}
diff --git a/pkg/workflow/mcp_renderer.go b/pkg/workflow/mcp_renderer.go
index dd49128cf0..e5e8e50f1c 100644
--- a/pkg/workflow/mcp_renderer.go
+++ b/pkg/workflow/mcp_renderer.go
@@ -222,7 +222,7 @@ func (r *MCPConfigRendererUnified) RenderSafeInputsMCP(yaml *strings.Builder, sa
}
// renderSafeInputsTOML generates Safe Inputs MCP configuration in TOML format
-// Uses HTTP transport for consistency with JSON format (Copilot/Claude)
+// Uses HTTP transport exclusively
func (r *MCPConfigRendererUnified) renderSafeInputsTOML(yaml *strings.Builder, safeInputs *SafeInputsConfig) {
envVars := getSafeInputsEnvVars(safeInputs)
diff --git a/pkg/workflow/mcp_servers.go b/pkg/workflow/mcp_servers.go
index 4ba1385e97..d2fb3805fa 100644
--- a/pkg/workflow/mcp_servers.go
+++ b/pkg/workflow/mcp_servers.go
@@ -503,46 +503,43 @@ func (c *Compiler) generateMCPSetup(yaml *strings.Builder, tools map[string]any,
}
yaml.WriteString(" \n")
- // Steps 3-4: Generate API key and start HTTP server (only for HTTP mode)
- if IsSafeInputsHTTPMode(workflowData.SafeInputs) {
- // Step 3: Generate API key and choose port for HTTP server using JavaScript
- yaml.WriteString(" - name: Generate Safe Inputs MCP Server Config\n")
- yaml.WriteString(" id: safe-inputs-config\n")
- yaml.WriteString(" uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7.0.1\n")
- yaml.WriteString(" with:\n")
- yaml.WriteString(" script: |\n")
-
- // Get the bundled script
- configScript := getGenerateSafeInputsConfigScript()
- for _, line := range FormatJavaScriptForYAML(configScript) {
- yaml.WriteString(line)
- }
- yaml.WriteString(" \n")
- yaml.WriteString(" // Execute the function\n")
- yaml.WriteString(" const crypto = require('crypto');\n")
- yaml.WriteString(" generateSafeInputsConfig({ core, crypto });\n")
- yaml.WriteString(" \n")
-
- // Step 4: Start the HTTP server in the background
- yaml.WriteString(" - name: Start Safe Inputs MCP HTTP Server\n")
- yaml.WriteString(" id: safe-inputs-start\n")
- yaml.WriteString(" run: |\n")
- yaml.WriteString(" # Set environment variables for the server\n")
- yaml.WriteString(" export GH_AW_SAFE_INPUTS_PORT=${{ steps.safe-inputs-config.outputs.safe_inputs_port }}\n")
- yaml.WriteString(" export GH_AW_SAFE_INPUTS_API_KEY=${{ steps.safe-inputs-config.outputs.safe_inputs_api_key }}\n")
- yaml.WriteString(" \n")
-
- // Pass through environment variables from safe-inputs config
- envVars := getSafeInputsEnvVars(workflowData.SafeInputs)
- for _, envVar := range envVars {
- yaml.WriteString(fmt.Sprintf(" export %s=\"${%s}\"\n", envVar, envVar))
- }
- yaml.WriteString(" \n")
+ // Step 3: Generate API key and choose port for HTTP server using JavaScript
+ yaml.WriteString(" - name: Generate Safe Inputs MCP Server Config\n")
+ yaml.WriteString(" id: safe-inputs-config\n")
+ yaml.WriteString(" uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7.0.1\n")
+ yaml.WriteString(" with:\n")
+ yaml.WriteString(" script: |\n")
+
+ // Get the bundled script
+ configScript := getGenerateSafeInputsConfigScript()
+ for _, line := range FormatJavaScriptForYAML(configScript) {
+ yaml.WriteString(line)
+ }
+ yaml.WriteString(" \n")
+ yaml.WriteString(" // Execute the function\n")
+ yaml.WriteString(" const crypto = require('crypto');\n")
+ yaml.WriteString(" generateSafeInputsConfig({ core, crypto });\n")
+ yaml.WriteString(" \n")
- // Use the embedded shell script to start the server
- WriteShellScriptToYAML(yaml, startSafeInputsServerScript, " ")
- yaml.WriteString(" \n")
+ // Step 4: Start the HTTP server in the background
+ yaml.WriteString(" - name: Start Safe Inputs MCP HTTP Server\n")
+ yaml.WriteString(" id: safe-inputs-start\n")
+ yaml.WriteString(" run: |\n")
+ yaml.WriteString(" # Set environment variables for the server\n")
+ yaml.WriteString(" export GH_AW_SAFE_INPUTS_PORT=${{ steps.safe-inputs-config.outputs.safe_inputs_port }}\n")
+ yaml.WriteString(" export GH_AW_SAFE_INPUTS_API_KEY=${{ steps.safe-inputs-config.outputs.safe_inputs_api_key }}\n")
+ yaml.WriteString(" \n")
+
+ // Pass through environment variables from safe-inputs config
+ envVars := getSafeInputsEnvVars(workflowData.SafeInputs)
+ for _, envVar := range envVars {
+ yaml.WriteString(fmt.Sprintf(" export %s=\"${%s}\"\n", envVar, envVar))
}
+ yaml.WriteString(" \n")
+
+ // Use the embedded shell script to start the server
+ WriteShellScriptToYAML(yaml, startSafeInputsServerScript, " ")
+ yaml.WriteString(" \n")
}
// Use the engine's RenderMCPConfig method
@@ -612,13 +609,11 @@ func (c *Compiler) generateMCPSetup(yaml *strings.Builder, tools map[string]any,
// Add safe-inputs env vars if present
if hasSafeInputs {
- // Add server configuration env vars from step outputs (HTTP mode only)
- if IsSafeInputsHTTPMode(workflowData.SafeInputs) {
- yaml.WriteString(" GH_AW_SAFE_INPUTS_PORT: ${{ steps.safe-inputs-start.outputs.port }}\n")
- yaml.WriteString(" GH_AW_SAFE_INPUTS_API_KEY: ${{ steps.safe-inputs-start.outputs.api_key }}\n")
- }
+ // Add server configuration env vars from step outputs
+ yaml.WriteString(" GH_AW_SAFE_INPUTS_PORT: ${{ steps.safe-inputs-start.outputs.port }}\n")
+ yaml.WriteString(" GH_AW_SAFE_INPUTS_API_KEY: ${{ steps.safe-inputs-start.outputs.api_key }}\n")
- // Add tool-specific env vars (secrets passthrough) - needed for both modes
+ // Add tool-specific env vars (secrets passthrough)
safeInputsSecrets := collectSafeInputsSecrets(workflowData.SafeInputs)
if len(safeInputsSecrets) > 0 {
// Sort env var names for consistent output
diff --git a/pkg/workflow/safe_inputs.go b/pkg/workflow/safe_inputs.go
index 00231f37d6..e5c1061224 100644
--- a/pkg/workflow/safe_inputs.go
+++ b/pkg/workflow/safe_inputs.go
@@ -59,8 +59,7 @@ type SafeInputParam struct {
// SafeInputsMode constants define the available transport modes
const (
- SafeInputsModeHTTP = "http"
- SafeInputsModeStdio = "stdio"
+ SafeInputsModeHTTP = "http"
)
// HasSafeInputs checks if safe-inputs are configured
@@ -68,14 +67,10 @@ func HasSafeInputs(safeInputs *SafeInputsConfig) bool {
return safeInputs != nil && len(safeInputs.Tools) > 0
}
-// IsSafeInputsStdioMode checks if safe-inputs is configured to use stdio mode
-func IsSafeInputsStdioMode(safeInputs *SafeInputsConfig) bool {
- return safeInputs != nil && safeInputs.Mode == SafeInputsModeStdio
-}
-
// IsSafeInputsHTTPMode checks if safe-inputs is configured to use HTTP mode
+// Note: All safe-inputs configurations now use HTTP mode exclusively
func IsSafeInputsHTTPMode(safeInputs *SafeInputsConfig) bool {
- return safeInputs != nil && (safeInputs.Mode == SafeInputsModeHTTP || safeInputs.Mode == "")
+ return safeInputs != nil
}
// IsSafeInputsEnabled checks if safe-inputs are configured.
@@ -90,19 +85,12 @@ func IsSafeInputsEnabled(safeInputs *SafeInputsConfig, workflowData *WorkflowDat
// Returns the config and a boolean indicating whether any tools were found.
func parseSafeInputsMap(safeInputsMap map[string]any) (*SafeInputsConfig, bool) {
config := &SafeInputsConfig{
- Mode: "http", // Default to HTTP mode
+ Mode: "http", // Only HTTP mode is supported
Tools: make(map[string]*SafeInputToolConfig),
}
- // Parse mode if specified (optional field)
- if mode, exists := safeInputsMap["mode"]; exists {
- if modeStr, ok := mode.(string); ok {
- // Validate mode value
- if modeStr == "stdio" || modeStr == "http" {
- config.Mode = modeStr
- }
- }
- }
+ // Mode field is ignored - only HTTP mode is supported
+ // All safe-inputs configurations use HTTP transport
for toolName, toolValue := range safeInputsMap {
// Skip the "mode" field as it's not a tool definition
@@ -387,37 +375,12 @@ func generateSafeInputsToolsConfig(safeInputs *SafeInputsConfig) string {
}
// generateSafeInputsMCPServerScript generates the entry point script for the safe-inputs MCP server
-// This script chooses the transport based on mode: HTTP or stdio
+// This script uses HTTP transport exclusively
func generateSafeInputsMCPServerScript(safeInputs *SafeInputsConfig) string {
var sb strings.Builder
- if IsSafeInputsStdioMode(safeInputs) {
- // Stdio transport - server started by agent
- sb.WriteString(`// @ts-check
-// Auto-generated safe-inputs MCP server entry point (stdio transport)
-// This script uses the reusable safe_inputs_mcp_server module with stdio transport
-
-const path = require("path");
-const { startSafeInputsServer } = require("./safe_inputs_mcp_server.cjs");
-
-// Configuration file path (generated alongside this script)
-const configPath = path.join(__dirname, "tools.json");
-
-// Start the stdio server
-// Note: skipCleanup is true for stdio mode to allow agent restarts
-try {
- startSafeInputsServer(configPath, {
- logDir: "/tmp/gh-aw/safe-inputs/logs",
- skipCleanup: true
- });
-} catch (error) {
- console.error("Failed to start safe-inputs stdio server:", error);
- process.exit(1);
-}
-`)
- } else {
- // HTTP transport - server started in separate step
- sb.WriteString(`// @ts-check
+ // HTTP transport - server started in separate step
+ sb.WriteString(`// @ts-check
// Auto-generated safe-inputs MCP server entry point (HTTP transport)
// This script uses the reusable safe_inputs_mcp_server_http module
@@ -441,7 +404,6 @@ startHttpServer(configPath, {
process.exit(1);
});
`)
- }
return sb.String()
}
@@ -633,105 +595,65 @@ func collectSafeInputsSecrets(safeInputs *SafeInputsConfig) map[string]string {
}
// renderSafeInputsMCPConfigWithOptions generates the Safe Inputs MCP server configuration with engine-specific options
-// Supports both HTTP and stdio transport modes
+// Only supports HTTP transport mode
func renderSafeInputsMCPConfigWithOptions(yaml *strings.Builder, safeInputs *SafeInputsConfig, isLast bool, includeCopilotFields bool) {
envVars := getSafeInputsEnvVars(safeInputs)
yaml.WriteString(" \"" + constants.SafeInputsMCPServerID + "\": {\n")
- // Choose transport based on mode
- if IsSafeInputsStdioMode(safeInputs) {
- // Stdio transport configuration - server started by agent
- // Use "local" for Copilot CLI, "stdio" for other engines
- typeValue := "stdio"
- if includeCopilotFields {
- typeValue = "local"
- }
- yaml.WriteString(" \"type\": \"" + typeValue + "\",\n")
- yaml.WriteString(" \"command\": \"node\",\n")
- yaml.WriteString(" \"args\": [\"/tmp/gh-aw/safe-inputs/mcp-server.cjs\"],\n")
-
- // Add tools field for Copilot
- if includeCopilotFields {
- yaml.WriteString(" \"tools\": [\"*\"],\n")
- }
+ // HTTP transport configuration - server started in separate step
+ // Add type field for HTTP (required by MCP specification for HTTP transport)
+ yaml.WriteString(" \"type\": \"http\",\n")
- // Add env block for environment variable passthrough
- yaml.WriteString(" \"env\": {\n")
+ // HTTP URL using environment variable
+ // Use host.docker.internal to allow access from firewall container
+ if includeCopilotFields {
+ // Copilot format: backslash-escaped shell variable reference
+ yaml.WriteString(" \"url\": \"http://host.docker.internal:\\${GH_AW_SAFE_INPUTS_PORT}\",\n")
+ } else {
+ // Claude/Custom format: direct shell variable reference
+ yaml.WriteString(" \"url\": \"http://host.docker.internal:$GH_AW_SAFE_INPUTS_PORT\",\n")
+ }
- // Write environment variables with appropriate escaping
- for i, envVar := range envVars {
- isLastEnvVar := i == len(envVars)-1
- comma := ""
- if !isLastEnvVar {
- comma = ","
- }
+ // Add Authorization header with API key
+ yaml.WriteString(" \"headers\": {\n")
+ if includeCopilotFields {
+ // Copilot format: backslash-escaped shell variable reference
+ yaml.WriteString(" \"Authorization\": \"Bearer \\${GH_AW_SAFE_INPUTS_API_KEY}\"\n")
+ } else {
+ // Claude/Custom format: direct shell variable reference
+ yaml.WriteString(" \"Authorization\": \"Bearer $GH_AW_SAFE_INPUTS_API_KEY\"\n")
+ }
+ yaml.WriteString(" },\n")
- if includeCopilotFields {
- // Copilot format: backslash-escaped shell variable reference
- yaml.WriteString(" \"" + envVar + "\": \"\\${" + envVar + "}\"" + comma + "\n")
- } else {
- // Claude/Custom format: direct shell variable reference
- yaml.WriteString(" \"" + envVar + "\": \"$" + envVar + "\"" + comma + "\n")
- }
- }
+ // Add tools field for Copilot
+ if includeCopilotFields {
+ yaml.WriteString(" \"tools\": [\"*\"],\n")
+ }
- yaml.WriteString(" }\n")
- } else {
- // HTTP transport configuration - server started in separate step
- // Add type field for HTTP (required by MCP specification for HTTP transport)
- yaml.WriteString(" \"type\": \"http\",\n")
+ // Add env block for environment variable passthrough
+ envVarsWithServerConfig := append([]string{"GH_AW_SAFE_INPUTS_PORT", "GH_AW_SAFE_INPUTS_API_KEY"}, envVars...)
+ yaml.WriteString(" \"env\": {\n")
- // HTTP URL using environment variable
- // Use host.docker.internal to allow access from firewall container
- if includeCopilotFields {
- // Copilot format: backslash-escaped shell variable reference
- yaml.WriteString(" \"url\": \"http://host.docker.internal:\\${GH_AW_SAFE_INPUTS_PORT}\",\n")
- } else {
- // Claude/Custom format: direct shell variable reference
- yaml.WriteString(" \"url\": \"http://host.docker.internal:$GH_AW_SAFE_INPUTS_PORT\",\n")
+ // Write environment variables with appropriate escaping
+ for i, envVar := range envVarsWithServerConfig {
+ isLastEnvVar := i == len(envVarsWithServerConfig)-1
+ comma := ""
+ if !isLastEnvVar {
+ comma = ","
}
- // Add Authorization header with API key
- yaml.WriteString(" \"headers\": {\n")
if includeCopilotFields {
// Copilot format: backslash-escaped shell variable reference
- yaml.WriteString(" \"Authorization\": \"Bearer \\${GH_AW_SAFE_INPUTS_API_KEY}\"\n")
+ yaml.WriteString(" \"" + envVar + "\": \"\\${" + envVar + "}\"" + comma + "\n")
} else {
// Claude/Custom format: direct shell variable reference
- yaml.WriteString(" \"Authorization\": \"Bearer $GH_AW_SAFE_INPUTS_API_KEY\"\n")
- }
- yaml.WriteString(" },\n")
-
- // Add tools field for Copilot
- if includeCopilotFields {
- yaml.WriteString(" \"tools\": [\"*\"],\n")
+ yaml.WriteString(" \"" + envVar + "\": \"$" + envVar + "\"" + comma + "\n")
}
-
- // Add env block for environment variable passthrough
- envVarsWithServerConfig := append([]string{"GH_AW_SAFE_INPUTS_PORT", "GH_AW_SAFE_INPUTS_API_KEY"}, envVars...)
- yaml.WriteString(" \"env\": {\n")
-
- // Write environment variables with appropriate escaping
- for i, envVar := range envVarsWithServerConfig {
- isLastEnvVar := i == len(envVarsWithServerConfig)-1
- comma := ""
- if !isLastEnvVar {
- comma = ","
- }
-
- if includeCopilotFields {
- // Copilot format: backslash-escaped shell variable reference
- yaml.WriteString(" \"" + envVar + "\": \"\\${" + envVar + "}\"" + comma + "\n")
- } else {
- // Claude/Custom format: direct shell variable reference
- yaml.WriteString(" \"" + envVar + "\": \"$" + envVar + "\"" + comma + "\n")
- }
- }
-
- yaml.WriteString(" }\n")
}
+ yaml.WriteString(" }\n")
+
if isLast {
yaml.WriteString(" }\n")
} else {
@@ -753,21 +675,15 @@ func (c *Compiler) mergeSafeInputs(main *SafeInputsConfig, importedConfigs []str
continue
}
- // Parse the imported JSON config
+ // Merge the imported JSON config
var importedMap map[string]any
if err := json.Unmarshal([]byte(configJSON), &importedMap); err != nil {
safeInputsLog.Printf("Warning: failed to parse imported safe-inputs config: %v", err)
continue
}
- // Merge mode if present in imported config and not set in main
- if mode, exists := importedMap["mode"]; exists && main.Mode == "http" {
- if modeStr, ok := mode.(string); ok {
- if modeStr == "stdio" || modeStr == "http" {
- main.Mode = modeStr
- }
- }
- }
+ // Mode field is ignored - only HTTP mode is supported
+ // All safe-inputs configurations use HTTP transport
// Merge each tool from the imported config
for toolName, toolValue := range importedMap {
diff --git a/pkg/workflow/safe_inputs_mode_test.go b/pkg/workflow/safe_inputs_mode_test.go
index 947095ad7d..9fb2d60b3b 100644
--- a/pkg/workflow/safe_inputs_mode_test.go
+++ b/pkg/workflow/safe_inputs_mode_test.go
@@ -7,105 +7,6 @@ import (
"testing"
)
-// TestSafeInputsStdioMode verifies that stdio mode generates correct configuration
-func TestSafeInputsStdioMode(t *testing.T) {
- // Create a temporary workflow file
- tempDir := t.TempDir()
- workflowPath := filepath.Join(tempDir, "test-workflow.md")
-
- workflowContent := `---
-on: workflow_dispatch
-engine: copilot
-safe-inputs:
- mode: stdio
- test-tool:
- description: Test tool
- script: |
- return { result: "test" };
----
-
-Test safe-inputs stdio mode
-`
-
- err := os.WriteFile(workflowPath, []byte(workflowContent), 0644)
- if err != nil {
- t.Fatalf("Failed to write workflow file: %v", err)
- }
-
- // Compile the workflow
- compiler := NewCompiler(false, "", "test")
- err = compiler.CompileWorkflow(workflowPath)
- if err != nil {
- t.Fatalf("Failed to compile workflow: %v", err)
- }
-
- // Read the generated lock file
- lockPath := strings.TrimSuffix(workflowPath, ".md") + ".lock.yml"
- lockContent, err := os.ReadFile(lockPath)
- if err != nil {
- t.Fatalf("Failed to read lock file: %v", err)
- }
-
- yamlStr := string(lockContent)
-
- // Verify that HTTP server startup steps are NOT present
- unexpectedSteps := []string{
- "Generate Safe Inputs MCP Server Config",
- "Start Safe Inputs MCP HTTP Server",
- }
-
- for _, stepName := range unexpectedSteps {
- if strings.Contains(yamlStr, stepName) {
- t.Errorf("Unexpected HTTP server step found in stdio mode: %q", stepName)
- }
- }
-
- // Verify stdio configuration in MCP setup
- if !strings.Contains(yamlStr, `"safeinputs"`) {
- t.Error("Safe-inputs MCP server config not found")
- }
-
- // Should use local transport for Copilot (stdio is converted to local for Copilot CLI compatibility)
- if !strings.Contains(yamlStr, `"type": "local"`) {
- t.Error("Expected type field set to 'local' in MCP config for Copilot engine")
- }
-
- if !strings.Contains(yamlStr, `"command": "node"`) {
- t.Error("Expected command field in stdio config")
- }
-
- if !strings.Contains(yamlStr, `"/tmp/gh-aw/safe-inputs/mcp-server.cjs"`) {
- t.Error("Expected mcp-server.cjs in args for stdio mode")
- }
-
- // Should NOT have HTTP-specific fields
- safeinputsConfig := extractSafeinputsConfigSection(yamlStr)
- if strings.Contains(safeinputsConfig, `"url"`) {
- t.Error("Stdio mode should not have URL field")
- }
-
- if strings.Contains(safeinputsConfig, `"headers"`) {
- t.Error("Stdio mode should not have headers field")
- }
-
- // Verify the entry point script uses stdio
- if !strings.Contains(yamlStr, "startSafeInputsServer") {
- t.Error("Expected stdio entry point to use startSafeInputsServer")
- }
-
- // Check the actual mcp-server.cjs entry point uses stdio server
- entryPointSection := extractMCPServerEntryPoint(yamlStr)
- if !strings.Contains(entryPointSection, "startSafeInputsServer(configPath") {
- t.Error("Entry point should call startSafeInputsServer for stdio mode")
- }
-
- if strings.Contains(entryPointSection, "startHttpServer") {
- t.Error("Stdio mode entry point should not call startHttpServer")
- }
-
- t.Logf("✓ Stdio mode correctly configured without HTTP server steps")
-}
-
// TestSafeInputsHTTPMode verifies that HTTP mode generates correct configuration
func TestSafeInputsHTTPMode(t *testing.T) {
testCases := []struct {
@@ -208,166 +109,11 @@ Test safe-inputs HTTP mode
t.Error("Entry point should call startHttpServer for HTTP mode")
}
- if strings.Contains(entryPointSection, "startSafeInputsServer(configPath") {
- t.Error("HTTP mode entry point should not call startSafeInputsServer")
- }
-
t.Logf("✓ HTTP mode correctly configured with HTTP server steps")
})
}
}
-// TestSafeInputsModeInImport verifies that mode can be set via imports
-func TestSafeInputsModeInImport(t *testing.T) {
- // Create a temporary directory structure
- tempDir := t.TempDir()
- sharedDir := filepath.Join(tempDir, "shared")
- err := os.Mkdir(sharedDir, 0755)
- if err != nil {
- t.Fatalf("Failed to create shared directory: %v", err)
- }
-
- // Create import file with stdio mode
- importPath := filepath.Join(sharedDir, "tool.md")
- importContent := `---
-safe-inputs:
- mode: stdio
- imported-tool:
- description: Imported tool
- script: |
- return { result: "imported" };
----
-
-Imported tool
-`
-
- err = os.WriteFile(importPath, []byte(importContent), 0644)
- if err != nil {
- t.Fatalf("Failed to write import file: %v", err)
- }
-
- // Create main workflow that imports the tool
- workflowPath := filepath.Join(tempDir, "workflow.md")
- workflowContent := `---
-on: workflow_dispatch
-engine: copilot
-imports:
- - shared/tool.md
----
-
-Test mode via import
-`
-
- err = os.WriteFile(workflowPath, []byte(workflowContent), 0644)
- if err != nil {
- t.Fatalf("Failed to write workflow file: %v", err)
- }
-
- // Compile the workflow
- compiler := NewCompiler(false, "", "test")
- err = compiler.CompileWorkflow(workflowPath)
- if err != nil {
- t.Fatalf("Failed to compile workflow: %v", err)
- }
-
- // Read the generated lock file
- lockPath := strings.TrimSuffix(workflowPath, ".md") + ".lock.yml"
- lockContent, err := os.ReadFile(lockPath)
- if err != nil {
- t.Fatalf("Failed to read lock file: %v", err)
- }
-
- yamlStr := string(lockContent)
-
- // Verify local mode is used from import (converted from stdio for Copilot CLI)
- if !strings.Contains(yamlStr, `"type": "local"`) {
- t.Error("Expected local mode (converted from stdio) from imported configuration for Copilot engine")
- }
-
- // Verify HTTP server steps are NOT present
- if strings.Contains(yamlStr, "Start Safe Inputs MCP HTTP Server") {
- t.Error("Should not have HTTP server step when mode is stdio via import")
- }
-
- t.Logf("✓ Mode correctly inherited from import")
-}
-
-// TestSafeInputsStdioModeWithClaudeEngine verifies that stdio mode with Claude engine uses "stdio" not "local"
-func TestSafeInputsStdioModeWithClaudeEngine(t *testing.T) {
- // Create a temporary workflow file
- tempDir := t.TempDir()
- workflowPath := filepath.Join(tempDir, "test-workflow.md")
-
- workflowContent := `---
-on: workflow_dispatch
-engine: claude
-safe-inputs:
- mode: stdio
- test-tool:
- description: Test tool
- script: |
- return { result: "test" };
----
-
-Test safe-inputs stdio mode with Claude engine
-`
-
- err := os.WriteFile(workflowPath, []byte(workflowContent), 0644)
- if err != nil {
- t.Fatalf("Failed to write workflow file: %v", err)
- }
-
- // Compile the workflow
- compiler := NewCompiler(false, "", "test")
- err = compiler.CompileWorkflow(workflowPath)
- if err != nil {
- t.Fatalf("Failed to compile workflow: %v", err)
- }
-
- // Read the generated lock file
- lockPath := strings.TrimSuffix(workflowPath, ".md") + ".lock.yml"
- lockContent, err := os.ReadFile(lockPath)
- if err != nil {
- t.Fatalf("Failed to read lock file: %v", err)
- }
-
- yamlStr := string(lockContent)
-
- // Verify that type field is "stdio" for Claude engine (not converted to "local")
- if !strings.Contains(yamlStr, `"type": "stdio"`) {
- t.Error("Expected type field set to 'stdio' in MCP config for Claude engine")
- }
-
- // Verify "local" is NOT used
- safeinputsConfig := extractSafeinputsConfigSection(yamlStr)
- if strings.Contains(safeinputsConfig, `"type": "local"`) {
- t.Error("Claude engine should use 'stdio' type, not 'local'")
- }
-
- t.Logf("✓ Stdio mode correctly uses 'stdio' type for Claude engine")
-}
-
-// extractSafeinputsConfigSection extracts the safeinputs configuration section from the YAML
-func extractSafeinputsConfigSection(yamlStr string) string {
- start := strings.Index(yamlStr, `"safeinputs"`)
- if start == -1 {
- return ""
- }
-
- // Find the closing brace for the safeinputs object
- // This is a simple heuristic - we look for the next server or closing brace
- end := strings.Index(yamlStr[start:], `},`)
- if end == -1 {
- end = strings.Index(yamlStr[start:], `}`)
- }
-
- if end == -1 {
- return yamlStr[start:]
- }
-
- return yamlStr[start : start+end+1]
-}
-
// extractMCPServerEntryPoint extracts the mcp-server.cjs entry point script from the YAML
func extractMCPServerEntryPoint(yamlStr string) string {
// Find the mcp-server.cjs section
diff --git a/pkg/workflow/sandbox_agent_false_test.go b/pkg/workflow/sandbox_agent_false_test.go
index db1cb9d9ed..96a0180626 100644
--- a/pkg/workflow/sandbox_agent_false_test.go
+++ b/pkg/workflow/sandbox_agent_false_test.go
@@ -20,6 +20,7 @@ network:
- github.com
sandbox:
agent: false
+strict: false
on: workflow_dispatch
---
@@ -118,6 +119,7 @@ network:
- github.com
sandbox:
agent: false
+strict: false
on: workflow_dispatch
---
diff --git a/pkg/workflow/strict_mode_validation.go b/pkg/workflow/strict_mode_validation.go
index a068077d26..d743f22a0e 100644
--- a/pkg/workflow/strict_mode_validation.go
+++ b/pkg/workflow/strict_mode_validation.go
@@ -214,7 +214,7 @@ func (c *Compiler) validateStrictMode(frontmatter map[string]any, networkPermiss
return nil
}
-// validateStrictFirewall requires firewall to be enabled in strict mode for copilot engine
+// validateStrictFirewall requires firewall to be enabled in strict mode
// when network domains are provided (non-wildcard)
func (c *Compiler) validateStrictFirewall(engineID string, networkPermissions *NetworkPermissions, sandboxConfig *SandboxConfig) error {
if !c.strictMode {
@@ -222,7 +222,14 @@ func (c *Compiler) validateStrictFirewall(engineID string, networkPermissions *N
return nil
}
- // Only apply to copilot engine
+ // Check if sandbox.agent: false is set (explicitly disabled)
+ // In strict mode, this is not allowed for any engine as it disables the agent sandbox
+ if sandboxConfig != nil && sandboxConfig.Agent != nil && sandboxConfig.Agent.Disabled {
+ strictModeValidationLog.Printf("sandbox.agent: false is set, refusing in strict mode")
+ return fmt.Errorf("strict mode: 'sandbox.agent: false' is not allowed because it disables the agent sandbox. Remove 'sandbox.agent: false' or set 'strict: false' to disable strict mode. See: https://githubnext.github.io/gh-aw/reference/network/")
+ }
+
+ // Only apply firewall validation to copilot engine
if engineID != "copilot" {
strictModeValidationLog.Printf("Non-copilot engine, skipping firewall validation")
return nil
@@ -245,12 +252,6 @@ func (c *Compiler) validateStrictFirewall(engineID string, networkPermissions *N
}
}
- // Check if sandbox.agent: false is set (explicitly disabled)
- if sandboxConfig != nil && sandboxConfig.Agent != nil && sandboxConfig.Agent.Disabled {
- strictModeValidationLog.Printf("sandbox.agent: false is set, skipping firewall validation")
- return nil
- }
-
// If network permissions don't exist, that's fine (will default to "defaults")
if networkPermissions == nil {
strictModeValidationLog.Printf("No network permissions, skipping firewall validation")