diff --git a/.github/workflows/smoke-codex.lock.yml b/.github/workflows/smoke-codex.lock.yml index cf774288d7..9a6d1c2b17 100644 --- a/.github/workflows/smoke-codex.lock.yml +++ b/.github/workflows/smoke-codex.lock.yml @@ -24,12 +24,10 @@ # Resolved workflow manifest: # Imports: # - shared/gh.md -# - shared/github-queries-safe-input.md -# - shared/mcp/tavily.md # - shared/mood.md # - shared/reporting.md # -# frontmatter-hash: ac29812a15c891a9fb12b2d8e331ba0cbce798cc8d23e5e8b9718a6dfea56e8f +# frontmatter-hash: 3854250bdfed2a509135850e0617de23d37184ebc8395ab1c19e997046734794 name: "Smoke Codex" "on": @@ -106,7 +104,6 @@ jobs: runs-on: ubuntu-latest permissions: contents: read - discussions: read issues: read pull-requests: read env: @@ -593,92 +590,6 @@ jobs: "GH_DEBUG": "GH_DEBUG" }, "timeout": 60 - }, - { - "name": "github-discussion-query", - "description": "Query GitHub discussions with jq filtering support. Without --jq, returns schema and data size info. Use --jq '.' to get all data, or specific jq expressions to filter.", - "inputSchema": { - "properties": { - "jq": { - "description": "jq filter expression to apply to output. If not provided, returns schema info instead of full data.", - "type": "string" - }, - "limit": { - "description": "Maximum number of discussions to fetch (default: 30)", - "type": "number" - }, - "repo": { - "description": "Repository in owner/repo format (defaults to current repository)", - "type": "string" - } - }, - "type": "object" - }, - "handler": "github-discussion-query.sh", - "env": { - "GH_TOKEN": "GH_TOKEN" - }, - "timeout": 60 - }, - { - "name": "github-issue-query", - "description": "Query GitHub issues with jq filtering support. Without --jq, returns schema and data size info. Use --jq '.' to get all data, or specific jq expressions to filter.", - "inputSchema": { - "properties": { - "jq": { - "description": "jq filter expression to apply to output. If not provided, returns schema info instead of full data.", - "type": "string" - }, - "limit": { - "description": "Maximum number of issues to fetch (default: 30)", - "type": "number" - }, - "repo": { - "description": "Repository in owner/repo format (defaults to current repository)", - "type": "string" - }, - "state": { - "description": "Issue state: open, closed, all (default: open)", - "type": "string" - } - }, - "type": "object" - }, - "handler": "github-issue-query.sh", - "env": { - "GH_TOKEN": "GH_TOKEN" - }, - "timeout": 60 - }, - { - "name": "github-pr-query", - "description": "Query GitHub pull requests with jq filtering support. Without --jq, returns schema and data size info. Use --jq '.' to get all data, or specific jq expressions to filter.", - "inputSchema": { - "properties": { - "jq": { - "description": "jq filter expression to apply to output. 
If not provided, returns schema info instead of full data.", - "type": "string" - }, - "limit": { - "description": "Maximum number of PRs to fetch (default: 30)", - "type": "number" - }, - "repo": { - "description": "Repository in owner/repo format (defaults to current repository)", - "type": "string" - }, - "state": { - "description": "PR state: open, closed, merged, all (default: open)", - "type": "string" - } - }, - "type": "object" - }, - "handler": "github-pr-query.sh", - "env": { - "GH_TOKEN": "GH_TOKEN" - }, - "timeout": 60 } ] } @@ -715,311 +626,6 @@ jobs: EOFSH_gh chmod +x /opt/gh-aw/safe-inputs/gh.sh - cat > /opt/gh-aw/safe-inputs/github-discussion-query.sh << 'EOFSH_github-discussion-query' - #!/bin/bash - # Auto-generated safe-input tool: github-discussion-query - # Query GitHub discussions with jq filtering support. Without --jq, returns schema and data size info. Use --jq '.' to get all data, or specific jq expressions to filter. - - set -euo pipefail - - set -e - - # Default values - REPO="${INPUT_REPO:-}" - LIMIT="${INPUT_LIMIT:-30}" - JQ_FILTER="${INPUT_JQ:-}" - - # Parse repository owner and name - if [[ -n "$REPO" ]]; then - OWNER=$(echo "$REPO" | cut -d'/' -f1) - NAME=$(echo "$REPO" | cut -d'/' -f2) - else - # Get current repository from GitHub context - OWNER="${GITHUB_REPOSITORY_OWNER:-}" - NAME=$(echo "${GITHUB_REPOSITORY:-}" | cut -d'/' -f2) - fi - - # Validate owner and name - if [[ -z "$OWNER" || -z "$NAME" ]]; then - echo "Error: Could not determine repository owner and name" >&2 - exit 1 - fi - - # Build GraphQL query for discussions - GRAPHQL_QUERY=$(cat < /opt/gh-aw/safe-inputs/github-issue-query.sh << 'EOFSH_github-issue-query' - #!/bin/bash - # Auto-generated safe-input tool: github-issue-query - # Query GitHub issues with jq filtering support. Without --jq, returns schema and data size info. Use --jq '.' to get all data, or specific jq expressions to filter. - - set -euo pipefail - - set -e - - # Default values - REPO="${INPUT_REPO:-}" - STATE="${INPUT_STATE:-open}" - LIMIT="${INPUT_LIMIT:-30}" - JQ_FILTER="${INPUT_JQ:-}" - - # JSON fields to fetch - JSON_FIELDS="number,title,state,author,createdAt,updatedAt,closedAt,body,labels,assignees,comments,milestone,url" - - # Build and execute gh command - if [[ -n "$REPO" ]]; then - OUTPUT=$(gh issue list --state "$STATE" --limit "$LIMIT" --json "$JSON_FIELDS" --repo "$REPO") - else - OUTPUT=$(gh issue list --state "$STATE" --limit "$LIMIT" --json "$JSON_FIELDS") - fi - - # Apply jq filter if specified - if [[ -n "$JQ_FILTER" ]]; then - jq "$JQ_FILTER" <<< "$OUTPUT" - else - # Return schema and size instead of full data - ITEM_COUNT=$(jq 'length' <<< "$OUTPUT") - DATA_SIZE=${#OUTPUT} - - # Validate values are numeric - if ! [[ "$ITEM_COUNT" =~ ^[0-9]+$ ]]; then - ITEM_COUNT=0 - fi - if ! [[ "$DATA_SIZE" =~ ^[0-9]+$ ]]; then - DATA_SIZE=0 - fi - - cat << EOF - { - "message": "No --jq filter provided. 
Use --jq to filter and retrieve data.", - "item_count": $ITEM_COUNT, - "data_size_bytes": $DATA_SIZE, - "schema": { - "type": "array", - "description": "Array of issue objects", - "item_fields": { - "number": "integer - Issue number", - "title": "string - Issue title", - "state": "string - Issue state (OPEN, CLOSED)", - "author": "object - Author info with login field", - "createdAt": "string - ISO timestamp of creation", - "updatedAt": "string - ISO timestamp of last update", - "closedAt": "string|null - ISO timestamp of close", - "body": "string - Issue body content", - "labels": "array - Array of label objects with name field", - "assignees": "array - Array of assignee objects with login field", - "comments": "object - Comments info with totalCount field", - "milestone": "object|null - Milestone info with title field", - "url": "string - Issue URL" - } - }, - "suggested_queries": [ - {"description": "Get all data", "query": "."}, - {"description": "Get issue numbers and titles", "query": ".[] | {number, title}"}, - {"description": "Get open issues only", "query": ".[] | select(.state == \"OPEN\")"}, - {"description": "Get issues by author", "query": ".[] | select(.author.login == \"USERNAME\")"}, - {"description": "Get issues with label", "query": ".[] | select(.labels | map(.name) | index(\"bug\"))"}, - {"description": "Get issues with many comments", "query": ".[] | select(.comments.totalCount > 5) | {number, title, comments: .comments.totalCount}"}, - {"description": "Count by state", "query": "group_by(.state) | map({state: .[0].state, count: length})"} - ] - } - EOF - fi - - - EOFSH_github-issue-query - chmod +x /opt/gh-aw/safe-inputs/github-issue-query.sh - cat > /opt/gh-aw/safe-inputs/github-pr-query.sh << 'EOFSH_github-pr-query' - #!/bin/bash - # Auto-generated safe-input tool: github-pr-query - # Query GitHub pull requests with jq filtering support. Without --jq, returns schema and data size info. Use --jq '.' to get all data, or specific jq expressions to filter. - - set -euo pipefail - - set -e - - # Default values - REPO="${INPUT_REPO:-}" - STATE="${INPUT_STATE:-open}" - LIMIT="${INPUT_LIMIT:-30}" - JQ_FILTER="${INPUT_JQ:-}" - - # JSON fields to fetch - JSON_FIELDS="number,title,state,author,createdAt,updatedAt,mergedAt,closedAt,headRefName,baseRefName,isDraft,reviewDecision,additions,deletions,changedFiles,labels,assignees,reviewRequests,url" - - # Build and execute gh command - if [[ -n "$REPO" ]]; then - OUTPUT=$(gh pr list --state "$STATE" --limit "$LIMIT" --json "$JSON_FIELDS" --repo "$REPO") - else - OUTPUT=$(gh pr list --state "$STATE" --limit "$LIMIT" --json "$JSON_FIELDS") - fi - - # Apply jq filter if specified - if [[ -n "$JQ_FILTER" ]]; then - jq "$JQ_FILTER" <<< "$OUTPUT" - else - # Return schema and size instead of full data - ITEM_COUNT=$(jq 'length' <<< "$OUTPUT") - DATA_SIZE=${#OUTPUT} - - # Validate values are numeric - if ! [[ "$ITEM_COUNT" =~ ^[0-9]+$ ]]; then - ITEM_COUNT=0 - fi - if ! [[ "$DATA_SIZE" =~ ^[0-9]+$ ]]; then - DATA_SIZE=0 - fi - - cat << EOF - { - "message": "No --jq filter provided. 
Use --jq to filter and retrieve data.", - "item_count": $ITEM_COUNT, - "data_size_bytes": $DATA_SIZE, - "schema": { - "type": "array", - "description": "Array of pull request objects", - "item_fields": { - "number": "integer - PR number", - "title": "string - PR title", - "state": "string - PR state (OPEN, CLOSED, MERGED)", - "author": "object - Author info with login field", - "createdAt": "string - ISO timestamp of creation", - "updatedAt": "string - ISO timestamp of last update", - "mergedAt": "string|null - ISO timestamp of merge", - "closedAt": "string|null - ISO timestamp of close", - "headRefName": "string - Source branch name", - "baseRefName": "string - Target branch name", - "isDraft": "boolean - Whether PR is a draft", - "reviewDecision": "string|null - Review decision (APPROVED, CHANGES_REQUESTED, REVIEW_REQUIRED)", - "additions": "integer - Lines added", - "deletions": "integer - Lines deleted", - "changedFiles": "integer - Number of files changed", - "labels": "array - Array of label objects with name field", - "assignees": "array - Array of assignee objects with login field", - "reviewRequests": "array - Array of review request objects", - "url": "string - PR URL" - } - }, - "suggested_queries": [ - {"description": "Get all data", "query": "."}, - {"description": "Get PR numbers and titles", "query": ".[] | {number, title}"}, - {"description": "Get open PRs only", "query": ".[] | select(.state == \"OPEN\")"}, - {"description": "Get merged PRs", "query": ".[] | select(.mergedAt != null)"}, - {"description": "Get PRs by author", "query": ".[] | select(.author.login == \"USERNAME\")"}, - {"description": "Get large PRs", "query": ".[] | select(.changedFiles > 10) | {number, title, changedFiles}"}, - {"description": "Count by state", "query": "group_by(.state) | map({state: .[0].state, count: length})"} - ] - } - EOF - fi - - - EOFSH_github-pr-query - chmod +x /opt/gh-aw/safe-inputs/github-pr-query.sh - name: Generate Safe Inputs MCP Server Config id: safe-inputs-config @@ -1048,7 +654,6 @@ jobs: GH_AW_SAFE_INPUTS_API_KEY: ${{ steps.safe-inputs-config.outputs.safe_inputs_api_key }} GH_AW_GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} GH_DEBUG: 1 - GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} run: | # Environment variables are set above to prevent template injection export DEBUG @@ -1067,10 +672,8 @@ jobs: GH_AW_SAFE_OUTPUTS_API_KEY: ${{ steps.safe-outputs-start.outputs.api_key }} GH_AW_SAFE_OUTPUTS_PORT: ${{ steps.safe-outputs-start.outputs.port }} GH_DEBUG: 1 - GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} GITHUB_MCP_LOCKDOWN: ${{ steps.determine-automatic-lockdown.outputs.lockdown == 'true' && '1' || '0' }} GITHUB_MCP_SERVER_TOKEN: ${{ secrets.GH_AW_GITHUB_MCP_SERVER_TOKEN || secrets.GH_AW_GITHUB_TOKEN || secrets.GITHUB_TOKEN }} - TAVILY_API_KEY: ${{ secrets.TAVILY_API_KEY }} run: | set -eo pipefail mkdir -p /tmp/gh-aw/mcp-config @@ -1088,7 +691,7 @@ jobs: # Register API key as secret to mask it from logs echo "::add-mask::${MCP_GATEWAY_API_KEY}" export GH_AW_ENGINE="codex" - export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e 
GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_INPUTS_PORT -e GH_AW_SAFE_INPUTS_API_KEY -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -e GH_AW_GH_TOKEN -e GH_DEBUG -e GH_TOKEN -e TAVILY_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.0.103' + export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_INPUTS_PORT -e GH_AW_SAFE_INPUTS_API_KEY -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -e GH_AW_GH_TOKEN -e GH_DEBUG -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.0.103' cat > /tmp/gh-aw/mcp-config/config.toml << EOF [history] @@ -1150,10 +753,6 @@ jobs: "${{ github.workspace }}" ] mounts = ["${{ github.workspace }}:${{ github.workspace }}:rw"] - - [mcp_servers.tavily] - url = "https://mcp.tavily.com/mcp/" - http_headers = { "Authorization" = "Bearer ${{ secrets.TAVILY_API_KEY }}" } EOF # Generate JSON config for MCP gateway @@ -1215,13 +814,6 @@ jobs: "${{ github.workspace }}" ], "mounts": ["${{ github.workspace }}:${{ github.workspace }}:rw"] - }, - "tavily": { - "type": "http", - "url": "https://mcp.tavily.com/mcp/", - "headers": { - "Authorization": "Bearer ${{ secrets.TAVILY_API_KEY }}" - } } }, "gateway": { @@ -1358,15 +950,9 @@ jobs: {{#runtime-import .github/workflows/shared/gh.md}} PROMPT_EOF cat << 'PROMPT_EOF' >> "$GH_AW_PROMPT" - {{#runtime-import .github/workflows/shared/mcp/tavily.md}} - PROMPT_EOF - cat << 'PROMPT_EOF' >> "$GH_AW_PROMPT" {{#runtime-import .github/workflows/shared/reporting.md}} PROMPT_EOF cat << 'PROMPT_EOF' >> "$GH_AW_PROMPT" - {{#runtime-import .github/workflows/shared/github-queries-safe-input.md}} - PROMPT_EOF - cat << 'PROMPT_EOF' >> "$GH_AW_PROMPT" {{#runtime-import .github/workflows/smoke-codex.md}} PROMPT_EOF - name: Substitute placeholders @@ -1428,7 +1014,7 @@ jobs: run: | set -o pipefail mkdir -p "$CODEX_HOME/logs" - sudo -E awf --enable-chroot --env-all --container-workdir "${GITHUB_WORKSPACE}" --allow-domains 
'*.githubusercontent.com,172.30.0.1,api.openai.com,api.snapcraft.io,archive.ubuntu.com,azure.archive.ubuntu.com,cdn.playwright.dev,codeload.github.com,crl.geotrust.com,crl.globalsign.com,crl.identrust.com,crl.sectigo.com,crl.thawte.com,crl.usertrust.com,crl.verisign.com,crl3.digicert.com,crl4.digicert.com,crls.ssl.com,github-cloud.githubusercontent.com,github-cloud.s3.amazonaws.com,github.githubassets.com,go.dev,golang.org,goproxy.io,host.docker.internal,json-schema.org,json.schemastore.org,keyserver.ubuntu.com,lfs.github.com,mcp.tavily.com,objects.githubusercontent.com,ocsp.digicert.com,ocsp.geotrust.com,ocsp.globalsign.com,ocsp.identrust.com,ocsp.sectigo.com,ocsp.ssl.com,ocsp.thawte.com,ocsp.usertrust.com,ocsp.verisign.com,openai.com,packagecloud.io,packages.cloud.google.com,packages.microsoft.com,pkg.go.dev,playwright.download.prss.microsoft.com,ppa.launchpad.net,proxy.golang.org,raw.githubusercontent.com,s.symcb.com,s.symcd.com,security.ubuntu.com,sum.golang.org,ts-crl.ws.symantec.com,ts-ocsp.ws.symantec.com' --log-level info --proxy-logs-dir /tmp/gh-aw/sandbox/firewall/logs --enable-host-access --image-tag 0.13.12 --skip-pull \ + sudo -E awf --enable-chroot --env-all --container-workdir "${GITHUB_WORKSPACE}" --allow-domains '*.githubusercontent.com,172.30.0.1,api.openai.com,api.snapcraft.io,archive.ubuntu.com,azure.archive.ubuntu.com,cdn.playwright.dev,codeload.github.com,crl.geotrust.com,crl.globalsign.com,crl.identrust.com,crl.sectigo.com,crl.thawte.com,crl.usertrust.com,crl.verisign.com,crl3.digicert.com,crl4.digicert.com,crls.ssl.com,github-cloud.githubusercontent.com,github-cloud.s3.amazonaws.com,github.githubassets.com,go.dev,golang.org,goproxy.io,host.docker.internal,json-schema.org,json.schemastore.org,keyserver.ubuntu.com,lfs.github.com,objects.githubusercontent.com,ocsp.digicert.com,ocsp.geotrust.com,ocsp.globalsign.com,ocsp.identrust.com,ocsp.sectigo.com,ocsp.ssl.com,ocsp.thawte.com,ocsp.usertrust.com,ocsp.verisign.com,openai.com,packagecloud.io,packages.cloud.google.com,packages.microsoft.com,pkg.go.dev,playwright.download.prss.microsoft.com,ppa.launchpad.net,proxy.golang.org,raw.githubusercontent.com,s.symcb.com,s.symcd.com,security.ubuntu.com,sum.golang.org,ts-crl.ws.symantec.com,ts-ocsp.ws.symantec.com' --log-level info --proxy-logs-dir /tmp/gh-aw/sandbox/firewall/logs --enable-host-access --image-tag 0.13.12 --skip-pull \ -- /bin/bash -c 'export PATH="$(find /opt/hostedtoolcache -maxdepth 4 -type d -name bin 2>/dev/null | tr '\''\n'\'' '\'':'\'')$PATH"; [ -n "$GOROOT" ] && export PATH="$GOROOT/bin:$PATH" || true && INSTRUCTION="$(cat /tmp/gh-aw/aw-prompts/prompt.txt)" && codex ${GH_AW_MODEL_AGENT_CODEX:+-c model="$GH_AW_MODEL_AGENT_CODEX" }exec --dangerously-bypass-approvals-and-sandbox --skip-git-repo-check "$INSTRUCTION"' \ 2>&1 | tee /tmp/gh-aw/agent-stdio.log env: @@ -1461,13 +1047,12 @@ jobs: const { main } = require('/opt/gh-aw/actions/redact_secrets.cjs'); await main(); env: - GH_AW_SECRET_NAMES: 'CODEX_API_KEY,GH_AW_GITHUB_MCP_SERVER_TOKEN,GH_AW_GITHUB_TOKEN,GITHUB_TOKEN,OPENAI_API_KEY,TAVILY_API_KEY' + GH_AW_SECRET_NAMES: 'CODEX_API_KEY,GH_AW_GITHUB_MCP_SERVER_TOKEN,GH_AW_GITHUB_TOKEN,GITHUB_TOKEN,OPENAI_API_KEY' SECRET_CODEX_API_KEY: ${{ secrets.CODEX_API_KEY }} SECRET_GH_AW_GITHUB_MCP_SERVER_TOKEN: ${{ secrets.GH_AW_GITHUB_MCP_SERVER_TOKEN }} SECRET_GH_AW_GITHUB_TOKEN: ${{ secrets.GH_AW_GITHUB_TOKEN }} SECRET_GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} SECRET_OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }} - SECRET_TAVILY_API_KEY: ${{ secrets.TAVILY_API_KEY }} - 
name: Upload Safe Outputs if: always() uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0 diff --git a/.github/workflows/smoke-codex.md b/.github/workflows/smoke-codex.md index 6b19065d5e..f09867a255 100644 --- a/.github/workflows/smoke-codex.md +++ b/.github/workflows/smoke-codex.md @@ -11,16 +11,13 @@ permissions: contents: read issues: read pull-requests: read - discussions: read name: Smoke Codex engine: codex strict: true imports: - shared/mood.md - shared/gh.md - - shared/mcp/tavily.md - shared/reporting.md - - shared/github-queries-safe-input.md network: allowed: - defaults @@ -66,24 +63,22 @@ timeout-minutes: 15 # Smoke Test: Codex Engine Validation -**IMPORTANT: Keep all outputs extremely short and concise. Use single-line responses where possible. No verbose explanations.** +**CRITICAL EFFICIENCY REQUIREMENTS:** +- Keep ALL outputs extremely short and concise. Use single-line responses. +- NO verbose explanations or unnecessary context. +- Minimize file reading - only read what is absolutely necessary for the task. +- Use targeted, specific queries - avoid broad searches or large data retrievals. ## Test Requirements -1. **GitHub MCP Testing**: Review the last 2 merged pull requests in ${{ github.repository }} -2. **Safe Inputs GH CLI Testing**: Use the `safeinputs-gh` tool to query 2 pull requests from ${{ github.repository }} (use args: "pr list --repo ${{ github.repository }} --limit 2 --json number,title,author") -3. **Serena MCP Testing**: - - Use the Serena MCP server tool `activate_project` to initialize the workspace at `${{ github.workspace }}` and verify it succeeds (do NOT use bash to run go commands - use Serena's MCP tools) - - After initialization, use the `find_symbol` tool to search for symbols (find which tool to call) and verify that at least 3 symbols are found in the results -4. **Playwright Testing**: Use the playwright tools to navigate to https://github.com and verify the page title contains "GitHub" (do NOT try to install playwright - use the provided MCP tools) -5. **Tavily Web Search Testing**: Use the Tavily MCP server to perform a web search for "GitHub Agentic Workflows" and verify that results are returned with at least one item -6. **File Writing Testing**: Create a test file `/tmp/gh-aw/agent/smoke-test-codex-${{ github.run_id }}.txt` with content "Smoke test passed for Codex at $(date)" (create the directory if it doesn't exist) -7. **Bash Tool Testing**: Execute bash commands to verify file creation was successful (use `cat` to read the file back) -8. **Discussion Interaction Testing**: - - Use the `github-discussion-query` safe-input tool with params: `limit=1, jq=".[0]"` to get the latest discussion from ${{ github.repository }} - - Extract the discussion number from the result (e.g., if the result is `{"number": 123, "title": "...", ...}`, extract 123) - - Use the `add_comment` tool with `discussion_number: ` to add a mystical, oracle-themed comment stating that the smoke test agent was here -9. **Build gh-aw**: Run `GOCACHE=/tmp/go-cache GOMODCACHE=/tmp/go-mod make build` to verify the agent can successfully build the gh-aw project (both caches must be set to /tmp because the default cache locations are not writable). If the command fails, mark this test as ❌ and report the failure. +1. **GitHub MCP Testing**: Use GitHub MCP tools to fetch details of exactly 2 merged pull requests from ${{ github.repository }} (title and number only, no descriptions) +2. 
**Serena MCP Testing**: + - Use the Serena MCP server tool `activate_project` to initialize the workspace at `${{ github.workspace }}` and verify it succeeds (do NOT use bash to run go commands) + - After initialization, use the `find_symbol` tool to search for symbols and verify that at least 3 symbols are found in the results +3. **Playwright Testing**: Use the playwright tools to navigate to https://github.com and verify the page title contains "GitHub" (do NOT try to install playwright - use the provided MCP tools) +4. **File Writing Testing**: Create a test file `/tmp/gh-aw/agent/smoke-test-codex-${{ github.run_id }}.txt` with content "Smoke test passed for Codex at $(date)" (create the directory if it doesn't exist) +5. **Bash Tool Testing**: Execute bash commands to verify file creation was successful (use `cat` to read the file back) +6. **Build gh-aw**: Run `GOCACHE=/tmp/go-cache GOMODCACHE=/tmp/go-mod make build` to verify the agent can successfully build the gh-aw project (both caches must be set to /tmp because the default cache locations are not writable). If the command fails, mark this test as ❌ and report the failure. ## Output @@ -92,8 +87,6 @@ Add a **very brief** comment (max 5-10 lines) to the current pull request with: - ✅ or ❌ for each test result - Overall status: PASS or FAIL -Use the `add_comment` tool to add a **mystical oracle-themed comment** to the latest discussion (using the `discussion_number` you extracted in step 8) - be creative and use mystical language like "🔮 The ancient spirits stir..." - If all tests pass: - Use the `add_labels` safe-output tool to add the label `smoke-codex` to the pull request - Use the `remove_labels` safe-output tool to remove the label `smoke` from the pull request diff --git a/pkg/parser/import_processor.go b/pkg/parser/import_processor.go index f71ee766a9..c73573aab5 100644 --- a/pkg/parser/import_processor.go +++ b/pkg/parser/import_processor.go @@ -169,7 +169,7 @@ func processImportsFromFrontmatterWithManifestAndSource(frontmatter map[string]a var toolsBuilder strings.Builder var mcpServersBuilder strings.Builder var markdownBuilder strings.Builder // Only used for imports WITH inputs (compile-time substitution) - var importPaths []string // NEW: Track import paths for runtime-import macro generation + var importPaths []string // NEW: Track import paths for runtime-import macro generation var stepsBuilder strings.Builder var copilotSetupStepsBuilder strings.Builder // Track copilot-setup-steps.yml separately var runtimesBuilder strings.Builder @@ -317,7 +317,7 @@ func processImportsFromFrontmatterWithManifestAndSource(frontmatter map[string]a } else { // Has inputs - must inline for compile-time substitution log.Printf("Agent file has inputs - will be inlined instead of runtime-imported") - + // For agent files, extract markdown content (only when inputs are present) markdownContent, err := processIncludedFileWithVisited(item.fullPath, item.sectionName, false, visited) if err != nil { @@ -489,7 +489,7 @@ func processImportsFromFrontmatterWithManifestAndSource(frontmatter map[string]a } else { // Has inputs - must inline for compile-time substitution log.Printf("Import %s has inputs - will be inlined for compile-time substitution", importRelPath) - + // Extract markdown content from imported file (only for imports with inputs) markdownContent, err := processIncludedFileWithVisited(item.fullPath, item.sectionName, false, visited) if err != nil { @@ -639,7 +639,7 @@ func 
processImportsFromFrontmatterWithManifestAndSource(frontmatter map[string]a MergedSafeOutputs: safeOutputs, MergedSafeInputs: safeInputs, MergedMarkdown: markdownBuilder.String(), // Only imports WITH inputs (for compile-time substitution) - ImportPaths: importPaths, // Import paths for runtime-import macro generation + ImportPaths: importPaths, // Import paths for runtime-import macro generation MergedSteps: stepsBuilder.String(), CopilotSetupSteps: copilotSetupStepsBuilder.String(), MergedRuntimes: runtimesBuilder.String(),
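For context on the retained `ImportPaths` field above: it feeds the `{{#runtime-import ...}}` macros that appear in the generated prompt step of the lock file earlier in this diff. A minimal sketch of that idea, assuming a hypothetical helper name (the actual gh-aw implementation may differ):

```go
package main

import (
	"fmt"
	"strings"
)

// buildRuntimeImportMacros is a hypothetical helper illustrating how import
// paths collected during frontmatter processing could be rendered as
// {{#runtime-import ...}} macro lines for the generated prompt. Imports
// without inputs are referenced at runtime like this rather than inlined.
func buildRuntimeImportMacros(importPaths []string) string {
	var b strings.Builder
	for _, p := range importPaths {
		fmt.Fprintf(&b, "{{#runtime-import %s}}\n", p)
	}
	return b.String()
}

func main() {
	// Example paths taken from the imports listed in this diff.
	paths := []string{
		".github/workflows/shared/gh.md",
		".github/workflows/shared/reporting.md",
	}
	fmt.Print(buildRuntimeImportMacros(paths))
}
```

This mirrors what the lock file shows after recompilation: one runtime-import macro per remaining shared import, with the removed Tavily and github-queries imports no longer emitted.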