diff --git a/.github/aw/actions-lock.json b/.github/aw/actions-lock.json index 80130e4a1a..edbd2e8785 100644 --- a/.github/aw/actions-lock.json +++ b/.github/aw/actions-lock.json @@ -25,6 +25,11 @@ "version": "v4.3.0", "sha": "0057852bfaa89a56745cba8c7296529d2fc39830" }, + "actions/checkout@v3": { + "repo": "actions/checkout", + "version": "v3", + "sha": "f43a0e5ff2bd294095638e18286ca9a3d1956744" + }, "actions/checkout@v4": { "repo": "actions/checkout", "version": "v4", @@ -100,7 +105,7 @@ "version": "v5.6.0", "sha": "a26af69be951a213d495a4c3e4e4022e16d87065" }, - "actions/upload-artifact@v4": { + "actions/upload-artifact@v4.6.2": { "repo": "actions/upload-artifact", "version": "v4.6.2", "sha": "ea165f8d65b6e75b540449e92b4886f43607fa02" diff --git a/.github/workflows/test-yaml-import.lock.yml b/.github/workflows/test-yaml-import.lock.yml new file mode 100644 index 0000000000..5831461337 --- /dev/null +++ b/.github/workflows/test-yaml-import.lock.yml @@ -0,0 +1,519 @@ +# +# ___ _ _ +# / _ \ | | (_) +# | |_| | __ _ ___ _ __ | |_ _ ___ +# | _ |/ _` |/ _ \ '_ \| __| |/ __| +# | | | | (_| | __/ | | | |_| | (__ +# \_| |_/\__, |\___|_| |_|\__|_|\___| +# __/ | +# _ _ |___/ +# | | | | / _| | +# | | | | ___ _ __ _ __| |_| | _____ ____ +# | |/\| |/ _ \ '__| |/ /| _| |/ _ \ \ /\ / / ___| +# \ /\ / (_) | | | | ( | | | | (_) \ V V /\__ \ +# \/ \/ \___/|_| |_|\_\|_| |_|\___/ \_/\_/ |___/ +# +# This file was automatically generated by gh-aw. DO NOT EDIT. +# +# To update this file, edit the corresponding .md file and run: +# gh aw compile +# For more information: https://github.com/githubnext/gh-aw/blob/main/.github/aw/github-agentic-workflows.md +# +# +# Resolved workflow manifest: +# Imports: +# - license-check.yml +# +# frontmatter-hash: 83f31132f451df6f4c3a39b4ad4cc696f31f1257849dbafd4dd0c8a7ac8beac8 + +name: "Test YAML Import" +"on": issue_comment + +permissions: {} + +concurrency: + group: "gh-aw-${{ github.workflow }}-${{ github.event.issue.number }}" + +run-name: "Test YAML Import" + +jobs: + activation: + needs: pre_activation + if: (needs.pre_activation.result == 'skipped') || (needs.pre_activation.outputs.activated == 'true') + runs-on: ubuntu-slim + permissions: + contents: read + outputs: + comment_id: "" + comment_repo: "" + steps: + - name: Checkout actions folder + uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6 + with: + sparse-checkout: | + actions + persist-credentials: false + - name: Setup Scripts + uses: ./actions/setup + with: + destination: /opt/gh-aw/actions + - name: Check workflow file timestamps + uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0 + env: + GH_AW_WORKFLOW_FILE: "test-yaml-import.lock.yml" + with: + script: | + const { setupGlobals } = require('/opt/gh-aw/actions/setup_globals.cjs'); + setupGlobals(core, github, context, exec, io); + const { main } = require('/opt/gh-aw/actions/check_workflow_timestamp_api.cjs'); + await main(); + + agent: + needs: + - activation + - license-check + runs-on: ubuntu-latest + permissions: + contents: read + outputs: + model: ${{ steps.generate_aw_info.outputs.model }} + secret_verification_result: ${{ steps.validate-secret.outputs.verification_result }} + steps: + - name: Checkout actions folder + uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6 + with: + sparse-checkout: | + actions + persist-credentials: false + - name: Setup Scripts + uses: ./actions/setup + with: + destination: /opt/gh-aw/actions + - name: Checkout repository + uses: 
actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6 + with: + persist-credentials: false + - name: Create gh-aw temp directory + run: bash /opt/gh-aw/actions/create_gh_aw_tmp_dir.sh + - name: Configure Git credentials + env: + REPO_NAME: ${{ github.repository }} + SERVER_URL: ${{ github.server_url }} + run: | + git config --global user.email "github-actions[bot]@users.noreply.github.com" + git config --global user.name "github-actions[bot]" + # Re-authenticate git with GitHub token + SERVER_URL_STRIPPED="${SERVER_URL#https://}" + git remote set-url origin "https://x-access-token:${{ github.token }}@${SERVER_URL_STRIPPED}/${REPO_NAME}.git" + echo "Git configured with standard GitHub Actions identity" + - name: Checkout PR branch + if: | + github.event.pull_request + uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0 + env: + GH_TOKEN: ${{ secrets.GH_AW_GITHUB_MCP_SERVER_TOKEN || secrets.GH_AW_GITHUB_TOKEN || secrets.GITHUB_TOKEN }} + with: + github-token: ${{ secrets.GH_AW_GITHUB_MCP_SERVER_TOKEN || secrets.GH_AW_GITHUB_TOKEN || secrets.GITHUB_TOKEN }} + script: | + const { setupGlobals } = require('/opt/gh-aw/actions/setup_globals.cjs'); + setupGlobals(core, github, context, exec, io); + const { main } = require('/opt/gh-aw/actions/checkout_pr_branch.cjs'); + await main(); + - name: Validate COPILOT_GITHUB_TOKEN secret + id: validate-secret + run: /opt/gh-aw/actions/validate_multi_secret.sh COPILOT_GITHUB_TOKEN 'GitHub Copilot CLI' https://githubnext.github.io/gh-aw/reference/engines/#github-copilot-default + env: + COPILOT_GITHUB_TOKEN: ${{ secrets.COPILOT_GITHUB_TOKEN }} + - name: Install GitHub Copilot CLI + run: /opt/gh-aw/actions/install_copilot_cli.sh 0.0.397 + - name: Install awf binary + run: bash /opt/gh-aw/actions/install_awf_binary.sh v0.11.2 + - name: Determine automatic lockdown mode for GitHub MCP server + id: determine-automatic-lockdown + env: + TOKEN_CHECK: ${{ secrets.GH_AW_GITHUB_MCP_SERVER_TOKEN }} + if: env.TOKEN_CHECK != '' + uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 + with: + script: | + const determineAutomaticLockdown = require('/opt/gh-aw/actions/determine_automatic_lockdown.cjs'); + await determineAutomaticLockdown(github, context, core); + - name: Download container images + run: bash /opt/gh-aw/actions/download_docker_images.sh ghcr.io/github/github-mcp-server:v0.30.2 ghcr.io/githubnext/gh-aw-mcpg:v0.0.84 + - name: Start MCP gateway + id: start-mcp-gateway + env: + GITHUB_MCP_LOCKDOWN: ${{ steps.determine-automatic-lockdown.outputs.lockdown == 'true' && '1' || '0' }} + GITHUB_MCP_SERVER_TOKEN: ${{ secrets.GH_AW_GITHUB_MCP_SERVER_TOKEN || secrets.GH_AW_GITHUB_TOKEN || secrets.GITHUB_TOKEN }} + run: | + set -eo pipefail + mkdir -p /tmp/gh-aw/mcp-config + + # Export gateway environment variables for MCP config and gateway script + export MCP_GATEWAY_PORT="80" + export MCP_GATEWAY_DOMAIN="host.docker.internal" + MCP_GATEWAY_API_KEY="" + MCP_GATEWAY_API_KEY=$(openssl rand -base64 45 | tr -d '/+=') + export MCP_GATEWAY_API_KEY + + # Register API key as secret to mask it from logs + echo "::add-mask::${MCP_GATEWAY_API_KEY}" + export GH_AW_ENGINE="copilot" + export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e DEBUG="*" -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH 
-e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/githubnext/gh-aw-mcpg:v0.0.84' + + mkdir -p /home/runner/.copilot + cat << MCPCONFIG_EOF | bash /opt/gh-aw/actions/start_mcp_gateway.sh + { + "mcpServers": { + "github": { + "type": "stdio", + "container": "ghcr.io/github/github-mcp-server:v0.30.2", + "env": { + "GITHUB_LOCKDOWN_MODE": "$GITHUB_MCP_LOCKDOWN", + "GITHUB_PERSONAL_ACCESS_TOKEN": "\${GITHUB_MCP_SERVER_TOKEN}", + "GITHUB_READ_ONLY": "1", + "GITHUB_TOOLSETS": "context,repos,issues,pull_requests" + } + } + }, + "gateway": { + "port": $MCP_GATEWAY_PORT, + "domain": "${MCP_GATEWAY_DOMAIN}", + "apiKey": "${MCP_GATEWAY_API_KEY}" + } + } + MCPCONFIG_EOF + - name: Generate agentic run info + id: generate_aw_info + uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0 + with: + script: | + const fs = require('fs'); + + const awInfo = { + engine_id: "copilot", + engine_name: "GitHub Copilot CLI", + model: process.env.GH_AW_MODEL_AGENT_COPILOT || "", + version: "", + agent_version: "0.0.397", + workflow_name: "Test YAML Import", + experimental: false, + supports_tools_allowlist: true, + supports_http_transport: true, + run_id: context.runId, + run_number: context.runNumber, + run_attempt: process.env.GITHUB_RUN_ATTEMPT, + repository: context.repo.owner + '/' + context.repo.repo, + ref: context.ref, + sha: context.sha, + actor: context.actor, + event_name: context.eventName, + staged: false, + allowed_domains: ["defaults"], + firewall_enabled: true, + awf_version: "v0.11.2", + awmg_version: "v0.0.84", + steps: { + firewall: "squid" + }, + created_at: new Date().toISOString() + }; + + // Write to /tmp/gh-aw directory to avoid inclusion in PR + const tmpPath = '/tmp/gh-aw/aw_info.json'; + fs.writeFileSync(tmpPath, JSON.stringify(awInfo, null, 2)); + console.log('Generated aw_info.json at:', tmpPath); + console.log(JSON.stringify(awInfo, null, 2)); + + // Set model as output for reuse in other steps/jobs + core.setOutput('model', awInfo.model); + - name: Generate workflow overview + uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0 + with: + script: | + const { generateWorkflowOverview } = require('/opt/gh-aw/actions/generate_workflow_overview.cjs'); + await generateWorkflowOverview(core); + - name: Create prompt with built-in context + env: + GH_AW_PROMPT: /tmp/gh-aw/aw-prompts/prompt.txt + GH_AW_GITHUB_ACTOR: ${{ github.actor }} + GH_AW_GITHUB_EVENT_COMMENT_ID: ${{ github.event.comment.id }} + GH_AW_GITHUB_EVENT_DISCUSSION_NUMBER: ${{ github.event.discussion.number }} + GH_AW_GITHUB_EVENT_ISSUE_NUMBER: ${{ github.event.issue.number }} + GH_AW_GITHUB_EVENT_PULL_REQUEST_NUMBER: ${{ github.event.pull_request.number }} + GH_AW_GITHUB_REPOSITORY: ${{ github.repository }} + GH_AW_GITHUB_RUN_ID: ${{ github.run_id }} + GH_AW_GITHUB_WORKSPACE: ${{ github.workspace }} + GH_AW_IS_PR_COMMENT: ${{ github.event.issue.pull_request && 'true' || '' }} + run: 
| + bash /opt/gh-aw/actions/create_prompt_first.sh + cat << 'PROMPT_EOF' > "$GH_AW_PROMPT" + + PROMPT_EOF + cat "/opt/gh-aw/prompts/temp_folder_prompt.md" >> "$GH_AW_PROMPT" + cat "/opt/gh-aw/prompts/markdown.md" >> "$GH_AW_PROMPT" + cat << 'PROMPT_EOF' >> "$GH_AW_PROMPT" + + The following GitHub context information is available for this workflow: + {{#if __GH_AW_GITHUB_ACTOR__ }} + - **actor**: __GH_AW_GITHUB_ACTOR__ + {{/if}} + {{#if __GH_AW_GITHUB_REPOSITORY__ }} + - **repository**: __GH_AW_GITHUB_REPOSITORY__ + {{/if}} + {{#if __GH_AW_GITHUB_WORKSPACE__ }} + - **workspace**: __GH_AW_GITHUB_WORKSPACE__ + {{/if}} + {{#if __GH_AW_GITHUB_EVENT_ISSUE_NUMBER__ }} + - **issue-number**: #__GH_AW_GITHUB_EVENT_ISSUE_NUMBER__ + {{/if}} + {{#if __GH_AW_GITHUB_EVENT_DISCUSSION_NUMBER__ }} + - **discussion-number**: #__GH_AW_GITHUB_EVENT_DISCUSSION_NUMBER__ + {{/if}} + {{#if __GH_AW_GITHUB_EVENT_PULL_REQUEST_NUMBER__ }} + - **pull-request-number**: #__GH_AW_GITHUB_EVENT_PULL_REQUEST_NUMBER__ + {{/if}} + {{#if __GH_AW_GITHUB_EVENT_COMMENT_ID__ }} + - **comment-id**: __GH_AW_GITHUB_EVENT_COMMENT_ID__ + {{/if}} + {{#if __GH_AW_GITHUB_RUN_ID__ }} + - **workflow-run-id**: __GH_AW_GITHUB_RUN_ID__ + {{/if}} + + + PROMPT_EOF + if [ "$GITHUB_EVENT_NAME" = "issue_comment" ] && [ -n "$GH_AW_IS_PR_COMMENT" ] || [ "$GITHUB_EVENT_NAME" = "pull_request_review_comment" ] || [ "$GITHUB_EVENT_NAME" = "pull_request_review" ]; then + cat "/opt/gh-aw/prompts/pr_context_prompt.md" >> "$GH_AW_PROMPT" + fi + cat << 'PROMPT_EOF' >> "$GH_AW_PROMPT" + + PROMPT_EOF + cat << 'PROMPT_EOF' >> "$GH_AW_PROMPT" + {{#runtime-import workflows/test-yaml-import.md}} + PROMPT_EOF + - name: Substitute placeholders + uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0 + env: + GH_AW_PROMPT: /tmp/gh-aw/aw-prompts/prompt.txt + GH_AW_GITHUB_ACTOR: ${{ github.actor }} + GH_AW_GITHUB_EVENT_COMMENT_ID: ${{ github.event.comment.id }} + GH_AW_GITHUB_EVENT_DISCUSSION_NUMBER: ${{ github.event.discussion.number }} + GH_AW_GITHUB_EVENT_ISSUE_NUMBER: ${{ github.event.issue.number }} + GH_AW_GITHUB_EVENT_PULL_REQUEST_NUMBER: ${{ github.event.pull_request.number }} + GH_AW_GITHUB_REPOSITORY: ${{ github.repository }} + GH_AW_GITHUB_RUN_ID: ${{ github.run_id }} + GH_AW_GITHUB_WORKSPACE: ${{ github.workspace }} + GH_AW_IS_PR_COMMENT: ${{ github.event.issue.pull_request && 'true' || '' }} + with: + script: | + const substitutePlaceholders = require('/opt/gh-aw/actions/substitute_placeholders.cjs'); + + // Call the substitution function + return await substitutePlaceholders({ + file: process.env.GH_AW_PROMPT, + substitutions: { + GH_AW_GITHUB_ACTOR: process.env.GH_AW_GITHUB_ACTOR, + GH_AW_GITHUB_EVENT_COMMENT_ID: process.env.GH_AW_GITHUB_EVENT_COMMENT_ID, + GH_AW_GITHUB_EVENT_DISCUSSION_NUMBER: process.env.GH_AW_GITHUB_EVENT_DISCUSSION_NUMBER, + GH_AW_GITHUB_EVENT_ISSUE_NUMBER: process.env.GH_AW_GITHUB_EVENT_ISSUE_NUMBER, + GH_AW_GITHUB_EVENT_PULL_REQUEST_NUMBER: process.env.GH_AW_GITHUB_EVENT_PULL_REQUEST_NUMBER, + GH_AW_GITHUB_REPOSITORY: process.env.GH_AW_GITHUB_REPOSITORY, + GH_AW_GITHUB_RUN_ID: process.env.GH_AW_GITHUB_RUN_ID, + GH_AW_GITHUB_WORKSPACE: process.env.GH_AW_GITHUB_WORKSPACE, + GH_AW_IS_PR_COMMENT: process.env.GH_AW_IS_PR_COMMENT + } + }); + - name: Interpolate variables and render templates + uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0 + env: + GH_AW_PROMPT: /tmp/gh-aw/aw-prompts/prompt.txt + with: + script: | + const { setupGlobals } = 
require('/opt/gh-aw/actions/setup_globals.cjs'); + setupGlobals(core, github, context, exec, io); + const { main } = require('/opt/gh-aw/actions/interpolate_prompt.cjs'); + await main(); + - name: Validate prompt placeholders + env: + GH_AW_PROMPT: /tmp/gh-aw/aw-prompts/prompt.txt + run: bash /opt/gh-aw/actions/validate_prompt_placeholders.sh + - name: Print prompt + env: + GH_AW_PROMPT: /tmp/gh-aw/aw-prompts/prompt.txt + run: bash /opt/gh-aw/actions/print_prompt_summary.sh + - name: Execute GitHub Copilot CLI + id: agentic_execution + # Copilot CLI tool arguments (sorted): + timeout-minutes: 20 + run: | + set -o pipefail + GH_AW_TOOL_BINS=""; command -v go >/dev/null 2>&1 && GH_AW_TOOL_BINS="$(go env GOROOT)/bin:$GH_AW_TOOL_BINS"; [ -n "$JAVA_HOME" ] && GH_AW_TOOL_BINS="$JAVA_HOME/bin:$GH_AW_TOOL_BINS"; [ -n "$CARGO_HOME" ] && GH_AW_TOOL_BINS="$CARGO_HOME/bin:$GH_AW_TOOL_BINS"; [ -n "$GEM_HOME" ] && GH_AW_TOOL_BINS="$GEM_HOME/bin:$GH_AW_TOOL_BINS"; [ -n "$CONDA" ] && GH_AW_TOOL_BINS="$CONDA/bin:$GH_AW_TOOL_BINS"; [ -n "$PIPX_BIN_DIR" ] && GH_AW_TOOL_BINS="$PIPX_BIN_DIR:$GH_AW_TOOL_BINS"; [ -n "$SWIFT_PATH" ] && GH_AW_TOOL_BINS="$SWIFT_PATH:$GH_AW_TOOL_BINS"; [ -n "$DOTNET_ROOT" ] && GH_AW_TOOL_BINS="$DOTNET_ROOT:$GH_AW_TOOL_BINS"; export GH_AW_TOOL_BINS + mkdir -p "$HOME/.cache" + sudo -E awf --env-all --env "ANDROID_HOME=${ANDROID_HOME}" --env "ANDROID_NDK=${ANDROID_NDK}" --env "ANDROID_NDK_HOME=${ANDROID_NDK_HOME}" --env "ANDROID_NDK_LATEST_HOME=${ANDROID_NDK_LATEST_HOME}" --env "ANDROID_NDK_ROOT=${ANDROID_NDK_ROOT}" --env "ANDROID_SDK_ROOT=${ANDROID_SDK_ROOT}" --env "AZURE_EXTENSION_DIR=${AZURE_EXTENSION_DIR}" --env "CARGO_HOME=${CARGO_HOME}" --env "CHROMEWEBDRIVER=${CHROMEWEBDRIVER}" --env "CONDA=${CONDA}" --env "DOTNET_ROOT=${DOTNET_ROOT}" --env "EDGEWEBDRIVER=${EDGEWEBDRIVER}" --env "GECKOWEBDRIVER=${GECKOWEBDRIVER}" --env "GEM_HOME=${GEM_HOME}" --env "GEM_PATH=${GEM_PATH}" --env "GOPATH=${GOPATH}" --env "GOROOT=${GOROOT}" --env "HOMEBREW_CELLAR=${HOMEBREW_CELLAR}" --env "HOMEBREW_PREFIX=${HOMEBREW_PREFIX}" --env "HOMEBREW_REPOSITORY=${HOMEBREW_REPOSITORY}" --env "JAVA_HOME=${JAVA_HOME}" --env "JAVA_HOME_11_X64=${JAVA_HOME_11_X64}" --env "JAVA_HOME_17_X64=${JAVA_HOME_17_X64}" --env "JAVA_HOME_21_X64=${JAVA_HOME_21_X64}" --env "JAVA_HOME_25_X64=${JAVA_HOME_25_X64}" --env "JAVA_HOME_8_X64=${JAVA_HOME_8_X64}" --env "NVM_DIR=${NVM_DIR}" --env "PIPX_BIN_DIR=${PIPX_BIN_DIR}" --env "PIPX_HOME=${PIPX_HOME}" --env "RUSTUP_HOME=${RUSTUP_HOME}" --env "SELENIUM_JAR_PATH=${SELENIUM_JAR_PATH}" --env "SWIFT_PATH=${SWIFT_PATH}" --env "VCPKG_INSTALLATION_ROOT=${VCPKG_INSTALLATION_ROOT}" --env "GH_AW_TOOL_BINS=$GH_AW_TOOL_BINS" --container-workdir "${GITHUB_WORKSPACE}" --mount /tmp:/tmp:rw --mount "${HOME}/.cache:${HOME}/.cache:rw" --mount "${GITHUB_WORKSPACE}:${GITHUB_WORKSPACE}:rw" --mount /usr/bin/cat:/usr/bin/cat:ro --mount /usr/bin/curl:/usr/bin/curl:ro --mount /usr/bin/date:/usr/bin/date:ro --mount /usr/bin/find:/usr/bin/find:ro --mount /usr/bin/gh:/usr/bin/gh:ro --mount /usr/bin/grep:/usr/bin/grep:ro --mount /usr/bin/jq:/usr/bin/jq:ro --mount /usr/bin/yq:/usr/bin/yq:ro --mount /usr/bin/cp:/usr/bin/cp:ro --mount /usr/bin/cut:/usr/bin/cut:ro --mount /usr/bin/diff:/usr/bin/diff:ro --mount /usr/bin/head:/usr/bin/head:ro --mount /usr/bin/ls:/usr/bin/ls:ro --mount /usr/bin/mkdir:/usr/bin/mkdir:ro --mount /usr/bin/rm:/usr/bin/rm:ro --mount /usr/bin/sed:/usr/bin/sed:ro --mount /usr/bin/sort:/usr/bin/sort:ro --mount /usr/bin/tail:/usr/bin/tail:ro --mount /usr/bin/wc:/usr/bin/wc:ro --mount 
/usr/bin/which:/usr/bin/which:ro --mount /usr/local/bin/copilot:/usr/local/bin/copilot:ro --mount /home/runner/.copilot:/home/runner/.copilot:rw --mount /opt/hostedtoolcache:/opt/hostedtoolcache:ro --mount /opt/gh-aw:/opt/gh-aw:ro --allow-domains api.business.githubcopilot.com,api.enterprise.githubcopilot.com,api.github.com,api.githubcopilot.com,api.individual.githubcopilot.com,api.snapcraft.io,archive.ubuntu.com,azure.archive.ubuntu.com,crl.geotrust.com,crl.globalsign.com,crl.identrust.com,crl.sectigo.com,crl.thawte.com,crl.usertrust.com,crl.verisign.com,crl3.digicert.com,crl4.digicert.com,crls.ssl.com,github.com,host.docker.internal,json-schema.org,json.schemastore.org,keyserver.ubuntu.com,ocsp.digicert.com,ocsp.geotrust.com,ocsp.globalsign.com,ocsp.identrust.com,ocsp.sectigo.com,ocsp.ssl.com,ocsp.thawte.com,ocsp.usertrust.com,ocsp.verisign.com,packagecloud.io,packages.cloud.google.com,packages.microsoft.com,ppa.launchpad.net,raw.githubusercontent.com,registry.npmjs.org,s.symcb.com,s.symcd.com,security.ubuntu.com,ts-crl.ws.symantec.com,ts-ocsp.ws.symantec.com --log-level info --proxy-logs-dir /tmp/gh-aw/sandbox/firewall/logs --enable-host-access --image-tag 0.11.2 --agent-image act \ + -- 'source /opt/gh-aw/actions/sanitize_path.sh "$GH_AW_TOOL_BINS$(find /opt/hostedtoolcache -maxdepth 4 -type d -name bin 2>/dev/null | tr '\''\n'\'' '\'':'\'')$PATH" && /usr/local/bin/copilot --add-dir /tmp/gh-aw/ --log-level all --log-dir /tmp/gh-aw/sandbox/agent/logs/ --add-dir "${GITHUB_WORKSPACE}" --disable-builtin-mcps --allow-all-tools --allow-all-paths --share /tmp/gh-aw/sandbox/agent/logs/conversation.md --prompt "$(cat /tmp/gh-aw/aw-prompts/prompt.txt)"${GH_AW_MODEL_DETECTION_COPILOT:+ --model "$GH_AW_MODEL_DETECTION_COPILOT"}' \ + 2>&1 | tee /tmp/gh-aw/agent-stdio.log + env: + COPILOT_AGENT_RUNNER_TYPE: STANDALONE + COPILOT_GITHUB_TOKEN: ${{ secrets.COPILOT_GITHUB_TOKEN }} + GH_AW_MCP_CONFIG: /home/runner/.copilot/mcp-config.json + GH_AW_MODEL_DETECTION_COPILOT: ${{ vars.GH_AW_MODEL_DETECTION_COPILOT || '' }} + GH_AW_PROMPT: /tmp/gh-aw/aw-prompts/prompt.txt + GITHUB_HEAD_REF: ${{ github.head_ref }} + GITHUB_REF_NAME: ${{ github.ref_name }} + GITHUB_STEP_SUMMARY: ${{ env.GITHUB_STEP_SUMMARY }} + GITHUB_WORKSPACE: ${{ github.workspace }} + XDG_CONFIG_HOME: /home/runner + - name: Copy Copilot session state files to logs + if: always() + continue-on-error: true + run: | + # Copy Copilot session state files to logs folder for artifact collection + # This ensures they are in /tmp/gh-aw/ where secret redaction can scan them + SESSION_STATE_DIR="$HOME/.copilot/session-state" + LOGS_DIR="/tmp/gh-aw/sandbox/agent/logs" + + if [ -d "$SESSION_STATE_DIR" ]; then + echo "Copying Copilot session state files from $SESSION_STATE_DIR to $LOGS_DIR" + mkdir -p "$LOGS_DIR" + cp -v "$SESSION_STATE_DIR"/*.jsonl "$LOGS_DIR/" 2>/dev/null || true + echo "Session state files copied successfully" + else + echo "No session-state directory found at $SESSION_STATE_DIR" + fi + - name: Stop MCP gateway + if: always() + continue-on-error: true + env: + MCP_GATEWAY_PORT: ${{ steps.start-mcp-gateway.outputs.gateway-port }} + MCP_GATEWAY_API_KEY: ${{ steps.start-mcp-gateway.outputs.gateway-api-key }} + GATEWAY_PID: ${{ steps.start-mcp-gateway.outputs.gateway-pid }} + run: | + bash /opt/gh-aw/actions/stop_mcp_gateway.sh "$GATEWAY_PID" + - name: Redact secrets in logs + if: always() + uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0 + with: + script: | + const { setupGlobals } = 
require('/opt/gh-aw/actions/setup_globals.cjs'); + setupGlobals(core, github, context, exec, io); + const { main } = require('/opt/gh-aw/actions/redact_secrets.cjs'); + await main(); + env: + GH_AW_SECRET_NAMES: 'COPILOT_GITHUB_TOKEN,GH_AW_GITHUB_MCP_SERVER_TOKEN,GH_AW_GITHUB_TOKEN,GITHUB_TOKEN' + SECRET_COPILOT_GITHUB_TOKEN: ${{ secrets.COPILOT_GITHUB_TOKEN }} + SECRET_GH_AW_GITHUB_MCP_SERVER_TOKEN: ${{ secrets.GH_AW_GITHUB_MCP_SERVER_TOKEN }} + SECRET_GH_AW_GITHUB_TOKEN: ${{ secrets.GH_AW_GITHUB_TOKEN }} + SECRET_GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + - name: Upload engine output files + uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0 + with: + name: agent_outputs + path: | + /tmp/gh-aw/sandbox/agent/logs/ + /tmp/gh-aw/redacted-urls.log + if-no-files-found: ignore + - name: Parse agent logs for step summary + if: always() + uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0 + env: + GH_AW_AGENT_OUTPUT: /tmp/gh-aw/sandbox/agent/logs/ + with: + script: | + const { setupGlobals } = require('/opt/gh-aw/actions/setup_globals.cjs'); + setupGlobals(core, github, context, exec, io); + const { main } = require('/opt/gh-aw/actions/parse_copilot_log.cjs'); + await main(); + - name: Parse MCP gateway logs for step summary + if: always() + uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0 + with: + script: | + const { setupGlobals } = require('/opt/gh-aw/actions/setup_globals.cjs'); + setupGlobals(core, github, context, exec, io); + const { main } = require('/opt/gh-aw/actions/parse_mcp_gateway_log.cjs'); + await main(); + - name: Print firewall logs + if: always() + continue-on-error: true + env: + AWF_LOGS_DIR: /tmp/gh-aw/sandbox/firewall/logs + run: | + # Fix permissions on firewall logs so they can be uploaded as artifacts + # AWF runs with sudo, creating files owned by root + sudo chmod -R a+r /tmp/gh-aw/sandbox/firewall/logs 2>/dev/null || true + awf logs summary | tee -a "$GITHUB_STEP_SUMMARY" + - name: Upload agent artifacts + if: always() + continue-on-error: true + uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0 + with: + name: agent-artifacts + path: | + /tmp/gh-aw/aw-prompts/prompt.txt + /tmp/gh-aw/aw_info.json + /tmp/gh-aw/mcp-logs/ + /tmp/gh-aw/sandbox/firewall/logs/ + /tmp/gh-aw/agent-stdio.log + if-no-files-found: ignore + + license-check: + needs: activation + runs-on: ubuntu-latest + permissions: + contents: read + + steps: + - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # 11bd71901bbe5b1630ceea73d27597364c9af683 + - uses: actions/setup-go@3041bf56c941b39c61721a86cd11f3bb1338122a # 3041bf56c941b39c61721a86cd11f3bb1338122a + with: + go-version-file: go.mod + - name: Install go-licenses + run: go install github.com/google/go-licenses@latest + - name: Check licenses + run: | + echo "Checking dependency licenses for compliance..." + go-licenses check --disallowed_types=forbidden,reciprocal,restricted,unknown ./... + - name: Generate license report + if: always() + run: | + echo "Generating license report..." + go-licenses csv ./... 
> licenses.csv 2>&1 || true + - name: Upload license report + if: always() + uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2 + with: + name: license-report + path: licenses.csv + + pre_activation: + if: > + ((github.event_name != 'schedule') && (github.event_name != 'merge_group')) && (github.event_name != 'workflow_dispatch') + runs-on: ubuntu-slim + permissions: + contents: read + outputs: + activated: ${{ steps.check_membership.outputs.is_team_member == 'true' }} + steps: + - name: Checkout actions folder + uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6 + with: + sparse-checkout: | + actions + persist-credentials: false + - name: Setup Scripts + uses: ./actions/setup + with: + destination: /opt/gh-aw/actions + - name: Check team membership for workflow + id: check_membership + uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0 + env: + GH_AW_REQUIRED_ROLES: admin,maintainer,write + with: + github-token: ${{ secrets.GITHUB_TOKEN }} + script: | + const { setupGlobals } = require('/opt/gh-aw/actions/setup_globals.cjs'); + setupGlobals(core, github, context, exec, io); + const { main } = require('/opt/gh-aw/actions/check_membership.cjs'); + await main(); + diff --git a/.github/workflows/test-yaml-import.md b/.github/workflows/test-yaml-import.md new file mode 100644 index 0000000000..9e147d9b55 --- /dev/null +++ b/.github/workflows/test-yaml-import.md @@ -0,0 +1,13 @@ +--- +name: Test YAML Import +on: issue_comment +imports: + - license-check.yml +engine: copilot +--- + +# Test YAML Import + +This workflow imports the existing License Check workflow (license-check.yml) to demonstrate the YAML import feature. + +The imported workflow contains a job (license-check) that will be merged with any jobs defined in this workflow. 
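A minimal sketch of how the `imports:` frontmatter above is resolved at compile time, mirroring the new pkg/parser tests later in this diff. The exported helpers (ExtractFrontmatterFromContent, ProcessImportsFromFrontmatterWithManifest) and the MergedJobs field come from this PR; the stand-in license-check.yml content and the temp-dir layout are illustrative only, not the repository's actual license-check workflow:

```go
// Sketch: resolve a markdown workflow's YAML imports the way the new parser
// tests in this diff do. Assumes the exported pkg/parser API shown in those
// tests; the imported workflow content below is a made-up stand-in.
package main

import (
	"fmt"
	"log"
	"os"
	"path/filepath"

	"github.com/githubnext/gh-aw/pkg/parser"
)

func main() {
	dir, err := os.MkdirTemp("", "gh-aw-import")
	if err != nil {
		log.Fatal(err)
	}
	defer os.RemoveAll(dir)

	// Stand-in for license-check.yml: a plain GitHub Actions workflow.
	ci := `name: License Check
on: push
jobs:
  license-check:
    runs-on: ubuntu-latest
    steps:
      - run: echo check
`
	if err := os.WriteFile(filepath.Join(dir, "license-check.yml"), []byte(ci), 0o644); err != nil {
		log.Fatal(err)
	}

	md := `---
name: Test YAML Import
on: issue_comment
imports:
  - license-check.yml
engine: copilot
---

# Test YAML Import
`
	fm, err := parser.ExtractFrontmatterFromContent(md)
	if err != nil {
		log.Fatal(err)
	}
	res, err := parser.ProcessImportsFromFrontmatterWithManifest(fm.Frontmatter, dir, nil)
	if err != nil {
		log.Fatal(err)
	}
	// MergedJobs holds the imported jobs as newline-separated JSON objects,
	// ready to be merged with any jobs defined in the markdown frontmatter.
	fmt.Println(res.MergedJobs)
}
```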
diff --git a/actions/setup/js/runtime_import.cjs b/actions/setup/js/runtime_import.cjs index 366a28bea5..911735bf25 100644 --- a/actions/setup/js/runtime_import.cjs +++ b/actions/setup/js/runtime_import.cjs @@ -544,6 +544,60 @@ function wrapExpressionsInTemplateConditionals(content) { }); } +/** + * Extracts GitHub expressions from wrapped template conditionals and replaces them with placeholders + * Transforms {{#if ${{ expression }} }} to {{#if __GH_AW_PLACEHOLDER__ }} + * @param {string} content - The markdown content with wrapped expressions + * @returns {string} - Content with expressions replaced by placeholders + */ +function extractAndReplacePlaceholders(content) { + // Pattern to match {{#if ${{ expression }} }} where expression needs to be extracted + const pattern = /\{\{#if\s+\$\{\{\s*(.*?)\s*\}\}\s*\}\}/g; + + return content.replace(pattern, (match, expr) => { + const trimmed = expr.trim(); + + // Generate placeholder name from expression + // Convert dots and special chars to underscores and uppercase + const placeholder = generatePlaceholderName(trimmed); + + // Return the conditional with placeholder + return `{{#if __${placeholder}__ }}`; + }); +} + +/** + * Generates a placeholder name from a GitHub expression + * @param {string} expr - The GitHub expression (e.g., "github.event.issue.number") + * @returns {string} - The placeholder name (e.g., "GH_AW_GITHUB_EVENT_ISSUE_NUMBER") + */ +function generatePlaceholderName(expr) { + // Check if it's a simple property access chain (e.g., github.event.issue.number) + const simplePattern = /^[a-zA-Z][a-zA-Z0-9_.]*$/; + + if (simplePattern.test(expr)) { + // Convert dots to underscores and uppercase + // e.g., "github.event.issue.number" -> "GH_AW_GITHUB_EVENT_ISSUE_NUMBER" + return "GH_AW_" + expr.replace(/\./g, "_").toUpperCase(); + } + + // For boolean literals, use special placeholders + if (expr === "true") { + return "GH_AW_TRUE"; + } + if (expr === "false") { + return "GH_AW_FALSE"; + } + if (expr === "null") { + return "GH_AW_NULL"; + } + + // For complex expressions or unknown variables, create a generic placeholder + // Replace non-alphanumeric characters with underscores + const sanitized = expr.replace(/[^a-zA-Z0-9_]/g, "_").toUpperCase(); + return "GH_AW_" + sanitized; +} + /** * Reads and processes a file or URL for runtime import * @param {string} filepathOrUrl - The path to the file (relative to GITHUB_WORKSPACE) or URL to import @@ -661,6 +715,10 @@ async function processRuntimeImport(filepathOrUrl, optional, workspaceDir, start // This handles {{#if expression}} where expression is not already wrapped in ${{ }} content = wrapExpressionsInTemplateConditionals(content); + // Extract and replace GitHub expressions in template conditionals with placeholders + // This transforms {{#if ${{ expression }} }} to {{#if __GH_AW_PLACEHOLDER__ }} + content = extractAndReplacePlaceholders(content); + // Process GitHub Actions expressions (validate and render safe ones) if (hasGitHubActionsMacros(content)) { content = processExpressions(content, `File ${filepath}`); @@ -781,4 +839,6 @@ module.exports = { evaluateExpression, processExpressions, wrapExpressionsInTemplateConditionals, + extractAndReplacePlaceholders, + generatePlaceholderName, }; diff --git a/docs/src/content/docs/agent-factory-status.mdx b/docs/src/content/docs/agent-factory-status.mdx index f010167abf..4ef6b6f2b5 100644 --- a/docs/src/content/docs/agent-factory-status.mdx +++ b/docs/src/content/docs/agent-factory-status.mdx @@ -140,6 +140,7 @@ These are 
experimental agentic workflows used by the GitHub Next team to learn, | [Terminal Stylist](https://github.com/githubnext/gh-aw/blob/main/.github/workflows/terminal-stylist.md) | copilot | [![Terminal Stylist](https://github.com/githubnext/gh-aw/actions/workflows/terminal-stylist.lock.yml/badge.svg)](https://github.com/githubnext/gh-aw/actions/workflows/terminal-stylist.lock.yml) | - | - | | [Test Create PR Error Handling](https://github.com/githubnext/gh-aw/blob/main/.github/workflows/test-create-pr-error-handling.md) | claude | [![Test Create PR Error Handling](https://github.com/githubnext/gh-aw/actions/workflows/test-create-pr-error-handling.lock.yml/badge.svg)](https://github.com/githubnext/gh-aw/actions/workflows/test-create-pr-error-handling.lock.yml) | - | - | | [Test Project URL Default](https://github.com/githubnext/gh-aw/blob/main/.github/workflows/test-project-url-default.md) | copilot | [![Test Project URL Default](https://github.com/githubnext/gh-aw/actions/workflows/test-project-url-default.lock.yml/badge.svg)](https://github.com/githubnext/gh-aw/actions/workflows/test-project-url-default.lock.yml) | - | - | +| [Test YAML Import](https://github.com/githubnext/gh-aw/blob/main/.github/workflows/test-yaml-import.md) | copilot | [![Test YAML Import](https://github.com/githubnext/gh-aw/actions/workflows/test-yaml-import.lock.yml/badge.svg)](https://github.com/githubnext/gh-aw/actions/workflows/test-yaml-import.lock.yml) | - | - | | [The Daily Repository Chronicle](https://github.com/githubnext/gh-aw/blob/main/.github/workflows/daily-repo-chronicle.md) | copilot | [![The Daily Repository Chronicle](https://github.com/githubnext/gh-aw/actions/workflows/daily-repo-chronicle.lock.yml/badge.svg)](https://github.com/githubnext/gh-aw/actions/workflows/daily-repo-chronicle.lock.yml) | `0 16 * * 1-5` | - | | [The Great Escapi](https://github.com/githubnext/gh-aw/blob/main/.github/workflows/firewall-escape.md) | copilot | [![The Great Escapi](https://github.com/githubnext/gh-aw/actions/workflows/firewall-escape.lock.yml/badge.svg)](https://github.com/githubnext/gh-aw/actions/workflows/firewall-escape.lock.yml) | - | - | | [Tidy](https://github.com/githubnext/gh-aw/blob/main/.github/workflows/tidy.md) | copilot | [![Tidy](https://github.com/githubnext/gh-aw/actions/workflows/tidy.lock.yml/badge.svg)](https://github.com/githubnext/gh-aw/actions/workflows/tidy.lock.yml) | `0 7 * * *` | - | diff --git a/pkg/parser/import_processor.go b/pkg/parser/import_processor.go index 6ac457a414..39e509e3b9 100644 --- a/pkg/parser/import_processor.go +++ b/pkg/parser/import_processor.go @@ -8,6 +8,7 @@ import ( "strings" "github.com/githubnext/gh-aw/pkg/logger" + "github.com/goccy/go-yaml" ) var importLog = logger.New("parser:import_processor") @@ -30,6 +31,7 @@ type ImportsResult struct { MergedPostSteps string // Merged post-steps configuration from all imports (appended in order) MergedLabels []string // Merged labels from all imports (union of label names) MergedCaches []string // Merged cache configurations from all imports (appended in order) + MergedJobs string // Merged jobs from imported YAML workflows (JSON format) ImportedFiles []string // List of imported file paths (for manifest) AgentFile string // Path to custom agent file (if imported) // ImportInputs uses map[string]any because input values can be different types (string, number, boolean). 
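The new MergedJobs field is a newline-separated string of JSON objects, one per imported YAML workflow: jobsBuilder below appends each workflow's jobs map with a trailing newline, and the compiler later splits on newlines and unmarshals each entry. A small self-contained sketch of that encoding and its decoding, with illustrative job names:

```go
// Sketch of the MergedJobs wire format introduced in this PR. The append and
// split/unmarshal patterns mirror the parser and compiler changes in this
// diff; the job names and configs here are illustrative only.
package main

import (
	"encoding/json"
	"fmt"
	"strings"
)

func main() {
	// Build MergedJobs the way the parser does: one JSON object per imported
	// YAML workflow, newline-terminated.
	var b strings.Builder
	for _, jobsJSON := range []string{
		`{"lint":{"runs-on":"ubuntu-latest"},"test":{"runs-on":"ubuntu-latest","needs":"lint"}}`,
		`{"db-test":{"runs-on":"ubuntu-latest"}}`,
	} {
		b.WriteString(jobsJSON + "\n")
	}
	mergedJobs := b.String()

	// Decode it the way the compiler does: split on newlines and unmarshal
	// each non-empty line independently.
	for _, line := range strings.Split(mergedJobs, "\n") {
		line = strings.TrimSpace(line)
		if line == "" || line == "{}" {
			continue
		}
		var jobs map[string]any
		if err := json.Unmarshal([]byte(line), &jobs); err != nil {
			continue
		}
		for name := range jobs {
			fmt.Println("imported job:", name)
		}
	}
}
```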
@@ -177,6 +179,7 @@ func processImportsFromFrontmatterWithManifestAndSource(frontmatter map[string]a var labels []string // Track unique labels labelsSet := make(map[string]bool) // Set for deduplicating labels var caches []string // Track cache configurations (appended in order) + var jobsBuilder strings.Builder // Track jobs from imported YAML workflows var agentFile string // Track custom agent file importInputs := make(map[string]any) // Aggregated input values from all imports @@ -212,6 +215,22 @@ func processImportsFromFrontmatterWithManifestAndSource(frontmatter map[string]a return nil, fmt.Errorf("failed to resolve import '%s': %w", filePath, err) } + // Validate that .lock.yml files are not imported + if strings.HasSuffix(strings.ToLower(fullPath), ".lock.yml") { + if workflowFilePath != "" && yamlContent != "" { + line, column := findImportItemLocation(yamlContent, importPath) + importErr := &ImportError{ + ImportPath: importPath, + FilePath: workflowFilePath, + Line: line, + Column: column, + Cause: fmt.Errorf("cannot import .lock.yml files. Lock files are compiled outputs from gh-aw. Import the source .md file instead"), + } + return nil, FormatImportError(importErr, yamlContent) + } + return nil, fmt.Errorf("cannot import .lock.yml files: '%s'. Lock files are compiled outputs from gh-aw. Import the source .md file instead", importPath) + } + // Check for duplicates before adding to queue if !visited[fullPath] { visited[fullPath] = true @@ -282,6 +301,41 @@ func processImportsFromFrontmatterWithManifestAndSource(frontmatter map[string]a continue } + // Check if this is a YAML workflow file (not .lock.yml) + if isYAMLWorkflowFile(item.fullPath) { + log.Printf("Detected YAML workflow file: %s", item.fullPath) + + // Process YAML workflow import to extract jobs and services + jobsJSON, servicesJSON, err := processYAMLWorkflowImport(item.fullPath) + if err != nil { + return nil, fmt.Errorf("failed to process YAML workflow '%s': %w", item.importPath, err) + } + + // Append jobs to merged jobs + if jobsJSON != "" && jobsJSON != "{}" { + jobsBuilder.WriteString(jobsJSON + "\n") + log.Printf("Added jobs from YAML workflow: %s", item.importPath) + } + + // Append services to merged services (services from YAML are already in JSON format) + // Need to convert to YAML format for consistency with other services + if servicesJSON != "" && servicesJSON != "{}" { + // Convert JSON services to YAML format + var services map[string]any + if err := json.Unmarshal([]byte(servicesJSON), &services); err == nil { + servicesWrapper := map[string]any{"services": services} + servicesYAML, err := yaml.Marshal(servicesWrapper) + if err == nil { + servicesBuilder.WriteString(string(servicesYAML) + "\n") + log.Printf("Added services from YAML workflow: %s", item.importPath) + } + } + } + + // YAML workflows don't have nested imports or markdown content, skip to next item + continue + } + // Read the imported file to extract nested imports content, err := os.ReadFile(item.fullPath) if err != nil { @@ -510,6 +564,7 @@ func processImportsFromFrontmatterWithManifestAndSource(frontmatter map[string]a MergedPostSteps: postStepsBuilder.String(), MergedLabels: labels, MergedCaches: caches, + MergedJobs: jobsBuilder.String(), ImportedFiles: topologicalOrder, AgentFile: agentFile, ImportInputs: importInputs, diff --git a/pkg/parser/schema_deprecated_test.go b/pkg/parser/schema_deprecated_test.go index e442144522..9dbe15fdf8 100644 --- a/pkg/parser/schema_deprecated_test.go +++ b/pkg/parser/schema_deprecated_test.go 
@@ -12,18 +12,25 @@ func TestGetMainWorkflowDeprecatedFields(t *testing.T) { t.Fatalf("GetMainWorkflowDeprecatedFields() error = %v", err) } - // Check that timeout_minutes is NOT in the list (it was removed from schema completely) - // Users should use the timeout-minutes-migration codemod to migrate their workflows + // Check that timeout_minutes IS in the list as a deprecated field + // This allows strict mode to properly detect and reject it found := false + var timeoutMinutesField *DeprecatedField for _, field := range deprecatedFields { if field.Name == "timeout_minutes" { found = true + timeoutMinutesField = &field break } } - if found { - t.Error("timeout_minutes should NOT be in the deprecated fields list (removed from schema)") + if !found { + t.Error("timeout_minutes should be in the deprecated fields list to support strict mode validation") + } else { + // Verify it has the correct replacement + if timeoutMinutesField.Replacement != "timeout-minutes" { + t.Errorf("timeout_minutes replacement = %v, want 'timeout-minutes'", timeoutMinutesField.Replacement) + } } } diff --git a/pkg/parser/schemas/main_workflow_schema.json b/pkg/parser/schemas/main_workflow_schema.json index 601a4ce0ff..45fedd88d1 100644 --- a/pkg/parser/schemas/main_workflow_schema.json +++ b/pkg/parser/schemas/main_workflow_schema.json @@ -1791,6 +1791,12 @@ "description": "Workflow timeout in minutes (GitHub Actions standard field). Defaults to 20 minutes for agentic workflows. Has sensible defaults and can typically be omitted.", "examples": [5, 10, 30] }, + "timeout_minutes": { + "type": "integer", + "deprecated": true, + "description": "DEPRECATED: Use 'timeout-minutes' instead. Workflow timeout in minutes.", + "x-deprecation-message": "Use 'timeout-minutes' (with hyphen) instead of 'timeout_minutes' (with underscore) to follow GitHub Actions naming conventions." + }, "concurrency": { "description": "Concurrency control to limit concurrent workflow runs (GitHub Actions standard field). Supports two forms: simple string for basic group isolation, or object with cancel-in-progress option for advanced control. Agentic workflows enhance this with automatic per-engine concurrency policies (defaults to single job per engine across all workflows) and token-based rate limiting. Default behavior: workflows in the same group queue sequentially unless cancel-in-progress is true. 
See https://docs.github.com/en/actions/using-jobs/using-concurrency", "oneOf": [ diff --git a/pkg/parser/yaml_import.go b/pkg/parser/yaml_import.go new file mode 100644 index 0000000000..d9da3f9cca --- /dev/null +++ b/pkg/parser/yaml_import.go @@ -0,0 +1,140 @@ +package parser + +import ( + "encoding/json" + "fmt" + "os" + "path/filepath" + "strings" + + "github.com/githubnext/gh-aw/pkg/logger" + "github.com/goccy/go-yaml" +) + +var yamlImportLog = logger.New("parser:yaml_import") + +// isYAMLWorkflowFile checks if a file path points to a GitHub Actions workflow YAML file +// Returns true for .yml and .yaml files, but false for .lock.yml files +func isYAMLWorkflowFile(filePath string) bool { + // Normalize to lowercase for case-insensitive extension check + lower := strings.ToLower(filePath) + + // Reject .lock.yml files (these are compiled outputs from gh-aw) + if strings.HasSuffix(lower, ".lock.yml") { + return false + } + + // Accept .yml and .yaml files + return strings.HasSuffix(lower, ".yml") || strings.HasSuffix(lower, ".yaml") +} + +// isActionDefinitionFile checks if a YAML file is a GitHub Action definition (action.yml) +// rather than a workflow file. Action definitions have different structure with 'runs' field. +func isActionDefinitionFile(filePath string, content []byte) (bool, error) { + // Quick check: action.yml or action.yaml filename + base := filepath.Base(filePath) + if strings.ToLower(base) == "action.yml" || strings.ToLower(base) == "action.yaml" { + return true, nil + } + + // Parse YAML to check structure + var doc map[string]any + if err := yaml.Unmarshal(content, &doc); err != nil { + return false, fmt.Errorf("failed to parse YAML: %w", err) + } + + // Action definitions have 'runs' field, workflows have 'jobs' field + _, hasRuns := doc["runs"] + _, hasJobs := doc["jobs"] + + // If it has 'runs' but no 'jobs', it's likely an action definition + if hasRuns && !hasJobs { + return true, nil + } + + return false, nil +} + +// processYAMLWorkflowImport processes an imported YAML workflow file +// Returns the extracted jobs in JSON format for merging +func processYAMLWorkflowImport(filePath string) (jobs string, services string, err error) { + yamlImportLog.Printf("Processing YAML workflow import: %s", filePath) + + // Read the YAML file + content, err := os.ReadFile(filePath) + if err != nil { + return "", "", fmt.Errorf("failed to read YAML file: %w", err) + } + + // Check if this is an action definition file (not a workflow) + isAction, err := isActionDefinitionFile(filePath, content) + if err != nil { + return "", "", fmt.Errorf("failed to check if file is action definition: %w", err) + } + if isAction { + return "", "", fmt.Errorf("cannot import action definition file (action.yml). 
Only workflow files (.yml) can be imported") + } + + // Parse the YAML workflow + var workflow map[string]any + if err := yaml.Unmarshal(content, &workflow); err != nil { + return "", "", fmt.Errorf("failed to parse YAML workflow: %w", err) + } + + // Validate this is a GitHub Actions workflow (has 'on' or 'jobs' field) + _, hasOn := workflow["on"] + _, hasJobs := workflow["jobs"] + if !hasOn && !hasJobs { + return "", "", fmt.Errorf("not a valid GitHub Actions workflow: missing 'on' or 'jobs' field") + } + + // Extract jobs section + var jobsJSON string + if jobsValue, ok := workflow["jobs"]; ok { + if jobsMap, ok := jobsValue.(map[string]any); ok { + jobsBytes, err := json.Marshal(jobsMap) + if err != nil { + return "", "", fmt.Errorf("failed to marshal jobs to JSON: %w", err) + } + jobsJSON = string(jobsBytes) + yamlImportLog.Printf("Extracted %d jobs from YAML workflow", len(jobsMap)) + } + } + + // Extract services from job definitions + var servicesJSON string + if jobsValue, ok := workflow["jobs"]; ok { + if jobsMap, ok := jobsValue.(map[string]any); ok { + // Collect all services from all jobs + allServices := make(map[string]any) + for jobName, jobValue := range jobsMap { + if jobMap, ok := jobValue.(map[string]any); ok { + if servicesValue, ok := jobMap["services"]; ok { + if servicesMap, ok := servicesValue.(map[string]any); ok { + // Merge services from this job + for serviceName, serviceConfig := range servicesMap { + // Use job name as prefix to avoid conflicts + prefixedName := fmt.Sprintf("%s_%s", jobName, serviceName) + allServices[prefixedName] = serviceConfig + yamlImportLog.Printf("Found service: %s in job %s (stored as %s)", serviceName, jobName, prefixedName) + } + } + } + } + } + + if len(allServices) > 0 { + // Marshal to JSON for merging + servicesBytes, err := json.Marshal(allServices) + if err != nil { + yamlImportLog.Printf("Failed to marshal services to JSON: %v", err) + } else { + servicesJSON = string(servicesBytes) + yamlImportLog.Printf("Extracted %d services from YAML workflow", len(allServices)) + } + } + } + } + + return jobsJSON, servicesJSON, nil +} diff --git a/pkg/parser/yaml_import_e2e_test.go b/pkg/parser/yaml_import_e2e_test.go new file mode 100644 index 0000000000..e0574a9859 --- /dev/null +++ b/pkg/parser/yaml_import_e2e_test.go @@ -0,0 +1,193 @@ +//go:build !integration + +package parser + +import ( + "encoding/json" + "os" + "path/filepath" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestYAMLWorkflowE2EImport(t *testing.T) { + tmpDir := t.TempDir() + + // Create a YAML workflow with multiple jobs + yamlWorkflow := `name: CI Workflow +on: + push: + branches: [main] + pull_request: + +jobs: + lint: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v3 + - name: Run linter + run: npm run lint + + test: + runs-on: ubuntu-latest + needs: lint + steps: + - uses: actions/checkout@v3 + - name: Run tests + run: npm test + + build: + runs-on: ubuntu-latest + needs: test + steps: + - uses: actions/checkout@v3 + - name: Build + run: npm run build` + + yamlFile := filepath.Join(tmpDir, "ci-workflow.yml") + err := os.WriteFile(yamlFile, []byte(yamlWorkflow), 0644) + require.NoError(t, err, "Should create YAML workflow file") + + // Create a markdown workflow that imports the YAML workflow + mdWorkflow := `--- +name: Main Workflow +on: issue_comment +imports: + - ci-workflow.yml +jobs: + deploy: + runs-on: ubuntu-latest + needs: build + steps: + - name: Deploy + run: echo "Deploying..." 
+--- + +# Main Workflow + +This workflow imports a YAML workflow and adds additional jobs.` + + mdFile := filepath.Join(tmpDir, "main-workflow.md") + err = os.WriteFile(mdFile, []byte(mdWorkflow), 0644) + require.NoError(t, err, "Should create markdown workflow file") + + // Extract frontmatter and process imports + result, err := ExtractFrontmatterFromContent(mdWorkflow) + require.NoError(t, err, "Should extract frontmatter") + + importsResult, err := ProcessImportsFromFrontmatterWithManifest(result.Frontmatter, tmpDir, nil) + require.NoError(t, err, "Should process imports") + + // Verify that jobs were imported + assert.NotEmpty(t, importsResult.MergedJobs, "Should have merged jobs from YAML workflow") + + // Parse the merged jobs JSON and merge all lines + allJobs := make(map[string]any) + lines := []string{} + for _, line := range []string{importsResult.MergedJobs} { + if line != "" && line != "{}" { + lines = append(lines, line) + } + } + + // Since we might have multiple JSON objects on separate lines, merge them + for _, line := range lines { + // Split by newlines in case there are multiple JSON objects + for _, jsonLine := range []string{line} { + if jsonLine == "" || jsonLine == "{}" { + continue + } + var jobs map[string]any + if err := json.Unmarshal([]byte(jsonLine), &jobs); err == nil { + for k, v := range jobs { + allJobs[k] = v + } + } + } + } + + // Verify all three jobs from YAML workflow were imported + assert.Contains(t, allJobs, "lint", "Should contain lint job from YAML workflow") + assert.Contains(t, allJobs, "test", "Should contain test job from YAML workflow") + assert.Contains(t, allJobs, "build", "Should contain build job from YAML workflow") + + // Verify job details + lintJob, ok := allJobs["lint"].(map[string]any) + require.True(t, ok, "lint job should be a map") + assert.Equal(t, "ubuntu-latest", lintJob["runs-on"], "lint job should have correct runs-on") + + testJob, ok := allJobs["test"].(map[string]any) + require.True(t, ok, "test job should be a map") + assert.Equal(t, "ubuntu-latest", testJob["runs-on"], "test job should have correct runs-on") + + // Verify job dependencies + if needs, ok := testJob["needs"].(string); ok { + assert.Equal(t, "lint", needs, "test job should depend on lint") + } else if needsArr, ok := testJob["needs"].([]any); ok { + assert.Contains(t, needsArr, "lint", "test job should depend on lint") + } +} + +func TestYAMLWorkflowImportWithServices(t *testing.T) { + tmpDir := t.TempDir() + + // Create a YAML workflow with services + yamlWorkflow := `name: Database Test Workflow +on: push + +jobs: + db-test: + runs-on: ubuntu-latest + services: + postgres: + image: postgres:13 + env: + POSTGRES_PASSWORD: password + ports: + - 5432:5432 + redis: + image: redis:alpine + ports: + - 6379:6379 + steps: + - uses: actions/checkout@v3 + - name: Run database tests + run: npm run test:db` + + yamlFile := filepath.Join(tmpDir, "db-test.yml") + err := os.WriteFile(yamlFile, []byte(yamlWorkflow), 0644) + require.NoError(t, err, "Should create YAML workflow file") + + // Create a markdown workflow that imports the YAML workflow + mdWorkflow := `--- +name: Main Workflow +on: issue_comment +imports: + - db-test.yml +--- + +# Main Workflow + +This workflow imports a YAML workflow with services.` + + mdFile := filepath.Join(tmpDir, "main-workflow.md") + err = os.WriteFile(mdFile, []byte(mdWorkflow), 0644) + require.NoError(t, err, "Should create markdown workflow file") + + // Extract frontmatter and process imports + result, err := 
ExtractFrontmatterFromContent(mdWorkflow) + require.NoError(t, err, "Should extract frontmatter") + + importsResult, err := ProcessImportsFromFrontmatterWithManifest(result.Frontmatter, tmpDir, nil) + require.NoError(t, err, "Should process imports") + + // Verify that jobs were imported + assert.NotEmpty(t, importsResult.MergedJobs, "Should have merged jobs from YAML workflow") + + // Verify that services were imported + assert.NotEmpty(t, importsResult.MergedServices, "Should have merged services from YAML workflow") + assert.Contains(t, importsResult.MergedServices, "db-test_postgres", "Should contain prefixed postgres service") + assert.Contains(t, importsResult.MergedServices, "db-test_redis", "Should contain prefixed redis service") +} diff --git a/pkg/parser/yaml_import_test.go b/pkg/parser/yaml_import_test.go new file mode 100644 index 0000000000..309e1f9fa5 --- /dev/null +++ b/pkg/parser/yaml_import_test.go @@ -0,0 +1,279 @@ +//go:build !integration + +package parser + +import ( + "os" + "path/filepath" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestIsYAMLWorkflowFile(t *testing.T) { + tests := []struct { + name string + filePath string + expected bool + }{ + { + name: "yml file", + filePath: "workflow.yml", + expected: true, + }, + { + name: "yaml file", + filePath: "workflow.yaml", + expected: true, + }, + { + name: "lock yml file - should be rejected", + filePath: "workflow.lock.yml", + expected: false, + }, + { + name: "markdown file", + filePath: "workflow.md", + expected: false, + }, + { + name: "uppercase YML", + filePath: "workflow.YML", + expected: true, + }, + { + name: "uppercase LOCK.YML", + filePath: "workflow.LOCK.YML", + expected: false, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + result := isYAMLWorkflowFile(tt.filePath) + assert.Equal(t, tt.expected, result, "File: %s", tt.filePath) + }) + } +} + +func TestIsActionDefinitionFile(t *testing.T) { + tests := []struct { + name string + filename string + content string + expected bool + }{ + { + name: "action.yml by name", + filename: "action.yml", + content: `name: Test Action +runs: + using: node20 + main: index.js`, + expected: true, + }, + { + name: "action.yaml by name", + filename: "action.yaml", + content: `name: Test Action +runs: + using: node20 + main: index.js`, + expected: true, + }, + { + name: "workflow with jobs", + filename: "workflow.yml", + content: `name: Test Workflow +on: push +jobs: + test: + runs-on: ubuntu-latest`, + expected: false, + }, + { + name: "action by structure - has runs, no jobs", + filename: "my-action.yml", + content: `name: My Action +runs: + using: composite + steps: + - run: echo "test"`, + expected: true, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + result, err := isActionDefinitionFile(tt.filename, []byte(tt.content)) + require.NoError(t, err, "Should not error on valid YAML") + assert.Equal(t, tt.expected, result, "Filename: %s", tt.filename) + }) + } +} + +func TestProcessYAMLWorkflowImport(t *testing.T) { + tmpDir := t.TempDir() + + t.Run("simple workflow with jobs", func(t *testing.T) { + workflowContent := `name: Test Workflow +on: push +jobs: + test: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v3 + - run: echo "test" + build: + runs-on: ubuntu-latest + steps: + - run: echo "build"` + + workflowFile := filepath.Join(tmpDir, "test-workflow.yml") + err := os.WriteFile(workflowFile, []byte(workflowContent), 0644) + 
require.NoError(t, err, "Should write test workflow file") + + jobs, services, err := processYAMLWorkflowImport(workflowFile) + require.NoError(t, err, "Should process YAML workflow") + assert.NotEmpty(t, jobs, "Should extract jobs") + assert.Contains(t, jobs, "test", "Should contain test job") + assert.Contains(t, jobs, "build", "Should contain build job") + assert.Empty(t, services, "Should not have services") + }) + + t.Run("workflow with services", func(t *testing.T) { + workflowContent := `name: Test Workflow +on: push +jobs: + test: + runs-on: ubuntu-latest + services: + postgres: + image: postgres:13 + env: + POSTGRES_PASSWORD: password + steps: + - run: echo "test"` + + workflowFile := filepath.Join(tmpDir, "test-services.yml") + err := os.WriteFile(workflowFile, []byte(workflowContent), 0644) + require.NoError(t, err, "Should write test workflow file") + + jobs, services, err := processYAMLWorkflowImport(workflowFile) + require.NoError(t, err, "Should process YAML workflow") + assert.NotEmpty(t, jobs, "Should extract jobs") + assert.NotEmpty(t, services, "Should extract services") + assert.Contains(t, services, "test_postgres", "Should contain prefixed service name") + }) + + t.Run("reject action definition", func(t *testing.T) { + actionContent := `name: Test Action +runs: + using: node20 + main: index.js` + + actionFile := filepath.Join(tmpDir, "action.yml") + err := os.WriteFile(actionFile, []byte(actionContent), 0644) + require.NoError(t, err, "Should write test action file") + + _, _, err = processYAMLWorkflowImport(actionFile) + require.Error(t, err, "Should reject action definition") + assert.Contains(t, err.Error(), "cannot import action definition", "Error should mention action definition") + }) + + t.Run("reject invalid workflow", func(t *testing.T) { + invalidContent := `name: Not a Workflow +description: This is not a valid workflow` + + invalidFile := filepath.Join(tmpDir, "invalid.yml") + err := os.WriteFile(invalidFile, []byte(invalidContent), 0644) + require.NoError(t, err, "Should write test invalid file") + + _, _, err = processYAMLWorkflowImport(invalidFile) + require.Error(t, err, "Should reject invalid workflow") + assert.Contains(t, err.Error(), "not a valid GitHub Actions workflow", "Error should mention invalid workflow") + }) +} + +func TestImportYAMLWorkflow(t *testing.T) { + tmpDir := t.TempDir() + + // Create a simple YAML workflow + yamlWorkflow := `name: CI Workflow +on: push +jobs: + test: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v3 + - run: npm test` + + yamlFile := filepath.Join(tmpDir, "ci.yml") + err := os.WriteFile(yamlFile, []byte(yamlWorkflow), 0644) + require.NoError(t, err, "Should create YAML workflow file") + + // Create a markdown workflow that imports the YAML workflow + mdWorkflow := `--- +name: Main Workflow +on: issue_comment +imports: + - ci.yml +--- + +# Main Workflow +This imports a YAML workflow.` + + mdFile := filepath.Join(tmpDir, "main.md") + err = os.WriteFile(mdFile, []byte(mdWorkflow), 0644) + require.NoError(t, err, "Should create markdown workflow file") + + // Process imports + result, err := ExtractFrontmatterFromContent(mdWorkflow) + require.NoError(t, err, "Should extract frontmatter") + + importsResult, err := ProcessImportsFromFrontmatterWithManifest(result.Frontmatter, tmpDir, nil) + require.NoError(t, err, "Should process imports") + + // Verify jobs were imported + assert.NotEmpty(t, importsResult.MergedJobs, "Should have merged jobs from YAML workflow") + assert.Contains(t, 
importsResult.MergedJobs, "test", "Should contain test job from YAML workflow") +} + +func TestRejectLockYMLImport(t *testing.T) { + tmpDir := t.TempDir() + + // Create a lock file + lockContent := `# This is a compiled lock file +name: Compiled Workflow +jobs: + test: + runs-on: ubuntu-latest` + + lockFile := filepath.Join(tmpDir, "workflow.lock.yml") + err := os.WriteFile(lockFile, []byte(lockContent), 0644) + require.NoError(t, err, "Should create lock file") + + // Create a markdown workflow that tries to import the lock file + mdWorkflow := `--- +name: Main Workflow +on: push +imports: + - workflow.lock.yml +--- + +# Main Workflow` + + mdFile := filepath.Join(tmpDir, "main.md") + err = os.WriteFile(mdFile, []byte(mdWorkflow), 0644) + require.NoError(t, err, "Should create markdown workflow file") + + // Process imports - should fail + result, err := ExtractFrontmatterFromContent(mdWorkflow) + require.NoError(t, err, "Should extract frontmatter") + + _, err = ProcessImportsFromFrontmatterWithManifest(result.Frontmatter, tmpDir, nil) + require.Error(t, err, "Should reject .lock.yml import") + assert.Contains(t, err.Error(), "cannot import .lock.yml files", "Error should mention .lock.yml rejection") + assert.Contains(t, err.Error(), "Import the source .md file instead", "Error should suggest importing .md file") +} diff --git a/pkg/workflow/action_pins_test.go b/pkg/workflow/action_pins_test.go index f41ea84b88..cd88cf3072 100644 --- a/pkg/workflow/action_pins_test.go +++ b/pkg/workflow/action_pins_test.go @@ -297,9 +297,9 @@ func TestApplyActionPinToStep(t *testing.T) { func TestGetActionPinsSorting(t *testing.T) { pins := getActionPins() - // Verify we got all the pins (42 as of January 2026) - if len(pins) != 42 { - t.Errorf("getActionPins() returned %d pins, expected 42", len(pins)) + // Verify we got all the pins (43 as of January 2026) + if len(pins) != 43 { + t.Errorf("getActionPins() returned %d pins, expected 43", len(pins)) } // Verify they are sorted by version (descending) then by repository name (ascending) diff --git a/pkg/workflow/compiler_orchestrator_workflow.go b/pkg/workflow/compiler_orchestrator_workflow.go index 877b74e042..323ad641c1 100644 --- a/pkg/workflow/compiler_orchestrator_workflow.go +++ b/pkg/workflow/compiler_orchestrator_workflow.go @@ -1,6 +1,7 @@ package workflow import ( + "encoding/json" "fmt" "strings" @@ -325,6 +326,54 @@ func (c *Compiler) processAndMergeServices(frontmatter map[string]any, workflowD } } +// mergeJobsFromYAMLImports merges jobs from imported YAML workflows with main workflow jobs +// Main workflow jobs take precedence over imported jobs (override behavior) +func (c *Compiler) mergeJobsFromYAMLImports(mainJobs map[string]any, mergedJobsJSON string) map[string]any { + orchestratorWorkflowLog.Print("Merging jobs from imported YAML workflows") + + if mergedJobsJSON == "" || mergedJobsJSON == "{}" { + orchestratorWorkflowLog.Print("No imported jobs to merge") + return mainJobs + } + + // Initialize result with main jobs or create empty map + result := make(map[string]any) + for k, v := range mainJobs { + result[k] = v + } + + // Split by newlines to handle multiple JSON objects from different imports + lines := strings.Split(mergedJobsJSON, "\n") + orchestratorWorkflowLog.Printf("Processing %d job definition lines", len(lines)) + + for _, line := range lines { + line = strings.TrimSpace(line) + if line == "" || line == "{}" { + continue + } + + // Parse JSON line to map + var importedJobs map[string]any + if err := 
json.Unmarshal([]byte(line), &importedJobs); err != nil { + orchestratorWorkflowLog.Printf("Skipping malformed job entry: %v", err) + continue + } + + // Merge jobs - main workflow jobs take precedence (don't override) + for jobName, jobConfig := range importedJobs { + if _, exists := result[jobName]; !exists { + orchestratorWorkflowLog.Printf("Adding imported job: %s", jobName) + result[jobName] = jobConfig + } else { + orchestratorWorkflowLog.Printf("Skipping imported job %s (already defined in main workflow)", jobName) + } + } + } + + orchestratorWorkflowLog.Printf("Successfully merged jobs: total=%d, imported=%d", len(result), len(result)-len(mainJobs)) + return result +} + // extractAdditionalConfigurations extracts cache-memory, repo-memory, safe-inputs, and safe-outputs configurations func (c *Compiler) extractAdditionalConfigurations( frontmatter map[string]any, @@ -358,6 +407,12 @@ func (c *Compiler) extractAdditionalConfigurations( // Extract and process safe-inputs and safe-outputs workflowData.Command, workflowData.CommandEvents = c.extractCommandConfig(frontmatter) workflowData.Jobs = c.extractJobsFromFrontmatter(frontmatter) + + // Merge jobs from imported YAML workflows + if importsResult.MergedJobs != "" && importsResult.MergedJobs != "{}" { + workflowData.Jobs = c.mergeJobsFromYAMLImports(workflowData.Jobs, importsResult.MergedJobs) + } + workflowData.Roles = c.extractRoles(frontmatter) workflowData.Bots = c.extractBots(frontmatter) diff --git a/pkg/workflow/data/action_pins.json b/pkg/workflow/data/action_pins.json index 80130e4a1a..edbd2e8785 100644 --- a/pkg/workflow/data/action_pins.json +++ b/pkg/workflow/data/action_pins.json @@ -25,6 +25,11 @@ "version": "v4.3.0", "sha": "0057852bfaa89a56745cba8c7296529d2fc39830" }, + "actions/checkout@v3": { + "repo": "actions/checkout", + "version": "v3", + "sha": "f43a0e5ff2bd294095638e18286ca9a3d1956744" + }, "actions/checkout@v4": { "repo": "actions/checkout", "version": "v4", @@ -100,7 +105,7 @@ "version": "v5.6.0", "sha": "a26af69be951a213d495a4c3e4e4022e16d87065" }, - "actions/upload-artifact@v4": { + "actions/upload-artifact@v4.6.2": { "repo": "actions/upload-artifact", "version": "v4.6.2", "sha": "ea165f8d65b6e75b540449e92b4886f43607fa02" diff --git a/pkg/workflow/template_expression_integration_test.go b/pkg/workflow/template_expression_integration_test.go index 68e00e186b..5c1a8aceeb 100644 --- a/pkg/workflow/template_expression_integration_test.go +++ b/pkg/workflow/template_expression_integration_test.go @@ -96,19 +96,26 @@ ${{ needs.activation.outputs.text }} } // Verify GitHub expressions are properly replaced with placeholders in template conditionals - // After the fix, expressions should be replaced with __GH_AW_*__ placeholders + // The GitHub context section (built-in) should have placeholders + // User markdown content is loaded via runtime-import and processed at runtime expectedPlaceholderExpressions := []string{ "{{#if __GH_AW_GITHUB_EVENT_ISSUE_NUMBER__ }}", "{{#if __GH_AW_GITHUB_EVENT_PULL_REQUEST_NUMBER__ }}", - "{{#if __GH_AW_NEEDS_ACTIVATION_OUTPUTS_TEXT__ }}", } for _, expectedExpr := range expectedPlaceholderExpressions { if !strings.Contains(compiledStr, expectedExpr) { - t.Errorf("Compiled workflow should contain placeholder expression: %s", expectedExpr) + t.Errorf("Compiled workflow should contain placeholder expression in GitHub context: %s", expectedExpr) } } + // Verify that the main workflow content is loaded via runtime-import + // Template conditionals in the user's markdown (like 
needs.activation.outputs.text) + // are processed at runtime by the JavaScript runtime_import helper + if !strings.Contains(compiledStr, "{{#runtime-import") { + t.Error("Compiled workflow should contain runtime-import macro for main workflow content") + } + // Verify that expressions OUTSIDE template conditionals are NOT double-wrapped // These should remain as ${{ github.event.issue.number }} (not wrapped again) if strings.Contains(compiledStr, "${{ ${{ github.event.issue.number }}") { @@ -271,27 +278,17 @@ Steps expression - will be wrapped. compiledStr := string(compiledYAML) - // Verify all expressions are replaced with placeholders (correct behavior) + // Verify GitHub expressions in the GitHub context section are replaced with placeholders + // (These are in the built-in context, not the user's markdown) if !strings.Contains(compiledStr, "{{#if __GH_AW_GITHUB_EVENT_ISSUE_NUMBER__ }}") { - t.Error("GitHub expression should be replaced with placeholder") - } - - if !strings.Contains(compiledStr, "{{#if __GH_AW_STEPS_MY_STEP_OUTPUTS_VALUE__ }}") { - t.Error("Steps expression should be replaced with placeholder") - } - - // Verify that literal values are also replaced with placeholders - // true and false literals get normalized to __GH_AW_TRUE__ and __GH_AW_FALSE__ - if !strings.Contains(compiledStr, "{{#if __GH_AW_TRUE__ }}") { - t.Error("Literal 'true' should be replaced with placeholder") - } - - if !strings.Contains(compiledStr, "{{#if __GH_AW_FALSE__ }}") { - t.Error("Literal 'false' should be replaced with placeholder") + t.Error("GitHub context should contain placeholder for github.event.issue.number") } - if !strings.Contains(compiledStr, "{{#if __GH_AW_SOME_VARIABLE__ }}") { - t.Error("Unknown variable should be replaced with placeholder") + // Verify that the main workflow content is loaded via runtime-import + // Template conditionals in the user's markdown (like steps, true/false literals, etc.) + // are processed at runtime by the JavaScript runtime_import helper + if !strings.Contains(compiledStr, "{{#runtime-import") { + t.Error("Compiled workflow should contain runtime-import macro for main workflow content") } // Make sure we didn't create invalid double-wrapping