diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index ac09a02..17324b1 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -6,6 +6,7 @@ env: on: pull_request: branches: [ main ] + workflow_dispatch: # Tier 1 also runs in merge queue context so the same unit + build checks # execute against the tentative merge commit that the queue creates. See # microsoft/apm#770 for the design. diff --git a/.github/workflows/evergreen.lock.yml b/.github/workflows/evergreen.lock.yml new file mode 100644 index 0000000..248eddf --- /dev/null +++ b/.github/workflows/evergreen.lock.yml @@ -0,0 +1,1631 @@ +# gh-aw-metadata: {"schema_version":"v3","frontmatter_hash":"6cf7a82013fa0fbafe945ccafdd131565e5254fa8cf46c850ce76d7c7cff9166","strict":true,"agent_id":"copilot"} +# gh-aw-manifest: {"version":1,"secrets":["GH_AW_CI_TRIGGER_TOKEN","GH_AW_GITHUB_MCP_SERVER_TOKEN","GH_AW_GITHUB_TOKEN","GITHUB_TOKEN"],"actions":[{"repo":"actions/checkout","sha":"de0fac2e4500dabe0009e67214ff5f5447ce83dd","version":"v6.0.2"},{"repo":"actions/download-artifact","sha":"3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c","version":"v8.0.1"},{"repo":"actions/github-script","sha":"373c709c69115d41ff229c7e5df9f8788daa9553","version":"v9"},{"repo":"actions/github-script","sha":"3a2844b7e9c422d3c10d287c895573f7108da1b3","version":"v9.0.0"},{"repo":"actions/setup-node","sha":"48b55a011bda9f5d6aeb4c2d9c7362e8dae4041e","version":"v6.4.0"},{"repo":"actions/upload-artifact","sha":"043fb46d1a93c77aae656e7c1c64a875d1fc6a0a","version":"v7.0.1"}],"containers":[{"image":"ghcr.io/github/gh-aw-firewall/agent:0.25.44"},{"image":"ghcr.io/github/gh-aw-firewall/api-proxy:0.25.44"},{"image":"ghcr.io/github/gh-aw-firewall/squid:0.25.44"},{"image":"ghcr.io/github/gh-aw-mcpg:v0.3.6","digest":"sha256:2bb8eef86006a4c5963c55616a9c51c32f27bfdecb023b8aa6f91f6718d9171c","pinned_image":"ghcr.io/github/gh-aw-mcpg:v0.3.6@sha256:2bb8eef86006a4c5963c55616a9c51c32f27bfdecb023b8aa6f91f6718d9171c"},{"image":"ghcr.io/githu
b/github-mcp-server:v1.0.3","digest":"sha256:2ac27ef03461ef2b877031b838a7d1fd7f12b12d4ace7796d8cad91446d55959","pinned_image":"ghcr.io/github/github-mcp-server:v1.0.3@sha256:2ac27ef03461ef2b877031b838a7d1fd7f12b12d4ace7796d8cad91446d55959"},{"image":"node:lts-alpine","digest":"sha256:d1b3b4da11eefd5941e7f0b9cf17783fc99d9c6fc34884a665f40a06dbdfc94f","pinned_image":"node:lts-alpine@sha256:d1b3b4da11eefd5941e7f0b9cf17783fc99d9c6fc34884a665f40a06dbdfc94f"}]} +# ___ _ _ +# / _ \ | | (_) +# | |_| | __ _ ___ _ __ | |_ _ ___ +# | _ |/ _` |/ _ \ '_ \| __| |/ __| +# | | | | (_| | __/ | | | |_| | (__ +# \_| |_/\__, |\___|_| |_|\__|_|\___| +# __/ | +# _ _ |___/ +# | | | | / _| | +# | | | | ___ _ __ _ __| |_| | _____ ____ +# | |/\| |/ _ \ '__| |/ /| _| |/ _ \ \ /\ / / ___| +# \ /\ / (_) | | | | ( | | | | (_) \ V V /\__ \ +# \/ \/ \___/|_| |_|\_\|_| |_|\___/ \_/\_/ |___/ +# +# This file was automatically generated by gh-aw. DO NOT EDIT. +# +# To update this file, edit the corresponding .md file and run: +# gh aw compile +# Not all edits will cause changes to this file. +# +# For more information: https://github.github.com/gh-aw/introduction/overview/ +# +# Evergreen -- keeps pull requests healthy by automatically fixing merge conflicts +# and failing CI checks. Runs on a short schedule, deterministically selects one +# PR per run, and gives up after 5 attempts that don't improve the same repo state. 
+# +# Resolved workflow manifest: +# Imports: +# - shared/reporting.md +# +# Secrets used: +# - GH_AW_CI_TRIGGER_TOKEN +# - GH_AW_GITHUB_MCP_SERVER_TOKEN +# - GH_AW_GITHUB_TOKEN +# - GITHUB_TOKEN +# +# Custom actions used: +# - actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 +# - actions/download-artifact@3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c # v8.0.1 +# - actions/github-script@373c709c69115d41ff229c7e5df9f8788daa9553 # v9 +# - actions/github-script@3a2844b7e9c422d3c10d287c895573f7108da1b3 # v9.0.0 +# - actions/setup-node@48b55a011bda9f5d6aeb4c2d9c7362e8dae4041e # v6.4.0 +# - actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a # v7.0.1 +# +# Container images used: +# - ghcr.io/github/gh-aw-firewall/agent:0.25.44 +# - ghcr.io/github/gh-aw-firewall/api-proxy:0.25.44 +# - ghcr.io/github/gh-aw-firewall/squid:0.25.44 +# - ghcr.io/github/gh-aw-mcpg:v0.3.6@sha256:2bb8eef86006a4c5963c55616a9c51c32f27bfdecb023b8aa6f91f6718d9171c +# - ghcr.io/github/github-mcp-server:v1.0.3@sha256:2ac27ef03461ef2b877031b838a7d1fd7f12b12d4ace7796d8cad91446d55959 +# - node:lts-alpine@sha256:d1b3b4da11eefd5941e7f0b9cf17783fc99d9c6fc34884a665f40a06dbdfc94f + +name: "Evergreen -- PR Health Keeper" +"on": + schedule: + - cron: "*/5 * * * *" + # Friendly format: every 5m + workflow_dispatch: + inputs: + aw_context: + default: "" + description: Agent caller context (used internally by Agentic Workflows). 
+ required: false + type: string + pr_number: + description: Fix a specific PR by number (bypasses scheduling) + required: false + type: string + +permissions: {} + +concurrency: + group: "gh-aw-${{ github.workflow }}" + +run-name: "Evergreen -- PR Health Keeper" + +jobs: + activation: + runs-on: ubuntu-slim + permissions: + actions: read + contents: read + outputs: + comment_id: "" + comment_repo: "" + engine_id: ${{ steps.generate_aw_info.outputs.engine_id }} + lockdown_check_failed: ${{ steps.generate_aw_info.outputs.lockdown_check_failed == 'true' }} + model: ${{ steps.generate_aw_info.outputs.model }} + setup-parent-span-id: ${{ steps.setup.outputs.parent-span-id || steps.setup.outputs.span-id }} + setup-span-id: ${{ steps.setup.outputs.span-id }} + setup-trace-id: ${{ steps.setup.outputs.trace-id }} + stale_lock_file_failed: ${{ steps.check-lock-file.outputs.stale_lock_file_failed == 'true' }} + steps: + - name: Checkout actions folder + uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 + with: + repository: github/gh-aw + sparse-checkout: | + actions + persist-credentials: false + - name: Setup Scripts + id: setup + uses: ./actions/setup + with: + destination: ${{ runner.temp }}/gh-aw/actions + job-name: ${{ github.job }} + env: + GH_AW_SETUP_WORKFLOW_NAME: "Evergreen -- PR Health Keeper" + GH_AW_CURRENT_WORKFLOW_REF: ${{ github.repository }}/.github/workflows/evergreen.lock.yml@${{ github.ref }} + GH_AW_INFO_VERSION: "1.0.43" + - name: Generate agentic run info + id: generate_aw_info + env: + GH_AW_INFO_ENGINE_ID: "copilot" + GH_AW_INFO_ENGINE_NAME: "GitHub Copilot CLI" + GH_AW_INFO_MODEL: ${{ vars.GH_AW_MODEL_AGENT_COPILOT || 'claude-sonnet-4.6' }} + GH_AW_INFO_VERSION: "1.0.43" + GH_AW_INFO_AGENT_VERSION: "1.0.43" + GH_AW_INFO_WORKFLOW_NAME: "Evergreen -- PR Health Keeper" + GH_AW_INFO_EXPERIMENTAL: "false" + GH_AW_INFO_SUPPORTS_TOOLS_ALLOWLIST: "true" + GH_AW_INFO_STAGED: "false" + GH_AW_INFO_ALLOWED_DOMAINS: 
'["defaults","node","go","releaseassets.githubusercontent.com"]' + GH_AW_INFO_FIREWALL_ENABLED: "true" + GH_AW_INFO_AWF_VERSION: "v0.25.44" + GH_AW_INFO_AWMG_VERSION: "" + GH_AW_INFO_FIREWALL_TYPE: "squid" + GH_AW_COMPILED_STRICT: "true" + uses: actions/github-script@3a2844b7e9c422d3c10d287c895573f7108da1b3 # v9.0.0 + with: + script: | + const { setupGlobals } = require('${{ runner.temp }}/gh-aw/actions/setup_globals.cjs'); + setupGlobals(core, github, context, exec, io, getOctokit); + const { main } = require('${{ runner.temp }}/gh-aw/actions/generate_aw_info.cjs'); + await main(core, context); + - name: Checkout .github and .agents folders + uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 + with: + persist-credentials: false + sparse-checkout: | + .github + .agents + actions/setup + .claude + .codex + .crush + .gemini + .opencode + .pi + sparse-checkout-cone-mode: true + fetch-depth: 1 + - name: Save agent config folders for base branch restoration + env: + GH_AW_AGENT_FOLDERS: ".agents .claude .codex .crush .gemini .github .opencode .pi" + GH_AW_AGENT_FILES: ".crush.json AGENTS.md CLAUDE.md GEMINI.md PI.md opencode.jsonc" + # poutine:ignore untrusted_checkout_exec + run: bash "${RUNNER_TEMP}/gh-aw/actions/save_base_github_folders.sh" + - name: Check workflow lock file + id: check-lock-file + uses: actions/github-script@3a2844b7e9c422d3c10d287c895573f7108da1b3 # v9.0.0 + env: + GH_AW_WORKFLOW_FILE: "evergreen.lock.yml" + GH_AW_CONTEXT_WORKFLOW_REF: "${{ github.workflow_ref }}" + with: + script: | + const { setupGlobals } = require('${{ runner.temp }}/gh-aw/actions/setup_globals.cjs'); + setupGlobals(core, github, context, exec, io, getOctokit); + const { main } = require('${{ runner.temp }}/gh-aw/actions/check_workflow_timestamp_api.cjs'); + await main(); + - name: Create prompt with built-in context + env: + GH_AW_PROMPT: /tmp/gh-aw/aw-prompts/prompt.txt + GH_AW_SAFE_OUTPUTS: ${{ runner.temp }}/gh-aw/safeoutputs/outputs.jsonl + 
GH_AW_EXPR_1A3A194A: ${{ github.event.discussion.number || (fromJSON(github.event.inputs.aw_context || github.event.client_payload.aw_context || '{}').item_type == 'discussion' && fromJSON(github.event.inputs.aw_context || github.event.client_payload.aw_context || '{}').item_number) }} + GH_AW_EXPR_463A214A: ${{ github.event.pull_request.number || (fromJSON(github.event.inputs.aw_context || github.event.client_payload.aw_context || '{}').item_type == 'pull_request' && fromJSON(github.event.inputs.aw_context || github.event.client_payload.aw_context || '{}').item_number) }} + GH_AW_EXPR_802A9F6A: ${{ github.event.issue.number || (fromJSON(github.event.inputs.aw_context || github.event.client_payload.aw_context || '{}').item_type == 'issue' && fromJSON(github.event.inputs.aw_context || github.event.client_payload.aw_context || '{}').item_number) }} + GH_AW_EXPR_FF1D34CE: ${{ github.event.comment.id || fromJSON(github.event.inputs.aw_context || github.event.client_payload.aw_context || '{}').comment_id }} + GH_AW_GITHUB_ACTOR: ${{ github.actor }} + GH_AW_GITHUB_REPOSITORY: ${{ github.repository }} + GH_AW_GITHUB_RUN_ID: ${{ github.run_id }} + GH_AW_GITHUB_WORKSPACE: ${{ github.workspace }} + GH_AW_WIKI_NOTE: ${{ '' }} + # poutine:ignore untrusted_checkout_exec + run: | + bash "${RUNNER_TEMP}/gh-aw/actions/create_prompt_first.sh" + { + cat << 'GH_AW_PROMPT_1f903275043763b7_EOF' + + GH_AW_PROMPT_1f903275043763b7_EOF + cat "${RUNNER_TEMP}/gh-aw/prompts/xpia.md" + cat "${RUNNER_TEMP}/gh-aw/prompts/temp_folder_prompt.md" + cat "${RUNNER_TEMP}/gh-aw/prompts/markdown.md" + cat "${RUNNER_TEMP}/gh-aw/prompts/repo_memory_prompt.md" + cat "${RUNNER_TEMP}/gh-aw/prompts/safe_outputs_prompt.md" + cat << 'GH_AW_PROMPT_1f903275043763b7_EOF' + + Tools: add_comment(max:3), push_to_pull_request_branch(max:3), missing_tool, missing_data, noop + GH_AW_PROMPT_1f903275043763b7_EOF + cat "${RUNNER_TEMP}/gh-aw/prompts/safe_outputs_push_to_pr_branch.md" + cat << 
'GH_AW_PROMPT_1f903275043763b7_EOF' + + GH_AW_PROMPT_1f903275043763b7_EOF + cat "${RUNNER_TEMP}/gh-aw/prompts/mcp_cli_tools_prompt.md" + cat << 'GH_AW_PROMPT_1f903275043763b7_EOF' + + The following GitHub context information is available for this workflow: + {{#if github.actor}} + - **actor**: __GH_AW_GITHUB_ACTOR__ + {{/if}} + {{#if github.repository}} + - **repository**: __GH_AW_GITHUB_REPOSITORY__ + {{/if}} + {{#if github.workspace}} + - **workspace**: __GH_AW_GITHUB_WORKSPACE__ + {{/if}} + {{#if github.event.issue.number || (github.aw.context.item_type == 'issue' && github.aw.context.item_number)}} + - **issue-number**: #__GH_AW_EXPR_802A9F6A__ + {{/if}} + {{#if github.event.discussion.number || (github.aw.context.item_type == 'discussion' && github.aw.context.item_number)}} + - **discussion-number**: #__GH_AW_EXPR_1A3A194A__ + {{/if}} + {{#if github.event.pull_request.number || (github.aw.context.item_type == 'pull_request' && github.aw.context.item_number)}} + - **pull-request-number**: #__GH_AW_EXPR_463A214A__ + {{/if}} + {{#if github.event.comment.id || github.aw.context.comment_id}} + - **comment-id**: __GH_AW_EXPR_FF1D34CE__ + {{/if}} + {{#if github.run_id}} + - **workflow-run-id**: __GH_AW_GITHUB_RUN_ID__ + {{/if}} + - **checkouts**: The following repositories have been checked out and are available in the workspace: + - `$GITHUB_WORKSPACE` → `__GH_AW_GITHUB_REPOSITORY__` (cwd) [full history, all branches available as remote-tracking refs] [additional refs fetched: *] + - **Note**: If a branch you need is not in the list above and is not listed as an additional fetched ref, it has NOT been checked out. For private repositories you cannot fetch it without proper authentication. If the branch is required and not available, exit with an error and ask the user to add it to the `fetch:` option of the `checkout:` configuration (e.g., `fetch: ["refs/pulls/open/*"]` for all open PR refs, or `fetch: ["main", "feature/my-branch"]` for specific branches). 
+ + + GH_AW_PROMPT_1f903275043763b7_EOF + cat "${RUNNER_TEMP}/gh-aw/prompts/github_mcp_tools_with_safeoutputs_prompt.md" + cat << 'GH_AW_PROMPT_1f903275043763b7_EOF' + + {{#runtime-import .github/workflows/shared/reporting.md}} + {{#runtime-import .github/workflows/evergreen.md}} + GH_AW_PROMPT_1f903275043763b7_EOF + } > "$GH_AW_PROMPT" + - name: Interpolate variables and render templates + uses: actions/github-script@3a2844b7e9c422d3c10d287c895573f7108da1b3 # v9.0.0 + env: + GH_AW_PROMPT: /tmp/gh-aw/aw-prompts/prompt.txt + GH_AW_ENGINE_ID: "copilot" + with: + script: | + const { setupGlobals } = require('${{ runner.temp }}/gh-aw/actions/setup_globals.cjs'); + setupGlobals(core, github, context, exec, io, getOctokit); + const { main } = require('${{ runner.temp }}/gh-aw/actions/interpolate_prompt.cjs'); + await main(); + - name: Substitute placeholders + uses: actions/github-script@3a2844b7e9c422d3c10d287c895573f7108da1b3 # v9.0.0 + env: + GH_AW_PROMPT: /tmp/gh-aw/aw-prompts/prompt.txt + GH_AW_EXPR_1A3A194A: ${{ github.event.discussion.number || (fromJSON(github.event.inputs.aw_context || github.event.client_payload.aw_context || '{}').item_type == 'discussion' && fromJSON(github.event.inputs.aw_context || github.event.client_payload.aw_context || '{}').item_number) }} + GH_AW_EXPR_463A214A: ${{ github.event.pull_request.number || (fromJSON(github.event.inputs.aw_context || github.event.client_payload.aw_context || '{}').item_type == 'pull_request' && fromJSON(github.event.inputs.aw_context || github.event.client_payload.aw_context || '{}').item_number) }} + GH_AW_EXPR_802A9F6A: ${{ github.event.issue.number || (fromJSON(github.event.inputs.aw_context || github.event.client_payload.aw_context || '{}').item_type == 'issue' && fromJSON(github.event.inputs.aw_context || github.event.client_payload.aw_context || '{}').item_number) }} + GH_AW_EXPR_FF1D34CE: ${{ github.event.comment.id || fromJSON(github.event.inputs.aw_context || github.event.client_payload.aw_context 
|| '{}').comment_id }} + GH_AW_GITHUB_ACTOR: ${{ github.actor }} + GH_AW_GITHUB_REPOSITORY: ${{ github.repository }} + GH_AW_GITHUB_RUN_ID: ${{ github.run_id }} + GH_AW_GITHUB_WORKSPACE: ${{ github.workspace }} + GH_AW_MCP_CLI_SERVERS_LIST: '- `safeoutputs` — run `safeoutputs --help` to see available tools' + GH_AW_MEMORY_BRANCH_NAME: 'memory/evergreen' + GH_AW_MEMORY_CONSTRAINTS: "\n\n**Constraints:**\n- **Allowed Files**: Only files matching patterns: *.md\n- **Max File Size**: 10240 bytes (0.01 MB) per file\n- **Max File Count**: 100 files per commit\n- **Max Patch Size**: 10240 bytes (10 KB) total per push (max: 100 KB)\n" + GH_AW_MEMORY_DESCRIPTION: '' + GH_AW_MEMORY_DIR: '/tmp/gh-aw/repo-memory/default/' + GH_AW_MEMORY_TARGET_REPO: ' of the current repository' + GH_AW_WIKI_NOTE: '' + with: + script: | + const { setupGlobals } = require('${{ runner.temp }}/gh-aw/actions/setup_globals.cjs'); + setupGlobals(core, github, context, exec, io, getOctokit); + + const substitutePlaceholders = require('${{ runner.temp }}/gh-aw/actions/substitute_placeholders.cjs'); + + // Call the substitution function + return await substitutePlaceholders({ + file: process.env.GH_AW_PROMPT, + substitutions: { + GH_AW_EXPR_1A3A194A: process.env.GH_AW_EXPR_1A3A194A, + GH_AW_EXPR_463A214A: process.env.GH_AW_EXPR_463A214A, + GH_AW_EXPR_802A9F6A: process.env.GH_AW_EXPR_802A9F6A, + GH_AW_EXPR_FF1D34CE: process.env.GH_AW_EXPR_FF1D34CE, + GH_AW_GITHUB_ACTOR: process.env.GH_AW_GITHUB_ACTOR, + GH_AW_GITHUB_REPOSITORY: process.env.GH_AW_GITHUB_REPOSITORY, + GH_AW_GITHUB_RUN_ID: process.env.GH_AW_GITHUB_RUN_ID, + GH_AW_GITHUB_WORKSPACE: process.env.GH_AW_GITHUB_WORKSPACE, + GH_AW_MCP_CLI_SERVERS_LIST: process.env.GH_AW_MCP_CLI_SERVERS_LIST, + GH_AW_MEMORY_BRANCH_NAME: process.env.GH_AW_MEMORY_BRANCH_NAME, + GH_AW_MEMORY_CONSTRAINTS: process.env.GH_AW_MEMORY_CONSTRAINTS, + GH_AW_MEMORY_DESCRIPTION: process.env.GH_AW_MEMORY_DESCRIPTION, + GH_AW_MEMORY_DIR: process.env.GH_AW_MEMORY_DIR, + 
GH_AW_MEMORY_TARGET_REPO: process.env.GH_AW_MEMORY_TARGET_REPO, + GH_AW_WIKI_NOTE: process.env.GH_AW_WIKI_NOTE + } + }); + - name: Validate prompt placeholders + env: + GH_AW_PROMPT: /tmp/gh-aw/aw-prompts/prompt.txt + # poutine:ignore untrusted_checkout_exec + run: bash "${RUNNER_TEMP}/gh-aw/actions/validate_prompt_placeholders.sh" + - name: Print prompt + env: + GH_AW_PROMPT: /tmp/gh-aw/aw-prompts/prompt.txt + # poutine:ignore untrusted_checkout_exec + run: bash "${RUNNER_TEMP}/gh-aw/actions/print_prompt_summary.sh" + - name: Upload activation artifact + if: success() + uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a # v7.0.1 + with: + name: activation + include-hidden-files: true + path: | + /tmp/gh-aw/aw_info.json + /tmp/gh-aw/aw-prompts/prompt.txt + /tmp/gh-aw/aw-prompts/prompt-template.txt + /tmp/gh-aw/aw-prompts/prompt-import-tree.json + /tmp/gh-aw/github_rate_limits.jsonl + /tmp/gh-aw/base + /tmp/gh-aw/.github/agents + if-no-files-found: ignore + retention-days: 1 + + agent: + needs: activation + runs-on: ubuntu-latest + permissions: + actions: read + attestations: read + checks: read + contents: read + copilot-requests: write + deployments: read + discussions: read + issues: read + models: read + packages: read + pages: read + pull-requests: read + repository-projects: read + security-events: read + statuses: read + vulnerability-alerts: read + concurrency: + group: "gh-aw-copilot-${{ github.workflow }}" + env: + DEFAULT_BRANCH: ${{ github.event.repository.default_branch }} + GH_AW_ASSETS_ALLOWED_EXTS: "" + GH_AW_ASSETS_BRANCH: "" + GH_AW_ASSETS_MAX_SIZE_KB: 0 + GH_AW_MCP_LOG_DIR: /tmp/gh-aw/mcp-logs/safeoutputs + GH_AW_WORKFLOW_ID_SANITIZED: evergreen + outputs: + agentic_engine_timeout: ${{ steps.detect-copilot-errors.outputs.agentic_engine_timeout || 'false' }} + checkout_pr_success: ${{ steps.checkout-pr.outputs.checkout_pr_success || 'true' }} + effective_tokens: ${{ steps.parse-mcp-gateway.outputs.effective_tokens }} + 
effective_tokens_rate_limit_error: ${{ steps.parse-mcp-gateway.outputs.effective_tokens_rate_limit_error || 'false' }} + has_patch: ${{ steps.collect_output.outputs.has_patch }} + inference_access_error: ${{ steps.detect-copilot-errors.outputs.inference_access_error || 'false' }} + mcp_policy_error: ${{ steps.detect-copilot-errors.outputs.mcp_policy_error || 'false' }} + model: ${{ needs.activation.outputs.model }} + model_not_supported_error: ${{ steps.detect-copilot-errors.outputs.model_not_supported_error || 'false' }} + output: ${{ steps.collect_output.outputs.output }} + output_types: ${{ steps.collect_output.outputs.output_types }} + setup-parent-span-id: ${{ steps.setup.outputs.parent-span-id || steps.setup.outputs.span-id }} + setup-span-id: ${{ steps.setup.outputs.span-id }} + setup-trace-id: ${{ steps.setup.outputs.trace-id }} + steps: + - name: Checkout actions folder + uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 + with: + repository: github/gh-aw + sparse-checkout: | + actions + persist-credentials: false + - name: Setup Scripts + id: setup + uses: ./actions/setup + with: + destination: ${{ runner.temp }}/gh-aw/actions + job-name: ${{ github.job }} + trace-id: ${{ needs.activation.outputs.setup-trace-id }} + parent-span-id: ${{ needs.activation.outputs.setup-parent-span-id || needs.activation.outputs.setup-span-id }} + env: + GH_AW_SETUP_WORKFLOW_NAME: "Evergreen -- PR Health Keeper" + GH_AW_CURRENT_WORKFLOW_REF: ${{ github.repository }}/.github/workflows/evergreen.lock.yml@${{ github.ref }} + GH_AW_INFO_VERSION: "1.0.43" + - name: Set runtime paths + id: set-runtime-paths + run: | + { + echo "GH_AW_SAFE_OUTPUTS=${RUNNER_TEMP}/gh-aw/safeoutputs/outputs.jsonl" + echo "GH_AW_SAFE_OUTPUTS_CONFIG_PATH=${RUNNER_TEMP}/gh-aw/safeoutputs/config.json" + echo "GH_AW_SAFE_OUTPUTS_TOOLS_PATH=${RUNNER_TEMP}/gh-aw/safeoutputs/tools.json" + } >> "$GITHUB_OUTPUT" + - name: Checkout repository + uses: 
actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 + with: + persist-credentials: false + fetch-depth: 0 + - name: Fetch additional refs + env: + GH_AW_FETCH_TOKEN: ${{ secrets.GH_AW_GITHUB_MCP_SERVER_TOKEN || secrets.GH_AW_GITHUB_TOKEN || secrets.GITHUB_TOKEN }} + run: | + header=$(printf "x-access-token:%s" "${GH_AW_FETCH_TOKEN}" | base64 -w 0) + git -c "http.extraheader=Authorization: Basic ${header}" fetch origin '+refs/heads/*:refs/remotes/origin/*' + - name: Create gh-aw temp directory + run: bash "${RUNNER_TEMP}/gh-aw/actions/create_gh_aw_tmp_dir.sh" + - name: Configure gh CLI for GitHub Enterprise + run: bash "${RUNNER_TEMP}/gh-aw/actions/configure_gh_for_ghe.sh" + env: + GH_TOKEN: ${{ github.token }} + - env: + FORCED_PR: ${{ github.event.inputs.pr_number }} + GH_AW_CI_TRIGGER_TOKEN: ${{ secrets.GH_AW_CI_TRIGGER_TOKEN }} + GITHUB_REPOSITORY: ${{ github.repository }} + GITHUB_TOKEN: ${{ github.token }} + id: find-pr + name: Find a PR that needs attention + run: "python3 - << 'PYEOF'\nimport os, json, re, subprocess, sys\nimport urllib.request, urllib.error\n\ndef emit_selected_output(pr_number):\n \"\"\"Expose `selected` as a step output for workflow gating.\n Empty string means no PR needs attention; otherwise the PR number.\"\"\"\n gh_output = os.environ.get(\"GITHUB_OUTPUT\")\n if gh_output:\n with open(gh_output, \"a\") as f:\n f.write(f\"selected={'' if pr_number is None else pr_number}\\n\")\n\ntoken = os.environ.get(\"GITHUB_TOKEN\", \"\")\nrepo = os.environ.get(\"GITHUB_REPOSITORY\", \"\")\nforced_pr = os.environ.get(\"FORCED_PR\", \"\").strip()\n\nrepo_memory_dir = \"/tmp/gh-aw/repo-memory/evergreen\"\noutput_file = \"/tmp/gh-aw/evergreen.json\"\nos.makedirs(\"/tmp/gh-aw\", exist_ok=True)\n\nMAX_ATTEMPTS = 5\n\ndef api_get(url):\n \"\"\"Make an authenticated GET request to the GitHub API.\"\"\"\n req = urllib.request.Request(url, headers={\n \"Authorization\": f\"token {token}\",\n \"Accept\": \"application/vnd.github.v3+json\",\n 
})\n with urllib.request.urlopen(req, timeout=30) as resp:\n return json.loads(resp.read().decode())\n\ndef get_all_open_prs():\n \"\"\"Fetch all open PRs, paginated.\"\"\"\n prs = []\n page = 1\n while True:\n url = f\"https://api.github.com/repos/{repo}/pulls?state=open&per_page=100&page={page}&sort=number&direction=asc\"\n batch = api_get(url)\n if not batch:\n break\n prs.extend(batch)\n if len(batch) < 100:\n break\n page += 1\n return prs\n\ndef get_check_status(pr):\n \"\"\"Get combined CI check status for a PR's head commit.\"\"\"\n head_sha = pr[\"head\"][\"sha\"]\n url = f\"https://api.github.com/repos/{repo}/commits/{head_sha}/status\"\n try:\n status = api_get(url)\n return status.get(\"state\", \"unknown\")\n except Exception as e:\n print(f\" Warning: could not fetch status for PR #{pr['number']}: {e}\")\n return \"unknown\"\n\ndef get_check_runs(pr):\n \"\"\"Get check runs for a PR's head commit.\"\"\"\n head_sha = pr[\"head\"][\"sha\"]\n url = f\"https://api.github.com/repos/{repo}/commits/{head_sha}/check-runs\"\n try:\n data = api_get(url)\n return data.get(\"check_runs\", [])\n except Exception as e:\n print(f\" Warning: could not fetch check runs for PR #{pr['number']}: {e}\")\n return []\n\ndef read_attempt_state(pr_number):\n \"\"\"Read attempt tracking state from repo-memory.\"\"\"\n state_file = os.path.join(repo_memory_dir, f\"pr-{pr_number}.md\")\n if not os.path.isfile(state_file):\n return {\"attempts\": 0, \"head_sha\": None}\n with open(state_file, encoding=\"utf-8\") as f:\n content = f.read()\n state = {\"attempts\": 0, \"head_sha\": None}\n m = re.search(r'\\|\\s*head_sha\\s*\\|\\s*(\\S+)\\s*\\|', content)\n if m:\n state[\"head_sha\"] = m.group(1)\n m = re.search(r'\\|\\s*attempts\\s*\\|\\s*(\\d+)\\s*\\|', content)\n if m:\n state[\"attempts\"] = int(m.group(1))\n return state\n\ndef get_commit_date(sha):\n \"\"\"Return the committer date (ISO 8601) for a given commit SHA, or None.\"\"\"\n url = 
f\"https://api.github.com/repos/{repo}/commits/{sha}\"\n try:\n data = api_get(url)\n return data.get(\"commit\", {}).get(\"committer\", {}).get(\"date\")\n except Exception as e:\n print(f\" Warning: could not fetch commit {sha[:12]}: {e}\")\n return None\n\ndef is_autoloop_pr(pr):\n \"\"\"Return True if the PR is from an autoloop branch.\n Branch name is the primary gate (labels can be added by anyone on\n public repos); the `autoloop` label is just an additional signal.\"\"\"\n head_ref = pr.get(\"head\", {}).get(\"ref\", \"\") or \"\"\n return head_ref.startswith(\"autoloop/\")\n\ndef get_behind_by(pr):\n \"\"\"Return how many commits the PR base branch is ahead of the PR head.\"\"\"\n base = pr[\"base\"][\"ref\"]\n head_sha = pr[\"head\"][\"sha\"]\n url = f\"https://api.github.com/repos/{repo}/compare/{base}...{head_sha}\"\n try:\n data = api_get(url)\n return int(data.get(\"behind_by\", 0) or 0)\n except Exception as e:\n print(f\" Warning: could not fetch compare for PR #{pr['number']}: {e}\")\n return 0\n\ndef trigger_ci_workflow(branch):\n \"\"\"Trigger this repository's ci.yml for `branch` by pushing an empty\n commit with GH_AW_CI_TRIGGER_TOKEN. 
Because the push is attributed to\n a real user instead of GITHUB_TOKEN, GitHub emits the PR synchronize\n event that starts the pull_request CI workflow; this is separate from\n the manual workflow_dispatch trigger available in ci.yml.\"\"\"\n ci_token = os.environ.get(\"GH_AW_CI_TRIGGER_TOKEN\", \"\") or token\n try:\n # Get current HEAD SHA\n url = f\"https://api.github.com/repos/{repo}/git/ref/heads/{branch}\"\n req = urllib.request.Request(url, headers={\n \"Authorization\": f\"token {ci_token}\",\n \"Accept\": \"application/vnd.github.v3+json\",\n })\n with urllib.request.urlopen(req, timeout=30) as resp:\n head_sha = json.loads(resp.read().decode())[\"object\"][\"sha\"]\n\n # Create an empty commit on top of HEAD\n url = f\"https://api.github.com/repos/{repo}/git/commits\"\n payload = json.dumps({\n \"message\": \"chore: trigger CI [evergreen]\",\n \"tree\": json.loads(urllib.request.urlopen(\n urllib.request.Request(\n f\"https://api.github.com/repos/{repo}/git/commits/{head_sha}\",\n headers={\"Authorization\": f\"token {ci_token}\", \"Accept\": \"application/vnd.github.v3+json\"},\n ), timeout=30\n ).read().decode())[\"tree\"][\"sha\"],\n \"parents\": [head_sha],\n }).encode()\n req = urllib.request.Request(url, data=payload, method=\"POST\", headers={\n \"Authorization\": f\"token {ci_token}\",\n \"Accept\": \"application/vnd.github.v3+json\",\n \"Content-Type\": \"application/json\",\n })\n with urllib.request.urlopen(req, timeout=30) as resp:\n new_sha = json.loads(resp.read().decode())[\"sha\"]\n\n # Update the branch ref to the new commit\n url = f\"https://api.github.com/repos/{repo}/git/refs/heads/{branch}\"\n payload = json.dumps({\"sha\": new_sha}).encode()\n req = urllib.request.Request(url, data=payload, method=\"PATCH\", headers={\n \"Authorization\": f\"token {ci_token}\",\n \"Accept\": \"application/vnd.github.v3+json\",\n \"Content-Type\": \"application/json\",\n })\n with urllib.request.urlopen(req, timeout=30) as resp:\n return 200 <= 
resp.status < 300\n except urllib.error.HTTPError as e:\n print(f\" Warning: CI trigger (empty commit) failed for {branch}: HTTP {e.code} {e.reason}\")\n return False\n except Exception as e:\n print(f\" Warning: CI trigger (empty commit) failed for {branch}: {e}\")\n return False\n\ndef post_pr_comment(pr_number, body):\n \"\"\"Post a comment on a PR using the issues comments API.\"\"\"\n url = f\"https://api.github.com/repos/{repo}/issues/{pr_number}/comments\"\n payload = json.dumps({\"body\": body}).encode()\n req = urllib.request.Request(url, data=payload, method=\"POST\", headers={\n \"Authorization\": f\"token {token}\",\n \"Accept\": \"application/vnd.github.v3+json\",\n \"Content-Type\": \"application/json\",\n })\n try:\n with urllib.request.urlopen(req, timeout=30) as resp:\n return 200 <= resp.status < 300\n except Exception as e:\n print(f\" Warning: could not post comment on PR #{pr_number}: {e}\")\n return False\n\ndef pr_needs_attention(pr):\n \"\"\"Check if a PR has merge conflicts, is behind main, or has failing CI.\n Returns a list of issues.\"\"\"\n issues = []\n\n # Check mergeable state\n # Need to fetch full PR details for mergeable info\n pr_url = f\"https://api.github.com/repos/{repo}/pulls/{pr['number']}\"\n try:\n full_pr = api_get(pr_url)\n mergeable = full_pr.get(\"mergeable\")\n mergeable_state = full_pr.get(\"mergeable_state\", \"unknown\")\n if mergeable is False:\n issues.append(\"merge_conflict\")\n elif mergeable_state == \"dirty\":\n issues.append(\"merge_conflict\")\n except Exception as e:\n print(f\" Warning: could not fetch mergeable state for PR #{pr['number']}: {e}\")\n\n # Check if the PR branch is behind its base branch (e.g., main moved forward).\n # We always want to merge main first before fixing CI, so flag this explicitly.\n behind_by = get_behind_by(pr)\n if behind_by > 0 and \"merge_conflict\" not in issues:\n issues.append(f\"behind_main: {behind_by} commit(s)\")\n\n # Check CI status via check runs\n check_runs = 
get_check_runs(pr)\n failed_checks = []\n for cr in check_runs:\n conclusion = cr.get(\"conclusion\")\n status = cr.get(\"status\")\n name = cr.get(\"name\", \"unknown\")\n if conclusion in (\"failure\", \"timed_out\", \"action_required\"):\n failed_checks.append(name)\n elif status == \"completed\" and conclusion not in (\"success\", \"neutral\", \"skipped\"):\n if conclusion is not None:\n failed_checks.append(name)\n if failed_checks:\n issues.append(f\"failing_checks: {', '.join(failed_checks)}\")\n\n # Also check commit status API (some checks use the older status API)\n combined_status = get_check_status(pr)\n if combined_status == \"failure\":\n if not failed_checks:\n issues.append(\"failing_status\")\n\n # Detect missing/stale CI for autoloop PRs.\n # Pushes via GITHUB_TOKEN don't trigger workflows, so autoloop PRs\n # can sit indefinitely with no checks. Only autoloop branches are\n # eligible -- never trigger CI automatically on outside-contributor PRs.\n if is_autoloop_pr(pr):\n completed_runs = [cr for cr in check_runs if cr.get(\"status\") == \"completed\"]\n # No check runs at all on this HEAD SHA\n if not check_runs:\n issues.append(\"missing_checks: no check runs on HEAD\")\n # All runs are still queued / in_progress and the HEAD has been\n # sitting around for a while -- likely a stuck/missing trigger.\n elif not completed_runs:\n head_date = get_commit_date(pr[\"head\"][\"sha\"])\n if head_date:\n # If HEAD is older than 15 minutes and nothing has completed,\n # treat it as missing (a real CI run would have started by now).\n try:\n from datetime import datetime, timezone\n ht = datetime.fromisoformat(head_date.replace(\"Z\", \"+00:00\"))\n age_s = (datetime.now(timezone.utc) - ht).total_seconds()\n if age_s > 15 * 60:\n issues.append(\"missing_checks: only queued/in-progress checks on HEAD\")\n except Exception:\n pass\n\n return issues\n\n# --- Main logic ---\n\nprint(\"=== Evergreen PR Health Check ===\")\nprint(f\"Repository: {repo}\")\n\nprs 
= get_all_open_prs()\nprint(f\"Found {len(prs)} open PR(s)\")\n\nif not prs:\n print(\"No open PRs. Nothing to do.\")\n with open(output_file, \"w\") as f:\n json.dump({\"selected\": None, \"reason\": \"no_open_prs\"}, f)\n emit_selected_output(None)\n sys.exit(0)\n\n# Evaluate each PR deterministically (sorted by PR number ascending)\ncandidates = []\nskipped = []\nci_triggered = []\n\n# If a specific PR is forced, only check that one\nif forced_pr:\n prs = [pr for pr in prs if str(pr[\"number\"]) == forced_pr]\n if not prs:\n print(f\"ERROR: PR #{forced_pr} not found among open PRs.\")\n sys.exit(1)\n print(f\"FORCED: checking only PR #{forced_pr}\")\n\nfor pr in sorted(prs, key=lambda p: p[\"number\"]):\n pr_num = pr[\"number\"]\n head_sha = pr[\"head\"][\"sha\"]\n print(f\"\\nChecking PR #{pr_num}: {pr['title'][:60]}...\")\n print(f\" Head SHA: {head_sha[:12]}\")\n\n issues = pr_needs_attention(pr)\n if not issues:\n print(f\" Status: healthy (no issues)\")\n continue\n\n print(f\" Issues: {issues}\")\n\n # Handle `missing_checks` for autoloop PRs directly in the pre-flight,\n # without invoking the agent. The action is purely an API dispatch --\n # no code fix is needed -- and keeping the privileged CI trigger token\n # out of the agent context is a security win. 
We only do this for\n # autoloop branches (the detector enforces this), and only if\n # `missing_checks` is the *only* issue: any other issue (merge\n # conflict, behind main, failing checks) still needs the agent.\n if (\n len(issues) == 1\n and issues[0].startswith(\"missing_checks\")\n and is_autoloop_pr(pr)\n ):\n branch = pr[\"head\"][\"ref\"]\n # Cap retries on the same SHA so we don't spam-dispatch on a\n # truly-broken workflow.\n attempt_state = read_attempt_state(pr_num)\n prior_attempts = (\n attempt_state[\"attempts\"] if attempt_state[\"head_sha\"] == head_sha else 0\n )\n if prior_attempts >= MAX_ATTEMPTS:\n skipped.append({\n \"pr\": pr_num,\n \"reason\": (\n f\"missing_checks: max dispatch attempts ({MAX_ATTEMPTS}) \"\n f\"reached on SHA {head_sha[:12]}\"\n ),\n })\n print(f\" SKIPPED: max missing_checks attempts reached\")\n continue\n print(f\" Triggering ci.yml on branch {branch} (attempt {prior_attempts + 1}/{MAX_ATTEMPTS})\")\n ok = trigger_ci_workflow(branch)\n if ok:\n print(f\" [+] Triggered ci.yml on {branch}\")\n workflow_url = (\n f\"https://github.com/{repo}/actions/workflows/ci.yml\"\n f\"?query=branch%3A{branch}\"\n )\n post_pr_comment(\n pr_num,\n (\n \"Evergreen: this PR's HEAD had no completed CI checks, \"\n \"so I pushed an empty commit to trigger the `ci.yml` workflow on this branch. 
\"\n f\"See [recent CI runs]({workflow_url}).\\n\\n\"\n \"_(Triggered automatically because pushes via `GITHUB_TOKEN` \"\n \"do not start workflows.)_\"\n ),\n )\n ci_triggered.append({\n \"pr\": pr_num,\n \"branch\": branch,\n \"head_sha\": head_sha,\n })\n # Persist attempt count so we eventually give up if dispatching\n # never produces check runs.\n os.makedirs(repo_memory_dir, exist_ok=True)\n state_path = os.path.join(repo_memory_dir, f\"pr-{pr_num}.md\")\n from datetime import datetime, timezone\n ts = datetime.now(timezone.utc).strftime(\"%Y-%m-%dT%H:%M:%SZ\")\n with open(state_path, \"w\", encoding=\"utf-8\") as sf:\n sf.write(\n f\"# Evergreen: PR #{pr_num}\\n\\n\"\n f\"## State\\n\\n\"\n f\"| Field | Value |\\n\"\n f\"|:---|:---|\\n\"\n f\"| head_sha | {head_sha} |\\n\"\n f\"| attempts | {prior_attempts + 1} |\\n\"\n f\"| last_run | {ts} |\\n\"\n f\"| last_result | ci_dispatched |\\n\"\n )\n else:\n print(f\" [x] Failed to dispatch ci.yml on {branch}\")\n skipped.append({\n \"pr\": pr_num,\n \"reason\": \"missing_checks: CI trigger push failed\",\n })\n # Do NOT add to candidates -- the agent has nothing to fix here.\n continue\n\n # Check attempt tracking\n attempt_state = read_attempt_state(pr_num)\n if attempt_state[\"head_sha\"] == head_sha:\n attempts = attempt_state[\"attempts\"]\n print(f\" Attempts on this SHA: {attempts}/{MAX_ATTEMPTS}\")\n if attempts >= MAX_ATTEMPTS:\n skipped.append({\n \"pr\": pr_num,\n \"reason\": f\"max attempts ({MAX_ATTEMPTS}) reached on SHA {head_sha[:12]}\",\n })\n print(f\" SKIPPED: max attempts reached\")\n continue\n else:\n attempts = 0\n print(f\" New SHA detected -- resetting attempt counter\")\n\n candidates.append({\n \"pr_number\": pr_num,\n \"title\": pr[\"title\"],\n \"head_sha\": head_sha,\n \"base_branch\": pr[\"base\"][\"ref\"],\n \"head_branch\": pr[\"head\"][\"ref\"],\n \"issues\": issues,\n \"attempts\": attempts,\n })\n\n# Select the first candidate (lowest PR number -- deterministic)\nselected = 
candidates[0] if candidates else None\n\nresult = {\n \"selected\": selected,\n \"skipped\": skipped,\n \"ci_triggered\": ci_triggered,\n \"total_open_prs\": len(prs),\n \"candidates_found\": len(candidates),\n}\n\nwith open(output_file, \"w\") as f:\n json.dump(result, f, indent=2)\n\nif selected:\n branch = selected[\"head_branch\"]\n print(f\"Checking out PR branch before agent run: {branch}\")\n subprocess.check_call([\"git\", \"checkout\", \"-B\", branch, f\"origin/{branch}\"])\n subprocess.check_call([\"git\", \"branch\", \"--set-upstream-to\", f\"origin/{branch}\", branch])\n print(f\"\\n>>> Selected PR #{selected['pr_number']}: {selected['title']}\")\n print(f\" Issues: {selected['issues']}\")\n print(f\" Attempt: {selected['attempts'] + 1}/{MAX_ATTEMPTS}\")\n emit_selected_output(selected[\"pr_number\"])\nelse:\n print(\"\\nNo PRs need attention. Nothing to do.\")\n emit_selected_output(None)\n sys.exit(0)\nPYEOF\n" + + # Repo memory git-based storage configuration from frontmatter processed below + - name: Clone repo-memory branch (default) + env: + GH_TOKEN: ${{ github.token }} + GITHUB_SERVER_URL: ${{ github.server_url }} + BRANCH_NAME: memory/evergreen + TARGET_REPO: ${{ github.repository }} + MEMORY_DIR: /tmp/gh-aw/repo-memory/default + CREATE_ORPHAN: true + run: bash "${RUNNER_TEMP}/gh-aw/actions/clone_repo_memory_branch.sh" + - name: Configure Git credentials + env: + REPO_NAME: ${{ github.repository }} + SERVER_URL: ${{ github.server_url }} + GITHUB_TOKEN: ${{ github.token }} + run: | + git config --global user.email "github-actions[bot]@users.noreply.github.com" + git config --global user.name "github-actions[bot]" + git config --global am.keepcr true + # Re-authenticate git with GitHub token + SERVER_URL_STRIPPED="${SERVER_URL#https://}" + git remote set-url origin "https://x-access-token:${GITHUB_TOKEN}@${SERVER_URL_STRIPPED}/${REPO_NAME}.git" + echo "Git configured with standard GitHub Actions identity" + - name: Checkout PR branch + id: 
checkout-pr + if: | + github.event.pull_request || github.event.issue.pull_request + uses: actions/github-script@3a2844b7e9c422d3c10d287c895573f7108da1b3 # v9.0.0 + env: + GH_TOKEN: ${{ secrets.GH_AW_GITHUB_MCP_SERVER_TOKEN || secrets.GH_AW_GITHUB_TOKEN || secrets.GITHUB_TOKEN }} + with: + github-token: ${{ secrets.GH_AW_GITHUB_MCP_SERVER_TOKEN || secrets.GH_AW_GITHUB_TOKEN || secrets.GITHUB_TOKEN }} + script: | + const { setupGlobals } = require('${{ runner.temp }}/gh-aw/actions/setup_globals.cjs'); + setupGlobals(core, github, context, exec, io, getOctokit); + const { main } = require('${{ runner.temp }}/gh-aw/actions/checkout_pr_branch.cjs'); + await main(); + - name: Install GitHub Copilot CLI + run: bash "${RUNNER_TEMP}/gh-aw/actions/install_copilot_cli.sh" 1.0.43 + env: + GH_HOST: github.com + - name: Install AWF binary + run: bash "${RUNNER_TEMP}/gh-aw/actions/install_awf_binary.sh" v0.25.44 + - name: Determine automatic lockdown mode for GitHub MCP Server + id: determine-automatic-lockdown + uses: actions/github-script@373c709c69115d41ff229c7e5df9f8788daa9553 # v9 + env: + GH_AW_GITHUB_TOKEN: ${{ secrets.GH_AW_GITHUB_TOKEN }} + GH_AW_GITHUB_MCP_SERVER_TOKEN: ${{ secrets.GH_AW_GITHUB_MCP_SERVER_TOKEN }} + with: + script: | + const determineAutomaticLockdown = require('${{ runner.temp }}/gh-aw/actions/determine_automatic_lockdown.cjs'); + await determineAutomaticLockdown(github, context, core); + - name: Download activation artifact + uses: actions/download-artifact@3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c # v8.0.1 + with: + name: activation + path: /tmp/gh-aw + - name: Restore agent config folders from base branch + if: steps.checkout-pr.outcome == 'success' + env: + GH_AW_AGENT_FOLDERS: ".agents .claude .codex .crush .gemini .github .opencode .pi" + GH_AW_AGENT_FILES: ".crush.json AGENTS.md CLAUDE.md GEMINI.md PI.md opencode.jsonc" + run: bash "${RUNNER_TEMP}/gh-aw/actions/restore_base_github_folders.sh" + - name: Restore inline sub-agents from activation 
artifact + env: + GH_AW_SUB_AGENT_DIR: ".github/agents" + GH_AW_SUB_AGENT_EXT: ".agent.md" + run: bash "${RUNNER_TEMP}/gh-aw/actions/restore_inline_sub_agents.sh" + - name: Download container images + run: bash "${RUNNER_TEMP}/gh-aw/actions/download_docker_images.sh" ghcr.io/github/gh-aw-firewall/agent:0.25.44 ghcr.io/github/gh-aw-firewall/api-proxy:0.25.44 ghcr.io/github/gh-aw-firewall/squid:0.25.44 ghcr.io/github/gh-aw-mcpg:v0.3.6@sha256:2bb8eef86006a4c5963c55616a9c51c32f27bfdecb023b8aa6f91f6718d9171c ghcr.io/github/github-mcp-server:v1.0.3@sha256:2ac27ef03461ef2b877031b838a7d1fd7f12b12d4ace7796d8cad91446d55959 node:lts-alpine@sha256:d1b3b4da11eefd5941e7f0b9cf17783fc99d9c6fc34884a665f40a06dbdfc94f + - name: Generate Safe Outputs Config + run: | + mkdir -p "${RUNNER_TEMP}/gh-aw/safeoutputs" + mkdir -p /tmp/gh-aw/safeoutputs + mkdir -p /tmp/gh-aw/mcp-logs/safeoutputs + cat > "${RUNNER_TEMP}/gh-aw/safeoutputs/config.json" << 'GH_AW_SAFE_OUTPUTS_CONFIG_73a5a435e60b1546_EOF' + {"add_comment":{"max":3,"target":"*"},"create_report_incomplete_issue":{},"missing_data":{},"missing_tool":{},"noop":{"max":1,"report-as-issue":"true"},"push_repo_memory":{"memories":[{"dir":"/tmp/gh-aw/repo-memory/default","id":"default","max_file_count":100,"max_file_size":10240,"max_patch_size":10240}]},"push_to_pull_request_branch":{"if_no_changes":"warn","max":3,"max_patch_size":1024,"protect_top_level_dot_folders":true,"protected_files":["package.json","bun.lockb","bunfig.toml","deno.json","deno.jsonc","deno.lock","global.json","NuGet.Config","Directory.Packages.props","mix.exs","mix.lock","go.mod","go.sum","stack.yaml","stack.yaml.lock","pom.xml","build.gradle","build.gradle.kts","settings.gradle","settings.gradle.kts","gradle.properties","package-lock.json","yarn.lock","pnpm-lock.yaml","npm-shrinkwrap.json","requirements.txt","Pipfile","Pipfile.lock","pyproject.toml","setup.py","setup.cfg","Gemfile","Gemfile.lock","uv.lock","CODEOWNERS","DESIGN.md","README.md","CONTRIBUTING.md","CHANGELOG
.md","SECURITY.md","CODE_OF_CONDUCT.md","AGENTS.md","CLAUDE.md","GEMINI.md"],"protected_files_policy":"allowed","target":"*"},"report_incomplete":{}} + GH_AW_SAFE_OUTPUTS_CONFIG_73a5a435e60b1546_EOF + - name: Generate Safe Outputs Tools + env: + GH_AW_TOOLS_META_JSON: | + { + "description_suffixes": { + "add_comment": " CONSTRAINTS: Maximum 3 comment(s) can be added. Target: *. Supports reply_to_id for discussion threading.", + "push_to_pull_request_branch": " CONSTRAINTS: Maximum 3 push(es) can be made." + }, + "repo_params": {}, + "dynamic_tools": [] + } + GH_AW_VALIDATION_JSON: | + { + "add_comment": { + "defaultMax": 1, + "fields": { + "body": { + "required": true, + "type": "string", + "sanitize": true, + "maxLength": 65000 + }, + "item_number": { + "issueOrPRNumber": true + }, + "reply_to_id": { + "type": "string", + "maxLength": 256 + }, + "repo": { + "type": "string", + "maxLength": 256 + } + } + }, + "missing_data": { + "defaultMax": 20, + "fields": { + "alternatives": { + "type": "string", + "sanitize": true, + "maxLength": 256 + }, + "context": { + "type": "string", + "sanitize": true, + "maxLength": 256 + }, + "data_type": { + "type": "string", + "sanitize": true, + "maxLength": 128 + }, + "reason": { + "type": "string", + "sanitize": true, + "maxLength": 256 + } + } + }, + "missing_tool": { + "defaultMax": 20, + "fields": { + "alternatives": { + "type": "string", + "sanitize": true, + "maxLength": 512 + }, + "reason": { + "required": true, + "type": "string", + "sanitize": true, + "maxLength": 256 + }, + "tool": { + "type": "string", + "sanitize": true, + "maxLength": 128 + } + } + }, + "noop": { + "defaultMax": 1, + "fields": { + "message": { + "required": true, + "type": "string", + "sanitize": true, + "maxLength": 65000 + } + } + }, + "push_to_pull_request_branch": { + "defaultMax": 1, + "fields": { + "branch": { + "required": true, + "type": "string", + "sanitize": true, + "maxLength": 256 + }, + "message": { + "required": true, + "type": "string", 
+ "sanitize": true, + "maxLength": 65000 + }, + "pull_request_number": { + "issueOrPRNumber": true + } + } + }, + "report_incomplete": { + "defaultMax": 5, + "fields": { + "details": { + "type": "string", + "sanitize": true, + "maxLength": 65000 + }, + "reason": { + "required": true, + "type": "string", + "sanitize": true, + "maxLength": 1024 + } + } + } + } + uses: actions/github-script@3a2844b7e9c422d3c10d287c895573f7108da1b3 # v9.0.0 + with: + script: | + const { setupGlobals } = require('${{ runner.temp }}/gh-aw/actions/setup_globals.cjs'); + setupGlobals(core, github, context, exec, io, getOctokit); + const { main } = require('${{ runner.temp }}/gh-aw/actions/generate_safe_outputs_tools.cjs'); + await main(); + - name: Generate Safe Outputs MCP Server Config + id: safe-outputs-config + run: | + # Generate a secure random API key (360 bits of entropy, 40+ chars) + # Mask immediately to prevent timing vulnerabilities + API_KEY=$(openssl rand -base64 45 | tr -d '/+=') + echo "::add-mask::${API_KEY}" + + PORT=3001 + + # Set outputs for next steps + { + echo "safe_outputs_api_key=${API_KEY}" + echo "safe_outputs_port=${PORT}" + } >> "$GITHUB_OUTPUT" + + echo "Safe Outputs MCP server will run on port ${PORT}" + + - name: Start Safe Outputs MCP HTTP Server + id: safe-outputs-start + env: + DEBUG: '*' + GH_AW_SAFE_OUTPUTS: ${{ steps.set-runtime-paths.outputs.GH_AW_SAFE_OUTPUTS }} + GH_AW_SAFE_OUTPUTS_PORT: ${{ steps.safe-outputs-config.outputs.safe_outputs_port }} + GH_AW_SAFE_OUTPUTS_API_KEY: ${{ steps.safe-outputs-config.outputs.safe_outputs_api_key }} + GH_AW_SAFE_OUTPUTS_TOOLS_PATH: ${{ runner.temp }}/gh-aw/safeoutputs/tools.json + GH_AW_SAFE_OUTPUTS_CONFIG_PATH: ${{ runner.temp }}/gh-aw/safeoutputs/config.json + GH_AW_MCP_LOG_DIR: /tmp/gh-aw/mcp-logs/safeoutputs + run: | + # Environment variables are set above to prevent template injection + export DEBUG + export GH_AW_SAFE_OUTPUTS + export GH_AW_SAFE_OUTPUTS_PORT + export GH_AW_SAFE_OUTPUTS_API_KEY + export 
GH_AW_SAFE_OUTPUTS_TOOLS_PATH + export GH_AW_SAFE_OUTPUTS_CONFIG_PATH + export GH_AW_MCP_LOG_DIR + + bash "${RUNNER_TEMP}/gh-aw/actions/start_safe_outputs_server.sh" + + - name: Start MCP Gateway + id: start-mcp-gateway + env: + GH_AW_SAFE_OUTPUTS: ${{ steps.set-runtime-paths.outputs.GH_AW_SAFE_OUTPUTS }} + GH_AW_SAFE_OUTPUTS_API_KEY: ${{ steps.safe-outputs-start.outputs.api_key }} + GH_AW_SAFE_OUTPUTS_PORT: ${{ steps.safe-outputs-start.outputs.port }} + GITHUB_MCP_GUARD_MIN_INTEGRITY: ${{ steps.determine-automatic-lockdown.outputs.min_integrity }} + GITHUB_MCP_GUARD_REPOS: ${{ steps.determine-automatic-lockdown.outputs.repos }} + GITHUB_MCP_SERVER_TOKEN: ${{ secrets.GH_AW_GITHUB_MCP_SERVER_TOKEN || secrets.GH_AW_GITHUB_TOKEN || secrets.GITHUB_TOKEN }} + run: | + set -eo pipefail + mkdir -p "${RUNNER_TEMP}/gh-aw/mcp-config" + + # Export gateway environment variables for MCP config and gateway script + export MCP_GATEWAY_PORT="8080" + export MCP_GATEWAY_DOMAIN="host.docker.internal" + export MCP_GATEWAY_HOST_DOMAIN="localhost" + MCP_GATEWAY_API_KEY=$(openssl rand -base64 45 | tr -d '/+=') + echo "::add-mask::${MCP_GATEWAY_API_KEY}" + export MCP_GATEWAY_API_KEY + export MCP_GATEWAY_PAYLOAD_DIR="/tmp/gh-aw/mcp-payloads" + mkdir -p "${MCP_GATEWAY_PAYLOAD_DIR}" + export MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD="524288" + export DEBUG="*" + + export GH_AW_ENGINE="copilot" + MCP_GATEWAY_UID=$(id -u 2>/dev/null || echo '0') + MCP_GATEWAY_GID=$(id -g 2>/dev/null || echo '0') + case "${DOCKER_HOST:-}" in + unix://* ) DOCKER_SOCK_PATH="${DOCKER_HOST#unix://}" ;; + /* ) DOCKER_SOCK_PATH="$DOCKER_HOST" ;; + * ) DOCKER_SOCK_PATH=/var/run/docker.sock ;; + esac + DOCKER_SOCK_GID=$(stat -c '%g' "$DOCKER_SOCK_PATH" 2>/dev/null || echo '0') + export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host --add-host host.docker.internal:127.0.0.1 --user '"${MCP_GATEWAY_UID}"':'"${MCP_GATEWAY_GID}"' --group-add '"${DOCKER_SOCK_GID}"' -v '"${DOCKER_SOCK_PATH}"':/var/run/docker.sock -e 
MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD -e DOCKER_HOST=unix:///var/run/docker.sock -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_GUARD_MIN_INTEGRITY -e GITHUB_MCP_GUARD_REPOS -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.3.6' + + mkdir -p /home/runner/.copilot + GH_AW_NODE=$(which node 2>/dev/null || command -v node 2>/dev/null || echo node) + cat << GH_AW_MCP_CONFIG_929273354dfd0486_EOF | "$GH_AW_NODE" "${RUNNER_TEMP}/gh-aw/actions/start_mcp_gateway.cjs" + { + "mcpServers": { + "github": { + "type": "stdio", + "container": "ghcr.io/github/github-mcp-server:v1.0.3", + "env": { + "GITHUB_HOST": "\${GITHUB_SERVER_URL}", + "GITHUB_PERSONAL_ACCESS_TOKEN": "\${GITHUB_MCP_SERVER_TOKEN}", + "GITHUB_READ_ONLY": "1", + "GITHUB_TOOLSETS": "repos,pull_requests,issues,actions" + }, + "guard-policies": { + "allow-only": { + "min-integrity": "$GITHUB_MCP_GUARD_MIN_INTEGRITY", + "repos": "$GITHUB_MCP_GUARD_REPOS" + } + } + }, + "safeoutputs": { + "type": "http", + "url": "http://host.docker.internal:$GH_AW_SAFE_OUTPUTS_PORT", + "headers": { + 
"Authorization": "\${GH_AW_SAFE_OUTPUTS_API_KEY}" + }, + "guard-policies": { + "write-sink": { + "accept": [ + "*" + ] + } + } + } + }, + "gateway": { + "port": $MCP_GATEWAY_PORT, + "domain": "${MCP_GATEWAY_DOMAIN}", + "apiKey": "${MCP_GATEWAY_API_KEY}", + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" + } + } + GH_AW_MCP_CONFIG_929273354dfd0486_EOF + - name: Mount MCP servers as CLIs + id: mount-mcp-clis + continue-on-error: true + env: + MCP_GATEWAY_API_KEY: ${{ steps.start-mcp-gateway.outputs.gateway-api-key }} + MCP_GATEWAY_DOMAIN: ${{ steps.start-mcp-gateway.outputs.gateway-domain }} + MCP_GATEWAY_PORT: ${{ steps.start-mcp-gateway.outputs.gateway-port }} + uses: actions/github-script@3a2844b7e9c422d3c10d287c895573f7108da1b3 # v9.0.0 + with: + script: | + const { setupGlobals } = require('${{ runner.temp }}/gh-aw/actions/setup_globals.cjs'); + setupGlobals(core, github, context, exec, io); + const { main } = require('${{ runner.temp }}/gh-aw/actions/mount_mcp_as_cli.cjs'); + await main(); + - name: Clean credentials + continue-on-error: true + run: bash "${RUNNER_TEMP}/gh-aw/actions/clean_git_credentials.sh" + - name: Audit pre-agent workspace + id: pre_agent_audit + continue-on-error: true + run: bash "${RUNNER_TEMP}/gh-aw/actions/audit_pre_agent_workspace.sh" + - name: Execute GitHub Copilot CLI + id: agentic_execution + # Copilot CLI tool arguments (sorted): + timeout-minutes: 360 + run: | + set -o pipefail + printf '%s' "$(date +%s%3N)" > /tmp/gh-aw/agent_cli_start_ms.txt + touch /tmp/gh-aw/agent-step-summary.md + GH_AW_NODE_BIN=$(command -v node 2>/dev/null || true) + export GH_AW_NODE_BIN + (umask 177 && touch /tmp/gh-aw/agent-stdio.log) + printf '%s\n' 
'{"$schema":"https://github.com/github/gh-aw-firewall/releases/download/v0.25.44/awf-config.schema.json","network":{"allowDomains":["api.business.githubcopilot.com","api.enterprise.githubcopilot.com","api.github.com","api.githubcopilot.com","api.individual.githubcopilot.com","api.npms.io","api.snapcraft.io","archive.ubuntu.com","azure.archive.ubuntu.com","bun.sh","cdn.jsdelivr.net","crl.geotrust.com","crl.globalsign.com","crl.identrust.com","crl.sectigo.com","crl.thawte.com","crl.usertrust.com","crl.verisign.com","crl3.digicert.com","crl4.digicert.com","crls.ssl.com","deb.nodesource.com","deno.land","esm.sh","get.pnpm.io","github.com","go.dev","golang.org","googleapis.deno.dev","googlechromelabs.github.io","goproxy.io","host.docker.internal","json-schema.org","json.schemastore.org","jsr.io","keyserver.ubuntu.com","nodejs.org","npm.pkg.github.com","npmjs.com","npmjs.org","ocsp.digicert.com","ocsp.geotrust.com","ocsp.globalsign.com","ocsp.identrust.com","ocsp.sectigo.com","ocsp.ssl.com","ocsp.thawte.com","ocsp.usertrust.com","ocsp.verisign.com","packagecloud.io","packages.cloud.google.com","packages.microsoft.com","pkg.go.dev","ppa.launchpad.net","proxy.golang.org","raw.githubusercontent.com","registry.bower.io","registry.npmjs.com","registry.npmjs.org","registry.yarnpkg.com","releaseassets.githubusercontent.com","repo.yarnpkg.com","s.symcb.com","s.symcd.com","security.ubuntu.com","skimdb.npmjs.com","storage.googleapis.com","sum.golang.org","telemetry.enterprise.githubcopilot.com","telemetry.vercel.com","ts-crl.ws.symantec.com","ts-ocsp.ws.symantec.com","www.googleapis.com","www.npmjs.com","www.npmjs.org","yarnpkg.com"]},"apiProxy":{"enabled":true,"maxRuns":500,"maxEffectiveTokens":25000000,"models":{"auto":["large"],"deep-research":["copilot/deep-research*","copilot/o3-deep-research*","copilot/o4-mini-deep-research*","google/deep-research*","gemini/deep-research*","openai/o3-deep-research*","openai/o4-mini-deep-research*"],"gemini-flash":["copilot/gemini-*flash*","go
ogle/gemini-*flash*","gemini/gemini-*flash*"],"gemini-flash-lite":["copilot/gemini-*flash*lite*","google/gemini-*flash*lite*","gemini/gemini-*flash*lite*"],"gemini-pro":["copilot/gemini-*pro*","google/gemini-*pro*","gemini/gemini-*pro*"],"gemma":["copilot/gemma*","google/gemma*","gemini/gemma*"],"gpt-4.1":["copilot/gpt-4.1*","openai/gpt-4.1*"],"gpt-5":["copilot/gpt-5*","openai/gpt-5*"],"gpt-5-codex":["copilot/gpt-5*codex*","openai/gpt-5*codex*"],"gpt-5-mini":["copilot/gpt-5*mini*","openai/gpt-5*mini*"],"gpt-5-nano":["copilot/gpt-5*nano*","openai/gpt-5*nano*"],"gpt-5-pro":["copilot/gpt-5*pro*","openai/gpt-5*pro*"],"haiku":["copilot/*haiku*","anthropic/*haiku*"],"large":["sonnet","gpt-5-pro","gpt-5","gemini-pro"],"mini":["haiku","gpt-5-mini","gpt-5-nano","gemini-flash-lite"],"opus":["copilot/*opus*","anthropic/*opus*"],"reasoning":["copilot/o1*","copilot/o3*","copilot/o4*","openai/o1*","openai/o3*","openai/o4*"],"small":["mini"],"sonnet":["copilot/*sonnet*","anthropic/*sonnet*"]}},"container":{"imageTag":"0.25.44"}}' > "${RUNNER_TEMP}/gh-aw/awf-config.json" && cp "${RUNNER_TEMP}/gh-aw/awf-config.json" /tmp/gh-aw/awf-config.json + GH_AW_DOCKER_HOST_PATH_PREFIX_ARGS="" + if [[ "${DOCKER_HOST:-}" =~ ^tcp:// ]]; then + GH_AW_DOCKER_HOST_PATH_PREFIX_ARGS="--docker-host-path-prefix /tmp/gh-aw" + fi + # shellcheck disable=SC1003 + sudo -E awf --config "${RUNNER_TEMP}/gh-aw/awf-config.json" --container-workdir "${GITHUB_WORKSPACE}" --mount "${RUNNER_TEMP}/gh-aw:${RUNNER_TEMP}/gh-aw:ro" --mount "${RUNNER_TEMP}/gh-aw:/host${RUNNER_TEMP}/gh-aw:ro" ${GH_AW_DOCKER_HOST_PATH_PREFIX_ARGS} --env-all --exclude-env COPILOT_GITHUB_TOKEN --exclude-env GITHUB_MCP_SERVER_TOKEN --exclude-env MCP_GATEWAY_API_KEY --log-level info --proxy-logs-dir /tmp/gh-aw/sandbox/firewall/logs --audit-dir /tmp/gh-aw/sandbox/firewall/audit --enable-host-access --allow-host-ports 80,443,8080 --skip-pull \ + -- /bin/bash -c 'export PATH="${RUNNER_TEMP}/gh-aw/mcp-cli/bin:$PATH" && export PATH="$(find 
/opt/hostedtoolcache /home/runner/work/_tool -maxdepth 5 -type d -name bin 2>/dev/null | tr '\''\n'\'' '\'':'\'')$PATH"; [ -n "$GOROOT" ] && export PATH="$GOROOT/bin:$PATH" || true && GH_AW_NODE_EXEC="${GH_AW_NODE_BIN:-}"; if [ -z "$GH_AW_NODE_EXEC" ] || [ ! -x "$GH_AW_NODE_EXEC" ]; then GH_AW_NODE_EXEC="$(command -v node 2>/dev/null || true)"; fi; if [ -z "$GH_AW_NODE_EXEC" ]; then echo "node runtime missing on this runner — check runtimes.node in workflow YAML" >&2; exit 127; fi; "$GH_AW_NODE_EXEC" ${RUNNER_TEMP}/gh-aw/actions/copilot_harness.cjs /usr/local/bin/copilot --add-dir /tmp/gh-aw/ --log-level all --log-dir /tmp/gh-aw/sandbox/agent/logs/ --disable-builtin-mcps --no-ask-user --allow-all-tools --allow-all-paths --add-dir "${GITHUB_WORKSPACE}" --prompt-file /tmp/gh-aw/aw-prompts/prompt.txt' 2>&1 | tee -a /tmp/gh-aw/agent-stdio.log + env: + AWF_REFLECT_ENABLED: 1 + COPILOT_AGENT_RUNNER_TYPE: STANDALONE + COPILOT_API_KEY: dummy-byok-key-for-offline-mode + COPILOT_GITHUB_TOKEN: ${{ github.token }} + COPILOT_MODEL: ${{ vars.GH_AW_MODEL_AGENT_COPILOT || 'claude-sonnet-4.6' }} + GH_AW_MCP_CONFIG: /home/runner/.copilot/mcp-config.json + GH_AW_PHASE: agent + GH_AW_PROMPT: /tmp/gh-aw/aw-prompts/prompt.txt + GH_AW_SAFE_OUTPUTS: ${{ steps.set-runtime-paths.outputs.GH_AW_SAFE_OUTPUTS }} + GH_AW_VERSION: dev + GITHUB_API_URL: ${{ github.api_url }} + GITHUB_AW: true + GITHUB_COPILOT_INTEGRATION_ID: agentic-workflows + GITHUB_HEAD_REF: ${{ github.head_ref }} + GITHUB_MCP_SERVER_TOKEN: ${{ secrets.GH_AW_GITHUB_MCP_SERVER_TOKEN || secrets.GH_AW_GITHUB_TOKEN || secrets.GITHUB_TOKEN }} + GITHUB_REF_NAME: ${{ github.ref_name }} + GITHUB_SERVER_URL: ${{ github.server_url }} + GITHUB_STEP_SUMMARY: /tmp/gh-aw/agent-step-summary.md + GITHUB_WORKSPACE: ${{ github.workspace }} + GIT_AUTHOR_EMAIL: github-actions[bot]@users.noreply.github.com + GIT_AUTHOR_NAME: github-actions[bot] + GIT_COMMITTER_EMAIL: github-actions[bot]@users.noreply.github.com + GIT_COMMITTER_NAME: 
github-actions[bot] + S2STOKENS: true + XDG_CONFIG_HOME: /home/runner + - name: Detect Copilot errors + id: detect-copilot-errors + if: always() + continue-on-error: true + run: node "${RUNNER_TEMP}/gh-aw/actions/detect_copilot_errors.cjs" + - name: Configure Git credentials + env: + REPO_NAME: ${{ github.repository }} + SERVER_URL: ${{ github.server_url }} + GITHUB_TOKEN: ${{ github.token }} + run: | + git config --global user.email "github-actions[bot]@users.noreply.github.com" + git config --global user.name "github-actions[bot]" + git config --global am.keepcr true + # Re-authenticate git with GitHub token + SERVER_URL_STRIPPED="${SERVER_URL#https://}" + git remote set-url origin "https://x-access-token:${GITHUB_TOKEN}@${SERVER_URL_STRIPPED}/${REPO_NAME}.git" + echo "Git configured with standard GitHub Actions identity" + - name: Copy Copilot session state files to logs + if: always() + continue-on-error: true + run: bash "${RUNNER_TEMP}/gh-aw/actions/copy_copilot_session_state.sh" + - name: Stop MCP Gateway + if: always() + continue-on-error: true + env: + MCP_GATEWAY_PORT: ${{ steps.start-mcp-gateway.outputs.gateway-port }} + MCP_GATEWAY_API_KEY: ${{ steps.start-mcp-gateway.outputs.gateway-api-key }} + GATEWAY_PID: ${{ steps.start-mcp-gateway.outputs.gateway-pid }} + run: | + bash "${RUNNER_TEMP}/gh-aw/actions/stop_mcp_gateway.sh" "$GATEWAY_PID" + - name: Redact secrets in logs + if: always() + uses: actions/github-script@3a2844b7e9c422d3c10d287c895573f7108da1b3 # v9.0.0 + with: + script: | + const { setupGlobals } = require('${{ runner.temp }}/gh-aw/actions/setup_globals.cjs'); + setupGlobals(core, github, context, exec, io, getOctokit); + const { main } = require('${{ runner.temp }}/gh-aw/actions/redact_secrets.cjs'); + await main(); + env: + GH_AW_SECRET_NAMES: 'GH_AW_CI_TRIGGER_TOKEN,GH_AW_GITHUB_MCP_SERVER_TOKEN,GH_AW_GITHUB_TOKEN,GITHUB_TOKEN' + SECRET_GH_AW_CI_TRIGGER_TOKEN: ${{ secrets.GH_AW_CI_TRIGGER_TOKEN }} + SECRET_GH_AW_GITHUB_MCP_SERVER_TOKEN: 
${{ secrets.GH_AW_GITHUB_MCP_SERVER_TOKEN }} + SECRET_GH_AW_GITHUB_TOKEN: ${{ secrets.GH_AW_GITHUB_TOKEN }} + SECRET_GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + - name: Append agent step summary + if: always() + run: bash "${RUNNER_TEMP}/gh-aw/actions/append_agent_step_summary.sh" + - name: Copy Safe Outputs + if: always() + env: + GH_AW_SAFE_OUTPUTS: ${{ steps.set-runtime-paths.outputs.GH_AW_SAFE_OUTPUTS }} + run: | + mkdir -p /tmp/gh-aw + cp "$GH_AW_SAFE_OUTPUTS" /tmp/gh-aw/safeoutputs.jsonl 2>/dev/null || true + - name: Ingest agent output + id: collect_output + if: always() + uses: actions/github-script@3a2844b7e9c422d3c10d287c895573f7108da1b3 # v9.0.0 + env: + GH_AW_SAFE_OUTPUTS: ${{ steps.set-runtime-paths.outputs.GH_AW_SAFE_OUTPUTS }} + GH_AW_ALLOWED_DOMAINS: "api.business.githubcopilot.com,api.enterprise.githubcopilot.com,api.github.com,api.githubcopilot.com,api.individual.githubcopilot.com,api.npms.io,api.snapcraft.io,archive.ubuntu.com,azure.archive.ubuntu.com,bun.sh,cdn.jsdelivr.net,crl.geotrust.com,crl.globalsign.com,crl.identrust.com,crl.sectigo.com,crl.thawte.com,crl.usertrust.com,crl.verisign.com,crl3.digicert.com,crl4.digicert.com,crls.ssl.com,deb.nodesource.com,deno.land,esm.sh,get.pnpm.io,github.com,go.dev,golang.org,googleapis.deno.dev,googlechromelabs.github.io,goproxy.io,host.docker.internal,json-schema.org,json.schemastore.org,jsr.io,keyserver.ubuntu.com,nodejs.org,npm.pkg.github.com,npmjs.com,npmjs.org,ocsp.digicert.com,ocsp.geotrust.com,ocsp.globalsign.com,ocsp.identrust.com,ocsp.sectigo.com,ocsp.ssl.com,ocsp.thawte.com,ocsp.usertrust.com,ocsp.verisign.com,packagecloud.io,packages.cloud.google.com,packages.microsoft.com,pkg.go.dev,ppa.launchpad.net,proxy.golang.org,raw.githubusercontent.com,registry.bower.io,registry.npmjs.com,registry.npmjs.org,registry.yarnpkg.com,releaseassets.githubusercontent.com,repo.yarnpkg.com,s.symcb.com,s.symcd.com,security.ubuntu.com,skimdb.npmjs.com,storage.googleapis.com,sum.golang.org,telemetry.enterprise.github
copilot.com,telemetry.vercel.com,ts-crl.ws.symantec.com,ts-ocsp.ws.symantec.com,www.googleapis.com,www.npmjs.com,www.npmjs.org,yarnpkg.com" + GITHUB_SERVER_URL: ${{ github.server_url }} + GITHUB_API_URL: ${{ github.api_url }} + with: + script: | + const { setupGlobals } = require('${{ runner.temp }}/gh-aw/actions/setup_globals.cjs'); + setupGlobals(core, github, context, exec, io, getOctokit); + const { main } = require('${{ runner.temp }}/gh-aw/actions/collect_ndjson_output.cjs'); + await main(); + - name: Parse agent logs for step summary + if: always() + uses: actions/github-script@3a2844b7e9c422d3c10d287c895573f7108da1b3 # v9.0.0 + env: + GH_AW_AGENT_OUTPUT: /tmp/gh-aw/sandbox/agent/logs/ + with: + script: | + const { setupGlobals } = require('${{ runner.temp }}/gh-aw/actions/setup_globals.cjs'); + setupGlobals(core, github, context, exec, io, getOctokit); + const { main } = require('${{ runner.temp }}/gh-aw/actions/parse_copilot_log.cjs'); + await main(); + - name: Parse MCP Gateway logs for step summary + if: always() + id: parse-mcp-gateway + uses: actions/github-script@3a2844b7e9c422d3c10d287c895573f7108da1b3 # v9.0.0 + with: + script: | + const { setupGlobals } = require('${{ runner.temp }}/gh-aw/actions/setup_globals.cjs'); + setupGlobals(core, github, context, exec, io, getOctokit); + const { main } = require('${{ runner.temp }}/gh-aw/actions/parse_mcp_gateway_log.cjs'); + await main(); + - name: Print firewall logs + if: always() + continue-on-error: true + env: + AWF_LOGS_DIR: /tmp/gh-aw/sandbox/firewall/logs + run: | + # Fix permissions on firewall logs/audit dirs so they can be uploaded as artifacts + # AWF runs with sudo, creating files owned by root + sudo chmod -R a+rX /tmp/gh-aw/sandbox/firewall 2>/dev/null || true + # Only run awf logs summary if awf command exists (it may not be installed if workflow failed before install step) + if command -v awf &> /dev/null; then + awf logs summary | tee -a "$GITHUB_STEP_SUMMARY" + else + echo 'AWF binary 
not installed, skipping firewall log summary' + fi + - name: Parse token usage for step summary + if: always() + continue-on-error: true + uses: actions/github-script@3a2844b7e9c422d3c10d287c895573f7108da1b3 # v9.0.0 + with: + script: | + const { setupGlobals } = require('${{ runner.temp }}/gh-aw/actions/setup_globals.cjs'); + setupGlobals(core, github, context, exec, io, getOctokit); + const { main } = require('${{ runner.temp }}/gh-aw/actions/parse_token_usage.cjs'); + await main(); + - name: Print AWF reflect summary + if: always() + continue-on-error: true + uses: actions/github-script@3a2844b7e9c422d3c10d287c895573f7108da1b3 # v9.0.0 + with: + script: | + const { setupGlobals } = require('${{ runner.temp }}/gh-aw/actions/setup_globals.cjs'); + setupGlobals(core, github, context, exec, io, getOctokit); + const { main } = require('${{ runner.temp }}/gh-aw/actions/awf_reflect_summary.cjs'); + await main(); + - name: Write agent output placeholder if missing + if: always() + run: | + if [ ! 
-f /tmp/gh-aw/agent_output.json ]; then + echo '{"items":[]}' > /tmp/gh-aw/agent_output.json + fi + # Upload repo memory as artifacts for push job + - name: Sanitize repo-memory filenames (default) + if: always() + continue-on-error: true + env: + MEMORY_DIR: /tmp/gh-aw/repo-memory/default + run: bash "${RUNNER_TEMP}/gh-aw/actions/sanitize_repo_memory_filenames.sh" + - name: Upload repo-memory artifact (default) + if: always() + uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a # v7.0.1 + with: + name: repo-memory-default + path: /tmp/gh-aw/repo-memory/default + retention-days: 1 + if-no-files-found: ignore + - name: Upload agent artifacts + if: always() + continue-on-error: true + uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a # v7.0.1 + with: + name: agent + path: | + /tmp/gh-aw/aw-prompts/prompt.txt + /tmp/gh-aw/sandbox/agent/logs/ + /tmp/gh-aw/redacted-urls.log + /tmp/gh-aw/mcp-logs/ + /tmp/gh-aw/agent_usage.json + /tmp/gh-aw/agent-stdio.log + /tmp/gh-aw/pre-agent-audit.txt + /tmp/gh-aw/agent/ + /tmp/gh-aw/github_rate_limits.jsonl + /tmp/gh-aw/safeoutputs.jsonl + /tmp/gh-aw/agent_output.json + /tmp/gh-aw/aw-*.patch + /tmp/gh-aw/aw-*.bundle + /tmp/gh-aw/awf-config.json + /tmp/gh-aw/sandbox/firewall/logs/ + /tmp/gh-aw/sandbox/firewall/audit/ + /tmp/gh-aw/sandbox/firewall/awf-reflect.json + if-no-files-found: ignore + + conclusion: + needs: + - activation + - agent + - detection + - push_repo_memory + - safe_outputs + if: > + always() && (needs.agent.result != 'skipped' || needs.activation.outputs.lockdown_check_failed == 'true' || + needs.activation.outputs.stale_lock_file_failed == 'true') + runs-on: ubuntu-slim + permissions: + contents: write + discussions: write + issues: write + pull-requests: write + concurrency: + group: "gh-aw-conclusion-evergreen" + cancel-in-progress: false + queue: max + outputs: + incomplete_count: ${{ steps.report_incomplete.outputs.incomplete_count }} + noop_message: ${{ 
steps.noop.outputs.noop_message }} + tools_reported: ${{ steps.missing_tool.outputs.tools_reported }} + total_count: ${{ steps.missing_tool.outputs.total_count }} + steps: + - name: Checkout actions folder + uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 + with: + repository: github/gh-aw + sparse-checkout: | + actions + persist-credentials: false + - name: Setup Scripts + id: setup + uses: ./actions/setup + with: + destination: ${{ runner.temp }}/gh-aw/actions + job-name: ${{ github.job }} + trace-id: ${{ needs.activation.outputs.setup-trace-id }} + parent-span-id: ${{ needs.activation.outputs.setup-parent-span-id || needs.activation.outputs.setup-span-id }} + env: + GH_AW_SETUP_WORKFLOW_NAME: "Evergreen -- PR Health Keeper" + GH_AW_CURRENT_WORKFLOW_REF: ${{ github.repository }}/.github/workflows/evergreen.lock.yml@${{ github.ref }} + GH_AW_INFO_VERSION: "1.0.43" + - name: Download agent output artifact + id: download-agent-output + continue-on-error: true + uses: actions/download-artifact@3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c # v8.0.1 + with: + name: agent + path: /tmp/gh-aw/ + - name: Setup agent output environment variable + id: setup-agent-output-env + if: steps.download-agent-output.outcome == 'success' + run: | + mkdir -p /tmp/gh-aw/ + find "/tmp/gh-aw/" -type f -print + echo "GH_AW_AGENT_OUTPUT=/tmp/gh-aw/agent_output.json" >> "$GITHUB_OUTPUT" + - name: Process no-op messages + id: noop + uses: actions/github-script@3a2844b7e9c422d3c10d287c895573f7108da1b3 # v9.0.0 + env: + GH_AW_AGENT_OUTPUT: ${{ steps.setup-agent-output-env.outputs.GH_AW_AGENT_OUTPUT }} + GH_AW_NOOP_MAX: "1" + GH_AW_WORKFLOW_NAME: "Evergreen -- PR Health Keeper" + GH_AW_RUN_URL: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }} + GH_AW_AGENT_CONCLUSION: ${{ needs.agent.result }} + GH_AW_NOOP_REPORT_AS_ISSUE: "true" + with: + github-token: ${{ secrets.GH_AW_GITHUB_TOKEN || secrets.GITHUB_TOKEN }} + script: | + const { 
setupGlobals } = require('${{ runner.temp }}/gh-aw/actions/setup_globals.cjs'); + setupGlobals(core, github, context, exec, io, getOctokit); + const { main } = require('${{ runner.temp }}/gh-aw/actions/handle_noop_message.cjs'); + await main(); + - name: Log detection run + id: detection_runs + uses: actions/github-script@3a2844b7e9c422d3c10d287c895573f7108da1b3 # v9.0.0 + env: + GH_AW_AGENT_OUTPUT: ${{ steps.setup-agent-output-env.outputs.GH_AW_AGENT_OUTPUT }} + GH_AW_WORKFLOW_NAME: "Evergreen -- PR Health Keeper" + GH_AW_RUN_URL: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }} + GH_AW_DETECTION_CONCLUSION: ${{ needs.detection.outputs.detection_conclusion }} + GH_AW_DETECTION_REASON: ${{ needs.detection.outputs.detection_reason }} + with: + github-token: ${{ secrets.GH_AW_GITHUB_TOKEN || secrets.GITHUB_TOKEN }} + script: | + const { setupGlobals } = require('${{ runner.temp }}/gh-aw/actions/setup_globals.cjs'); + setupGlobals(core, github, context, exec, io, getOctokit); + const { main } = require('${{ runner.temp }}/gh-aw/actions/handle_detection_runs.cjs'); + await main(); + - name: Record missing tool + id: missing_tool + uses: actions/github-script@3a2844b7e9c422d3c10d287c895573f7108da1b3 # v9.0.0 + env: + GH_AW_AGENT_OUTPUT: ${{ steps.setup-agent-output-env.outputs.GH_AW_AGENT_OUTPUT }} + GH_AW_MISSING_TOOL_CREATE_ISSUE: "true" + GH_AW_WORKFLOW_NAME: "Evergreen -- PR Health Keeper" + with: + github-token: ${{ secrets.GH_AW_GITHUB_TOKEN || secrets.GITHUB_TOKEN }} + script: | + const { setupGlobals } = require('${{ runner.temp }}/gh-aw/actions/setup_globals.cjs'); + setupGlobals(core, github, context, exec, io, getOctokit); + const { main } = require('${{ runner.temp }}/gh-aw/actions/missing_tool.cjs'); + await main(); + - name: Record incomplete + id: report_incomplete + uses: actions/github-script@3a2844b7e9c422d3c10d287c895573f7108da1b3 # v9.0.0 + env: + GH_AW_AGENT_OUTPUT: ${{ 
steps.setup-agent-output-env.outputs.GH_AW_AGENT_OUTPUT }} + GH_AW_REPORT_INCOMPLETE_CREATE_ISSUE: "true" + GH_AW_WORKFLOW_NAME: "Evergreen -- PR Health Keeper" + with: + github-token: ${{ secrets.GH_AW_GITHUB_TOKEN || secrets.GITHUB_TOKEN }} + script: | + const { setupGlobals } = require('${{ runner.temp }}/gh-aw/actions/setup_globals.cjs'); + setupGlobals(core, github, context, exec, io, getOctokit); + const { main } = require('${{ runner.temp }}/gh-aw/actions/report_incomplete_handler.cjs'); + await main(); + - name: Handle agent failure + id: handle_agent_failure + if: always() + uses: actions/github-script@3a2844b7e9c422d3c10d287c895573f7108da1b3 # v9.0.0 + env: + GH_AW_AGENT_OUTPUT: ${{ steps.setup-agent-output-env.outputs.GH_AW_AGENT_OUTPUT }} + GH_AW_WORKFLOW_NAME: "Evergreen -- PR Health Keeper" + GH_AW_RUN_URL: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }} + GH_AW_AGENT_CONCLUSION: ${{ needs.agent.result }} + GH_AW_WORKFLOW_ID: "evergreen" + GH_AW_ACTION_FAILURE_ISSUE_EXPIRES_HOURS: "168" + GH_AW_ENGINE_ID: "copilot" + GH_AW_CHECKOUT_PR_SUCCESS: ${{ needs.agent.outputs.checkout_pr_success }} + GH_AW_EFFECTIVE_TOKENS: ${{ needs.agent.outputs.effective_tokens || '' }} + GH_AW_EFFECTIVE_TOKENS_RATE_LIMIT_ERROR: ${{ needs.agent.outputs.effective_tokens_rate_limit_error || 'false' }} + GH_AW_INFERENCE_ACCESS_ERROR: ${{ needs.agent.outputs.inference_access_error }} + GH_AW_MCP_POLICY_ERROR: ${{ needs.agent.outputs.mcp_policy_error }} + GH_AW_AGENTIC_ENGINE_TIMEOUT: ${{ needs.agent.outputs.agentic_engine_timeout }} + GH_AW_MODEL_NOT_SUPPORTED_ERROR: ${{ needs.agent.outputs.model_not_supported_error }} + GH_AW_ENGINE_API_HOSTS: "api.enterprise.githubcopilot.com,api.githubcopilot.com,api.business.githubcopilot.com,api.individual.githubcopilot.com" + GH_AW_CODE_PUSH_FAILURE_ERRORS: ${{ needs.safe_outputs.outputs.code_push_failure_errors }} + GH_AW_CODE_PUSH_FAILURE_COUNT: ${{ needs.safe_outputs.outputs.code_push_failure_count 
}} + GH_AW_LOCKDOWN_CHECK_FAILED: ${{ needs.activation.outputs.lockdown_check_failed }} + GH_AW_STALE_LOCK_FILE_FAILED: ${{ needs.activation.outputs.stale_lock_file_failed }} + GH_AW_PUSH_REPO_MEMORY_RESULT: ${{ needs.push_repo_memory.result }} + GH_AW_REPO_MEMORY_VALIDATION_FAILED_default: ${{ needs.push_repo_memory.outputs.validation_failed_default }} + GH_AW_REPO_MEMORY_VALIDATION_ERROR_default: ${{ needs.push_repo_memory.outputs.validation_error_default }} + GH_AW_REPO_MEMORY_PATCH_SIZE_EXCEEDED_default: ${{ needs.push_repo_memory.outputs.patch_size_exceeded_default }} + GH_AW_GROUP_REPORTS: "false" + GH_AW_FAILURE_REPORT_AS_ISSUE: "true" + GH_AW_MISSING_TOOL_REPORT_AS_FAILURE: "true" + GH_AW_MISSING_DATA_REPORT_AS_FAILURE: "true" + GH_AW_TIMEOUT_MINUTES: "360" + GH_AW_MAX_EFFECTIVE_TOKENS: "25000000" + with: + github-token: ${{ secrets.GH_AW_GITHUB_TOKEN || secrets.GITHUB_TOKEN }} + script: | + const { setupGlobals } = require('${{ runner.temp }}/gh-aw/actions/setup_globals.cjs'); + setupGlobals(core, github, context, exec, io, getOctokit); + const { main } = require('${{ runner.temp }}/gh-aw/actions/handle_agent_failure.cjs'); + await main(); + + detection: + needs: + - activation + - agent + if: > + always() && needs.agent.result != 'skipped' && (needs.agent.outputs.output_types != '' || needs.agent.outputs.has_patch == 'true') + runs-on: ubuntu-latest + permissions: + contents: read + copilot-requests: write + outputs: + detection_conclusion: ${{ steps.detection_conclusion.outputs.conclusion }} + detection_reason: ${{ steps.detection_conclusion.outputs.reason }} + detection_success: ${{ steps.detection_conclusion.outputs.success }} + steps: + - name: Checkout actions folder + uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 + with: + repository: github/gh-aw + sparse-checkout: | + actions + persist-credentials: false + - name: Setup Scripts + id: setup + uses: ./actions/setup + with: + destination: ${{ runner.temp }}/gh-aw/actions + 
job-name: ${{ github.job }} + trace-id: ${{ needs.activation.outputs.setup-trace-id }} + parent-span-id: ${{ needs.activation.outputs.setup-parent-span-id || needs.activation.outputs.setup-span-id }} + env: + GH_AW_SETUP_WORKFLOW_NAME: "Evergreen -- PR Health Keeper" + GH_AW_CURRENT_WORKFLOW_REF: ${{ github.repository }}/.github/workflows/evergreen.lock.yml@${{ github.ref }} + GH_AW_INFO_VERSION: "1.0.43" + - name: Download agent output artifact + id: download-agent-output + continue-on-error: true + uses: actions/download-artifact@3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c # v8.0.1 + with: + name: agent + path: /tmp/gh-aw/ + - name: Setup agent output environment variable + id: setup-agent-output-env + if: steps.download-agent-output.outcome == 'success' + run: | + mkdir -p /tmp/gh-aw/ + find "/tmp/gh-aw/" -type f -print + echo "GH_AW_AGENT_OUTPUT=/tmp/gh-aw/agent_output.json" >> "$GITHUB_OUTPUT" + - name: Checkout repository for patch context + if: needs.agent.outputs.has_patch == 'true' + uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 + with: + persist-credentials: false + # --- Threat Detection --- + - name: Clean stale firewall files from agent artifact + run: | + rm -rf /tmp/gh-aw/sandbox/firewall/logs + rm -rf /tmp/gh-aw/sandbox/firewall/audit + - name: Download container images + run: bash "${RUNNER_TEMP}/gh-aw/actions/download_docker_images.sh" ghcr.io/github/gh-aw-firewall/agent:0.25.44 ghcr.io/github/gh-aw-firewall/api-proxy:0.25.44 ghcr.io/github/gh-aw-firewall/squid:0.25.44 + - name: Check if detection needed + id: detection_guard + if: always() + env: + OUTPUT_TYPES: ${{ needs.agent.outputs.output_types }} + HAS_PATCH: ${{ needs.agent.outputs.has_patch }} + run: | + if [[ -n "$OUTPUT_TYPES" || "$HAS_PATCH" == "true" ]]; then + echo "run_detection=true" >> "$GITHUB_OUTPUT" + echo "Detection will run: output_types=$OUTPUT_TYPES, has_patch=$HAS_PATCH" + else + echo "run_detection=false" >> "$GITHUB_OUTPUT" + echo "Detection 
skipped: no agent outputs or patches to analyze" + fi + - name: Clear MCP Config for detection + if: always() && steps.detection_guard.outputs.run_detection == 'true' + run: | + rm -f "${RUNNER_TEMP}/gh-aw/mcp-config/mcp-servers.json" + rm -f /home/runner/.copilot/mcp-config.json + rm -f "$GITHUB_WORKSPACE/.gemini/settings.json" + - name: Prepare threat detection files + if: always() && steps.detection_guard.outputs.run_detection == 'true' + run: | + mkdir -p /tmp/gh-aw/threat-detection/aw-prompts + cp /tmp/gh-aw/aw-prompts/prompt.txt /tmp/gh-aw/threat-detection/aw-prompts/prompt.txt 2>/dev/null || true + cp /tmp/gh-aw/agent_output.json /tmp/gh-aw/threat-detection/agent_output.json 2>/dev/null || true + for f in /tmp/gh-aw/aw-*.patch; do + [ -f "$f" ] && cp "$f" /tmp/gh-aw/threat-detection/ 2>/dev/null || true + done + for f in /tmp/gh-aw/aw-*.bundle; do + [ -f "$f" ] && cp "$f" /tmp/gh-aw/threat-detection/ 2>/dev/null || true + done + echo "Prepared threat detection files:" + ls -la /tmp/gh-aw/threat-detection/ 2>/dev/null || true + - name: Setup threat detection + if: always() && steps.detection_guard.outputs.run_detection == 'true' + uses: actions/github-script@3a2844b7e9c422d3c10d287c895573f7108da1b3 # v9.0.0 + env: + WORKFLOW_NAME: "Evergreen -- PR Health Keeper" + WORKFLOW_DESCRIPTION: "Evergreen -- keeps pull requests healthy by automatically fixing merge conflicts\nand failing CI checks. Runs on a short schedule, deterministically selects one\nPR per run, and gives up after 5 attempts that don't improve the same repo state." 
+ HAS_PATCH: ${{ needs.agent.outputs.has_patch }} + with: + script: | + const { setupGlobals } = require('${{ runner.temp }}/gh-aw/actions/setup_globals.cjs'); + setupGlobals(core, github, context, exec, io, getOctokit); + const { main } = require('${{ runner.temp }}/gh-aw/actions/setup_threat_detection.cjs'); + await main(); + - name: Ensure threat-detection directory and log + if: always() && steps.detection_guard.outputs.run_detection == 'true' + run: | + mkdir -p /tmp/gh-aw/threat-detection + touch /tmp/gh-aw/threat-detection/detection.log + - name: Setup Node.js + uses: actions/setup-node@48b55a011bda9f5d6aeb4c2d9c7362e8dae4041e # v6.4.0 + with: + node-version: '24' + package-manager-cache: false + - name: Install GitHub Copilot CLI + run: bash "${RUNNER_TEMP}/gh-aw/actions/install_copilot_cli.sh" 1.0.43 + env: + GH_HOST: github.com + - name: Install AWF binary + run: bash "${RUNNER_TEMP}/gh-aw/actions/install_awf_binary.sh" v0.25.44 + - name: Execute GitHub Copilot CLI + if: always() && steps.detection_guard.outputs.run_detection == 'true' + continue-on-error: true + id: detection_agentic_execution + # Copilot CLI tool arguments (sorted): + timeout-minutes: 20 + run: | + set -o pipefail + printf '%s' "$(date +%s%3N)" > /tmp/gh-aw/agent_cli_start_ms.txt + touch /tmp/gh-aw/agent-step-summary.md + GH_AW_NODE_BIN=$(command -v node 2>/dev/null || true) + export GH_AW_NODE_BIN + (umask 177 && touch /tmp/gh-aw/threat-detection/detection.log) + printf '%s\n' '{"$schema":"https://github.com/github/gh-aw-firewall/releases/download/v0.25.44/awf-config.schema.json","network":{"allowDomains":["api.business.githubcopilot.com","api.enterprise.githubcopilot.com","api.github.com","api.githubcopilot.com","api.individual.githubcopilot.com","github.com","host.docker.internal","telemetry.enterprise.githubcopilot.com"]},"apiProxy":{"enabled":true,"maxRuns":500,"maxEffectiveTokens":25000000},"container":{"imageTag":"0.25.44"}}' > "${RUNNER_TEMP}/gh-aw/awf-config.json" && cp 
"${RUNNER_TEMP}/gh-aw/awf-config.json" /tmp/gh-aw/awf-config.json + GH_AW_DOCKER_HOST_PATH_PREFIX_ARGS="" + if [[ "${DOCKER_HOST:-}" =~ ^tcp:// ]]; then + GH_AW_DOCKER_HOST_PATH_PREFIX_ARGS="--docker-host-path-prefix /tmp/gh-aw" + fi + # shellcheck disable=SC1003 + sudo -E awf --config "${RUNNER_TEMP}/gh-aw/awf-config.json" --container-workdir "${GITHUB_WORKSPACE}" --mount "${RUNNER_TEMP}/gh-aw:${RUNNER_TEMP}/gh-aw:ro" --mount "${RUNNER_TEMP}/gh-aw:/host${RUNNER_TEMP}/gh-aw:ro" ${GH_AW_DOCKER_HOST_PATH_PREFIX_ARGS} --env-all --exclude-env COPILOT_GITHUB_TOKEN --log-level info --proxy-logs-dir /tmp/gh-aw/sandbox/firewall/logs --audit-dir /tmp/gh-aw/sandbox/firewall/audit --enable-host-access --allow-host-ports 80,443,8080 --skip-pull \ + -- /bin/bash -c 'export PATH="$(find /opt/hostedtoolcache /home/runner/work/_tool -maxdepth 5 -type d -name bin 2>/dev/null | tr '\''\n'\'' '\'':'\'')$PATH"; [ -n "$GOROOT" ] && export PATH="$GOROOT/bin:$PATH" || true && GH_AW_NODE_EXEC="${GH_AW_NODE_BIN:-}"; if [ -z "$GH_AW_NODE_EXEC" ] || [ ! 
-x "$GH_AW_NODE_EXEC" ]; then GH_AW_NODE_EXEC="$(command -v node 2>/dev/null || true)"; fi; if [ -z "$GH_AW_NODE_EXEC" ]; then echo "node runtime missing on this runner — check runtimes.node in workflow YAML" >&2; exit 127; fi; "$GH_AW_NODE_EXEC" ${RUNNER_TEMP}/gh-aw/actions/copilot_harness.cjs /usr/local/bin/copilot --add-dir /tmp/gh-aw/ --log-level all --log-dir /tmp/gh-aw/sandbox/agent/logs/ --disable-builtin-mcps --no-ask-user --allow-all-tools --add-dir "${GITHUB_WORKSPACE}" --prompt-file /tmp/gh-aw/aw-prompts/prompt.txt' 2>&1 | tee -a /tmp/gh-aw/threat-detection/detection.log + env: + AWF_REFLECT_ENABLED: 1 + COPILOT_AGENT_RUNNER_TYPE: STANDALONE + COPILOT_API_KEY: dummy-byok-key-for-offline-mode + COPILOT_GITHUB_TOKEN: ${{ github.token }} + COPILOT_MODEL: ${{ vars.GH_AW_MODEL_DETECTION_COPILOT || 'claude-sonnet-4.6' }} + GH_AW_PHASE: detection + GH_AW_PROMPT: /tmp/gh-aw/aw-prompts/prompt.txt + GH_AW_VERSION: dev + GITHUB_API_URL: ${{ github.api_url }} + GITHUB_AW: true + GITHUB_COPILOT_INTEGRATION_ID: agentic-workflows + GITHUB_HEAD_REF: ${{ github.head_ref }} + GITHUB_REF_NAME: ${{ github.ref_name }} + GITHUB_SERVER_URL: ${{ github.server_url }} + GITHUB_STEP_SUMMARY: /tmp/gh-aw/agent-step-summary.md + GITHUB_WORKSPACE: ${{ github.workspace }} + GIT_AUTHOR_EMAIL: github-actions[bot]@users.noreply.github.com + GIT_AUTHOR_NAME: github-actions[bot] + GIT_COMMITTER_EMAIL: github-actions[bot]@users.noreply.github.com + GIT_COMMITTER_NAME: github-actions[bot] + S2STOKENS: true + XDG_CONFIG_HOME: /home/runner + - name: Upload threat detection log + if: always() && steps.detection_guard.outputs.run_detection == 'true' + uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a # v7.0.1 + with: + name: detection + path: /tmp/gh-aw/threat-detection/detection.log + if-no-files-found: ignore + - name: Parse and conclude threat detection + id: detection_conclusion + if: always() + continue-on-error: true + uses: 
actions/github-script@3a2844b7e9c422d3c10d287c895573f7108da1b3 # v9.0.0 + env: + RUN_DETECTION: ${{ steps.detection_guard.outputs.run_detection }} + DETECTION_AGENTIC_EXECUTION_OUTCOME: ${{ steps.detection_agentic_execution.outcome }} + GH_AW_DETECTION_CONTINUE_ON_ERROR: "true" + with: + script: | + try { + const { setupGlobals } = require('${{ runner.temp }}/gh-aw/actions/setup_globals.cjs'); + setupGlobals(core, github, context, exec, io, getOctokit); + const { main } = require('${{ runner.temp }}/gh-aw/actions/parse_threat_detection_results.cjs'); + await main(); + } catch (loadErr) { + const continueOnError = process.env.GH_AW_DETECTION_CONTINUE_ON_ERROR !== 'false'; + const detectionExecutionFailed = process.env.DETECTION_AGENTIC_EXECUTION_OUTCOME === 'failure'; + const msg = 'ERR_SYSTEM: \u274C Unexpected error loading threat detection module: ' + (loadErr && loadErr.message ? loadErr.message : String(loadErr)); + core.error(msg); + core.setOutput('reason', 'parse_error'); + if (continueOnError && !detectionExecutionFailed) { + core.warning('\u26A0\uFE0F ' + msg); + core.setOutput('conclusion', 'warning'); + core.setOutput('success', 'false'); + } else { + core.setOutput('conclusion', 'failure'); + core.setOutput('success', 'false'); + core.setFailed(msg); + } + } + + push_repo_memory: + needs: + - activation + - agent + - detection + if: > + always() && (!cancelled()) && (needs.detection.result == 'success' || needs.detection.result == 'skipped') && + needs.agent.result == 'success' + runs-on: ubuntu-slim + permissions: + contents: write + concurrency: + group: "push-repo-memory-${{ github.repository }}|memory/evergreen" + cancel-in-progress: false + outputs: + patch_size_exceeded_default: ${{ steps.push_repo_memory_default.outputs.patch_size_exceeded }} + validation_error_default: ${{ steps.push_repo_memory_default.outputs.validation_error }} + validation_failed_default: ${{ steps.push_repo_memory_default.outputs.validation_failed }} + steps: + - name: 
Checkout actions folder + uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 + with: + repository: github/gh-aw + sparse-checkout: | + actions + persist-credentials: false + - name: Setup Scripts + id: setup + uses: ./actions/setup + with: + destination: ${{ runner.temp }}/gh-aw/actions + job-name: ${{ github.job }} + trace-id: ${{ needs.activation.outputs.setup-trace-id }} + parent-span-id: ${{ needs.activation.outputs.setup-parent-span-id || needs.activation.outputs.setup-span-id }} + env: + GH_AW_SETUP_WORKFLOW_NAME: "Evergreen -- PR Health Keeper" + GH_AW_CURRENT_WORKFLOW_REF: ${{ github.repository }}/.github/workflows/evergreen.lock.yml@${{ github.ref }} + GH_AW_INFO_VERSION: "1.0.43" + - name: Checkout repository + uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 + with: + persist-credentials: false + sparse-checkout: . + - name: Configure Git credentials + env: + REPO_NAME: ${{ github.repository }} + SERVER_URL: ${{ github.server_url }} + GITHUB_TOKEN: ${{ github.token }} + run: | + git config --global user.email "github-actions[bot]@users.noreply.github.com" + git config --global user.name "github-actions[bot]" + git config --global am.keepcr true + # Re-authenticate git with GitHub token + SERVER_URL_STRIPPED="${SERVER_URL#https://}" + git remote set-url origin "https://x-access-token:${GITHUB_TOKEN}@${SERVER_URL_STRIPPED}/${REPO_NAME}.git" + echo "Git configured with standard GitHub Actions identity" + - name: Download repo-memory artifact (default) + uses: actions/download-artifact@3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c # v8.0.1 + continue-on-error: true + with: + name: repo-memory-default + path: /tmp/gh-aw/repo-memory/default + - name: Push repo-memory changes (default) + id: push_repo_memory_default + if: always() + uses: actions/github-script@3a2844b7e9c422d3c10d287c895573f7108da1b3 # v9.0.0 + env: + GH_TOKEN: ${{ github.token }} + GITHUB_RUN_ID: ${{ github.run_id }} + GITHUB_SERVER_URL: ${{ 
github.server_url }} + ARTIFACT_DIR: /tmp/gh-aw/repo-memory/default + MEMORY_ID: default + TARGET_REPO: ${{ github.repository }} + BRANCH_NAME: memory/evergreen + MAX_FILE_SIZE: 10240 + MAX_FILE_COUNT: 100 + MAX_PATCH_SIZE: 10240 + ALLOWED_EXTENSIONS: '[]' + FILE_GLOB_FILTER: "*.md" + with: + script: | + const { setupGlobals } = require('${{ runner.temp }}/gh-aw/actions/setup_globals.cjs'); + setupGlobals(core, github, context, exec, io, getOctokit); + const { main } = require('${{ runner.temp }}/gh-aw/actions/push_repo_memory.cjs'); + await main(); + - name: Restore actions folder + if: always() + uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 + with: + repository: github/gh-aw + sparse-checkout: | + actions/setup + sparse-checkout-cone-mode: true + persist-credentials: false + + safe_outputs: + needs: + - activation + - agent + - detection + if: (!cancelled()) && needs.agent.result != 'skipped' && needs.detection.result == 'success' + runs-on: ubuntu-slim + permissions: + contents: write + discussions: write + issues: write + pull-requests: write + timeout-minutes: 15 + env: + GH_AW_CALLER_WORKFLOW_ID: "${{ github.repository }}/evergreen" + GH_AW_DETECTION_CONCLUSION: ${{ needs.detection.outputs.detection_conclusion }} + GH_AW_DETECTION_REASON: ${{ needs.detection.outputs.detection_reason }} + GH_AW_EFFECTIVE_TOKENS: ${{ needs.agent.outputs.effective_tokens }} + GH_AW_ENGINE_ID: "copilot" + GH_AW_ENGINE_MODEL: ${{ needs.agent.outputs.model }} + GH_AW_ENGINE_VERSION: "1.0.43" + GH_AW_WORKFLOW_ID: "evergreen" + GH_AW_WORKFLOW_NAME: "Evergreen -- PR Health Keeper" + outputs: + code_push_failure_count: ${{ steps.process_safe_outputs.outputs.code_push_failure_count }} + code_push_failure_errors: ${{ steps.process_safe_outputs.outputs.code_push_failure_errors }} + comment_id: ${{ steps.process_safe_outputs.outputs.comment_id }} + comment_url: ${{ steps.process_safe_outputs.outputs.comment_url }} + create_discussion_error_count: ${{ 
steps.process_safe_outputs.outputs.create_discussion_error_count }} + create_discussion_errors: ${{ steps.process_safe_outputs.outputs.create_discussion_errors }} + process_safe_outputs_processed_count: ${{ steps.process_safe_outputs.outputs.processed_count }} + process_safe_outputs_temporary_id_map: ${{ steps.process_safe_outputs.outputs.temporary_id_map }} + push_commit_sha: ${{ steps.process_safe_outputs.outputs.push_commit_sha }} + push_commit_url: ${{ steps.process_safe_outputs.outputs.push_commit_url }} + steps: + - name: Checkout actions folder + uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 + with: + repository: github/gh-aw + sparse-checkout: | + actions + persist-credentials: false + - name: Setup Scripts + id: setup + uses: ./actions/setup + with: + destination: ${{ runner.temp }}/gh-aw/actions + job-name: ${{ github.job }} + trace-id: ${{ needs.activation.outputs.setup-trace-id }} + parent-span-id: ${{ needs.activation.outputs.setup-parent-span-id || needs.activation.outputs.setup-span-id }} + env: + GH_AW_SETUP_WORKFLOW_NAME: "Evergreen -- PR Health Keeper" + GH_AW_CURRENT_WORKFLOW_REF: ${{ github.repository }}/.github/workflows/evergreen.lock.yml@${{ github.ref }} + GH_AW_INFO_VERSION: "1.0.43" + - name: Download agent output artifact + id: download-agent-output + continue-on-error: true + uses: actions/download-artifact@3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c # v8.0.1 + with: + name: agent + path: /tmp/gh-aw/ + - name: Setup agent output environment variable + id: setup-agent-output-env + if: steps.download-agent-output.outcome == 'success' + run: | + mkdir -p /tmp/gh-aw/ + find "/tmp/gh-aw/" -type f -print + echo "GH_AW_AGENT_OUTPUT=/tmp/gh-aw/agent_output.json" >> "$GITHUB_OUTPUT" + - name: Download patch artifact + continue-on-error: true + uses: actions/download-artifact@3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c # v8.0.1 + with: + name: agent + path: /tmp/gh-aw/ + - name: Extract base branch from agent output + id: 
extract-base-branch + if: steps.download-agent-output.outcome == 'success' + shell: bash + run: | + if [ -f "/tmp/gh-aw/agent_output.json" ]; then + GH_AW_NODE=$(which node 2>/dev/null || command -v node 2>/dev/null || echo node) + BASE_BRANCH=$("$GH_AW_NODE" -e " + try { + const data = JSON.parse(require('fs').readFileSync('/tmp/gh-aw/agent_output.json', 'utf8')); + const item = (data.items || []).find(i => + (i.type === 'create_pull_request' || i.type === 'push_to_pull_request_branch') && + i.base_branch + ); + if (item) process.stdout.write(item.base_branch); + } catch(e) {} + " 2>/dev/null || true) + # Validate: only allow safe git branch name characters + if [[ "$BASE_BRANCH" =~ ^[a-zA-Z0-9/_.-]+$ ]] && [ ${#BASE_BRANCH} -le 255 ]; then + printf 'base-branch=%s\n' "$BASE_BRANCH" >> "$GITHUB_OUTPUT" + echo "Extracted base branch from safe output: $BASE_BRANCH" + fi + fi + - name: Checkout repository + if: (!cancelled()) && needs.agent.result != 'skipped' && contains(needs.agent.outputs.output_types, 'push_to_pull_request_branch') + uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 + with: + ref: ${{ steps.extract-base-branch.outputs.base-branch || github.base_ref || github.event.pull_request.base.ref || github.ref_name || github.event.repository.default_branch }} + token: ${{ secrets.GH_AW_GITHUB_TOKEN || secrets.GITHUB_TOKEN }} + persist-credentials: false + fetch-depth: 1 + - name: Configure Git credentials + if: (!cancelled()) && needs.agent.result != 'skipped' && contains(needs.agent.outputs.output_types, 'push_to_pull_request_branch') + env: + REPO_NAME: ${{ github.repository }} + SERVER_URL: ${{ github.server_url }} + GIT_TOKEN: ${{ secrets.GH_AW_GITHUB_TOKEN || secrets.GITHUB_TOKEN }} + run: | + git config --global user.email "github-actions[bot]@users.noreply.github.com" + git config --global user.name "github-actions[bot]" + git config --global am.keepcr true + # Re-authenticate git with GitHub token + 
SERVER_URL_STRIPPED="${SERVER_URL#https://}" + git remote set-url origin "https://x-access-token:${GIT_TOKEN}@${SERVER_URL_STRIPPED}/${REPO_NAME}.git" + echo "Git configured with standard GitHub Actions identity" + - name: Configure GH_HOST for enterprise compatibility + id: ghes-host-config + shell: bash + run: | + # Derive GH_HOST from GITHUB_SERVER_URL so the gh CLI targets the correct + # GitHub instance (GHES/GHEC). On github.com this is a harmless no-op. + GH_HOST="${GITHUB_SERVER_URL#https://}" + GH_HOST="${GH_HOST#http://}" + echo "GH_HOST=${GH_HOST}" >> "$GITHUB_ENV" + - name: Process Safe Outputs + id: process_safe_outputs + uses: actions/github-script@3a2844b7e9c422d3c10d287c895573f7108da1b3 # v9.0.0 + env: + GH_AW_AGENT_OUTPUT: ${{ steps.setup-agent-output-env.outputs.GH_AW_AGENT_OUTPUT }} + GH_AW_ALLOWED_DOMAINS: "api.business.githubcopilot.com,api.enterprise.githubcopilot.com,api.github.com,api.githubcopilot.com,api.individual.githubcopilot.com,api.npms.io,api.snapcraft.io,archive.ubuntu.com,azure.archive.ubuntu.com,bun.sh,cdn.jsdelivr.net,crl.geotrust.com,crl.globalsign.com,crl.identrust.com,crl.sectigo.com,crl.thawte.com,crl.usertrust.com,crl.verisign.com,crl3.digicert.com,crl4.digicert.com,crls.ssl.com,deb.nodesource.com,deno.land,esm.sh,get.pnpm.io,github.com,go.dev,golang.org,googleapis.deno.dev,googlechromelabs.github.io,goproxy.io,host.docker.internal,json-schema.org,json.schemastore.org,jsr.io,keyserver.ubuntu.com,nodejs.org,npm.pkg.github.com,npmjs.com,npmjs.org,ocsp.digicert.com,ocsp.geotrust.com,ocsp.globalsign.com,ocsp.identrust.com,ocsp.sectigo.com,ocsp.ssl.com,ocsp.thawte.com,ocsp.usertrust.com,ocsp.verisign.com,packagecloud.io,packages.cloud.google.com,packages.microsoft.com,pkg.go.dev,ppa.launchpad.net,proxy.golang.org,raw.githubusercontent.com,registry.bower.io,registry.npmjs.com,registry.npmjs.org,registry.yarnpkg.com,releaseassets.githubusercontent.com,repo.yarnpkg.com,s.symcb.com,s.symcd.com,security.ubuntu.com,skimdb.npmjs.com,stor
age.googleapis.com,sum.golang.org,telemetry.enterprise.githubcopilot.com,telemetry.vercel.com,ts-crl.ws.symantec.com,ts-ocsp.ws.symantec.com,www.googleapis.com,www.npmjs.com,www.npmjs.org,yarnpkg.com" + GITHUB_SERVER_URL: ${{ github.server_url }} + GITHUB_API_URL: ${{ github.api_url }} + GH_AW_SAFE_OUTPUTS_HANDLER_CONFIG: "{\"add_comment\":{\"max\":3,\"target\":\"*\"},\"create_report_incomplete_issue\":{},\"missing_data\":{},\"missing_tool\":{},\"noop\":{\"max\":1,\"report-as-issue\":\"true\"},\"push_to_pull_request_branch\":{\"if_no_changes\":\"warn\",\"max\":3,\"max_patch_size\":1024,\"protect_top_level_dot_folders\":true,\"protected_files\":[\"package.json\",\"bun.lockb\",\"bunfig.toml\",\"deno.json\",\"deno.jsonc\",\"deno.lock\",\"global.json\",\"NuGet.Config\",\"Directory.Packages.props\",\"mix.exs\",\"mix.lock\",\"go.mod\",\"go.sum\",\"stack.yaml\",\"stack.yaml.lock\",\"pom.xml\",\"build.gradle\",\"build.gradle.kts\",\"settings.gradle\",\"settings.gradle.kts\",\"gradle.properties\",\"package-lock.json\",\"yarn.lock\",\"pnpm-lock.yaml\",\"npm-shrinkwrap.json\",\"requirements.txt\",\"Pipfile\",\"Pipfile.lock\",\"pyproject.toml\",\"setup.py\",\"setup.cfg\",\"Gemfile\",\"Gemfile.lock\",\"uv.lock\",\"CODEOWNERS\",\"DESIGN.md\",\"README.md\",\"CONTRIBUTING.md\",\"CHANGELOG.md\",\"SECURITY.md\",\"CODE_OF_CONDUCT.md\",\"AGENTS.md\",\"CLAUDE.md\",\"GEMINI.md\"],\"protected_files_policy\":\"allowed\",\"target\":\"*\"},\"report_incomplete\":{}}" + GH_AW_CI_TRIGGER_TOKEN: ${{ secrets.GH_AW_CI_TRIGGER_TOKEN }} + with: + github-token: ${{ secrets.GH_AW_GITHUB_TOKEN || secrets.GITHUB_TOKEN }} + script: | + const { setupGlobals } = require('${{ runner.temp }}/gh-aw/actions/setup_globals.cjs'); + setupGlobals(core, github, context, exec, io, getOctokit); + const { main } = require('${{ runner.temp }}/gh-aw/actions/safe_output_handler_manager.cjs'); + await main(); + - name: Upload Safe Outputs Items + if: always() + uses: 
actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a # v7.0.1 + with: + name: safe-outputs-items + path: | + /tmp/gh-aw/safe-output-items.jsonl + /tmp/gh-aw/temporary-id-map.json + if-no-files-found: ignore + - name: Restore actions folder + if: always() + uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 + with: + repository: github/gh-aw + sparse-checkout: | + actions/setup + sparse-checkout-cone-mode: true + persist-credentials: false + diff --git a/.github/workflows/evergreen.md b/.github/workflows/evergreen.md new file mode 100644 index 0000000..9ce311e --- /dev/null +++ b/.github/workflows/evergreen.md @@ -0,0 +1,602 @@ +--- +description: | + Evergreen -- keeps pull requests healthy by automatically fixing merge conflicts + and failing CI checks. Runs on a short schedule, deterministically selects one + PR per run, and gives up after 5 attempts that don't improve the same repo state. + +on: + schedule: every 5m + workflow_dispatch: + inputs: + pr_number: + description: "Fix a specific PR by number (bypasses scheduling)" + required: false + type: string + +permissions: read-all + +timeout-minutes: 360 + +network: + allowed: + - defaults + - node + - go + - releaseassets.githubusercontent.com + +safe-outputs: + push-to-pull-request-branch: + target: "*" + max: 3 + protected-files: allowed + add-comment: + max: 3 + target: "*" + +checkout: + fetch: ["*"] + fetch-depth: 0 + +tools: + github: + toolsets: [repos, pull_requests, issues, actions] + bash: true + repo-memory: + branch-name: memory/evergreen + file-glob: ["*.md"] + +imports: + - shared/reporting.md + +steps: + - name: Find a PR that needs attention + id: find-pr + env: + GITHUB_TOKEN: ${{ github.token }} + GH_AW_CI_TRIGGER_TOKEN: ${{ secrets.GH_AW_CI_TRIGGER_TOKEN }} + GITHUB_REPOSITORY: ${{ github.repository }} + FORCED_PR: ${{ github.event.inputs.pr_number }} + run: | + python3 - << 'PYEOF' + import os, json, re, subprocess, sys + import urllib.request, urllib.error 
+ + def emit_selected_output(pr_number): + """Expose `selected` as a step output for workflow gating. + Empty string means no PR needs attention; otherwise the PR number.""" + gh_output = os.environ.get("GITHUB_OUTPUT") + if gh_output: + with open(gh_output, "a") as f: + f.write(f"selected={'' if pr_number is None else pr_number}\n") + + token = os.environ.get("GITHUB_TOKEN", "") + repo = os.environ.get("GITHUB_REPOSITORY", "") + forced_pr = os.environ.get("FORCED_PR", "").strip() + + repo_memory_dir = "/tmp/gh-aw/repo-memory/evergreen" + output_file = "/tmp/gh-aw/evergreen.json" + os.makedirs("/tmp/gh-aw", exist_ok=True) + + MAX_ATTEMPTS = 5 + + def api_get(url): + """Make an authenticated GET request to the GitHub API.""" + req = urllib.request.Request(url, headers={ + "Authorization": f"token {token}", + "Accept": "application/vnd.github.v3+json", + }) + with urllib.request.urlopen(req, timeout=30) as resp: + return json.loads(resp.read().decode()) + + def get_all_open_prs(): + """Fetch all open PRs, paginated.""" + prs = [] + page = 1 + while True: + url = f"https://api.github.com/repos/{repo}/pulls?state=open&per_page=100&page={page}&sort=number&direction=asc" + batch = api_get(url) + if not batch: + break + prs.extend(batch) + if len(batch) < 100: + break + page += 1 + return prs + + def get_check_status(pr): + """Get combined CI check status for a PR's head commit.""" + head_sha = pr["head"]["sha"] + url = f"https://api.github.com/repos/{repo}/commits/{head_sha}/status" + try: + status = api_get(url) + return status.get("state", "unknown") + except Exception as e: + print(f" Warning: could not fetch status for PR #{pr['number']}: {e}") + return "unknown" + + def get_check_runs(pr): + """Get check runs for a PR's head commit.""" + head_sha = pr["head"]["sha"] + url = f"https://api.github.com/repos/{repo}/commits/{head_sha}/check-runs" + try: + data = api_get(url) + return data.get("check_runs", []) + except Exception as e: + print(f" Warning: could not fetch 
check runs for PR #{pr['number']}: {e}") + return [] + + def read_attempt_state(pr_number): + """Read attempt tracking state from repo-memory.""" + state_file = os.path.join(repo_memory_dir, f"pr-{pr_number}.md") + if not os.path.isfile(state_file): + return {"attempts": 0, "head_sha": None} + with open(state_file, encoding="utf-8") as f: + content = f.read() + state = {"attempts": 0, "head_sha": None} + m = re.search(r'\|\s*head_sha\s*\|\s*(\S+)\s*\|', content) + if m: + state["head_sha"] = m.group(1) + m = re.search(r'\|\s*attempts\s*\|\s*(\d+)\s*\|', content) + if m: + state["attempts"] = int(m.group(1)) + return state + + def get_commit_date(sha): + """Return the committer date (ISO 8601) for a given commit SHA, or None.""" + url = f"https://api.github.com/repos/{repo}/commits/{sha}" + try: + data = api_get(url) + return data.get("commit", {}).get("committer", {}).get("date") + except Exception as e: + print(f" Warning: could not fetch commit {sha[:12]}: {e}") + return None + + def is_autoloop_pr(pr): + """Return True if the PR is from an autoloop branch. + Branch name is the primary gate (labels can be added by anyone on + public repos); the `autoloop` label is just an additional signal.""" + head_ref = pr.get("head", {}).get("ref", "") or "" + return head_ref.startswith("autoloop/") + + def get_behind_by(pr): + """Return how many commits the PR base branch is ahead of the PR head.""" + base = pr["base"]["ref"] + head_sha = pr["head"]["sha"] + url = f"https://api.github.com/repos/{repo}/compare/{base}...{head_sha}" + try: + data = api_get(url) + return int(data.get("behind_by", 0) or 0) + except Exception as e: + print(f" Warning: could not fetch compare for PR #{pr['number']}: {e}") + return 0 + + def trigger_ci_workflow(branch): + """Trigger this repository's ci.yml for `branch` by pushing an empty + commit with GH_AW_CI_TRIGGER_TOKEN. 
Because the push is attributed to + a real user instead of GITHUB_TOKEN, GitHub emits the PR synchronize + event that starts the pull_request CI workflow; this is separate from + the manual workflow_dispatch trigger available in ci.yml.""" + ci_token = os.environ.get("GH_AW_CI_TRIGGER_TOKEN", "") or token + try: + # Get current HEAD SHA + url = f"https://api.github.com/repos/{repo}/git/ref/heads/{branch}" + req = urllib.request.Request(url, headers={ + "Authorization": f"token {ci_token}", + "Accept": "application/vnd.github.v3+json", + }) + with urllib.request.urlopen(req, timeout=30) as resp: + head_sha = json.loads(resp.read().decode())["object"]["sha"] + + # Create an empty commit on top of HEAD + url = f"https://api.github.com/repos/{repo}/git/commits" + payload = json.dumps({ + "message": "chore: trigger CI [evergreen]", + "tree": json.loads(urllib.request.urlopen( + urllib.request.Request( + f"https://api.github.com/repos/{repo}/git/commits/{head_sha}", + headers={"Authorization": f"token {ci_token}", "Accept": "application/vnd.github.v3+json"}, + ), timeout=30 + ).read().decode())["tree"]["sha"], + "parents": [head_sha], + }).encode() + req = urllib.request.Request(url, data=payload, method="POST", headers={ + "Authorization": f"token {ci_token}", + "Accept": "application/vnd.github.v3+json", + "Content-Type": "application/json", + }) + with urllib.request.urlopen(req, timeout=30) as resp: + new_sha = json.loads(resp.read().decode())["sha"] + + # Update the branch ref to the new commit + url = f"https://api.github.com/repos/{repo}/git/refs/heads/{branch}" + payload = json.dumps({"sha": new_sha}).encode() + req = urllib.request.Request(url, data=payload, method="PATCH", headers={ + "Authorization": f"token {ci_token}", + "Accept": "application/vnd.github.v3+json", + "Content-Type": "application/json", + }) + with urllib.request.urlopen(req, timeout=30) as resp: + return 200 <= resp.status < 300 + except urllib.error.HTTPError as e: + print(f" Warning: CI 
trigger (empty commit) failed for {branch}: HTTP {e.code} {e.reason}") + return False + except Exception as e: + print(f" Warning: CI trigger (empty commit) failed for {branch}: {e}") + return False + + def post_pr_comment(pr_number, body): + """Post a comment on a PR using the issues comments API.""" + url = f"https://api.github.com/repos/{repo}/issues/{pr_number}/comments" + payload = json.dumps({"body": body}).encode() + req = urllib.request.Request(url, data=payload, method="POST", headers={ + "Authorization": f"token {token}", + "Accept": "application/vnd.github.v3+json", + "Content-Type": "application/json", + }) + try: + with urllib.request.urlopen(req, timeout=30) as resp: + return 200 <= resp.status < 300 + except Exception as e: + print(f" Warning: could not post comment on PR #{pr_number}: {e}") + return False + + def pr_needs_attention(pr): + """Check if a PR has merge conflicts, is behind main, or has failing CI. + Returns a list of issues.""" + issues = [] + + # Check mergeable state + # Need to fetch full PR details for mergeable info + pr_url = f"https://api.github.com/repos/{repo}/pulls/{pr['number']}" + try: + full_pr = api_get(pr_url) + mergeable = full_pr.get("mergeable") + mergeable_state = full_pr.get("mergeable_state", "unknown") + if mergeable is False: + issues.append("merge_conflict") + elif mergeable_state == "dirty": + issues.append("merge_conflict") + except Exception as e: + print(f" Warning: could not fetch mergeable state for PR #{pr['number']}: {e}") + + # Check if the PR branch is behind its base branch (e.g., main moved forward). + # We always want to merge main first before fixing CI, so flag this explicitly. 
+ behind_by = get_behind_by(pr) + if behind_by > 0 and "merge_conflict" not in issues: + issues.append(f"behind_main: {behind_by} commit(s)") + + # Check CI status via check runs + check_runs = get_check_runs(pr) + failed_checks = [] + for cr in check_runs: + conclusion = cr.get("conclusion") + status = cr.get("status") + name = cr.get("name", "unknown") + if conclusion in ("failure", "timed_out", "action_required"): + failed_checks.append(name) + elif status == "completed" and conclusion not in ("success", "neutral", "skipped"): + if conclusion is not None: + failed_checks.append(name) + if failed_checks: + issues.append(f"failing_checks: {', '.join(failed_checks)}") + + # Also check commit status API (some checks use the older status API) + combined_status = get_check_status(pr) + if combined_status == "failure": + if not failed_checks: + issues.append("failing_status") + + # Detect missing/stale CI for autoloop PRs. + # Pushes via GITHUB_TOKEN don't trigger workflows, so autoloop PRs + # can sit indefinitely with no checks. Only autoloop branches are + # eligible -- never trigger CI automatically on outside-contributor PRs. + if is_autoloop_pr(pr): + completed_runs = [cr for cr in check_runs if cr.get("status") == "completed"] + # No check runs at all on this HEAD SHA + if not check_runs: + issues.append("missing_checks: no check runs on HEAD") + # All runs are still queued / in_progress and the HEAD has been + # sitting around for a while -- likely a stuck/missing trigger. + elif not completed_runs: + head_date = get_commit_date(pr["head"]["sha"]) + if head_date: + # If HEAD is older than 15 minutes and nothing has completed, + # treat it as missing (a real CI run would have started by now). 
+ try: + from datetime import datetime, timezone + ht = datetime.fromisoformat(head_date.replace("Z", "+00:00")) + age_s = (datetime.now(timezone.utc) - ht).total_seconds() + if age_s > 15 * 60: + issues.append("missing_checks: only queued/in-progress checks on HEAD") + except Exception: + pass + + return issues + + # --- Main logic --- + + print("=== Evergreen PR Health Check ===") + print(f"Repository: {repo}") + + prs = get_all_open_prs() + print(f"Found {len(prs)} open PR(s)") + + if not prs: + print("No open PRs. Nothing to do.") + with open(output_file, "w") as f: + json.dump({"selected": None, "reason": "no_open_prs"}, f) + emit_selected_output(None) + sys.exit(0) + + # Evaluate each PR deterministically (sorted by PR number ascending) + candidates = [] + skipped = [] + ci_triggered = [] + + # If a specific PR is forced, only check that one + if forced_pr: + prs = [pr for pr in prs if str(pr["number"]) == forced_pr] + if not prs: + print(f"ERROR: PR #{forced_pr} not found among open PRs.") + sys.exit(1) + print(f"FORCED: checking only PR #{forced_pr}") + + for pr in sorted(prs, key=lambda p: p["number"]): + pr_num = pr["number"] + head_sha = pr["head"]["sha"] + print(f"\nChecking PR #{pr_num}: {pr['title'][:60]}...") + print(f" Head SHA: {head_sha[:12]}") + + issues = pr_needs_attention(pr) + if not issues: + print(f" Status: healthy (no issues)") + continue + + print(f" Issues: {issues}") + + # Handle `missing_checks` for autoloop PRs directly in the pre-flight, + # without invoking the agent. The action is purely an API dispatch -- + # no code fix is needed -- and keeping the privileged CI trigger token + # out of the agent context is a security win. We only do this for + # autoloop branches (the detector enforces this), and only if + # `missing_checks` is the *only* issue: any other issue (merge + # conflict, behind main, failing checks) still needs the agent. 
+ if ( + len(issues) == 1 + and issues[0].startswith("missing_checks") + and is_autoloop_pr(pr) + ): + branch = pr["head"]["ref"] + # Cap retries on the same SHA so we don't spam-dispatch on a + # truly-broken workflow. + attempt_state = read_attempt_state(pr_num) + prior_attempts = ( + attempt_state["attempts"] if attempt_state["head_sha"] == head_sha else 0 + ) + if prior_attempts >= MAX_ATTEMPTS: + skipped.append({ + "pr": pr_num, + "reason": ( + f"missing_checks: max dispatch attempts ({MAX_ATTEMPTS}) " + f"reached on SHA {head_sha[:12]}" + ), + }) + print(f" SKIPPED: max missing_checks attempts reached") + continue + print(f" Triggering ci.yml on branch {branch} (attempt {prior_attempts + 1}/{MAX_ATTEMPTS})") + ok = trigger_ci_workflow(branch) + if ok: + print(f" [+] Triggered ci.yml on {branch}") + workflow_url = ( + f"https://github.com/{repo}/actions/workflows/ci.yml" + f"?query=branch%3A{branch}" + ) + post_pr_comment( + pr_num, + ( + "Evergreen: this PR's HEAD had no completed CI checks, " + "so I pushed an empty commit to trigger the `ci.yml` workflow on this branch. " + f"See [recent CI runs]({workflow_url}).\n\n" + "_(Triggered automatically because pushes via `GITHUB_TOKEN` " + "do not start workflows.)_" + ), + ) + ci_triggered.append({ + "pr": pr_num, + "branch": branch, + "head_sha": head_sha, + }) + # Persist attempt count so we eventually give up if dispatching + # never produces check runs. 
+ os.makedirs(repo_memory_dir, exist_ok=True) + state_path = os.path.join(repo_memory_dir, f"pr-{pr_num}.md") + from datetime import datetime, timezone + ts = datetime.now(timezone.utc).strftime("%Y-%m-%dT%H:%M:%SZ") + with open(state_path, "w", encoding="utf-8") as sf: + sf.write( + f"# Evergreen: PR #{pr_num}\n\n" + f"## State\n\n" + f"| Field | Value |\n" + f"|:---|:---|\n" + f"| head_sha | {head_sha} |\n" + f"| attempts | {prior_attempts + 1} |\n" + f"| last_run | {ts} |\n" + f"| last_result | ci_dispatched |\n" + ) + else: + print(f" [x] Failed to dispatch ci.yml on {branch}") + skipped.append({ + "pr": pr_num, + "reason": "missing_checks: CI trigger push failed", + }) + # Do NOT add to candidates -- the agent has nothing to fix here. + continue + + # Check attempt tracking + attempt_state = read_attempt_state(pr_num) + if attempt_state["head_sha"] == head_sha: + attempts = attempt_state["attempts"] + print(f" Attempts on this SHA: {attempts}/{MAX_ATTEMPTS}") + if attempts >= MAX_ATTEMPTS: + skipped.append({ + "pr": pr_num, + "reason": f"max attempts ({MAX_ATTEMPTS}) reached on SHA {head_sha[:12]}", + }) + print(f" SKIPPED: max attempts reached") + continue + else: + attempts = 0 + print(f" New SHA detected -- resetting attempt counter") + + candidates.append({ + "pr_number": pr_num, + "title": pr["title"], + "head_sha": head_sha, + "base_branch": pr["base"]["ref"], + "head_branch": pr["head"]["ref"], + "issues": issues, + "attempts": attempts, + }) + + # Select the first candidate (lowest PR number -- deterministic) + selected = candidates[0] if candidates else None + + result = { + "selected": selected, + "skipped": skipped, + "ci_triggered": ci_triggered, + "total_open_prs": len(prs), + "candidates_found": len(candidates), + } + + with open(output_file, "w") as f: + json.dump(result, f, indent=2) + + if selected: + branch = selected["head_branch"] + print(f"Checking out PR branch before agent run: {branch}") + subprocess.check_call(["git", "checkout", "-B", 
branch, f"origin/{branch}"]) + subprocess.check_call(["git", "branch", "--set-upstream-to", f"origin/{branch}", branch]) + print(f"\n>>> Selected PR #{selected['pr_number']}: {selected['title']}") + print(f" Issues: {selected['issues']}") + print(f" Attempt: {selected['attempts'] + 1}/{MAX_ATTEMPTS}") + emit_selected_output(selected["pr_number"]) + else: + print("\nNo PRs need attention. Nothing to do.") + emit_selected_output(None) + sys.exit(0) + PYEOF + +engine: copilot + +features: + copilot-requests: true +--- + +# Evergreen -- PR Health Keeper + +You are the Evergreen agent. Your job is to fix pull requests that have merge conflicts or failing CI checks. + +## Context + +A pre-flight step has already identified a PR that needs attention. Read the selection data from `/tmp/gh-aw/evergreen.json` to understand which PR to fix and what issues it has. + +## Workflow + +1. **Read the selection file** at `/tmp/gh-aw/evergreen.json`. It contains: + - `selected.pr_number` -- the PR to fix + - `selected.issues` -- list of problems (e.g., `"merge_conflict"`, `"failing_checks: Test & Lint"`) + - `selected.head_sha` -- current HEAD of the PR branch + - `selected.head_branch` -- the PR's branch name + - `selected.base_branch` -- the target branch (usually `main`) + - `selected.attempts` -- how many times we've already tried on this SHA + + > If `selected` is `null`, no PRs need attention right now. Call the **noop** tool with a message like "All PRs are healthy -- nothing to fix." and stop. + +2. The pre-flight step already checks out `selected.head_branch` as a named local tracking branch before you start. Keep working on that branch (do not switch back to `main` or use detached HEAD). + +3. **Fix the issues** -- always follow this sequence, in order. 
Each push is a separate `push-to-pull-request-branch` call: + + ### Step 1 -- Merge `main` first if the PR is behind (or has conflicts) + If `selected.issues` contains `"merge_conflict"` **or** any `"behind_main: ..."` entry, you must bring the branch up to date with `main` before doing anything else: + + - `git fetch origin main` + - `git merge origin/main` (or `git merge origin/{base_branch}` using `selected.base_branch` if the base isn't `main`) + - Resolve any conflicts intelligently by understanding the intent of both sides. If the PR is from an autoloop branch, prefer the PR's changes for feature code and `main`'s changes for infrastructure/config. + - Run tests/lint/typecheck locally to make sure the merge is clean. + + ### Step 2 -- Push the merge as its own commit + - Push the merge commit using `push-to-pull-request-branch` **before doing anything else**. + - This is the *first* push of the run. It contains *only* the merge with `main` (plus any conflict resolutions). Do **not** mix CI-fix changes into this patch. + - Merging `main` often fixes CI on its own (the failure was just drift). After the push, re-check whether CI is still failing on the new HEAD. + + ### Step 3 -- Re-check CI after the merge + - Look at the failing checks for the new HEAD SHA (the one you just pushed). + - If everything is green or pending-but-likely-green, you're done -- skip to step 5. + - If checks are still failing, continue to step 4. + + ### Step 4 -- Fix the failing checks (second push) + - Read the failing check logs using GitHub tools. + - Identify the root cause (test failures, lint errors, type errors, build failures). + - Fix the code on the (now-merged) PR branch. + - Run the relevant checks locally to verify the fix before pushing. + - Push the fix using `push-to-pull-request-branch`. This is the *second* push of the run, and it contains *only* the CI fix -- no merge commits. + + ### Step 5 -- Update tracking and comment + Continue to steps 4 and 5 below. + +4. **Update attempt tracking** by writing to repo-memory. 
Write a file to the repo-memory directory at `/tmp/gh-aw/repo-memory/evergreen/pr-{number}.md` with this format: + + ```markdown + # Evergreen: PR #{number} + + ## State + + | Field | Value | + |:---|:---| + | head_sha | {sha_after_push} | + | attempts | {new_attempt_count} | + | last_run | {ISO 8601 timestamp} | + | last_result | {success or failure} | + ``` + + - If you **pushed a fix** (or a clean merge), set `head_sha` to the new SHA (post-push), reset `attempts` to `0`, and set `last_result` to `success`. + - If you **could not fix** the issue, keep the original `head_sha`, increment `attempts` by 1, and set `last_result` to `failure`. + +5. **Add a comment** on the PR summarizing what you did (or why you couldn't fix it). If you pushed both a merge and a CI fix, mention both. + +## Rules + +- **Be surgical**: make the minimum changes needed to fix the issue. Do not refactor, improve, or add features. +- **Don't break things**: always run tests/lint/typecheck locally before pushing. +- **Always merge `main` first, as its own push.** If the PR branch is behind `main` (or has merge conflicts), the *first* `push-to-pull-request-branch` call of the run must contain only the merge commit (and any conflict resolutions). Do **NOT** include a merge of `main` inside a CI-fix patch -- that's a separate, second push. Mixing the two causes patch conflicts when the remote PR branch hasn't been merged yet. +- **One concern per push**: the merge push contains only the merge; the fix push contains only the fix. Never combine them. +- **Give up gracefully**: if you cannot fix the issue after investigating, update the attempt counter and leave a comment explaining what went wrong. Do not force-push or make destructive changes. +- **One PR per run**: only fix the selected PR. Do not touch other PRs. +- **Respect the 5-attempt limit**: the pre-flight step will stop selecting this PR once attempts reach 5 on the same HEAD SHA. 
If the SHA changes (someone else pushes), the counter resets. + +## Autoloop PRs + +Evergreen treats Autoloop draft PRs (branch name `autoloop/*`, label `autoloop`) the same as human-authored PRs for CI-failure and merge-conflict fixing. These PRs are produced by the Autoloop agent (`.github/workflows/autoloop.md`), which has its own in-iteration fix-retry loop (up to 5 attempts per iteration). If Autoloop exhausts its budget or hits its per-iteration wall-clock cap, it sets `paused: true` in its state file (`{program-name}.md` on the `memory/autoloop` branch) with a `pause_reason` like `"ci-fix-exhausted: ..."` or `"stuck in CI fix loop: ..."`. The PR is left in a failing state -- deliberately, so Evergreen (or a human) can continue from there. + +When Evergreen is selected for an Autoloop PR: + +1. Identify it by the branch prefix `autoloop/` and/or the `autoloop` label. +2. Attempt the fix as usual -- read failing check logs, make the minimum change, run local checks, push via `push-to-pull-request-branch`. +3. If the push succeeds **and** you believe the fix is correct, also **un-pause the Autoloop program**: + - Clone or checkout the `memory/autoloop` branch. + - Find the state file `{program-name}.md` where `{program-name}` is the part of the branch name after `autoloop/`. + - In the **Machine State** table, set `Paused` to `false` and `Pause Reason` to `--`. + - Commit and push the state-file change to `memory/autoloop`. + - Leave a comment on the Autoloop program issue (`[Autoloop: {program-name}]`, labeled `autoloop-program`) noting that Evergreen pushed a CI fix and un-paused the program, with links to the commit and the newly-green check run. +4. If you cannot fix it, the standard attempt-tracker (5 attempts per HEAD SHA) applies -- do **not** un-pause. Autoloop remains paused for human review. + +> The same 5-attempts-per-SHA rule applies to Autoloop PRs: Evergreen eventually gives up rather than burning cycles on a hopelessly broken change. 
+ +## Missing CI checks (autoloop PRs only -- handled in pre-flight) + +If an autoloop PR's HEAD has no CI check runs (or only stuck queued/in-progress runs), the **pre-flight step** detects this (`missing_checks` issue) and dispatches the `ci.yml` workflow directly via the GitHub API using `GH_AW_CI_TRIGGER_TOKEN`. It also posts a comment on the PR. No agent run is needed in that case, so the PR will not appear in `selected` for `missing_checks`-only issues. + +You will only see a `missing_checks` issue in `selected.issues` if it is combined with another fixable issue (e.g. `merge_conflict` or `failing_checks`). In that case, focus on the *other* issue: pre-flight will re-trigger CI on the next scheduled run if checks are still missing after your push. + +**Security gate:** the pre-flight only ever triggers CI for branches matching `autoloop/*`. Do not bypass this check or trigger CI yourself for PRs from outside contributors.