diff --git a/.github/workflows/requirements.txt b/.github/workflows/requirements.txt
index 56829fdbd0..abcda1723e 100644
--- a/.github/workflows/requirements.txt
+++ b/.github/workflows/requirements.txt
@@ -1,3 +1,4 @@
+mempalace==3.2.0
markitdown-mcp
numpy
scikit-learn
diff --git a/.github/workflows/smoke-create-cross-repo-pr.lock.yml b/.github/workflows/smoke-create-cross-repo-pr.lock.yml
index abdd7ed4c2..8ea587dbaf 100644
--- a/.github/workflows/smoke-create-cross-repo-pr.lock.yml
+++ b/.github/workflows/smoke-create-cross-repo-pr.lock.yml
@@ -1,4 +1,4 @@
-# gh-aw-metadata: {"schema_version":"v3","frontmatter_hash":"86acca80f30c2c8b5c2f2acfcd36ccdf20eeb732a678a64f2db35dd5e1f6425a","strict":true,"agent_id":"copilot"}
+# gh-aw-metadata: {"schema_version":"v3","frontmatter_hash":"55e430e79f4d85abfe9d63d9335d30f2e95aeb486662520e010facf5d69dde96","strict":true,"agent_id":"copilot"}
# gh-aw-manifest: {"version":1,"secrets":["GH_AW_CI_TRIGGER_TOKEN","GH_AW_GITHUB_MCP_SERVER_TOKEN","GH_AW_GITHUB_TOKEN","GH_AW_OTEL_ENDPOINT","GH_AW_OTEL_HEADERS","GH_AW_SIDE_REPO_PAT","GITHUB_TOKEN"],"actions":[{"repo":"actions/checkout","sha":"de0fac2e4500dabe0009e67214ff5f5447ce83dd","version":"v6.0.2"},{"repo":"actions/download-artifact","sha":"3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c","version":"v8.0.1"},{"repo":"actions/github-script","sha":"373c709c69115d41ff229c7e5df9f8788daa9553","version":"v9"},{"repo":"actions/upload-artifact","sha":"043fb46d1a93c77aae656e7c1c64a875d1fc6a0a","version":"v7.0.1"}],"containers":[{"image":"ghcr.io/github/gh-aw-firewall/agent:0.25.25"},{"image":"ghcr.io/github/gh-aw-firewall/api-proxy:0.25.25"},{"image":"ghcr.io/github/gh-aw-firewall/squid:0.25.25"},{"image":"ghcr.io/github/gh-aw-mcpg:v0.2.25"},{"image":"ghcr.io/github/github-mcp-server:v1.0.0"},{"image":"node:lts-alpine","digest":"sha256:01743339035a5c3c11a373cd7c83aeab6ed1457b55da6a69e014a95ac4e4700b","pinned_image":"node:lts-alpine@sha256:01743339035a5c3c11a373cd7c83aeab6ed1457b55da6a69e014a95ac4e4700b"}]}
# ___ _ _
# / _ \ | | (_)
@@ -22,7 +22,7 @@
#
# For more information: https://github.github.com/gh-aw/introduction/overview/
#
-# Smoke test validating cross-repo pull request creation in githubnext/gh-aw-side-repo
+# Smoke test validating cross-repo pull request creation in github/gh-aw-side-repo
#
# Resolved workflow manifest:
# Imports:
@@ -190,7 +190,7 @@ jobs:
uses: actions/github-script@373c709c69115d41ff229c7e5df9f8788daa9553 # v9
env:
GH_AW_WORKFLOW_NAME: "Smoke Create Cross-Repo PR"
- GH_AW_SAFE_OUTPUT_MESSAGES: "{\"footer\":\"\\u003e 🔬 *Cross-repo smoke test by [{workflow_name}]({run_url})*{effective_tokens_suffix}{history_link}\",\"runStarted\":\"🔬 [{workflow_name}]({run_url}) is testing cross-repo PR creation in githubnext/gh-aw-side-repo...\",\"runSuccess\":\"✅ [{workflow_name}]({run_url}) successfully created a cross-repo PR in githubnext/gh-aw-side-repo!\",\"runFailure\":\"❌ [{workflow_name}]({run_url}) failed to create a cross-repo PR: {status}\"}"
+ GH_AW_SAFE_OUTPUT_MESSAGES: "{\"footer\":\"\\u003e 🔬 *Cross-repo smoke test by [{workflow_name}]({run_url})*{effective_tokens_suffix}{history_link}\",\"runStarted\":\"🔬 [{workflow_name}]({run_url}) is testing cross-repo PR creation in github/gh-aw-side-repo...\",\"runSuccess\":\"✅ [{workflow_name}]({run_url}) successfully created a cross-repo PR in github/gh-aw-side-repo!\",\"runFailure\":\"❌ [{workflow_name}]({run_url}) failed to create a cross-repo PR: {status}\"}"
with:
script: |
const { setupGlobals } = require('${{ runner.temp }}/gh-aw/actions/setup_globals.cjs');
@@ -214,19 +214,19 @@ jobs:
run: |
bash "${RUNNER_TEMP}/gh-aw/actions/create_prompt_first.sh"
{
- cat << 'GH_AW_PROMPT_a4d7ae5e3561acd9_EOF'
+ cat << 'GH_AW_PROMPT_0420ea7924b34dcb_EOF'
- GH_AW_PROMPT_a4d7ae5e3561acd9_EOF
+ GH_AW_PROMPT_0420ea7924b34dcb_EOF
cat "${RUNNER_TEMP}/gh-aw/prompts/xpia.md"
cat "${RUNNER_TEMP}/gh-aw/prompts/temp_folder_prompt.md"
cat "${RUNNER_TEMP}/gh-aw/prompts/markdown.md"
cat "${RUNNER_TEMP}/gh-aw/prompts/safe_outputs_prompt.md"
- cat << 'GH_AW_PROMPT_a4d7ae5e3561acd9_EOF'
+ cat << 'GH_AW_PROMPT_0420ea7924b34dcb_EOF'
Tools: add_comment(max:2), create_issue, create_pull_request, missing_tool, missing_data, noop
- GH_AW_PROMPT_a4d7ae5e3561acd9_EOF
+ GH_AW_PROMPT_0420ea7924b34dcb_EOF
cat "${RUNNER_TEMP}/gh-aw/prompts/safe_outputs_create_pull_request.md"
- cat << 'GH_AW_PROMPT_a4d7ae5e3561acd9_EOF'
+ cat << 'GH_AW_PROMPT_0420ea7924b34dcb_EOF'
The following GitHub context information is available for this workflow:
@@ -255,17 +255,17 @@ jobs:
- **workflow-run-id**: __GH_AW_GITHUB_RUN_ID__
{{/if}}
- **checkouts**: The following repositories have been checked out and are available in the workspace:
- - `$GITHUB_WORKSPACE` → `githubnext/gh-aw-side-repo` (cwd) [shallow clone, fetch-depth=1 (default)]
+ - `$GITHUB_WORKSPACE` → `github/gh-aw-side-repo` (cwd) [shallow clone, fetch-depth=1 (default)]
- **Note**: If a branch you need is not in the list above and is not listed as an additional fetched ref, it has NOT been checked out. For private repositories you cannot fetch it without proper authentication. If the branch is required and not available, exit with an error and ask the user to add it to the `fetch:` option of the `checkout:` configuration (e.g., `fetch: ["refs/pulls/open/*"]` for all open PR refs, or `fetch: ["main", "feature/my-branch"]` for specific branches).
- GH_AW_PROMPT_a4d7ae5e3561acd9_EOF
+ GH_AW_PROMPT_0420ea7924b34dcb_EOF
cat "${RUNNER_TEMP}/gh-aw/prompts/github_mcp_tools_with_safeoutputs_prompt.md"
- cat << 'GH_AW_PROMPT_a4d7ae5e3561acd9_EOF'
+ cat << 'GH_AW_PROMPT_0420ea7924b34dcb_EOF'
{{#runtime-import .github/workflows/shared/observability-otlp.md}}
{{#runtime-import .github/workflows/smoke-create-cross-repo-pr.md}}
- GH_AW_PROMPT_a4d7ae5e3561acd9_EOF
+ GH_AW_PROMPT_0420ea7924b34dcb_EOF
} > "$GH_AW_PROMPT"
- name: Interpolate variables and render templates
uses: actions/github-script@373c709c69115d41ff229c7e5df9f8788daa9553 # v9
@@ -396,11 +396,11 @@ jobs:
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
persist-credentials: false
- - name: Checkout githubnext/gh-aw-side-repo
+ - name: Checkout github/gh-aw-side-repo
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
persist-credentials: false
- repository: githubnext/gh-aw-side-repo
+ repository: github/gh-aw-side-repo
token: ${{ secrets.GH_AW_SIDE_REPO_PAT }}
- name: Create gh-aw temp directory
run: bash "${RUNNER_TEMP}/gh-aw/actions/create_gh_aw_tmp_dir.sh"
@@ -460,9 +460,9 @@ jobs:
mkdir -p "${RUNNER_TEMP}/gh-aw/safeoutputs"
mkdir -p /tmp/gh-aw/safeoutputs
mkdir -p /tmp/gh-aw/mcp-logs/safeoutputs
- cat > "${RUNNER_TEMP}/gh-aw/safeoutputs/config.json" << GH_AW_SAFE_OUTPUTS_CONFIG_2a41d056264725d2_EOF
- {"add_comment":{"hide_older_comments":true,"max":2},"create_issue":{"close_older_issues":true,"expires":2,"labels":["automation","testing"],"max":1},"create_pull_request":{"draft":true,"expires":24,"fallback_as_issue":false,"github-token":"${GH_AW_SIDE_REPO_PAT}","if_no_changes":"error","labels":["smoke-test"],"max":1,"max_patch_size":1024,"protected_files":["package.json","bun.lockb","bunfig.toml","deno.json","deno.jsonc","deno.lock","global.json","NuGet.Config","Directory.Packages.props","mix.exs","mix.lock","go.mod","go.sum","stack.yaml","stack.yaml.lock","pom.xml","build.gradle","build.gradle.kts","settings.gradle","settings.gradle.kts","gradle.properties","package-lock.json","yarn.lock","pnpm-lock.yaml","npm-shrinkwrap.json","requirements.txt","Pipfile","Pipfile.lock","pyproject.toml","setup.py","setup.cfg","Gemfile","Gemfile.lock","uv.lock","CODEOWNERS","AGENTS.md","CLAUDE.md","GEMINI.md"],"protected_path_prefixes":[".github/",".agents/"],"target-repo":"githubnext/gh-aw-side-repo","title_prefix":"[smoke] "},"create_report_incomplete_issue":{},"missing_data":{},"missing_tool":{},"noop":{"max":1,"report-as-issue":"true"},"report_incomplete":{}}
- GH_AW_SAFE_OUTPUTS_CONFIG_2a41d056264725d2_EOF
+ cat > "${RUNNER_TEMP}/gh-aw/safeoutputs/config.json" << GH_AW_SAFE_OUTPUTS_CONFIG_6f9e329493efc8a8_EOF
+ {"add_comment":{"hide_older_comments":true,"max":2},"create_issue":{"close_older_issues":true,"expires":2,"labels":["automation","testing"],"max":1},"create_pull_request":{"draft":true,"expires":24,"fallback_as_issue":false,"github-token":"${GH_AW_SIDE_REPO_PAT}","if_no_changes":"error","labels":["smoke-test"],"max":1,"max_patch_size":1024,"protected_files":["package.json","bun.lockb","bunfig.toml","deno.json","deno.jsonc","deno.lock","global.json","NuGet.Config","Directory.Packages.props","mix.exs","mix.lock","go.mod","go.sum","stack.yaml","stack.yaml.lock","pom.xml","build.gradle","build.gradle.kts","settings.gradle","settings.gradle.kts","gradle.properties","package-lock.json","yarn.lock","pnpm-lock.yaml","npm-shrinkwrap.json","requirements.txt","Pipfile","Pipfile.lock","pyproject.toml","setup.py","setup.cfg","Gemfile","Gemfile.lock","uv.lock","CODEOWNERS","AGENTS.md","CLAUDE.md","GEMINI.md"],"protected_path_prefixes":[".github/",".agents/"],"target-repo":"github/gh-aw-side-repo","title_prefix":"[smoke] "},"create_report_incomplete_issue":{},"missing_data":{},"missing_tool":{},"noop":{"max":1,"report-as-issue":"true"},"report_incomplete":{}}
+ GH_AW_SAFE_OUTPUTS_CONFIG_6f9e329493efc8a8_EOF
- name: Write Safe Outputs Tools
env:
GH_AW_TOOLS_META_JSON: |
@@ -726,7 +726,7 @@ jobs:
mkdir -p /home/runner/.copilot
GH_AW_NODE=$(which node 2>/dev/null || command -v node 2>/dev/null || echo node)
- cat << GH_AW_MCP_CONFIG_bc036f9a74cfcfbb_EOF | "$GH_AW_NODE" "${RUNNER_TEMP}/gh-aw/actions/start_mcp_gateway.cjs"
+ cat << GH_AW_MCP_CONFIG_cc43158393ecf2ed_EOF | "$GH_AW_NODE" "${RUNNER_TEMP}/gh-aw/actions/start_mcp_gateway.cjs"
{
"mcpServers": {
"github": {
@@ -773,7 +773,7 @@ jobs:
}
}
}
- GH_AW_MCP_CONFIG_bc036f9a74cfcfbb_EOF
+ GH_AW_MCP_CONFIG_cc43158393ecf2ed_EOF
- name: Download activation artifact
uses: actions/download-artifact@3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c # v8.0.1
with:
@@ -1125,7 +1125,7 @@ jobs:
GH_AW_CODE_PUSH_FAILURE_COUNT: ${{ needs.safe_outputs.outputs.code_push_failure_count }}
GH_AW_LOCKDOWN_CHECK_FAILED: ${{ needs.activation.outputs.lockdown_check_failed }}
GH_AW_STALE_LOCK_FILE_FAILED: ${{ needs.activation.outputs.stale_lock_file_failed }}
- GH_AW_SAFE_OUTPUT_MESSAGES: "{\"footer\":\"\\u003e 🔬 *Cross-repo smoke test by [{workflow_name}]({run_url})*{effective_tokens_suffix}{history_link}\",\"runStarted\":\"🔬 [{workflow_name}]({run_url}) is testing cross-repo PR creation in githubnext/gh-aw-side-repo...\",\"runSuccess\":\"✅ [{workflow_name}]({run_url}) successfully created a cross-repo PR in githubnext/gh-aw-side-repo!\",\"runFailure\":\"❌ [{workflow_name}]({run_url}) failed to create a cross-repo PR: {status}\"}"
+ GH_AW_SAFE_OUTPUT_MESSAGES: "{\"footer\":\"\\u003e 🔬 *Cross-repo smoke test by [{workflow_name}]({run_url})*{effective_tokens_suffix}{history_link}\",\"runStarted\":\"🔬 [{workflow_name}]({run_url}) is testing cross-repo PR creation in github/gh-aw-side-repo...\",\"runSuccess\":\"✅ [{workflow_name}]({run_url}) successfully created a cross-repo PR in github/gh-aw-side-repo!\",\"runFailure\":\"❌ [{workflow_name}]({run_url}) failed to create a cross-repo PR: {status}\"}"
GH_AW_GROUP_REPORTS: "false"
GH_AW_FAILURE_REPORT_AS_ISSUE: "true"
GH_AW_TIMEOUT_MINUTES: "10"
@@ -1148,7 +1148,7 @@ jobs:
GH_AW_AGENT_CONCLUSION: ${{ needs.agent.result }}
GH_AW_DETECTION_CONCLUSION: ${{ needs.detection.outputs.detection_conclusion }}
GH_AW_DETECTION_REASON: ${{ needs.detection.outputs.detection_reason }}
- GH_AW_SAFE_OUTPUT_MESSAGES: "{\"footer\":\"\\u003e 🔬 *Cross-repo smoke test by [{workflow_name}]({run_url})*{effective_tokens_suffix}{history_link}\",\"runStarted\":\"🔬 [{workflow_name}]({run_url}) is testing cross-repo PR creation in githubnext/gh-aw-side-repo...\",\"runSuccess\":\"✅ [{workflow_name}]({run_url}) successfully created a cross-repo PR in githubnext/gh-aw-side-repo!\",\"runFailure\":\"❌ [{workflow_name}]({run_url}) failed to create a cross-repo PR: {status}\"}"
+ GH_AW_SAFE_OUTPUT_MESSAGES: "{\"footer\":\"\\u003e 🔬 *Cross-repo smoke test by [{workflow_name}]({run_url})*{effective_tokens_suffix}{history_link}\",\"runStarted\":\"🔬 [{workflow_name}]({run_url}) is testing cross-repo PR creation in github/gh-aw-side-repo...\",\"runSuccess\":\"✅ [{workflow_name}]({run_url}) successfully created a cross-repo PR in github/gh-aw-side-repo!\",\"runFailure\":\"❌ [{workflow_name}]({run_url}) failed to create a cross-repo PR: {status}\"}"
with:
github-token: ${{ secrets.GH_AW_GITHUB_TOKEN || secrets.GITHUB_TOKEN }}
script: |
@@ -1251,7 +1251,7 @@ jobs:
uses: actions/github-script@373c709c69115d41ff229c7e5df9f8788daa9553 # v9
env:
WORKFLOW_NAME: "Smoke Create Cross-Repo PR"
- WORKFLOW_DESCRIPTION: "Smoke test validating cross-repo pull request creation in githubnext/gh-aw-side-repo"
+ WORKFLOW_DESCRIPTION: "Smoke test validating cross-repo pull request creation in github/gh-aw-side-repo"
HAS_PATCH: ${{ needs.agent.outputs.has_patch }}
with:
script: |
@@ -1384,7 +1384,7 @@ jobs:
GH_AW_EFFECTIVE_TOKENS: ${{ needs.agent.outputs.effective_tokens }}
GH_AW_ENGINE_ID: "copilot"
GH_AW_ENGINE_MODEL: ${{ needs.agent.outputs.model }}
- GH_AW_SAFE_OUTPUT_MESSAGES: "{\"footer\":\"\\u003e 🔬 *Cross-repo smoke test by [{workflow_name}]({run_url})*{effective_tokens_suffix}{history_link}\",\"runStarted\":\"🔬 [{workflow_name}]({run_url}) is testing cross-repo PR creation in githubnext/gh-aw-side-repo...\",\"runSuccess\":\"✅ [{workflow_name}]({run_url}) successfully created a cross-repo PR in githubnext/gh-aw-side-repo!\",\"runFailure\":\"❌ [{workflow_name}]({run_url}) failed to create a cross-repo PR: {status}\"}"
+ GH_AW_SAFE_OUTPUT_MESSAGES: "{\"footer\":\"\\u003e 🔬 *Cross-repo smoke test by [{workflow_name}]({run_url})*{effective_tokens_suffix}{history_link}\",\"runStarted\":\"🔬 [{workflow_name}]({run_url}) is testing cross-repo PR creation in github/gh-aw-side-repo...\",\"runSuccess\":\"✅ [{workflow_name}]({run_url}) successfully created a cross-repo PR in github/gh-aw-side-repo!\",\"runFailure\":\"❌ [{workflow_name}]({run_url}) failed to create a cross-repo PR: {status}\"}"
GH_AW_WORKFLOW_ID: "smoke-create-cross-repo-pr"
GH_AW_WORKFLOW_NAME: "Smoke Create Cross-Repo PR"
outputs:
@@ -1441,7 +1441,7 @@ jobs:
if: (!cancelled()) && needs.agent.result != 'skipped' && contains(needs.agent.outputs.output_types, 'create_pull_request')
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
- repository: githubnext/gh-aw-side-repo
+ repository: github/gh-aw-side-repo
ref: ${{ github.base_ref || github.event.pull_request.base.ref || github.ref_name || github.event.repository.default_branch }}
token: ${{ secrets.GH_AW_SIDE_REPO_PAT }}
persist-credentials: false
@@ -1449,7 +1449,7 @@ jobs:
- name: Configure Git credentials
if: (!cancelled()) && needs.agent.result != 'skipped' && contains(needs.agent.outputs.output_types, 'create_pull_request')
env:
- REPO_NAME: "githubnext/gh-aw-side-repo"
+ REPO_NAME: "github/gh-aw-side-repo"
SERVER_URL: ${{ github.server_url }}
GIT_TOKEN: ${{ secrets.GH_AW_SIDE_REPO_PAT }}
run: |
@@ -1477,7 +1477,7 @@ jobs:
GH_AW_ALLOWED_DOMAINS: "*.githubusercontent.com,127.0.0.1,::1,api.business.githubcopilot.com,api.enterprise.githubcopilot.com,api.github.com,api.githubcopilot.com,api.individual.githubcopilot.com,api.snapcraft.io,app.renovatebot.com,appveyor.com,archive.ubuntu.com,azure.archive.ubuntu.com,badgen.net,circleci.com,codacy.com,codeclimate.com,codecov.io,codeload.github.com,coveralls.io,crl.geotrust.com,crl.globalsign.com,crl.identrust.com,crl.sectigo.com,crl.thawte.com,crl.usertrust.com,crl.verisign.com,crl3.digicert.com,crl4.digicert.com,crls.ssl.com,deepsource.io,docs.github.com,drone.io,github-cloud.githubusercontent.com,github-cloud.s3.amazonaws.com,github.blog,github.com,github.githubassets.com,host.docker.internal,img.shields.io,json-schema.org,json.schemastore.org,keyserver.ubuntu.com,lfs.github.com,localhost,objects.githubusercontent.com,ocsp.digicert.com,ocsp.geotrust.com,ocsp.globalsign.com,ocsp.identrust.com,ocsp.sectigo.com,ocsp.ssl.com,ocsp.thawte.com,ocsp.usertrust.com,ocsp.verisign.com,packagecloud.io,packages.cloud.google.com,packages.microsoft.com,ppa.launchpad.net,raw.githubusercontent.com,readthedocs.io,readthedocs.org,registry.npmjs.org,renovatebot.com,s.symcb.com,s.symcd.com,security.ubuntu.com,semaphoreci.com,shields.io,snyk.io,sonarcloud.io,sonarqube.com,telemetry.enterprise.githubcopilot.com,travis-ci.com,ts-crl.ws.symantec.com,ts-ocsp.ws.symantec.com,www.googleapis.com"
GITHUB_SERVER_URL: ${{ github.server_url }}
GITHUB_API_URL: ${{ github.api_url }}
- GH_AW_SAFE_OUTPUTS_HANDLER_CONFIG: "{\"add_comment\":{\"hide_older_comments\":true,\"max\":2},\"create_issue\":{\"close_older_issues\":true,\"expires\":2,\"labels\":[\"automation\",\"testing\"],\"max\":1},\"create_pull_request\":{\"draft\":true,\"expires\":24,\"fallback_as_issue\":false,\"github-token\":\"${{ secrets.GH_AW_SIDE_REPO_PAT }}\",\"if_no_changes\":\"error\",\"labels\":[\"smoke-test\"],\"max\":1,\"max_patch_size\":1024,\"protected_files\":[\"package.json\",\"bun.lockb\",\"bunfig.toml\",\"deno.json\",\"deno.jsonc\",\"deno.lock\",\"global.json\",\"NuGet.Config\",\"Directory.Packages.props\",\"mix.exs\",\"mix.lock\",\"go.mod\",\"go.sum\",\"stack.yaml\",\"stack.yaml.lock\",\"pom.xml\",\"build.gradle\",\"build.gradle.kts\",\"settings.gradle\",\"settings.gradle.kts\",\"gradle.properties\",\"package-lock.json\",\"yarn.lock\",\"pnpm-lock.yaml\",\"npm-shrinkwrap.json\",\"requirements.txt\",\"Pipfile\",\"Pipfile.lock\",\"pyproject.toml\",\"setup.py\",\"setup.cfg\",\"Gemfile\",\"Gemfile.lock\",\"uv.lock\",\"CODEOWNERS\",\"AGENTS.md\",\"CLAUDE.md\",\"GEMINI.md\"],\"protected_path_prefixes\":[\".github/\",\".agents/\"],\"target-repo\":\"githubnext/gh-aw-side-repo\",\"title_prefix\":\"[smoke] \"},\"create_report_incomplete_issue\":{},\"missing_data\":{},\"missing_tool\":{},\"noop\":{\"max\":1,\"report-as-issue\":\"true\"},\"report_incomplete\":{}}"
+ GH_AW_SAFE_OUTPUTS_HANDLER_CONFIG: "{\"add_comment\":{\"hide_older_comments\":true,\"max\":2},\"create_issue\":{\"close_older_issues\":true,\"expires\":2,\"labels\":[\"automation\",\"testing\"],\"max\":1},\"create_pull_request\":{\"draft\":true,\"expires\":24,\"fallback_as_issue\":false,\"github-token\":\"${{ secrets.GH_AW_SIDE_REPO_PAT }}\",\"if_no_changes\":\"error\",\"labels\":[\"smoke-test\"],\"max\":1,\"max_patch_size\":1024,\"protected_files\":[\"package.json\",\"bun.lockb\",\"bunfig.toml\",\"deno.json\",\"deno.jsonc\",\"deno.lock\",\"global.json\",\"NuGet.Config\",\"Directory.Packages.props\",\"mix.exs\",\"mix.lock\",\"go.mod\",\"go.sum\",\"stack.yaml\",\"stack.yaml.lock\",\"pom.xml\",\"build.gradle\",\"build.gradle.kts\",\"settings.gradle\",\"settings.gradle.kts\",\"gradle.properties\",\"package-lock.json\",\"yarn.lock\",\"pnpm-lock.yaml\",\"npm-shrinkwrap.json\",\"requirements.txt\",\"Pipfile\",\"Pipfile.lock\",\"pyproject.toml\",\"setup.py\",\"setup.cfg\",\"Gemfile\",\"Gemfile.lock\",\"uv.lock\",\"CODEOWNERS\",\"AGENTS.md\",\"CLAUDE.md\",\"GEMINI.md\"],\"protected_path_prefixes\":[\".github/\",\".agents/\"],\"target-repo\":\"github/gh-aw-side-repo\",\"title_prefix\":\"[smoke] \"},\"create_report_incomplete_issue\":{},\"missing_data\":{},\"missing_tool\":{},\"noop\":{\"max\":1,\"report-as-issue\":\"true\"},\"report_incomplete\":{}}"
GH_AW_CI_TRIGGER_TOKEN: ${{ secrets.GH_AW_CI_TRIGGER_TOKEN }}
GITHUB_TOKEN: ${{ secrets.GH_AW_SIDE_REPO_PAT }}
with:
diff --git a/.github/workflows/smoke-create-cross-repo-pr.md b/.github/workflows/smoke-create-cross-repo-pr.md
index 554d38b1e8..1464954c09 100644
--- a/.github/workflows/smoke-create-cross-repo-pr.md
+++ b/.github/workflows/smoke-create-cross-repo-pr.md
@@ -1,6 +1,6 @@
---
name: Smoke Create Cross-Repo PR
-description: Smoke test validating cross-repo pull request creation in githubnext/gh-aw-side-repo
+description: Smoke test validating cross-repo pull request creation in github/gh-aw-side-repo
on:
workflow_dispatch:
pull_request:
@@ -19,7 +19,7 @@ network:
- github
checkout:
- - repository: githubnext/gh-aw-side-repo
+ - repository: github/gh-aw-side-repo
github-token: ${{ secrets.GH_AW_SIDE_REPO_PAT }}
tools:
@@ -33,7 +33,7 @@ tools:
safe-outputs:
allowed-domains: [default-safe-outputs]
create-pull-request:
- target-repo: "githubnext/gh-aw-side-repo"
+ target-repo: "github/gh-aw-side-repo"
github-token: ${{ secrets.GH_AW_SIDE_REPO_PAT }}
title-prefix: "[smoke] "
labels: [smoke-test]
@@ -50,8 +50,8 @@ safe-outputs:
max: 2
messages:
footer: "> 🔬 *Cross-repo smoke test by [{workflow_name}]({run_url})*{effective_tokens_suffix}{history_link}"
- run-started: "🔬 [{workflow_name}]({run_url}) is testing cross-repo PR creation in githubnext/gh-aw-side-repo..."
- run-success: "✅ [{workflow_name}]({run_url}) successfully created a cross-repo PR in githubnext/gh-aw-side-repo!"
+ run-started: "🔬 [{workflow_name}]({run_url}) is testing cross-repo PR creation in github/gh-aw-side-repo..."
+ run-success: "✅ [{workflow_name}]({run_url}) successfully created a cross-repo PR in github/gh-aw-side-repo!"
run-failure: "❌ [{workflow_name}]({run_url}) failed to create a cross-repo PR: {status}"
timeout-minutes: 10
diff --git a/.github/workflows/smoke-update-cross-repo-pr.lock.yml b/.github/workflows/smoke-update-cross-repo-pr.lock.yml
index 7d76d11723..0875fda71f 100644
--- a/.github/workflows/smoke-update-cross-repo-pr.lock.yml
+++ b/.github/workflows/smoke-update-cross-repo-pr.lock.yml
@@ -1,4 +1,4 @@
-# gh-aw-metadata: {"schema_version":"v3","frontmatter_hash":"72e74209f519ac8a58429a763b6209ce0cfc5f4400a65afc569d4000defeb89a","strict":true,"agent_id":"copilot"}
+# gh-aw-metadata: {"schema_version":"v3","frontmatter_hash":"5b785f55ba2a16a818befb3d75af99bead1554141d32d0929c26c762b0075dd6","strict":true,"agent_id":"copilot"}
# gh-aw-manifest: {"version":1,"secrets":["GH_AW_CI_TRIGGER_TOKEN","GH_AW_GITHUB_MCP_SERVER_TOKEN","GH_AW_GITHUB_TOKEN","GH_AW_OTEL_ENDPOINT","GH_AW_OTEL_HEADERS","GH_AW_SIDE_REPO_PAT","GITHUB_TOKEN"],"actions":[{"repo":"actions/cache/restore","sha":"27d5ce7f107fe9357f9df03efb73ab90386fccae","version":"v5.0.5"},{"repo":"actions/cache/save","sha":"27d5ce7f107fe9357f9df03efb73ab90386fccae","version":"v5.0.5"},{"repo":"actions/checkout","sha":"de0fac2e4500dabe0009e67214ff5f5447ce83dd","version":"v6.0.2"},{"repo":"actions/download-artifact","sha":"3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c","version":"v8.0.1"},{"repo":"actions/github-script","sha":"373c709c69115d41ff229c7e5df9f8788daa9553","version":"v9"},{"repo":"actions/upload-artifact","sha":"043fb46d1a93c77aae656e7c1c64a875d1fc6a0a","version":"v7.0.1"}],"containers":[{"image":"ghcr.io/github/gh-aw-firewall/agent:0.25.25"},{"image":"ghcr.io/github/gh-aw-firewall/api-proxy:0.25.25"},{"image":"ghcr.io/github/gh-aw-firewall/squid:0.25.25"},{"image":"ghcr.io/github/gh-aw-mcpg:v0.2.25"},{"image":"ghcr.io/github/github-mcp-server:v1.0.0"},{"image":"node:lts-alpine","digest":"sha256:01743339035a5c3c11a373cd7c83aeab6ed1457b55da6a69e014a95ac4e4700b","pinned_image":"node:lts-alpine@sha256:01743339035a5c3c11a373cd7c83aeab6ed1457b55da6a69e014a95ac4e4700b"}]}
# ___ _ _
# / _ \ | | (_)
@@ -22,7 +22,7 @@
#
# For more information: https://github.github.com/gh-aw/introduction/overview/
#
-# Smoke test validating cross-repo pull request updates in githubnext/gh-aw-side-repo by adding lines from Homer's Odyssey to the README
+# Smoke test validating cross-repo pull request updates in github/gh-aw-side-repo by adding lines from Homer's Odyssey to the README
#
# Resolved workflow manifest:
# Imports:
@@ -192,7 +192,7 @@ jobs:
uses: actions/github-script@373c709c69115d41ff229c7e5df9f8788daa9553 # v9
env:
GH_AW_WORKFLOW_NAME: "Smoke Update Cross-Repo PR"
- GH_AW_SAFE_OUTPUT_MESSAGES: "{\"footer\":\"\\u003e 📜 *Cross-repo PR update smoke test by [{workflow_name}]({run_url})*{effective_tokens_suffix}{history_link}\",\"runStarted\":\"📜 [{workflow_name}]({run_url}) is adding the next Odyssey line to githubnext/gh-aw-side-repo PR #1...\",\"runSuccess\":\"✅ [{workflow_name}]({run_url}) successfully updated the cross-repo PR with a new Odyssey line!\",\"runFailure\":\"❌ [{workflow_name}]({run_url}) failed to update the cross-repo PR: {status}\"}"
+ GH_AW_SAFE_OUTPUT_MESSAGES: "{\"footer\":\"\\u003e 📜 *Cross-repo PR update smoke test by [{workflow_name}]({run_url})*{effective_tokens_suffix}{history_link}\",\"runStarted\":\"📜 [{workflow_name}]({run_url}) is adding the next Odyssey line to github/gh-aw-side-repo PR #1...\",\"runSuccess\":\"✅ [{workflow_name}]({run_url}) successfully updated the cross-repo PR with a new Odyssey line!\",\"runFailure\":\"❌ [{workflow_name}]({run_url}) failed to update the cross-repo PR: {status}\"}"
with:
script: |
const { setupGlobals } = require('${{ runner.temp }}/gh-aw/actions/setup_globals.cjs');
@@ -216,20 +216,20 @@ jobs:
run: |
bash "${RUNNER_TEMP}/gh-aw/actions/create_prompt_first.sh"
{
- cat << 'GH_AW_PROMPT_4524bd3388c5bf7b_EOF'
+ cat << 'GH_AW_PROMPT_95b80c043e2e7005_EOF'
- GH_AW_PROMPT_4524bd3388c5bf7b_EOF
+ GH_AW_PROMPT_95b80c043e2e7005_EOF
cat "${RUNNER_TEMP}/gh-aw/prompts/xpia.md"
cat "${RUNNER_TEMP}/gh-aw/prompts/temp_folder_prompt.md"
cat "${RUNNER_TEMP}/gh-aw/prompts/markdown.md"
cat "${RUNNER_TEMP}/gh-aw/prompts/cache_memory_prompt.md"
cat "${RUNNER_TEMP}/gh-aw/prompts/safe_outputs_prompt.md"
- cat << 'GH_AW_PROMPT_4524bd3388c5bf7b_EOF'
+ cat << 'GH_AW_PROMPT_95b80c043e2e7005_EOF'
Tools: add_comment(max:2), create_issue, push_to_pull_request_branch, missing_tool, missing_data, noop
- GH_AW_PROMPT_4524bd3388c5bf7b_EOF
+ GH_AW_PROMPT_95b80c043e2e7005_EOF
cat "${RUNNER_TEMP}/gh-aw/prompts/safe_outputs_push_to_pr_branch.md"
- cat << 'GH_AW_PROMPT_4524bd3388c5bf7b_EOF'
+ cat << 'GH_AW_PROMPT_95b80c043e2e7005_EOF'
The following GitHub context information is available for this workflow:
@@ -258,17 +258,17 @@ jobs:
- **workflow-run-id**: __GH_AW_GITHUB_RUN_ID__
{{/if}}
- **checkouts**: The following repositories have been checked out and are available in the workspace:
- - `$GITHUB_WORKSPACE` → `githubnext/gh-aw-side-repo` (cwd) [full history, all branches available as remote-tracking refs] [additional refs fetched: main, refs/pulls/open/*]
+ - `$GITHUB_WORKSPACE` → `github/gh-aw-side-repo` (cwd) [full history, all branches available as remote-tracking refs] [additional refs fetched: main, refs/pulls/open/*]
- **Note**: If a branch you need is not in the list above and is not listed as an additional fetched ref, it has NOT been checked out. For private repositories you cannot fetch it without proper authentication. If the branch is required and not available, exit with an error and ask the user to add it to the `fetch:` option of the `checkout:` configuration (e.g., `fetch: ["refs/pulls/open/*"]` for all open PR refs, or `fetch: ["main", "feature/my-branch"]` for specific branches).
- GH_AW_PROMPT_4524bd3388c5bf7b_EOF
+ GH_AW_PROMPT_95b80c043e2e7005_EOF
cat "${RUNNER_TEMP}/gh-aw/prompts/github_mcp_tools_with_safeoutputs_prompt.md"
- cat << 'GH_AW_PROMPT_4524bd3388c5bf7b_EOF'
+ cat << 'GH_AW_PROMPT_95b80c043e2e7005_EOF'
{{#runtime-import .github/workflows/shared/observability-otlp.md}}
{{#runtime-import .github/workflows/smoke-update-cross-repo-pr.md}}
- GH_AW_PROMPT_4524bd3388c5bf7b_EOF
+ GH_AW_PROMPT_95b80c043e2e7005_EOF
} > "$GH_AW_PROMPT"
- name: Interpolate variables and render templates
uses: actions/github-script@373c709c69115d41ff229c7e5df9f8788daa9553 # v9
@@ -405,14 +405,14 @@ jobs:
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
persist-credentials: false
- - name: Checkout githubnext/gh-aw-side-repo
+ - name: Checkout github/gh-aw-side-repo
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
persist-credentials: false
- repository: githubnext/gh-aw-side-repo
+ repository: github/gh-aw-side-repo
token: ${{ secrets.GH_AW_SIDE_REPO_PAT }}
fetch-depth: 0
- - name: Fetch additional refs for githubnext/gh-aw-side-repo
+ - name: Fetch additional refs for github/gh-aw-side-repo
env:
GH_AW_FETCH_TOKEN: ${{ secrets.GH_AW_SIDE_REPO_PAT }}
run: |
@@ -491,9 +491,9 @@ jobs:
mkdir -p "${RUNNER_TEMP}/gh-aw/safeoutputs"
mkdir -p /tmp/gh-aw/safeoutputs
mkdir -p /tmp/gh-aw/mcp-logs/safeoutputs
- cat > "${RUNNER_TEMP}/gh-aw/safeoutputs/config.json" << GH_AW_SAFE_OUTPUTS_CONFIG_fa6946bcf33a49e7_EOF
- {"add_comment":{"hide_older_comments":true,"max":2},"create_issue":{"close_older_issues":true,"expires":2,"labels":["automation","testing"],"max":1},"create_report_incomplete_issue":{},"missing_data":{},"missing_tool":{},"noop":{"max":1,"report-as-issue":"true"},"push_to_pull_request_branch":{"github-token":"${GH_AW_SIDE_REPO_PAT}","if_no_changes":"error","max_patch_size":1024,"protected_files":["package.json","bun.lockb","bunfig.toml","deno.json","deno.jsonc","deno.lock","global.json","NuGet.Config","Directory.Packages.props","mix.exs","mix.lock","go.mod","go.sum","stack.yaml","stack.yaml.lock","pom.xml","build.gradle","build.gradle.kts","settings.gradle","settings.gradle.kts","gradle.properties","package-lock.json","yarn.lock","pnpm-lock.yaml","npm-shrinkwrap.json","requirements.txt","Pipfile","Pipfile.lock","pyproject.toml","setup.py","setup.cfg","Gemfile","Gemfile.lock","uv.lock","CODEOWNERS","AGENTS.md","CLAUDE.md","GEMINI.md"],"protected_path_prefixes":[".github/",".agents/"],"target":"1","target-repo":"githubnext/gh-aw-side-repo"},"report_incomplete":{}}
- GH_AW_SAFE_OUTPUTS_CONFIG_fa6946bcf33a49e7_EOF
+ cat > "${RUNNER_TEMP}/gh-aw/safeoutputs/config.json" << GH_AW_SAFE_OUTPUTS_CONFIG_e4863e5cb9b826e6_EOF
+ {"add_comment":{"hide_older_comments":true,"max":2},"create_issue":{"close_older_issues":true,"expires":2,"labels":["automation","testing"],"max":1},"create_report_incomplete_issue":{},"missing_data":{},"missing_tool":{},"noop":{"max":1,"report-as-issue":"true"},"push_to_pull_request_branch":{"github-token":"${GH_AW_SIDE_REPO_PAT}","if_no_changes":"error","max_patch_size":1024,"protected_files":["package.json","bun.lockb","bunfig.toml","deno.json","deno.jsonc","deno.lock","global.json","NuGet.Config","Directory.Packages.props","mix.exs","mix.lock","go.mod","go.sum","stack.yaml","stack.yaml.lock","pom.xml","build.gradle","build.gradle.kts","settings.gradle","settings.gradle.kts","gradle.properties","package-lock.json","yarn.lock","pnpm-lock.yaml","npm-shrinkwrap.json","requirements.txt","Pipfile","Pipfile.lock","pyproject.toml","setup.py","setup.cfg","Gemfile","Gemfile.lock","uv.lock","CODEOWNERS","AGENTS.md","CLAUDE.md","GEMINI.md"],"protected_path_prefixes":[".github/",".agents/"],"target":"1","target-repo":"github/gh-aw-side-repo"},"report_incomplete":{}}
+ GH_AW_SAFE_OUTPUTS_CONFIG_e4863e5cb9b826e6_EOF
- name: Write Safe Outputs Tools
env:
GH_AW_TOOLS_META_JSON: |
@@ -735,7 +735,7 @@ jobs:
mkdir -p /home/runner/.copilot
GH_AW_NODE=$(which node 2>/dev/null || command -v node 2>/dev/null || echo node)
- cat << GH_AW_MCP_CONFIG_c5bec9ad3d1e0dc9_EOF | "$GH_AW_NODE" "${RUNNER_TEMP}/gh-aw/actions/start_mcp_gateway.cjs"
+ cat << GH_AW_MCP_CONFIG_770e0ca1bdd5fb05_EOF | "$GH_AW_NODE" "${RUNNER_TEMP}/gh-aw/actions/start_mcp_gateway.cjs"
{
"mcpServers": {
"github": {
@@ -782,7 +782,7 @@ jobs:
}
}
}
- GH_AW_MCP_CONFIG_c5bec9ad3d1e0dc9_EOF
+ GH_AW_MCP_CONFIG_770e0ca1bdd5fb05_EOF
- name: Download activation artifact
uses: actions/download-artifact@3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c # v8.0.1
with:
@@ -1146,7 +1146,7 @@ jobs:
GH_AW_CODE_PUSH_FAILURE_COUNT: ${{ needs.safe_outputs.outputs.code_push_failure_count }}
GH_AW_LOCKDOWN_CHECK_FAILED: ${{ needs.activation.outputs.lockdown_check_failed }}
GH_AW_STALE_LOCK_FILE_FAILED: ${{ needs.activation.outputs.stale_lock_file_failed }}
- GH_AW_SAFE_OUTPUT_MESSAGES: "{\"footer\":\"\\u003e 📜 *Cross-repo PR update smoke test by [{workflow_name}]({run_url})*{effective_tokens_suffix}{history_link}\",\"runStarted\":\"📜 [{workflow_name}]({run_url}) is adding the next Odyssey line to githubnext/gh-aw-side-repo PR #1...\",\"runSuccess\":\"✅ [{workflow_name}]({run_url}) successfully updated the cross-repo PR with a new Odyssey line!\",\"runFailure\":\"❌ [{workflow_name}]({run_url}) failed to update the cross-repo PR: {status}\"}"
+ GH_AW_SAFE_OUTPUT_MESSAGES: "{\"footer\":\"\\u003e 📜 *Cross-repo PR update smoke test by [{workflow_name}]({run_url})*{effective_tokens_suffix}{history_link}\",\"runStarted\":\"📜 [{workflow_name}]({run_url}) is adding the next Odyssey line to github/gh-aw-side-repo PR #1...\",\"runSuccess\":\"✅ [{workflow_name}]({run_url}) successfully updated the cross-repo PR with a new Odyssey line!\",\"runFailure\":\"❌ [{workflow_name}]({run_url}) failed to update the cross-repo PR: {status}\"}"
GH_AW_GROUP_REPORTS: "false"
GH_AW_FAILURE_REPORT_AS_ISSUE: "true"
GH_AW_TIMEOUT_MINUTES: "10"
@@ -1169,7 +1169,7 @@ jobs:
GH_AW_AGENT_CONCLUSION: ${{ needs.agent.result }}
GH_AW_DETECTION_CONCLUSION: ${{ needs.detection.outputs.detection_conclusion }}
GH_AW_DETECTION_REASON: ${{ needs.detection.outputs.detection_reason }}
- GH_AW_SAFE_OUTPUT_MESSAGES: "{\"footer\":\"\\u003e 📜 *Cross-repo PR update smoke test by [{workflow_name}]({run_url})*{effective_tokens_suffix}{history_link}\",\"runStarted\":\"📜 [{workflow_name}]({run_url}) is adding the next Odyssey line to githubnext/gh-aw-side-repo PR #1...\",\"runSuccess\":\"✅ [{workflow_name}]({run_url}) successfully updated the cross-repo PR with a new Odyssey line!\",\"runFailure\":\"❌ [{workflow_name}]({run_url}) failed to update the cross-repo PR: {status}\"}"
+ GH_AW_SAFE_OUTPUT_MESSAGES: "{\"footer\":\"\\u003e 📜 *Cross-repo PR update smoke test by [{workflow_name}]({run_url})*{effective_tokens_suffix}{history_link}\",\"runStarted\":\"📜 [{workflow_name}]({run_url}) is adding the next Odyssey line to github/gh-aw-side-repo PR #1...\",\"runSuccess\":\"✅ [{workflow_name}]({run_url}) successfully updated the cross-repo PR with a new Odyssey line!\",\"runFailure\":\"❌ [{workflow_name}]({run_url}) failed to update the cross-repo PR: {status}\"}"
with:
github-token: ${{ secrets.GH_AW_GITHUB_TOKEN || secrets.GITHUB_TOKEN }}
script: |
@@ -1272,7 +1272,7 @@ jobs:
uses: actions/github-script@373c709c69115d41ff229c7e5df9f8788daa9553 # v9
env:
WORKFLOW_NAME: "Smoke Update Cross-Repo PR"
- WORKFLOW_DESCRIPTION: "Smoke test validating cross-repo pull request updates in githubnext/gh-aw-side-repo by adding lines from Homer's Odyssey to the README"
+ WORKFLOW_DESCRIPTION: "Smoke test validating cross-repo pull request updates in github/gh-aw-side-repo by adding lines from Homer's Odyssey to the README"
HAS_PATCH: ${{ needs.agent.outputs.has_patch }}
with:
script: |
@@ -1405,7 +1405,7 @@ jobs:
GH_AW_EFFECTIVE_TOKENS: ${{ needs.agent.outputs.effective_tokens }}
GH_AW_ENGINE_ID: "copilot"
GH_AW_ENGINE_MODEL: ${{ needs.agent.outputs.model }}
- GH_AW_SAFE_OUTPUT_MESSAGES: "{\"footer\":\"\\u003e 📜 *Cross-repo PR update smoke test by [{workflow_name}]({run_url})*{effective_tokens_suffix}{history_link}\",\"runStarted\":\"📜 [{workflow_name}]({run_url}) is adding the next Odyssey line to githubnext/gh-aw-side-repo PR #1...\",\"runSuccess\":\"✅ [{workflow_name}]({run_url}) successfully updated the cross-repo PR with a new Odyssey line!\",\"runFailure\":\"❌ [{workflow_name}]({run_url}) failed to update the cross-repo PR: {status}\"}"
+ GH_AW_SAFE_OUTPUT_MESSAGES: "{\"footer\":\"\\u003e 📜 *Cross-repo PR update smoke test by [{workflow_name}]({run_url})*{effective_tokens_suffix}{history_link}\",\"runStarted\":\"📜 [{workflow_name}]({run_url}) is adding the next Odyssey line to github/gh-aw-side-repo PR #1...\",\"runSuccess\":\"✅ [{workflow_name}]({run_url}) successfully updated the cross-repo PR with a new Odyssey line!\",\"runFailure\":\"❌ [{workflow_name}]({run_url}) failed to update the cross-repo PR: {status}\"}"
GH_AW_WORKFLOW_ID: "smoke-update-cross-repo-pr"
GH_AW_WORKFLOW_NAME: "Smoke Update Cross-Repo PR"
outputs:
@@ -1497,7 +1497,7 @@ jobs:
GH_AW_ALLOWED_DOMAINS: "*.githubusercontent.com,127.0.0.1,::1,api.business.githubcopilot.com,api.enterprise.githubcopilot.com,api.github.com,api.githubcopilot.com,api.individual.githubcopilot.com,api.snapcraft.io,app.renovatebot.com,appveyor.com,archive.ubuntu.com,azure.archive.ubuntu.com,badgen.net,circleci.com,codacy.com,codeclimate.com,codecov.io,codeload.github.com,coveralls.io,crl.geotrust.com,crl.globalsign.com,crl.identrust.com,crl.sectigo.com,crl.thawte.com,crl.usertrust.com,crl.verisign.com,crl3.digicert.com,crl4.digicert.com,crls.ssl.com,deepsource.io,docs.github.com,drone.io,github-cloud.githubusercontent.com,github-cloud.s3.amazonaws.com,github.blog,github.com,github.githubassets.com,host.docker.internal,img.shields.io,json-schema.org,json.schemastore.org,keyserver.ubuntu.com,lfs.github.com,localhost,objects.githubusercontent.com,ocsp.digicert.com,ocsp.geotrust.com,ocsp.globalsign.com,ocsp.identrust.com,ocsp.sectigo.com,ocsp.ssl.com,ocsp.thawte.com,ocsp.usertrust.com,ocsp.verisign.com,packagecloud.io,packages.cloud.google.com,packages.microsoft.com,ppa.launchpad.net,raw.githubusercontent.com,readthedocs.io,readthedocs.org,registry.npmjs.org,renovatebot.com,s.symcb.com,s.symcd.com,security.ubuntu.com,semaphoreci.com,shields.io,snyk.io,sonarcloud.io,sonarqube.com,telemetry.enterprise.githubcopilot.com,travis-ci.com,ts-crl.ws.symantec.com,ts-ocsp.ws.symantec.com,www.googleapis.com"
GITHUB_SERVER_URL: ${{ github.server_url }}
GITHUB_API_URL: ${{ github.api_url }}
- GH_AW_SAFE_OUTPUTS_HANDLER_CONFIG: "{\"add_comment\":{\"hide_older_comments\":true,\"max\":2},\"create_issue\":{\"close_older_issues\":true,\"expires\":2,\"labels\":[\"automation\",\"testing\"],\"max\":1},\"create_report_incomplete_issue\":{},\"missing_data\":{},\"missing_tool\":{},\"noop\":{\"max\":1,\"report-as-issue\":\"true\"},\"push_to_pull_request_branch\":{\"github-token\":\"${{ secrets.GH_AW_SIDE_REPO_PAT }}\",\"if_no_changes\":\"error\",\"max_patch_size\":1024,\"protected_files\":[\"package.json\",\"bun.lockb\",\"bunfig.toml\",\"deno.json\",\"deno.jsonc\",\"deno.lock\",\"global.json\",\"NuGet.Config\",\"Directory.Packages.props\",\"mix.exs\",\"mix.lock\",\"go.mod\",\"go.sum\",\"stack.yaml\",\"stack.yaml.lock\",\"pom.xml\",\"build.gradle\",\"build.gradle.kts\",\"settings.gradle\",\"settings.gradle.kts\",\"gradle.properties\",\"package-lock.json\",\"yarn.lock\",\"pnpm-lock.yaml\",\"npm-shrinkwrap.json\",\"requirements.txt\",\"Pipfile\",\"Pipfile.lock\",\"pyproject.toml\",\"setup.py\",\"setup.cfg\",\"Gemfile\",\"Gemfile.lock\",\"uv.lock\",\"CODEOWNERS\",\"AGENTS.md\",\"CLAUDE.md\",\"GEMINI.md\"],\"protected_path_prefixes\":[\".github/\",\".agents/\"],\"target\":\"1\",\"target-repo\":\"githubnext/gh-aw-side-repo\"},\"report_incomplete\":{}}"
+ GH_AW_SAFE_OUTPUTS_HANDLER_CONFIG: "{\"add_comment\":{\"hide_older_comments\":true,\"max\":2},\"create_issue\":{\"close_older_issues\":true,\"expires\":2,\"labels\":[\"automation\",\"testing\"],\"max\":1},\"create_report_incomplete_issue\":{},\"missing_data\":{},\"missing_tool\":{},\"noop\":{\"max\":1,\"report-as-issue\":\"true\"},\"push_to_pull_request_branch\":{\"github-token\":\"${{ secrets.GH_AW_SIDE_REPO_PAT }}\",\"if_no_changes\":\"error\",\"max_patch_size\":1024,\"protected_files\":[\"package.json\",\"bun.lockb\",\"bunfig.toml\",\"deno.json\",\"deno.jsonc\",\"deno.lock\",\"global.json\",\"NuGet.Config\",\"Directory.Packages.props\",\"mix.exs\",\"mix.lock\",\"go.mod\",\"go.sum\",\"stack.yaml\",\"stack.yaml.lock\",\"pom.xml\",\"build.gradle\",\"build.gradle.kts\",\"settings.gradle\",\"settings.gradle.kts\",\"gradle.properties\",\"package-lock.json\",\"yarn.lock\",\"pnpm-lock.yaml\",\"npm-shrinkwrap.json\",\"requirements.txt\",\"Pipfile\",\"Pipfile.lock\",\"pyproject.toml\",\"setup.py\",\"setup.cfg\",\"Gemfile\",\"Gemfile.lock\",\"uv.lock\",\"CODEOWNERS\",\"AGENTS.md\",\"CLAUDE.md\",\"GEMINI.md\"],\"protected_path_prefixes\":[\".github/\",\".agents/\"],\"target\":\"1\",\"target-repo\":\"github/gh-aw-side-repo\"},\"report_incomplete\":{}}"
GH_AW_CI_TRIGGER_TOKEN: ${{ secrets.GH_AW_CI_TRIGGER_TOKEN }}
GITHUB_TOKEN: ${{ secrets.GH_AW_SIDE_REPO_PAT }}
with:
diff --git a/.github/workflows/smoke-update-cross-repo-pr.md b/.github/workflows/smoke-update-cross-repo-pr.md
index be40456a6a..1eea5a5482 100644
--- a/.github/workflows/smoke-update-cross-repo-pr.md
+++ b/.github/workflows/smoke-update-cross-repo-pr.md
@@ -1,6 +1,6 @@
---
name: Smoke Update Cross-Repo PR
-description: Smoke test validating cross-repo pull request updates in githubnext/gh-aw-side-repo by adding lines from Homer's Odyssey to the README
+description: Smoke test validating cross-repo pull request updates in github/gh-aw-side-repo by adding lines from Homer's Odyssey to the README
on:
workflow_dispatch:
@@ -20,7 +20,7 @@ network:
- github
checkout:
- - repository: githubnext/gh-aw-side-repo
+ - repository: github/gh-aw-side-repo
github-token: ${{ secrets.GH_AW_SIDE_REPO_PAT }}
fetch: ["main", "refs/pulls/open/*"] # fetch all open PR refs after checkout
fetch-depth: 0 # fetch full history to ensure we can see all commits and PR details
@@ -44,13 +44,13 @@ safe-outputs:
hide-older-comments: true
max: 2
push-to-pull-request-branch:
- target-repo: "githubnext/gh-aw-side-repo"
+ target-repo: "github/gh-aw-side-repo"
github-token: ${{ secrets.GH_AW_SIDE_REPO_PAT }}
if-no-changes: "error"
target: "1" # PR #1
messages:
footer: "> 📜 *Cross-repo PR update smoke test by [{workflow_name}]({run_url})*{effective_tokens_suffix}{history_link}"
- run-started: "📜 [{workflow_name}]({run_url}) is adding the next Odyssey line to githubnext/gh-aw-side-repo PR #1..."
+ run-started: "📜 [{workflow_name}]({run_url}) is adding the next Odyssey line to github/gh-aw-side-repo PR #1..."
run-success: "✅ [{workflow_name}]({run_url}) successfully updated the cross-repo PR with a new Odyssey line!"
run-failure: "❌ [{workflow_name}]({run_url}) failed to update the cross-repo PR: {status}"
diff --git a/actions/setup/js/check_runs_helpers.cjs b/actions/setup/js/check_runs_helpers.cjs
new file mode 100644
index 0000000000..b121691de0
--- /dev/null
+++ b/actions/setup/js/check_runs_helpers.cjs
@@ -0,0 +1,83 @@
+// @ts-check
+
+/**
+ * Returns true for check runs that represent deployment environment gates rather
+ * than CI checks.
+ * @param {any} run
+ * @returns {boolean}
+ */
+function isDeploymentCheck(run) {
+ return run?.app?.slug === "github-deployments";
+}
+
+/**
+ * Select latest check run per name and apply standard filtering.
+ * @param {any[]} checkRuns
+ * @param {{
+ * includeList?: string[]|null,
+ * excludeList?: string[]|null,
+ * excludedCheckRunIds?: Set<number>,
+ * }} [options]
+ * @returns {{relevant: any[], deploymentCheckCount: number, currentRunFilterCount: number}}
+ */
+function selectLatestRelevantChecks(checkRuns, options = {}) {
+ const includeList = options.includeList || null;
+ const excludeList = options.excludeList || null;
+ const excludedCheckRunIds = options.excludedCheckRunIds || new Set();
+
+ /** @type {Map<string, any>} */
+ const latestByName = new Map();
+ let deploymentCheckCount = 0;
+ let currentRunFilterCount = 0;
+
+ for (const run of checkRuns) {
+ if (isDeploymentCheck(run)) {
+ deploymentCheckCount++;
+ continue;
+ }
+ if (excludedCheckRunIds.has(run.id)) {
+ currentRunFilterCount++;
+ continue;
+ }
+ const existing = latestByName.get(run.name);
+ if (!existing || new Date(run.started_at ?? 0) > new Date(existing.started_at ?? 0)) {
+ latestByName.set(run.name, run);
+ }
+ }
+
+ const relevant = [];
+ for (const [name, run] of latestByName) {
+ if (includeList && includeList.length > 0 && !includeList.includes(name)) {
+ continue;
+ }
+ if (excludeList && excludeList.length > 0 && excludeList.includes(name)) {
+ continue;
+ }
+ relevant.push(run);
+ }
+
+ return { relevant, deploymentCheckCount, currentRunFilterCount };
+}
+
+/**
+ * Computes failing checks with shared semantics.
+ * @param {any[]} checkRuns
+ * @param {{allowPending?: boolean}} [options]
+ * @returns {any[]}
+ */
+function getFailingChecks(checkRuns, options = {}) {
+ const allowPending = options.allowPending === true;
+ const failedConclusions = new Set(["failure", "cancelled", "timed_out"]);
+ return checkRuns.filter(run => {
+ if (run.status === "completed") {
+ return run.conclusion != null && failedConclusions.has(run.conclusion);
+ }
+ return !allowPending;
+ });
+}
+
+module.exports = {
+ isDeploymentCheck,
+ selectLatestRelevantChecks,
+ getFailingChecks,
+};
diff --git a/actions/setup/js/check_skip_if_check_failing.cjs b/actions/setup/js/check_skip_if_check_failing.cjs
index 97d0ff5475..9051488d42 100644
--- a/actions/setup/js/check_skip_if_check_failing.cjs
+++ b/actions/setup/js/check_skip_if_check_failing.cjs
@@ -5,6 +5,7 @@ const { getErrorMessage, isRateLimitError } = require("./error_helpers.cjs");
const { ERR_API } = require("./error_codes.cjs");
const { getBaseBranch } = require("./get_base_branch.cjs");
const { writeDenialSummary } = require("./pre_activation_summary.cjs");
+const { selectLatestRelevantChecks, getFailingChecks } = require("./check_runs_helpers.cjs");
/**
* Determines the ref to check for CI status.
@@ -52,22 +53,6 @@ function parseListEnv(envValue) {
}
}
-/**
- * Returns true for check runs that represent deployment environment gates rather
- * than CI checks. These should be ignored by default so that a pending deployment
- * approval does not falsely block the agentic workflow.
- *
- * Deployment gate checks are identified by the GitHub App that created them:
- * - "github-deployments" – the built-in GitHub Deployments service
- *
- * @param {object} run - A check run object from the GitHub API
- * @returns {boolean}
- */
-function isDeploymentCheck(run) {
- const slug = run.app?.slug;
- return slug === "github-deployments";
-}
-
/**
* Fetches the check run IDs for all jobs in the current workflow run.
* These IDs are used to filter out the current workflow's own checks
@@ -149,25 +134,11 @@ async function main() {
// Filter to the latest run per check name (GitHub may have multiple runs per name).
// Deployment gate checks and the current run's own checks are silently skipped here
// so they never influence the gate.
- /** @type {Map<string, any>} */
- const latestByName = new Map();
- let deploymentCheckCount = 0;
- let currentRunFilterCount = 0;
- for (const run of checkRuns) {
- if (isDeploymentCheck(run)) {
- deploymentCheckCount++;
- continue;
- }
- if (currentRunCheckRunIds.has(run.id)) {
- currentRunFilterCount++;
- continue;
- }
- const name = run.name;
- const existing = latestByName.get(name);
- if (!existing || new Date(run.started_at ?? 0) > new Date(existing.started_at ?? 0)) {
- latestByName.set(name, run);
- }
- }
+ const { relevant, deploymentCheckCount, currentRunFilterCount } = selectLatestRelevantChecks(checkRuns, {
+ includeList,
+ excludeList,
+ excludedCheckRunIds: currentRunCheckRunIds,
+ });
if (deploymentCheckCount > 0) {
core.info(`Skipping ${deploymentCheckCount} deployment gate check(s) (app: github-deployments)`);
@@ -176,32 +147,9 @@ async function main() {
core.info(`Skipping ${currentRunFilterCount} check run(s) from the current workflow run`);
}
- // Apply user-defined include/exclude filtering
- const relevant = [];
- for (const [name, run] of latestByName) {
- if (includeList && includeList.length > 0 && !includeList.includes(name)) {
- continue;
- }
- if (excludeList && excludeList.length > 0 && excludeList.includes(name)) {
- continue;
- }
- relevant.push(run);
- }
-
core.info(`Evaluating ${relevant.length} check run(s) after filtering`);
- // A check is "failing" if it either:
- // 1. Completed with a non-success conclusion (failure, cancelled, timed_out), OR
- // 2. Is still pending/in-progress — unless allow-pending is set
- const failedConclusions = new Set(["failure", "cancelled", "timed_out"]);
-
- const failingChecks = relevant.filter(run => {
- if (run.status === "completed") {
- return run.conclusion != null && failedConclusions.has(run.conclusion);
- }
- // Pending/queued/in_progress: treat as failing unless allow-pending is true
- return !allowPending;
- });
+ const failingChecks = getFailingChecks(relevant, { allowPending });
if (failingChecks.length > 0) {
const names = failingChecks.map(r => (r.status === "completed" ? `${r.name} (${r.conclusion})` : `${r.name} (${r.status})`)).join(", ");
diff --git a/actions/setup/js/merge_pull_request.cjs b/actions/setup/js/merge_pull_request.cjs
new file mode 100644
index 0000000000..f936b77d65
--- /dev/null
+++ b/actions/setup/js/merge_pull_request.cjs
@@ -0,0 +1,655 @@
+// @ts-check
+/// <reference path="./types/github-script.d.ts" />
+
+const { createAuthenticatedGitHubClient } = require("./handler_auth.cjs");
+const { getErrorMessage } = require("./error_helpers.cjs");
+const { resolveTargetRepoConfig, resolveAndValidateRepo } = require("./repo_helpers.cjs");
+const { globPatternToRegex } = require("./glob_pattern_helpers.cjs");
+const { isStagedMode } = require("./safe_output_helpers.cjs");
+const { selectLatestRelevantChecks } = require("./check_runs_helpers.cjs");
+const { withRetry, isTransientError } = require("./error_recovery.cjs");
+const { normalizeBranchName } = require("./normalize_branch_name.cjs");
+const { resolveNumberFromTemporaryId } = require("./temporary_id.cjs");
+const MERGEABILITY_PENDING_ERROR = "pull request mergeability is still being computed";
+
+/**
+ * @typedef {import('./types/handler-factory').HandlerFactoryFunction} HandlerFactoryFunction
+ */
+
+/**
+ * @param {string[]} patterns
+ * @returns {RegExp[]}
+ */
+function compilePathGlobs(patterns) {
+ return patterns.map(p => globPatternToRegex(p, { pathMode: true, caseSensitive: true }));
+}
+
+/**
+ * @param {string[]} changedFiles
+ * @param {RegExp[]} patterns
+ * @returns {string[]}
+ */
+function findNonMatchingFiles(changedFiles, patterns) {
+ return changedFiles.filter(file => !patterns.some(re => re.test(file)));
+}
+
+/**
+ * @param {string[]} changedFiles
+ * @param {RegExp[]} patterns
+ * @returns {string[]}
+ */
+function findMatchingFiles(changedFiles, patterns) {
+ return changedFiles.filter(file => patterns.some(re => re.test(file)));
+}
+
+/**
+ * @param {any} githubClient
+ * @param {string} owner
+ * @param {string} repo
+ * @param {number} pullNumber
+ * @returns {Promise<any>}
+ */
+async function getPullRequestWithMergeability(githubClient, owner, repo, pullNumber) {
+ core.info(`Fetching PR #${pullNumber} in ${owner}/${repo} with mergeability retry`);
+ return withRetry(
+ async () => {
+ const { data } = await githubClient.rest.pulls.get({
+ owner,
+ repo,
+ pull_number: pullNumber,
+ });
+ if (data && data.mergeable === null) {
+ throw new Error(MERGEABILITY_PENDING_ERROR);
+ }
+ return data;
+ },
+ {
+ maxRetries: 3,
+ initialDelayMs: 1000,
+ shouldRetry: error => {
+ const msg = getErrorMessage(error).toLowerCase();
+ return isTransientError(error) || msg === MERGEABILITY_PENDING_ERROR;
+ },
+ },
+ `fetch pull request #${pullNumber}`
+ ).catch(async error => {
+ try {
+ const fallback = await githubClient.rest.pulls.get({
+ owner,
+ repo,
+ pull_number: pullNumber,
+ });
+ if (fallback?.data) {
+ core.warning(`Mergeability remained unknown after retries for PR #${pullNumber}, continuing with latest state`);
+ return fallback.data;
+ }
+ } catch (fallbackError) {
+ throw new Error(`Failed to fetch pull request #${pullNumber} after retry and fallback attempts. Retry error: ${getErrorMessage(error)}. Fallback error: ${getErrorMessage(fallbackError)}`);
+ }
+ throw error;
+ });
+}
+
+/**
+ * @param {any} githubClient
+ * @param {string} owner
+ * @param {string} repo
+ * @param {number} pullNumber
+ * @returns {Promise<{reviewDecision: string|null, unresolvedThreadCount: number}>}
+ */
+async function getReviewSummary(githubClient, owner, repo, pullNumber) {
+ core.info(`Collecting review summary for PR #${pullNumber}`);
+ let unresolvedThreadCount = 0;
+ let reviewDecision = null;
+ let cursor = null;
+ let hasNextPage = true;
+ let page = 0;
+ while (hasNextPage) {
+ page++;
+ const result = await withRetry(
+ async () =>
+ githubClient.graphql(
+ `
+ query($owner: String!, $repo: String!, $number: Int!, $after: String) {
+ repository(owner: $owner, name: $repo) {
+ pullRequest(number: $number) {
+ reviewDecision
+ reviewThreads(first: 100, after: $after) {
+ pageInfo { hasNextPage endCursor }
+ nodes { isResolved }
+ }
+ }
+ }
+ }
+ `,
+ { owner, repo, number: pullNumber, after: cursor }
+ ),
+ {
+ maxRetries: 3,
+ initialDelayMs: 1000,
+ shouldRetry: error => isTransientError(error),
+ },
+ `fetch review summary GraphQL page ${page} for PR #${pullNumber}`
+ );
+
+ const pr = result?.repository?.pullRequest;
+ if (!pr) {
+ core.warning(`No pull request data returned while reading review summary for PR #${pullNumber}`);
+ break;
+ }
+ reviewDecision = pr.reviewDecision || null;
+ const threads = pr.reviewThreads?.nodes || [];
+ core.info(`Review page ${page}: ${threads.length} thread(s)`);
+ unresolvedThreadCount += threads.filter(t => !t.isResolved).length;
+ hasNextPage = pr.reviewThreads?.pageInfo?.hasNextPage === true;
+ cursor = pr.reviewThreads?.pageInfo?.endCursor || null;
+ }
+
+ core.info(`Review summary: decision=${reviewDecision || "null"}, unresolvedThreads=${unresolvedThreadCount}`);
+ return { reviewDecision, unresolvedThreadCount };
+}
+
+/**
+ * @param {any} githubClient
+ * @param {string} owner
+ * @param {string} repo
+ * @param {string} baseBranch
+ * @returns {Promise<{isProtected: boolean, isDefault: boolean, defaultBranch: string|null, requiredChecks: string[]}>}
+ */
+async function getBranchPolicy(githubClient, owner, repo, baseBranch) {
+ const baseBranchValidation = sanitizeBranchName(baseBranch, "target base");
+ if (!baseBranchValidation.valid || !baseBranchValidation.value) {
+ throw new Error(`Invalid target base branch for policy evaluation: ${baseBranchValidation.error} (original: ${JSON.stringify(baseBranch)}, normalized: ${JSON.stringify(baseBranchValidation.normalized || "")})`);
+ }
+ const sanitizedBaseBranch = baseBranchValidation.value;
+
+ core.info(`Checking target branch policy for ${owner}/${repo}@${sanitizedBaseBranch}`);
+ const [{ data: branch }, { data: repository }] = await Promise.all([
+ githubClient.rest.repos.getBranch({
+ owner,
+ repo,
+ branch: sanitizedBaseBranch,
+ }),
+ githubClient.rest.repos.get({
+ owner,
+ repo,
+ }),
+ ]);
+
+ const defaultBranchRaw = repository.default_branch;
+ const defaultBranchValidation = sanitizeBranchName(defaultBranchRaw, "default");
+ const defaultBranch = defaultBranchValidation.valid ? defaultBranchValidation.value : defaultBranchRaw;
+ const isDefault = defaultBranch !== null && sanitizedBaseBranch === defaultBranch;
+ if (isDefault) {
+ core.info(`Target branch ${sanitizedBaseBranch} is the repository default branch`);
+ }
+
+ const isProtected = branch?.protected === true;
+ if (isProtected) {
+ core.info(`Target branch ${sanitizedBaseBranch} is protected`);
+ return { isProtected: true, isDefault, defaultBranch, requiredChecks: [] };
+ }
+
+ try {
+ const { data } = await githubClient.rest.repos.getBranchProtection({
+ owner,
+ repo,
+ branch: sanitizedBaseBranch,
+ });
+ const contexts = Array.isArray(data?.required_status_checks?.contexts) ? data.required_status_checks.contexts : [];
+ const checks = Array.isArray(data?.required_status_checks?.checks) ? data.required_status_checks.checks.map(c => c?.context).filter(Boolean) : [];
+ core.info(`Branch protection checks for ${sanitizedBaseBranch}: ${[...new Set([...contexts, ...checks])].join(", ") || "(none)"}`);
+ return { isProtected: false, isDefault, defaultBranch, requiredChecks: [...new Set([...contexts, ...checks])] };
+ } catch (error) {
+ if (error && typeof error === "object" && "status" in error && error.status === 404) {
+ core.info(`No branch protection rules found for ${sanitizedBaseBranch}`);
+ return { isProtected: false, isDefault, defaultBranch, requiredChecks: [] };
+ }
+ core.error(`Failed to read branch protection for ${sanitizedBaseBranch}: ${getErrorMessage(error)}`);
+ throw error;
+ }
+}
+
+/**
+ * @param {any} githubClient
+ * @param {string} owner
+ * @param {string} repo
+ * @param {string} headSha
+ * @param {string[]} requiredChecks
+ * @returns {Promise<{missing: string[], failing: Array<{name: string, status: string, conclusion: string|null}>}>}
+ */
+async function evaluateRequiredChecks(githubClient, owner, repo, headSha, requiredChecks) {
+ core.info(`Evaluating required checks on ${headSha}: ${requiredChecks.join(", ") || "(none)"}`);
+ if (requiredChecks.length === 0) {
+ return { missing: [], failing: [] };
+ }
+
+ const checkRuns = await githubClient.paginate(githubClient.rest.checks.listForRef, {
+ owner,
+ repo,
+ ref: headSha,
+ per_page: 100,
+ });
+
+ const { relevant } = selectLatestRelevantChecks(checkRuns, { includeList: requiredChecks });
+ core.info(`Fetched ${checkRuns.length} check run(s), ${relevant.length} relevant latest check run(s)`);
+ const byName = new Map(relevant.map(run => [run.name, run]));
+ const missing = [];
+ const failing = [];
+
+ for (const checkName of requiredChecks) {
+ const run = byName.get(checkName);
+ if (!run) {
+ core.warning(`Required check missing: ${checkName}`);
+ missing.push(checkName);
+ continue;
+ }
+ if (run.status !== "completed" || run.conclusion !== "success") {
+ core.warning(`Required check not passing: ${checkName} status=${run.status} conclusion=${run.conclusion || "null"}`);
+ failing.push({ name: checkName, status: run.status, conclusion: run.conclusion || null });
+ }
+ }
+
+ return { missing, failing };
+}
+
+/**
+ * @param {any} githubClient
+ * @param {string} owner
+ * @param {string} repo
+ * @param {number} pullNumber
+ * @returns {Promise<string[]>}
+ */
+async function listChangedFiles(githubClient, owner, repo, pullNumber) {
+ core.info(`Listing changed files for PR #${pullNumber}`);
+ const files = await githubClient.paginate(githubClient.rest.pulls.listFiles, {
+ owner,
+ repo,
+ pull_number: pullNumber,
+ per_page: 100,
+ });
+ const changed = files.map(f => f.filename).filter(Boolean);
+ core.info(`PR #${pullNumber} changed ${changed.length} file(s)`);
+ return changed;
+}
+
+/**
+ * @returns {number|undefined}
+ */
+function resolveContextPullNumber() {
+ if (context.payload?.pull_request?.number) {
+ return context.payload.pull_request.number;
+ }
+ if (context.payload?.issue?.pull_request && context.payload?.issue?.number) {
+ return context.payload.issue.number;
+ }
+ return undefined;
+}
+
+/**
+ * @param {string|undefined|null} branchName
+ * @param {string} branchRole
+ * @returns {{valid: boolean, value?: string, error?: string, normalized?: string}}
+ */
+function sanitizeBranchName(branchName, branchRole) {
+ if (typeof branchName !== "string" || branchName.trim() === "") {
+ return { valid: false, error: `${branchRole} branch is missing` };
+ }
+
+ const normalized = normalizeBranchName(branchName);
+ if (typeof normalized !== "string" || normalized.trim() === "") {
+ return {
+ valid: false,
+ error: `${branchRole} branch is invalid after sanitization`,
+ normalized: String(normalized || ""),
+ };
+ }
+
+ if (normalized !== branchName) {
+ return {
+ valid: false,
+ error: `${branchRole} branch contains invalid characters`,
+ normalized,
+ };
+ }
+
+ return { valid: true, value: normalized };
+}
+
+/**
+ * @param {string[]} labels
+ * @param {string[]} allowedLabels
+ * @returns {string[]}
+ */
+function findAllowedLabelMatches(labels, allowedLabels) {
+ return labels.filter(label => allowedLabels.includes(label));
+}
+
+/**
+ * @param {any} message
+ * Message object containing pull_request_number (optional)
+ * @param {any} [resolvedTemporaryIds]
+ * Optional map of resolved temporary IDs from prior safe-output operations
+ * @returns {{success: true, pullNumber: number, fromTemporaryId: boolean} | {success: false, error: string}}
+ */
+function resolvePullRequestNumber(message, resolvedTemporaryIds) {
+ const pullNumberRaw = message?.pull_request_number;
+ if (pullNumberRaw !== undefined && pullNumberRaw !== null) {
+ const resolution = resolveNumberFromTemporaryId(pullNumberRaw, resolvedTemporaryIds);
+ if (resolution.errorMessage) {
+ return { success: false, error: resolution.errorMessage };
+ }
+ if (resolution.resolved === null) {
+ return { success: false, error: "Failed to resolve pull_request_number" };
+ }
+ return { success: true, pullNumber: resolution.resolved, fromTemporaryId: resolution.wasTemporaryId };
+ }
+
+ const contextPullNumber = resolveContextPullNumber();
+ if (!contextPullNumber) {
+ return { success: false, error: "pull_request_number is required for merge_pull_request" };
+ }
+ return { success: true, pullNumber: contextPullNumber, fromTemporaryId: false };
+}
+
+/**
+ * Handler factory for merge_pull_request.
+ * @type {HandlerFactoryFunction}
+ */
+async function main(config = {}) {
+ const githubClient = await createAuthenticatedGitHubClient(config);
+ const isStaged = isStagedMode(config);
+ const { defaultTargetRepo, allowedRepos } = resolveTargetRepoConfig(config);
+ const maxCount = Number(config.max || 1);
+ const requiredLabels = Array.isArray(config.required_labels) ? config.required_labels : [];
+ const allowedLabels = Array.isArray(config.allowed_labels) ? config.allowed_labels : [];
+ const allowedBranches = Array.isArray(config.allowed_branches) ? config.allowed_branches : [];
+ const allowedFiles = Array.isArray(config.allowed_files) ? config.allowed_files : [];
+ const protectedFiles = Array.isArray(config.protected_files) ? config.protected_files : [];
+
+ const allowedBranchPatterns = compilePathGlobs(allowedBranches);
+ const allowedFilePatterns = compilePathGlobs(allowedFiles);
+ const protectedFilePatterns = compilePathGlobs(protectedFiles);
+ core.info(
+ `merge_pull_request handler configured: max=${maxCount}, requiredLabels=${requiredLabels.length}, allowedLabels=${allowedLabels.length}, allowedBranches=${allowedBranches.length}, allowedFiles=${allowedFiles.length}, protectedFiles=${protectedFiles.length}, staged=${isStaged}`
+ );
+
+ let processedCount = 0;
+
+ return async function handleMergePullRequest(message, resolvedTemporaryIds) {
+ core.info(`Processing merge_pull_request message: ${JSON.stringify({ pull_request_number: message?.pull_request_number, repo: message?.repo, merge_method: message?.merge_method })}`);
+ if (processedCount >= maxCount) {
+ core.warning(`Skipping merge_pull_request: max count of ${maxCount} reached`);
+ return { success: false, error: `Max count of ${maxCount} reached` };
+ }
+ processedCount++;
+
+ const repoResult = resolveAndValidateRepo(message, defaultTargetRepo, allowedRepos, "merge pull request");
+ if (!repoResult.success) {
+ core.error(`Repository validation failed: ${repoResult.error}`);
+ return { success: false, error: repoResult.error };
+ }
+ const { owner, repo } = repoResult.repoParts;
+ core.info(`Resolved target repository: ${owner}/${repo}`);
+
+ const pullNumberResolution = resolvePullRequestNumber(message, resolvedTemporaryIds);
+ if (!pullNumberResolution.success) {
+ core.error(pullNumberResolution.error);
+ return { success: false, error: pullNumberResolution.error };
+ }
+ const pullNumber = pullNumberResolution.pullNumber;
+ if (pullNumberResolution.fromTemporaryId) {
+ core.info(`Resolved temporary ID '${String(message?.pull_request_number)}' to pull request #${pullNumber}`);
+ }
+ core.info(`Target PR number: ${pullNumber}`);
+
+ /** @type {Array<{code: string, message: string, details?: any}>} */
+ const failureReasons = [];
+
+ try {
+ const pr = await getPullRequestWithMergeability(githubClient, owner, repo, pullNumber);
+ if (!pr) {
+ core.error(`Pull request #${pullNumber} not found`);
+ return { success: false, error: `Pull request #${pullNumber} not found` };
+ }
+ const sourceBranchValidation = sanitizeBranchName(pr.head?.ref, "source");
+ if (!sourceBranchValidation.valid) {
+ failureReasons.push({
+ code: "source_branch_invalid",
+ message: sourceBranchValidation.error || "source branch is invalid",
+ details: { source_branch: pr.head?.ref, normalized: sourceBranchValidation.normalized || null },
+ });
+ }
+ const sourceBranch = sourceBranchValidation.valid ? sourceBranchValidation.value : null;
+
+ const baseBranchValidation = sanitizeBranchName(pr.base?.ref, "target base");
+ if (!baseBranchValidation.valid) {
+ failureReasons.push({
+ code: "target_base_branch_invalid",
+ message: baseBranchValidation.error || "target base branch is invalid",
+ details: { base_branch: pr.base?.ref, normalized: baseBranchValidation.normalized || null },
+ });
+ }
+ const baseBranch = baseBranchValidation.valid ? baseBranchValidation.value : null;
+
+ core.info(
+ `PR state: merged=${pr.merged}, draft=${pr.draft}, mergeable=${pr.mergeable}, mergeable_state=${pr.mergeable_state || "unknown"}, head=${JSON.stringify(sourceBranch || pr.head?.ref || null)}, base=${JSON.stringify(baseBranch || pr.base?.ref || null)}`
+ );
+ if (pr.merged) {
+ core.info(`PR #${pullNumber} is already merged, returning idempotent success`);
+ return {
+ success: true,
+ merged: true,
+ alreadyMerged: true,
+ pull_request_number: pr.number,
+ pull_request_url: pr.html_url,
+ checks_evaluated: [],
+ };
+ }
+
+ if (pr.draft) {
+ failureReasons.push({ code: "pr_is_draft", message: "Pull request is still in draft state" });
+ }
+ if (pr.mergeable === false || pr.mergeable_state === "dirty") {
+ failureReasons.push({ code: "merge_conflicts", message: "Pull request has unresolved merge conflicts" });
+ }
+ if (pr.mergeable !== true) {
+ failureReasons.push({ code: "not_mergeable", message: `Pull request is not mergeable (mergeable=${String(pr.mergeable)}, state=${pr.mergeable_state || "unknown"})` });
+ }
+
+ const labels = (pr.labels || []).map(l => l.name).filter(Boolean);
+ core.info(`PR labels (${labels.length}): ${labels.join(", ") || "(none)"}`);
+ const missingRequiredLabels = requiredLabels.filter(label => !labels.includes(label));
+ if (missingRequiredLabels.length > 0) {
+ failureReasons.push({
+ code: "missing_required_labels",
+ message: "Required labels are missing",
+ details: { missing: missingRequiredLabels, present: labels },
+ });
+ }
+
+ if (allowedLabels.length > 0) {
+ const matchedLabels = findAllowedLabelMatches(labels, allowedLabels);
+ core.info(`Allowed label match count: ${matchedLabels.length}`);
+ if (matchedLabels.length === 0) {
+ failureReasons.push({
+ code: "allowed_labels_no_match",
+ message: "No pull request label matches allowed-labels",
+ details: { present: labels, allowed_labels: allowedLabels },
+ });
+ }
+ }
+
+ if (allowedBranchPatterns.length > 0 && sourceBranch && !allowedBranchPatterns.some(re => re.test(sourceBranch))) {
+ failureReasons.push({
+ code: "branch_not_allowed",
+ message: `Source branch "${sourceBranch}" does not match allowed-branches`,
+ details: { source_branch: sourceBranch, patterns: allowedBranches },
+ });
+ }
+ if (allowedBranchPatterns.length > 0) {
+ core.info(`Allowed branch patterns: ${allowedBranches.join(", ")}`);
+ }
+
+ /** @type {{isProtected: boolean, isDefault: boolean, defaultBranch: string|null, requiredChecks: string[]}} */
+ let branchPolicy = { isProtected: false, isDefault: false, defaultBranch: null, requiredChecks: [] };
+ if (baseBranch) {
+ branchPolicy = await getBranchPolicy(githubClient, owner, repo, baseBranch);
+ if (branchPolicy.isProtected) {
+ failureReasons.push({
+ code: "target_branch_protected",
+ message: `Target branch "${baseBranch}" is protected`,
+ });
+ }
+ if (branchPolicy.isDefault) {
+ failureReasons.push({
+ code: "target_branch_default",
+ message: `Target branch "${baseBranch}" is the repository default branch`,
+ details: { default_branch: branchPolicy.defaultBranch },
+ });
+ }
+ }
+
+ const checkSummary = await evaluateRequiredChecks(githubClient, owner, repo, pr.head.sha, branchPolicy.requiredChecks);
+ core.info(`Required check summary: missing=${checkSummary.missing.length}, failing=${checkSummary.failing.length}`);
+ if (checkSummary.missing.length > 0) {
+ failureReasons.push({
+ code: "required_checks_missing",
+ message: "Required status checks are not completed",
+ details: { missing: checkSummary.missing },
+ });
+ }
+ if (checkSummary.failing.length > 0) {
+ failureReasons.push({
+ code: "required_checks_failing",
+ message: "Required status checks are not passing",
+ details: { failing: checkSummary.failing },
+ });
+ }
+
+ if ((pr.requested_reviewers || []).length > 0 || (pr.requested_teams || []).length > 0) {
+ failureReasons.push({
+ code: "pending_reviewers",
+ message: "All assigned reviewers have not approved yet",
+ details: {
+ requested_reviewers: (pr.requested_reviewers || []).map(r => r.login),
+ requested_teams: (pr.requested_teams || []).map(t => t.slug),
+ },
+ });
+ }
+
+ const reviewSummary = await getReviewSummary(githubClient, owner, repo, pullNumber);
+ if (reviewSummary.reviewDecision === "CHANGES_REQUESTED" || reviewSummary.reviewDecision === "REVIEW_REQUIRED") {
+ failureReasons.push({
+ code: "blocking_review_state",
+ message: `Blocking review state remains active (${reviewSummary.reviewDecision})`,
+ });
+ }
+ if (reviewSummary.unresolvedThreadCount > 0) {
+ failureReasons.push({
+ code: "unresolved_review_threads",
+ message: "Pull request has unresolved review threads",
+ details: { unresolved_count: reviewSummary.unresolvedThreadCount },
+ });
+ }
+
+ const changedFiles = await listChangedFiles(githubClient, owner, repo, pullNumber);
+ core.info(`Changed files sample: ${changedFiles.slice(0, 20).join(", ")}${changedFiles.length > 20 ? ", ..." : ""}`);
+
+ if (protectedFilePatterns.length > 0) {
+ const protectedMatches = findMatchingFiles(changedFiles, protectedFilePatterns);
+ core.info(`Protected file match count: ${protectedMatches.length}`);
+ if (protectedMatches.length > 0) {
+ failureReasons.push({
+ code: "protected_files_match",
+ message: "Protected files were changed",
+ details: { matched_files: protectedMatches, patterns: protectedFiles, protected_files_blocked: true },
+ });
+ }
+ }
+
+ if (allowedFilePatterns.length > 0) {
+ const disallowedFiles = findNonMatchingFiles(changedFiles, allowedFilePatterns);
+ core.info(`Allowed-file violations count: ${disallowedFiles.length}`);
+ if (disallowedFiles.length > 0) {
+ failureReasons.push({
+ code: "allowed_files_violation",
+ message: "Changed files outside allowed-files patterns",
+ details: { disallowed_files: disallowedFiles, patterns: allowedFiles },
+ });
+ }
+ }
+
+ if (failureReasons.length > 0) {
+ core.warning(`merge_pull_request blocked with ${failureReasons.length} gate failure(s): ${failureReasons.map(r => r.code).join(", ")}`);
+ return {
+ success: false,
+ error: "merge_pull_request gate checks failed",
+ failure_reasons: failureReasons,
+ checks_evaluated: branchPolicy.requiredChecks,
+ };
+ }
+
+ if (isStaged) {
+ core.info(`Staged mode: merge for PR #${pullNumber} not executed`);
+ return {
+ success: true,
+ staged: true,
+ merged: false,
+ pull_request_number: pr.number,
+ pull_request_url: pr.html_url,
+ checks_evaluated: branchPolicy.requiredChecks,
+ };
+ }
+
+ const mergeResponse = await githubClient.rest.pulls.merge({
+ owner,
+ repo,
+ pull_number: pullNumber,
+ merge_method: message.merge_method || "merge",
+ commit_title: message.commit_title,
+ commit_message: message.commit_message,
+ });
+
+ if (mergeResponse.data?.merged !== true) {
+ core.error(`Merge API returned merged=false for PR #${pullNumber}: ${mergeResponse.data?.message || "no message"}`);
+ return {
+ success: false,
+ error: mergeResponse.data?.message || "Merge API returned merged=false",
+ failure_reasons: [{ code: "merge_not_completed", message: mergeResponse.data?.message || "Merge was not completed" }],
+ checks_evaluated: branchPolicy.requiredChecks,
+ };
+ }
+
+ return {
+ success: true,
+ merged: true,
+ pull_request_number: pr.number,
+ pull_request_url: pr.html_url,
+ sha: mergeResponse.data?.sha,
+ message: mergeResponse.data?.message,
+ checks_evaluated: branchPolicy.requiredChecks,
+ };
+ } catch (error) {
+ core.error(`merge_pull_request failed for PR #${pullNumber}: ${getErrorMessage(error)}`);
+ return {
+ success: false,
+ error: getErrorMessage(error),
+ failure_reasons: [{ code: "merge_operation_error", message: getErrorMessage(error) }],
+ };
+ }
+ };
+}
+
+module.exports = {
+ main,
+ __testables: {
+ compilePathGlobs,
+ listChangedFiles,
+ resolveContextPullNumber,
+ sanitizeBranchName,
+ getBranchPolicy,
+ findAllowedLabelMatches,
+ resolvePullRequestNumber,
+ },
+};
diff --git a/actions/setup/js/merge_pull_request.test.cjs b/actions/setup/js/merge_pull_request.test.cjs
new file mode 100644
index 0000000000..611f742da8
--- /dev/null
+++ b/actions/setup/js/merge_pull_request.test.cjs
@@ -0,0 +1,127 @@
+import { describe, it, expect, beforeEach, afterEach, vi } from "vitest";
+
+describe("merge_pull_request branch validation", () => {
+ beforeEach(() => {
+ global.core = {
+ info: vi.fn(),
+ warning: vi.fn(),
+ error: vi.fn(),
+ };
+ });
+
+ afterEach(() => {
+ vi.resetModules();
+ vi.clearAllMocks();
+ delete global.core;
+ });
+
+ it("sanitizes and rejects invalid branch names", async () => {
+ const { __testables } = await import("./merge_pull_request.cjs");
+
+ const valid = __testables.sanitizeBranchName("feature/ok-branch", "source");
+ expect(valid).toEqual({ valid: true, value: "feature/ok-branch" });
+
+ const invalid = __testables.sanitizeBranchName("feature/unsafe\nbranch", "source");
+ expect(invalid.valid).toBe(false);
+ expect(invalid.error).toContain("contains invalid characters");
+ });
+
+ it("marks protected base branch as protected", async () => {
+ const { __testables } = await import("./merge_pull_request.cjs");
+
+ const githubClient = {
+ rest: {
+ repos: {
+ getBranch: vi.fn().mockResolvedValue({ data: { protected: true } }),
+ get: vi.fn().mockResolvedValue({ data: { default_branch: "main" } }),
+ },
+ },
+ };
+
+ const policy = await __testables.getBranchPolicy(githubClient, "github", "gh-aw", "release/1.0");
+ expect(policy.isProtected).toBe(true);
+ expect(policy.requiredChecks).toEqual([]);
+ });
+
+ it("detects repository default branch", async () => {
+ const { __testables } = await import("./merge_pull_request.cjs");
+
+ const githubClient = {
+ rest: {
+ repos: {
+ getBranch: vi.fn().mockResolvedValue({
+ data: {
+ protected: false,
+ },
+ }),
+ getBranchProtection: vi.fn().mockResolvedValue({
+ data: { required_status_checks: { contexts: ["ci/test"] } },
+ }),
+ get: vi.fn().mockResolvedValue({ data: { default_branch: "main" } }),
+ },
+ },
+ };
+
+ const policy = await __testables.getBranchPolicy(githubClient, "github", "gh-aw", "main");
+ expect(policy.isDefault).toBe(true);
+ expect(policy.requiredChecks).toEqual(["ci/test"]);
+ });
+
+ it("does not mark non-default branches as default", async () => {
+ const { __testables } = await import("./merge_pull_request.cjs");
+
+ const githubClient = {
+ rest: {
+ repos: {
+ getBranch: vi.fn().mockResolvedValue({ data: { protected: false } }),
+ getBranchProtection: vi.fn().mockRejectedValue({ status: 404 }),
+ get: vi.fn().mockResolvedValue({ data: { default_branch: "main" } }),
+ },
+ },
+ };
+
+ const policy = await __testables.getBranchPolicy(githubClient, "github", "gh-aw", "feature-branch");
+ expect(policy.isDefault).toBe(false);
+ });
+
+ it("rejects unsafe base branch names before branch policy lookup", async () => {
+ const { __testables } = await import("./merge_pull_request.cjs");
+
+ const githubClient = {
+ rest: {
+ repos: {
+ getBranch: vi.fn(),
+ get: vi.fn(),
+ },
+ },
+ };
+
+ await expect(__testables.getBranchPolicy(githubClient, "github", "gh-aw", "main;rm -rf /")).rejects.toThrow("Invalid target base branch for policy evaluation");
+ expect(githubClient.rest.repos.getBranch).not.toHaveBeenCalled();
+ });
+
+ it("matches allowed labels by exact value (no glob matching)", async () => {
+ const { __testables } = await import("./merge_pull_request.cjs");
+
+ expect(__testables.findAllowedLabelMatches(["release/v1", "automerge/pr-1"], ["release/*", "automerge/*"])).toEqual([]);
+ expect(__testables.findAllowedLabelMatches(["automerge", "release"], ["automerge", "deploy"])).toEqual(["automerge"]);
+ expect(__testables.findAllowedLabelMatches(["release/*", "automerge/*"], ["release/*", "automerge/*"])).toEqual(["release/*", "automerge/*"]);
+ expect(__testables.findAllowedLabelMatches([], ["automerge"])).toEqual([]);
+ expect(__testables.findAllowedLabelMatches(["AutoMerge"], ["automerge"])).toEqual([]);
+ });
+
+ it("resolves temporary ID for pull_request_number", async () => {
+ const { __testables } = await import("./merge_pull_request.cjs");
+ const result = __testables.resolvePullRequestNumber({ pull_request_number: "aw_pr1" }, { aw_pr1: { number: 42 } });
+ expect(result).toEqual({ success: true, pullNumber: 42, fromTemporaryId: true });
+ });
+
+ it("fails on unresolved temporary ID for pull_request_number", async () => {
+ const { __testables } = await import("./merge_pull_request.cjs");
+ const result = __testables.resolvePullRequestNumber({ pull_request_number: "aw_missing" }, {});
+ expect(result.success).toBe(false);
+ if (!result.success) {
+ expect(result.error).toContain("Unresolved temporary ID");
+ }
+ });
+});
diff --git a/actions/setup/js/safe_output_handler_manager.cjs b/actions/setup/js/safe_output_handler_manager.cjs
index b5559147f4..1aa5cd9c50 100644
--- a/actions/setup/js/safe_output_handler_manager.cjs
+++ b/actions/setup/js/safe_output_handler_manager.cjs
@@ -49,6 +49,7 @@ const HANDLER_MAP = {
create_pull_request: "./create_pull_request.cjs",
push_to_pull_request_branch: "./push_to_pull_request_branch.cjs",
update_pull_request: "./update_pull_request.cjs",
+ merge_pull_request: "./merge_pull_request.cjs",
close_pull_request: "./close_pull_request.cjs",
mark_pull_request_as_ready_for_review: "./mark_pull_request_as_ready_for_review.cjs",
hide_comment: "./hide_comment.cjs",
diff --git a/actions/setup/js/safe_outputs_tools.json b/actions/setup/js/safe_outputs_tools.json
index 51594c0c98..5511fe84fd 100644
--- a/actions/setup/js/safe_outputs_tools.json
+++ b/actions/setup/js/safe_outputs_tools.json
@@ -807,6 +807,45 @@
"additionalProperties": false
}
},
+ {
+ "name": "merge_pull_request",
+ "description": "Merge an existing pull request only after policy checks pass (status checks, approvals, resolved review threads, label/branch/file constraints, and mergeability gates). Use this when workflows require controlled merges instead of direct merge operations.",
+ "inputSchema": {
+ "type": "object",
+ "properties": {
+ "pull_request_number": {
+ "type": ["number", "string"],
+ "description": "Pull request number to merge. This is the numeric ID from the GitHub URL (e.g., 321 in github.com/owner/repo/pull/321). If omitted, uses the triggering pull request context."
+ },
+ "merge_method": {
+ "type": "string",
+ "enum": ["merge", "squash", "rebase"],
+ "description": "Merge strategy to use: 'merge', 'squash', or 'rebase'. Defaults to 'merge'."
+ },
+ "commit_title": {
+ "type": "string",
+ "description": "Optional custom commit title to use for the merge commit/squash commit."
+ },
+ "commit_message": {
+ "type": "string",
+ "description": "Optional custom commit message body for the merge."
+ },
+ "repo": {
+ "type": "string",
+ "description": "Target repository in 'owner/repo' format for cross-repository merge operations. If omitted, uses the configured default target repository."
+ },
+ "secrecy": {
+ "type": "string",
+ "description": "Confidentiality level of the message content (e.g., \"public\", \"internal\", \"private\")."
+ },
+ "integrity": {
+ "type": "string",
+ "description": "Trustworthiness level of the message source (e.g., \"low\", \"medium\", \"high\")."
+ }
+ },
+ "additionalProperties": false
+ }
+ },
{
"name": "push_to_pull_request_branch",
"description": "Push committed changes to a pull request's branch. Use this to add follow-up commits to an existing PR, such as addressing review feedback or fixing issues. Changes must be committed locally before calling this tool.",
diff --git a/actions/setup/js/types/safe-outputs-config.d.ts b/actions/setup/js/types/safe-outputs-config.d.ts
index a48f34c97b..4ff3a87b84 100644
--- a/actions/setup/js/types/safe-outputs-config.d.ts
+++ b/actions/setup/js/types/safe-outputs-config.d.ts
@@ -206,6 +206,17 @@ interface PushToPullRequestBranchConfig extends SafeOutputConfig {
"if-no-changes"?: string;
}
+/**
+ * Configuration for merging pull requests with policy checks.
+ */
+interface MergePullRequestConfig extends SafeOutputConfig {
+ "required-labels"?: string[];
+ "allowed-labels"?: string[];
+ "allowed-branches"?: string[];
+ "allowed-files"?: string[];
+ "protected-files"?: string[];
+}
+
/**
* Configuration for uploading assets
*/
@@ -335,6 +346,7 @@ type SpecificSafeOutputConfig =
| AddReviewerConfig
| UpdateIssueConfig
| UpdatePullRequestConfig
+ | MergePullRequestConfig
| PushToPullRequestBranchConfig
| UploadAssetConfig
| AssignMilestoneConfig
@@ -371,6 +383,7 @@ export {
AddReviewerConfig,
UpdateIssueConfig,
UpdatePullRequestConfig,
+ MergePullRequestConfig,
PushToPullRequestBranchConfig,
UploadAssetConfig,
AssignMilestoneConfig,
diff --git a/docs/adr/27193-gated-merge-pull-request-safe-output.md b/docs/adr/27193-gated-merge-pull-request-safe-output.md
new file mode 100644
index 0000000000..747d8370a9
--- /dev/null
+++ b/docs/adr/27193-gated-merge-pull-request-safe-output.md
@@ -0,0 +1,99 @@
+# ADR-27193: Gated `merge-pull-request` Safe-Output with Policy-Driven Merge Enforcement
+
+**Date**: 2026-04-19
+**Status**: Draft
+**Deciders**: pelikhan, Copilot
+
+---
+
+## Part 1 — Narrative (Human-Friendly)
+
+### Context
+
+The gh-aw agentic workflow platform already supports a safe-output model in which agents can perform real side-effects (creating issues, posting comments, etc.) only through a compiler-validated, runtime-gated execution path. Until this change, there was no way for an agent to merge a pull request through the same safety layer. Merging is a high-consequence, irreversible action that must be gated on repository policy (CI status, review approval, label constraints, branch restrictions, and file-scope rules) before it can be executed safely. The existing safe-output infrastructure — a Go compiler that validates frontmatter configuration and a Node.js runtime handler layer — already provides the extension point needed to add merge support without inventing a separate execution path.
+
+### Decision
+
+We will add `merge-pull-request` as a new safe-output type that integrates with the existing compiler and runtime handler model rather than introducing a standalone merge action. The merge handler evaluates a sequenced set of policy gates — CI checks, review decision, unresolved threads, required/allowed labels, source-branch allow-list, default-branch protection, file-scope (allowed-files / protected-files), draft state, mergeability, and conflict state — and only proceeds when all gates pass. Configuration is expressed in workflow YAML frontmatter under `safe-outputs.merge-pull-request` using the same typed-config pattern already used by other safe-output types.
+
+### Alternatives Considered
+
+#### Alternative 1: Standalone Merge Action Outside the Safe-Output Model
+
+A dedicated GitHub Actions action or a separate Go command could have been written to perform gated merges independently of the safe-output layer. This would have been simpler to prototype but would have forked the security model: safe-outputs validate permissions at compile time, enforce `max` call budgets, and provide a single auditable execution path. A standalone action would duplicate that plumbing or omit it entirely, leaving merge calls outside the auditable boundary.
+
+#### Alternative 2: Thin Merge Wrapper With No Policy Gates
+
+The handler could have simply called the merge API and relied on external branch-protection rules configured in GitHub repository settings. This reduces code but shifts policy configuration to GitHub UI settings, making it invisible to code reviewers and hard to version-control. Policy gates expressed in workflow frontmatter are auditable, diffable, and scoped to the specific workflow rather than globally to the repo.
+
+#### Alternative 3: Separate Runtime Execution Path for High-Risk Operations
+
+Merge could have been treated as a distinct risk tier requiring its own runtime pipeline separate from lower-risk safe-output types. This would allow future independent evolution of merge-specific policy but introduces architectural fragmentation immediately without a concrete need. The existing model already supports configuration-driven per-type gates, so a separate pipeline is premature.
+
+### Consequences
+
+#### Positive
+- Merge operations are now auditable through the same compiler + runtime path as all other safe-output types.
+- Policy gates (labels, branches, CI, reviews, files) are version-controlled in workflow YAML frontmatter rather than scattered across GitHub repository UI settings.
+- Shared `check_runs_helpers.cjs` eliminates logic duplication between merge gating and the existing `check_skip_if_check_failing` safe-output.
+- `withRetry` wrapping of mergeability and GraphQL review-summary calls handles eventual-consistency delays from the GitHub API without requiring callers to manage retry logic.
+- Idempotency: if the PR is already merged the handler returns success, making the operation safe to re-run.
+
+#### Negative
+- The gate evaluation logic is complex (10+ sequential checks) and lives entirely in a single `.cjs` handler file; future contributors extending the gate list must understand the full sequencing.
+- Retry-backed mergeability polling adds latency on every merge attempt, even when mergeability is immediately available.
+- Adding a new safe-output type increases schema surface area in `main_workflow_schema.json` and both `safe_outputs_tools.json` catalogs, which must be kept in sync manually.
+
+#### Neutral
+- The `contents:write` + `pull-requests:write` permission pair must be present in any workflow that uses `merge-pull-request`; this is enforced at compile time but requires authors to explicitly declare permissions.
+- The W3C-style Safe Outputs specification (`docs/src/content/docs/reference/safe-outputs-specification.md`) was updated to include a formal `merge_pull_request` section, continuing the precedent of spec-first documentation for safe-output types.
+- A Go spec-enforcement test (`safe_outputs_specification_merge_pull_request_test.go`) was added to prevent spec drift; this test must be updated if the type name or required policy statements change.
+
+---
+
+## Part 2 — Normative Specification (RFC 2119)
+
+> The key words **MUST**, **MUST NOT**, **REQUIRED**, **SHALL**, **SHALL NOT**, **SHOULD**, **SHOULD NOT**, **RECOMMENDED**, **MAY**, and **OPTIONAL** in this section are to be interpreted as described in [RFC 2119](https://www.rfc-editor.org/rfc/rfc2119).
+
+### Safe-Output Model Integration
+
+1. The `merge-pull-request` capability **MUST** be implemented as a safe-output type within the existing compiler-plus-runtime-handler model and **MUST NOT** introduce a separate merge execution path outside that model.
+2. Configuration for `merge-pull-request` **MUST** be expressed in workflow YAML frontmatter under the `safe-outputs.merge-pull-request` key, using the same typed-config parsing pattern used by other safe-output types.
+3. The compiler **MUST** validate `merge-pull-request` configuration at compile time, including glob-pattern fields (`allowed-files`, `protected-files`, `allowed-branches`, `allowed-labels`).
+4. The runtime handler **MUST** be registered in the safe-output handler manager alongside all other safe-output handlers.
+
+### Policy Gate Evaluation
+
+1. Before invoking the merge API, the runtime handler **MUST** evaluate all of the following gates in order, and **MUST** abort with a descriptive error if any gate fails:
+ a. Draft state — the PR **MUST NOT** be a draft.
+ b. Mergeability — the PR **MUST** be in a mergeable state (not conflicting, not blocked).
+ c. CI checks — all required check runs **MUST** be passing; the handler **MUST** exclude deployment-environment check runs from this evaluation.
+ d. Review decision — the PR's review decision **MUST NOT** be `CHANGES_REQUESTED` or `REVIEW_REQUIRED`.
+ e. Unresolved review threads — the PR **MUST** have zero unresolved review threads.
+ f. Required labels — every label in `required-labels` **MUST** be present on the PR.
+ g. Allowed labels — when `allowed-labels` is configured, at least one PR label **MUST** exactly match a configured label name.
+ h. Allowed branches — when `allowed-branches` is configured, the PR source branch **MUST** match at least one configured glob pattern.
+ i. Target-branch protection — the PR target branch **MUST NOT** be the repository default branch and **MUST NOT** have branch protection enabled.
+ j. Allowed files — when `allowed-files` is configured, every changed file **MUST** match at least one configured glob pattern.
+ k. Protected files — when `protected-files` is configured, changed files **MUST NOT** match any configured glob pattern.
+2. Gate evaluation **MUST** be idempotent: if the PR is already merged the handler **MUST** return a success response without attempting another merge.
+3. Mergeability retrieval **MUST** use retry logic to handle GitHub API eventual-consistency delays; implementations **SHOULD** retry at least 3 times with exponential back-off before reporting failure.
+
+### Shared Infrastructure
+
+1. Check-run filtering and deduplication logic **MUST** be implemented in a shared helper module (`check_runs_helpers.cjs`) and **MUST NOT** be duplicated in individual safe-output handlers.
+2. GraphQL calls used to retrieve review summary data **SHOULD** be wrapped with retry logic to tolerate transient API failures.
+
+### Schema and Permissions
+
+1. The `merge-pull-request` type **MUST** be declared in `main_workflow_schema.json` and in all `safe_outputs_tools.json` catalogs used by compiler and runtime.
+2. Any workflow using `merge-pull-request` **MUST** declare `contents: write` and `pull-requests: write` permissions; the compiler **MUST** enforce this at compile time.
+3. The W3C-style Safe Outputs specification **MUST** include a formal section documenting the `merge_pull_request` type, its policy gates, and its required permissions.
+
+### Conformance
+
+An implementation is considered conformant with this ADR if it satisfies all **MUST** and **MUST NOT** requirements above. Failure to meet any **MUST** or **MUST NOT** requirement constitutes non-conformance.
+
+---
+
+*This is a DRAFT ADR generated by the [Design Decision Gate](https://github.com/github/gh-aw/actions/runs/24632957089) workflow. The PR author must review, complete, and finalize this document before the PR can merge.*
diff --git a/docs/src/content/docs/agent-factory-status.mdx b/docs/src/content/docs/agent-factory-status.mdx
index 00921406af..a7b60fd0fb 100644
--- a/docs/src/content/docs/agent-factory-status.mdx
+++ b/docs/src/content/docs/agent-factory-status.mdx
@@ -9,6 +9,7 @@ These are experimental agentic workflows used by the GitHub Next team to learn,
| Workflow | Agent | Status | Schedule | Command |
|:---------|:-----:|:------:|:--------:|:-------:|
+| [[aw] Failure Investigator (6h)](https://github.com/github/gh-aw/blob/main/.github/workflows/aw-failure-investigator.md) | claude | [![[aw] Failure Investigator (6h)](https://github.com/github/gh-aw/actions/workflows/aw-failure-investigator.lock.yml/badge.svg)](https://github.com/github/gh-aw/actions/workflows/aw-failure-investigator.lock.yml) | `every 6h` | - |
| [/cloclo](https://github.com/github/gh-aw/blob/main/.github/workflows/cloclo.md) | claude | [](https://github.com/github/gh-aw/actions/workflows/cloclo.lock.yml) | - | `/cloclo` |
| [ACE Editor Session](https://github.com/github/gh-aw/blob/main/.github/workflows/ace-editor.md) | copilot | [](https://github.com/github/gh-aw/actions/workflows/ace-editor.lock.yml) | - | `/ace` |
| [Agent Container Smoke Test](https://github.com/github/gh-aw/blob/main/.github/workflows/smoke-test-tools.md) | copilot | [](https://github.com/github/gh-aw/actions/workflows/smoke-test-tools.lock.yml) | - | - |
@@ -47,10 +48,12 @@ These are experimental agentic workflows used by the GitHub Next team to learn,
| [Copilot Agent PR Analysis](https://github.com/github/gh-aw/blob/main/.github/workflows/copilot-agent-analysis.md) | claude | [](https://github.com/github/gh-aw/actions/workflows/copilot-agent-analysis.lock.yml) | - | - |
| [Copilot Agent Prompt Clustering Analysis](https://github.com/github/gh-aw/blob/main/.github/workflows/prompt-clustering-analysis.md) | claude | [](https://github.com/github/gh-aw/actions/workflows/prompt-clustering-analysis.lock.yml) | - | - |
| [Copilot CLI Deep Research Agent](https://github.com/github/gh-aw/blob/main/.github/workflows/copilot-cli-deep-research.md) | copilot | [](https://github.com/github/gh-aw/actions/workflows/copilot-cli-deep-research.lock.yml) | - | - |
+| [Copilot Opt](https://github.com/github/gh-aw/blob/main/.github/workflows/copilot-opt.md) | copilot | [](https://github.com/github/gh-aw/actions/workflows/copilot-opt.lock.yml) | `weekly on monday` | - |
| [Copilot PR Conversation NLP Analysis](https://github.com/github/gh-aw/blob/main/.github/workflows/copilot-pr-nlp-analysis.md) | copilot | [](https://github.com/github/gh-aw/actions/workflows/copilot-pr-nlp-analysis.lock.yml) | `daily around 10:00 on weekdays` | - |
| [Copilot PR Prompt Pattern Analysis](https://github.com/github/gh-aw/blob/main/.github/workflows/copilot-pr-prompt-analysis.md) | copilot | [](https://github.com/github/gh-aw/actions/workflows/copilot-pr-prompt-analysis.lock.yml) | - | - |
| [Copilot Session Insights](https://github.com/github/gh-aw/blob/main/.github/workflows/copilot-session-insights.md) | claude | [](https://github.com/github/gh-aw/actions/workflows/copilot-session-insights.lock.yml) | - | - |
| [Copilot Token Usage Optimizer](https://github.com/github/gh-aw/blob/main/.github/workflows/copilot-token-optimizer.md) | copilot | [](https://github.com/github/gh-aw/actions/workflows/copilot-token-optimizer.lock.yml) | `daily around 14:00 on weekdays` | - |
+| [Daily AW Cross-Repo Compile Check](https://github.com/github/gh-aw/blob/main/.github/workflows/daily-aw-cross-repo-compile-check.md) | claude | [](https://github.com/github/gh-aw/actions/workflows/daily-aw-cross-repo-compile-check.lock.yml) | - | - |
| [Daily Choice Type Test](https://github.com/github/gh-aw/blob/main/.github/workflows/daily-choice-test.md) | claude | [](https://github.com/github/gh-aw/actions/workflows/daily-choice-test.lock.yml) | `daily around 12:00 on weekdays` | - |
| [Daily CLI Performance Agent](https://github.com/github/gh-aw/blob/main/.github/workflows/daily-cli-performance.md) | copilot | [](https://github.com/github/gh-aw/actions/workflows/daily-cli-performance.lock.yml) | - | - |
| [Daily CLI Tools Exploratory Tester](https://github.com/github/gh-aw/blob/main/.github/workflows/daily-cli-tools-tester.md) | copilot | [](https://github.com/github/gh-aw/actions/workflows/daily-cli-tools-tester.lock.yml) | - | - |
@@ -159,11 +162,13 @@ These are experimental agentic workflows used by the GitHub Next team to learn,
| [Smoke Agent: public/none](https://github.com/github/gh-aw/blob/main/.github/workflows/smoke-agent-public-none.md) | claude | [](https://github.com/github/gh-aw/actions/workflows/smoke-agent-public-none.lock.yml) | - | - |
| [Smoke Agent: scoped/approved](https://github.com/github/gh-aw/blob/main/.github/workflows/smoke-agent-scoped-approved.md) | claude | [](https://github.com/github/gh-aw/actions/workflows/smoke-agent-scoped-approved.lock.yml) | - | - |
| [Smoke Call Workflow](https://github.com/github/gh-aw/blob/main/.github/workflows/smoke-call-workflow.md) | codex | [](https://github.com/github/gh-aw/actions/workflows/smoke-call-workflow.lock.yml) | - | - |
+| [Smoke CI](https://github.com/github/gh-aw/blob/main/.github/workflows/smoke-ci.md) | copilot | [](https://github.com/github/gh-aw/actions/workflows/smoke-ci.lock.yml) | - | - |
| [Smoke Claude](https://github.com/github/gh-aw/blob/main/.github/workflows/smoke-claude.md) | claude | [](https://github.com/github/gh-aw/actions/workflows/smoke-claude.lock.yml) | - | - |
| [Smoke Codex](https://github.com/github/gh-aw/blob/main/.github/workflows/smoke-codex.md) | codex | [](https://github.com/github/gh-aw/actions/workflows/smoke-codex.lock.yml) | - | - |
| [Smoke Copilot](https://github.com/github/gh-aw/blob/main/.github/workflows/smoke-copilot.md) | copilot | [](https://github.com/github/gh-aw/actions/workflows/smoke-copilot.lock.yml) | - | - |
| [Smoke Copilot ARM64](https://github.com/github/gh-aw/blob/main/.github/workflows/smoke-copilot-arm.md) | copilot | [](https://github.com/github/gh-aw/actions/workflows/smoke-copilot-arm.lock.yml) | - | - |
| [Smoke Create Cross-Repo PR](https://github.com/github/gh-aw/blob/main/.github/workflows/smoke-create-cross-repo-pr.md) | copilot | [](https://github.com/github/gh-aw/actions/workflows/smoke-create-cross-repo-pr.lock.yml) | - | - |
+| [Smoke Crush](https://github.com/github/gh-aw/blob/main/.github/workflows/smoke-crush.md) | crush | [](https://github.com/github/gh-aw/actions/workflows/smoke-crush.lock.yml) | - | - |
| [Smoke Gemini](https://github.com/github/gh-aw/blob/main/.github/workflows/smoke-gemini.md) | gemini | [](https://github.com/github/gh-aw/actions/workflows/smoke-gemini.lock.yml) | - | - |
| [Smoke Multi PR](https://github.com/github/gh-aw/blob/main/.github/workflows/smoke-multi-pr.md) | copilot | [](https://github.com/github/gh-aw/actions/workflows/smoke-multi-pr.lock.yml) | - | - |
| [Smoke Project](https://github.com/github/gh-aw/blob/main/.github/workflows/smoke-project.md) | copilot | [](https://github.com/github/gh-aw/actions/workflows/smoke-project.lock.yml) | - | - |
diff --git a/docs/src/content/docs/reference/frontmatter-full.md b/docs/src/content/docs/reference/frontmatter-full.md
index 70993cac0b..028af7046a 100644
--- a/docs/src/content/docs/reference/frontmatter-full.md
+++ b/docs/src/content/docs/reference/frontmatter-full.md
@@ -4571,6 +4571,66 @@ safe-outputs:
# body updates enabled)
update-pull-request: null
+ # Enable AI agents to merge pull requests under configured policy gates.
+ # (optional)
+ # This field supports multiple formats (oneOf):
+
+ # Option 1: Enable pull request merge with default policy configuration
+ merge-pull-request: null
+
+ # Option 2: Configuration for controlled pull request merges. The merge is blocked
+ # unless all configured gates pass.
+ merge-pull-request:
+ # Maximum number of pull request merges to perform per run (default: 1). Supports
+ # integer or GitHub Actions expression (e.g. '${{ inputs.max }}').
+ # (optional)
+ # This field supports multiple formats (oneOf):
+
+ # Option 1: integer
+ max: 1
+
+ # Option 2: GitHub Actions expression that resolves to an integer at runtime
+ max: "${{ inputs.max }}"
+
+ # List of labels that must all be present on the pull request before merge is
+ # allowed.
+ # (optional)
+ required-labels: []
+ # Array of strings
+
+ # Exact pull request label names. At least one existing PR label must exactly
+ # match one of these values when configured.
+ # (optional)
+ allowed-labels: []
+ # Array of strings
+
+ # Glob patterns for allowed source branch names (pull request head ref).
+ # (optional)
+ allowed-branches: []
+ # Array of strings
+
+ # Exclusive allowlist of file path glob patterns. When configured, every changed
+ # file in the pull request must match at least one pattern.
+ # (optional)
+ allowed-files: []
+ # Array of strings
+
+ # Glob patterns for protected files. If any changed file matches, merge is
+ # blocked. This check takes precedence over allowed-files.
+ # (optional)
+ protected-files: []
+ # Array of strings
+
+ # GitHub token to use for this specific output type. Overrides global github-token
+ # if specified.
+ # (optional)
+ github-token: "${{ secrets.GITHUB_TOKEN }}"
+
+ # If true, evaluate merge gates and emit preview results without executing the
+ # merge API call.
+ # (optional)
+ staged: true
+
# Enable AI agents to push commits directly to pull request branches for automated
# fixes or improvements.
# (optional)
diff --git a/docs/src/content/docs/reference/safe-outputs-specification.md b/docs/src/content/docs/reference/safe-outputs-specification.md
index 9ce93b58a5..0b78cbb3da 100644
--- a/docs/src/content/docs/reference/safe-outputs-specification.md
+++ b/docs/src/content/docs/reference/safe-outputs-specification.md
@@ -7,9 +7,9 @@ sidebar:
# Safe Outputs MCP Gateway Specification
-**Version**: 1.16.0
+**Version**: 1.17.0
**Status**: Working Draft
-**Publication Date**: 2026-04-06
+**Publication Date**: 2026-04-19
**Editor**: GitHub Agentic Workflows Team
**This Version**: [safe-outputs-specification](/gh-aw/reference/safe-outputs-specification/)
**Latest Published Version**: This document
@@ -2760,6 +2760,86 @@ This section provides complete definitions for all remaining safe output types.
---
+#### Type: merge_pull_request
+
+**Purpose**: Merge pull requests only when configured policy gates pass.
+
+**Default Max**: 1
+**Cross-Repository Support**: Yes
+**Mandatory**: No
+
+**MCP Tool Schema**:
+
+```json
+{
+ "name": "merge_pull_request",
+ "description": "Merge an existing pull request only after policy checks pass (status checks, approvals, resolved review threads, label/branch/file constraints, and mergeability gates).",
+ "inputSchema": {
+ "type": "object",
+ "properties": {
+ "pull_request_number": {
+ "type": ["number", "string"],
+ "description": "Pull request number to merge. Supports numeric values or temporary IDs from prior safe-output operations. If omitted, uses the triggering pull request context."
+ },
+ "merge_method": {
+ "type": "string",
+ "enum": ["merge", "squash", "rebase"]
+ },
+ "commit_title": {"type": "string"},
+ "commit_message": {"type": "string"},
+ "repo": {
+ "type": "string",
+ "description": "Target repository in owner/repo format."
+ }
+ },
+ "additionalProperties": false
+ }
+}
+```
+
+**Operational Semantics**:
+
+1. **Repository/PR Resolution**: Resolves target repository and pull request from context or explicit input.
+2. **Mergeability Check**: Validates pull request is mergeable and not draft/conflicted.
+3. **Policy Gates**: Enforces required checks, review decision, unresolved review thread gating, label constraints, source branch constraints, and file-scope constraints.
+4. **Base Branch Protection**: Refuses merges when the target base branch is protected or is the repository default branch.
+5. **Idempotency**: Returns success when the pull request is already merged.
+
+**Configuration Parameters**:
+
+- `max`: Operation limit (default: 1)
+- `required-labels`: Labels that must exist on the pull request
+- `allowed-labels`: Exact label names; at least one pull request label must exactly match when configured
+- `allowed-branches`: Source branch glob patterns
+- `allowed-files`: Changed-file glob allowlist
+- `protected-files`: Changed-file glob denylist (any match blocks merge)
+- `target-repo`: Cross-repository target
+- `allowed-repos`: Cross-repository allowlist
+- `staged`: Staged mode override
+
+**Required Permissions**:
+
+*GitHub Actions Token*:
+
+- `contents: write` - Merge operation execution
+- `pull-requests: write` - Pull request metadata and merge operations
+
+*GitHub App*:
+
+- `contents: write` - Merge operation execution
+- `pull-requests: write` - Pull request metadata and merge operations
+- `metadata: read` - Repository metadata (automatically granted)
+
+**Notes**:
+
+- Merge execution is blocked unless all configured gates pass.
+- Merge to the repository default branch is always refused by this safe output type.
+- `allowed-files` and `protected-files` are evaluated independently; both must pass.
+- `pull_request_number` may be a temporary ID that resolves to a pull request number from earlier safe-output operations.
+- GraphQL mergeability and review-summary queries are retried with transient-error retry logic.
+
+---
+
#### Type: mark_pull_request_as_ready_for_review
**Purpose**: Convert draft pull request to ready-for-review status.
@@ -4715,6 +4795,12 @@ safe-outputs:
## Appendix F: Document History
+**Version 1.17.0** (2026-04-19):
+
+- **Added**: `merge_pull_request` safe output type definition in Section 7.3, including schema, policy gate semantics, and required permissions
+- **Documented**: Merge policy gates for checks, reviews, labels, branch constraints, file constraints, and base-branch restrictions
+- **Updated**: Publication metadata to 1.17.0
+
**Version 1.15.0** (2026-03-29):
- **Added**: Section 11 "Cache Memory Integrity" specifying integrity-aware cache key format, git-backed branching, merge-down semantics, pre-agent setup, and post-agent commit requirements (CI1–CI12)
diff --git a/pkg/parser/schemas/main_workflow_schema.json b/pkg/parser/schemas/main_workflow_schema.json
index 118e679d68..2f2b65ad8a 100644
--- a/pkg/parser/schemas/main_workflow_schema.json
+++ b/pkg/parser/schemas/main_workflow_schema.json
@@ -4515,7 +4515,7 @@
},
"safe-outputs": {
"type": "object",
- "$comment": "Required if workflow creates or modifies GitHub resources. Operations requiring safe-outputs: autofix-code-scanning-alert, add-comment, add-labels, add-reviewer, assign-milestone, assign-to-agent, assign-to-user, close-discussion, close-issue, close-pull-request, create-agent-session, create-agent-task (deprecated, use create-agent-session), create-code-scanning-alert, create-discussion, create-issue, create-project, create-project-status-update, create-pull-request, create-pull-request-review-comment, dispatch-workflow, hide-comment, link-sub-issue, mark-pull-request-as-ready-for-review, missing-data, missing-tool, noop, push-to-pull-request-branch, remove-labels, reply-to-pull-request-review-comment, resolve-pull-request-review-thread, set-issue-type, submit-pull-request-review, threat-detection, unassign-from-user, update-discussion, update-issue, update-project, update-pull-request, update-release, upload-artifact, upload-asset. See documentation for complete details.",
+ "$comment": "Required if workflow creates or modifies GitHub resources. Operations requiring safe-outputs: autofix-code-scanning-alert, add-comment, add-labels, add-reviewer, assign-milestone, assign-to-agent, assign-to-user, close-discussion, close-issue, close-pull-request, create-agent-session, create-agent-task (deprecated, use create-agent-session), create-code-scanning-alert, create-discussion, create-issue, create-project, create-project-status-update, create-pull-request, create-pull-request-review-comment, dispatch-workflow, hide-comment, link-sub-issue, mark-pull-request-as-ready-for-review, merge-pull-request, missing-data, missing-tool, noop, push-to-pull-request-branch, remove-labels, reply-to-pull-request-review-comment, resolve-pull-request-review-thread, set-issue-type, submit-pull-request-review, threat-detection, unassign-from-user, update-discussion, update-issue, update-project, update-pull-request, update-release, upload-artifact, upload-asset. See documentation for complete details.",
"description": "Safe output processing configuration that automatically creates GitHub issues, comments, and pull requests from AI workflow output without requiring write permissions in the main job",
"examples": [
{
@@ -7028,6 +7028,82 @@
],
"description": "Enable AI agents to edit and update existing pull request content, titles, labels, reviewers, and metadata."
},
+ "merge-pull-request": {
+ "oneOf": [
+ {
+ "type": "null",
+ "description": "Enable pull request merge with default policy configuration"
+ },
+ {
+ "type": "object",
+ "description": "Configuration for controlled pull request merges. The merge is blocked unless all configured gates pass.",
+ "properties": {
+ "max": {
+ "description": "Maximum number of pull request merges to perform per run (default: 1). Supports integer or GitHub Actions expression (e.g. '${{ inputs.max }}').",
+ "oneOf": [
+ {
+ "type": "integer",
+ "minimum": 1,
+ "maximum": 10,
+ "default": 1
+ },
+ {
+ "type": "string",
+ "pattern": "^\\$\\{\\{.*\\}\\}$",
+ "description": "GitHub Actions expression that resolves to an integer at runtime"
+ }
+ ]
+ },
+ "required-labels": {
+ "type": "array",
+ "items": {
+ "type": "string"
+ },
+ "description": "List of labels that must all be present on the pull request before merge is allowed."
+ },
+ "allowed-labels": {
+ "type": "array",
+ "items": {
+ "type": "string"
+ },
+ "description": "Exact pull request label names. At least one existing PR label must exactly match one of these values when configured."
+ },
+ "allowed-branches": {
+ "type": "array",
+ "items": {
+ "type": "string"
+ },
+ "description": "Glob patterns for allowed source branch names (pull request head ref)."
+ },
+ "allowed-files": {
+ "type": "array",
+ "items": {
+ "type": "string"
+ },
+ "description": "Exclusive allowlist of file path glob patterns. When configured, every changed file in the pull request must match at least one pattern."
+ },
+ "protected-files": {
+ "type": "array",
+ "items": {
+ "type": "string"
+ },
+ "description": "Glob patterns for protected files. If any changed file matches, merge is blocked. This check takes precedence over allowed-files."
+ },
+ "github-token": {
+ "$ref": "#/$defs/github_token",
+ "description": "GitHub token to use for this specific output type. Overrides global github-token if specified."
+ },
+ "staged": {
+ "type": "boolean",
+ "description": "If true, evaluate merge gates and emit preview results without executing the merge API call.",
+ "examples": [true, false]
+ }
+ },
+ "additionalProperties": false
+ }
+ ],
+ "description": "Enable AI agents to merge pull requests under configured policy gates."
+ },
"push-to-pull-request-branch": {
"oneOf": [
{
diff --git a/pkg/workflow/compiler.go b/pkg/workflow/compiler.go
index 6853cc26d0..937bf259d7 100644
--- a/pkg/workflow/compiler.go
+++ b/pkg/workflow/compiler.go
@@ -170,6 +170,12 @@ func (c *Compiler) validateWorkflowData(workflowData *WorkflowData, markdownPath
return formatCompilerError(markdownPath, "error", err.Error(), err)
}
+ // Validate safe-outputs merge-pull-request configuration
+ log.Printf("Validating safe-outputs merge-pull-request")
+ if err := validateSafeOutputsMergePullRequest(workflowData.SafeOutputs); err != nil {
+ return formatCompilerError(markdownPath, "error", err.Error(), err)
+ }
+
// Validate safe-job needs: declarations against known generated job IDs
log.Printf("Validating safe-job needs declarations")
if err := validateSafeJobNeeds(workflowData); err != nil {
diff --git a/pkg/workflow/compiler_safe_outputs_handlers.go b/pkg/workflow/compiler_safe_outputs_handlers.go
index 62f17b071f..c98fcbb18f 100644
--- a/pkg/workflow/compiler_safe_outputs_handlers.go
+++ b/pkg/workflow/compiler_safe_outputs_handlers.go
@@ -447,6 +447,22 @@ var handlerRegistry = map[string]handlerBuilder{
AddIfTrue("staged", c.Staged).
Build()
},
+ "merge_pull_request": func(cfg *SafeOutputsConfig) map[string]any {
+ if cfg.MergePullRequest == nil {
+ return nil
+ }
+ c := cfg.MergePullRequest
+ return newHandlerConfigBuilder().
+ AddTemplatableInt("max", c.Max).
+ AddStringSlice("required_labels", c.RequiredLabels).
+ AddStringSlice("allowed_labels", c.AllowedLabels).
+ AddStringSlice("allowed_branches", c.AllowedBranches).
+ AddStringSlice("allowed_files", c.AllowedFiles).
+ AddStringSlice("protected_files", c.ProtectedFiles).
+ AddIfNotEmpty("github-token", c.GitHubToken).
+ AddIfTrue("staged", c.Staged).
+ Build()
+ },
"close_pull_request": func(cfg *SafeOutputsConfig) map[string]any {
if cfg.ClosePullRequests == nil {
return nil
diff --git a/pkg/workflow/compiler_types.go b/pkg/workflow/compiler_types.go
index 579552333f..dee9caa5e6 100644
--- a/pkg/workflow/compiler_types.go
+++ b/pkg/workflow/compiler_types.go
@@ -548,6 +548,7 @@ type SafeOutputsConfig struct {
UnassignFromUser *UnassignFromUserConfig `yaml:"unassign-from-user,omitempty"` // Remove assignees from issues
UpdateIssues *UpdateIssuesConfig `yaml:"update-issue,omitempty"`
UpdatePullRequests *UpdatePullRequestsConfig `yaml:"update-pull-request,omitempty"` // Update GitHub pull request title/body
+ MergePullRequest *MergePullRequestConfig `yaml:"merge-pull-request,omitempty"` // Merge pull requests under constrained policy checks
PushToPullRequestBranch *PushToPullRequestBranchConfig `yaml:"push-to-pull-request-branch,omitempty"`
UploadAssets *UploadAssetsConfig `yaml:"upload-asset,omitempty"`
UploadArtifact *UploadArtifactConfig `yaml:"upload-artifact,omitempty"` // Upload files as run-scoped GitHub Actions artifacts
diff --git a/pkg/workflow/js/safe_outputs_tools.json b/pkg/workflow/js/safe_outputs_tools.json
index f842835971..9fe2073d99 100644
--- a/pkg/workflow/js/safe_outputs_tools.json
+++ b/pkg/workflow/js/safe_outputs_tools.json
@@ -962,6 +962,52 @@
"additionalProperties": false
}
},
+ {
+ "name": "merge_pull_request",
+ "description": "Merge an existing pull request only after policy checks pass (status checks, approvals, resolved review threads, label/branch/file constraints, and mergeability gates). Use this when workflows require controlled merges instead of direct merge operations.",
+ "inputSchema": {
+ "type": "object",
+ "properties": {
+ "pull_request_number": {
+ "type": [
+ "number",
+ "string"
+ ],
+ "description": "Pull request number to merge. This is the numeric ID from the GitHub URL (e.g., 321 in github.com/owner/repo/pull/321). If omitted, uses the triggering pull request context."
+ },
+ "merge_method": {
+ "type": "string",
+ "enum": [
+ "merge",
+ "squash",
+ "rebase"
+ ],
+ "description": "Merge strategy to use: 'merge', 'squash', or 'rebase'. Defaults to 'merge'."
+ },
+ "commit_title": {
+ "type": "string",
+ "description": "Optional custom commit title to use for the merge commit/squash commit."
+ },
+ "commit_message": {
+ "type": "string",
+ "description": "Optional custom commit message body for the merge."
+ },
+ "repo": {
+ "type": "string",
+ "description": "Target repository in 'owner/repo' format for cross-repository merge operations. If omitted, uses the configured default target repository."
+ },
+ "secrecy": {
+ "type": "string",
+ "description": "Confidentiality level of the message content (e.g., \"public\", \"internal\", \"private\")."
+ },
+ "integrity": {
+ "type": "string",
+ "description": "Trustworthiness level of the message source (e.g., \"low\", \"medium\", \"high\")."
+ }
+ },
+ "additionalProperties": false
+ }
+ },
{
"name": "push_to_pull_request_branch",
"description": "Push committed changes to a pull request's branch. Use this to add follow-up commits to an existing PR, such as addressing review feedback or fixing issues. Changes must be committed locally before calling this tool.",
diff --git a/pkg/workflow/merge_pull_request.go b/pkg/workflow/merge_pull_request.go
new file mode 100644
index 0000000000..d7dc06c09c
--- /dev/null
+++ b/pkg/workflow/merge_pull_request.go
@@ -0,0 +1,38 @@
+package workflow
+
+import "github.com/github/gh-aw/pkg/logger"
+
+var mergePullRequestLog = logger.New("workflow:merge_pull_request")
+
+// MergePullRequestConfig holds configuration for merging pull requests with policy checks.
+type MergePullRequestConfig struct {
+ BaseSafeOutputConfig `yaml:",inline"`
+ RequiredLabels []string `yaml:"required-labels,omitempty"` // Labels that must be present on the PR
+ AllowedLabels []string `yaml:"allowed-labels,omitempty"` // Exact label names; at least one PR label must match when configured
+ AllowedBranches []string `yaml:"allowed-branches,omitempty"` // Glob patterns for source branch names
+ AllowedFiles []string `yaml:"allowed-files,omitempty"` // Glob patterns; all changed files must match when configured
+ ProtectedFiles []string `yaml:"protected-files,omitempty"` // Glob patterns; any match blocks merge
+}
+
+// parseMergePullRequestConfig handles merge-pull-request configuration.
+func (c *Compiler) parseMergePullRequestConfig(outputMap map[string]any) *MergePullRequestConfig {
+ configData, exists := outputMap["merge-pull-request"]
+ if !exists {
+ return nil
+ }
+
+ cfg := &MergePullRequestConfig{}
+ if configMap, ok := configData.(map[string]any); ok {
+ cfg.RequiredLabels = ParseStringArrayFromConfig(configMap, "required-labels", mergePullRequestLog)
+ cfg.AllowedLabels = ParseStringArrayFromConfig(configMap, "allowed-labels", mergePullRequestLog)
+ cfg.AllowedBranches = ParseStringArrayFromConfig(configMap, "allowed-branches", mergePullRequestLog)
+ cfg.AllowedFiles = ParseStringArrayFromConfig(configMap, "allowed-files", mergePullRequestLog)
+ cfg.ProtectedFiles = ParseStringArrayFromConfig(configMap, "protected-files", mergePullRequestLog)
+ c.parseBaseSafeOutputConfig(configMap, &cfg.BaseSafeOutputConfig, 1)
+ return cfg
+ }
+
+ // merge-pull-request: null enables defaults
+ cfg.Max = defaultIntStr(1)
+ return cfg
+}
diff --git a/pkg/workflow/safe_outputs_config.go b/pkg/workflow/safe_outputs_config.go
index f96ee96e87..1ac733fb14 100644
--- a/pkg/workflow/safe_outputs_config.go
+++ b/pkg/workflow/safe_outputs_config.go
@@ -258,6 +258,12 @@ func (c *Compiler) extractSafeOutputsConfig(frontmatter map[string]any) *SafeOut
config.UpdatePullRequests = updatePullRequestsConfig
}
+ // Handle merge-pull-request
+ mergePullRequestConfig := c.parseMergePullRequestConfig(outputMap)
+ if mergePullRequestConfig != nil {
+ config.MergePullRequest = mergePullRequestConfig
+ }
+
// Handle push-to-pull-request-branch
pushToBranchConfig := c.parsePushToPullRequestBranchConfig(outputMap)
if pushToBranchConfig != nil {
diff --git a/pkg/workflow/safe_outputs_permissions.go b/pkg/workflow/safe_outputs_permissions.go
index 3fe88cbc7d..07950ac4eb 100644
--- a/pkg/workflow/safe_outputs_permissions.go
+++ b/pkg/workflow/safe_outputs_permissions.go
@@ -167,6 +167,10 @@ func ComputePermissionsForSafeOutputs(safeOutputs *SafeOutputsConfig) *Permissio
permissions.Merge(NewPermissionsContentsReadPRWrite())
}
}
+ if safeOutputs.MergePullRequest != nil && !isHandlerStaged(safeOutputs.Staged, safeOutputs.MergePullRequest.Staged) {
+ safeOutputsPermissionsLog.Print("Adding permissions for merge-pull-request")
+ permissions.Merge(NewPermissionsContentsWritePRWrite())
+ }
if safeOutputs.ClosePullRequests != nil && !isHandlerStaged(safeOutputs.Staged, safeOutputs.ClosePullRequests.Staged) {
safeOutputsPermissionsLog.Print("Adding permissions for close-pull-request")
permissions.Merge(NewPermissionsContentsReadPRWrite())
@@ -329,6 +333,8 @@ func SafeOutputsConfigFromKeys(keys []string) *SafeOutputsConfig {
config.UpdateIssues = &UpdateIssuesConfig{}
case "update-pull-request":
config.UpdatePullRequests = &UpdatePullRequestsConfig{}
+ case "merge-pull-request":
+ config.MergePullRequest = &MergePullRequestConfig{}
case "push-to-pull-request-branch":
config.PushToPullRequestBranch = &PushToPullRequestBranchConfig{}
case "upload-asset":
diff --git a/pkg/workflow/safe_outputs_specification_merge_pull_request_test.go b/pkg/workflow/safe_outputs_specification_merge_pull_request_test.go
new file mode 100644
index 0000000000..21cf0b90dd
--- /dev/null
+++ b/pkg/workflow/safe_outputs_specification_merge_pull_request_test.go
@@ -0,0 +1,78 @@
+//go:build !integration
+
+package workflow
+
+import (
+ "os"
+ "path/filepath"
+ "strings"
+ "testing"
+
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+)
+
+func TestSafeOutputsSpecificationDocumentsMergePullRequest(t *testing.T) {
+ specPath := findRepoFile(t, filepath.Join("docs", "src", "content", "docs", "reference", "safe-outputs-specification.md"))
+ specBytes, err := os.ReadFile(specPath)
+ require.NoError(t, err, "should read safe outputs specification")
+
+ spec := string(specBytes)
+ section := extractSpecTypeSection(t, spec, "merge_pull_request")
+
+ assert.Contains(t, section, "**Purpose**: Merge pull requests only when configured policy gates pass.",
+ "spec should define merge_pull_request purpose")
+ assert.Contains(t, section, "Base Branch Protection",
+ "spec should document base branch restrictions for merge_pull_request")
+ assert.Contains(t, section, "repository default branch",
+ "spec should explicitly refuse merge_pull_request to repository default branch")
+ assert.Contains(t, section, "`required-labels`",
+ "spec should document required-labels configuration for merge_pull_request")
+ assert.Contains(t, section, "`allowed-files`",
+ "spec should document allowed-files configuration for merge_pull_request")
+ assert.Contains(t, section, "`protected-files`",
+ "spec should document protected-files configuration for merge_pull_request")
+ assert.Contains(t, section, "`contents: write`",
+ "spec should document contents: write permission for merge_pull_request")
+ assert.Contains(t, section, "`pull-requests: write`",
+ "spec should document pull-requests: write permission for merge_pull_request")
+ assert.Contains(t, section, "temporary ID",
+ "spec should document temporary ID support for merge_pull_request pull_request_number")
+}
+
+func extractSpecTypeSection(t *testing.T, spec, typeName string) string {
+ t.Helper()
+
+ header := "#### Type: " + typeName
+ start := strings.Index(spec, header)
+ require.NotEqual(t, -1, start, "spec should include section header for %s", typeName)
+
+ rest := spec[start+len(header):]
+ nextOffset := strings.Index(rest, "\n#### Type: ")
+ if nextOffset == -1 {
+ return spec[start:]
+ }
+
+ return spec[start : start+len(header)+nextOffset]
+}
+
+func findRepoFile(t *testing.T, relativePath string) string {
+ t.Helper()
+
+ wd, err := os.Getwd()
+ require.NoError(t, err, "should get current working directory")
+
+ dir := wd
+ for {
+ candidate := filepath.Join(dir, relativePath)
+ if _, err := os.Stat(candidate); err == nil {
+ return candidate
+ }
+
+ parent := filepath.Dir(dir)
+ if parent == dir {
+ t.Fatalf("could not find %s from %s", relativePath, wd)
+ }
+ dir = parent
+ }
+}
diff --git a/pkg/workflow/safe_outputs_state.go b/pkg/workflow/safe_outputs_state.go
index cb3def0c99..70317dc848 100644
--- a/pkg/workflow/safe_outputs_state.go
+++ b/pkg/workflow/safe_outputs_state.go
@@ -45,6 +45,7 @@ var safeOutputFieldMapping = map[string]string{
"UnassignFromUser": "unassign_from_user",
"UpdateIssues": "update_issue",
"UpdatePullRequests": "update_pull_request",
+ "MergePullRequest": "merge_pull_request",
"PushToPullRequestBranch": "push_to_pull_request_branch",
"UploadAssets": "upload_asset",
"UploadArtifact": "upload_artifact",
diff --git a/pkg/workflow/safe_outputs_validation.go b/pkg/workflow/safe_outputs_validation.go
index ce05f5e7a1..247f3138fd 100644
--- a/pkg/workflow/safe_outputs_validation.go
+++ b/pkg/workflow/safe_outputs_validation.go
@@ -181,6 +181,71 @@ func validateTargetValue(configName, target string) error {
var safeOutputsAllowWorkflowsValidationLog = newValidationLogger("safe_outputs_allow_workflows")
+var safeOutputsMergePullRequestValidationLog = newValidationLogger("safe_outputs_merge_pull_request")
+
+// validateSafeOutputsMergePullRequest validates merge-pull-request policy configuration.
+func validateSafeOutputsMergePullRequest(config *SafeOutputsConfig) error {
+ if config == nil || config.MergePullRequest == nil {
+ return nil
+ }
+
+ c := config.MergePullRequest
+ safeOutputsMergePullRequestValidationLog.Print("Validating merge-pull-request policy fields")
+
+ validatePathGlobList := func(field string, patterns []string) error {
+ for i, pat := range patterns {
+ if errs := validatePathGlob(pat); len(errs) > 0 {
+ msgs := make([]string, 0, len(errs))
+ for _, e := range errs {
+ msgs = append(msgs, e.Message)
+ }
+ return fmt.Errorf("invalid glob pattern %q in safe-outputs.merge-pull-request.%s[%d]: %s", pat, field, i, strings.Join(msgs, "; "))
+ }
+ }
+ return nil
+ }
+
+ validateNonEmptyStringList := func(field string, values []string) error {
+ for i, value := range values {
+ if strings.TrimSpace(value) == "" {
+ return fmt.Errorf("safe-outputs.merge-pull-request.%s[%d] cannot be empty", field, i)
+ }
+ }
+ return nil
+ }
+
+ validateRefGlobList := func(field string, patterns []string) error {
+ for i, pat := range patterns {
+ if errs := validateRefGlob(pat); len(errs) > 0 {
+ msgs := make([]string, 0, len(errs))
+ for _, e := range errs {
+ msgs = append(msgs, e.Message)
+ }
+ return fmt.Errorf("invalid glob pattern %q in safe-outputs.merge-pull-request.%s[%d]: %s", pat, field, i, strings.Join(msgs, "; "))
+ }
+ }
+ return nil
+ }
+
+ if err := validateNonEmptyStringList("required-labels", c.RequiredLabels); err != nil {
+ return err
+ }
+ if err := validateNonEmptyStringList("allowed-labels", c.AllowedLabels); err != nil {
+ return err
+ }
+ if err := validateRefGlobList("allowed-branches", c.AllowedBranches); err != nil {
+ return err
+ }
+ if err := validatePathGlobList("allowed-files", c.AllowedFiles); err != nil {
+ return err
+ }
+ if err := validatePathGlobList("protected-files", c.ProtectedFiles); err != nil {
+ return err
+ }
+
+ return nil
+}
+
// validateSafeOutputsAllowWorkflows validates that allow-workflows: true requires
// a GitHub App to be configured in safe-outputs.github-app. The workflows permission
// is a GitHub App-only permission and cannot be granted via GITHUB_TOKEN.
diff --git a/pkg/workflow/safe_outputs_validation_config.go b/pkg/workflow/safe_outputs_validation_config.go
index 88751ca7eb..0afa39bb73 100644
--- a/pkg/workflow/safe_outputs_validation_config.go
+++ b/pkg/workflow/safe_outputs_validation_config.go
@@ -165,6 +165,16 @@ var ValidationConfig = map[string]TypeValidationConfig{
"repo": {Type: "string", MaxLength: 256}, // Optional: target repository in format "owner/repo"
},
},
+ "merge_pull_request": {
+ DefaultMax: 1,
+ Fields: map[string]FieldValidation{
+ "pull_request_number": {IssueOrPRNumber: true},
+ "merge_method": {Type: "string", Enum: []string{"merge", "squash", "rebase"}},
+ "commit_title": {Type: "string", Sanitize: true, MaxLength: 256},
+ "commit_message": {Type: "string", Sanitize: true, MaxLength: MaxBodyLength},
+ "repo": {Type: "string", MaxLength: 256}, // Optional: target repository in format "owner/repo"
+ },
+ },
"push_to_pull_request_branch": {
DefaultMax: 1,
Fields: map[string]FieldValidation{
diff --git a/pkg/workflow/safe_outputs_validation_merge_pull_request_test.go b/pkg/workflow/safe_outputs_validation_merge_pull_request_test.go
new file mode 100644
index 0000000000..3552b2ea6e
--- /dev/null
+++ b/pkg/workflow/safe_outputs_validation_merge_pull_request_test.go
@@ -0,0 +1,59 @@
+//go:build !integration
+
+package workflow
+
+import (
+ "testing"
+
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+)
+
+func TestValidateSafeOutputsMergePullRequestLabelValidation(t *testing.T) {
+ tests := []struct {
+ name string
+ config *SafeOutputsConfig
+ wantErr string
+ }{
+ {
+ name: "empty required-labels entry fails",
+ config: &SafeOutputsConfig{
+ MergePullRequest: &MergePullRequestConfig{
+ RequiredLabels: []string{"safe-to-merge", " "},
+ },
+ },
+ wantErr: "safe-outputs.merge-pull-request.required-labels[1] cannot be empty",
+ },
+ {
+ name: "empty allowed-labels entry fails",
+ config: &SafeOutputsConfig{
+ MergePullRequest: &MergePullRequestConfig{
+ AllowedLabels: []string{"release", ""},
+ },
+ },
+ wantErr: "safe-outputs.merge-pull-request.allowed-labels[1] cannot be empty",
+ },
+ {
+ name: "non-empty labels pass",
+ config: &SafeOutputsConfig{
+ MergePullRequest: &MergePullRequestConfig{
+ RequiredLabels: []string{"safe-to-merge"},
+ AllowedLabels: []string{"release", "automerge"},
+ },
+ },
+ wantErr: "",
+ },
+ }
+
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ err := validateSafeOutputsMergePullRequest(tt.config)
+ if tt.wantErr == "" {
+ assert.NoError(t, err, "expected merge-pull-request label validation to pass")
+ return
+ }
+ require.Error(t, err, "expected merge-pull-request label validation to fail")
+ assert.Contains(t, err.Error(), tt.wantErr, "expected validation error to include field-specific message")
+ })
+ }
+}