From 465b20e0d9d15b825d3ab9048036b660f26a489b Mon Sep 17 00:00:00 2001
From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com>
Date: Tue, 7 Apr 2026 01:47:37 +0000
Subject: [PATCH 01/23] Initial plan: add upload_artifact safe output type
Agent-Logs-Url: https://github.com/github/gh-aw/sessions/06153ed3-a241-400b-9414-3dc304516475
Co-authored-by: pelikhan <4175913+pelikhan@users.noreply.github.com>
---
.github/workflows/smoke-call-workflow.lock.yml | 1 +
1 file changed, 1 insertion(+)
diff --git a/.github/workflows/smoke-call-workflow.lock.yml b/.github/workflows/smoke-call-workflow.lock.yml
index ecd0e36a4e0..3d5fa1f6f9a 100644
--- a/.github/workflows/smoke-call-workflow.lock.yml
+++ b/.github/workflows/smoke-call-workflow.lock.yml
@@ -863,6 +863,7 @@ jobs:
needs: safe_outputs
if: needs.safe_outputs.outputs.call_workflow_name == 'smoke-workflow-call'
permissions:
+ actions: read
contents: read
discussions: write
issues: write
From ff14be6987cc64a92a31f7f0be9b957733bc1760 Mon Sep 17 00:00:00 2001
From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com>
Date: Tue, 7 Apr 2026 02:18:04 +0000
Subject: [PATCH 02/23] feat: add upload_artifact safe output type with native
GitHub Actions artifact support
Agent-Logs-Url: https://github.com/github/gh-aw/sessions/06153ed3-a241-400b-9414-3dc304516475
Co-authored-by: pelikhan <4175913+pelikhan@users.noreply.github.com>
---
.../setup/js/safe_output_handler_manager.cjs | 4 +-
actions/setup/js/upload_artifact.cjs | 427 ++++++++++++++++++
actions/setup/js/upload_artifact.test.cjs | 374 +++++++++++++++
pkg/workflow/compiler_safe_output_jobs.go | 18 +
pkg/workflow/compiler_safe_outputs_config.go | 31 ++
pkg/workflow/compiler_types.go | 1 +
pkg/workflow/compiler_yaml_main_job.go | 5 +
pkg/workflow/js/safe_outputs_tools.json | 52 +++
pkg/workflow/publish_artifacts.go | 361 +++++++++++++++
pkg/workflow/publish_artifacts_test.go | 314 +++++++++++++
pkg/workflow/safe_outputs_config.go | 6 +
pkg/workflow/safe_outputs_state.go | 1 +
.../safe_outputs_tools_computation.go | 3 +
.../basic-copilot.golden | 1 +
.../with-imports.golden | 1 +
15 files changed, 1597 insertions(+), 2 deletions(-)
create mode 100644 actions/setup/js/upload_artifact.cjs
create mode 100644 actions/setup/js/upload_artifact.test.cjs
create mode 100644 pkg/workflow/publish_artifacts.go
create mode 100644 pkg/workflow/publish_artifacts_test.go
diff --git a/actions/setup/js/safe_output_handler_manager.cjs b/actions/setup/js/safe_output_handler_manager.cjs
index 05ea9f2cef7..bc11c8758aa 100644
--- a/actions/setup/js/safe_output_handler_manager.cjs
+++ b/actions/setup/js/safe_output_handler_manager.cjs
@@ -80,10 +80,10 @@ const HANDLER_MAP = {
* Message types handled by standalone steps (not through the handler manager)
* These types should not trigger warnings when skipped by the handler manager
*
- * Standalone types: upload_asset, noop
+ * Standalone types: upload_asset, upload_artifact, noop
* - Have dedicated processing steps with specialized logic
*/
-const STANDALONE_STEP_TYPES = new Set(["upload_asset", "noop"]);
+const STANDALONE_STEP_TYPES = new Set(["upload_asset", "upload_artifact", "noop"]);
/**
* Code-push safe output types that must succeed before remaining outputs are processed.
diff --git a/actions/setup/js/upload_artifact.cjs b/actions/setup/js/upload_artifact.cjs
new file mode 100644
index 00000000000..9e6ace0a243
--- /dev/null
+++ b/actions/setup/js/upload_artifact.cjs
@@ -0,0 +1,427 @@
+// @ts-check
+///
+
+/**
+ * upload_artifact handler
+ *
+ * Validates and stages artifact upload requests emitted by the model via the upload_artifact
+ * safe output tool. The model must have already copied the files it wants to upload to
+ * /tmp/gh-aw/safeoutputs/upload-artifacts/ before calling the tool.
+ *
+ * This handler:
+ * 1. Reads upload_artifact records from agent output.
+ * 2. Validates each request against the workflow's policy configuration.
+ * 3. Resolves the requested files (path or filter-based) from the staging directory.
+ * 4. Copies approved files into per-slot directories under /tmp/gh-aw/upload-artifacts/slot_N/.
+ * 5. Sets step outputs so the wrapping job's actions/upload-artifact steps can run conditionally.
+ * 6. Generates a temporary artifact ID for each slot.
+ *
+ * Environment variables consumed (set by the Go job builder):
+ * GH_AW_ARTIFACT_MAX_UPLOADS - Max number of upload_artifact calls allowed
+ * GH_AW_ARTIFACT_DEFAULT_RETENTION_DAYS - Default retention period
+ * GH_AW_ARTIFACT_MAX_RETENTION_DAYS - Maximum retention cap
+ * GH_AW_ARTIFACT_MAX_SIZE_BYTES - Maximum total bytes per upload
+ * GH_AW_ARTIFACT_ALLOWED_PATHS - JSON array of allowed path patterns
+ * GH_AW_ARTIFACT_ALLOW_SKIP_ARCHIVE - "true" if skip_archive is permitted
+ * GH_AW_ARTIFACT_DEFAULT_SKIP_ARCHIVE - "true" if skip_archive defaults to true
+ * GH_AW_ARTIFACT_DEFAULT_IF_NO_FILES - "error" or "ignore"
+ * GH_AW_ARTIFACT_FILTERS_INCLUDE - JSON array of default include patterns
+ * GH_AW_ARTIFACT_FILTERS_EXCLUDE - JSON array of default exclude patterns
+ * GH_AW_AGENT_OUTPUT - Path to agent output file
+ * GH_AW_SAFE_OUTPUTS_STAGED - "true" for staged/dry-run mode
+ */
+
+const fs = require("fs");
+const path = require("path");
+const crypto = require("crypto");
+const { loadAgentOutput } = require("./load_agent_output.cjs");
+const { getErrorMessage } = require("./error_helpers.cjs");
+const { globPatternToRegex } = require("./glob_pattern_helpers.cjs");
+const { ERR_CONFIG, ERR_SYSTEM, ERR_VALIDATION } = require("./error_codes.cjs");
+
+/** Staging directory where the model places files to be uploaded. */
+const STAGING_DIR = "/tmp/gh-aw/safeoutputs/upload-artifacts/";
+
+/** Base directory for per-slot artifact staging used by actions/upload-artifact. */
+const SLOT_BASE_DIR = "/tmp/gh-aw/upload-artifacts/";
+
+/** Prefix for temporary artifact IDs returned to the caller. */
+const TEMP_ID_PREFIX = "tmp_artifact_";
+
+/** Path where the resolver mapping (tmpId → artifact name) is written. */
+const RESOLVER_FILE = "/tmp/gh-aw/artifact-resolver.json";
+
+/**
+ * Generate a temporary artifact ID.
+ * Format: tmp_artifact_<26 uppercase alphanumeric characters>
+ * @returns {string}
+ */
+function generateTemporaryArtifactId() {
+ const chars = "ABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789";
+ let id = TEMP_ID_PREFIX;
+ for (let i = 0; i < 26; i++) {
+ id += chars[Math.floor(Math.random() * chars.length)];
+ }
+ return id;
+}
+
+/**
+ * Parse a JSON array from an environment variable, returning an empty array on failure.
+ * @param {string|undefined} envVar
+ * @returns {string[]}
+ */
+function parseJsonArrayEnv(envVar) {
+ if (!envVar) return [];
+ try {
+ const parsed = JSON.parse(envVar);
+ return Array.isArray(parsed) ? parsed.filter(v => typeof v === "string") : [];
+ } catch {
+ return [];
+ }
+}
+
+/**
+ * Check whether a relative path matches any of the provided glob patterns.
+ * @param {string} relPath - Path relative to the staging root
+ * @param {string[]} patterns
+ * @returns {boolean}
+ */
+function matchesAnyPattern(relPath, patterns) {
+ if (patterns.length === 0) return false;
+ return patterns.some(pattern => {
+ const regex = globPatternToRegex(pattern);
+ return regex.test(relPath);
+ });
+}
+
+/**
+ * Validate that a path does not escape the staging root using traversal sequences.
+ * @param {string} filePath - Absolute path
+ * @param {string} root - Absolute root directory (must end with /)
+ * @returns {boolean}
+ */
+function isWithinRoot(filePath, root) {
+ const resolved = path.resolve(filePath);
+ const normalRoot = path.resolve(root);
+ return resolved.startsWith(normalRoot + path.sep) || resolved === normalRoot;
+}
+
+/**
+ * Recursively list all regular files under a directory.
+ * @param {string} dir - Absolute directory path
+ * @param {string} baseDir - Root used to compute relative paths
+ * @returns {string[]} Relative paths from baseDir
+ */
+function listFilesRecursive(dir, baseDir) {
+ /** @type {string[]} */
+ const files = [];
+ if (!fs.existsSync(dir)) return files;
+
+ const entries = fs.readdirSync(dir, { withFileTypes: true });
+ for (const entry of entries) {
+ const fullPath = path.join(dir, entry.name);
+ if (entry.isDirectory()) {
+ files.push(...listFilesRecursive(fullPath, baseDir));
+ } else if (entry.isFile()) {
+      // Defence in depth: Dirent.isFile() already returns false for symlinks
+      // (readdirSync does not dereference them), so this lstat is a safeguard.
+ const stat = fs.lstatSync(fullPath);
+ if (!stat.isSymbolicLink()) {
+ files.push(path.relative(baseDir, fullPath));
+ } else {
+ core.warning(`Skipping symlink: ${fullPath}`);
+ }
+ }
+ }
+ return files;
+}
+
+/**
+ * Resolve the list of files to upload for a single request.
+ * Applies: staging root → allowed-paths → request include/exclude → dedup + sort.
+ *
+ * @param {Record<string, any>} request - Parsed upload_artifact record
+ * @param {string[]} allowedPaths - Policy allowed-paths patterns
+ * @param {string[]} defaultInclude - Policy default include patterns
+ * @param {string[]} defaultExclude - Policy default exclude patterns
+ * @returns {{ files: string[], error: string|null }}
+ */
+function resolveFiles(request, allowedPaths, defaultInclude, defaultExclude) {
+ const hasMutuallyExclusive = ("path" in request ? 1 : 0) + ("filters" in request ? 1 : 0);
+ if (hasMutuallyExclusive !== 1) {
+ return { files: [], error: "exactly one of 'path' or 'filters' must be present" };
+ }
+
+ /** @type {string[]} candidateRelPaths */
+ let candidateRelPaths;
+
+ if ("path" in request) {
+ const reqPath = String(request.path);
+ // Reject absolute paths
+ if (path.isAbsolute(reqPath)) {
+ return { files: [], error: `path must be relative (staging-dir-relative), got absolute path: ${reqPath}` };
+ }
+ // Reject traversal
+ const resolved = path.resolve(STAGING_DIR, reqPath);
+ if (!isWithinRoot(resolved, STAGING_DIR)) {
+ return { files: [], error: `path must not traverse outside staging directory: ${reqPath}` };
+ }
+ if (!fs.existsSync(resolved)) {
+ return { files: [], error: `path does not exist in staging directory: ${reqPath}` };
+ }
+ const stat = fs.lstatSync(resolved);
+ if (stat.isSymbolicLink()) {
+ return { files: [], error: `symlinks are not allowed: ${reqPath}` };
+ }
+ if (stat.isDirectory()) {
+ candidateRelPaths = listFilesRecursive(resolved, STAGING_DIR);
+ } else {
+ candidateRelPaths = [reqPath];
+ }
+ } else {
+ // Filter-based selection: start from all files in the staging directory.
+ const allFiles = listFilesRecursive(STAGING_DIR, STAGING_DIR);
+ const requestFilters = request.filters || {};
+ const include = /** @type {string[]} */ requestFilters.include || defaultInclude;
+ const exclude = /** @type {string[]} */ requestFilters.exclude || defaultExclude;
+
+ candidateRelPaths = allFiles.filter(f => {
+ if (include.length > 0 && !matchesAnyPattern(f, include)) return false;
+ if (exclude.length > 0 && matchesAnyPattern(f, exclude)) return false;
+ return true;
+ });
+ }
+
+ // Apply allowed-paths policy filter.
+ if (allowedPaths.length > 0) {
+ candidateRelPaths = candidateRelPaths.filter(f => matchesAnyPattern(f, allowedPaths));
+ }
+
+ // Deduplicate and sort deterministically.
+ const unique = Array.from(new Set(candidateRelPaths)).sort();
+ return { files: unique, error: null };
+}
+
+/**
+ * Validate skip_archive constraints:
+ * - skip_archive may only be used for a single file.
+ * - directories are rejected (already expanded to file list).
+ *
+ * @param {boolean} skipArchive
+ * @param {string[]} files
+ * @returns {string|null} Error message or null
+ */
+function validateSkipArchive(skipArchive, files) {
+ if (!skipArchive) return null;
+ if (files.length !== 1) {
+ return `skip_archive=true requires exactly one selected file, but ${files.length} files matched`;
+ }
+ return null;
+}
+
+/**
+ * Compute total size of the given file list (relative paths from STAGING_DIR).
+ * @param {string[]} files
+ * @returns {number} Total size in bytes
+ */
+function computeTotalSize(files) {
+ let total = 0;
+ for (const f of files) {
+ const abs = path.join(STAGING_DIR, f);
+ try {
+ total += fs.statSync(abs).size;
+ } catch {
+ // Ignore missing files (already validated upstream).
+ }
+ }
+ return total;
+}
+
+/**
+ * Derive a sanitised artifact name from a path or a slot index.
+ * @param {Record} request
+ * @param {number} slotIndex
+ * @returns {string}
+ */
+function deriveArtifactName(request, slotIndex) {
+ if (typeof request.name === "string" && request.name.trim()) {
+ return request.name.trim().replace(/[^a-zA-Z0-9._\-]/g, "-");
+ }
+ if ("path" in request && typeof request.path === "string") {
+ const base = path.basename(String(request.path)).replace(/[^a-zA-Z0-9._\-]/g, "-");
+ if (base) return base;
+ }
+ return `artifact-slot-${slotIndex}`;
+}
+
+/**
+ * Clamp a retention value between 1 and the policy maximum.
+ * @param {number|undefined} requested
+ * @param {number} defaultDays
+ * @param {number} maxDays
+ * @returns {number}
+ */
+function clampRetention(requested, defaultDays, maxDays) {
+ if (typeof requested !== "number" || requested < 1) return defaultDays;
+ return Math.min(requested, maxDays);
+}
+
+/**
+ * Copy resolved files from STAGING_DIR into the per-slot directory.
+ * @param {string[]} files - Relative paths from STAGING_DIR
+ * @param {string} slotDir - Absolute target slot directory
+ */
+function stageFilesToSlot(files, slotDir) {
+ fs.mkdirSync(slotDir, { recursive: true });
+ for (const relPath of files) {
+ const src = path.join(STAGING_DIR, relPath);
+ const dest = path.join(slotDir, relPath);
+ fs.mkdirSync(path.dirname(dest), { recursive: true });
+ fs.copyFileSync(src, dest);
+ }
+}
+
+async function main() {
+ const isStaged = process.env.GH_AW_SAFE_OUTPUTS_STAGED === "true";
+
+ // Load policy configuration from environment variables.
+ const maxUploads = parseInt(process.env.GH_AW_ARTIFACT_MAX_UPLOADS || "1", 10) || 1;
+ const defaultRetentionDays = parseInt(process.env.GH_AW_ARTIFACT_DEFAULT_RETENTION_DAYS || "7", 10) || 7;
+ const maxRetentionDays = parseInt(process.env.GH_AW_ARTIFACT_MAX_RETENTION_DAYS || "30", 10) || 30;
+ const maxSizeBytes = parseInt(process.env.GH_AW_ARTIFACT_MAX_SIZE_BYTES || "104857600", 10) || 104857600;
+ const allowSkipArchive = process.env.GH_AW_ARTIFACT_ALLOW_SKIP_ARCHIVE === "true";
+ const defaultSkipArchive = process.env.GH_AW_ARTIFACT_DEFAULT_SKIP_ARCHIVE === "true";
+ const defaultIfNoFiles = process.env.GH_AW_ARTIFACT_DEFAULT_IF_NO_FILES || "error";
+ const allowedPaths = parseJsonArrayEnv(process.env.GH_AW_ARTIFACT_ALLOWED_PATHS);
+ const filtersInclude = parseJsonArrayEnv(process.env.GH_AW_ARTIFACT_FILTERS_INCLUDE);
+ const filtersExclude = parseJsonArrayEnv(process.env.GH_AW_ARTIFACT_FILTERS_EXCLUDE);
+
+ core.info(`upload_artifact handler: max_uploads=${maxUploads}, default_retention=${defaultRetentionDays}, max_retention=${maxRetentionDays}`);
+ core.info(`Allowed paths: ${allowedPaths.length > 0 ? allowedPaths.join(", ") : "(none – all staging files allowed)"}`);
+
+ // Load agent output to find upload_artifact records.
+ const result = loadAgentOutput();
+ if (!result.success) {
+ core.info("No agent output found, skipping upload_artifact processing");
+ core.setOutput("artifact_count", "0");
+ return;
+ }
+
+ const uploadRequests = result.items.filter(/** @param {any} item */ item => item.type === "upload_artifact");
+
+ if (uploadRequests.length === 0) {
+ core.info("No upload_artifact records in agent output");
+ core.setOutput("artifact_count", "0");
+ return;
+ }
+
+ core.info(`Found ${uploadRequests.length} upload_artifact request(s)`);
+
+ // Enforce max-uploads policy.
+ if (uploadRequests.length > maxUploads) {
+ core.setFailed(`${ERR_VALIDATION}: upload_artifact: ${uploadRequests.length} requests exceed max-uploads policy (${maxUploads}). ` + `Reduce the number of upload_artifact calls or raise max-uploads in workflow configuration.`);
+ return;
+ }
+
+ if (!fs.existsSync(STAGING_DIR)) {
+ core.warning(`Staging directory ${STAGING_DIR} does not exist. Did the model copy files there before calling upload_artifact?`);
+ fs.mkdirSync(STAGING_DIR, { recursive: true });
+ }
+
+  /** @type {Record<string, string>} resolver: tmpId → artifact name */
+ const resolver = {};
+
+ let successfulUploads = 0;
+
+ for (let i = 0; i < uploadRequests.length; i++) {
+ const request = uploadRequests[i];
+ core.info(`Processing upload_artifact request ${i + 1}/${uploadRequests.length}`);
+
+ // Resolve skip_archive.
+ const skipArchive = typeof request.skip_archive === "boolean" ? request.skip_archive : defaultSkipArchive;
+ if (skipArchive && !allowSkipArchive) {
+ core.setFailed(`${ERR_VALIDATION}: upload_artifact request ${i + 1}: skip_archive=true is not permitted. ` + `Enable it with allow.skip-archive: true in workflow configuration.`);
+ return;
+ }
+
+ // Resolve files.
+ const { files, error: resolveError } = resolveFiles(request, allowedPaths, filtersInclude, filtersExclude);
+ if (resolveError) {
+ core.setFailed(`${ERR_VALIDATION}: upload_artifact request ${i + 1}: ${resolveError}`);
+ return;
+ }
+
+ if (files.length === 0) {
+ if (defaultIfNoFiles === "ignore") {
+ core.warning(`upload_artifact request ${i + 1}: no files matched, skipping (if-no-files=ignore)`);
+ continue;
+ } else {
+ core.setFailed(`${ERR_VALIDATION}: upload_artifact request ${i + 1}: no files matched the selection criteria. ` + `Check allowed-paths, filters, or use defaults.if-no-files: ignore to skip empty uploads.`);
+ return;
+ }
+ }
+
+ // Validate skip_archive file-count constraint.
+ const skipArchiveError = validateSkipArchive(skipArchive, files);
+ if (skipArchiveError) {
+ core.setFailed(`${ERR_VALIDATION}: upload_artifact request ${i + 1}: ${skipArchiveError}`);
+ return;
+ }
+
+ // Validate total size.
+ const totalSize = computeTotalSize(files);
+ if (totalSize > maxSizeBytes) {
+ core.setFailed(`${ERR_VALIDATION}: upload_artifact request ${i + 1}: total file size ${totalSize} bytes exceeds ` + `max-size-bytes limit of ${maxSizeBytes} bytes.`);
+ return;
+ }
+
+ // Compute retention days.
+ const retentionDays = clampRetention(typeof request.retention_days === "number" ? request.retention_days : undefined, defaultRetentionDays, maxRetentionDays);
+
+ // Derive artifact name and generate temporary ID.
+ const artifactName = deriveArtifactName(request, i);
+ const tmpId = generateTemporaryArtifactId();
+ resolver[tmpId] = artifactName;
+
+ core.info(`Slot ${i}: artifact="${artifactName}", files=${files.length}, size=${totalSize}B, ` + `retention=${retentionDays}d, skip_archive=${skipArchive}, tmp_id=${tmpId}`);
+
+ if (!isStaged) {
+ // Stage files into the per-slot directory for the actions/upload-artifact step.
+ const slotDir = path.join(SLOT_BASE_DIR, `slot_${i}`);
+ stageFilesToSlot(files, slotDir);
+ core.info(`Staged ${files.length} file(s) to ${slotDir}`);
+ } else {
+ core.info(`Staged mode: skipping file staging for slot ${i}`);
+ }
+
+ // Set step outputs for the conditional actions/upload-artifact steps in the job YAML.
+ core.setOutput(`slot_${i}_enabled`, "true");
+ core.setOutput(`slot_${i}_name`, artifactName);
+ core.setOutput(`slot_${i}_retention_days`, String(retentionDays));
+ core.setOutput(`slot_${i}_tmp_id`, tmpId);
+ core.setOutput(`slot_${i}_file_count`, String(files.length));
+ core.setOutput(`slot_${i}_size_bytes`, String(totalSize));
+
+ successfulUploads++;
+ }
+
+ // Write resolver mapping so downstream steps can resolve tmp IDs to artifact names.
+ try {
+ fs.mkdirSync(path.dirname(RESOLVER_FILE), { recursive: true });
+ fs.writeFileSync(RESOLVER_FILE, JSON.stringify(resolver, null, 2));
+ core.info(`Wrote artifact resolver mapping to ${RESOLVER_FILE}`);
+ } catch (err) {
+ core.warning(`Failed to write artifact resolver file: ${getErrorMessage(err)}`);
+ }
+
+ core.setOutput("artifact_count", String(successfulUploads));
+ core.info(`upload_artifact handler complete: ${successfulUploads} artifact(s) staged`);
+
+ if (isStaged) {
+ core.summary.addHeading("🎭 Staged Mode: Artifact Upload Preview", 2);
+ core.summary.addRaw(`Would upload **${successfulUploads}** artifact(s). Files staged at ${STAGING_DIR}.`);
+ await core.summary.write();
+ }
+}
+
+module.exports = { main };
diff --git a/actions/setup/js/upload_artifact.test.cjs b/actions/setup/js/upload_artifact.test.cjs
new file mode 100644
index 00000000000..ef59a262a9a
--- /dev/null
+++ b/actions/setup/js/upload_artifact.test.cjs
@@ -0,0 +1,374 @@
+// @ts-check
+import { describe, it, expect, beforeEach, afterEach, vi } from "vitest";
+import fs from "fs";
+import path from "path";
+import os from "os";
+import { fileURLToPath } from "url";
+
+const __filename = fileURLToPath(import.meta.url);
+const __dirname = path.dirname(__filename);
+
+const STAGING_DIR = "/tmp/gh-aw/safeoutputs/upload-artifacts/";
+const SLOT_BASE_DIR = "/tmp/gh-aw/upload-artifacts/";
+const RESOLVER_FILE = "/tmp/gh-aw/artifact-resolver.json";
+
+describe("upload_artifact.cjs", () => {
+ let mockCore;
+ let agentOutputPath;
+ let originalEnv;
+
+ /**
+ * @param {object} data
+ */
+ function writeAgentOutput(data) {
+ agentOutputPath = path.join(os.tmpdir(), `test_upload_artifact_${Date.now()}_${Math.random().toString(36).slice(2)}.json`);
+ fs.writeFileSync(agentOutputPath, JSON.stringify(data));
+ process.env.GH_AW_AGENT_OUTPUT = agentOutputPath;
+ }
+
+ /**
+ * @param {string} relPath
+ * @param {string} content
+ */
+ function writeStaging(relPath, content = "test content") {
+ const fullPath = path.join(STAGING_DIR, relPath);
+ fs.mkdirSync(path.dirname(fullPath), { recursive: true });
+ fs.writeFileSync(fullPath, content);
+ }
+
+ /**
+   * @returns {Promise<void>}
+ */
+ async function runMain() {
+ const scriptText = fs.readFileSync(path.join(__dirname, "upload_artifact.cjs"), "utf8");
+ global.core = mockCore;
+ await eval(`(async () => { ${scriptText}; await main(); })()`);
+ }
+
+ beforeEach(() => {
+ vi.clearAllMocks();
+
+ mockCore = {
+ info: vi.fn(),
+ warning: vi.fn(),
+ error: vi.fn(),
+ setOutput: vi.fn(),
+ setFailed: vi.fn(),
+ summary: {
+ addHeading: vi.fn().mockReturnThis(),
+ addRaw: vi.fn().mockReturnThis(),
+ write: vi.fn().mockResolvedValue(undefined),
+ },
+ };
+
+ originalEnv = { ...process.env };
+
+ // Set reasonable defaults
+ process.env.GH_AW_ARTIFACT_MAX_UPLOADS = "3";
+ process.env.GH_AW_ARTIFACT_DEFAULT_RETENTION_DAYS = "7";
+ process.env.GH_AW_ARTIFACT_MAX_RETENTION_DAYS = "30";
+ process.env.GH_AW_ARTIFACT_MAX_SIZE_BYTES = "104857600";
+ delete process.env.GH_AW_ARTIFACT_ALLOWED_PATHS;
+ delete process.env.GH_AW_ARTIFACT_ALLOW_SKIP_ARCHIVE;
+ delete process.env.GH_AW_ARTIFACT_DEFAULT_SKIP_ARCHIVE;
+ delete process.env.GH_AW_ARTIFACT_DEFAULT_IF_NO_FILES;
+ delete process.env.GH_AW_ARTIFACT_FILTERS_INCLUDE;
+ delete process.env.GH_AW_ARTIFACT_FILTERS_EXCLUDE;
+ delete process.env.GH_AW_SAFE_OUTPUTS_STAGED;
+
+ // Ensure staging dir exists and is clean
+ if (fs.existsSync(STAGING_DIR)) {
+ fs.rmSync(STAGING_DIR, { recursive: true });
+ }
+ fs.mkdirSync(STAGING_DIR, { recursive: true });
+
+ // Clean slot dir
+ if (fs.existsSync(SLOT_BASE_DIR)) {
+ fs.rmSync(SLOT_BASE_DIR, { recursive: true });
+ }
+
+ // Clean resolver file
+ if (fs.existsSync(RESOLVER_FILE)) {
+ fs.unlinkSync(RESOLVER_FILE);
+ }
+ });
+
+ afterEach(() => {
+ // Restore env
+ process.env = originalEnv;
+
+ if (agentOutputPath && fs.existsSync(agentOutputPath)) {
+ fs.unlinkSync(agentOutputPath);
+ }
+ });
+
+ describe("no agent output", () => {
+ it("sets artifact_count to 0 when no agent output is present", async () => {
+ delete process.env.GH_AW_AGENT_OUTPUT;
+ await runMain();
+ expect(mockCore.setOutput).toHaveBeenCalledWith("artifact_count", "0");
+ expect(mockCore.setFailed).not.toHaveBeenCalled();
+ });
+ });
+
+ describe("no upload_artifact records", () => {
+ it("sets artifact_count to 0 when output has no upload_artifact items", async () => {
+ writeAgentOutput({ items: [{ type: "create_issue", title: "test" }] });
+ await runMain();
+ expect(mockCore.setOutput).toHaveBeenCalledWith("artifact_count", "0");
+ expect(mockCore.setFailed).not.toHaveBeenCalled();
+ });
+ });
+
+ describe("path-based upload", () => {
+ it("stages a single file and sets slot outputs", async () => {
+ writeStaging("report.json", '{"result": "ok"}');
+ writeAgentOutput({
+ items: [{ type: "upload_artifact", path: "report.json", retention_days: 14 }],
+ });
+
+ await runMain();
+
+ expect(mockCore.setFailed).not.toHaveBeenCalled();
+ expect(mockCore.setOutput).toHaveBeenCalledWith("slot_0_enabled", "true");
+ expect(mockCore.setOutput).toHaveBeenCalledWith("slot_0_retention_days", "14");
+ expect(mockCore.setOutput).toHaveBeenCalledWith("artifact_count", "1");
+
+ // Verify the file was staged into slot_0.
+ const slotFile = path.join(SLOT_BASE_DIR, "slot_0", "report.json");
+ expect(fs.existsSync(slotFile)).toBe(true);
+ });
+
+ it("clamps retention days to max-retention-days", async () => {
+ writeStaging("report.json");
+ writeAgentOutput({
+ items: [{ type: "upload_artifact", path: "report.json", retention_days: 999 }],
+ });
+
+ await runMain();
+
+ expect(mockCore.setFailed).not.toHaveBeenCalled();
+ expect(mockCore.setOutput).toHaveBeenCalledWith("slot_0_retention_days", "30");
+ });
+
+ it("uses default retention when retention_days is absent", async () => {
+ writeStaging("report.json");
+ writeAgentOutput({ items: [{ type: "upload_artifact", path: "report.json" }] });
+
+ await runMain();
+
+ expect(mockCore.setOutput).toHaveBeenCalledWith("slot_0_retention_days", "7");
+ });
+ });
+
+ describe("validation errors", () => {
+ it("fails when both path and filters are present", async () => {
+ writeStaging("report.json");
+ writeAgentOutput({
+ items: [
+ {
+ type: "upload_artifact",
+ path: "report.json",
+ filters: { include: ["**/*.json"] },
+ },
+ ],
+ });
+
+ await runMain();
+ expect(mockCore.setFailed).toHaveBeenCalledWith(expect.stringContaining("exactly one of 'path' or 'filters'"));
+ });
+
+ it("fails when neither path nor filters are present", async () => {
+ writeAgentOutput({ items: [{ type: "upload_artifact", retention_days: 7 }] });
+
+ await runMain();
+ expect(mockCore.setFailed).toHaveBeenCalledWith(expect.stringContaining("exactly one of 'path' or 'filters'"));
+ });
+
+ it("fails when path traverses outside staging dir", async () => {
+ writeAgentOutput({ items: [{ type: "upload_artifact", path: "../etc/passwd" }] });
+
+ await runMain();
+ expect(mockCore.setFailed).toHaveBeenCalledWith(expect.stringContaining("must not traverse outside staging directory"));
+ });
+
+ it("fails when absolute path is provided", async () => {
+ writeAgentOutput({ items: [{ type: "upload_artifact", path: "/etc/passwd" }] });
+
+ await runMain();
+ expect(mockCore.setFailed).toHaveBeenCalledWith(expect.stringContaining("must be relative"));
+ });
+
+ it("fails when path does not exist in staging dir", async () => {
+ writeAgentOutput({ items: [{ type: "upload_artifact", path: "nonexistent.json" }] });
+
+ await runMain();
+ expect(mockCore.setFailed).toHaveBeenCalledWith(expect.stringContaining("does not exist in staging directory"));
+ });
+
+ it("fails when max-uploads is exceeded", async () => {
+ process.env.GH_AW_ARTIFACT_MAX_UPLOADS = "1";
+ writeStaging("a.json");
+ writeStaging("b.json");
+ writeAgentOutput({
+ items: [
+ { type: "upload_artifact", path: "a.json" },
+ { type: "upload_artifact", path: "b.json" },
+ ],
+ });
+
+ await runMain();
+ expect(mockCore.setFailed).toHaveBeenCalledWith(expect.stringContaining("exceed max-uploads policy"));
+ });
+
+ it("fails when skip_archive is requested but not allowed", async () => {
+ writeStaging("app.bin");
+ writeAgentOutput({ items: [{ type: "upload_artifact", path: "app.bin", skip_archive: true }] });
+
+ await runMain();
+ expect(mockCore.setFailed).toHaveBeenCalledWith(expect.stringContaining("skip_archive=true is not permitted"));
+ });
+
+ it("fails when skip_archive=true with multiple files", async () => {
+ process.env.GH_AW_ARTIFACT_ALLOW_SKIP_ARCHIVE = "true";
+ writeStaging("output/a.json");
+ writeStaging("output/b.json");
+ writeAgentOutput({
+ items: [
+ {
+ type: "upload_artifact",
+ // Use "output/**" which matches output/a.json and output/b.json
+ filters: { include: ["output/**"] },
+ skip_archive: true,
+ },
+ ],
+ });
+
+ await runMain();
+ expect(mockCore.setFailed).toHaveBeenCalledWith(expect.stringContaining("skip_archive=true requires exactly one selected file"));
+ });
+ });
+
+ describe("skip_archive allowed", () => {
+ it("succeeds with skip_archive=true and a single file", async () => {
+ process.env.GH_AW_ARTIFACT_ALLOW_SKIP_ARCHIVE = "true";
+ writeStaging("app.bin", "binary data");
+ writeAgentOutput({ items: [{ type: "upload_artifact", path: "app.bin", skip_archive: true }] });
+
+ await runMain();
+
+ expect(mockCore.setFailed).not.toHaveBeenCalled();
+ expect(mockCore.setOutput).toHaveBeenCalledWith("slot_0_enabled", "true");
+ });
+ });
+
+ describe("filter-based upload", () => {
+ it("selects files matching include pattern", async () => {
+ writeStaging("reports/daily/summary.json", "{}");
+ writeStaging("reports/weekly/summary.json", "{}");
+ writeStaging("reports/private/secret.json", "{}");
+ writeAgentOutput({
+ items: [
+ {
+ type: "upload_artifact",
+ filters: {
+ include: ["reports/**/*.json"],
+ exclude: ["reports/private/**"],
+ },
+ },
+ ],
+ });
+
+ await runMain();
+
+ expect(mockCore.setFailed).not.toHaveBeenCalled();
+ expect(mockCore.setOutput).toHaveBeenCalledWith("slot_0_enabled", "true");
+ expect(mockCore.setOutput).toHaveBeenCalledWith("slot_0_file_count", "2");
+ });
+
+ it("handles no-files with if-no-files=ignore", async () => {
+ process.env.GH_AW_ARTIFACT_DEFAULT_IF_NO_FILES = "ignore";
+ writeAgentOutput({
+ items: [
+ {
+ type: "upload_artifact",
+ filters: { include: ["nonexistent/**"] },
+ },
+ ],
+ });
+
+ await runMain();
+
+ expect(mockCore.setFailed).not.toHaveBeenCalled();
+ expect(mockCore.setOutput).toHaveBeenCalledWith("artifact_count", "0");
+ });
+
+ it("fails when no files match and if-no-files=error (default)", async () => {
+ writeAgentOutput({
+ items: [
+ {
+ type: "upload_artifact",
+ filters: { include: ["nonexistent/**"] },
+ },
+ ],
+ });
+
+ await runMain();
+
+ expect(mockCore.setFailed).toHaveBeenCalledWith(expect.stringContaining("no files matched"));
+ });
+ });
+
+ describe("allowed-paths policy", () => {
+ it("filters out files not in allowed-paths", async () => {
+ process.env.GH_AW_ARTIFACT_ALLOWED_PATHS = JSON.stringify(["dist/**"]);
+ writeStaging("dist/app.js");
+ writeStaging("secret.env");
+ writeAgentOutput({
+ items: [
+ {
+ type: "upload_artifact",
+ filters: { include: ["**"] },
+ },
+ ],
+ });
+
+ await runMain();
+
+ expect(mockCore.setFailed).not.toHaveBeenCalled();
+ expect(mockCore.setOutput).toHaveBeenCalledWith("slot_0_file_count", "1");
+ });
+ });
+
+ describe("staged mode", () => {
+ it("skips file staging but sets outputs in staged mode", async () => {
+ process.env.GH_AW_SAFE_OUTPUTS_STAGED = "true";
+ writeStaging("report.json");
+ writeAgentOutput({ items: [{ type: "upload_artifact", path: "report.json" }] });
+
+ await runMain();
+
+ expect(mockCore.setFailed).not.toHaveBeenCalled();
+ expect(mockCore.setOutput).toHaveBeenCalledWith("slot_0_enabled", "true");
+
+ // In staged mode, files are NOT copied to the slot directory.
+ const slotFile = path.join(SLOT_BASE_DIR, "slot_0", "report.json");
+ expect(fs.existsSync(slotFile)).toBe(false);
+ });
+ });
+
+ describe("resolver file", () => {
+ it("writes a resolver mapping with temporary IDs", async () => {
+ writeStaging("report.json");
+ writeAgentOutput({ items: [{ type: "upload_artifact", path: "report.json" }] });
+
+ await runMain();
+
+ expect(fs.existsSync(RESOLVER_FILE)).toBe(true);
+ const resolver = JSON.parse(fs.readFileSync(RESOLVER_FILE, "utf8"));
+ const keys = Object.keys(resolver);
+ expect(keys.length).toBe(1);
+ expect(keys[0]).toMatch(/^tmp_artifact_[A-Z0-9]{26}$/);
+ });
+ });
+});
diff --git a/pkg/workflow/compiler_safe_output_jobs.go b/pkg/workflow/compiler_safe_output_jobs.go
index f1d6d83c436..dabfa512dfa 100644
--- a/pkg/workflow/compiler_safe_output_jobs.go
+++ b/pkg/workflow/compiler_safe_output_jobs.go
@@ -89,6 +89,24 @@ func (c *Compiler) buildSafeOutputsJobs(data *WorkflowData, jobName, markdownPat
compilerSafeOutputJobsLog.Printf("Added separate upload_assets job")
}
+ // Build upload_artifact job as a separate job if configured.
+ // This is separate from the consolidated safe_outputs job because it needs to:
+ // 1. Download the staging artifact produced by the main job
+ // 2. Validate and filter the requested files
+ // 3. Upload each approved set of files as a proper GitHub Actions artifact
+ if data.SafeOutputs != nil && data.SafeOutputs.UploadArtifact != nil {
+ compilerSafeOutputJobsLog.Print("Building separate upload_artifact job")
+ uploadArtifactJob, err := c.buildUploadArtifactJob(data, jobName, threatDetectionEnabled)
+ if err != nil {
+ return fmt.Errorf("failed to build upload_artifact job: %w", err)
+ }
+ if err := c.jobManager.AddJob(uploadArtifactJob); err != nil {
+ return fmt.Errorf("failed to add upload_artifact job: %w", err)
+ }
+ safeOutputJobNames = append(safeOutputJobNames, uploadArtifactJob.Name)
+ compilerSafeOutputJobsLog.Printf("Added separate upload_artifact job")
+ }
+
// Build upload_code_scanning_sarif job as a separate job if create-code-scanning-alert is configured.
// This job runs after safe_outputs and only when the safe_outputs job exported a SARIF file.
// It is separate to avoid the checkout step (needed to restore HEAD to github.sha) from
diff --git a/pkg/workflow/compiler_safe_outputs_config.go b/pkg/workflow/compiler_safe_outputs_config.go
index 4eb757e46ec..4bc1638fb2e 100644
--- a/pkg/workflow/compiler_safe_outputs_config.go
+++ b/pkg/workflow/compiler_safe_outputs_config.go
@@ -768,6 +768,37 @@ var handlerRegistry = map[string]handlerBuilder{
AddIfTrue("staged", c.Staged).
Build()
},
+ "upload_artifact": func(cfg *SafeOutputsConfig) map[string]any {
+ if cfg.UploadArtifact == nil {
+ return nil
+ }
+ c := cfg.UploadArtifact
+ b := newHandlerConfigBuilder().
+ AddTemplatableInt("max", c.Max).
+ AddIfPositive("max-uploads", c.MaxUploads).
+ AddIfPositive("default-retention-days", c.DefaultRetentionDays).
+ AddIfPositive("max-retention-days", c.MaxRetentionDays).
+ AddIfNotEmpty("github-token", c.GitHubToken).
+ AddIfTrue("staged", c.Staged)
+ if c.MaxSizeBytes > 0 {
+ b = b.AddDefault("max-size-bytes", c.MaxSizeBytes)
+ }
+ if len(c.AllowedPaths) > 0 {
+ b = b.AddStringSlice("allowed-paths", c.AllowedPaths)
+ }
+ if c.Allow != nil && c.Allow.SkipArchive {
+ b = b.AddIfTrue("allow-skip-archive", true)
+ }
+ if c.Defaults != nil {
+ if c.Defaults.SkipArchive {
+ b = b.AddIfTrue("default-skip-archive", true)
+ }
+ if c.Defaults.IfNoFiles != "" {
+ b = b.AddIfNotEmpty("default-if-no-files", c.Defaults.IfNoFiles)
+ }
+ }
+ return b.Build()
+ },
"autofix_code_scanning_alert": func(cfg *SafeOutputsConfig) map[string]any {
if cfg.AutofixCodeScanningAlert == nil {
return nil
diff --git a/pkg/workflow/compiler_types.go b/pkg/workflow/compiler_types.go
index 91fa861f4ab..5c430b4f948 100644
--- a/pkg/workflow/compiler_types.go
+++ b/pkg/workflow/compiler_types.go
@@ -472,6 +472,7 @@ type SafeOutputsConfig struct {
UpdatePullRequests *UpdatePullRequestsConfig `yaml:"update-pull-request,omitempty"` // Update GitHub pull request title/body
PushToPullRequestBranch *PushToPullRequestBranchConfig `yaml:"push-to-pull-request-branch,omitempty"`
UploadAssets *UploadAssetsConfig `yaml:"upload-asset,omitempty"`
+ UploadArtifact *UploadArtifactConfig `yaml:"upload-artifact,omitempty"` // Upload files as run-scoped GitHub Actions artifacts
UpdateRelease *UpdateReleaseConfig `yaml:"update-release,omitempty"` // Update GitHub release descriptions
CreateAgentSessions *CreateAgentSessionConfig `yaml:"create-agent-session,omitempty"` // Create GitHub Copilot coding agent sessions
UpdateProjects *UpdateProjectConfig `yaml:"update-project,omitempty"` // Smart project board management (create/add/update)
diff --git a/pkg/workflow/compiler_yaml_main_job.go b/pkg/workflow/compiler_yaml_main_job.go
index 37b7bb5d974..e9ca525be53 100644
--- a/pkg/workflow/compiler_yaml_main_job.go
+++ b/pkg/workflow/compiler_yaml_main_job.go
@@ -513,6 +513,11 @@ func (c *Compiler) generateMainJobSteps(yaml *strings.Builder, data *WorkflowDat
// This creates a separate artifact for assets that will be downloaded by upload_assets job
generateSafeOutputsAssetsArtifactUpload(yaml, data)
+ // Add safe-outputs upload-artifact staging upload (after agent execution)
+ // This creates a separate artifact for files the model staged for artifact upload,
+ // to be downloaded and processed by the upload_artifact job
+ generateSafeOutputsArtifactStagingUpload(yaml, data)
+
// Collect git patch path if safe-outputs with PR operations is configured
// NOTE: Git patch generation has been moved to the safe-outputs MCP server
// The patch is now generated when create_pull_request or push_to_pull_request_branch
diff --git a/pkg/workflow/js/safe_outputs_tools.json b/pkg/workflow/js/safe_outputs_tools.json
index 9614a6f42dd..ac0196a526d 100644
--- a/pkg/workflow/js/safe_outputs_tools.json
+++ b/pkg/workflow/js/safe_outputs_tools.json
@@ -1583,5 +1583,57 @@
},
"additionalProperties": false
}
+ },
+ {
+ "name": "upload_artifact",
+ "description": "Upload files as a run-scoped GitHub Actions artifact. The model must first copy files to /tmp/gh-aw/safeoutputs/upload-artifacts/ then request upload using this tool. Returns a temporary artifact ID that can be resolved to a download URL by an authorised step. Exactly one of path or filters must be present.",
+ "inputSchema": {
+ "type": "object",
+ "properties": {
+ "path": {
+ "type": "string",
+ "description": "Path to the file or directory to upload, relative to /tmp/gh-aw/safeoutputs/upload-artifacts/ (e.g., \"report.json\" or \"dist/\"). Required unless filters is provided."
+ },
+ "filters": {
+ "type": "object",
+ "description": "Glob-based file selection filters. Required unless path is provided.",
+ "properties": {
+ "include": {
+ "type": "array",
+ "items": {
+ "type": "string"
+ },
+ "description": "Glob patterns for files to include (e.g., [\"reports/**/*.json\"])"
+ },
+ "exclude": {
+ "type": "array",
+ "items": {
+ "type": "string"
+ },
+ "description": "Glob patterns for files to exclude (e.g., [\"**/*.env\", \"**/*.pem\"])"
+ }
+ },
+ "additionalProperties": false
+ },
+ "retention_days": {
+ "type": "integer",
+ "minimum": 1,
+ "description": "Number of days to retain the artifact. Capped by workflow configuration."
+ },
+ "skip_archive": {
+ "type": "boolean",
+ "description": "Upload the file directly without archiving. Only allowed for single-file uploads when enabled in workflow configuration."
+ },
+ "secrecy": {
+ "type": "string",
+ "description": "Confidentiality level of the artifact content (e.g., \"public\", \"internal\", \"private\")."
+ },
+ "integrity": {
+ "type": "string",
+ "description": "Trustworthiness level of the artifact source (e.g., \"low\", \"medium\", \"high\")."
+ }
+ },
+ "additionalProperties": false
+ }
}
]
diff --git a/pkg/workflow/publish_artifacts.go b/pkg/workflow/publish_artifacts.go
new file mode 100644
index 00000000000..aaacea24412
--- /dev/null
+++ b/pkg/workflow/publish_artifacts.go
@@ -0,0 +1,361 @@
+package workflow
+
+import (
+ "encoding/json"
+ "errors"
+ "fmt"
+ "strings"
+
+ "github.com/github/gh-aw/pkg/constants"
+ "github.com/github/gh-aw/pkg/logger"
+)
+
+var publishArtifactsLog = logger.New("workflow:publish_artifacts")
+
+// defaultArtifactMaxUploads is the default maximum number of upload_artifact tool calls allowed per run.
+const defaultArtifactMaxUploads = 1
+
+// defaultArtifactRetentionDays is the default artifact retention period in days.
+const defaultArtifactRetentionDays = 7
+
+// defaultArtifactMaxRetentionDays is the default maximum retention cap in days.
+const defaultArtifactMaxRetentionDays = 30
+
+// defaultArtifactMaxSizeBytes is the default maximum total upload size (100 MiB).
+const defaultArtifactMaxSizeBytes int64 = 104857600
+
+// artifactStagingDir is the path where the model stages files to be uploaded as artifacts.
+const artifactStagingDir = "/tmp/gh-aw/safeoutputs/upload-artifacts/"
+
+// artifactSlotDir is the per-slot directory used by the handler to organise staged files.
+const artifactSlotDir = "/tmp/gh-aw/upload-artifacts/"
+
+// SafeOutputsUploadArtifactStagingArtifactName is the artifact that carries the staging directory
+// from the main agent job to the upload_artifact job.
+const SafeOutputsUploadArtifactStagingArtifactName = "safe-outputs-upload-artifacts"
+
+// ArtifactFiltersConfig holds include/exclude glob patterns for artifact file selection.
+type ArtifactFiltersConfig struct {
+ Include []string `yaml:"include,omitempty"` // Glob patterns for files to include
+ Exclude []string `yaml:"exclude,omitempty"` // Glob patterns for files to exclude
+}
+
+// ArtifactDefaultsConfig holds default request settings applied when the model does not
+// specify a value explicitly.
+type ArtifactDefaultsConfig struct {
+ SkipArchive bool `yaml:"skip-archive,omitempty"` // Default value for skip_archive
+ IfNoFiles string `yaml:"if-no-files,omitempty"` // Behaviour when no files match: "error" or "ignore"
+}
+
+// ArtifactAllowConfig holds policy settings for optional behaviours that must be explicitly
+// opted-in to by the workflow author.
+type ArtifactAllowConfig struct {
+ SkipArchive bool `yaml:"skip-archive,omitempty"` // Allow skip_archive: true in model requests
+}
+
+// UploadArtifactConfig holds configuration for the upload-artifact safe output type.
+type UploadArtifactConfig struct {
+ BaseSafeOutputConfig `yaml:",inline"`
+ MaxUploads int `yaml:"max-uploads,omitempty"` // Max upload_artifact tool calls allowed (default: 1)
+ DefaultRetentionDays int `yaml:"default-retention-days,omitempty"` // Default retention period (default: 7 days)
+ MaxRetentionDays int `yaml:"max-retention-days,omitempty"` // Maximum retention cap (default: 30 days)
+ MaxSizeBytes int64 `yaml:"max-size-bytes,omitempty"` // Max total bytes per upload (default: 100 MB)
+ AllowedPaths []string `yaml:"allowed-paths,omitempty"` // Glob patterns restricting which paths the model may upload
+ Filters *ArtifactFiltersConfig `yaml:"filters,omitempty"` // Default include/exclude filters applied on top of allowed-paths
+ Defaults *ArtifactDefaultsConfig `yaml:"defaults,omitempty"` // Default values injected when the model omits a field
+ Allow *ArtifactAllowConfig `yaml:"allow,omitempty"` // Opt-in behaviours
+}
+
+// parseUploadArtifactConfig parses the upload-artifact key from the safe-outputs map.
+func (c *Compiler) parseUploadArtifactConfig(outputMap map[string]any) *UploadArtifactConfig {
+ configData, exists := outputMap["upload-artifact"]
+ if !exists {
+ return nil
+ }
+
+ // Explicit false disables upload-artifact (e.g. when passed via import-inputs).
+ if b, ok := configData.(bool); ok && !b {
+ publishArtifactsLog.Print("upload-artifact explicitly set to false, skipping")
+ return nil
+ }
+
+ publishArtifactsLog.Print("Parsing upload-artifact configuration")
+ config := &UploadArtifactConfig{
+ MaxUploads: defaultArtifactMaxUploads,
+ DefaultRetentionDays: defaultArtifactRetentionDays,
+ MaxRetentionDays: defaultArtifactMaxRetentionDays,
+ MaxSizeBytes: defaultArtifactMaxSizeBytes,
+ }
+
+ configMap, ok := configData.(map[string]any)
+ if !ok {
+ // No config map (e.g. upload-artifact: true) – use defaults.
+ publishArtifactsLog.Print("Using default upload-artifact configuration")
+ return config
+ }
+
+ // Parse max-uploads.
+ if maxUploads, exists := configMap["max-uploads"]; exists {
+ if v, ok := parseIntValue(maxUploads); ok && v > 0 {
+ config.MaxUploads = v
+ }
+ }
+
+ // Parse default-retention-days.
+ if retDays, exists := configMap["default-retention-days"]; exists {
+ if v, ok := parseIntValue(retDays); ok && v > 0 {
+ config.DefaultRetentionDays = v
+ }
+ }
+
+ // Parse max-retention-days.
+ if maxRetDays, exists := configMap["max-retention-days"]; exists {
+ if v, ok := parseIntValue(maxRetDays); ok && v > 0 {
+ config.MaxRetentionDays = v
+ }
+ }
+
+ // Parse max-size-bytes.
+ if maxBytes, exists := configMap["max-size-bytes"]; exists {
+ if v, ok := parseIntValue(maxBytes); ok && v > 0 {
+ config.MaxSizeBytes = int64(v)
+ }
+ }
+
+ // Parse allowed-paths.
+ if allowedPaths, exists := configMap["allowed-paths"]; exists {
+ if arr, ok := allowedPaths.([]any); ok {
+ for _, p := range arr {
+ if s, ok := p.(string); ok && s != "" {
+ config.AllowedPaths = append(config.AllowedPaths, s)
+ }
+ }
+ }
+ }
+
+ // Parse filters.
+ if filtersData, exists := configMap["filters"]; exists {
+ if filtersMap, ok := filtersData.(map[string]any); ok {
+ filters := &ArtifactFiltersConfig{}
+ if inc, ok := filtersMap["include"].([]any); ok {
+ for _, v := range inc {
+ if s, ok := v.(string); ok {
+ filters.Include = append(filters.Include, s)
+ }
+ }
+ }
+ if exc, ok := filtersMap["exclude"].([]any); ok {
+ for _, v := range exc {
+ if s, ok := v.(string); ok {
+ filters.Exclude = append(filters.Exclude, s)
+ }
+ }
+ }
+ if len(filters.Include) > 0 || len(filters.Exclude) > 0 {
+ config.Filters = filters
+ }
+ }
+ }
+
+ // Parse defaults.
+ if defaultsData, exists := configMap["defaults"]; exists {
+ if defaultsMap, ok := defaultsData.(map[string]any); ok {
+ defaults := &ArtifactDefaultsConfig{}
+ if skipArchive, ok := defaultsMap["skip-archive"].(bool); ok {
+ defaults.SkipArchive = skipArchive
+ }
+ if ifNoFiles, ok := defaultsMap["if-no-files"].(string); ok && ifNoFiles != "" {
+ defaults.IfNoFiles = ifNoFiles
+ }
+ config.Defaults = defaults
+ }
+ }
+
+ // Parse allow.
+ if allowData, exists := configMap["allow"]; exists {
+ if allowMap, ok := allowData.(map[string]any); ok {
+ allow := &ArtifactAllowConfig{}
+ if skipArchive, ok := allowMap["skip-archive"].(bool); ok {
+ allow.SkipArchive = skipArchive
+ }
+ config.Allow = allow
+ }
+ }
+
+ // Parse common base fields (max, github-token, staged).
+ c.parseBaseSafeOutputConfig(configMap, &config.BaseSafeOutputConfig, 0)
+
+ publishArtifactsLog.Printf("Parsed upload-artifact config: max_uploads=%d, default_retention=%d, max_retention=%d, max_size_bytes=%d",
+ config.MaxUploads, config.DefaultRetentionDays, config.MaxRetentionDays, config.MaxSizeBytes)
+ return config
+}
+
+// buildUploadArtifactJob creates the upload_artifact standalone job.
+//
+// Architecture:
+// 1. The model stages files to artifactStagingDir during its run.
+// 2. The main agent job uploads that directory as a GitHub Actions staging artifact.
+// 3. This job downloads the staging artifact, validates each upload_artifact request,
+// copies approved files into per-slot directories, and then uploads each slot using
+// actions/upload-artifact with a conditional step per MaxUploads slot.
+// 4. A temporary artifact ID is returned for each slot via job outputs.
+func (c *Compiler) buildUploadArtifactJob(data *WorkflowData, mainJobName string, threatDetectionEnabled bool) (*Job, error) {
+ publishArtifactsLog.Printf("Building upload_artifact job: workflow=%s, main_job=%s, threat_detection=%v",
+ data.Name, mainJobName, threatDetectionEnabled)
+
+ if data.SafeOutputs == nil || data.SafeOutputs.UploadArtifact == nil {
+ return nil, errors.New("safe-outputs.upload-artifact configuration is required")
+ }
+
+ cfg := data.SafeOutputs.UploadArtifact
+
+ var preSteps []string
+
+ // Add setup step so scripts are available at SetupActionDestination.
+ setupActionRef := c.resolveActionReference("./actions/setup", data)
+ if setupActionRef != "" || c.actionMode.IsScript() {
+ preSteps = append(preSteps, c.generateCheckoutActionsFolder(data)...)
+ publishTraceID := fmt.Sprintf("${{ needs.%s.outputs.setup-trace-id }}", constants.ActivationJobName)
+ preSteps = append(preSteps, c.generateSetupStep(setupActionRef, SetupActionDestination, false, publishTraceID)...)
+ }
+
+ // Download agent output artifact (to read upload_artifact requests).
+ artifactPrefix := artifactPrefixExprForAgentDownstreamJob(data)
+ preSteps = append(preSteps,
+ buildAgentOutputDownloadSteps(artifactPrefix)...,
+ )
+
+ // Download the staging artifact that holds the files the model wants to upload.
+ // NOTE(review): this uses artifactPrefixExprForAgentDownstreamJob while the producer step in
+ // generateSafeOutputsArtifactStagingUpload uses artifactPrefixExprForDownstreamJob — confirm both
+ // resolve to the same prefix, otherwise the download silently finds nothing (continue-on-error hides it).
+ stagingArtifactName := artifactPrefix + SafeOutputsUploadArtifactStagingArtifactName
+ preSteps = append(preSteps,
+ " - name: Download upload-artifact staging\n",
+ " continue-on-error: true\n",
+ fmt.Sprintf(" uses: %s\n", GetActionPin("actions/download-artifact")),
+ " with:\n",
+ fmt.Sprintf(" name: %s\n", stagingArtifactName),
+ fmt.Sprintf(" path: %s\n", artifactStagingDir),
+ )
+
+ // Build custom environment variables consumed by upload_artifact.cjs.
+ var customEnvVars []string
+ customEnvVars = append(customEnvVars, fmt.Sprintf(" GH_AW_ARTIFACT_MAX_UPLOADS: %d\n", cfg.MaxUploads))
+ customEnvVars = append(customEnvVars, fmt.Sprintf(" GH_AW_ARTIFACT_DEFAULT_RETENTION_DAYS: %d\n", cfg.DefaultRetentionDays))
+ customEnvVars = append(customEnvVars, fmt.Sprintf(" GH_AW_ARTIFACT_MAX_RETENTION_DAYS: %d\n", cfg.MaxRetentionDays))
+ customEnvVars = append(customEnvVars, fmt.Sprintf(" GH_AW_ARTIFACT_MAX_SIZE_BYTES: %d\n", cfg.MaxSizeBytes))
+
+ if len(cfg.AllowedPaths) > 0 {
+ allowedPathsJSON := marshalStringSliceJSON(cfg.AllowedPaths)
+ customEnvVars = append(customEnvVars, fmt.Sprintf(" GH_AW_ARTIFACT_ALLOWED_PATHS: %q\n", allowedPathsJSON))
+ }
+
+ if cfg.Allow != nil && cfg.Allow.SkipArchive {
+ customEnvVars = append(customEnvVars, " GH_AW_ARTIFACT_ALLOW_SKIP_ARCHIVE: \"true\"\n")
+ }
+ if cfg.Defaults != nil {
+ if cfg.Defaults.SkipArchive {
+ customEnvVars = append(customEnvVars, " GH_AW_ARTIFACT_DEFAULT_SKIP_ARCHIVE: \"true\"\n")
+ }
+ if cfg.Defaults.IfNoFiles != "" {
+ customEnvVars = append(customEnvVars, fmt.Sprintf(" GH_AW_ARTIFACT_DEFAULT_IF_NO_FILES: %q\n", cfg.Defaults.IfNoFiles))
+ }
+ }
+ if cfg.Filters != nil {
+ if len(cfg.Filters.Include) > 0 {
+ filtersIncJSON := marshalStringSliceJSON(cfg.Filters.Include)
+ customEnvVars = append(customEnvVars, fmt.Sprintf(" GH_AW_ARTIFACT_FILTERS_INCLUDE: %q\n", filtersIncJSON))
+ }
+ if len(cfg.Filters.Exclude) > 0 {
+ filtersExcJSON := marshalStringSliceJSON(cfg.Filters.Exclude)
+ customEnvVars = append(customEnvVars, fmt.Sprintf(" GH_AW_ARTIFACT_FILTERS_EXCLUDE: %q\n", filtersExcJSON))
+ }
+ }
+
+ // Add standard env vars (run ID, repo, etc.).
+ customEnvVars = append(customEnvVars, c.buildStandardSafeOutputEnvVars(data, "")...)
+
+ // Build conditional actions/upload-artifact steps – one per MaxUploads slot.
+ // The handler sets slot_N_enabled=true and outputs the slot name / retention when
+ // the Nth upload_artifact request was successfully validated and staged.
+ var postSteps []string
+ for i := range cfg.MaxUploads {
+ slotDir := fmt.Sprintf("%sslot_%d/", artifactSlotDir, i)
+ postSteps = append(postSteps,
+ fmt.Sprintf(" - name: Upload artifact slot %d\n", i),
+ fmt.Sprintf(" if: steps.upload_artifacts.outputs.slot_%d_enabled == 'true'\n", i),
+ fmt.Sprintf(" uses: %s\n", GetActionPin("actions/upload-artifact")),
+ " with:\n",
+ fmt.Sprintf(" name: ${{ steps.upload_artifacts.outputs.slot_%d_name }}\n", i),
+ fmt.Sprintf(" path: %s\n", slotDir),
+ fmt.Sprintf(" retention-days: ${{ steps.upload_artifacts.outputs.slot_%d_retention_days }}\n", i),
+ " if-no-files-found: ignore\n",
+ )
+ }
+
+ // In dev mode, restore the actions/setup folder so the post-step cleanup succeeds.
+ if c.actionMode.IsDev() {
+ postSteps = append(postSteps, c.generateRestoreActionsSetupStep())
+ publishArtifactsLog.Print("Added restore actions folder step to upload_artifact job (dev mode)")
+ }
+
+ jobCondition := BuildSafeOutputType("upload_artifact")
+ needs := []string{mainJobName, string(constants.ActivationJobName)}
+
+ // Collect job outputs for all slots so downstream jobs can reference them.
+ outputs := map[string]string{
+ "artifact_count": "${{ steps.upload_artifacts.outputs.artifact_count }}",
+ }
+ for i := range cfg.MaxUploads {
+ outputs[fmt.Sprintf("slot_%d_tmp_id", i)] = fmt.Sprintf("${{ steps.upload_artifacts.outputs.slot_%d_tmp_id }}", i)
+ }
+
+ return c.buildSafeOutputJob(data, SafeOutputJobConfig{
+ JobName: "upload_artifact",
+ StepName: "Upload artifacts",
+ StepID: "upload_artifacts",
+ ScriptName: "upload_artifact",
+ MainJobName: mainJobName,
+ CustomEnvVars: customEnvVars,
+ Script: "",
+ Permissions: NewPermissions(),
+ Outputs: outputs,
+ Condition: jobCondition,
+ PreSteps: preSteps,
+ PostSteps: postSteps,
+ Token: cfg.GitHubToken,
+ Needs: needs,
+ })
+}
+
+// generateSafeOutputsArtifactStagingUpload generates a step in the main agent job that uploads
+// the artifact staging directory so the upload_artifact job can download it.
+// This step only appears when upload-artifact is configured in safe-outputs.
+func generateSafeOutputsArtifactStagingUpload(builder *strings.Builder, data *WorkflowData) {
+ if data.SafeOutputs == nil || data.SafeOutputs.UploadArtifact == nil {
+ return
+ }
+
+ publishArtifactsLog.Print("Generating safe-outputs artifact staging upload step")
+
+ prefix := artifactPrefixExprForDownstreamJob(data)
+
+ builder.WriteString(" # Upload safe-outputs upload-artifact staging for the upload_artifact job\n")
+ builder.WriteString(" - name: Upload Upload-Artifact Staging\n")
+ builder.WriteString(" if: always()\n")
+ fmt.Fprintf(builder, " uses: %s\n", GetActionPin("actions/upload-artifact"))
+ builder.WriteString(" with:\n")
+ fmt.Fprintf(builder, " name: %s%s\n", prefix, SafeOutputsUploadArtifactStagingArtifactName)
+ fmt.Fprintf(builder, " path: %s\n", artifactStagingDir)
+ builder.WriteString(" retention-days: 1\n")
+ builder.WriteString(" if-no-files-found: ignore\n")
+}
+
+// marshalStringSliceJSON serialises a []string to a compact JSON array string.
+// This is used to pass multi-value config fields as environment variables.
+func marshalStringSliceJSON(values []string) string {
+ data, err := json.Marshal(values)
+ if err != nil {
+ // Should never happen for plain string slices.
+ return "[]"
+ }
+ return string(data)
+}
diff --git a/pkg/workflow/publish_artifacts_test.go b/pkg/workflow/publish_artifacts_test.go
new file mode 100644
index 00000000000..459fc3a8649
--- /dev/null
+++ b/pkg/workflow/publish_artifacts_test.go
@@ -0,0 +1,314 @@
+//go:build !integration
+
+package workflow
+
+import (
+ "strings"
+ "testing"
+
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+)
+
+func TestParseUploadArtifactConfig(t *testing.T) {
+ c := &Compiler{}
+
+ tests := []struct {
+ name string
+ input map[string]any
+ expected *UploadArtifactConfig
+ isNil bool
+ }{
+ {
+ name: "no upload-artifact key",
+ input: map[string]any{},
+ isNil: true,
+ },
+ {
+ name: "upload-artifact explicitly false",
+ input: map[string]any{"upload-artifact": false},
+ isNil: true,
+ },
+ {
+ name: "upload-artifact true uses defaults",
+ input: map[string]any{"upload-artifact": true},
+ expected: &UploadArtifactConfig{
+ MaxUploads: defaultArtifactMaxUploads,
+ DefaultRetentionDays: defaultArtifactRetentionDays,
+ MaxRetentionDays: defaultArtifactMaxRetentionDays,
+ MaxSizeBytes: defaultArtifactMaxSizeBytes,
+ },
+ },
+ {
+ name: "upload-artifact with custom values",
+ input: map[string]any{
+ "upload-artifact": map[string]any{
+ "max-uploads": 3,
+ "default-retention-days": 14,
+ "max-retention-days": 60,
+ "max-size-bytes": 52428800,
+ "allowed-paths": []any{"dist/**", "reports/**"},
+ "github-token": "${{ secrets.MY_TOKEN }}",
+ },
+ },
+ expected: &UploadArtifactConfig{
+ MaxUploads: 3,
+ DefaultRetentionDays: 14,
+ MaxRetentionDays: 60,
+ MaxSizeBytes: 52428800,
+ AllowedPaths: []string{"dist/**", "reports/**"},
+ BaseSafeOutputConfig: BaseSafeOutputConfig{GitHubToken: "${{ secrets.MY_TOKEN }}"},
+ },
+ },
+ {
+ name: "upload-artifact with filters",
+ input: map[string]any{
+ "upload-artifact": map[string]any{
+ "filters": map[string]any{
+ "include": []any{"reports/**/*.json"},
+ "exclude": []any{"**/*.env", "**/*.pem"},
+ },
+ },
+ },
+ expected: &UploadArtifactConfig{
+ MaxUploads: defaultArtifactMaxUploads,
+ DefaultRetentionDays: defaultArtifactRetentionDays,
+ MaxRetentionDays: defaultArtifactMaxRetentionDays,
+ MaxSizeBytes: defaultArtifactMaxSizeBytes,
+ Filters: &ArtifactFiltersConfig{
+ Include: []string{"reports/**/*.json"},
+ Exclude: []string{"**/*.env", "**/*.pem"},
+ },
+ },
+ },
+ {
+ name: "upload-artifact with defaults and allow",
+ input: map[string]any{
+ "upload-artifact": map[string]any{
+ "defaults": map[string]any{
+ "skip-archive": false,
+ "if-no-files": "ignore",
+ },
+ "allow": map[string]any{
+ "skip-archive": true,
+ },
+ },
+ },
+ expected: &UploadArtifactConfig{
+ MaxUploads: defaultArtifactMaxUploads,
+ DefaultRetentionDays: defaultArtifactRetentionDays,
+ MaxRetentionDays: defaultArtifactMaxRetentionDays,
+ MaxSizeBytes: defaultArtifactMaxSizeBytes,
+ Defaults: &ArtifactDefaultsConfig{
+ SkipArchive: false,
+ IfNoFiles: "ignore",
+ },
+ Allow: &ArtifactAllowConfig{
+ SkipArchive: true,
+ },
+ },
+ },
+ {
+ name: "upload-artifact with max field",
+ input: map[string]any{
+ "upload-artifact": map[string]any{
+ "max": 5,
+ },
+ },
+ expected: &UploadArtifactConfig{
+ MaxUploads: defaultArtifactMaxUploads,
+ DefaultRetentionDays: defaultArtifactRetentionDays,
+ MaxRetentionDays: defaultArtifactMaxRetentionDays,
+ MaxSizeBytes: defaultArtifactMaxSizeBytes,
+ BaseSafeOutputConfig: BaseSafeOutputConfig{Max: strPtr("5")},
+ },
+ },
+ }
+
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ result := c.parseUploadArtifactConfig(tt.input)
+
+ if tt.isNil {
+ assert.Nil(t, result, "expected nil result")
+ return
+ }
+
+ require.NotNil(t, result, "expected non-nil result")
+ assert.Equal(t, tt.expected.MaxUploads, result.MaxUploads, "MaxUploads mismatch")
+ assert.Equal(t, tt.expected.DefaultRetentionDays, result.DefaultRetentionDays, "DefaultRetentionDays mismatch")
+ assert.Equal(t, tt.expected.MaxRetentionDays, result.MaxRetentionDays, "MaxRetentionDays mismatch")
+ assert.Equal(t, tt.expected.MaxSizeBytes, result.MaxSizeBytes, "MaxSizeBytes mismatch")
+ assert.Equal(t, tt.expected.AllowedPaths, result.AllowedPaths, "AllowedPaths mismatch")
+ assert.Equal(t, tt.expected.GitHubToken, result.GitHubToken, "GitHubToken mismatch")
+
+ if tt.expected.Max == nil {
+ assert.Nil(t, result.Max, "Max should be nil")
+ } else {
+ require.NotNil(t, result.Max, "Max should not be nil")
+ assert.Equal(t, *tt.expected.Max, *result.Max, "Max value mismatch")
+ }
+
+ if tt.expected.Filters == nil {
+ assert.Nil(t, result.Filters, "Filters should be nil")
+ } else {
+ require.NotNil(t, result.Filters, "Filters should not be nil")
+ assert.Equal(t, tt.expected.Filters.Include, result.Filters.Include, "Filters.Include mismatch")
+ assert.Equal(t, tt.expected.Filters.Exclude, result.Filters.Exclude, "Filters.Exclude mismatch")
+ }
+
+ if tt.expected.Defaults == nil {
+ assert.Nil(t, result.Defaults, "Defaults should be nil")
+ } else {
+ require.NotNil(t, result.Defaults, "Defaults should not be nil")
+ assert.Equal(t, tt.expected.Defaults.SkipArchive, result.Defaults.SkipArchive, "Defaults.SkipArchive mismatch")
+ assert.Equal(t, tt.expected.Defaults.IfNoFiles, result.Defaults.IfNoFiles, "Defaults.IfNoFiles mismatch")
+ }
+
+ if tt.expected.Allow == nil {
+ assert.Nil(t, result.Allow, "Allow should be nil")
+ } else {
+ require.NotNil(t, result.Allow, "Allow should not be nil")
+ assert.Equal(t, tt.expected.Allow.SkipArchive, result.Allow.SkipArchive, "Allow.SkipArchive mismatch")
+ }
+ })
+ }
+}
+
+func TestHasSafeOutputsEnabledWithUploadArtifact(t *testing.T) {
+ t.Run("UploadArtifact is detected as enabled", func(t *testing.T) {
+ config := &SafeOutputsConfig{
+ UploadArtifact: &UploadArtifactConfig{},
+ }
+ assert.True(t, HasSafeOutputsEnabled(config), "UploadArtifact should be detected as enabled")
+ })
+
+ t.Run("nil SafeOutputsConfig returns false", func(t *testing.T) {
+ assert.False(t, HasSafeOutputsEnabled(nil), "nil config should return false")
+ })
+
+ t.Run("empty SafeOutputsConfig returns false", func(t *testing.T) {
+ assert.False(t, HasSafeOutputsEnabled(&SafeOutputsConfig{}), "empty config should return false")
+ })
+}
+
+func TestComputeEnabledToolNamesIncludesUploadArtifact(t *testing.T) {
+ data := &WorkflowData{
+ SafeOutputs: &SafeOutputsConfig{
+ UploadArtifact: &UploadArtifactConfig{},
+ },
+ }
+ tools := computeEnabledToolNames(data)
+ assert.True(t, tools["upload_artifact"], "upload_artifact should be in enabled tools")
+}
+
+func TestBuildUploadArtifactJobBasicStructure(t *testing.T) {
+ c := NewCompiler()
+ data := &WorkflowData{
+ Name: "Test Workflow",
+ SafeOutputs: &SafeOutputsConfig{
+ UploadArtifact: &UploadArtifactConfig{
+ MaxUploads: 2,
+ DefaultRetentionDays: 7,
+ MaxRetentionDays: 30,
+ MaxSizeBytes: defaultArtifactMaxSizeBytes,
+ AllowedPaths: []string{"dist/**", "reports/**"},
+ },
+ },
+ }
+
+ job, err := c.buildUploadArtifactJob(data, "agent", false)
+ require.NoError(t, err, "buildUploadArtifactJob should not return error")
+ require.NotNil(t, job, "job should not be nil")
+
+ assert.Equal(t, "upload_artifact", job.Name, "job name should be upload_artifact")
+
+ // Convert steps to string for inspection.
+ var stepsStr strings.Builder
+ for _, step := range job.Steps {
+ stepsStr.WriteString(step)
+ }
+ s := stepsStr.String()
+
+ assert.Contains(t, s, "Download agent output artifact", "should have agent output download step")
+ assert.Contains(t, s, "Download upload-artifact staging", "should have staging artifact download step")
+ assert.Contains(t, s, "GH_AW_ARTIFACT_MAX_UPLOADS", "should have max uploads env var")
+ assert.Contains(t, s, "GH_AW_ARTIFACT_DEFAULT_RETENTION_DAYS", "should have default retention env var")
+ assert.Contains(t, s, "GH_AW_ARTIFACT_MAX_RETENTION_DAYS", "should have max retention env var")
+ assert.Contains(t, s, "GH_AW_ARTIFACT_MAX_SIZE_BYTES", "should have max size bytes env var")
+ assert.Contains(t, s, "GH_AW_ARTIFACT_ALLOWED_PATHS", "should have allowed paths env var")
+
+ // Should have upload steps for each slot (MaxUploads = 2).
+ assert.Contains(t, s, "Upload artifact slot 0", "should have upload step for slot 0")
+ assert.Contains(t, s, "Upload artifact slot 1", "should have upload step for slot 1")
+ assert.NotContains(t, s, "Upload artifact slot 2", "should NOT have upload step for slot 2")
+}
+
+func TestBuildUploadArtifactJobRequiresConfig(t *testing.T) {
+ c := NewCompiler()
+
+ t.Run("nil SafeOutputs returns error", func(t *testing.T) {
+ data := &WorkflowData{Name: "Test", SafeOutputs: nil}
+ _, err := c.buildUploadArtifactJob(data, "agent", false)
+ assert.Error(t, err, "should return error when SafeOutputs is nil")
+ })
+
+ t.Run("nil UploadArtifact returns error", func(t *testing.T) {
+ data := &WorkflowData{
+ Name: "Test",
+ SafeOutputs: &SafeOutputsConfig{UploadArtifact: nil},
+ }
+ _, err := c.buildUploadArtifactJob(data, "agent", false)
+ assert.Error(t, err, "should return error when UploadArtifact is nil")
+ })
+}
+
+func TestGenerateSafeOutputsArtifactStagingUpload(t *testing.T) {
+ t.Run("generates step when UploadArtifact is configured", func(t *testing.T) {
+ var b strings.Builder
+ data := &WorkflowData{
+ SafeOutputs: &SafeOutputsConfig{
+ UploadArtifact: &UploadArtifactConfig{},
+ },
+ }
+ generateSafeOutputsArtifactStagingUpload(&b, data)
+ result := b.String()
+ assert.Contains(t, result, "safe-outputs-upload-artifacts", "should reference staging artifact name")
+ assert.Contains(t, result, artifactStagingDir, "should reference staging directory")
+ assert.Contains(t, result, "if: always()", "should have always() condition")
+ })
+
+ t.Run("generates nothing when UploadArtifact is nil", func(t *testing.T) {
+ var b strings.Builder
+ data := &WorkflowData{
+ SafeOutputs: &SafeOutputsConfig{UploadArtifact: nil},
+ }
+ generateSafeOutputsArtifactStagingUpload(&b, data)
+ assert.Empty(t, b.String(), "should generate nothing when UploadArtifact is nil")
+ })
+
+ t.Run("generates nothing when SafeOutputs is nil", func(t *testing.T) {
+ var b strings.Builder
+ data := &WorkflowData{SafeOutputs: nil}
+ generateSafeOutputsArtifactStagingUpload(&b, data)
+ assert.Empty(t, b.String(), "should generate nothing when SafeOutputs is nil")
+ })
+}
+
+func TestMarshalStringSliceJSON(t *testing.T) {
+ tests := []struct {
+ name string
+ input []string
+ expected string
+ }{
+ {"empty slice", []string{}, "[]"},
+ {"single value", []string{"dist/**"}, `["dist/**"]`},
+ {"multiple values", []string{"dist/**", "reports/**/*.json"}, `["dist/**","reports/**/*.json"]`},
+ }
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ result := marshalStringSliceJSON(tt.input)
+ assert.Equal(t, tt.expected, result, "JSON output mismatch")
+ })
+ }
+}
diff --git a/pkg/workflow/safe_outputs_config.go b/pkg/workflow/safe_outputs_config.go
index a26d7ac4f2e..047c3d3eed9 100644
--- a/pkg/workflow/safe_outputs_config.go
+++ b/pkg/workflow/safe_outputs_config.go
@@ -269,6 +269,12 @@ func (c *Compiler) extractSafeOutputsConfig(frontmatter map[string]any) *SafeOut
config.UploadAssets = uploadAssetsConfig
}
+ // Handle upload-artifact
+ uploadArtifactConfig := c.parseUploadArtifactConfig(outputMap)
+ if uploadArtifactConfig != nil {
+ config.UploadArtifact = uploadArtifactConfig
+ }
+
// Handle update-release
updateReleaseConfig := c.parseUpdateReleaseConfig(outputMap)
if updateReleaseConfig != nil {
diff --git a/pkg/workflow/safe_outputs_state.go b/pkg/workflow/safe_outputs_state.go
index bc727c5e6da..cb3def0c99a 100644
--- a/pkg/workflow/safe_outputs_state.go
+++ b/pkg/workflow/safe_outputs_state.go
@@ -47,6 +47,7 @@ var safeOutputFieldMapping = map[string]string{
"UpdatePullRequests": "update_pull_request",
"PushToPullRequestBranch": "push_to_pull_request_branch",
"UploadAssets": "upload_asset",
+ "UploadArtifact": "upload_artifact",
"UpdateRelease": "update_release",
"UpdateProjects": "update_project",
"CreateProjects": "create_project",
diff --git a/pkg/workflow/safe_outputs_tools_computation.go b/pkg/workflow/safe_outputs_tools_computation.go
index 4502e1e2b73..2b477f07923 100644
--- a/pkg/workflow/safe_outputs_tools_computation.go
+++ b/pkg/workflow/safe_outputs_tools_computation.go
@@ -95,6 +95,9 @@ func computeEnabledToolNames(data *WorkflowData) map[string]bool {
if data.SafeOutputs.UploadAssets != nil {
enabledTools["upload_asset"] = true
}
+ if data.SafeOutputs.UploadArtifact != nil {
+ enabledTools["upload_artifact"] = true
+ }
if data.SafeOutputs.MissingTool != nil {
enabledTools["missing_tool"] = true
}
diff --git a/pkg/workflow/testdata/TestWasmGolden_CompileFixtures/basic-copilot.golden b/pkg/workflow/testdata/TestWasmGolden_CompileFixtures/basic-copilot.golden
index bd5d6472956..479d7255169 100644
--- a/pkg/workflow/testdata/TestWasmGolden_CompileFixtures/basic-copilot.golden
+++ b/pkg/workflow/testdata/TestWasmGolden_CompileFixtures/basic-copilot.golden
@@ -21,6 +21,7 @@ jobs:
if: needs.pre_activation.outputs.activated == 'true'
runs-on: ubuntu-slim
permissions:
+ actions: read
contents: read
outputs:
comment_id: ""
diff --git a/pkg/workflow/testdata/TestWasmGolden_CompileFixtures/with-imports.golden b/pkg/workflow/testdata/TestWasmGolden_CompileFixtures/with-imports.golden
index c29fa80098e..4bf8f9a4c20 100644
--- a/pkg/workflow/testdata/TestWasmGolden_CompileFixtures/with-imports.golden
+++ b/pkg/workflow/testdata/TestWasmGolden_CompileFixtures/with-imports.golden
@@ -21,6 +21,7 @@ jobs:
if: needs.pre_activation.outputs.activated == 'true'
runs-on: ubuntu-slim
permissions:
+ actions: read
contents: read
outputs:
comment_id: ""
From 60b037602cc827384c731e26f6d65e37c5eca7fd Mon Sep 17 00:00:00 2001
From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com>
Date: Tue, 7 Apr 2026 02:19:18 +0000
Subject: [PATCH 03/23] feat: add upload_artifact safe output type with native
GitHub Actions artifact support
Agent-Logs-Url: https://github.com/github/gh-aw/sessions/06153ed3-a241-400b-9414-3dc304516475
Co-authored-by: pelikhan <4175913+pelikhan@users.noreply.github.com>
---
pkg/workflow/publish_artifacts.go | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/pkg/workflow/publish_artifacts.go b/pkg/workflow/publish_artifacts.go
index aaacea24412..434ca5fa2fc 100644
--- a/pkg/workflow/publish_artifacts.go
+++ b/pkg/workflow/publish_artifacts.go
@@ -90,7 +90,7 @@ func (c *Compiler) parseUploadArtifactConfig(outputMap map[string]any) *UploadAr
configMap, ok := configData.(map[string]any)
if !ok {
// No config map (e.g. upload-artifact: true) – use defaults.
- publishArtifactsLog.Print("Using default upload-artifact configuration")
+ publishArtifactsLog.Print("upload-artifact enabled with default configuration")
return config
}
From 1d4b6ad8e69cc3cd2346478802f7ca2923d1e0bb Mon Sep 17 00:00:00 2001
From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com>
Date: Tue, 7 Apr 2026 02:49:29 +0000
Subject: [PATCH 04/23] fix: consolidate template literal concatenations in
upload_artifact.cjs
Agent-Logs-Url: https://github.com/github/gh-aw/sessions/e2c3d991-4ccd-4c47-8a7b-53d9905e1444
Co-authored-by: pelikhan <4175913+pelikhan@users.noreply.github.com>
---
actions/setup/js/upload_artifact.cjs | 10 +++++-----
1 file changed, 5 insertions(+), 5 deletions(-)
diff --git a/actions/setup/js/upload_artifact.cjs b/actions/setup/js/upload_artifact.cjs
index 9e6ace0a243..4d624951f42 100644
--- a/actions/setup/js/upload_artifact.cjs
+++ b/actions/setup/js/upload_artifact.cjs
@@ -319,7 +319,7 @@ async function main() {
// Enforce max-uploads policy.
if (uploadRequests.length > maxUploads) {
- core.setFailed(`${ERR_VALIDATION}: upload_artifact: ${uploadRequests.length} requests exceed max-uploads policy (${maxUploads}). ` + `Reduce the number of upload_artifact calls or raise max-uploads in workflow configuration.`);
+ core.setFailed(`${ERR_VALIDATION}: upload_artifact: ${uploadRequests.length} requests exceed max-uploads policy (${maxUploads}). Reduce the number of upload_artifact calls or raise max-uploads in workflow configuration.`);
return;
}
@@ -340,7 +340,7 @@ async function main() {
// Resolve skip_archive.
const skipArchive = typeof request.skip_archive === "boolean" ? request.skip_archive : defaultSkipArchive;
if (skipArchive && !allowSkipArchive) {
- core.setFailed(`${ERR_VALIDATION}: upload_artifact request ${i + 1}: skip_archive=true is not permitted. ` + `Enable it with allow.skip-archive: true in workflow configuration.`);
+ core.setFailed(`${ERR_VALIDATION}: upload_artifact request ${i + 1}: skip_archive=true is not permitted. Enable it with allow.skip-archive: true in workflow configuration.`);
return;
}
@@ -356,7 +356,7 @@ async function main() {
core.warning(`upload_artifact request ${i + 1}: no files matched, skipping (if-no-files=ignore)`);
continue;
} else {
- core.setFailed(`${ERR_VALIDATION}: upload_artifact request ${i + 1}: no files matched the selection criteria. ` + `Check allowed-paths, filters, or use defaults.if-no-files: ignore to skip empty uploads.`);
+ core.setFailed(`${ERR_VALIDATION}: upload_artifact request ${i + 1}: no files matched the selection criteria. Check allowed-paths, filters, or use defaults.if-no-files: ignore to skip empty uploads.`);
return;
}
}
@@ -371,7 +371,7 @@ async function main() {
// Validate total size.
const totalSize = computeTotalSize(files);
if (totalSize > maxSizeBytes) {
- core.setFailed(`${ERR_VALIDATION}: upload_artifact request ${i + 1}: total file size ${totalSize} bytes exceeds ` + `max-size-bytes limit of ${maxSizeBytes} bytes.`);
+ core.setFailed(`${ERR_VALIDATION}: upload_artifact request ${i + 1}: total file size ${totalSize} bytes exceeds max-size-bytes limit of ${maxSizeBytes} bytes.`);
return;
}
@@ -383,7 +383,7 @@ async function main() {
const tmpId = generateTemporaryArtifactId();
resolver[tmpId] = artifactName;
- core.info(`Slot ${i}: artifact="${artifactName}", files=${files.length}, size=${totalSize}B, ` + `retention=${retentionDays}d, skip_archive=${skipArchive}, tmp_id=${tmpId}`);
+ core.info(`Slot ${i}: artifact="${artifactName}", files=${files.length}, size=${totalSize}B, retention=${retentionDays}d, skip_archive=${skipArchive}, tmp_id=${tmpId}`);
if (!isStaged) {
// Stage files into the per-slot directory for the actions/upload-artifact step.
From a9e8f8f5b760c7982b163c4f9bfcc9e34c75d723 Mon Sep 17 00:00:00 2001
From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com>
Date: Tue, 7 Apr 2026 03:03:27 +0000
Subject: [PATCH 05/23] feat: add upload-artifact to JSON schema (no enabled
field required)
Agent-Logs-Url: https://github.com/github/gh-aw/sessions/8dce883a-59fd-437b-a8ba-9dc9587acc98
Co-authored-by: pelikhan <4175913+pelikhan@users.noreply.github.com>
---
pkg/parser/schemas/main_workflow_schema.json | 108 ++++++++++++++++++-
1 file changed, 107 insertions(+), 1 deletion(-)
diff --git a/pkg/parser/schemas/main_workflow_schema.json b/pkg/parser/schemas/main_workflow_schema.json
index 8c34b92db88..415cb386934 100644
--- a/pkg/parser/schemas/main_workflow_schema.json
+++ b/pkg/parser/schemas/main_workflow_schema.json
@@ -4344,7 +4344,7 @@
},
"safe-outputs": {
"type": "object",
- "$comment": "Required if workflow creates or modifies GitHub resources. Operations requiring safe-outputs: autofix-code-scanning-alert, add-comment, add-labels, add-reviewer, assign-milestone, assign-to-agent, assign-to-user, close-discussion, close-issue, close-pull-request, create-agent-session, create-agent-task (deprecated, use create-agent-session), create-code-scanning-alert, create-discussion, create-issue, create-project, create-project-status-update, create-pull-request, create-pull-request-review-comment, dispatch-workflow, hide-comment, link-sub-issue, mark-pull-request-as-ready-for-review, missing-data, missing-tool, noop, push-to-pull-request-branch, remove-labels, reply-to-pull-request-review-comment, resolve-pull-request-review-thread, set-issue-type, submit-pull-request-review, threat-detection, unassign-from-user, update-discussion, update-issue, update-project, update-pull-request, update-release, upload-asset. See documentation for complete details.",
+ "$comment": "Required if workflow creates or modifies GitHub resources. Operations requiring safe-outputs: autofix-code-scanning-alert, add-comment, add-labels, add-reviewer, assign-milestone, assign-to-agent, assign-to-user, close-discussion, close-issue, close-pull-request, create-agent-session, create-agent-task (deprecated, use create-agent-session), create-code-scanning-alert, create-discussion, create-issue, create-project, create-project-status-update, create-pull-request, create-pull-request-review-comment, dispatch-workflow, hide-comment, link-sub-issue, mark-pull-request-as-ready-for-review, missing-data, missing-tool, noop, push-to-pull-request-branch, remove-labels, reply-to-pull-request-review-comment, resolve-pull-request-review-thread, set-issue-type, submit-pull-request-review, threat-detection, unassign-from-user, update-discussion, update-issue, update-project, update-pull-request, update-release, upload-artifact, upload-asset. See documentation for complete details.",
"description": "Safe output processing configuration that automatically creates GitHub issues, comments, and pull requests from AI workflow output without requiring write permissions in the main job",
"examples": [
{
@@ -7445,6 +7445,112 @@
],
"description": "Enable AI agents to publish files (images, charts, reports) to an orphaned git branch for persistent storage and web access."
},
+ "upload-artifact": {
+ "oneOf": [
+ {
+ "type": "object",
+ "description": "Configuration for uploading files as run-scoped GitHub Actions artifacts",
+ "properties": {
+ "max-uploads": {
+ "type": "integer",
+ "description": "Maximum number of upload_artifact tool calls allowed per run (default: 1)",
+ "minimum": 1,
+ "maximum": 20,
+ "default": 1
+ },
+ "default-retention-days": {
+ "type": "integer",
+ "description": "Default artifact retention period in days (default: 7)",
+ "minimum": 1,
+ "maximum": 90,
+ "default": 7
+ },
+ "max-retention-days": {
+ "type": "integer",
+ "description": "Maximum retention cap in days; model requests are clamped to this value (default: 30)",
+ "minimum": 1,
+ "maximum": 90,
+ "default": 30
+ },
+ "max-size-bytes": {
+ "type": "integer",
+ "description": "Maximum total upload size in bytes per slot (default: 104857600 = 100 MB)",
+ "minimum": 1,
+ "default": 104857600
+ },
+ "allowed-paths": {
+ "type": "array",
+ "description": "Glob patterns restricting which paths relative to the staging directory the model may upload",
+ "items": {
+ "type": "string"
+ }
+ },
+ "filters": {
+ "type": "object",
+ "description": "Default include/exclude glob filters applied on top of allowed-paths",
+ "properties": {
+ "include": {
+ "type": "array",
+ "items": { "type": "string" },
+ "description": "Glob patterns for files to include"
+ },
+ "exclude": {
+ "type": "array",
+ "items": { "type": "string" },
+ "description": "Glob patterns for files to exclude"
+ }
+ },
+ "additionalProperties": false
+ },
+ "defaults": {
+ "type": "object",
+ "description": "Default values injected when the model omits a field",
+ "properties": {
+ "skip-archive": {
+ "type": "boolean",
+ "description": "Default value for skip_archive (default: false)",
+ "default": false
+ },
+ "if-no-files": {
+ "type": "string",
+ "description": "Behaviour when no files match: 'error' (default) or 'ignore'",
+ "enum": ["error", "ignore"],
+ "default": "error"
+ }
+ },
+ "additionalProperties": false
+ },
+ "allow": {
+ "type": "object",
+ "description": "Opt-in behaviours that must be explicitly enabled by the workflow author",
+ "properties": {
+ "skip-archive": {
+ "type": "boolean",
+ "description": "Allow the model to set skip_archive: true (uploads the file directly without archiving)",
+ "default": false
+ }
+ },
+ "additionalProperties": false
+ },
+ "github-token": {
+ "$ref": "#/$defs/github_token",
+ "description": "GitHub token to use for this specific output type. Overrides global github-token if specified."
+ },
+ "staged": {
+ "type": "boolean",
+ "description": "If true, emit step summary messages instead of making GitHub Actions artifact uploads (preview mode)",
+ "examples": [true, false]
+ }
+ },
+ "additionalProperties": false
+ },
+ {
+ "type": "null",
+ "description": "Enable artifact uploads with default configuration"
+ }
+ ],
+ "description": "Enable AI agents to upload files as run-scoped GitHub Actions artifacts. Returns a temporary artifact ID rather than a raw download URL, keeping authorization centralized."
+ },
"update-release": {
"oneOf": [
{
From ae61a262b9a730fcba4451c1596d9acd84bf86c0 Mon Sep 17 00:00:00 2001
From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com>
Date: Tue, 7 Apr 2026 03:30:55 +0000
Subject: [PATCH 06/23] fix: use RUNNER_TEMP env var for artifact staging and
slot paths
Agent-Logs-Url: https://github.com/github/gh-aw/sessions/05ddc6f7-ae5e-4faa-9423-6f551a67af06
Co-authored-by: pelikhan <4175913+pelikhan@users.noreply.github.com>
---
actions/setup/js/upload_artifact.cjs | 10 +++++-----
actions/setup/js/upload_artifact.test.cjs | 11 ++++++++---
pkg/workflow/js/safe_outputs_tools.json | 4 ++--
pkg/workflow/publish_artifacts.go | 4 ++--
4 files changed, 17 insertions(+), 12 deletions(-)
diff --git a/actions/setup/js/upload_artifact.cjs b/actions/setup/js/upload_artifact.cjs
index 4d624951f42..2ad264b17e3 100644
--- a/actions/setup/js/upload_artifact.cjs
+++ b/actions/setup/js/upload_artifact.cjs
@@ -6,13 +6,13 @@
*
* Validates and stages artifact upload requests emitted by the model via the upload_artifact
* safe output tool. The model must have already copied the files it wants to upload to
- * /tmp/gh-aw/safeoutputs/upload-artifacts/ before calling the tool.
+ * ${RUNNER_TEMP}/gh-aw/safeoutputs/upload-artifacts/ before calling the tool.
*
* This handler:
* 1. Reads upload_artifact records from agent output.
* 2. Validates each request against the workflow's policy configuration.
* 3. Resolves the requested files (path or filter-based) from the staging directory.
- * 4. Copies approved files into per-slot directories under /tmp/gh-aw/upload-artifacts/slot_N/.
+ * 4. Copies approved files into per-slot directories under ${RUNNER_TEMP}/gh-aw/upload-artifacts/slot_N/.
* 5. Sets step outputs so the wrapping job's actions/upload-artifact steps can run conditionally.
* 6. Generates a temporary artifact ID for each slot.
*
@@ -40,16 +40,16 @@ const { globPatternToRegex } = require("./glob_pattern_helpers.cjs");
const { ERR_CONFIG, ERR_SYSTEM, ERR_VALIDATION } = require("./error_codes.cjs");
/** Staging directory where the model places files to be uploaded. */
-const STAGING_DIR = "/tmp/gh-aw/safeoutputs/upload-artifacts/";
+const STAGING_DIR = `${process.env.RUNNER_TEMP}/gh-aw/safeoutputs/upload-artifacts/`;
/** Base directory for per-slot artifact staging used by actions/upload-artifact. */
-const SLOT_BASE_DIR = "/tmp/gh-aw/upload-artifacts/";
+const SLOT_BASE_DIR = `${process.env.RUNNER_TEMP}/gh-aw/upload-artifacts/`;
/** Prefix for temporary artifact IDs returned to the caller. */
const TEMP_ID_PREFIX = "tmp_artifact_";
/** Path where the resolver mapping (tmpId → artifact name) is written. */
-const RESOLVER_FILE = "/tmp/gh-aw/artifact-resolver.json";
+const RESOLVER_FILE = `${process.env.RUNNER_TEMP}/gh-aw/artifact-resolver.json`;
/**
* Generate a temporary artifact ID.
diff --git a/actions/setup/js/upload_artifact.test.cjs b/actions/setup/js/upload_artifact.test.cjs
index ef59a262a9a..cf1055530ec 100644
--- a/actions/setup/js/upload_artifact.test.cjs
+++ b/actions/setup/js/upload_artifact.test.cjs
@@ -8,9 +8,11 @@ import { fileURLToPath } from "url";
const __filename = fileURLToPath(import.meta.url);
const __dirname = path.dirname(__filename);
-const STAGING_DIR = "/tmp/gh-aw/safeoutputs/upload-artifacts/";
-const SLOT_BASE_DIR = "/tmp/gh-aw/upload-artifacts/";
-const RESOLVER_FILE = "/tmp/gh-aw/artifact-resolver.json";
+// Use RUNNER_TEMP as the base so paths match what upload_artifact.cjs computes at runtime.
+const RUNNER_TEMP = "/tmp";
+const STAGING_DIR = `${RUNNER_TEMP}/gh-aw/safeoutputs/upload-artifacts/`;
+const SLOT_BASE_DIR = `${RUNNER_TEMP}/gh-aw/upload-artifacts/`;
+const RESOLVER_FILE = `${RUNNER_TEMP}/gh-aw/artifact-resolver.json`;
describe("upload_artifact.cjs", () => {
let mockCore;
@@ -63,6 +65,9 @@ describe("upload_artifact.cjs", () => {
originalEnv = { ...process.env };
+ // Set RUNNER_TEMP so the script resolves paths to the same directories as the test helpers.
+ process.env.RUNNER_TEMP = RUNNER_TEMP;
+
// Set reasonable defaults
process.env.GH_AW_ARTIFACT_MAX_UPLOADS = "3";
process.env.GH_AW_ARTIFACT_DEFAULT_RETENTION_DAYS = "7";
diff --git a/pkg/workflow/js/safe_outputs_tools.json b/pkg/workflow/js/safe_outputs_tools.json
index ac0196a526d..a0579d792a1 100644
--- a/pkg/workflow/js/safe_outputs_tools.json
+++ b/pkg/workflow/js/safe_outputs_tools.json
@@ -1586,13 +1586,13 @@
},
{
"name": "upload_artifact",
- "description": "Upload files as a run-scoped GitHub Actions artifact. The model must first copy files to /tmp/gh-aw/safeoutputs/upload-artifacts/ then request upload using this tool. Returns a temporary artifact ID that can be resolved to a download URL by an authorised step. Exactly one of path or filters must be present.",
+ "description": "Upload files as a run-scoped GitHub Actions artifact. The model must first copy files to $RUNNER_TEMP/gh-aw/safeoutputs/upload-artifacts/ then request upload using this tool. Returns a temporary artifact ID that can be resolved to a download URL by an authorised step. Exactly one of path or filters must be present.",
"inputSchema": {
"type": "object",
"properties": {
"path": {
"type": "string",
- "description": "Path to the file or directory to upload, relative to /tmp/gh-aw/safeoutputs/upload-artifacts/ (e.g., \"report.json\" or \"dist/\"). Required unless filters is provided."
+ "description": "Path to the file or directory to upload, relative to $RUNNER_TEMP/gh-aw/safeoutputs/upload-artifacts/ (e.g., \"report.json\" or \"dist/\"). Required unless filters is provided."
},
"filters": {
"type": "object",
diff --git a/pkg/workflow/publish_artifacts.go b/pkg/workflow/publish_artifacts.go
index 434ca5fa2fc..a565b3c2c3a 100644
--- a/pkg/workflow/publish_artifacts.go
+++ b/pkg/workflow/publish_artifacts.go
@@ -25,10 +25,10 @@ const defaultArtifactMaxRetentionDays = 30
const defaultArtifactMaxSizeBytes int64 = 104857600
// artifactStagingDir is the path where the model stages files to be uploaded as artifacts.
-const artifactStagingDir = "/tmp/gh-aw/safeoutputs/upload-artifacts/"
+const artifactStagingDir = "${RUNNER_TEMP}/gh-aw/safeoutputs/upload-artifacts/"
// artifactSlotDir is the per-slot directory used by the handler to organise staged files.
-const artifactSlotDir = "/tmp/gh-aw/upload-artifacts/"
+const artifactSlotDir = "${RUNNER_TEMP}/gh-aw/upload-artifacts/"
// SafeOutputsUploadArtifactStagingArtifactName is the artifact that carries the staging directory
// from the main agent job to the upload_artifact job.
From f962e66767a2a73d2f49f73408643e0c0c9f0822 Mon Sep 17 00:00:00 2001
From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com>
Date: Tue, 7 Apr 2026 04:09:45 +0000
Subject: [PATCH 07/23] feat: update smoke-copilot to upload gh-aw binary as
artifact; fix upload_artifact job permissions and duplicate step bug
Agent-Logs-Url: https://github.com/github/gh-aw/sessions/c901fdd4-ee9b-47d2-a4e6-ebe13975e3a4
Co-authored-by: pelikhan <4175913+pelikhan@users.noreply.github.com>
---
.github/workflows/smoke-copilot.lock.yml | 148 +++++++++++++++++++----
.github/workflows/smoke-copilot.md | 13 +-
pkg/workflow/publish_artifacts.go | 12 +-
3 files changed, 137 insertions(+), 36 deletions(-)
diff --git a/.github/workflows/smoke-copilot.lock.yml b/.github/workflows/smoke-copilot.lock.yml
index 4408af85f4b..5d75913a2e5 100644
--- a/.github/workflows/smoke-copilot.lock.yml
+++ b/.github/workflows/smoke-copilot.lock.yml
@@ -1,4 +1,4 @@
-# gh-aw-metadata: {"schema_version":"v3","frontmatter_hash":"6d0a385e47ce5ed241f4358e1578525037722f288b64d3dc18289d01bd352fbd","agent_id":"copilot"}
+# gh-aw-metadata: {"schema_version":"v3","frontmatter_hash":"2a931073663f42902da7a9ca2f3f56370ad310f3e6bbcf1308329503eeabccd9","agent_id":"copilot"}
# ___ _ _
# / _ \ | | (_)
# | |_| | __ _ ___ _ __ | |_ _ ___
@@ -230,9 +230,9 @@ jobs:
run: |
bash ${RUNNER_TEMP}/gh-aw/actions/create_prompt_first.sh
{
- cat << 'GH_AW_PROMPT_2d91fec7281e9c47_EOF'
+ cat << 'GH_AW_PROMPT_9896dd1a279d5d86_EOF'
- GH_AW_PROMPT_2d91fec7281e9c47_EOF
+ GH_AW_PROMPT_9896dd1a279d5d86_EOF
cat "${RUNNER_TEMP}/gh-aw/prompts/xpia.md"
cat "${RUNNER_TEMP}/gh-aw/prompts/temp_folder_prompt.md"
cat "${RUNNER_TEMP}/gh-aw/prompts/markdown.md"
@@ -240,7 +240,7 @@ jobs:
cat "${RUNNER_TEMP}/gh-aw/prompts/agentic_workflows_guide.md"
cat "${RUNNER_TEMP}/gh-aw/prompts/cache_memory_prompt.md"
cat "${RUNNER_TEMP}/gh-aw/prompts/safe_outputs_prompt.md"
- cat << 'GH_AW_PROMPT_2d91fec7281e9c47_EOF'
+ cat << 'GH_AW_PROMPT_9896dd1a279d5d86_EOF'
Tools: add_comment(max:2), create_issue, create_discussion, create_pull_request_review_comment(max:5), submit_pull_request_review, reply_to_pull_request_review_comment(max:5), add_labels, remove_labels, set_issue_type, dispatch_workflow, missing_tool, missing_data, noop, send_slack_message
@@ -272,9 +272,9 @@ jobs:
{{/if}}
- GH_AW_PROMPT_2d91fec7281e9c47_EOF
+ GH_AW_PROMPT_9896dd1a279d5d86_EOF
cat "${RUNNER_TEMP}/gh-aw/prompts/github_mcp_tools_with_safeoutputs_prompt.md"
- cat << 'GH_AW_PROMPT_2d91fec7281e9c47_EOF'
+ cat << 'GH_AW_PROMPT_9896dd1a279d5d86_EOF'
## Serena Code Analysis
@@ -314,7 +314,7 @@ jobs:
{{#runtime-import .github/workflows/shared/mcp/serena-go.md}}
{{#runtime-import .github/workflows/shared/observability-otlp.md}}
{{#runtime-import .github/workflows/smoke-copilot.md}}
- GH_AW_PROMPT_2d91fec7281e9c47_EOF
+ GH_AW_PROMPT_9896dd1a279d5d86_EOF
} > "$GH_AW_PROMPT"
- name: Interpolate variables and render templates
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
@@ -579,9 +579,9 @@ jobs:
mkdir -p ${RUNNER_TEMP}/gh-aw/safeoutputs
mkdir -p /tmp/gh-aw/safeoutputs
mkdir -p /tmp/gh-aw/mcp-logs/safeoutputs
- cat > ${RUNNER_TEMP}/gh-aw/safeoutputs/config.json << 'GH_AW_SAFE_OUTPUTS_CONFIG_8c3103569671ea37_EOF'
- {"add_comment":{"allowed_repos":["github/gh-aw"],"hide_older_comments":true,"max":2},"add_labels":{"allowed":["smoke-copilot"],"allowed_repos":["github/gh-aw"]},"create_discussion":{"category":"announcements","close_older_discussions":true,"close_older_key":"smoke-copilot","expires":2,"fallback_to_issue":true,"labels":["ai-generated"],"max":1},"create_issue":{"close_older_issues":true,"close_older_key":"smoke-copilot","expires":2,"group":true,"labels":["automation","testing"],"max":1},"create_pull_request_review_comment":{"max":5,"side":"RIGHT"},"create_report_incomplete_issue":{},"dispatch_workflow":{"max":1,"workflow_files":{"haiku-printer":".yml"},"workflows":["haiku-printer"]},"missing_data":{},"missing_tool":{},"noop":{"max":1,"report-as-issue":"true"},"remove_labels":{"allowed":["smoke"]},"reply_to_pull_request_review_comment":{"max":5},"report_incomplete":{},"send-slack-message":{"description":"Send a message to Slack (stub for testing)","inputs":{"message":{"description":"The message to send","required":false,"type":"string"}},"output":"Slack message stub executed!"},"set_issue_type":{},"submit_pull_request_review":{"max":1}}
- GH_AW_SAFE_OUTPUTS_CONFIG_8c3103569671ea37_EOF
+ cat > ${RUNNER_TEMP}/gh-aw/safeoutputs/config.json << 'GH_AW_SAFE_OUTPUTS_CONFIG_37135a487e85aeac_EOF'
+ {"add_comment":{"allowed_repos":["github/gh-aw"],"hide_older_comments":true,"max":2},"add_labels":{"allowed":["smoke-copilot"],"allowed_repos":["github/gh-aw"]},"create_discussion":{"category":"announcements","close_older_discussions":true,"close_older_key":"smoke-copilot","expires":2,"fallback_to_issue":true,"labels":["ai-generated"],"max":1},"create_issue":{"close_older_issues":true,"close_older_key":"smoke-copilot","expires":2,"group":true,"labels":["automation","testing"],"max":1},"create_pull_request_review_comment":{"max":5,"side":"RIGHT"},"create_report_incomplete_issue":{},"dispatch_workflow":{"max":1,"workflow_files":{"haiku-printer":".yml"},"workflows":["haiku-printer"]},"missing_data":{},"missing_tool":{},"noop":{"max":1,"report-as-issue":"true"},"remove_labels":{"allowed":["smoke"]},"reply_to_pull_request_review_comment":{"max":5},"report_incomplete":{},"send-slack-message":{"description":"Send a message to Slack (stub for testing)","inputs":{"message":{"description":"The message to send","required":false,"type":"string"}},"output":"Slack message stub executed!"},"set_issue_type":{},"submit_pull_request_review":{"max":1},"upload_artifact":{"allow-skip-archive":true,"default-retention-days":1,"max-retention-days":1,"max-size-bytes":104857600,"max-uploads":1}}
+ GH_AW_SAFE_OUTPUTS_CONFIG_37135a487e85aeac_EOF
- name: Write Safe Outputs Tools
env:
GH_AW_TOOLS_META_JSON: |
@@ -978,7 +978,7 @@ jobs:
- name: Write MCP Scripts Config
run: |
mkdir -p ${RUNNER_TEMP}/gh-aw/mcp-scripts/logs
- cat > ${RUNNER_TEMP}/gh-aw/mcp-scripts/tools.json << 'GH_AW_MCP_SCRIPTS_TOOLS_7babc89e6d790778_EOF'
+ cat > ${RUNNER_TEMP}/gh-aw/mcp-scripts/tools.json << 'GH_AW_MCP_SCRIPTS_TOOLS_d58c0e40e52491a9_EOF'
{
"serverName": "mcpscripts",
"version": "1.0.0",
@@ -1094,8 +1094,8 @@ jobs:
}
]
}
- GH_AW_MCP_SCRIPTS_TOOLS_7babc89e6d790778_EOF
- cat > ${RUNNER_TEMP}/gh-aw/mcp-scripts/mcp-server.cjs << 'GH_AW_MCP_SCRIPTS_SERVER_ef1fbc7ce3eca295_EOF'
+ GH_AW_MCP_SCRIPTS_TOOLS_d58c0e40e52491a9_EOF
+ cat > ${RUNNER_TEMP}/gh-aw/mcp-scripts/mcp-server.cjs << 'GH_AW_MCP_SCRIPTS_SERVER_dd0c3af6b77b1bf9_EOF'
const path = require("path");
const { startHttpServer } = require("./mcp_scripts_mcp_server_http.cjs");
const configPath = path.join(__dirname, "tools.json");
@@ -1109,12 +1109,12 @@ jobs:
console.error("Failed to start mcp-scripts HTTP server:", error);
process.exit(1);
});
- GH_AW_MCP_SCRIPTS_SERVER_ef1fbc7ce3eca295_EOF
+ GH_AW_MCP_SCRIPTS_SERVER_dd0c3af6b77b1bf9_EOF
chmod +x ${RUNNER_TEMP}/gh-aw/mcp-scripts/mcp-server.cjs
- name: Write MCP Scripts Tool Files
run: |
- cat > ${RUNNER_TEMP}/gh-aw/mcp-scripts/gh.sh << 'GH_AW_MCP_SCRIPTS_SH_GH_5a6688685d632c08_EOF'
+ cat > ${RUNNER_TEMP}/gh-aw/mcp-scripts/gh.sh << 'GH_AW_MCP_SCRIPTS_SH_GH_413a2d9b16bce3b7_EOF'
#!/bin/bash
# Auto-generated mcp-script tool: gh
# Execute any gh CLI command. This tool is accessible as 'mcpscripts-gh'. Provide the full command after 'gh' (e.g., args: 'pr list --limit 5'). The tool will run: gh . Use single quotes ' for complex args to avoid shell interpretation issues.
@@ -1125,9 +1125,9 @@ jobs:
echo " token: ${GH_AW_GH_TOKEN:0:6}..."
GH_TOKEN="$GH_AW_GH_TOKEN" gh $INPUT_ARGS
- GH_AW_MCP_SCRIPTS_SH_GH_5a6688685d632c08_EOF
+ GH_AW_MCP_SCRIPTS_SH_GH_413a2d9b16bce3b7_EOF
chmod +x ${RUNNER_TEMP}/gh-aw/mcp-scripts/gh.sh
- cat > ${RUNNER_TEMP}/gh-aw/mcp-scripts/github-discussion-query.sh << 'GH_AW_MCP_SCRIPTS_SH_GITHUB-DISCUSSION-QUERY_acccc7340415fad4_EOF'
+ cat > ${RUNNER_TEMP}/gh-aw/mcp-scripts/github-discussion-query.sh << 'GH_AW_MCP_SCRIPTS_SH_GITHUB-DISCUSSION-QUERY_ecb08d56af922c60_EOF'
#!/bin/bash
# Auto-generated mcp-script tool: github-discussion-query
# Query GitHub discussions with jq filtering support. Without --jq, returns schema and data size info. Use --jq '.' to get all data, or specific jq expressions to filter.
@@ -1262,9 +1262,9 @@ jobs:
EOF
fi
- GH_AW_MCP_SCRIPTS_SH_GITHUB-DISCUSSION-QUERY_acccc7340415fad4_EOF
+ GH_AW_MCP_SCRIPTS_SH_GITHUB-DISCUSSION-QUERY_ecb08d56af922c60_EOF
chmod +x ${RUNNER_TEMP}/gh-aw/mcp-scripts/github-discussion-query.sh
- cat > ${RUNNER_TEMP}/gh-aw/mcp-scripts/github-issue-query.sh << 'GH_AW_MCP_SCRIPTS_SH_GITHUB-ISSUE-QUERY_a6eacbb65c40c0ed_EOF'
+ cat > ${RUNNER_TEMP}/gh-aw/mcp-scripts/github-issue-query.sh << 'GH_AW_MCP_SCRIPTS_SH_GITHUB-ISSUE-QUERY_b2c3240691c382a4_EOF'
#!/bin/bash
# Auto-generated mcp-script tool: github-issue-query
# Query GitHub issues with jq filtering support. Without --jq, returns schema and data size info. Use --jq '.' to get all data, or specific jq expressions to filter.
@@ -1343,9 +1343,9 @@ jobs:
fi
- GH_AW_MCP_SCRIPTS_SH_GITHUB-ISSUE-QUERY_a6eacbb65c40c0ed_EOF
+ GH_AW_MCP_SCRIPTS_SH_GITHUB-ISSUE-QUERY_b2c3240691c382a4_EOF
chmod +x ${RUNNER_TEMP}/gh-aw/mcp-scripts/github-issue-query.sh
- cat > ${RUNNER_TEMP}/gh-aw/mcp-scripts/github-pr-query.sh << 'GH_AW_MCP_SCRIPTS_SH_GITHUB-PR-QUERY_cba8eb127506e4a8_EOF'
+ cat > ${RUNNER_TEMP}/gh-aw/mcp-scripts/github-pr-query.sh << 'GH_AW_MCP_SCRIPTS_SH_GITHUB-PR-QUERY_5cd7ef183044e7f8_EOF'
#!/bin/bash
# Auto-generated mcp-script tool: github-pr-query
# Query GitHub pull requests with jq filtering support. Without --jq, returns schema and data size info. Use --jq '.' to get all data, or specific jq expressions to filter.
@@ -1430,7 +1430,7 @@ jobs:
fi
- GH_AW_MCP_SCRIPTS_SH_GITHUB-PR-QUERY_cba8eb127506e4a8_EOF
+ GH_AW_MCP_SCRIPTS_SH_GITHUB-PR-QUERY_5cd7ef183044e7f8_EOF
chmod +x ${RUNNER_TEMP}/gh-aw/mcp-scripts/github-pr-query.sh
- name: Generate MCP Scripts Server Config
@@ -1506,7 +1506,7 @@ jobs:
if [ -n "${OTEL_EXPORTER_OTLP_HEADERS:-}" ]; then
_GH_AW_OTLP_HEADERS_JSON=$(node -e 'const h=process.env["OTEL_EXPORTER_OTLP_HEADERS"]||"";const o={};h.split(",").forEach(function(p){const i=p.indexOf("=");if(i>0)o[p.slice(0,i).trim()]=p.slice(i+1).trim();});console.log(JSON.stringify(o));' 2>/dev/null || echo "{}")
fi
- cat << GH_AW_MCP_CONFIG_8d31e9e79e8b0709_EOF | bash ${RUNNER_TEMP}/gh-aw/actions/start_mcp_gateway.sh
+ cat << GH_AW_MCP_CONFIG_b2fa325b88dbf094_EOF | bash ${RUNNER_TEMP}/gh-aw/actions/start_mcp_gateway.sh
{
"mcpServers": {
"agenticworkflows": {
@@ -1632,7 +1632,7 @@ jobs:
}
}
}
- GH_AW_MCP_CONFIG_8d31e9e79e8b0709_EOF
+ GH_AW_MCP_CONFIG_b2fa325b88dbf094_EOF
- name: Download activation artifact
uses: actions/download-artifact@3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c # v8.0.1
with:
@@ -1829,6 +1829,15 @@ jobs:
with:
name: cache-memory
path: /tmp/gh-aw/cache-memory
+ # Upload safe-outputs upload-artifact staging for the upload_artifact job
+ - name: Upload Upload-Artifact Staging
+ if: always()
+ uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7
+ with:
+ name: safe-outputs-upload-artifacts
+ path: ${RUNNER_TEMP}/gh-aw/safeoutputs/upload-artifacts/
+ retention-days: 1
+ if-no-files-found: ignore
- name: Upload agent artifacts
if: always()
continue-on-error: true
@@ -1871,6 +1880,7 @@ jobs:
- safe_outputs
- send_slack_message
- update_cache_memory
+ - upload_artifact
if: always() && (needs.agent.result != 'skipped' || needs.activation.outputs.lockdown_check_failed == 'true')
runs-on: ubuntu-slim
permissions:
@@ -2001,7 +2011,7 @@ jobs:
GH_AW_AGENT_CONCLUSION: ${{ needs.agent.result }}
GH_AW_DETECTION_CONCLUSION: ${{ needs.detection.outputs.detection_conclusion }}
GH_AW_SAFE_OUTPUT_MESSAGES: "{\"footer\":\"\\u003e 📰 *BREAKING: Report filed by [{workflow_name}]({run_url})*{effective_tokens_suffix}{history_link}\",\"appendOnlyComments\":true,\"runStarted\":\"📰 BREAKING: [{workflow_name}]({run_url}) is now investigating this {event_type}. Sources say the story is developing...\",\"runSuccess\":\"📰 VERDICT: [{workflow_name}]({run_url}) has concluded. All systems operational. This is a developing story. 🎤\",\"runFailure\":\"📰 DEVELOPING STORY: [{workflow_name}]({run_url}) reports {status}. Our correspondents are investigating the incident...\"}"
- GH_AW_SAFE_OUTPUT_JOBS: "{\"send_slack_message\":\"\"}"
+ GH_AW_SAFE_OUTPUT_JOBS: "{\"send_slack_message\":\"\",\"upload_artifact\":\"\"}"
with:
github-token: ${{ secrets.GH_AW_GITHUB_TOKEN || secrets.GITHUB_TOKEN }}
script: |
@@ -2285,7 +2295,7 @@ jobs:
GITHUB_SERVER_URL: ${{ github.server_url }}
GITHUB_API_URL: ${{ github.api_url }}
GH_AW_SAFE_OUTPUT_JOBS: "{\"send_slack_message\":\"\"}"
- GH_AW_SAFE_OUTPUTS_HANDLER_CONFIG: "{\"add_comment\":{\"allowed_repos\":[\"github/gh-aw\"],\"hide_older_comments\":true,\"max\":2},\"add_labels\":{\"allowed\":[\"smoke-copilot\"],\"allowed_repos\":[\"github/gh-aw\"]},\"create_discussion\":{\"category\":\"announcements\",\"close_older_discussions\":true,\"close_older_key\":\"smoke-copilot\",\"expires\":2,\"fallback_to_issue\":true,\"labels\":[\"ai-generated\"],\"max\":1},\"create_issue\":{\"close_older_issues\":true,\"close_older_key\":\"smoke-copilot\",\"expires\":2,\"group\":true,\"labels\":[\"automation\",\"testing\"],\"max\":1},\"create_pull_request_review_comment\":{\"max\":5,\"side\":\"RIGHT\"},\"create_report_incomplete_issue\":{},\"dispatch_workflow\":{\"max\":1,\"workflow_files\":{\"haiku-printer\":\".yml\"},\"workflows\":[\"haiku-printer\"]},\"missing_data\":{},\"missing_tool\":{},\"noop\":{\"max\":1,\"report-as-issue\":\"true\"},\"remove_labels\":{\"allowed\":[\"smoke\"]},\"reply_to_pull_request_review_comment\":{\"max\":5},\"report_incomplete\":{},\"set_issue_type\":{},\"submit_pull_request_review\":{\"max\":1}}"
+ GH_AW_SAFE_OUTPUTS_HANDLER_CONFIG: "{\"add_comment\":{\"allowed_repos\":[\"github/gh-aw\"],\"hide_older_comments\":true,\"max\":2},\"add_labels\":{\"allowed\":[\"smoke-copilot\"],\"allowed_repos\":[\"github/gh-aw\"]},\"create_discussion\":{\"category\":\"announcements\",\"close_older_discussions\":true,\"close_older_key\":\"smoke-copilot\",\"expires\":2,\"fallback_to_issue\":true,\"labels\":[\"ai-generated\"],\"max\":1},\"create_issue\":{\"close_older_issues\":true,\"close_older_key\":\"smoke-copilot\",\"expires\":2,\"group\":true,\"labels\":[\"automation\",\"testing\"],\"max\":1},\"create_pull_request_review_comment\":{\"max\":5,\"side\":\"RIGHT\"},\"create_report_incomplete_issue\":{},\"dispatch_workflow\":{\"max\":1,\"workflow_files\":{\"haiku-printer\":\".yml\"},\"workflows\":[\"haiku-printer\"]},\"missing_data\":{},\"missing_tool\":{},\"noop\":{\"max\":1,\"report-as-issue\":\"true\"},\"remove_labels\":{\"allowed\":[\"smoke\"]},\"reply_to_pull_request_review_comment\":{\"max\":5},\"report_incomplete\":{},\"set_issue_type\":{},\"submit_pull_request_review\":{\"max\":1},\"upload_artifact\":{\"allow-skip-archive\":true,\"default-retention-days\":1,\"max-retention-days\":1,\"max-size-bytes\":104857600,\"max-uploads\":1}}"
with:
github-token: ${{ secrets.GH_AW_GITHUB_TOKEN || secrets.GITHUB_TOKEN }}
script: |
@@ -2390,3 +2400,89 @@ jobs:
key: memory-approved-a3cea483-${{ env.GH_AW_WORKFLOW_ID_SANITIZED }}-${{ github.run_id }}
path: /tmp/gh-aw/cache-memory
+ upload_artifact:
+ needs:
+ - activation
+ - agent
+ if: (!cancelled()) && needs.agent.result != 'skipped' && contains(needs.agent.outputs.output_types, 'upload_artifact')
+ runs-on: ubuntu-slim
+ permissions:
+ actions: write
+ timeout-minutes: 10
+ outputs:
+ artifact_count: ${{ steps.upload_artifacts.outputs.artifact_count }}
+ slot_0_tmp_id: ${{ steps.upload_artifacts.outputs.slot_0_tmp_id }}
+ steps:
+ - name: Checkout actions folder
+ uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
+ with:
+ repository: github/gh-aw
+ sparse-checkout: |
+ actions
+ persist-credentials: false
+ - name: Setup Scripts
+ id: setup
+ uses: ./actions/setup
+ with:
+ destination: ${{ runner.temp }}/gh-aw/actions
+ job-name: ${{ github.job }}
+ trace-id: ${{ needs.activation.outputs.setup-trace-id }}
+ - name: Download upload-artifact staging
+ continue-on-error: true
+ uses: actions/download-artifact@3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c # v8.0.1
+ with:
+ name: safe-outputs-upload-artifacts
+ path: ${RUNNER_TEMP}/gh-aw/safeoutputs/upload-artifacts/
+ - name: Download agent output artifact
+ id: download-agent-output
+ continue-on-error: true
+ uses: actions/download-artifact@3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c # v8.0.1
+ with:
+ name: agent
+ path: /tmp/gh-aw/
+ - name: Setup agent output environment variable
+ id: setup-agent-output-env
+ if: steps.download-agent-output.outcome == 'success'
+ run: |
+ mkdir -p /tmp/gh-aw/
+ find "/tmp/gh-aw/" -type f -print
+ echo "GH_AW_AGENT_OUTPUT=/tmp/gh-aw/agent_output.json" >> "$GITHUB_OUTPUT"
+ - name: Upload artifacts
+ id: upload_artifacts
+ uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
+ env:
+ GH_AW_AGENT_OUTPUT: ${{ steps.setup-agent-output-env.outputs.GH_AW_AGENT_OUTPUT }}
+ GH_AW_ARTIFACT_MAX_UPLOADS: 1
+ GH_AW_ARTIFACT_DEFAULT_RETENTION_DAYS: 1
+ GH_AW_ARTIFACT_MAX_RETENTION_DAYS: 1
+ GH_AW_ARTIFACT_MAX_SIZE_BYTES: 104857600
+ GH_AW_ARTIFACT_ALLOW_SKIP_ARCHIVE: "true"
+ GH_AW_WORKFLOW_NAME: "Smoke Copilot"
+ GH_AW_ENGINE_ID: "copilot"
+ GH_AW_ENGINE_MODEL: ${{ needs.agent.outputs.model }}
+ GH_AW_SAFE_OUTPUT_MESSAGES: "{\"footer\":\"\\u003e 📰 *BREAKING: Report filed by [{workflow_name}]({run_url})*{effective_tokens_suffix}{history_link}\",\"appendOnlyComments\":true,\"runStarted\":\"📰 BREAKING: [{workflow_name}]({run_url}) is now investigating this {event_type}. Sources say the story is developing...\",\"runSuccess\":\"📰 VERDICT: [{workflow_name}]({run_url}) has concluded. All systems operational. This is a developing story. 🎤\",\"runFailure\":\"📰 DEVELOPING STORY: [{workflow_name}]({run_url}) reports {status}. Our correspondents are investigating the incident...\"}"
+ with:
+ github-token: ${{ secrets.GH_AW_GITHUB_TOKEN || secrets.GITHUB_TOKEN }}
+ script: |
+ const { setupGlobals } = require('${{ runner.temp }}/gh-aw/actions/setup_globals.cjs');
+ setupGlobals(core, github, context, exec, io);
+ const { main } = require('${{ runner.temp }}/gh-aw/actions/upload_artifact.cjs');
+ await main();
+ - name: Upload artifact slot 0
+ if: steps.upload_artifacts.outputs.slot_0_enabled == 'true'
+ uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7
+ with:
+ name: ${{ steps.upload_artifacts.outputs.slot_0_name }}
+ path: ${RUNNER_TEMP}/gh-aw/upload-artifacts/slot_0/
+ retention-days: ${{ steps.upload_artifacts.outputs.slot_0_retention_days }}
+ if-no-files-found: ignore
+ - name: Restore actions folder
+ if: always()
+ uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
+ with:
+ repository: github/gh-aw
+ sparse-checkout: |
+ actions/setup
+ sparse-checkout-cone-mode: true
+ persist-credentials: false
+
diff --git a/.github/workflows/smoke-copilot.md b/.github/workflows/smoke-copilot.md
index 8c6087573ed..554c219011b 100644
--- a/.github/workflows/smoke-copilot.md
+++ b/.github/workflows/smoke-copilot.md
@@ -49,6 +49,12 @@ runtimes:
version: "1.25"
safe-outputs:
allowed-domains: [default-safe-outputs]
+ upload-artifact:
+ max-uploads: 1
+ default-retention-days: 1
+ max-retention-days: 1
+ allow:
+ skip-archive: true
add-comment:
allowed-repos: ["github/gh-aw"]
hide-older-comments: true
@@ -140,9 +146,10 @@ strict: false
- Extract the discussion number from the result (e.g., if the result is `{"number": 123, "title": "...", ...}`, extract 123)
- Use the `add_comment` tool with `discussion_number: ` to add a fun, playful comment stating that the smoke test agent was here
9. **Build gh-aw**: Run `GOCACHE=/tmp/go-cache GOMODCACHE=/tmp/go-mod make build` to verify the agent can successfully build the gh-aw project (both caches must be set to /tmp because the default cache locations are not writable). If the command fails, mark this test as ❌ and report the failure.
-10. **Discussion Creation Testing**: Use the `create_discussion` safe-output tool to create a discussion in the announcements category titled "copilot was here" with the label "ai-generated"
-11. **Workflow Dispatch Testing**: Use the `dispatch_workflow` safe output tool to trigger the `haiku-printer` workflow with a haiku as the message input. Create an original, creative haiku about software testing or automation.
-12. **PR Review Testing**: Review the diff of the current pull request. Leave 1-2 inline `create_pull_request_review_comment` comments on specific lines, then call `submit_pull_request_review` with a brief body summarizing your review and event `COMMENT`. To test `reply_to_pull_request_review_comment`: use the `pull_request_read` tool (with `method: "get_review_comments"` and `pullNumber: ${{ github.event.pull_request.number }}`) to fetch the PR's existing review comments, then reply to the most recent one using `reply_to_pull_request_review_comment` with its actual numeric `id` as the `comment_id`. Note: `create_pull_request_review_comment` does not return a `comment_id` — you must fetch existing comment IDs from the GitHub API. If the PR has no existing review comments, skip the reply sub-test.
+10. **Upload gh-aw binary as artifact**: After a successful build, use bash to copy the `./gh-aw` binary into the staging directory (`mkdir -p $RUNNER_TEMP/gh-aw/safeoutputs/upload-artifacts && cp ./gh-aw $RUNNER_TEMP/gh-aw/safeoutputs/upload-artifacts/gh-aw`), then call the `upload_artifact` safe-output tool with `path: "gh-aw"`, `retention_days: 1`, and `skip_archive: true`. Mark this test as ❌ if the build in step 9 failed.
+11. **Discussion Creation Testing**: Use the `create_discussion` safe-output tool to create a discussion in the announcements category titled "copilot was here" with the label "ai-generated"
+12. **Workflow Dispatch Testing**: Use the `dispatch_workflow` safe output tool to trigger the `haiku-printer` workflow with a haiku as the message input. Create an original, creative haiku about software testing or automation.
+13. **PR Review Testing**: Review the diff of the current pull request. Leave 1-2 inline `create_pull_request_review_comment` comments on specific lines, then call `submit_pull_request_review` with a brief body summarizing your review and event `COMMENT`. To test `reply_to_pull_request_review_comment`: use the `pull_request_read` tool (with `method: "get_review_comments"` and `pullNumber: ${{ github.event.pull_request.number }}`) to fetch the PR's existing review comments, then reply to the most recent one using `reply_to_pull_request_review_comment` with its actual numeric `id` as the `comment_id`. Note: `create_pull_request_review_comment` does not return a `comment_id` — you must fetch existing comment IDs from the GitHub API. If the PR has no existing review comments, skip the reply sub-test.
## Output
diff --git a/pkg/workflow/publish_artifacts.go b/pkg/workflow/publish_artifacts.go
index a565b3c2c3a..62e42b73238 100644
--- a/pkg/workflow/publish_artifacts.go
+++ b/pkg/workflow/publish_artifacts.go
@@ -219,13 +219,11 @@ func (c *Compiler) buildUploadArtifactJob(data *WorkflowData, mainJobName string
preSteps = append(preSteps, c.generateSetupStep(setupActionRef, SetupActionDestination, false, publishTraceID)...)
}
- // Download agent output artifact (to read upload_artifact requests).
+ // Download the staging artifact that contains the files staged by the model.
+ // The agent output artifact (carrying upload_artifact NDJSON records) is NOT added here
+ // because buildCustomActionStep / buildGitHubScriptStep already prepends that step
+ // automatically to every safe-output job.
artifactPrefix := artifactPrefixExprForAgentDownstreamJob(data)
- preSteps = append(preSteps,
- buildAgentOutputDownloadSteps(artifactPrefix)...,
- )
-
- // Download the staging artifact that holds the files the model wants to upload.
stagingArtifactName := artifactPrefix + SafeOutputsUploadArtifactStagingArtifactName
preSteps = append(preSteps,
" - name: Download upload-artifact staging\n",
@@ -316,7 +314,7 @@ func (c *Compiler) buildUploadArtifactJob(data *WorkflowData, mainJobName string
MainJobName: mainJobName,
CustomEnvVars: customEnvVars,
Script: "",
- Permissions: NewPermissions(),
+ Permissions: NewPermissionsActionsWrite(),
Outputs: outputs,
Condition: jobCondition,
PreSteps: preSteps,
From bedab80ef099e60d748066c996ad90162f81f97d Mon Sep 17 00:00:00 2001
From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com>
Date: Tue, 7 Apr 2026 04:21:55 +0000
Subject: [PATCH 08/23] fix: remove actions:write from upload_artifact job -
uses ACTIONS_RUNTIME_TOKEN instead
Agent-Logs-Url: https://github.com/github/gh-aw/sessions/4404709a-1bdd-4947-b526-9edf180e24cc
Co-authored-by: pelikhan <4175913+pelikhan@users.noreply.github.com>
---
.github/workflows/smoke-copilot.lock.yml | 2 --
pkg/workflow/publish_artifacts.go | 2 +-
2 files changed, 1 insertion(+), 3 deletions(-)
diff --git a/.github/workflows/smoke-copilot.lock.yml b/.github/workflows/smoke-copilot.lock.yml
index 5d75913a2e5..753f03400b1 100644
--- a/.github/workflows/smoke-copilot.lock.yml
+++ b/.github/workflows/smoke-copilot.lock.yml
@@ -2406,8 +2406,6 @@ jobs:
- agent
if: (!cancelled()) && needs.agent.result != 'skipped' && contains(needs.agent.outputs.output_types, 'upload_artifact')
runs-on: ubuntu-slim
- permissions:
- actions: write
timeout-minutes: 10
outputs:
artifact_count: ${{ steps.upload_artifacts.outputs.artifact_count }}
diff --git a/pkg/workflow/publish_artifacts.go b/pkg/workflow/publish_artifacts.go
index 62e42b73238..2a4dd21e5a1 100644
--- a/pkg/workflow/publish_artifacts.go
+++ b/pkg/workflow/publish_artifacts.go
@@ -314,7 +314,7 @@ func (c *Compiler) buildUploadArtifactJob(data *WorkflowData, mainJobName string
MainJobName: mainJobName,
CustomEnvVars: customEnvVars,
Script: "",
- Permissions: NewPermissionsActionsWrite(),
+ Permissions: NewPermissions(),
Outputs: outputs,
Condition: jobCondition,
PreSteps: preSteps,
From fdeeba3ed4ebd87ea70498db1fe60912194956a7 Mon Sep 17 00:00:00 2001
From: "github-actions[bot]"
<41898282+github-actions[bot]@users.noreply.github.com>
Date: Tue, 7 Apr 2026 04:35:15 +0000
Subject: [PATCH 09/23] Add changeset
---
.changeset/patch-add-upload-artifact-safe-output.md | 5 +++++
1 file changed, 5 insertions(+)
create mode 100644 .changeset/patch-add-upload-artifact-safe-output.md
diff --git a/.changeset/patch-add-upload-artifact-safe-output.md b/.changeset/patch-add-upload-artifact-safe-output.md
new file mode 100644
index 00000000000..6159e700eeb
--- /dev/null
+++ b/.changeset/patch-add-upload-artifact-safe-output.md
@@ -0,0 +1,5 @@
+---
+"gh-aw": patch
+---
+
+Add a new `upload-artifact` safe output type for run-scoped GitHub Actions artifact uploads, including frontmatter configuration, validation, and runtime handling that returns temporary artifact IDs for downstream resolution.
From 4efbf5baa8432002546e4cb975b0d46996fb3069 Mon Sep 17 00:00:00 2001
From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com>
Date: Tue, 7 Apr 2026 04:53:25 +0000
Subject: [PATCH 10/23] fix: use runner.temp expression syntax in
upload-artifact path inputs
Agent-Logs-Url: https://github.com/github/gh-aw/sessions/7e04c8c2-0529-439d-b4cd-b385b1a309c8
Co-authored-by: pelikhan <4175913+pelikhan@users.noreply.github.com>
---
.github/workflows/smoke-copilot.lock.yml | 6 +++---
pkg/workflow/publish_artifacts.go | 18 +++++++++++++++---
pkg/workflow/publish_artifacts_test.go | 2 +-
3 files changed, 19 insertions(+), 7 deletions(-)
diff --git a/.github/workflows/smoke-copilot.lock.yml b/.github/workflows/smoke-copilot.lock.yml
index 753f03400b1..ab82c951b89 100644
--- a/.github/workflows/smoke-copilot.lock.yml
+++ b/.github/workflows/smoke-copilot.lock.yml
@@ -1835,7 +1835,7 @@ jobs:
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7
with:
name: safe-outputs-upload-artifacts
- path: ${RUNNER_TEMP}/gh-aw/safeoutputs/upload-artifacts/
+ path: ${{ runner.temp }}/gh-aw/safeoutputs/upload-artifacts/
retention-days: 1
if-no-files-found: ignore
- name: Upload agent artifacts
@@ -2430,7 +2430,7 @@ jobs:
uses: actions/download-artifact@3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c # v8.0.1
with:
name: safe-outputs-upload-artifacts
- path: ${RUNNER_TEMP}/gh-aw/safeoutputs/upload-artifacts/
+ path: ${{ runner.temp }}/gh-aw/safeoutputs/upload-artifacts/
- name: Download agent output artifact
id: download-agent-output
continue-on-error: true
@@ -2471,7 +2471,7 @@ jobs:
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7
with:
name: ${{ steps.upload_artifacts.outputs.slot_0_name }}
- path: ${RUNNER_TEMP}/gh-aw/upload-artifacts/slot_0/
+ path: ${{ runner.temp }}/gh-aw/upload-artifacts/slot_0/
retention-days: ${{ steps.upload_artifacts.outputs.slot_0_retention_days }}
if-no-files-found: ignore
- name: Restore actions folder
diff --git a/pkg/workflow/publish_artifacts.go b/pkg/workflow/publish_artifacts.go
index 2a4dd21e5a1..d9427882f44 100644
--- a/pkg/workflow/publish_artifacts.go
+++ b/pkg/workflow/publish_artifacts.go
@@ -25,11 +25,23 @@ const defaultArtifactMaxRetentionDays = 30
const defaultArtifactMaxSizeBytes int64 = 104857600
// artifactStagingDir is the path where the model stages files to be uploaded as artifacts.
+// Use the shell-variable form only inside `run:` blocks; for `with: path:` inputs use
+// artifactStagingDirExpr which uses the GitHub Actions expression syntax.
const artifactStagingDir = "${RUNNER_TEMP}/gh-aw/safeoutputs/upload-artifacts/"
+// artifactStagingDirExpr is the GitHub Actions expression form of artifactStagingDir.
+// `actions/upload-artifact` and `actions/download-artifact` do not expand shell variables
+// in their `path:` inputs, so we must use ${{ runner.temp }} here.
+const artifactStagingDirExpr = "${{ runner.temp }}/gh-aw/safeoutputs/upload-artifacts/"
+
// artifactSlotDir is the per-slot directory used by the handler to organise staged files.
+// Use the shell-variable form only inside `run:` blocks; for `with: path:` inputs use
+// artifactSlotDirExpr which uses the GitHub Actions expression syntax.
const artifactSlotDir = "${RUNNER_TEMP}/gh-aw/upload-artifacts/"
+// artifactSlotDirExpr is the GitHub Actions expression form of artifactSlotDir.
+const artifactSlotDirExpr = "${{ runner.temp }}/gh-aw/upload-artifacts/"
+
// SafeOutputsUploadArtifactStagingArtifactName is the artifact that carries the staging directory
// from the main agent job to the upload_artifact job.
const SafeOutputsUploadArtifactStagingArtifactName = "safe-outputs-upload-artifacts"
@@ -231,7 +243,7 @@ func (c *Compiler) buildUploadArtifactJob(data *WorkflowData, mainJobName string
fmt.Sprintf(" uses: %s\n", GetActionPin("actions/download-artifact")),
" with:\n",
fmt.Sprintf(" name: %s\n", stagingArtifactName),
- fmt.Sprintf(" path: %s\n", artifactStagingDir),
+ fmt.Sprintf(" path: %s\n", artifactStagingDirExpr),
)
// Build custom environment variables consumed by upload_artifact.cjs.
@@ -276,7 +288,7 @@ func (c *Compiler) buildUploadArtifactJob(data *WorkflowData, mainJobName string
// the Nth upload_artifact request was successfully validated and staged.
var postSteps []string
for i := range cfg.MaxUploads {
- slotDir := fmt.Sprintf("%sslot_%d/", artifactSlotDir, i)
+ slotDir := fmt.Sprintf("%sslot_%d/", artifactSlotDirExpr, i)
postSteps = append(postSteps,
fmt.Sprintf(" - name: Upload artifact slot %d\n", i),
fmt.Sprintf(" if: steps.upload_artifacts.outputs.slot_%d_enabled == 'true'\n", i),
@@ -342,7 +354,7 @@ func generateSafeOutputsArtifactStagingUpload(builder *strings.Builder, data *Wo
fmt.Fprintf(builder, " uses: %s\n", GetActionPin("actions/upload-artifact"))
builder.WriteString(" with:\n")
fmt.Fprintf(builder, " name: %s%s\n", prefix, SafeOutputsUploadArtifactStagingArtifactName)
- fmt.Fprintf(builder, " path: %s\n", artifactStagingDir)
+ fmt.Fprintf(builder, " path: %s\n", artifactStagingDirExpr)
builder.WriteString(" retention-days: 1\n")
builder.WriteString(" if-no-files-found: ignore\n")
}
diff --git a/pkg/workflow/publish_artifacts_test.go b/pkg/workflow/publish_artifacts_test.go
index 459fc3a8649..7642075d9a8 100644
--- a/pkg/workflow/publish_artifacts_test.go
+++ b/pkg/workflow/publish_artifacts_test.go
@@ -274,7 +274,7 @@ func TestGenerateSafeOutputsArtifactStagingUpload(t *testing.T) {
generateSafeOutputsArtifactStagingUpload(&b, data)
result := b.String()
assert.Contains(t, result, "safe-outputs-upload-artifacts", "should reference staging artifact name")
- assert.Contains(t, result, artifactStagingDir, "should reference staging directory")
+ assert.Contains(t, result, artifactStagingDirExpr, "should reference staging directory")
assert.Contains(t, result, "if: always()", "should have always() condition")
})
From d577e59111aac801a8739fd1e082be0d8a283b82 Mon Sep 17 00:00:00 2001
From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com>
Date: Tue, 7 Apr 2026 05:39:02 +0000
Subject: [PATCH 11/23] feat: handle upload_artifact inline in safe_outputs
handler loop instead of standalone job
Agent-Logs-Url: https://github.com/github/gh-aw/sessions/ce9744d6-4d2c-4fce-b5c8-19ddd4452fe7
Co-authored-by: pelikhan <4175913+pelikhan@users.noreply.github.com>
---
.github/workflows/smoke-copilot.lock.yml | 103 ++-----
.../setup/js/safe_output_handler_manager.cjs | 5 +-
actions/setup/js/upload_artifact.cjs | 219 +++++++--------
actions/setup/js/upload_artifact.test.cjs | 253 +++++++-----------
pkg/workflow/compiler_safe_output_jobs.go | 18 --
pkg/workflow/compiler_safe_outputs_config.go | 8 +
pkg/workflow/compiler_safe_outputs_job.go | 51 +++-
pkg/workflow/publish_artifacts.go | 152 +----------
pkg/workflow/publish_artifacts_test.go | 61 -----
9 files changed, 276 insertions(+), 594 deletions(-)
diff --git a/.github/workflows/smoke-copilot.lock.yml b/.github/workflows/smoke-copilot.lock.yml
index ab82c951b89..f8dc552f423 100644
--- a/.github/workflows/smoke-copilot.lock.yml
+++ b/.github/workflows/smoke-copilot.lock.yml
@@ -1880,7 +1880,6 @@ jobs:
- safe_outputs
- send_slack_message
- update_cache_memory
- - upload_artifact
if: always() && (needs.agent.result != 'skipped' || needs.activation.outputs.lockdown_check_failed == 'true')
runs-on: ubuntu-slim
permissions:
@@ -2011,7 +2010,7 @@ jobs:
GH_AW_AGENT_CONCLUSION: ${{ needs.agent.result }}
GH_AW_DETECTION_CONCLUSION: ${{ needs.detection.outputs.detection_conclusion }}
GH_AW_SAFE_OUTPUT_MESSAGES: "{\"footer\":\"\\u003e 📰 *BREAKING: Report filed by [{workflow_name}]({run_url})*{effective_tokens_suffix}{history_link}\",\"appendOnlyComments\":true,\"runStarted\":\"📰 BREAKING: [{workflow_name}]({run_url}) is now investigating this {event_type}. Sources say the story is developing...\",\"runSuccess\":\"📰 VERDICT: [{workflow_name}]({run_url}) has concluded. All systems operational. This is a developing story. 🎤\",\"runFailure\":\"📰 DEVELOPING STORY: [{workflow_name}]({run_url}) reports {status}. Our correspondents are investigating the incident...\"}"
- GH_AW_SAFE_OUTPUT_JOBS: "{\"send_slack_message\":\"\",\"upload_artifact\":\"\"}"
+ GH_AW_SAFE_OUTPUT_JOBS: "{\"send_slack_message\":\"\"}"
with:
github-token: ${{ secrets.GH_AW_GITHUB_TOKEN || secrets.GITHUB_TOKEN }}
script: |
@@ -2246,6 +2245,8 @@ jobs:
created_issue_url: ${{ steps.process_safe_outputs.outputs.created_issue_url }}
process_safe_outputs_processed_count: ${{ steps.process_safe_outputs.outputs.processed_count }}
process_safe_outputs_temporary_id_map: ${{ steps.process_safe_outputs.outputs.temporary_id_map }}
+ upload_artifact_count: ${{ steps.process_safe_outputs.outputs.upload_artifact_count }}
+ upload_artifact_slot_0_tmp_id: ${{ steps.process_safe_outputs.outputs.slot_0_tmp_id }}
steps:
- name: Checkout actions folder
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
@@ -2286,6 +2287,12 @@ jobs:
GH_HOST="${GITHUB_SERVER_URL#https://}"
GH_HOST="${GH_HOST#http://}"
echo "GH_HOST=${GH_HOST}" >> "$GITHUB_ENV"
+ - name: Download upload-artifact staging
+ continue-on-error: true
+ uses: actions/download-artifact@3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c # v8.0.1
+ with:
+ name: safe-outputs-upload-artifacts
+ path: ${{ runner.temp }}/gh-aw/safeoutputs/upload-artifacts/
- name: Process Safe Outputs
id: process_safe_outputs
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
@@ -2303,6 +2310,14 @@ jobs:
setupGlobals(core, github, context, exec, io);
const { main } = require('${{ runner.temp }}/gh-aw/actions/safe_output_handler_manager.cjs');
await main();
+ - name: Upload artifact slot 0
+ if: steps.process_safe_outputs.outputs.slot_0_enabled == 'true'
+ uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7
+ with:
+ name: ${{ steps.process_safe_outputs.outputs.slot_0_name }}
+ path: ${{ runner.temp }}/gh-aw/upload-artifacts/slot_0/
+ retention-days: ${{ steps.process_safe_outputs.outputs.slot_0_retention_days }}
+ if-no-files-found: ignore
- name: Upload Safe Output Items
if: always()
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7
@@ -2400,87 +2415,3 @@ jobs:
key: memory-approved-a3cea483-${{ env.GH_AW_WORKFLOW_ID_SANITIZED }}-${{ github.run_id }}
path: /tmp/gh-aw/cache-memory
- upload_artifact:
- needs:
- - activation
- - agent
- if: (!cancelled()) && needs.agent.result != 'skipped' && contains(needs.agent.outputs.output_types, 'upload_artifact')
- runs-on: ubuntu-slim
- timeout-minutes: 10
- outputs:
- artifact_count: ${{ steps.upload_artifacts.outputs.artifact_count }}
- slot_0_tmp_id: ${{ steps.upload_artifacts.outputs.slot_0_tmp_id }}
- steps:
- - name: Checkout actions folder
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
- with:
- repository: github/gh-aw
- sparse-checkout: |
- actions
- persist-credentials: false
- - name: Setup Scripts
- id: setup
- uses: ./actions/setup
- with:
- destination: ${{ runner.temp }}/gh-aw/actions
- job-name: ${{ github.job }}
- trace-id: ${{ needs.activation.outputs.setup-trace-id }}
- - name: Download upload-artifact staging
- continue-on-error: true
- uses: actions/download-artifact@3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c # v8.0.1
- with:
- name: safe-outputs-upload-artifacts
- path: ${{ runner.temp }}/gh-aw/safeoutputs/upload-artifacts/
- - name: Download agent output artifact
- id: download-agent-output
- continue-on-error: true
- uses: actions/download-artifact@3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c # v8.0.1
- with:
- name: agent
- path: /tmp/gh-aw/
- - name: Setup agent output environment variable
- id: setup-agent-output-env
- if: steps.download-agent-output.outcome == 'success'
- run: |
- mkdir -p /tmp/gh-aw/
- find "/tmp/gh-aw/" -type f -print
- echo "GH_AW_AGENT_OUTPUT=/tmp/gh-aw/agent_output.json" >> "$GITHUB_OUTPUT"
- - name: Upload artifacts
- id: upload_artifacts
- uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
- env:
- GH_AW_AGENT_OUTPUT: ${{ steps.setup-agent-output-env.outputs.GH_AW_AGENT_OUTPUT }}
- GH_AW_ARTIFACT_MAX_UPLOADS: 1
- GH_AW_ARTIFACT_DEFAULT_RETENTION_DAYS: 1
- GH_AW_ARTIFACT_MAX_RETENTION_DAYS: 1
- GH_AW_ARTIFACT_MAX_SIZE_BYTES: 104857600
- GH_AW_ARTIFACT_ALLOW_SKIP_ARCHIVE: "true"
- GH_AW_WORKFLOW_NAME: "Smoke Copilot"
- GH_AW_ENGINE_ID: "copilot"
- GH_AW_ENGINE_MODEL: ${{ needs.agent.outputs.model }}
- GH_AW_SAFE_OUTPUT_MESSAGES: "{\"footer\":\"\\u003e 📰 *BREAKING: Report filed by [{workflow_name}]({run_url})*{effective_tokens_suffix}{history_link}\",\"appendOnlyComments\":true,\"runStarted\":\"📰 BREAKING: [{workflow_name}]({run_url}) is now investigating this {event_type}. Sources say the story is developing...\",\"runSuccess\":\"📰 VERDICT: [{workflow_name}]({run_url}) has concluded. All systems operational. This is a developing story. 🎤\",\"runFailure\":\"📰 DEVELOPING STORY: [{workflow_name}]({run_url}) reports {status}. Our correspondents are investigating the incident...\"}"
- with:
- github-token: ${{ secrets.GH_AW_GITHUB_TOKEN || secrets.GITHUB_TOKEN }}
- script: |
- const { setupGlobals } = require('${{ runner.temp }}/gh-aw/actions/setup_globals.cjs');
- setupGlobals(core, github, context, exec, io);
- const { main } = require('${{ runner.temp }}/gh-aw/actions/upload_artifact.cjs');
- await main();
- - name: Upload artifact slot 0
- if: steps.upload_artifacts.outputs.slot_0_enabled == 'true'
- uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7
- with:
- name: ${{ steps.upload_artifacts.outputs.slot_0_name }}
- path: ${{ runner.temp }}/gh-aw/upload-artifacts/slot_0/
- retention-days: ${{ steps.upload_artifacts.outputs.slot_0_retention_days }}
- if-no-files-found: ignore
- - name: Restore actions folder
- if: always()
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
- with:
- repository: github/gh-aw
- sparse-checkout: |
- actions/setup
- sparse-checkout-cone-mode: true
- persist-credentials: false
-
diff --git a/actions/setup/js/safe_output_handler_manager.cjs b/actions/setup/js/safe_output_handler_manager.cjs
index bc11c8758aa..2600089c7b0 100644
--- a/actions/setup/js/safe_output_handler_manager.cjs
+++ b/actions/setup/js/safe_output_handler_manager.cjs
@@ -74,16 +74,17 @@ const HANDLER_MAP = {
create_project: "./create_project.cjs",
create_project_status_update: "./create_project_status_update.cjs",
update_project: "./update_project.cjs",
+ upload_artifact: "./upload_artifact.cjs",
};
/**
* Message types handled by standalone steps (not through the handler manager)
* These types should not trigger warnings when skipped by the handler manager
*
- * Standalone types: upload_asset, upload_artifact, noop
+ * Standalone types: upload_asset, noop
* - Have dedicated processing steps with specialized logic
*/
-const STANDALONE_STEP_TYPES = new Set(["upload_asset", "upload_artifact", "noop"]);
+const STANDALONE_STEP_TYPES = new Set(["upload_asset", "noop"]);
/**
* Code-push safe output types that must succeed before remaining outputs are processed.
diff --git a/actions/setup/js/upload_artifact.cjs b/actions/setup/js/upload_artifact.cjs
index 2ad264b17e3..440769b9ede 100644
--- a/actions/setup/js/upload_artifact.cjs
+++ b/actions/setup/js/upload_artifact.cjs
@@ -8,36 +8,35 @@
* safe output tool. The model must have already copied the files it wants to upload to
* ${RUNNER_TEMP}/gh-aw/safeoutputs/upload-artifacts/ before calling the tool.
*
- * This handler:
- * 1. Reads upload_artifact records from agent output.
- * 2. Validates each request against the workflow's policy configuration.
- * 3. Resolves the requested files (path or filter-based) from the staging directory.
- * 4. Copies approved files into per-slot directories under ${RUNNER_TEMP}/gh-aw/upload-artifacts/slot_N/.
- * 5. Sets step outputs so the wrapping job's actions/upload-artifact steps can run conditionally.
- * 6. Generates a temporary artifact ID for each slot.
+ * This handler follows the per-message handler pattern used by the safe_outputs handler loop.
+ * main(config) returns a per-message handler function that:
+ * 1. Validates the request against the workflow's policy configuration.
+ * 2. Resolves the requested files (path or filter-based) from the staging directory.
+ * 3. Copies approved files into per-slot directories under ${RUNNER_TEMP}/gh-aw/upload-artifacts/slot_N/.
+ * 4. Sets step outputs (slot_N_enabled, slot_N_name, etc.) so the wrapping job's
+ * actions/upload-artifact steps can run conditionally.
+ * 5. Generates a temporary artifact ID for each slot.
*
- * Environment variables consumed (set by the Go job builder):
- * GH_AW_ARTIFACT_MAX_UPLOADS - Max number of upload_artifact calls allowed
- * GH_AW_ARTIFACT_DEFAULT_RETENTION_DAYS - Default retention period
- * GH_AW_ARTIFACT_MAX_RETENTION_DAYS - Maximum retention cap
- * GH_AW_ARTIFACT_MAX_SIZE_BYTES - Maximum total bytes per upload
- * GH_AW_ARTIFACT_ALLOWED_PATHS - JSON array of allowed path patterns
- * GH_AW_ARTIFACT_ALLOW_SKIP_ARCHIVE - "true" if skip_archive is permitted
- * GH_AW_ARTIFACT_DEFAULT_SKIP_ARCHIVE - "true" if skip_archive defaults to true
- * GH_AW_ARTIFACT_DEFAULT_IF_NO_FILES - "error" or "ignore"
- * GH_AW_ARTIFACT_FILTERS_INCLUDE - JSON array of default include patterns
- * GH_AW_ARTIFACT_FILTERS_EXCLUDE - JSON array of default exclude patterns
- * GH_AW_AGENT_OUTPUT - Path to agent output file
- * GH_AW_SAFE_OUTPUTS_STAGED - "true" for staged/dry-run mode
+ * Configuration keys (passed via config parameter from handler manager):
+ * max-uploads - Max number of upload_artifact calls allowed (default: 1)
+ * default-retention-days - Default retention period (default: 7)
+ * max-retention-days - Maximum retention cap (default: 30)
+ * max-size-bytes - Maximum total bytes per upload (default: 100 MB)
+ * allowed-paths - Array of allowed path glob patterns
+ * allow-skip-archive - true if skip_archive is permitted
+ * default-skip-archive - true if skip_archive defaults to true
+ * default-if-no-files - "error" or "ignore" (default: "error")
+ * filters-include - Array of default include glob patterns
+ * filters-exclude - Array of default exclude glob patterns
+ * staged - true for staged/dry-run mode
*/
const fs = require("fs");
const path = require("path");
const crypto = require("crypto");
-const { loadAgentOutput } = require("./load_agent_output.cjs");
const { getErrorMessage } = require("./error_helpers.cjs");
const { globPatternToRegex } = require("./glob_pattern_helpers.cjs");
-const { ERR_CONFIG, ERR_SYSTEM, ERR_VALIDATION } = require("./error_codes.cjs");
+const { ERR_VALIDATION } = require("./error_codes.cjs");
/** Staging directory where the model places files to be uploaded. */
const STAGING_DIR = `${process.env.RUNNER_TEMP}/gh-aw/safeoutputs/upload-artifacts/`;
@@ -65,21 +64,6 @@ function generateTemporaryArtifactId() {
return id;
}
-/**
- * Parse a JSON array from an environment variable, returning an empty array on failure.
- * @param {string|undefined} envVar
- * @returns {string[]}
- */
-function parseJsonArrayEnv(envVar) {
- if (!envVar) return [];
- try {
- const parsed = JSON.parse(envVar);
- return Array.isArray(parsed) ? parsed.filter(v => typeof v === "string") : [];
- } catch {
- return [];
- }
-}
-
/**
* Check whether a relative path matches any of the provided glob patterns.
* @param {string} relPath - Path relative to the staging root
@@ -281,105 +265,103 @@ function stageFilesToSlot(files, slotDir) {
}
}
-async function main() {
- const isStaged = process.env.GH_AW_SAFE_OUTPUTS_STAGED === "true";
-
- // Load policy configuration from environment variables.
- const maxUploads = parseInt(process.env.GH_AW_ARTIFACT_MAX_UPLOADS || "1", 10) || 1;
- const defaultRetentionDays = parseInt(process.env.GH_AW_ARTIFACT_DEFAULT_RETENTION_DAYS || "7", 10) || 7;
- const maxRetentionDays = parseInt(process.env.GH_AW_ARTIFACT_MAX_RETENTION_DAYS || "30", 10) || 30;
- const maxSizeBytes = parseInt(process.env.GH_AW_ARTIFACT_MAX_SIZE_BYTES || "104857600", 10) || 104857600;
- const allowSkipArchive = process.env.GH_AW_ARTIFACT_ALLOW_SKIP_ARCHIVE === "true";
- const defaultSkipArchive = process.env.GH_AW_ARTIFACT_DEFAULT_SKIP_ARCHIVE === "true";
- const defaultIfNoFiles = process.env.GH_AW_ARTIFACT_DEFAULT_IF_NO_FILES || "error";
- const allowedPaths = parseJsonArrayEnv(process.env.GH_AW_ARTIFACT_ALLOWED_PATHS);
- const filtersInclude = parseJsonArrayEnv(process.env.GH_AW_ARTIFACT_FILTERS_INCLUDE);
- const filtersExclude = parseJsonArrayEnv(process.env.GH_AW_ARTIFACT_FILTERS_EXCLUDE);
+/**
+ * Main handler factory for upload_artifact.
+ * Returns a per-message handler function that processes a single upload_artifact request.
+ *
+ * @param {Object} config - Handler configuration from the safe outputs config
+ * @returns {Promise} Per-message handler function
+ */
+async function main(config = {}) {
+ const maxUploads = typeof config["max-uploads"] === "number" ? config["max-uploads"] : 1;
+ const defaultRetentionDays = typeof config["default-retention-days"] === "number" ? config["default-retention-days"] : 7;
+ const maxRetentionDays = typeof config["max-retention-days"] === "number" ? config["max-retention-days"] : 30;
+ const maxSizeBytes = typeof config["max-size-bytes"] === "number" ? config["max-size-bytes"] : 104857600;
+ const allowSkipArchive = config["allow-skip-archive"] === true;
+ const defaultSkipArchive = config["default-skip-archive"] === true;
+ const defaultIfNoFiles = typeof config["default-if-no-files"] === "string" ? config["default-if-no-files"] : "error";
+ const allowedPaths = Array.isArray(config["allowed-paths"]) ? config["allowed-paths"] : [];
+ const filtersInclude = Array.isArray(config["filters-include"]) ? config["filters-include"] : [];
+ const filtersExclude = Array.isArray(config["filters-exclude"]) ? config["filters-exclude"] : [];
+ const isStaged = config["staged"] === true || process.env.GH_AW_SAFE_OUTPUTS_STAGED === "true";
core.info(`upload_artifact handler: max_uploads=${maxUploads}, default_retention=${defaultRetentionDays}, max_retention=${maxRetentionDays}`);
core.info(`Allowed paths: ${allowedPaths.length > 0 ? allowedPaths.join(", ") : "(none – all staging files allowed)"}`);
- // Load agent output to find upload_artifact records.
- const result = loadAgentOutput();
- if (!result.success) {
- core.info("No agent output found, skipping upload_artifact processing");
- core.setOutput("artifact_count", "0");
- return;
- }
-
- const uploadRequests = result.items.filter(/** @param {any} item */ item => item.type === "upload_artifact");
-
- if (uploadRequests.length === 0) {
- core.info("No upload_artifact records in agent output");
- core.setOutput("artifact_count", "0");
- return;
- }
-
- core.info(`Found ${uploadRequests.length} upload_artifact request(s)`);
-
- // Enforce max-uploads policy.
- if (uploadRequests.length > maxUploads) {
- core.setFailed(`${ERR_VALIDATION}: upload_artifact: ${uploadRequests.length} requests exceed max-uploads policy (${maxUploads}). Reduce the number of upload_artifact calls or raise max-uploads in workflow configuration.`);
- return;
- }
-
- if (!fs.existsSync(STAGING_DIR)) {
- core.warning(`Staging directory ${STAGING_DIR} does not exist. Did the model copy files there before calling upload_artifact?`);
- fs.mkdirSync(STAGING_DIR, { recursive: true });
- }
+ // Slot index tracks which slot each successful request maps to.
+ let slotIndex = 0;
/** @type {Record} resolver: tmpId → artifact name */
const resolver = {};
- let successfulUploads = 0;
+ /**
+ * Per-message handler: processes one upload_artifact request.
+ *
+ * Called by the safe_outputs handler manager for each `upload_artifact` message emitted
+ * by the model. State (slotIndex, resolver) is shared across calls via closure so that
+ * successive requests are assigned to sequential slot directories.
+ *
+ * @param {Object} message - The upload_artifact message from the model
+ * @param {Object} resolvedTemporaryIds - Map of already-resolved temporary IDs (unused here)
+ * @param {Map} temporaryIdMap - Shared temp-ID map; the handler does not modify it
+ * @returns {Promise<{success: boolean, error?: string, skipped?: boolean, tmpId?: string, artifactName?: string, slotIndex?: number}>}
+ */
+ return async function handleUploadArtifact(message, resolvedTemporaryIds, temporaryIdMap) {
+ if (slotIndex >= maxUploads) {
+ return {
+ success: false,
+ error: `${ERR_VALIDATION}: upload_artifact: exceeded max-uploads policy (${maxUploads}). Reduce the number of upload_artifact calls or raise max-uploads in workflow configuration.`,
+ };
+ }
- for (let i = 0; i < uploadRequests.length; i++) {
- const request = uploadRequests[i];
- core.info(`Processing upload_artifact request ${i + 1}/${uploadRequests.length}`);
+ const i = slotIndex;
// Resolve skip_archive.
- const skipArchive = typeof request.skip_archive === "boolean" ? request.skip_archive : defaultSkipArchive;
+ const skipArchive = typeof message.skip_archive === "boolean" ? message.skip_archive : defaultSkipArchive;
if (skipArchive && !allowSkipArchive) {
- core.setFailed(`${ERR_VALIDATION}: upload_artifact request ${i + 1}: skip_archive=true is not permitted. Enable it with allow.skip-archive: true in workflow configuration.`);
- return;
+ return {
+ success: false,
+ error: `${ERR_VALIDATION}: upload_artifact: skip_archive=true is not permitted. Enable it with allow.skip-archive: true in workflow configuration.`,
+ };
}
// Resolve files.
- const { files, error: resolveError } = resolveFiles(request, allowedPaths, filtersInclude, filtersExclude);
+ const { files, error: resolveError } = resolveFiles(message, allowedPaths, filtersInclude, filtersExclude);
if (resolveError) {
- core.setFailed(`${ERR_VALIDATION}: upload_artifact request ${i + 1}: ${resolveError}`);
- return;
+ return { success: false, error: `${ERR_VALIDATION}: upload_artifact: ${resolveError}` };
}
if (files.length === 0) {
if (defaultIfNoFiles === "ignore") {
- core.warning(`upload_artifact request ${i + 1}: no files matched, skipping (if-no-files=ignore)`);
- continue;
- } else {
- core.setFailed(`${ERR_VALIDATION}: upload_artifact request ${i + 1}: no files matched the selection criteria. Check allowed-paths, filters, or use defaults.if-no-files: ignore to skip empty uploads.`);
- return;
+ core.warning(`upload_artifact: no files matched, skipping (if-no-files=ignore)`);
+ return { success: false, skipped: true, error: "No files matched the selection criteria" };
}
+ return {
+ success: false,
+ error: `${ERR_VALIDATION}: upload_artifact: no files matched the selection criteria. Check allowed-paths, filters, or use defaults.if-no-files: ignore to skip empty uploads.`,
+ };
}
// Validate skip_archive file-count constraint.
const skipArchiveError = validateSkipArchive(skipArchive, files);
if (skipArchiveError) {
- core.setFailed(`${ERR_VALIDATION}: upload_artifact request ${i + 1}: ${skipArchiveError}`);
- return;
+ return { success: false, error: `${ERR_VALIDATION}: upload_artifact: ${skipArchiveError}` };
}
// Validate total size.
const totalSize = computeTotalSize(files);
if (totalSize > maxSizeBytes) {
- core.setFailed(`${ERR_VALIDATION}: upload_artifact request ${i + 1}: total file size ${totalSize} bytes exceeds max-size-bytes limit of ${maxSizeBytes} bytes.`);
- return;
+ return {
+ success: false,
+ error: `${ERR_VALIDATION}: upload_artifact: total file size ${totalSize} bytes exceeds max-size-bytes limit of ${maxSizeBytes} bytes.`,
+ };
}
// Compute retention days.
- const retentionDays = clampRetention(typeof request.retention_days === "number" ? request.retention_days : undefined, defaultRetentionDays, maxRetentionDays);
+ const retentionDays = clampRetention(typeof message.retention_days === "number" ? message.retention_days : undefined, defaultRetentionDays, maxRetentionDays);
// Derive artifact name and generate temporary ID.
- const artifactName = deriveArtifactName(request, i);
+ const artifactName = deriveArtifactName(message, i);
const tmpId = generateTemporaryArtifactId();
resolver[tmpId] = artifactName;
@@ -394,7 +376,7 @@ async function main() {
core.info(`Staged mode: skipping file staging for slot ${i}`);
}
- // Set step outputs for the conditional actions/upload-artifact steps in the job YAML.
+ // Set step outputs for the conditional actions/upload-artifact steps in the safe_outputs job.
core.setOutput(`slot_${i}_enabled`, "true");
core.setOutput(`slot_${i}_name`, artifactName);
core.setOutput(`slot_${i}_retention_days`, String(retentionDays));
@@ -402,26 +384,27 @@ async function main() {
core.setOutput(`slot_${i}_file_count`, String(files.length));
core.setOutput(`slot_${i}_size_bytes`, String(totalSize));
- successfulUploads++;
- }
+ slotIndex++;
- // Write resolver mapping so downstream steps can resolve tmp IDs to artifact names.
- try {
- fs.mkdirSync(path.dirname(RESOLVER_FILE), { recursive: true });
- fs.writeFileSync(RESOLVER_FILE, JSON.stringify(resolver, null, 2));
- core.info(`Wrote artifact resolver mapping to ${RESOLVER_FILE}`);
- } catch (err) {
- core.warning(`Failed to write artifact resolver file: ${getErrorMessage(err)}`);
- }
+ // Update the count output.
+ core.setOutput("upload_artifact_count", String(slotIndex));
- core.setOutput("artifact_count", String(successfulUploads));
- core.info(`upload_artifact handler complete: ${successfulUploads} artifact(s) staged`);
+ // Write/update resolver mapping so downstream steps can resolve tmp IDs to artifact names.
+ try {
+ fs.mkdirSync(path.dirname(RESOLVER_FILE), { recursive: true });
+ fs.writeFileSync(RESOLVER_FILE, JSON.stringify(resolver, null, 2));
+ core.info(`Wrote artifact resolver mapping to ${RESOLVER_FILE}`);
+ } catch (err) {
+ core.warning(`Failed to write artifact resolver file: ${getErrorMessage(err)}`);
+ }
- if (isStaged) {
- core.summary.addHeading("🎭 Staged Mode: Artifact Upload Preview", 2);
- core.summary.addRaw(`Would upload **${successfulUploads}** artifact(s). Files staged at ${STAGING_DIR}.`);
- await core.summary.write();
- }
+ return {
+ success: true,
+ tmpId,
+ artifactName,
+ slotIndex: i,
+ };
+ };
}
module.exports = { main };
diff --git a/actions/setup/js/upload_artifact.test.cjs b/actions/setup/js/upload_artifact.test.cjs
index cf1055530ec..4b6b3d39c4d 100644
--- a/actions/setup/js/upload_artifact.test.cjs
+++ b/actions/setup/js/upload_artifact.test.cjs
@@ -16,18 +16,8 @@ const RESOLVER_FILE = `${RUNNER_TEMP}/gh-aw/artifact-resolver.json`;
describe("upload_artifact.cjs", () => {
let mockCore;
- let agentOutputPath;
let originalEnv;
- /**
- * @param {object} data
- */
- function writeAgentOutput(data) {
- agentOutputPath = path.join(os.tmpdir(), `test_upload_artifact_${Date.now()}_${Math.random().toString(36).slice(2)}.json`);
- fs.writeFileSync(agentOutputPath, JSON.stringify(data));
- process.env.GH_AW_AGENT_OUTPUT = agentOutputPath;
- }
-
/**
* @param {string} relPath
* @param {string} content
@@ -39,12 +29,41 @@ describe("upload_artifact.cjs", () => {
}
/**
- * @returns {Promise}
+ * Build a config object (replaces ENV vars in the old standalone approach).
+ * @param {object} overrides
+ */
+ function buildConfig(overrides = {}) {
+ return {
+ "max-uploads": 3,
+ "default-retention-days": 7,
+ "max-retention-days": 30,
+ "max-size-bytes": 104857600,
+ ...overrides,
+ };
+ }
+
+ /**
+ * Run the handler against a list of messages using the new per-message pattern.
+ * Simulates what the handler manager does.
+ * @param {object} config
+ * @param {object[]} messages
+ * @returns {Promise